From d66bc464ed23ce6b5380cdea7108df266b45c312 Mon Sep 17 00:00:00 2001 From: Jon Date: Thu, 16 Apr 2026 05:01:55 +0000 Subject: [PATCH 01/22] Fix hermes ingestion so it ingests targets and spectroscopic data too. --- docs/managing_data/stream_pub_sub.rst | 6 +- .../alertstreams/hermes_ingester.py | 294 ++++++++++++++++++ .../{hermes.py => hermes_publisher.py} | 127 +------- .../processors/spectroscopy_processor.py | 44 ++- tom_dataproducts/sharing.py | 2 +- .../templatetags/dataproduct_extras.py | 13 +- tom_dataproducts/tests/test_sharing.py | 2 +- tom_targets/sharing.py | 2 +- tom_targets/views.py | 2 +- 9 files changed, 352 insertions(+), 140 deletions(-) create mode 100644 tom_dataproducts/alertstreams/hermes_ingester.py rename tom_dataproducts/alertstreams/{hermes.py => hermes_publisher.py} (68%) diff --git a/docs/managing_data/stream_pub_sub.rst b/docs/managing_data/stream_pub_sub.rst index 4575b75e8..08cb0c040 100644 --- a/docs/managing_data/stream_pub_sub.rst +++ b/docs/managing_data/stream_pub_sub.rst @@ -17,7 +17,7 @@ buttons to open your data in hermes with the form pre-filled - this is a good op to your message or data before sharing. To customize what data is sent to hermes from your ReducedDatum or Target models, please re-implement your own -``tom_dataproducts.alertstreams.hermes.HermesDataConverter`` and customize the `get_hermes_*` methods to pull out +``tom_dataproducts.alertstreams.hermes_publisher.HermesDataConverter`` and customize the `get_hermes_*` methods to pull out the proper data you want to share. You then provide the class dotpath to your custom class in your TOM's settings for hermes ``DATA_SHARING`` in the `DATA_CONVERTER_CLASS` key. This is especially useful if you store extra target or datum information in custom associated models in your TOM or with custom model field keys. For more information on @@ -41,7 +41,7 @@ for the various streams with which you wish to share data. 
'HERMES_API_KEY': os.getenv('HERMES_API_KEY', 'set HERMES_API_KEY value in environment'), 'DEFAULT_AUTHORS': os.getenv('HERMES_DEFAULT_AUTHORS', 'set your default authors here'), 'USER_TOPICS': ['hermes.test', 'tomtoolkit.test'] # You must have write permissions on these topics - 'DATA_CONVERTER_CLASS': 'tom_dataproducts.alertstreams.hermes.HermesDataConverter' + 'DATA_CONVERTER_CLASS': 'tom_dataproducts.alertstreams.hermes_publisher.HermesDataConverter' }, } @@ -81,7 +81,7 @@ out the ``tom-alertstreams`` `README uuid.UUID: + """Extract the UUID from the message metadata, or generate a UUID if none present in metadata. + + The headers property of the metadata is a list of tuples of the form [('key', value), ...]. + """ + # get the tuple with the uuid: key is '_id' + message_uuid_tuple = None + if metadata.headers: + message_uuid_tuple = next((item for item in metadata.headers if item[0] == '_id'), None) + if message_uuid_tuple: + message_uuid = uuid.UUID(bytes=message_uuid_tuple[1]) + else: + # this message header metadata didn't have UUID, so make one + message_uuid = uuid.uuid4() + return message_uuid + + +HERMES_SPECTROSCOPY_FILE_EXTENSIONS = ('.fits.fz', '.fits', '.csv', '.txt', '.ascii') + + +def hermes_alert_handler(alert, metadata): + """Example Alert Handler to record data streamed through Hermes as a new ReducedDatum. 
+ -- Reads Photometry and Spectroscopy Data (both inline and file-based) + -- Creates a new Target if no match is found for Target Name or aliases + -- Does not Ingest Data if exact match already exists + -- Requires 'tom_alertstreams' in settings.INSTALLED_APPS + -- Requires ALERT_STREAMS['topic_handlers'] in settings + """ + alert_as_dict = alert.content + alert_id = get_or_create_uuid_from_metadata(metadata) + photometry_table = alert_as_dict['data'].get('photometry') or [] + spectroscopy_table = alert_as_dict['data'].get('spectroscopy') or [] + target_table = alert_as_dict['data'].get('targets', []) + # Set a hermes_base_url to link ingested messages to + if hasattr(settings, 'DATA_SHARING'): + hermes_base_url = settings.DATA_SHARING.get('hermes', {}).get('BASE_URL', 'https://hermes.lco.global') + else: + hermes_base_url = 'https://hermes.lco.global' + hermes_message_url = urljoin(hermes_base_url, f'/message/{alert_id}') + + if not photometry_table and not spectroscopy_table: + return + + hermes_alert, created = AlertStreamMessage.objects.get_or_create( + topic=metadata.topic, exchange_status='ingested', message_id=alert_id) + + if not created: + # Only try to read and ingest the hermes message if we haven't already done so! 
+ return + + # Cache of target names in alert message -> Target model instance in TOM + target_cache = {} + + def resolve_target(target_name): + if target_name not in target_cache: + # We first attempt to match to an existing target in the TOM by target_name or any specified aliases + target_entry = next((t for t in target_table if t.get('name', '') == target_name), {}) if target_table else {} + aliases = target_entry.get('aliases', []) + query = Target.matches.match_name(target_name) + if not query: + for alias in aliases: + query = Target.matches.match_name(alias) + if query: + break + if query: + target_cache[target_name] = query[0] + # If we fail to find a local Target, we will create a target from what's in the alert message + elif target_table: + new_target = create_new_hermes_target(target_table, target_name) + if new_target is not None: + target_cache[target_name] = new_target + return target_cache.get(target_name) + + # Now we ingest all the photometry rows in the alert message + for row in photometry_table: + target = resolve_target(row['target_name']) + if target is None: + continue + + try: + obs_date = parse(row['date_obs']) + except ValueError: + continue + + datum = { + 'target': target, + 'data_type': 'photometry', + 'source_name': metadata.topic, + 'source_location': hermes_message_url, + 'timestamp': obs_date, + 'value': get_hermes_phot_value(row) + } + new_rd, created = ReducedDatum.objects.get_or_create(**datum) + if created: + new_rd.message.add(hermes_alert) + new_rd.save() + + # Now ingest all spectroscopy rows, either by downloading referenced files as a DataProduct or ingesting raw data + for row in spectroscopy_table: + target = resolve_target(row['target_name']) + if target is None: + continue + + try: + obs_date = parse(row['date_obs']) + except ValueError: + continue + + # If file_info exists on the spectroscopy row, attempt to get a data file url from there + file_url = _get_spectroscopy_file_url(row.get('file_info') or []) + if 
file_url: + _ingest_hermes_spectroscopy_file(file_url, row, target, hermes_alert, hermes_message_url) + # Otherwise, check if flux and wavelength arrays of data are specified in the row + elif row.get('flux') and row.get('wavelength'): + value = { + 'flux': row['flux'], + 'flux_units': row.get('flux_units', ''), + 'wavelength': row['wavelength'], + 'wavelength_units': row.get('wavelength_units', ''), + } + for key in ('telescope', 'instrument', 'reducer', 'observer', 'spec_type', 'flux_type', 'classification', + 'comments', 'exposure_time', 'setup', 'proprietary_period', 'proprietary_period_units'): + if row.get(key): + value[key] = row[key] + if row.get('flux_error'): + value['flux_error'] = row['flux_error'] + + datum = { + 'target': target, + 'data_type': 'spectroscopy', + 'source_name': metadata.topic, + 'source_location': hermes_message_url, + 'timestamp': obs_date, + 'value': value, + } + new_rd, created = ReducedDatum.objects.get_or_create(**datum) + if created: + new_rd.message.add(hermes_alert) + new_rd.save() + + +def _get_spectroscopy_file_url(file_info_list): + """Return the URL of the first entry in a file_info list whose filename matches a supported + spectroscopy file extension, or None if no match is found.""" + for entry in file_info_list: + url = entry.get('url', '') + filename = os.path.basename(urlparse(url).path) + for ext in HERMES_SPECTROSCOPY_FILE_EXTENSIONS: + if filename.lower().endswith(ext): + return url + return None + + +def _ingest_hermes_spectroscopy_file(url, spectroscopy_row, target, hermes_alert, alert_url): + """ + Downloads a spectroscopy file from the given URL, saves it as a DataProduct, and processes + it into ReducedDatum objects using the configured spectroscopy data processor. + Skips the download if a DataProduct with this URL as its product_id already exists. 
+ """ + filename = os.path.basename(urlparse(url).path) + + try: + response = requests.get(url) + response.raise_for_status() + except Exception as ex: + logger.error(f'Failed to download spectroscopy file from {url}: {repr(ex)}') + return + + spectroscopy_keys = ['date_obs', 'flux_units', 'wavelength_units', 'telescope', 'instrument', 'reducer', + 'observer', 'spec_type', 'flux_type', 'classification', 'comments', 'exposure_time', + 'setup', 'proprietary_period', 'proprietary_period_units'] + spectroscopy_data = {key: spectroscopy_row[key] for key in spectroscopy_keys if key in spectroscopy_row and spectroscopy_row[key]} + # Inject these two extra fields since they should be associated with the ReducedDatums somehow + spectroscopy_data['source_name'] = hermes_alert.topic + spectroscopy_data['source_location'] = alert_url + + try: + dp, created = DataProduct.objects.get_or_create( + product_id=url, + defaults={'target': target, 'data_product_type': 'spectroscopy', 'extra_data': json.dumps(spectroscopy_data)}, + ) + if created: + _, ext = os.path.splitext(filename) + with tempfile.NamedTemporaryFile(suffix=ext, delete=False) as tmpfile: + tmpfile.write(response.content) + tmpfile_path = tmpfile.name + try: + with open(tmpfile_path, 'rb') as f: + dp.data.save(filename, File(f), save=True) + finally: + os.unlink(tmpfile_path) + + reduced_datums = run_data_processor(dp) + for rd in reduced_datums: + rd.message.add(hermes_alert) + except Exception as ex: + logger.error(f'Failed to ingest spectroscopy file from {url}: {repr(ex)}') + + +def get_hermes_phot_value(phot_data): + """ + Convert Hermes Message format for a row of Photometry table into parameters accepted by the Reduced Datum model + :param phot_data: Dictionary containing Hermes Photometry table. 
+ :return: Dictionary containing properly formatted parameters for Reduced_Datum + """ + data_dictionary = { + 'error': phot_data.get('brightness_error', ''), + 'filter': phot_data['bandpass'], + 'telescope': phot_data.get('telescope', ''), + 'instrument': phot_data.get('instrument', ''), + 'unit': phot_data['brightness_unit'], + } + + if phot_data.get('brightness', None): + data_dictionary['magnitude'] = phot_data['brightness'] + elif phot_data.get('limiting_brightness', None): + data_dictionary['limit'] = phot_data['limiting_brightness'] + if phot_data.get('limiting_brightness_error'): + data_dictionary['limit_error'] = phot_data['limiting_brightness_error'] + if phot_data.get('limiting_brightness_unit'): + data_dictionary['limit_unit'] = phot_data['limiting_brightness_unit'] + + for key in ('observer', 'comments', 'exposure_time', 'catalog'): + if phot_data.get(key): + data_dictionary[key] = phot_data[key] + + return data_dictionary + + +def create_new_hermes_target(target_table, target_name=None, target_list_name=None): + """ + Ingest a target into your TOM from Hermes. + Takes a target_table and a target_name. If no target name is given, every target on the target table will be + ingested. + :param target_table: Hermes Target table from a Hermes Message + :param target_name: Name for individual target to ingest from target table. + :param target_list_name: Name of TargetList within which new target should be placed. 
+ :return: + """ + target = None + for hermes_target in target_table: + if target_name == hermes_target['name'] or target_name is None: + + new_target = {"name": hermes_target.pop('name')} + if "ra" in hermes_target and "dec" in hermes_target: + new_target['type'] = 'SIDEREAL' + new_target['ra'] = hermes_target.pop('ra') + new_target['dec'] = hermes_target.pop('dec') + new_target['pm_ra'] = hermes_target.pop('pm_ra', None) + new_target['pm_dec'] = hermes_target.pop('pm_dec', None) + new_target['epoch'] = hermes_target.pop('epoch', None) + elif "orbital_elements" in hermes_target: + orbital_elements = hermes_target.pop('orbital_elements') + new_target['type'] = 'NON_SIDEREAL' + new_target['epoch_of_elements'] = orbital_elements.pop('epoch_of_elements', None) + new_target['mean_anomaly'] = orbital_elements.pop('mean_anomaly', None) + new_target['arg_of_perihelion'] = orbital_elements.pop('argument_of_the_perihelion', None) + new_target['eccentricity'] = orbital_elements.pop('eccentricity', None) + new_target['lng_asc_node'] = orbital_elements.pop('longitude_of_the_ascending_node', None) + new_target['inclination'] = orbital_elements.pop('orbital_inclination', None) + new_target['semimajor_axis'] = orbital_elements.pop('semimajor_axis', None) + new_target['epoch_of_perihelion'] = orbital_elements.pop('epoch_of_perihelion', None) + new_target['perihdist'] = orbital_elements.pop('perihelion_distance', None) + aliases = hermes_target.pop('aliases', []) + target = Target(**new_target) + target.full_clean() + target.save(names=aliases, extras=hermes_target) + if target_list_name: + target_list, created = TargetList.objects.get_or_create(name=target_list_name) + if created: + logger.debug(f'New target_list created: {target_list_name}') + target_list.targets.add(target) + return target diff --git a/tom_dataproducts/alertstreams/hermes.py b/tom_dataproducts/alertstreams/hermes_publisher.py similarity index 68% rename from tom_dataproducts/alertstreams/hermes.py rename to 
tom_dataproducts/alertstreams/hermes_publisher.py index d3cd8b2c7..9c7cd10d8 100644 --- a/tom_dataproducts/alertstreams/hermes.py +++ b/tom_dataproducts/alertstreams/hermes_publisher.py @@ -1,17 +1,14 @@ import logging -from dateutil.parser import parse +import requests from django.conf import settings from django.core.cache import cache from django.utils.module_loading import import_string -# from hop.io import Metadata - from tom_alerts.models import AlertStreamMessage -from tom_targets.models import Target, TargetList -from tom_dataproducts.models import ReducedDatum +from tom_targets.models import Target + -import requests logger = logging.getLogger(__name__) logger.setLevel(logging.DEBUG) @@ -23,7 +20,7 @@ class HermesMessageException(Exception): def get_hermes_data_converter_class(): return import_string(settings.DATA_SHARING['hermes'].get( - 'DATA_CONVERTER_CLASS', 'tom_dataproducts.alertstreams.hermes.HermesDataConverter')) + 'DATA_CONVERTER_CLASS', 'tom_dataproducts.alertstreams.hermes_publisher.HermesDataConverter')) class HermesDataConverter(): @@ -86,7 +83,7 @@ def get_hermes_photometry(self, datum): else: phot_table_row['limiting_brightness'] = datum.value.get('limit', None) error_value = datum.value.get('error', datum.value.get('magnitude_error', None)) - if error_value is not None: + if error_value is not None and isinstance(error_value, (int, float)): phot_table_row['brightness_error'] = error_value return phot_table_row @@ -297,117 +294,3 @@ def get_hermes_topics(**kwargs): return topics -def hermes_alert_handler(alert, metadata): - """Example Alert Handler to record data streamed through Hermes as a new ReducedDatum. 
- -- Only Reads Photometry Data - -- Only ingests Data if exact match for Target Name - -- Does not Ingest Data if exact match already exists - -- Requires 'tom_alertstreams' in settings.INSTALLED_APPS - -- Requires ALERT_STREAMS['topic_handlers'] in settings - """ - alert_as_dict = alert.content - photometry_table = alert_as_dict['data'].get('photometry', None) - # target_table = alert_as_dict['data'].get('targets', None) - if photometry_table: - hermes_alert = AlertStreamMessage(topic=alert_as_dict['topic'], - exchange_status='ingested', - message_id=alert_as_dict.get("uuid", None)) - target_name = '' - query = [] - for row in photometry_table: - if row['target_name'] != target_name: - target_name = row['target_name'] - query = Target.matches.match_name(target_name) - if query: - target = query[0] - else: - # add conditional statements for whether to ingest a target here. - # target = create_new_hermes_target(target_table, target_name, target_list_name="new_hermes_object") - continue - - try: - obs_date = parse(row['date_obs']) - except ValueError: - continue - - datum = { - 'target': target, - 'data_type': 'photometry', - 'source_name': alert_as_dict['topic'], - 'source_location': 'Hermes via HOP', # TODO Add message URL here once message ID's exist - 'timestamp': obs_date, - 'value': get_hermes_phot_value(row) - } - new_rd, created = ReducedDatum.objects.get_or_create(**datum) - if created: - hermes_alert.save() - new_rd.message.add(hermes_alert) - new_rd.save() - - -def get_hermes_phot_value(phot_data): - """ - Convert Hermes Message format for a row of Photometry table into parameters accepted by the Reduced Datum model - :param phot_data: Dictionary containing Hermes Photometry table. 
- :return: Dictionary containing properly formatted parameters for Reduced_Datum - """ - data_dictionary = { - 'error': phot_data.get('brightness_error', ''), - 'filter': phot_data['bandpass'], - 'telescope': phot_data.get('telescope', ''), - 'instrument': phot_data.get('instrument', ''), - 'unit': phot_data['brightness_unit'], - } - - if phot_data.get('brightness', None): - data_dictionary['magnitude'] = phot_data['brightness'] - elif phot_data.get('limiting_brightness', None): - data_dictionary['limit'] = phot_data['limiting_brightness'] - - return data_dictionary - - -def create_new_hermes_target(target_table, target_name=None, target_list_name=None): - """ - Ingest a target into your TOM from Hermes. - Takes a target_table and a target_name. If no target name is given, every target on the target table will be - ingested. - :param target_table: Hermes Target table from a Hermes Message - :param target_name: Name for individual target to ingest from target table. - :param target_list_name: Name of TargetList within which new target should be placed. 
- :return: - """ - target = None - for hermes_target in target_table: - if target_name == hermes_target['name'] or target_name is None: - - new_target = {"name": hermes_target.pop('name')} - if "ra" in hermes_target and "dec" in hermes_target: - new_target['type'] = 'SIDEREAL' - new_target['ra'] = hermes_target.pop('ra') - new_target['dec'] = hermes_target.pop('dec') - new_target['pm_ra'] = hermes_target.pop('pm_ra', None) - new_target['pm_dec'] = hermes_target.pop('pm_dec', None) - new_target['epoch'] = hermes_target.pop('epoch', None) - elif "orbital_elements" in hermes_target: - orbital_elements = hermes_target.pop('orbital_elements') - new_target['type'] = 'NON_SIDEREAL' - new_target['epoch_of_elements'] = orbital_elements.pop('epoch_of_elements', None) - new_target['mean_anomaly'] = orbital_elements.pop('mean_anomaly', None) - new_target['arg_of_perihelion'] = orbital_elements.pop('argument_of_the_perihelion', None) - new_target['eccentricity'] = orbital_elements.pop('eccentricity', None) - new_target['lng_asc_node'] = orbital_elements.pop('longitude_of_the_ascending_node', None) - new_target['inclination'] = orbital_elements.pop('orbital_inclination', None) - new_target['semimajor_axis'] = orbital_elements.pop('semimajor_axis', None) - new_target['epoch_of_perihelion'] = orbital_elements.pop('epoch_of_perihelion', None) - new_target['perihdist'] = orbital_elements.pop('perihelion_distance', None) - aliases = hermes_target.pop('aliases', []) - target = Target(**new_target) - target.full_clean() - target.save(names=aliases, extras=hermes_target) - if target_list_name: - target_list, created = TargetList.objects.get_or_create(name=target_list_name) - if created: - logger.debug(f'New target_list created: {target_list_name}') - target_list.targets.add(target) - return target diff --git a/tom_dataproducts/processors/spectroscopy_processor.py b/tom_dataproducts/processors/spectroscopy_processor.py index 51e5cadbe..baf1bd7c2 100644 --- 
a/tom_dataproducts/processors/spectroscopy_processor.py +++ b/tom_dataproducts/processors/spectroscopy_processor.py @@ -1,3 +1,4 @@ +import json import mimetypes import numpy as np @@ -100,6 +101,10 @@ def _process_spectrum_from_plaintext(self, data_product): matching the name of a valid facility in the TOM. # http://docs.astropy.org/en/stable/io/ascii/read.html + Alternatively, It can also process raw ascii files of data if the other information has been provided in the + DataProduct's extra_data field as a json serialized dict of keys like 'date_obs', 'wavelength_units', + 'flux_units'. + Parameters ---------- :param data_product: Spectroscopic DataProduct which will be processed into a Spectrum @@ -116,8 +121,16 @@ def _process_spectrum_from_plaintext(self, data_product): data = astropy_ascii.read(data_product.data.path) if len(data) < 1: raise InvalidFileFormatException('Empty table or invalid file type') - facility_name = None + # Having a facility name of None will fail to ingest the data completely, so lets see if we can find a name + facility_name = '' date_obs = datetime.now() + # Attempt to get json serialized data within the DataProduct's extra_data field + try: + extra_data = json.loads(data_product.extra_data) + except json.JSONDecodeError: + # Field is empty or not a JSON serialized string + extra_data = None + comments = data.meta.get('comments', []) for comment in comments: @@ -127,11 +140,32 @@ def _process_spectrum_from_plaintext(self, data_product): facility_name = comment.split(':')[1].strip() facility = get_service_class(facility_name)() if facility_name else None - wavelength_units = facility.get_wavelength_units() if facility else self.DEFAULT_WAVELENGTH_UNITS - flux_constant = facility.get_flux_constant() if facility else self.DEFAULT_FLUX_CONSTANT + # Try to find what is needed within the text file itself first + # If that fails, try to find it in the data products extra_data + # If that fails, use the default values + if facility: + 
wavelength_units = facility.get_wavelength_units() + elif extra_data and 'wavelength_units' in extra_data: + wavelength_units = units.Unit(extra_data['wavelength_units']) + else: + wavelength_units = self.DEFAULT_WAVELENGTH_UNITS + if facility: + flux_constant = facility.get_flux_constant() + elif extra_data and 'flux_units' in extra_data: + flux_constant = units.Unit(extra_data['flux_units']) + else: + flux_constant = self.DEFAULT_FLUX_CONSTANT + + try: + spectral_axis = np.array(data['wavelength']) * wavelength_units + flux = np.array(data['flux']) * flux_constant + except KeyError: + spectral_axis = np.array(data.columns[0]) * wavelength_units + flux = np.array(data.columns[1]) * flux_constant - spectral_axis = np.array(data['wavelength']) * wavelength_units - flux = np.array(data['flux']) * flux_constant spectrum = Spectrum(flux=flux, spectral_axis=spectral_axis) + if not facility_name and extra_data and 'source_name' in extra_data: + facility_name = extra_data.get('source_name', '') + return spectrum, Time(date_obs).to_datetime(), facility_name diff --git a/tom_dataproducts/sharing.py b/tom_dataproducts/sharing.py index 58ce87088..83422aa9e 100644 --- a/tom_dataproducts/sharing.py +++ b/tom_dataproducts/sharing.py @@ -14,7 +14,7 @@ from tom_targets.models import Target from tom_dataproducts.models import DataProduct, ReducedDatum -from tom_dataproducts.alertstreams.hermes import publish_to_hermes, BuildHermesMessage, get_hermes_topics +from tom_dataproducts.alertstreams.hermes_publisher import publish_to_hermes, BuildHermesMessage, get_hermes_topics from tom_dataproducts.serializers import DataProductSerializer, ReducedDatumSerializer diff --git a/tom_dataproducts/templatetags/dataproduct_extras.py b/tom_dataproducts/templatetags/dataproduct_extras.py index a7085eb09..313b5c5e7 100644 --- a/tom_dataproducts/templatetags/dataproduct_extras.py +++ b/tom_dataproducts/templatetags/dataproduct_extras.py @@ -383,18 +383,19 @@ def spectroscopy_for_target(context, 
target, dataproduct=None): spectroscopy_data_type = settings.DATA_PRODUCT_TYPES['spectroscopy'][0] except (AttributeError, KeyError): spectroscopy_data_type = 'spectroscopy' - spectral_dataproducts = DataProduct.objects.filter(target=target, - data_product_type=spectroscopy_data_type) - if dataproduct: - spectral_dataproducts = DataProduct.objects.get(data_product=dataproduct) plot_data = [] if settings.TARGET_PERMISSIONS_ONLY: - datums = ReducedDatum.objects.filter(data_product__in=spectral_dataproducts) + datums = ReducedDatum.objects.filter(target=target, data_type=spectroscopy_data_type) else: datums = get_objects_for_user(context['request'].user, 'tom_dataproducts.view_reduceddatum', - klass=ReducedDatum.objects.filter(data_product__in=spectral_dataproducts)) + klass=ReducedDatum.objects.filter(target=target, data_type=spectroscopy_data_type)) + + # If a dataproduct is specified, filter on that specific dataproduct + if dataproduct: + datums.filter(data_product=dataproduct) + for datum in datums: deserialized = SpectrumSerializer().deserialize(datum.value) plot_data.append(go.Scatter( diff --git a/tom_dataproducts/tests/test_sharing.py b/tom_dataproducts/tests/test_sharing.py index 8aac8bb30..3ad12aa0f 100644 --- a/tom_dataproducts/tests/test_sharing.py +++ b/tom_dataproducts/tests/test_sharing.py @@ -1,6 +1,6 @@ from django.test import TestCase, override_settings -from tom_dataproducts.alertstreams.hermes import create_hermes_alert, BuildHermesMessage, HermesMessageException +from tom_dataproducts.alertstreams.hermes_publisher import create_hermes_alert, BuildHermesMessage, HermesMessageException from tom_dataproducts.models import ReducedDatum from tom_observations.tests.utils import FakeRoboticFacility from tom_observations.tests.factories import SiderealTargetFactory, ObservingRecordFactory diff --git a/tom_targets/sharing.py b/tom_targets/sharing.py index ebeba11f9..11a99b699 100644 --- a/tom_targets/sharing.py +++ b/tom_targets/sharing.py @@ -8,7 +8,7 @@ 
from tom_dataproducts.sharing import (check_for_share_safe_datums, share_data_with_tom, get_destination_target, sharing_feedback_converter) from tom_dataproducts.models import ReducedDatum -from tom_dataproducts.alertstreams.hermes import publish_to_hermes, BuildHermesMessage +from tom_dataproducts.alertstreams.hermes_publisher import publish_to_hermes, BuildHermesMessage def share_target_and_all_data(share_destination, target): diff --git a/tom_targets/views.py b/tom_targets/views.py index 18c0b2024..4c4c09c9f 100644 --- a/tom_targets/views.py +++ b/tom_targets/views.py @@ -62,7 +62,7 @@ from tom_observations.utils import get_sidereal_visibility from tom_targets.seed import seed_messier_targets from tom_targets.tables import TargetTable, TargetGroupTable -from tom_dataproducts.alertstreams.hermes import BuildHermesMessage, preload_to_hermes +from tom_dataproducts.alertstreams.hermes_publisher import BuildHermesMessage, preload_to_hermes logger = logging.getLogger(__name__) logger.setLevel(logging.DEBUG) From 275e398695a1b1784a5d659958d22d6761cc5374 Mon Sep 17 00:00:00 2001 From: Jon Date: Thu, 16 Apr 2026 05:12:19 +0000 Subject: [PATCH 02/22] Try to make it happy with docstrings --- tom_dataproducts/alertstreams/hermes_ingester.py | 13 +++++++++---- 1 file changed, 9 insertions(+), 4 deletions(-) diff --git a/tom_dataproducts/alertstreams/hermes_ingester.py b/tom_dataproducts/alertstreams/hermes_ingester.py index f3a088c92..dfe5d8f5b 100644 --- a/tom_dataproducts/alertstreams/hermes_ingester.py +++ b/tom_dataproducts/alertstreams/hermes_ingester.py @@ -20,7 +20,8 @@ def get_or_create_uuid_from_metadata(metadata) -> uuid.UUID: - """Extract the UUID from the message metadata, or generate a UUID if none present in metadata. + """ + Extract the UUID from the message metadata, or generate a UUID if none present in metadata. The headers property of the metadata is a list of tuples of the form [('key', value), ...]. 
""" @@ -40,7 +41,9 @@ def get_or_create_uuid_from_metadata(metadata) -> uuid.UUID: def hermes_alert_handler(alert, metadata): - """Example Alert Handler to record data streamed through Hermes as a new ReducedDatum. + """ + Example Alert Handler to record data streamed through Hermes as a new ReducedDatum. + -- Reads Photometry and Spectroscopy Data (both inline and file-based) -- Creates a new Target if no match is found for Target Name or aliases -- Does not Ingest Data if exact match already exists @@ -161,8 +164,10 @@ def resolve_target(target_name): def _get_spectroscopy_file_url(file_info_list): - """Return the URL of the first entry in a file_info list whose filename matches a supported - spectroscopy file extension, or None if no match is found.""" + """ + Return the URL of the first entry in a file_info list whose filename matches a supported + spectroscopy file extension, or None if no match is found. + """ for entry in file_info_list: url = entry.get('url', '') filename = os.path.basename(urlparse(url).path) From 918d6151671cfede4ea4906db16a0e3348673fd8 Mon Sep 17 00:00:00 2001 From: Jon Date: Thu, 16 Apr 2026 05:19:53 +0000 Subject: [PATCH 03/22] appease the line length gods --- tom_dataproducts/alertstreams/hermes_ingester.py | 9 ++++++--- tom_dataproducts/alertstreams/hermes_publisher.py | 4 ---- tom_dataproducts/templatetags/dataproduct_extras.py | 3 ++- tom_dataproducts/tests/test_sharing.py | 3 ++- 4 files changed, 10 insertions(+), 9 deletions(-) diff --git a/tom_dataproducts/alertstreams/hermes_ingester.py b/tom_dataproducts/alertstreams/hermes_ingester.py index dfe5d8f5b..bf6305187 100644 --- a/tom_dataproducts/alertstreams/hermes_ingester.py +++ b/tom_dataproducts/alertstreams/hermes_ingester.py @@ -78,7 +78,8 @@ def hermes_alert_handler(alert, metadata): def resolve_target(target_name): if target_name not in target_cache: # We first attempt to match to an existing target in the TOM by target_name or any specified aliases - target_entry = 
next((t for t in target_table if t.get('name', '') == target_name), {}) if target_table else {} + target_entry = next((t for t in target_table if t.get('name', '') + == target_name), {}) if target_table else {} aliases = target_entry.get('aliases', []) query = Target.matches.match_name(target_name) if not query: @@ -195,7 +196,8 @@ def _ingest_hermes_spectroscopy_file(url, spectroscopy_row, target, hermes_alert spectroscopy_keys = ['date_obs', 'flux_units', 'wavelength_units', 'telescope', 'instrument', 'reducer', 'observer', 'spec_type', 'flux_type', 'classification', 'comments', 'exposure_time', 'setup', 'proprietary_period', 'proprietary_period_units'] - spectroscopy_data = {key: spectroscopy_row[key] for key in spectroscopy_keys if key in spectroscopy_row and spectroscopy_row[key]} + spectroscopy_data = {key: spectroscopy_row[key] + for key in spectroscopy_keys if key in spectroscopy_row and spectroscopy_row[key]} # Inject these two extra fields since they should be associated with the ReducedDatums somehow spectroscopy_data['source_name'] = hermes_alert.topic spectroscopy_data['source_location'] = alert_url @@ -203,7 +205,8 @@ def _ingest_hermes_spectroscopy_file(url, spectroscopy_row, target, hermes_alert try: dp, created = DataProduct.objects.get_or_create( product_id=url, - defaults={'target': target, 'data_product_type': 'spectroscopy', 'extra_data': json.dumps(spectroscopy_data)}, + defaults={'target': target, 'data_product_type': 'spectroscopy', + 'extra_data': json.dumps(spectroscopy_data)}, ) if created: _, ext = os.path.splitext(filename) diff --git a/tom_dataproducts/alertstreams/hermes_publisher.py b/tom_dataproducts/alertstreams/hermes_publisher.py index 9c7cd10d8..4085fddae 100644 --- a/tom_dataproducts/alertstreams/hermes_publisher.py +++ b/tom_dataproducts/alertstreams/hermes_publisher.py @@ -8,8 +8,6 @@ from tom_alerts.models import AlertStreamMessage from tom_targets.models import Target - - logger = logging.getLogger(__name__) 
logger.setLevel(logging.DEBUG) @@ -292,5 +290,3 @@ def get_hermes_topics(**kwargs): except (KeyError, requests.exceptions.JSONDecodeError): pass return topics - - diff --git a/tom_dataproducts/templatetags/dataproduct_extras.py b/tom_dataproducts/templatetags/dataproduct_extras.py index 313b5c5e7..52b912840 100644 --- a/tom_dataproducts/templatetags/dataproduct_extras.py +++ b/tom_dataproducts/templatetags/dataproduct_extras.py @@ -390,7 +390,8 @@ def spectroscopy_for_target(context, target, dataproduct=None): else: datums = get_objects_for_user(context['request'].user, 'tom_dataproducts.view_reduceddatum', - klass=ReducedDatum.objects.filter(target=target, data_type=spectroscopy_data_type)) + klass=ReducedDatum.objects.filter(target=target, + data_type=spectroscopy_data_type)) # If a dataproduct is specified, filter on that specific dataproduct if dataproduct: diff --git a/tom_dataproducts/tests/test_sharing.py b/tom_dataproducts/tests/test_sharing.py index 3ad12aa0f..346f37591 100644 --- a/tom_dataproducts/tests/test_sharing.py +++ b/tom_dataproducts/tests/test_sharing.py @@ -1,6 +1,7 @@ from django.test import TestCase, override_settings -from tom_dataproducts.alertstreams.hermes_publisher import create_hermes_alert, BuildHermesMessage, HermesMessageException +from tom_dataproducts.alertstreams.hermes_publisher import (create_hermes_alert, BuildHermesMessage, + HermesMessageException) from tom_dataproducts.models import ReducedDatum from tom_observations.tests.utils import FakeRoboticFacility from tom_observations.tests.factories import SiderealTargetFactory, ObservingRecordFactory From 388e2cdbcf2ef20704a13c8f2683a4bb5789f040 Mon Sep 17 00:00:00 2001 From: Jon Date: Sat, 18 Apr 2026 05:14:35 +0000 Subject: [PATCH 04/22] Fix up re-ingesting same datums from different topics to not crash, and fix up sending hermes-received spectra files out over hermes --- .../alertstreams/hermes_ingester.py | 29 ++++++++------ .../alertstreams/hermes_publisher.py | 40 
++++++++++++++----- 2 files changed, 46 insertions(+), 23 deletions(-) diff --git a/tom_dataproducts/alertstreams/hermes_ingester.py b/tom_dataproducts/alertstreams/hermes_ingester.py index bf6305187..97cb29618 100644 --- a/tom_dataproducts/alertstreams/hermes_ingester.py +++ b/tom_dataproducts/alertstreams/hermes_ingester.py @@ -110,12 +110,14 @@ def resolve_target(target_name): datum = { 'target': target, 'data_type': 'photometry', - 'source_name': metadata.topic, - 'source_location': hermes_message_url, 'timestamp': obs_date, 'value': get_hermes_phot_value(row) } - new_rd, created = ReducedDatum.objects.get_or_create(**datum) + datum_defaults = { + 'source_name': metadata.topic, + 'source_location': hermes_message_url, + } + new_rd, created = ReducedDatum.objects.get_or_create(**datum, defaults=datum_defaults) if created: new_rd.message.add(hermes_alert) new_rd.save() @@ -153,12 +155,14 @@ def resolve_target(target_name): datum = { 'target': target, 'data_type': 'spectroscopy', - 'source_name': metadata.topic, - 'source_location': hermes_message_url, 'timestamp': obs_date, 'value': value, } - new_rd, created = ReducedDatum.objects.get_or_create(**datum) + datum_defaults = { + 'source_name': metadata.topic, + 'source_location': hermes_message_url, + } + new_rd, created = ReducedDatum.objects.get_or_create(**datum, defaults=datum_defaults) if created: new_rd.message.add(hermes_alert) new_rd.save() @@ -204,9 +208,8 @@ def _ingest_hermes_spectroscopy_file(url, spectroscopy_row, target, hermes_alert try: dp, created = DataProduct.objects.get_or_create( - product_id=url, - defaults={'target': target, 'data_product_type': 'spectroscopy', - 'extra_data': json.dumps(spectroscopy_data)}, + product_id=url, target=target, data_product_type='spectroscopy', + defaults={'extra_data': json.dumps(spectroscopy_data)}, ) if created: _, ext = os.path.splitext(filename) @@ -218,10 +221,10 @@ def _ingest_hermes_spectroscopy_file(url, spectroscopy_row, target, hermes_alert 
dp.data.save(filename, File(f), save=True) finally: os.unlink(tmpfile_path) - - reduced_datums = run_data_processor(dp) - for rd in reduced_datums: - rd.message.add(hermes_alert) + # only re-ingest the file if its a new dataproduct for us + reduced_datums = run_data_processor(dp) + for rd in reduced_datums: + rd.message.add(hermes_alert) except Exception as ex: logger.error(f'Failed to ingest spectroscopy file from {url}: {repr(ex)}') diff --git a/tom_dataproducts/alertstreams/hermes_publisher.py b/tom_dataproducts/alertstreams/hermes_publisher.py index 4085fddae..cefd497c3 100644 --- a/tom_dataproducts/alertstreams/hermes_publisher.py +++ b/tom_dataproducts/alertstreams/hermes_publisher.py @@ -1,4 +1,5 @@ import logging +import json import requests from django.conf import settings @@ -73,7 +74,7 @@ def get_hermes_photometry(self, datum): 'instrument': datum.value.get('instrument'), 'bandpass': datum.value.get('filter', ''), } - brightness_unit = convert_astropy_brightness_to_hermes(datum.value.get('unit')) + brightness_unit = convert_astropy_brightness_unit_to_hermes(datum.value.get('unit')) if brightness_unit: phot_table_row['brightness_unit'] = brightness_unit if datum.value.get('magnitude', None): @@ -123,17 +124,27 @@ def get_hermes_spectroscopy(self, datum): logger.error(msg) raise HermesMessageException(msg) + # Make sure we have either telescope or instrument set. 
If not, attempt to pull them from the data product itself + try: + dp_extras = json.loads(datum.data_product.extra_data) + except (json.JSONDecodeError, ValueError): + dp_extras = {} + telescope = datum.value.get('telescope') or dp_extras.get('telescope') + instrument = datum.value.get('instrument') or dp_extras.get('instrument') + reducer = datum.value.get('reducer') or dp_extras.get('reducer') + observer = datum.value.get('observer') or dp_extras.get('observer') + spectroscopy_table_row = { 'target_name': datum.target.name, 'date_obs': datum.timestamp.isoformat(), - 'telescope': datum.value.get('telescope'), - 'instrument': datum.value.get('instrument'), - 'reducer': datum.value.get('reducer'), - 'observer': datum.value.get('observer'), + 'telescope': telescope, + 'instrument': instrument, + 'reducer': reducer, + 'observer': observer, 'flux': flux_list, 'wavelength': wavelength_list, - 'flux_units': datum.value.get('flux_units'), - 'wavelength_units': convert_astropy_wavelength_to_hermes(datum.value.get('wavelength_units')), + 'flux_units': convert_astropy_flux_unit_to_hermes(datum.value.get('flux_units')), + 'wavelength_units': convert_astropy_wavelength_unit_to_hermes(datum.value.get('wavelength_units')), } if flux_error_list: spectroscopy_table_row['flux_error'] = flux_error_list @@ -141,7 +152,7 @@ def get_hermes_spectroscopy(self, datum): return spectroscopy_table_row -def convert_astropy_brightness_to_hermes(brightness_unit): +def convert_astropy_brightness_unit_to_hermes(brightness_unit): if not brightness_unit: return brightness_unit elif brightness_unit.upper() == 'AB' or brightness_unit.upper() == 'ABFLUX': @@ -150,12 +161,21 @@ def convert_astropy_brightness_to_hermes(brightness_unit): return brightness_unit -def convert_astropy_wavelength_to_hermes(wavelength_unit): +def convert_astropy_flux_unit_to_hermes(flux_unit): + if not flux_unit: + return flux_unit + elif flux_unit == 'erg / (Angstrom s cm2)': + return 'erg / s / cm² / Å' + else: + return 
flux_unit + + +def convert_astropy_wavelength_unit_to_hermes(wavelength_unit): if not wavelength_unit: return wavelength_unit elif wavelength_unit.lower() == 'angstrom' or wavelength_unit == 'AA': return 'Å' - elif wavelength_unit.lower() == 'micron': + elif wavelength_unit.lower() in ['micron', 'micrometer', 'um']: return 'µm' elif wavelength_unit.lower() == 'hertz': return 'Hz' From 0223913171d363714ffba3ae9c2c58e09731764b Mon Sep 17 00:00:00 2001 From: Jon Date: Mon, 20 Apr 2026 04:10:07 +0000 Subject: [PATCH 05/22] Thank goodness for linters --- tom_dataproducts/alertstreams/hermes_publisher.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/tom_dataproducts/alertstreams/hermes_publisher.py b/tom_dataproducts/alertstreams/hermes_publisher.py index cefd497c3..7c6c59408 100644 --- a/tom_dataproducts/alertstreams/hermes_publisher.py +++ b/tom_dataproducts/alertstreams/hermes_publisher.py @@ -124,7 +124,7 @@ def get_hermes_spectroscopy(self, datum): logger.error(msg) raise HermesMessageException(msg) - # Make sure we have either telescope or instrument set. If not, attempt to pull them from the data product itself + # Make sure we have either telescope or instrument set. 
If not, attempt to pull them from the dataproduct try: dp_extras = json.loads(datum.data_product.extra_data) except (json.JSONDecodeError, ValueError): @@ -132,7 +132,7 @@ def get_hermes_spectroscopy(self, datum): telescope = datum.value.get('telescope') or dp_extras.get('telescope') instrument = datum.value.get('instrument') or dp_extras.get('instrument') reducer = datum.value.get('reducer') or dp_extras.get('reducer') - observer = datum.value.get('observer') or dp_extras.get('observer') + observer = datum.value.get('observer') or dp_extras.get('observer') spectroscopy_table_row = { 'target_name': datum.target.name, From 477fd927499e60fb2f558c25d571590a32d88a55 Mon Sep 17 00:00:00 2001 From: Jon Date: Mon, 20 Apr 2026 20:19:55 +0000 Subject: [PATCH 06/22] Fix bug in fallback of getting hermes spectro data --- tom_dataproducts/alertstreams/hermes_publisher.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tom_dataproducts/alertstreams/hermes_publisher.py b/tom_dataproducts/alertstreams/hermes_publisher.py index 7c6c59408..492763a5c 100644 --- a/tom_dataproducts/alertstreams/hermes_publisher.py +++ b/tom_dataproducts/alertstreams/hermes_publisher.py @@ -127,7 +127,7 @@ def get_hermes_spectroscopy(self, datum): # Make sure we have either telescope or instrument set. 
If not, attempt to pull them from the dataproduct try: dp_extras = json.loads(datum.data_product.extra_data) - except (json.JSONDecodeError, ValueError): + except (json.JSONDecodeError, ValueError, AttributeError): dp_extras = {} telescope = datum.value.get('telescope') or dp_extras.get('telescope') instrument = datum.value.get('instrument') or dp_extras.get('instrument') From d9fd1c1c75e73c376bceb576997dde1ece916602 Mon Sep 17 00:00:00 2001 From: "William (Lindy) Lindstrom" Date: Fri, 1 May 2026 14:31:30 -0700 Subject: [PATCH 07/22] Add SharingBackend registry to tom_common - Adds integration point for `SharingBackend` subclasses - Adds `tom_common.sharing.SharingBackend` abstract base class - tom_hermes will define a subclass - tom_common.sharing.TomToolkitSharingBackend defined for TOM-TOM sharing - The `SharingBackend` ABC defines `get_destination_choices`, `share`, `validate_credentials`), `get_sharing_backends()` / `get_sharing_backend()` helper methods - `TomToolkitSharingBackend` (supports both DRF API_KEY and HTTP Basic auth for TOM-to-TOM sharing), and a `sharing_backends()` integration point on `TomCommonConfig`. --- tom_common/apps.py | 15 ++ tom_common/sharing.py | 497 +++++++++++++++++++++++++++++++++++++ tom_common/test_sharing.py | 173 +++++++++++++ 3 files changed, 685 insertions(+) create mode 100644 tom_common/sharing.py create mode 100644 tom_common/test_sharing.py diff --git a/tom_common/apps.py b/tom_common/apps.py index 8eb4ddd90..c7acec98b 100644 --- a/tom_common/apps.py +++ b/tom_common/apps.py @@ -32,3 +32,18 @@ def profile_details(self): """ return [{'partial': 'tom_common/partials/user_data.html', 'context': 'tom_common.templatetags.user_extras.user_data'}] + + def sharing_backends(self): + """ + Integration point for registering data-sharing backends. + + Called by ``tom_common.sharing.get_sharing_backends()`` when it iterates installed AppConfigs to build the + registry of SharingBackend classes. 
Each returned dict has a ``class`` key pointing to the dot-separated path + of a SharingBackend subclass. + + TOM Toolkit includes ``TomToolkitSharingBackend`` (for sharing with another TOM Toolkit-based TOM). Other apps + (e.g., ``tom_hermes``) register their own SharingBackends by adding this method to their AppConfig. + + Additional configuration is required in ``settings.DATA_SHARING``. + """ + return [{'class': 'tom_common.sharing.TomToolkitSharingBackend'}] diff --git a/tom_common/sharing.py b/tom_common/sharing.py new file mode 100644 index 000000000..38bc2cebe --- /dev/null +++ b/tom_common/sharing.py @@ -0,0 +1,497 @@ +""" +Sharing-backends integration point. + +A SharingBackend is a class that publishes TOM data (Targets, DataProducts, +ReducedDatums) to an external destination (e.g., HERMES, another TOM). + +### Terminology + +- "registry" — in this module, a Python dict that maps each registered + backend's ``name`` string to the ``SharingBackend`` subclass itself. + The registry is rebuilt by ``get_sharing_backends()`` on every call. + We call it a "registry" because callers look a backend up by name and + get back the class, like looking up a name in a phone book. + +### Discovery + +Backends are discovered at runtime by iterating installed AppConfigs +(``django.apps.apps.get_app_configs()``) and calling each app's +``sharing_backends()`` method if it has one. This is the same plug-in +mechanism used by ``tom_dataservices.dataservices.get_data_service_classes()``. + +### Consumers + +``tom_dataproducts.views.DataShareView``, ``tom_dataproducts.forms.DataShareForm``, +and ``tom_targets.sharing`` call ``get_sharing_backend(name)().share(...)`` +rather than hardcoding destination-specific branches. + +### Included here + +- ``TomToolkitSharingBackend`` — publishes to another TOM Toolkit-based TOM + via its HTTP API. Registered by ``tom_common.apps.TomCommonConfig.sharing_backends()``. 
+ +### Registered elsewhere + +- ``tom_hermes.sharing.HermesSharingBackend`` — publishes to HERMES. + Registered by ``tom_hermes.apps.TomHermesConfig.sharing_backends()``. +""" +from __future__ import annotations + +import logging +import os +from abc import ABC, abstractmethod + +import requests +from django.apps import apps as django_apps +from django.conf import settings +from django.contrib.auth.models import User +from django.db.models import QuerySet +from django.utils.module_loading import import_string + +# Local (non-TOM) imports above; TOM Toolkit imports below follow the project +# convention of grouping stdlib / third-party / Django / TOM Toolkit. +from tom_dataproducts.models import ReducedDatum +from tom_dataproducts.serializers import DataProductSerializer, ReducedDatumSerializer +from tom_targets.models import Target + +logger = logging.getLogger(__name__) + + +class SharingBackend(ABC): + """Base class (abstract) for a data-sharing destination. + + Each subclass represents one destination family: one class for HERMES, + one for TOM-to-TOM, and so on. Instances are short-lived and created + per ``share()`` invocation by the consumer code. + """ + + # ``name`` is: + # - the lookup key in the dict returned by ``get_sharing_backends()``; + # - the prefix of the form's ``share_destination`` value, which is + # formatted as the string ``':'`` + # (e.g. ``'hermes:gw.lvk.public'``, or ``'tom:tom_b'``); + # - the value that ``DataShareView.post()`` parses out of + # ``share_destination`` to look up this backend's class and + # dispatch to its ``share()`` method. + # Required: set to a short, unique, machine-readable string in every subclass. + name: str = '' + + # ``verbose_name`` is the human-readable label shown as the heading above + # this backend's destinations in the share-destination dropdown. For a + # backend with multiple sub-destinations (e.g., several HERMES topics), + # the dropdown groups them under this heading. 
+ # Required: set in every subclass. + verbose_name: str = '' + + @classmethod + def get_destination_choices(cls, user: User | None = None) -> list: + """Return the (value, label) pairs that populate this backend's options in the share-destination dropdown. + + Called by ``DataShareForm.__init__`` at form-render time. Each + returned ``value`` is a string formatted as + ``':'`` so ``DataShareView.post()`` can + parse the prefix and look up this class in the registry. The + returned ``label`` is what the user sees. + + Implementations typically call the destination (e.g., to list + topics), or read ``settings.DATA_SHARING``, to enumerate configured + sub-destinations. + """ + raise NotImplementedError + + @abstractmethod + def share(self, form_data: dict, *, + reduced_datums: QuerySet | None = None, + targets: QuerySet | None = None, + data_products: QuerySet | None = None, + user: User | None = None, + **kwargs) -> dict: + """Execute the share operation. + + Called by ``DataShareView.post()`` (and by the shims in + ``tom_dataproducts.sharing``) after a successful form submission. + Returns a feedback dict with at least + ``{'status': 'success'|'error', 'message': str}`` (older code + returns just ``{'message': str}``; both are tolerated by the + sharing feedback handler). + """ + + def validate_credentials(self, user: User | None = None) -> bool: + """Check that the user has credentials configured for this backend. + + Called by ``DataShareForm.clean()`` when the form is about to + submit. Default returns True (the backend needs no per-user + credentials). Subclasses override to check e.g. that a + ``HermesProfile`` API key is set for the current user. + """ + return True + + +def get_sharing_backends() -> dict: + """Build and return the registry of all SharingBackend subclasses registered by installed apps. + + The registry is a dict that maps each backend's ``name`` string to the + ``SharingBackend`` subclass itself. 
It is rebuilt on every call so + tests that patch installed apps see the patched state. + + How it is built: + + 1. Iterate ``django_apps.get_app_configs()``. + 2. For each AppConfig that has a ``sharing_backends()`` method, call it. + The method returns a list of dicts of the form + ``[{'class': dotted_path}, ...]``. + 3. Import each class by its dotted path. Each class's ``name`` attribute + becomes the registry key. + + AppConfigs without the method are skipped (``AttributeError`` is + caught). Import failures are logged and the offending entry is + skipped, so one broken backend does not prevent the others from being + registered. + """ + # Dict keyed by SharingBackend.name; value is the SharingBackend subclass itself. + registry: dict = {} + for app in django_apps.get_app_configs(): + try: + entries = app.sharing_backends() + except AttributeError: + # This AppConfig does not declare the integration point; that is fine. + continue + for entry in entries or []: + try: + clazz = import_string(entry['class']) + except ImportError as exc: + logger.warning('Could not import SharingBackend %s for %s: %s', + entry.get('class'), app.name, exc) + continue + registry[clazz.name] = clazz + return registry + + +def get_sharing_backend(name: str) -> type: + """Look up one SharingBackend class by its ``name`` attribute. + + Builds the registry via ``get_sharing_backends()`` and does a dict + lookup. Raises ``ImportError`` with a message that tells the user + how to fix the missing backend (install the providing app). Uses the + same structure as + ``tom_dataservices.dataservices.get_data_service_class`` so the + behavior is consistent across integration points. + """ + registry = get_sharing_backends() + try: + return registry[name] + except KeyError: + raise ImportError( + f"Could not find a SharingBackend named '{name}'. " + f"Did you install the app that provides it, and add it to INSTALLED_APPS?" 
+ ) + + +class TomToolkitSharingBackend(SharingBackend): + """SharingBackend for publishing to another TOM Toolkit-based TOM. + + Destinations come from ``settings.DATA_SHARING``. Each entry whose + value dict has ``BASE_URL`` and does NOT have ``HERMES_API_KEY`` is + treated as a TOM destination. (HERMES entries are identified by the + presence of ``HERMES_API_KEY`` and are handled by + ``HermesSharingBackend`` in the ``tom_hermes`` app.) + + Authentication supports two methods, chosen per destination by what + the TOM operator puts in ``settings.DATA_SHARING[]``: + + - ``API_KEY`` set — uses ``Authorization: Token `` (DRF + TokenAuth). Preferred: TOM Toolkit auto-generates a DRF token per + user, and the destination TOM's operator can create a + service-account token to share. + - Otherwise — uses HTTP Basic with ``USERNAME`` and ``PASSWORD`` + (existing method, kept for continuity). + + Example ``settings.DATA_SHARING`` supporting multiple TOM destinations + plus HERMES:: + + DATA_SHARING = { + # TOM destinations (picked up by TomToolkitSharingBackend): + 'tom_a': { + 'DISPLAY_NAME': 'TOM A', + 'BASE_URL': 'https://tom-a.example.org/', + 'API_KEY': 'drf-token-string', # preferred; Token auth + }, + 'tom_b': { + 'DISPLAY_NAME': 'TOM B', + 'BASE_URL': 'https://tom-b.example.org/', + 'USERNAME': 'alice', # fallback: Basic auth + 'PASSWORD': 's3cret', + }, + # HERMES destination (picked up by HermesSharingBackend): + 'hermes': { + 'BASE_URL': 'https://hermes.lco.global/', + 'HERMES_API_KEY': 'hermes-api-key', + }, + } + """ + + # ``name = 'tom'`` because the share-destination form field encodes a + # choice from this backend as ``'tom:'``. + # ``DataShareView.post()`` parses the ``'tom'`` prefix and looks up + # this class in the registry. + name = 'tom' + + # ``verbose_name = 'Another TOM'`` is the heading shown above this + # backend's destinations in the share-destination dropdown. 
+ verbose_name = 'Another TOM' + + @classmethod + def get_destination_choices(cls, user: User | None = None) -> list: + """Enumerate ``settings.DATA_SHARING`` entries that look like TOM destinations. + + A "TOM destination" entry is one whose value dict has a + ``BASE_URL`` and does NOT have a ``HERMES_API_KEY``. Each such + entry becomes one dropdown option formatted as + ``('tom:', cfg.get('DISPLAY_NAME', key))``. + + Returns an empty list if ``settings.DATA_SHARING`` is not set or + contains no TOM-shaped entries. + """ + choices: list = [] + data_sharing = getattr(settings, 'DATA_SHARING', {}) or {} + for key, cfg in data_sharing.items(): + # Skip HERMES entries; they belong to HermesSharingBackend. + if not isinstance(cfg, dict) or cfg.get('HERMES_API_KEY'): + continue + if not cfg.get('BASE_URL'): + # Not enough info to publish — skip quietly rather than erroring + # at form-render time. + continue + display = cfg.get('DISPLAY_NAME', key) + choices.append((f'{cls.name}:{key}', display)) + return choices + + @staticmethod + def _split_destination(share_destination: str) -> str: + """Return the ```` half of a ``'tom:'`` share-destination string. + + Tolerant of the legacy format where the form field carries just the + bare settings-key (e.g. ``'tom_b'`` with no ``'tom:'`` prefix) so + that older callers still work during the deprecation window. + """ + prefix, sep, sub = share_destination.partition(':') + if sep: + return sub + return prefix + + @staticmethod + def _build_auth(cfg: dict): + """Return a ``(headers_update, auth_tuple_or_None)`` pair for ``requests``. + + Prefers DRF TokenAuth if ``API_KEY`` is set in the settings entry; + otherwise falls back to HTTP Basic with ``USERNAME`` / ``PASSWORD``. + ``headers_update`` is a dict to merge into the base request headers; + ``auth_tuple_or_None`` is the ``auth=`` argument for ``requests``. 
+ """ + api_key = cfg.get('API_KEY') + if api_key: + # DRF TokenAuthentication expects the literal word "Token" (not "Bearer"). + return {'Authorization': f'Token {api_key}'}, None + username = cfg.get('USERNAME') + password = cfg.get('PASSWORD') + if username is not None and password is not None: + return {}, (username, password) + # Nothing configured — let the destination TOM reject the request; + # that produces a clearer error than raising here. + return {}, None + + def share(self, form_data: dict, *, + reduced_datums: QuerySet | None = None, + targets: QuerySet | None = None, + data_products: QuerySet | None = None, + user: User | None = None, + **kwargs) -> dict: + """POST the share payload to the destination TOM's HTTP API. + + Behavior preserved from ``tom_dataproducts.sharing.share_data_with_tom`` + with one addition: authentication now prefers a DRF API token if + ``settings.DATA_SHARING[]['API_KEY']`` is set, falling back to + HTTP Basic if only ``USERNAME`` / ``PASSWORD`` are configured. + + The caller passes whichever of ``reduced_datums`` / ``targets`` / + ``data_products`` is relevant to the share action; the three are + mutually-exclusive in practice (matches the existing behavior). + """ + # Parse the destination sub-key out of the form value (e.g. 'tom:tom_b' -> 'tom_b'). + share_destination = form_data.get('share_destination', '') if form_data else '' + destination_key = self._split_destination(share_destination) + + # Look up the destination's settings entry. If it's missing or + # malformed, return an error-shaped feedback dict rather than + # raising, so the view can surface it to the user via the messages + # framework. + data_sharing = getattr(settings, 'DATA_SHARING', {}) or {} + try: + cfg = data_sharing[destination_key] + destination_tom_base_url = cfg['BASE_URL'] + except KeyError as err: + return {'message': ( + f'ERROR: Check DATA_SHARING configuration for ' + f"'{destination_key}': key {err} not found." 
+ )} + + # Build the base headers and the appropriate auth. Auth is chosen + # per-destination: API_KEY wins; otherwise USERNAME/PASSWORD. + base_headers = {'Content-Type': 'application/json', 'Accept': 'application/json'} + auth_headers, auth = self._build_auth(cfg) + headers = {**base_headers, **auth_headers} + + # Destination endpoints on the receiving TOM. + dataproducts_url = destination_tom_base_url + 'api/dataproducts/' + targets_url = destination_tom_base_url + 'api/targets/' + reduced_datums_url = destination_tom_base_url + 'api/reduceddatums/' + + # Dispatch on which of the three querysets was supplied. Mirrors the + # original ``share_data_with_tom`` shape; one and only one of + # ``data_products`` / ``reduced_datums`` / ``targets`` is typically + # populated per call. + if data_products is not None and data_products.exists(): + return self._share_data_products( + data_products, targets_url, dataproducts_url, headers, auth, + ) + if reduced_datums is not None and reduced_datums.exists(): + return self._share_reduced_datums( + reduced_datums, targets_url, reduced_datums_url, headers, auth, destination_key, + ) + if targets is not None and targets.exists(): + # A Target-only share: push every ReducedDatum that belongs to + # the target. We do NOT create the Target on the destination + # TOM — that target must already exist there. This mirrors the + # original behavior. + target = targets.first() + owned_datums = ReducedDatum.objects.filter(target=target) + return self._share_reduced_datums( + owned_datums, targets_url, reduced_datums_url, headers, auth, destination_key, + ) + return {'message': 'ERROR: No valid data to share.'} + + @staticmethod + def _share_data_products(data_products: QuerySet, targets_url: str, dataproducts_url: str, + headers: dict, auth) -> dict: + """Upload each DataProduct file to the destination TOM. 
+ + Finds a matching Target on the destination by fuzzy-matching on + target names and aliases (``get_destination_target``), then POSTs + the serialized DataProduct plus its file to ``api/dataproducts/``. + """ + # We currently support one DataProduct per call (matches the + # existing view contract). If there are multiple, only the first + # is processed; others are ignored. That is the existing behavior. + product = data_products.first() + target = product.target + serialized_data = DataProductSerializer(product).data + + destination_target_id, _ = get_destination_target(target, targets_url, headers, auth) + if destination_target_id is None: + return {'message': 'ERROR: No matching target found.'} + if isinstance(destination_target_id, list) and len(destination_target_id) > 1: + return {'message': 'ERROR: Multiple targets with matching name found in destination TOM.'} + serialized_data['target'] = destination_target_id + + # TODO: this path join should be replaced once tom_dataproducts uses + # django.core.files.storage (pre-existing TODO from the original code). + dataproduct_filename = os.path.join(settings.MEDIA_ROOT, product.data.name) + with open(dataproduct_filename, 'rb') as dataproduct_filep: + files = {'file': (product.data.name, dataproduct_filep, 'text/csv')} + # For multipart/form-data, requests sets the Content-Type header + # itself; override the JSON content-type we set above. Auth header + # (if any) flows through unchanged. + upload_headers = {k: v for k, v in headers.items() if k != 'Content-Type'} + upload_headers['Media-Type'] = 'multipart/form-data' + response = requests.post(dataproducts_url, data=serialized_data, files=files, + headers=upload_headers, auth=auth) + return response + + @staticmethod + def _share_reduced_datums(reduced_datums: QuerySet, targets_url: str, reduced_datums_url: str, + headers: dict, auth, destination_key: str) -> dict: + """POST each ReducedDatum to the destination TOM. 
+ + Resolves each datum's Target on the destination TOM (by fuzzy + name match), then POSTs the serialized datum to + ``api/reduceddatums/``. Returns a summary message with the number + of datums saved. + """ + # First resolve every source Target to its destination-TOM id. + targets_set = {reduced_datum.target for reduced_datum in reduced_datums} + target_dict: dict = {} + for target in targets_set: + destination_target_id, _ = get_destination_target(target, targets_url, headers, auth) + if isinstance(destination_target_id, list) and len(destination_target_id) > 1: + return {'message': 'ERROR: Multiple targets with matching name found in destination TOM.'} + target_dict[target.name] = destination_target_id + if all(value is None for value in target_dict.values()): + return {'message': 'ERROR: No matching targets found.'} + + # Run datums through the existing sharing-protocol filter so a + # datum already published to this destination is not re-sent. + # ``check_for_share_safe_datums`` lives in tom_dataproducts.sharing + # for now; we import lazily to avoid a circular import at module load + # (tom_dataproducts.sharing depends on tom_common at other points). + from tom_dataproducts.sharing import check_for_share_safe_datums + reduced_datums = check_for_share_safe_datums(destination_key, reduced_datums) + if not reduced_datums: + return {'message': 'ERROR: No valid data to share.'} + + response_codes: list = [] + for datum in reduced_datums: + if not target_dict.get(datum.target.name): + continue + serialized_data = ReducedDatumSerializer(datum).data + serialized_data['target'] = target_dict[datum.target.name] + serialized_data['data_product'] = '' + # Stamp provenance on the outgoing datum if the source did not + # already provide it, so the destination TOM can trace where + # the datum came from. 
+ if not serialized_data.get('source_name'): + serialized_data['source_name'] = settings.TOM_NAME + serialized_data['source_location'] = ( + f"ReducedDatum shared from <{settings.TOM_NAME}.url>" + f"/api/reduceddatums/{datum.id}/" + ) + response = requests.post(reduced_datums_url, json=serialized_data, + headers=headers, auth=auth) + response_codes.append(response.status_code) + + failed_count = len([rc for rc in response_codes if rc >= 300]) + if failed_count < len(response_codes): + saved = len(response_codes) - failed_count + return {'message': f'{saved} of {len(response_codes)} datums successfully saved.'} + return {'message': 'ERROR: No valid data shared. These data may already exist in target TOM.'} + + +def get_destination_target(target: Target, targets_url: str, headers: dict, auth) -> tuple: + """Find the destination-TOM target id that matches the given source target. + + Uses the destination TOM's ``api/targets/?name_fuzzy=`` filter with a + comma-separated list of the source target's name and aliases. Returns + ``(id_or_list_or_None, http_response)``: + + - A single matched id if exactly one destination target matches. + - The full list of result dicts if more than one matched (caller can + decide which one to use, or surface an error). + - ``None`` if no target matches. + + Moved here from ``tom_dataproducts.sharing`` so that TomToolkitSharingBackend + and future TOM-to-TOM utilities can share one implementation. + """ + # Build a comma-separated list of target names plus aliases that the + # TOM API name_fuzzy filter will parse. 
+ target_names = ','.join(map(str, target.names)) + target_response = requests.get(f'{targets_url}?name_fuzzy={target_names}', + headers=headers, auth=auth) + target_response_json = target_response.json() + try: + if target_response_json['results']: + if len(target_response_json['results']) > 1: + return target_response_json['results'], target_response + destination_target_id = target_response_json['results'][0]['id'] + return destination_target_id, target_response + return None, target_response + except KeyError: + return None, target_response diff --git a/tom_common/test_sharing.py b/tom_common/test_sharing.py new file mode 100644 index 000000000..1e482a5ef --- /dev/null +++ b/tom_common/test_sharing.py @@ -0,0 +1,173 @@ +""" +Tests for the ``sharing_backends`` integration point in ``tom_common.sharing``. + +These tests must pass with ``tom_hermes`` NOT installed — the discovery +mechanism is supposed to work without any particular third-party +SharingBackend being available. A test-only fake ``SharingBackend`` +subclass declared below stands in for HERMES / other real backends. +""" +from __future__ import annotations + +from unittest.mock import MagicMock, patch + +from django.test import TestCase, override_settings + +from tom_common.sharing import ( + SharingBackend, + TomToolkitSharingBackend, + get_sharing_backend, + get_sharing_backends, +) + + +class _FakeSharingBackend(SharingBackend): + """Test-only backend. Declared at module scope so ``import_string`` can resolve it.""" + + name = 'fake' + verbose_name = 'Fake' + + @classmethod + def get_destination_choices(cls, user=None): + return [('fake:one', 'one'), ('fake:two', 'two')] + + def share(self, form_data, **kwargs): + # Returns a feedback dict shaped like the real backends do on success. + return {'status': 'success', 'message': 'ok'} + + +class _FakeAppConfig: + """Minimal AppConfig stand-in used to register ``_FakeSharingBackend``. 
+ + ``get_sharing_backends`` reads each installed AppConfig's + ``sharing_backends()`` method, imports the listed class paths, and + keys them by class ``name``. To simulate a registered backend we + inject this AppConfig into ``apps.get_app_configs()`` via patch. + """ + + name = 'fake_app' + + def sharing_backends(self): + return [{'class': 'tom_common.test_sharing._FakeSharingBackend'}] + + +class SharingRegistryTests(TestCase): + """Verify the registry discovery / lookup helpers.""" + + def test_get_sharing_backends_includes_fake_from_patched_appconfig(self): + # ``get_sharing_backends`` iterates ``apps.get_app_configs()``. + # Patching it to return a single fake AppConfig lets us assert + # the discovery path without relying on any real third-party + # SharingBackend being installed. + with patch('tom_common.sharing.django_apps.get_app_configs', + return_value=[_FakeAppConfig()]): + registry = get_sharing_backends() + self.assertIn('fake', registry) + self.assertIs(registry['fake'], _FakeSharingBackend) + + def test_get_sharing_backends_skips_appconfig_without_method(self): + # An AppConfig without ``sharing_backends()`` must be silently + # skipped (the hook is optional). + appconfig_no_hook = MagicMock(spec=['name']) + appconfig_no_hook.name = 'no_hook' + del appconfig_no_hook.sharing_backends # make AttributeError real + with patch('tom_common.sharing.django_apps.get_app_configs', + return_value=[appconfig_no_hook, _FakeAppConfig()]): + registry = get_sharing_backends() + self.assertEqual(list(registry), ['fake']) + + def test_get_sharing_backends_skips_unresolvable_class_path(self): + # When a class path fails to import, the offending entry is + # skipped — one broken backend must not prevent the others + # from being registered. 
+ class BadAppConfig: + name = 'bad_app' + + def sharing_backends(self_inner): + return [{'class': 'nonexistent.module.DoesNotExist'}] + + with patch('tom_common.sharing.django_apps.get_app_configs', + return_value=[BadAppConfig(), _FakeAppConfig()]): + registry = get_sharing_backends() + self.assertEqual(list(registry), ['fake']) + + def test_get_sharing_backend_returns_class_by_name(self): + with patch('tom_common.sharing.django_apps.get_app_configs', + return_value=[_FakeAppConfig()]): + self.assertIs(get_sharing_backend('fake'), _FakeSharingBackend) + + def test_get_sharing_backend_raises_import_error_when_missing(self): + # Unknown names raise ImportError with a message that names the + # fix — the caller should see "install the app that provides it". + with patch('tom_common.sharing.django_apps.get_app_configs', + return_value=[_FakeAppConfig()]): + with self.assertRaises(ImportError) as ctx: + get_sharing_backend('no-such-backend') + self.assertIn('INSTALLED_APPS', str(ctx.exception)) + + +class TomToolkitSharingBackendChoicesTests(TestCase): + """Verify that destination-dropdown choices come from ``settings.DATA_SHARING``.""" + + @override_settings(DATA_SHARING={ + 'tom_a': {'DISPLAY_NAME': 'TOM A', 'BASE_URL': 'https://a.example/'}, + 'tom_b': {'DISPLAY_NAME': 'TOM B', 'BASE_URL': 'https://b.example/'}, + # HERMES-shaped entry is recognized by HERMES_API_KEY and should NOT + # appear under TomToolkitSharingBackend's choices. + 'hermes': {'BASE_URL': 'https://hermes.example/', 'HERMES_API_KEY': 'x'}, + # Missing BASE_URL is skipped quietly rather than erroring at render. + 'broken': {'DISPLAY_NAME': 'Broken'}, + }) + def test_choices_include_tom_entries_and_exclude_hermes(self): + choices = TomToolkitSharingBackend.get_destination_choices(user=None) + # Ordering matches dict insertion order. 
+ self.assertEqual(choices, [ + ('tom:tom_a', 'TOM A'), + ('tom:tom_b', 'TOM B'), + ]) + + @override_settings(DATA_SHARING={}) + def test_choices_empty_when_no_settings(self): + self.assertEqual(TomToolkitSharingBackend.get_destination_choices(user=None), []) + + +class TomToolkitSharingBackendAuthTests(TestCase): + """Verify that _build_auth picks API_KEY over USERNAME/PASSWORD.""" + + def test_api_key_produces_token_auth_header(self): + cfg = {'API_KEY': 'abc123', 'USERNAME': 'u', 'PASSWORD': 'p'} + headers, auth = TomToolkitSharingBackend._build_auth(cfg) + # API_KEY wins even when USERNAME/PASSWORD are also set. + self.assertEqual(headers, {'Authorization': 'Token abc123'}) + self.assertIsNone(auth) + + def test_username_password_produces_basic_auth_tuple(self): + cfg = {'USERNAME': 'alice', 'PASSWORD': 'secret'} + headers, auth = TomToolkitSharingBackend._build_auth(cfg) + self.assertEqual(headers, {}) + self.assertEqual(auth, ('alice', 'secret')) + + def test_no_credentials_returns_empty(self): + # When neither API_KEY nor USERNAME/PASSWORD are set, we return + # empty auth and let the destination TOM reject the request — + # that produces a clearer error than raising here. + headers, auth = TomToolkitSharingBackend._build_auth({}) + self.assertEqual(headers, {}) + self.assertIsNone(auth) + + +class TomToolkitSharingBackendSplitDestinationTests(TestCase): + """Verify the parser that extracts the sub-destination from ``:``.""" + + def test_prefixed_form(self): + self.assertEqual( + TomToolkitSharingBackend._split_destination('tom:tom_b'), + 'tom_b', + ) + + def test_legacy_bare_form(self): + # Legacy callers still pass the bare settings key without the prefix. + # _split_destination must tolerate that and return the whole string. 
+        self.assertEqual(
+            TomToolkitSharingBackend._split_destination('mytom'),
+            'mytom',
+        )

From c7c5c5234c4b7fb9267eb7f986c3f89691c24056 Mon Sep 17 00:00:00 2001
From: "William (Lindy) Lindstrom"
Date: Fri, 1 May 2026 14:58:44 -0700
Subject: [PATCH 08/22] Route tom_dataproducts sharing through the
 SharingBackend registry

- this is part of the tom_hermes refactor; to remove HERMES related code
  from `tom_base` to `tom_hermes`
- These are breaking changes going in the v3:
  - Removed `share_data_with_hermes`, `share_target_list_with_hermes`,
    `share_data_with_tom`, the internal `_dispatch_to_backend`, and the
    `check_for_save_safe_datums` placeholder. Callers should use
    `get_sharing_backend(name)().share(...)` directly.
  - Moved `tom_dataproducts/tests/test_sharing.py` to `tom_hermes`
---
 tom_dataproducts/forms.py                     |   6 +-
 tom_dataproducts/sharing.py                   | 388 +++++-------------
 .../templatetags/dataproduct_extras.py        |   8 +-
 tom_dataproducts/tests/test_sharing.py        | 174 --------
 tom_dataproducts/views.py                     |  58 ++-
 5 files changed, 160 insertions(+), 474 deletions(-)
 delete mode 100644 tom_dataproducts/tests/test_sharing.py

diff --git a/tom_dataproducts/forms.py b/tom_dataproducts/forms.py
index b5301cf36..e14f1f785 100644
--- a/tom_dataproducts/forms.py
+++ b/tom_dataproducts/forms.py
@@ -62,5 +62,9 @@ class DataShareForm(forms.Form):
     )
 
     def __init__(self, *args, **kwargs):
+        # The view passes ``user`` so that per-user SharingBackend choices
+        # (e.g. HERMES topics the logged-in user can publish to) can be
+        # looked up from that user's profile / session credentials.
+ self.user = kwargs.pop('user', None) super().__init__(*args, **kwargs) - self.fields['share_destination'].choices = get_sharing_destination_options() + self.fields['share_destination'].choices = get_sharing_destination_options(user=self.user) diff --git a/tom_dataproducts/sharing.py b/tom_dataproducts/sharing.py index 83422aa9e..cdec44e2c 100644 --- a/tom_dataproducts/sharing.py +++ b/tom_dataproducts/sharing.py @@ -1,333 +1,163 @@ -import requests -import os +""" +Data-sharing helpers retained in tom_dataproducts. + +The SharingBackend framework and the bundled ``TomToolkitSharingBackend`` +(share with another TOM) live in ``tom_common.sharing``. HERMES-specific +code lives in ``tom_hermes``. This module retains: + +- ``get_sharing_destination_options`` — iterates every registered + SharingBackend and collects its ``get_destination_choices(user)`` output + for the share-destination dropdown. +- ``check_for_share_safe_datums`` — generic filter that drops ReducedDatums + already published to the destination topic. Used by + ``HermesSharingBackend.share`` (lazy import). +- Sharing feedback helpers (``sharing_feedback_converter``, + ``sharing_feedback_handler``). +- CSV download helpers (``process_spectro_data_for_download``, + ``download_data``) — the "download" sharing destination. +- ``get_destination_target`` — re-exported from ``tom_common.sharing`` so + existing ``tom_targets.sharing`` imports keep working. 
+""" from io import StringIO -from astropy.table import Table -from astropy.io import ascii -from django.conf import settings -from django.core.exceptions import ImproperlyConfigured +from astropy.io import ascii +from astropy.table import Table from django.contrib import messages from django.db.models import Q from django.http import StreamingHttpResponse from django.utils.text import slugify -from tom_targets.models import Target +# get_destination_target is re-exported here so existing tom_targets.sharing +# imports keep working after its move into tom_common.sharing. +from tom_common.sharing import get_destination_target # noqa: F401 +from tom_common.sharing import get_sharing_backends +from tom_dataproducts.models import ReducedDatum +from tom_dataproducts.serializers import ReducedDatumSerializer -from tom_dataproducts.models import DataProduct, ReducedDatum -from tom_dataproducts.alertstreams.hermes_publisher import publish_to_hermes, BuildHermesMessage, get_hermes_topics -from tom_dataproducts.serializers import DataProductSerializer, ReducedDatumSerializer - - -def share_target_list_with_hermes(share_destination, form_data, selected_targets=None, include_all_data=False): - """ - Serialize and share a set of selected targets and their data with Hermes - :param share_destination: Topic to share data to. (e.g. 
'hermes.test') - :param form_data: Sharing Form data - :param selected_targets: List of selected target ids to share - :param include_all_data: Boolean flag to include all dataproducts when sharing or not - :return: json response for the sharing - """ - if selected_targets is None: - selected_targets = [] - target_list = form_data.get('target_list') - title_name = f"{target_list.name} target list" - targets = Target.objects.filter(id__in=selected_targets) - if include_all_data: - reduced_datums = ReducedDatum.objects.filter(target__id__in=selected_targets, data_type='photometry') - else: - reduced_datums = ReducedDatum.objects.none() - return _share_with_hermes(share_destination, form_data, title_name, reduced_datums, targets) +# --------------------------------------------------------------------------- +# Sharing-destination dropdown: iterate the SharingBackend registry +# --------------------------------------------------------------------------- -def share_data_with_hermes(share_destination, form_data, product_id=None, target_id=None, selected_data=None): - """ - Serialize and share data with Hermes (hermes.lco.global) - :param share_destination: Topic to share data to. (e.g. 
'hermes.test') - :param form_data: Sharing Form data - :param product_id: DataProduct ID (if provided) - :param target_id: Target ID (if provided) - :param selected_data: List of ReducedDatum IDs (if provided) - :return: json response for the sharing - """ - # Query relevant Reduced Datums Queryset - accepted_data_types = ['photometry', 'spectroscopy'] - if product_id: - product = DataProduct.objects.get(pk=product_id) - target = product.target - reduced_datums = ReducedDatum.objects.filter(data_product=product) - elif selected_data: - reduced_datums = ReducedDatum.objects.filter(pk__in=selected_data) - target = reduced_datums[0].target - elif target_id: - target = Target.objects.get(pk=target_id) - reduced_datums = ReducedDatum.objects.none() - else: - reduced_datums = ReducedDatum.objects.none() - target = Target.objects.none() - title_name = target.name if target else '' - reduced_datums.filter(data_type__in=accepted_data_types) - return _share_with_hermes( - share_destination, form_data, title_name, reduced_datums, targets=Target.objects.filter(pk=target.pk) - ) +def get_sharing_destination_options(include_download=True, user=None): + """Build the choices tuple for ``DataShareForm.share_destination``. + Called by ``DataShareForm.__init__``. Iterates every SharingBackend + registered via the ``sharing_backends()`` AppConfig integration point + and collects each backend's ``get_destination_choices(user)``. Each + backend's options are grouped under its ``verbose_name`` heading in + the dropdown. -def _share_with_hermes(share_destination, form_data, title_name, - reduced_datums=ReducedDatum.objects.none(), - targets=Target.objects.none()): - """ - Helper method to serialize and share data with hermes - :param share_destination: Topic to share data to. (e.g. 
'hermes.test') - :param form_data: Sharing Form data - :param reduced_datums: filtered queryset of reduced datums to submit - :return: json response for the sharing + The optional ``download`` entry (not a backend — emits a CSV to the + user directly) is still handled as a hardcoded first option. """ - # Build and submit hermes table from Reduced Datums - hermes_topic = share_destination.split(':')[1] - destination = share_destination.split(':')[0] - sharing = getattr(settings, "DATA_SHARING", {}) - authors = form_data.get('share_authors', sharing.get('hermes', {}).get('DEFAULT_AUTHORS', None)) - message_info = BuildHermesMessage(title=form_data.get('share_title', - f"Updated data for {title_name} from " - f"{getattr(settings, 'TOM_NAME', 'TOM Toolkit')}."), - submitter=form_data.get('submitter'), - authors=authors, - message=form_data.get('share_message', None), - topic=hermes_topic - ) - # Run ReducedDatums Queryset through sharing protocols to make sure they are safe to share. - filtered_reduced_datums = check_for_share_safe_datums(destination, reduced_datums, topic=hermes_topic) - if filtered_reduced_datums.count() > 0 or targets.count() > 0: - response = publish_to_hermes(message_info, filtered_reduced_datums, targets) - else: - return {'message': 'ERROR: No valid data or targets to share. (Check Sharing Protocol. Note that ' - 'only photometry data types are supported for sharing with hermes'} - return response - - -def share_data_with_tom(share_destination, form_data, product_id=None, target_id=None, selected_data=None): - """ - Serialize and share data with another TOM - :param share_destination: TOM to share data to as described in settings.DATA_SHARING. (e.g. 
'mytom') - :param form_data: Sharing Form data - :param product_id: DataProduct ID (if provided) - :param target_id: Target ID (if provided) - :param selected_data: List of ReducedDatum IDs (if provided) - :return: - """ - # Build destination TOM headers and URL information - try: - destination_tom_base_url = settings.DATA_SHARING[share_destination]['BASE_URL'] - username = settings.DATA_SHARING[share_destination]['USERNAME'] - password = settings.DATA_SHARING[share_destination]['PASSWORD'] - except KeyError as err: - raise ImproperlyConfigured(f'Check DATA_SHARING configuration for {share_destination}: Key {err} not found.') - auth = (username, password) - headers = {'Content-Type': 'application/json', 'Accept': 'application/json'} + choices = [] + if include_download: + choices.append(('download', 'download')) - dataproducts_url = destination_tom_base_url + 'api/dataproducts/' - targets_url = destination_tom_base_url + 'api/targets/' - reduced_datums_url = destination_tom_base_url + 'api/reduceddatums/' - reduced_datums = ReducedDatum.objects.none() + for backend_cls in get_sharing_backends().values(): + backend_choices = list(backend_cls.get_destination_choices(user=user)) + if not backend_choices: + continue + # Django select widgets render a tuple ``(group_label, tuple_of_choices)`` + # as an optgroup. Wrap this backend's choices in that shape so + # they appear under the backend's verbose_name heading. 
+ choices.append((backend_cls.verbose_name, tuple(backend_choices))) - # If a DataProduct is provided, share that DataProduct - if product_id: - product = DataProduct.objects.get(pk=product_id) - target = product.target - serialized_data = DataProductSerializer(product).data - # Find matching target in destination TOM - destination_target_id, _ = get_destination_target(target, targets_url, headers, auth) - if destination_target_id is None: - return {'message': 'ERROR: No matching target found.'} - elif isinstance(destination_target_id, list) and len(destination_target_id) > 1: - return {'message': 'ERROR: Multiple targets with matching name found in destination TOM.'} - serialized_data['target'] = destination_target_id - # TODO: this should be updated when tom_dataproducts is updated to use django.core.storage - dataproduct_filename = os.path.join(settings.MEDIA_ROOT, product.data.name) - # Save DataProduct in Destination TOM - with open(dataproduct_filename, 'rb') as dataproduct_filep: - files = {'file': (product.data.name, dataproduct_filep, 'text/csv')} - headers = {'Media-Type': 'multipart/form-data'} - response = requests.post(dataproducts_url, data=serialized_data, files=files, headers=headers, auth=auth) - elif selected_data or target_id: - # If ReducedDatums are provided, share those ReducedDatums - if selected_data: - reduced_datums = ReducedDatum.objects.filter(pk__in=selected_data) - targets = set(reduced_datum.target for reduced_datum in reduced_datums) - target_dict = {} - for target in targets: - # get destination Target - destination_target_id, _ = get_destination_target(target, targets_url, headers, auth) - if isinstance(destination_target_id, list) and len(destination_target_id) > 1: - return {'message': 'ERROR: Multiple targets with matching name found in destination TOM.'} - target_dict[target.name] = destination_target_id - if all(value is None for value in target_dict.values()): - return {'message': 'ERROR: No matching targets found.'} - else: 
- # If Target is provided, share all ReducedDatums for that Target - # (Will not create New Target in Destination TOM) - target = Target.objects.get(pk=target_id) - reduced_datums = ReducedDatum.objects.filter(target=target) - destination_target_id, _ = get_destination_target(target, targets_url, headers, auth) - if destination_target_id is None: - return {'message': 'ERROR: No matching target found.'} - elif isinstance(destination_target_id, list) and len(destination_target_id) > 1: - return {'message': 'ERROR: Multiple targets with matching name found in destination TOM.'} - target_dict = {target.name: destination_target_id} - response_codes = [] - reduced_datums = check_for_share_safe_datums(share_destination, reduced_datums) - if not reduced_datums: - return {'message': 'ERROR: No valid data to share.'} - for datum in reduced_datums: - if target_dict[datum.target.name]: - serialized_data = ReducedDatumSerializer(datum).data - serialized_data['target'] = target_dict[datum.target.name] - serialized_data['data_product'] = '' - if not serialized_data['source_name']: - serialized_data['source_name'] = settings.TOM_NAME - serialized_data['source_location'] = f"ReducedDatum shared from " \ - f"<{settings.TOM_NAME}.url>/api/reduceddatums/{datum.id}/" - response = requests.post(reduced_datums_url, json=serialized_data, headers=headers, auth=auth) - response_codes.append(response.status_code) - failed_data_count = len([rc for rc in response_codes if rc >= 300]) - if failed_data_count < len(response_codes): - return {'message': f'{len(response_codes)-failed_data_count} of {len(response_codes)} ' - 'datums successfully saved.'} - else: - return {'message': 'ERROR: No valid data shared. 
These data may already exist in target TOM.'} - else: - return {'message': 'ERROR: No valid data to share.'} + return tuple(choices) - return response +# --------------------------------------------------------------------------- +# Generic share helpers +# --------------------------------------------------------------------------- -def get_destination_target(target, targets_url, headers, auth): - """ - Retrieve the target ID from a destination TOM that is a fuzzy match the given target name and aliases - :param target: Target Model - :param targets_url: Destination API URL for TOM Target List - :param headers: TOM API headers - :param auth: TOM API authorization - :return: - """ - # Create coma separated list of target names plus aliases that can be recognized and parsed by the TOM API Filter - target_names = ','.join(map(str, target.names)) - target_response = requests.get(f'{targets_url}?name_fuzzy={target_names}', headers=headers, auth=auth) - target_response_json = target_response.json() - try: - if target_response_json['results']: - if len(target_response_json['results']) > 1: - return target_response_json['results'], target_response - destination_target_id = target_response_json['results'][0]['id'] - return destination_target_id, target_response - else: - return None, target_response - except KeyError: - return None, target_response +def check_for_share_safe_datums(destination, reduced_datums, **kwargs): + """Drop ReducedDatums that have already been published to the given destination+topic. + Generic hook — subclassable / replaceable by a TOM operator for a + different selection experience. Today has one built-in rule: for + HERMES, exclude datums already linked to an ``AlertStreamMessage`` + with ``exchange_status='published'`` on the same topic. For other + destinations, it is a no-op. -def check_for_share_safe_datums(destination, reduced_datums, **kwargs): - """ - Custom sharing protocols used to determine when data is shared with a destination. 
- This example prevents sharing if a datum has already been published to the given Hermes topic. - :param destination: sharing destination string - :param reduced_datums: selected input datums - :return: queryset of reduced datums to be shared + Called by ``HermesSharingBackend.share`` (lazy import) and by + ``TomToolkitSharingBackend.share`` (lazy import). """ if 'hermes' in destination: message_topic = kwargs.get('topic', None) - filtered_datums = reduced_datums.exclude(Q(message__exchange_status='published') - & Q(message__topic=message_topic)) + filtered_datums = reduced_datums.exclude( + Q(message__exchange_status='published') & Q(message__topic=message_topic), + ) else: filtered_datums = reduced_datums return filtered_datums -def check_for_save_safe_datums(): - return - - -def get_sharing_destination_options(include_download=True): - """ - Build the Display options and headers for the dropdown form for choosing sharing topics. - Customize for a different selection experience. - :return: Tuple: Possible Destinations and their Display Names - """ - if include_download: - choices = [('download', 'download')] - else: - choices = [] - try: - for destination, details in settings.DATA_SHARING.items(): - new_destination = [details.get('DISPLAY_NAME', destination)] - destination_topics = details.get('USER_TOPICS', []) - if destination.lower() == 'hermes': - # If this is a hermes share, get the topics from hermes and override what the users provide - hermes_topics = get_hermes_topics() - # If we have no writable hermes topics, then we can't share with hermes! 
- if not hermes_topics: - destination_topics = [] - elif destination_topics: - # If we've set USER_TOPICS with hermes, filter them to those available to your user account - destination_topics = [topic for topic in destination_topics if topic in hermes_topics] - else: - destination_topics = hermes_topics - if destination_topics: - topic_list = [(f'{destination}:{topic}', topic) for topic in destination_topics] - new_destination.append(tuple(topic_list)) - else: - new_destination.insert(0, destination) - choices.append(tuple(new_destination)) - except AttributeError: - pass - return tuple(choices) +def sharing_feedback_converter(response): + """Extract a human-readable message from a sharing response object. + Tolerant of three shapes that the share functions return: -def sharing_feedback_converter(response): - """ - Takes a sharing feedback response and returns its error or success message + - A ``requests.Response`` object: extract ``.json()['message']`` if + present, else a generic "Submitted message succesfully". + - A feedback dict (``{'message': str}``): return the message directly. + - Anything that raises — render the status code and content. """ try: response.raise_for_status() if 'message' in response.json(): feedback_message = response.json()['message'] else: - feedback_message = "Submitted message succesfully" + feedback_message = 'Submitted message succesfully' except AttributeError: + # response is a plain dict (from the share_* shims in error cases). 
feedback_message = response['message'] except Exception: - feedback_message = f"ERROR: Returned Response code {response.status_code} with content: {response.content}" - + feedback_message = ( + f'ERROR: Returned Response code {response.status_code} ' + f'with content: {response.content}' + ) return feedback_message def sharing_feedback_handler(response, request): - """ - Handle the response from a sharing request and prepare a message to the user - :return: + """Push a sharing response's feedback into Django's messages framework. + + ERROR-shaped feedback becomes a ``messages.error``; anything else + becomes ``messages.success``. Called from every view that runs a + share operation. """ publish_feedback = sharing_feedback_converter(response) - if "ERROR" in publish_feedback.upper(): + if 'ERROR' in publish_feedback.upper(): messages.error(request, publish_feedback) else: messages.success(request, publish_feedback) return +# --------------------------------------------------------------------------- +# CSV download (the "download" sharing destination) +# --------------------------------------------------------------------------- + def process_spectro_data_for_download(serialized_datum): - """ Turns a serialized spectrograph datum into a list of serialized datums with the - spectrograph info expanded one piece per line + """Expand a serialized spectroscopy ReducedDatum into one row per flux/wavelength pair. + + Used by ``download_data`` so a spectroscopy ReducedDatum (which + carries parallel flux/wavelength arrays in its ``value`` dict) + becomes multiple rows in the downloaded CSV. Also handles the + legacy "dict of rows" shape. 
""" download_datums = [] spectra_data = serialized_datum.pop('value') if ('flux' in spectra_data and isinstance(spectra_data['flux'], list) - and 'wavelength' in spectra_data and isinstance(spectra_data['wavelength'], list) + and 'wavelength' in spectra_data and isinstance(spectra_data['wavelength'], list) and len(spectra_data['flux']) == len(spectra_data['wavelength'])): datum_to_copy = serialized_datum.copy() - # If its a data dict with certain array or dict fields, then first copy the scalar fields over + # Copy scalar (non-array, non-dict) fields through to the per-row + # copies so they appear on every expanded row. for key, value in spectra_data.items(): if not isinstance(value, (list, dict)) and key not in datum_to_copy: datum_to_copy[key] = value - # And then iterate over the expected array fields to build output rows for i, flux in enumerate(spectra_data['flux']): expanded_datum = datum_to_copy.copy() expanded_datum['flux'] = flux @@ -336,23 +166,21 @@ def process_spectro_data_for_download(serialized_datum): expanded_datum['flux_error'] = spectra_data['flux_error'][i] download_datums.append(expanded_datum) else: + # Legacy "dict of rows" shape: each value is itself a row dict. for entry in spectra_data.values(): if isinstance(entry, dict): expanded_datum = serialized_datum.copy() - # If its an "array" of dicts, just expand each dict into the output expanded_datum.update(entry) download_datums.append(expanded_datum) return download_datums def download_data(form_data, selected_data): - """ - Produces a CSV photometry or spectroscopy table from the DataShareForm and provides it for download - as a StreamingHttpResponse. - The "title" becomes the filename, and the "message" becomes a comment at the top of the file. 
-    :param form_data: data from the DataShareForm
-    :param selected_data: ReducucedDatums selected via the checkboxes in the DataShareForm
-    :return: CSV photometry or spectroscopy table as a StreamingHttpResponse
+    """Produce a CSV photometry/spectroscopy table as a ``StreamingHttpResponse``.
+
+    The share-destination ``'download'`` dispatches here (see
+    ``DataShareView.post``). The form's ``share_title`` becomes the CSV
+    filename; ``share_message`` becomes a top-of-file comment line.
     """
     reduced_datums = ReducedDatum.objects.filter(pk__in=selected_data)
     serialized_data = [ReducedDatumSerializer(rd).data for rd in reduced_datums]
@@ -360,11 +188,11 @@ def download_data(form_data, selected_data):
     sort_fields = ['timestamp']
     for datum in serialized_data:
         if datum.get('data_type') == 'photometry':
+            # Flatten the photometry value dict into the datum row.
             datum.update(datum.pop('value'))
             data_to_save.append(datum)
         elif datum.get('data_type') == 'spectroscopy':
             sort_fields = ['timestamp', 'wavelength']
-            # Attempt to expand the photometry table stored in the .value into multiple entries in serialized data
             data_to_save.extend(process_spectro_data_for_download(datum))
     table = Table(data_to_save)
     if form_data.get('share_message'):
@@ -372,8 +200,8 @@ def process_spectro_data_for_download(serialized_datum):
     table.sort(sort_fields)
     file_buffer = StringIO()
     ascii.write(table, file_buffer, format='csv', comment='# ')
-    file_buffer.seek(0)  # goto the beginning of the buffer
-    response = StreamingHttpResponse(file_buffer, content_type="text/ascii")
+    file_buffer.seek(0)
+    response = StreamingHttpResponse(file_buffer, content_type='text/ascii')
     filename = slugify(form_data['share_title']) + '.csv'
     response['Content-Disposition'] = f'attachment; filename="{filename}"'
     return response
diff --git a/tom_dataproducts/templatetags/dataproduct_extras.py b/tom_dataproducts/templatetags/dataproduct_extras.py
index 52b912840..dff8328b9 100644
--- a/tom_dataproducts/templatetags/dataproduct_extras.py
+++ b/tom_dataproducts/templatetags/dataproduct_extras.py @@ -45,7 +45,9 @@ def dataproduct_list_for_target(context, target): initial = {'submitter': context['request'].user, 'target': target, 'share_title': f"Updated data for {target.name}."} - form = DataShareForm(initial=initial) + # Pass user so the DataShareForm's share-destination choices can include + # per-user SharingBackend entries (e.g. HERMES topics the user can publish to). + form = DataShareForm(initial=initial, user=context['request'].user) return { 'products': target_products_for_user, @@ -203,7 +205,9 @@ def get_photometry_data(context, target, target_share=False): 'data_type': 'photometry', 'share_title': f"Updated data for {target.name} from {getattr(settings, 'TOM_NAME', 'TOM Toolkit')}.", } - form = DataShareForm(initial=initial) + # Pass user so the DataShareForm's share-destination choices can include + # per-user SharingBackend entries (e.g. HERMES topics the user can publish to). + form = DataShareForm(initial=initial, user=context['request'].user) form.fields['data_type'].widget = forms.HiddenInput() sharing = getattr(settings, "DATA_SHARING", None) diff --git a/tom_dataproducts/tests/test_sharing.py b/tom_dataproducts/tests/test_sharing.py deleted file mode 100644 index 346f37591..000000000 --- a/tom_dataproducts/tests/test_sharing.py +++ /dev/null @@ -1,174 +0,0 @@ -from django.test import TestCase, override_settings - -from tom_dataproducts.alertstreams.hermes_publisher import (create_hermes_alert, BuildHermesMessage, - HermesMessageException) -from tom_dataproducts.models import ReducedDatum -from tom_observations.tests.utils import FakeRoboticFacility -from tom_observations.tests.factories import SiderealTargetFactory, ObservingRecordFactory -from django.contrib.auth.models import User - - -DATA_SHARING = { - 'hermes': { - 'DISPLAY_NAME': 'Hermes', - 'BASE_URL': 'http://hermes.test/', - 'HERMES_API_KEY': '123fake', - 'DEFAULT_AUTHORS': 'test author', - 'USER_TOPICS': ['hermes.test', 
'tomtoolkit.test'], - } -} - - -@override_settings(DATA_SHARING=DATA_SHARING) -class TestHermesSharing(TestCase): - def setUp(self): - self.target = SiderealTargetFactory.create() - self.observation_record = ObservingRecordFactory.create( - target_id=self.target.id, - facility=FakeRoboticFacility.name, - parameters={} - ) - self.user = User.objects.create_user(username='test', email='test@example.com') - self.rd1 = ReducedDatum.objects.create( - target=self.target, - data_type='photometry', - value={'magnitude': 18.5, 'error': .5, 'filter': 'V', 'telescope': 'tst'} - ) - self.rd2 = ReducedDatum.objects.create( - target=self.target, - data_type='photometry', - value={'magnitude': 19.5, 'error': .5, 'filter': 'B', 'telescope': 'tst'} - ) - self.rd3 = ReducedDatum.objects.create( - target=self.target, - data_type='photometry', - value={'magnitude': 17.5, 'error': .5, 'filter': 'R', 'telescope': 'tst'} - ) - self.rd4 = ReducedDatum.objects.create( - target=self.target, - data_type='spectroscopy', - value={ - 'flux': [1, 2, 3], - 'wavelength': [6000, 5999, 5998], - 'error': [0.11, 0.22, 0.33], - 'telescope': 'SpectraTelescope' - } - ) - self.rd5 = ReducedDatum.objects.create( - target=self.target, - data_type='spectroscopy', - value={ - '1': {'flux': 20, 'wavelength': 3000}, - '2': {'flux': 21, 'wavelength': 3001}, - '3': {'flux': 22, 'wavelength': 3002}, - } - ) - self.bad_rd = ReducedDatum.objects.create( - target=self.target, - data_type='spectroscopy', - value={ - 'myflux': [1, 2, 3], - 'wavelength_function': 'lambda_xyz' - } - ) - self.message_info = BuildHermesMessage( - title='Test Title', - submitter=self.user.email, - authors=self.user.username, - message='Test Message' - ) - - def _check_alert(self, alert, message_info, datums, targets): - self.assertEqual(alert['topic'], message_info.topic) - self.assertEqual(alert['title'], message_info.title) - self.assertEqual(alert['message_text'], message_info.message) - self.assertEqual(alert['authors'], 
message_info.authors) - self.assertEqual(alert['submitter'], message_info.submitter) - if targets: - self.assertEqual(len(alert['data']['targets']), len(targets)) - targets_by_name = {target.name: target for target in targets} - for target in alert['data']['targets']: - self.assertIn(target['name'], targets_by_name) - original_target = targets_by_name[target['name']] - self.assertEqual(target['ra'], original_target.ra) - self.assertEqual(target['dec'], original_target.dec) - self.assertEqual(target['epoch'], original_target.epoch) - self.assertEqual(target['pm_ra'], original_target.pm_ra) - self.assertEqual(target['pm_dec'], original_target.pm_dec) - if datums: - photometry_datums = len(alert['data'].get('photometry', [])) - spectroscopy_datums = len(alert['data'].get('spectroscopy', [])) - photometry_count = 0 - spectroscopy_count = 0 - self.assertEqual(photometry_datums + spectroscopy_datums, len(datums)) - # These should line up - for datum in datums: - if datum.data_type == 'photometry': - hermes_datum = alert['data']['photometry'][photometry_count] - self.assertEqual(hermes_datum['target_name'], datum.target.name) - self.assertEqual(hermes_datum['date_obs'], datum.timestamp.isoformat()) - self.assertEqual(hermes_datum['telescope'], datum.value.get('telescope')) - self.assertEqual(hermes_datum['brightness'], datum.value.get('magnitude')) - self.assertEqual(hermes_datum['brightness_error'], datum.value.get('error')) - self.assertEqual(hermes_datum['bandpass'], datum.value.get('filter')) - photometry_count += 1 - elif datum.data_type == 'spectroscopy': - hermes_datum = alert['data']['spectroscopy'][spectroscopy_count] - self.assertEqual(hermes_datum['target_name'], datum.target.name) - self.assertEqual(hermes_datum['date_obs'], datum.timestamp.isoformat()) - if 'flux' in datum.value and 'wavelength' in datum.value: - self.assertEqual(hermes_datum['flux'], datum.value.get('flux')) - self.assertEqual(hermes_datum['wavelength'], datum.value.get('wavelength')) - if 
'error' in datum.value: - self.assertEqual(hermes_datum['flux_error'], datum.value.get('error')) - else: - for i, entry in enumerate(datum.value.values()): - if 'flux' in entry: - self.assertEqual(hermes_datum['flux'][i], entry['flux']) - self.assertEqual(hermes_datum['wavelength'][i], entry['wavelength']) - spectroscopy_count += 1 - - def test_convert_to_hermes_format(self): - datums = [self.rd1, self.rd2, self.rd3] - targets = [self.target] - alert = create_hermes_alert(self.message_info, datums, targets) - # Now check the alerts formatting is correct - self._check_alert(alert, self.message_info, datums, targets) - - def test_convert_to_hermes_format_extra_target(self): - target2 = SiderealTargetFactory.create() - datums = [self.rd1, self.rd2, self.rd3] - targets = [target2, self.target] - alert = create_hermes_alert(self.message_info, datums, targets) - # Now check the alerts formatting is correct - self._check_alert(alert, self.message_info, datums, targets) - - def test_convert_to_hermes_format_only_targets(self): - target2 = SiderealTargetFactory.create() - targets = [target2, self.target] - alert = create_hermes_alert(self.message_info, [], targets) - # Now check the alerts formatting is correct - self._check_alert(alert, self.message_info, [], targets) - - def test_convert_to_hermes_format_only_datums(self): - datums = [self.rd1, self.rd2, self.rd3] - alert = create_hermes_alert(self.message_info, datums, []) - # Now check the alerts formatting is correct - self._check_alert(alert, self.message_info, datums, [self.target]) - - def test_convert_to_hermes_format_spectro_datums(self): - datums = [self.rd4, self.rd5] - alert = create_hermes_alert(self.message_info, datums, []) - # Now check the alerts formatting is correct - self._check_alert(alert, self.message_info, datums, [self.target]) - - def test_convert_to_hermes_format_mixed_datums(self): - datums = [self.rd1, self.rd2, self.rd3, self.rd4, self.rd5] - alert = create_hermes_alert(self.message_info, 
datums, []) - # Now check the alerts formatting is correct - self._check_alert(alert, self.message_info, datums, [self.target]) - - def test_convert_to_hermes_format_bad_spectro_datum_fails(self): - datums = [self.rd5, self.bad_rd] - with self.assertRaises(HermesMessageException): - create_hermes_alert(self.message_info, datums, []) diff --git a/tom_dataproducts/views.py b/tom_dataproducts/views.py index d5787ea73..65da99c9e 100644 --- a/tom_dataproducts/views.py +++ b/tom_dataproducts/views.py @@ -30,8 +30,8 @@ from tom_dataproducts.data_processor import run_data_processor from tom_observations.models import ObservationRecord from tom_observations.facility import get_service_class -from tom_dataproducts.sharing import (share_data_with_hermes, share_data_with_tom, sharing_feedback_handler, - download_data) +from tom_common.sharing import get_sharing_backend +from tom_dataproducts.sharing import download_data, sharing_feedback_handler import tom_dataproducts.single_target_data_service.single_target_data_service as stds from tom_targets.models import Target @@ -404,29 +404,53 @@ def form_invalid(self, form): return redirect(form.cleaned_data.get('referrer', '/')) def post(self, request, *args, **kwargs): + """Handle POST requests for sharing data. + + Shares Data Products, ReducedDatums, and whole-target data to whichever + destination the user picked in the share form. Dispatch is via the + SharingBackend registry: ``share_destination`` has the form + ``':'`` (e.g. ``'hermes:hermes.test'`` or + ``'tom:tom_b'``). The registry lookup finds the backend class; we call + its ``share()`` method with the querysets assembled from the URL/POST + arguments. + + The ``'download'`` destination is special-cased because it is not a + SharingBackend — it emits a CSV file to the browser instead of + publishing anywhere. """ - Method that handles the POST requests for sharing data. - Handles Data Products and All the data of a type for a target as well as individual Reduced Datums. 
- Submit to Hermes, or Share with TOM (soon). - """ - data_share_form = DataShareForm(request.POST, request.FILES) + data_share_form = DataShareForm(request.POST, request.FILES, user=request.user) if data_share_form.is_valid(): form_data = data_share_form.cleaned_data share_destination = form_data['share_destination'] - product_id = kwargs.get('dp_pk', None) - target_id = kwargs.get('tg_pk', None) - # Check if data points have been selected. - selected_data = request.POST.getlist("share-box") + # Selected ReducedDatum primary keys from the share-box checkboxes. + selected_data = request.POST.getlist('share-box') - # Check Destination - if 'HERMES' in share_destination.upper(): - response = share_data_with_hermes(share_destination, form_data, product_id, target_id, selected_data) - elif share_destination == 'download': + # The 'download' destination is handled specially: it writes a CSV + # directly to the response rather than going through a SharingBackend. + if share_destination == 'download': return download_data(form_data, selected_data=selected_data) - else: - response = share_data_with_tom(share_destination, form_data, product_id, target_id, selected_data) + + # Turn the URL-provided ids into querysets for the backend. + product_id = kwargs.get('dp_pk', None) + target_id = kwargs.get('tg_pk', None) + data_products = DataProduct.objects.filter(pk=product_id) if product_id else None + targets = Target.objects.filter(pk=target_id) if target_id else None + reduced_datums = ReducedDatum.objects.filter(pk__in=selected_data) if selected_data else None + + # Dispatch via the SharingBackend registry. The backend name is + # the prefix before the ':' in share_destination. A missing ':' + # (legacy TOM-to-TOM 'mytom' form) is treated as backend 'tom'. 
+ backend_name = share_destination.partition(':')[0] or 'tom' + backend = get_sharing_backend(backend_name)() + response = backend.share( + form_data, + data_products=data_products, + reduced_datums=reduced_datums, + targets=targets, + user=request.user, + ) sharing_feedback_handler(response, self.request) return redirect(reverse('tom_targets:detail', kwargs={'pk': request.POST.get('target')})) From e395603ff05d86f0fdac6ca049f01ae4eb407d27 Mon Sep 17 00:00:00 2001 From: "William (Lindy) Lindstrom" Date: Fri, 1 May 2026 15:04:13 -0700 Subject: [PATCH 09/22] Route tom_targets Target sharing through the SharingBackend registry `TargetShareView.form_valid`, `TargetListShareView.form_valid`, `share_target_and_all_data`, and `continuous_share_data` now all use the `TomToolkitSharingBackend` (via `get_sharing_backend(name)().share(...)` `share_target_with_tom` now supports DRF API_KEY auth (sent as `Authorization: Token `) as well as the existing HTTP Basic (username/password) path. --- tom_targets/sharing.py | 236 ++++++++++++++++++++++++++--------------- tom_targets/views.py | 94 ++++++++++++---- 2 files changed, 229 insertions(+), 101 deletions(-) diff --git a/tom_targets/sharing.py b/tom_targets/sharing.py index 11a99b699..d739b55bd 100644 --- a/tom_targets/sharing.py +++ b/tom_targets/sharing.py @@ -1,123 +1,193 @@ -import requests +""" +Target-level sharing helpers. + +Used by ``tom_targets.views.TargetShareView`` and related views, and by the +post-save signal that implements ``PersistentShare`` (continuous sharing). + +Dispatch now goes through the SharingBackend registry: parse the +``share_destination`` string's ``:`` prefix, look up +the backend class with ``tom_common.sharing.get_sharing_backend``, and +call its ``share()`` method. The hardcoded ``if 'HERMES' in destination`` +branch was removed during the refactor; HERMES is now just one more +registered backend. 
+ +``share_target_with_tom`` (below) is a helper that creates or updates a +Target on a destination TOM; it is separate from ``TomToolkitSharingBackend.share`` +because its job is Target registration rather than data sharing. +""" +import logging +import requests from django.conf import settings from django.core.exceptions import ImproperlyConfigured -from tom_targets.serializers import TargetSerializer -from tom_targets.models import PersistentShare -from tom_dataproducts.sharing import (check_for_share_safe_datums, share_data_with_tom, - get_destination_target, sharing_feedback_converter) +from tom_common.sharing import get_destination_target, get_sharing_backend from tom_dataproducts.models import ReducedDatum -from tom_dataproducts.alertstreams.hermes_publisher import publish_to_hermes, BuildHermesMessage +from tom_dataproducts.sharing import sharing_feedback_converter +from tom_targets.models import PersistentShare +from tom_targets.serializers import TargetSerializer +logger = logging.getLogger(__name__) -def share_target_and_all_data(share_destination, target): + +def _parse_backend_name(share_destination: str) -> str: + """Return the backend name prefix from a ``:`` share-destination. + + A missing ':' (legacy TOM-to-TOM form ``'mytom'``) is treated as + backend ``'tom'``; that keeps old saved PersistentShare values working. """ - Given a sharing destination, shares the target and all its current dataproducts - with that destination. Will raise an Exception is any portion of sharing fails. - :param share_destination: String sharing destination from the DATA_SHARING setting - :param target: Target instance that should be shared with all its data + prefix, sep, _sub = share_destination.partition(':') + return prefix if sep else 'tom' + + +def share_target_and_all_data(share_destination, target): + """Share ``target`` and all of its current DataProducts / ReducedDatums to ``share_destination``. 
+ + Called by ``PersistentShare`` setup when a user first enables + continuous sharing of a target; we push the one-shot backfill of + existing data so the destination starts in sync with the source. + Raises if any step fails so the caller can surface the error. """ - if 'HERMES' in share_destination.upper(): - hermes_topic = share_destination.split(':')[1] - destination = share_destination.split(':')[0] - filtered_reduced_datums = check_for_share_safe_datums( - destination, ReducedDatum.objects.filter(target=target), topic=hermes_topic) - sharing = getattr(settings, "DATA_SHARING", {}) - tom_name = f"{getattr(settings, 'TOM_NAME', 'TOM Toolkit')}" - message = BuildHermesMessage(title=f"Setting up continuous sharing for {target.name} from " - f"{tom_name}.", - authors=sharing.get('hermes', {}).get('DEFAULT_AUTHORS', None), - submitter='', - message='', - topic=hermes_topic - ) - return sharing_feedback_converter(publish_to_hermes(message, filtered_reduced_datums)) - else: - response = share_target_with_tom(share_destination, {'target': target}) - response_feedback = sharing_feedback_converter(response) - if 'ERROR' in response_feedback.upper(): - return response_feedback - return sharing_feedback_converter(share_data_with_tom(share_destination, None, target_id=target.id)) + backend_name = _parse_backend_name(share_destination) + + if backend_name == 'hermes': + # HERMES share: push every existing ReducedDatum for this target. + # The backend filters out any datum already published to this + # topic via check_for_share_safe_datums. 
+ datums = ReducedDatum.objects.filter(target=target) + backend = get_sharing_backend('hermes')() + tom_name = getattr(settings, 'TOM_NAME', 'TOM Toolkit') + form_data = { + 'share_destination': share_destination, + 'share_title': f'Setting up continuous sharing for {target.name} from {tom_name}.', + } + return sharing_feedback_converter( + backend.share(form_data, reduced_datums=datums), + ) + + # TOM-to-TOM: first create the Target on the destination TOM (if + # missing), then push all of its data via the registered backend. + response = share_target_with_tom(share_destination, {'target': target}) + response_feedback = sharing_feedback_converter(response) + if 'ERROR' in response_feedback.upper(): + return response_feedback + backend = get_sharing_backend(backend_name)() + return sharing_feedback_converter( + backend.share( + {'share_destination': share_destination}, + targets=type(target).objects.filter(pk=target.id), + ), + ) def continuous_share_data(target, reduced_datums): + """Push newly-created ``reduced_datums`` to any ``PersistentShare`` destinations for ``target``. + + Triggered from the ``post_save`` signal on ReducedDatum (see + ``tom_targets.signals``). For each PersistentShare on the target, + dispatches through the SharingBackend registry. """ - Triggered when new ReducedDatums are created. - Shares those ReducedDatums to the sharing destination of any PersistentShares on the target. 
- :param target: Target instance that these reduced_datums belong to - :param reduced_datums: list of ReducedDatum instances to share - """ - persistentshares = PersistentShare.objects.filter(target=target) - for persistentshare in persistentshares: - share_destination = persistentshare.destination + persistent_shares = PersistentShare.objects.filter(target=target) + for persistent_share in persistent_shares: + share_destination = persistent_share.destination reduced_datum_pks = [rd.pk for rd in reduced_datums] - if 'HERMES' in share_destination.upper(): - hermes_topic = share_destination.split(':')[1] - destination = share_destination.split(':')[0] - filtered_reduced_datums = check_for_share_safe_datums( - destination, ReducedDatum.objects.filter(pk__in=reduced_datum_pks), topic=hermes_topic) - sharing = getattr(settings, "DATA_SHARING", {}) - tom_name = f"{getattr(settings, 'TOM_NAME', 'TOM Toolkit')}" - message = BuildHermesMessage(title=f"Updated data for {target.name} from " - f"{tom_name}.", - authors=sharing.get('hermes', {}).get('DEFAULT_AUTHORS', None), - submitter=tom_name, - message='', - topic=hermes_topic - ) - publish_to_hermes(message, filtered_reduced_datums) - else: - share_data_with_tom(share_destination, None, None, None, selected_data=reduced_datum_pks) + datums_qs = ReducedDatum.objects.filter(pk__in=reduced_datum_pks) + + backend_name = _parse_backend_name(share_destination) + tom_name = getattr(settings, 'TOM_NAME', 'TOM Toolkit') + form_data = { + 'share_destination': share_destination, + # The share title is used by HERMES for message attribution and + # is ignored by TomToolkitSharingBackend. + 'share_title': f'Updated data for {target.name} from {tom_name}.', + 'submitter': tom_name, + } + + try: + backend = get_sharing_backend(backend_name)() + except ImportError: + # Configured destination references a backend that is not + # installed; log and skip rather than crashing the signal. 
+ logger.warning( + 'PersistentShare destination %s references unknown backend %s; skipping.', + share_destination, backend_name, + ) + continue + backend.share(form_data, reduced_datums=datums_qs) def share_target_with_tom(share_destination, form_data, target_lists=()): + """Create or update a Target on a destination TOM. + + Separate from ``TomToolkitSharingBackend.share`` because this is Target + registration (``api/targets/``), not data publishing. Called when a + user first sets up sharing for a target that does not yet exist on the + destination TOM. Supports both authentication methods + ``TomToolkitSharingBackend`` supports: DRF API key (``API_KEY``) or + HTTP Basic (``USERNAME`` / ``PASSWORD``). """ - Share a target with a remote TOM. - :param share_destination: The name of the destination TOM as defined in settings.DATA_SHARING - :param form_data: The form data from the target form - :param target_lists: The target lists to add the target to in the destination TOM - :return: The response from the destination TOM - """ - # Try to get destination tom authentication/URL information + # share_destination may be the legacy bare key ('mytom') or the new + # prefixed form ('tom:mytom'). Strip the prefix so we can look up the + # settings.DATA_SHARING entry by its dict key. 
+ _prefix, _sep, sub = share_destination.partition(':') + destination_key = sub if _sep else share_destination + try: - destination_tom_base_url = settings.DATA_SHARING[share_destination]['BASE_URL'] - username = settings.DATA_SHARING[share_destination]['USERNAME'] - password = settings.DATA_SHARING[share_destination]['PASSWORD'] + destination_tom_base_url = settings.DATA_SHARING[destination_key]['BASE_URL'] except KeyError as err: - raise ImproperlyConfigured(f'Check DATA_SHARING configuration for {share_destination}: Key {err} not found.') - auth = (username, password) + raise ImproperlyConfigured( + f'Check DATA_SHARING configuration for {destination_key}: Key {err} not found.', + ) + + # Auth: prefer DRF TokenAuth if API_KEY is set; else fall back to HTTP Basic. + cfg = settings.DATA_SHARING[destination_key] headers = {'Content-Type': 'application/json', 'Accept': 'application/json'} + auth = None + if cfg.get('API_KEY'): + headers['Authorization'] = f"Token {cfg['API_KEY']}" + else: + try: + auth = (cfg['USERNAME'], cfg['PASSWORD']) + except KeyError as err: + raise ImproperlyConfigured( + f'Check DATA_SHARING configuration for {destination_key}: Key {err} not found. ' + 'Provide either API_KEY (preferred) or USERNAME + PASSWORD.', + ) - # establish destination TOM URLs targets_url = destination_tom_base_url + 'api/targets/' - # Check if target already exists in destination DB - destination_target_id, target_search_response = get_destination_target(form_data['target'], targets_url, headers, - auth) - # Handle errors or multiple targets found + # Resolve whether the target already exists on the destination TOM by + # fuzzy-matching on its name and aliases. 
+ destination_target_id, target_search_response = get_destination_target( + form_data['target'], targets_url, headers, auth, + ) if target_search_response.status_code != 200: return target_search_response - elif isinstance(destination_target_id, list) and len(destination_target_id) > 1: + if isinstance(destination_target_id, list) and len(destination_target_id) > 1: return {'message': 'ERROR: Multiple targets with matching name found in destination TOM.'} - # Build list of targetlists to add target to in destination TOM + # Always tag the created-or-updated target with one TargetList per + # provided target_list, plus a TargetList named after the source TOM + # so the destination-TOM operator can see where the target came from. target_dict_list = [{'name': f'Imported From {settings.TOM_NAME}'}] for target_list in target_lists: target_dict_list.append({'name': target_list.name}) - # Create or update target in destination TOM if destination_target_id is None: - # If target is not in Destination, serialize and create new target. + # Target is new on the destination: serialize and create it. serialized_target = TargetSerializer(form_data['target']).data - # Remove local User Groups + # Source-TOM group memberships do not translate to the destination; + # strip them so the destination TOM's permission system starts clean. serialized_target['groups'] = [] - # Add target lists serialized_target['target_lists'] = target_dict_list - target_create_response = requests.post(targets_url, json=serialized_target, headers=headers, auth=auth) + target_create_response = requests.post( + targets_url, json=serialized_target, headers=headers, auth=auth, + ) else: - # Add target to target lists if it already exists in destination TOM + # Target exists: just append the new target_lists via PATCH. 
update_target_data = {'target_lists': target_dict_list} update_target_url = targets_url + f'{destination_target_id}/' - target_create_response = requests.patch(update_target_url, json=update_target_data, headers=headers, auth=auth) + target_create_response = requests.patch( + update_target_url, json=update_target_data, headers=headers, auth=auth, + ) return target_create_response diff --git a/tom_targets/views.py b/tom_targets/views.py index 4c4c09c9f..52cff4141 100644 --- a/tom_targets/views.py +++ b/tom_targets/views.py @@ -46,8 +46,8 @@ UnknownTypeTargetCreateForm, TargetSelectionForm from tom_targets.sharing import share_target_with_tom from tom_targets.merge import target_merge -from tom_dataproducts.sharing import (share_data_with_hermes, share_data_with_tom, sharing_feedback_handler, - share_target_list_with_hermes) +from tom_common.sharing import get_sharing_backend +from tom_dataproducts.sharing import sharing_feedback_handler from tom_dataproducts.models import ReducedDatum from tom_targets.groups import ( add_all_to_grouping, add_selected_to_grouping, remove_all_from_grouping, remove_selected_from_grouping, @@ -62,7 +62,7 @@ from tom_observations.utils import get_sidereal_visibility from tom_targets.seed import seed_messier_targets from tom_targets.tables import TargetTable, TargetGroupTable -from tom_dataproducts.alertstreams.hermes_publisher import BuildHermesMessage, preload_to_hermes +from tom_hermes.sharing import BuildHermesMessage, preload_to_hermes logger = logging.getLogger(__name__) logger.setLevel(logging.DEBUG) @@ -432,23 +432,48 @@ def form_invalid(self, form): return redirect(self.get_success_url()) def form_valid(self, form): - """ - Shares the target with the selected destination(s) and redirects to the target detail page. + """Share the target with the selected destination(s) and redirect to the target detail page. 
+ + Dispatch is via the SharingBackend registry: + - ``hermes:`` → HermesSharingBackend.share with the selected + ReducedDatums and the current Target. + - ``tom:`` (or the legacy bare ````) → first + register the Target on the destination TOM via + ``share_target_with_tom``, then, if the user picked specific + ReducedDatums, push them through TomToolkitSharingBackend.share. """ form_data = form.cleaned_data share_destination = form_data['share_destination'] target_id = self.kwargs.get('pk', None) - selected_data = self.request.POST.getlist("share-box") - if 'HERMES' in share_destination.upper(): - response = share_data_with_hermes(share_destination, form_data, None, target_id, selected_data) + selected_data = self.request.POST.getlist('share-box') + backend_name = share_destination.partition(':')[0] or 'tom' + + targets_qs = Target.objects.filter(pk=target_id) if target_id else None + reduced_datums_qs = ( + ReducedDatum.objects.filter(pk__in=selected_data) if selected_data else None + ) + + if backend_name == 'hermes': + backend = get_sharing_backend('hermes')() + response = backend.share( + form_data, + reduced_datums=reduced_datums_qs, + targets=targets_qs, + user=self.request.user, + ) sharing_feedback_handler(response, self.request) else: - # Share Target with Destination TOM + # Step 1: ensure the Target exists on the destination TOM. response = share_target_with_tom(share_destination, form_data) sharing_feedback_handler(response, self.request) + # Step 2: optionally push the selected ReducedDatums. 
if selected_data: - # Share Data with Destination TOM - response = share_data_with_tom(share_destination, form_data, selected_data=selected_data) + backend = get_sharing_backend(backend_name)() + response = backend.share( + form_data, + reduced_datums=reduced_datums_qs, + user=self.request.user, + ) sharing_feedback_handler(response, self.request) return redirect(self.get_success_url()) @@ -877,23 +902,56 @@ def form_invalid(self, form): return redirect(self.get_success_url()) def form_valid(self, form): + """Share the selected targets (and optionally their photometry) to the chosen destination. + + Dispatch is via the SharingBackend registry: + - ``hermes:`` → one HermesSharingBackend.share call that + publishes all selected targets together (and, if dataSwitch is + on, their photometry ReducedDatums) under the TargetList's + title (see HermesSharingBackend._resolve_inputs). + - ``tom:`` (or legacy bare ````) → loop each + selected target: register it on the destination TOM, then + optionally push all its data via TomToolkitSharingBackend.share. + """ form_data = form.cleaned_data share_destination = form_data['share_destination'] selected_targets = self.request.POST.getlist('selected-target') data_switch = self.request.POST.get('dataSwitch', False) - if 'hermes' in share_destination.lower(): - response = share_target_list_with_hermes( - share_destination, form_data, selected_targets, include_all_data=data_switch) + backend_name = share_destination.partition(':')[0] or 'tom' + + if backend_name == 'hermes': + # HERMES: push everything in one call. form_data['target_list'] + # is already populated by the form's clean(); the backend reads + # it to set the message title. 
+ targets_qs = Target.objects.filter(id__in=selected_targets) + reduced_datums_qs = None + if data_switch: + reduced_datums_qs = ReducedDatum.objects.filter( + target__id__in=selected_targets, data_type='photometry', + ) + backend = get_sharing_backend('hermes')() + response = backend.share( + form_data, + targets=targets_qs, + reduced_datums=reduced_datums_qs, + user=self.request.user, + ) sharing_feedback_handler(response, self.request) else: + # TOM-to-TOM: per-target registration + optional data push. + tom_backend = get_sharing_backend(backend_name)() for target in selected_targets: - # Share each target individually form_data['target'] = Target.objects.get(id=target) - response = share_target_with_tom(share_destination, form_data, target_lists=[form_data['target_list']]) + response = share_target_with_tom( + share_destination, form_data, target_lists=[form_data['target_list']], + ) sharing_feedback_handler(response, self.request) if data_switch: - # If Data sharing request, share all data associated with the target - response = share_data_with_tom(share_destination, form_data, target_id=target) + response = tom_backend.share( + form_data, + targets=Target.objects.filter(pk=target), + user=self.request.user, + ) sharing_feedback_handler(response, self.request) if not selected_targets: messages.error(self.request, f'No targets shared. {form.errors.as_json()}') From 998901a4b06ad6875bded5a37e13b2b352e64fbb Mon Sep 17 00:00:00 2001 From: "William (Lindy) Lindstrom" Date: Fri, 1 May 2026 15:30:14 -0700 Subject: [PATCH 10/22] Remove tom_dataproducts.alertstreams HERMES files These have been refactored into tom_hermes. See `tom_hermes.alertstreams` and `tom_hermes.sharing` sub-packages. 
--- .../alertstreams/hermes_ingester.py | 305 ----------------- .../alertstreams/hermes_publisher.py | 312 ------------------ 2 files changed, 617 deletions(-) delete mode 100644 tom_dataproducts/alertstreams/hermes_ingester.py delete mode 100644 tom_dataproducts/alertstreams/hermes_publisher.py diff --git a/tom_dataproducts/alertstreams/hermes_ingester.py b/tom_dataproducts/alertstreams/hermes_ingester.py deleted file mode 100644 index 97cb29618..000000000 --- a/tom_dataproducts/alertstreams/hermes_ingester.py +++ /dev/null @@ -1,305 +0,0 @@ -import os -import tempfile -import uuid -import json -from urllib.parse import urlparse, urljoin -from dateutil.parser import parse -import logging -import requests - -from django.core.files import File -from django.conf import settings - -from tom_alerts.models import AlertStreamMessage -from tom_targets.models import Target, TargetList -from tom_dataproducts.data_processor import run_data_processor -from tom_dataproducts.models import DataProduct, ReducedDatum - -logger = logging.getLogger(__name__) -logger.setLevel(logging.DEBUG) - - -def get_or_create_uuid_from_metadata(metadata) -> uuid.UUID: - """ - Extract the UUID from the message metadata, or generate a UUID if none present in metadata. - - The headers property of the metadata is a list of tuples of the form [('key', value), ...]. - """ - # get the tuple with the uuid: key is '_id' - message_uuid_tuple = None - if metadata.headers: - message_uuid_tuple = next((item for item in metadata.headers if item[0] == '_id'), None) - if message_uuid_tuple: - message_uuid = uuid.UUID(bytes=message_uuid_tuple[1]) - else: - # this message header metadata didn't have UUID, so make one - message_uuid = uuid.uuid4() - return message_uuid - - -HERMES_SPECTROSCOPY_FILE_EXTENSIONS = ('.fits.fz', '.fits', '.csv', '.txt', '.ascii') - - -def hermes_alert_handler(alert, metadata): - """ - Example Alert Handler to record data streamed through Hermes as a new ReducedDatum. 
- - -- Reads Photometry and Spectroscopy Data (both inline and file-based) - -- Creates a new Target if no match is found for Target Name or aliases - -- Does not Ingest Data if exact match already exists - -- Requires 'tom_alertstreams' in settings.INSTALLED_APPS - -- Requires ALERT_STREAMS['topic_handlers'] in settings - """ - alert_as_dict = alert.content - alert_id = get_or_create_uuid_from_metadata(metadata) - photometry_table = alert_as_dict['data'].get('photometry') or [] - spectroscopy_table = alert_as_dict['data'].get('spectroscopy') or [] - target_table = alert_as_dict['data'].get('targets', []) - # Set a hermes_base_url to link ingested messages to - if hasattr(settings, 'DATA_SHARING'): - hermes_base_url = settings.DATA_SHARING.get('hermes', {}).get('BASE_URL', 'https://hermes.lco.global') - else: - hermes_base_url = 'https://hermes.lco.global' - hermes_message_url = urljoin(hermes_base_url, f'/message/{alert_id}') - - if not photometry_table and not spectroscopy_table: - return - - hermes_alert, created = AlertStreamMessage.objects.get_or_create( - topic=metadata.topic, exchange_status='ingested', message_id=alert_id) - - if not created: - # Only try to read and ingest the hermes message if we haven't already done so! 
- return - - # Cache of target names in alert message -> Target model instance in TOM - target_cache = {} - - def resolve_target(target_name): - if target_name not in target_cache: - # We first attempt to match to an existing target in the TOM by target_name or any specified aliases - target_entry = next((t for t in target_table if t.get('name', '') - == target_name), {}) if target_table else {} - aliases = target_entry.get('aliases', []) - query = Target.matches.match_name(target_name) - if not query: - for alias in aliases: - query = Target.matches.match_name(alias) - if query: - break - if query: - target_cache[target_name] = query[0] - # If we fail to find a local Target, we will create a target from what's in the alert message - elif target_table: - new_target = create_new_hermes_target(target_table, target_name) - if new_target is not None: - target_cache[target_name] = new_target - return target_cache.get(target_name) - - # Now we ingest all the photometry rows in the alert message - for row in photometry_table: - target = resolve_target(row['target_name']) - if target is None: - continue - - try: - obs_date = parse(row['date_obs']) - except ValueError: - continue - - datum = { - 'target': target, - 'data_type': 'photometry', - 'timestamp': obs_date, - 'value': get_hermes_phot_value(row) - } - datum_defaults = { - 'source_name': metadata.topic, - 'source_location': hermes_message_url, - } - new_rd, created = ReducedDatum.objects.get_or_create(**datum, defaults=datum_defaults) - if created: - new_rd.message.add(hermes_alert) - new_rd.save() - - # Now ingest all spectroscopy rows, either by downloading referenced files as a DataProduct or ingesting raw data - for row in spectroscopy_table: - target = resolve_target(row['target_name']) - if target is None: - continue - - try: - obs_date = parse(row['date_obs']) - except ValueError: - continue - - # If file_info exists on the spectroscopy row, attempt to get a data file url from there - file_url = 
_get_spectroscopy_file_url(row.get('file_info') or []) - if file_url: - _ingest_hermes_spectroscopy_file(file_url, row, target, hermes_alert, hermes_message_url) - # Otherwise, check if flux and wavelength arrays of data are specified in the row - elif row.get('flux') and row.get('wavelength'): - value = { - 'flux': row['flux'], - 'flux_units': row.get('flux_units', ''), - 'wavelength': row['wavelength'], - 'wavelength_units': row.get('wavelength_units', ''), - } - for key in ('telescope', 'instrument', 'reducer', 'observer', 'spec_type', 'flux_type', 'classification', - 'comments', 'exposure_time', 'setup', 'proprietary_period', 'proprietary_period_units'): - if row.get(key): - value[key] = row[key] - if row.get('flux_error'): - value['flux_error'] = row['flux_error'] - - datum = { - 'target': target, - 'data_type': 'spectroscopy', - 'timestamp': obs_date, - 'value': value, - } - datum_defaults = { - 'source_name': metadata.topic, - 'source_location': hermes_message_url, - } - new_rd, created = ReducedDatum.objects.get_or_create(**datum, defaults=datum_defaults) - if created: - new_rd.message.add(hermes_alert) - new_rd.save() - - -def _get_spectroscopy_file_url(file_info_list): - """ - Return the URL of the first entry in a file_info list whose filename matches a supported - spectroscopy file extension, or None if no match is found. - """ - for entry in file_info_list: - url = entry.get('url', '') - filename = os.path.basename(urlparse(url).path) - for ext in HERMES_SPECTROSCOPY_FILE_EXTENSIONS: - if filename.lower().endswith(ext): - return url - return None - - -def _ingest_hermes_spectroscopy_file(url, spectroscopy_row, target, hermes_alert, alert_url): - """ - Downloads a spectroscopy file from the given URL, saves it as a DataProduct, and processes - it into ReducedDatum objects using the configured spectroscopy data processor. - Skips the download if a DataProduct with this URL as its product_id already exists. 
- """ - filename = os.path.basename(urlparse(url).path) - - try: - response = requests.get(url) - response.raise_for_status() - except Exception as ex: - logger.error(f'Failed to download spectroscopy file from {url}: {repr(ex)}') - return - - spectroscopy_keys = ['date_obs', 'flux_units', 'wavelength_units', 'telescope', 'instrument', 'reducer', - 'observer', 'spec_type', 'flux_type', 'classification', 'comments', 'exposure_time', - 'setup', 'proprietary_period', 'proprietary_period_units'] - spectroscopy_data = {key: spectroscopy_row[key] - for key in spectroscopy_keys if key in spectroscopy_row and spectroscopy_row[key]} - # Inject these two extra fields since they should be associated with the ReducedDatums somehow - spectroscopy_data['source_name'] = hermes_alert.topic - spectroscopy_data['source_location'] = alert_url - - try: - dp, created = DataProduct.objects.get_or_create( - product_id=url, target=target, data_product_type='spectroscopy', - defaults={'extra_data': json.dumps(spectroscopy_data)}, - ) - if created: - _, ext = os.path.splitext(filename) - with tempfile.NamedTemporaryFile(suffix=ext, delete=False) as tmpfile: - tmpfile.write(response.content) - tmpfile_path = tmpfile.name - try: - with open(tmpfile_path, 'rb') as f: - dp.data.save(filename, File(f), save=True) - finally: - os.unlink(tmpfile_path) - # only re-ingest the file if its a new dataproduct for us - reduced_datums = run_data_processor(dp) - for rd in reduced_datums: - rd.message.add(hermes_alert) - except Exception as ex: - logger.error(f'Failed to ingest spectroscopy file from {url}: {repr(ex)}') - - -def get_hermes_phot_value(phot_data): - """ - Convert Hermes Message format for a row of Photometry table into parameters accepted by the Reduced Datum model - :param phot_data: Dictionary containing Hermes Photometry table. 
- :return: Dictionary containing properly formatted parameters for Reduced_Datum - """ - data_dictionary = { - 'error': phot_data.get('brightness_error', ''), - 'filter': phot_data['bandpass'], - 'telescope': phot_data.get('telescope', ''), - 'instrument': phot_data.get('instrument', ''), - 'unit': phot_data['brightness_unit'], - } - - if phot_data.get('brightness', None): - data_dictionary['magnitude'] = phot_data['brightness'] - elif phot_data.get('limiting_brightness', None): - data_dictionary['limit'] = phot_data['limiting_brightness'] - if phot_data.get('limiting_brightness_error'): - data_dictionary['limit_error'] = phot_data['limiting_brightness_error'] - if phot_data.get('limiting_brightness_unit'): - data_dictionary['limit_unit'] = phot_data['limiting_brightness_unit'] - - for key in ('observer', 'comments', 'exposure_time', 'catalog'): - if phot_data.get(key): - data_dictionary[key] = phot_data[key] - - return data_dictionary - - -def create_new_hermes_target(target_table, target_name=None, target_list_name=None): - """ - Ingest a target into your TOM from Hermes. - Takes a target_table and a target_name. If no target name is given, every target on the target table will be - ingested. - :param target_table: Hermes Target table from a Hermes Message - :param target_name: Name for individual target to ingest from target table. - :param target_list_name: Name of TargetList within which new target should be placed. 
- :return: - """ - target = None - for hermes_target in target_table: - if target_name == hermes_target['name'] or target_name is None: - - new_target = {"name": hermes_target.pop('name')} - if "ra" in hermes_target and "dec" in hermes_target: - new_target['type'] = 'SIDEREAL' - new_target['ra'] = hermes_target.pop('ra') - new_target['dec'] = hermes_target.pop('dec') - new_target['pm_ra'] = hermes_target.pop('pm_ra', None) - new_target['pm_dec'] = hermes_target.pop('pm_dec', None) - new_target['epoch'] = hermes_target.pop('epoch', None) - elif "orbital_elements" in hermes_target: - orbital_elements = hermes_target.pop('orbital_elements') - new_target['type'] = 'NON_SIDEREAL' - new_target['epoch_of_elements'] = orbital_elements.pop('epoch_of_elements', None) - new_target['mean_anomaly'] = orbital_elements.pop('mean_anomaly', None) - new_target['arg_of_perihelion'] = orbital_elements.pop('argument_of_the_perihelion', None) - new_target['eccentricity'] = orbital_elements.pop('eccentricity', None) - new_target['lng_asc_node'] = orbital_elements.pop('longitude_of_the_ascending_node', None) - new_target['inclination'] = orbital_elements.pop('orbital_inclination', None) - new_target['semimajor_axis'] = orbital_elements.pop('semimajor_axis', None) - new_target['epoch_of_perihelion'] = orbital_elements.pop('epoch_of_perihelion', None) - new_target['perihdist'] = orbital_elements.pop('perihelion_distance', None) - aliases = hermes_target.pop('aliases', []) - target = Target(**new_target) - target.full_clean() - target.save(names=aliases, extras=hermes_target) - if target_list_name: - target_list, created = TargetList.objects.get_or_create(name=target_list_name) - if created: - logger.debug(f'New target_list created: {target_list_name}') - target_list.targets.add(target) - return target diff --git a/tom_dataproducts/alertstreams/hermes_publisher.py b/tom_dataproducts/alertstreams/hermes_publisher.py deleted file mode 100644 index 492763a5c..000000000 --- 
a/tom_dataproducts/alertstreams/hermes_publisher.py +++ /dev/null @@ -1,312 +0,0 @@ -import logging -import json -import requests - -from django.conf import settings -from django.core.cache import cache -from django.utils.module_loading import import_string - -from tom_alerts.models import AlertStreamMessage -from tom_targets.models import Target - -logger = logging.getLogger(__name__) -logger.setLevel(logging.DEBUG) - - -class HermesMessageException(Exception): - pass - - -def get_hermes_data_converter_class(): - return import_string(settings.DATA_SHARING['hermes'].get( - 'DATA_CONVERTER_CLASS', 'tom_dataproducts.alertstreams.hermes_publisher.HermesDataConverter')) - - -class HermesDataConverter(): - """ Class is used to encapsulate getting all the hermes values associated with - a ReducedDatum for either spectroscopy or photometry or a Target. This class - can be subclassed and reimplemented for TOMs that store the properties of - their ReducedDatums in a different way, or store Target props in a different way. - """ - def __init__(self, validate=True): - self.validate = validate - - def get_hermes_target(self, target): - """Build a row for a Hermes Target Table from a TOM BaseTarget Model. 
- """ - if target.type == "SIDEREAL": - target_table_row = { - 'name': target.name, - 'ra': target.ra, - 'dec': target.dec, - } - if target.epoch: - target_table_row['epoch'] = target.epoch - if target.pm_ra: - target_table_row['pm_ra'] = target.pm_ra - if target.pm_dec: - target_table_row['pm_dec'] = target.pm_dec - else: - target_table_row = { - 'name': target.name, - 'orbital_elements': { - "epoch_of_elements": target.epoch_of_elements, - "eccentricity": target.eccentricity, - "argument_of_the_perihelion": target.arg_of_perihelion, - "mean_anomaly": target.mean_anomaly, - "orbital_inclination": target.inclination, - "longitude_of_the_ascending_node": target.lng_asc_node, - "semimajor_axis": target.semimajor_axis, - "epoch_of_perihelion": target.epoch_of_perihelion, - "perihelion_distance": target.perihdist, - } - } - target_table_row['aliases'] = [alias.name for alias in target.aliases.all()] - return target_table_row - - def get_hermes_photometry(self, datum): - """Build a row for a Hermes Photometry Table using a TOM Photometry datum - """ - phot_table_row = { - 'target_name': datum.target.name, - 'date_obs': datum.timestamp.isoformat(), - 'telescope': datum.value.get('telescope'), - 'instrument': datum.value.get('instrument'), - 'bandpass': datum.value.get('filter', ''), - } - brightness_unit = convert_astropy_brightness_unit_to_hermes(datum.value.get('unit')) - if brightness_unit: - phot_table_row['brightness_unit'] = brightness_unit - if datum.value.get('magnitude', None): - phot_table_row['brightness'] = datum.value['magnitude'] - else: - phot_table_row['limiting_brightness'] = datum.value.get('limit', None) - error_value = datum.value.get('error', datum.value.get('magnitude_error', None)) - if error_value is not None and isinstance(error_value, (int, float)): - phot_table_row['brightness_error'] = error_value - return phot_table_row - - def get_hermes_spectroscopy(self, datum): - """Build a row for a Hermes Spectroscopy Table using a TOM Spectroscopy 
datum - The datum is assumed to have is json value be of the form {1: {flux: 1, wavelength:200}, 2: {},...} - Or the form {'flux': [1,2,3,...], 'wavelength': [1,2,3,...]} - """ - flux_list = [] - flux_error_list = [] - wavelength_list = [] - if 'flux' in datum.value and 'wavelength' in datum.value: - flux_list = datum.value['flux'] - wavelength_list = datum.value['wavelength'] - flux_error_list = datum.value.get('flux_error', datum.value.get('error', [])) - else: - for entry in datum.value.values(): - if 'flux' in entry: - flux_list.append(entry['flux']) - if 'wavelength' in entry: - wavelength_list.append(entry['wavelength']) - if 'error' in entry: - flux_error_list.append(entry['error']) - if 'flux_error' in entry: - flux_error_list.append(entry['flux_error']) - - if self.validate: - if len(flux_list) != len(wavelength_list): - msg = f"Spectroscopy Datum {datum.id} has mismatched flux and wavelength values" - logger.error(msg) - raise HermesMessageException(msg) - elif len(flux_list) == 0 or len(wavelength_list) == 0: - msg = f"Spectroscopy Datum {datum.id} has spectrum data in unknown format." - msg += "Please implement a custom HermesDatumConverter to support your data format." - logger.error(msg) - raise HermesMessageException(msg) - if flux_error_list and len(flux_error_list) != len(flux_list): - msg = f"Spectroscopy Datum {datum.id} must have the same number of flux and flux error datapoints" - logger.error(msg) - raise HermesMessageException(msg) - - # Make sure we have either telescope or instrument set. 
If not, attempt to pull them from the dataproduct - try: - dp_extras = json.loads(datum.data_product.extra_data) - except (json.JSONDecodeError, ValueError, AttributeError): - dp_extras = {} - telescope = datum.value.get('telescope') or dp_extras.get('telescope') - instrument = datum.value.get('instrument') or dp_extras.get('instrument') - reducer = datum.value.get('reducer') or dp_extras.get('reducer') - observer = datum.value.get('observer') or dp_extras.get('observer') - - spectroscopy_table_row = { - 'target_name': datum.target.name, - 'date_obs': datum.timestamp.isoformat(), - 'telescope': telescope, - 'instrument': instrument, - 'reducer': reducer, - 'observer': observer, - 'flux': flux_list, - 'wavelength': wavelength_list, - 'flux_units': convert_astropy_flux_unit_to_hermes(datum.value.get('flux_units')), - 'wavelength_units': convert_astropy_wavelength_unit_to_hermes(datum.value.get('wavelength_units')), - } - if flux_error_list: - spectroscopy_table_row['flux_error'] = flux_error_list - - return spectroscopy_table_row - - -def convert_astropy_brightness_unit_to_hermes(brightness_unit): - if not brightness_unit: - return brightness_unit - elif brightness_unit.upper() == 'AB' or brightness_unit.upper() == 'ABFLUX': - return 'AB mag' - else: - return brightness_unit - - -def convert_astropy_flux_unit_to_hermes(flux_unit): - if not flux_unit: - return flux_unit - elif flux_unit == 'erg / (Angstrom s cm2)': - return 'erg / s / cm² / Å' - else: - return flux_unit - - -def convert_astropy_wavelength_unit_to_hermes(wavelength_unit): - if not wavelength_unit: - return wavelength_unit - elif wavelength_unit.lower() == 'angstrom' or wavelength_unit == 'AA': - return 'Å' - elif wavelength_unit.lower() in ['micron', 'micrometer', 'um']: - return 'µm' - elif wavelength_unit.lower() == 'hertz': - return 'Hz' - else: - return wavelength_unit - - -class BuildHermesMessage(object): - """ - A HERMES Message Object that can be submitted to HOP through HERMES - """ - def 
__init__(self, title='', submitter='', authors='', message='', topic='hermes.test', **kwargs): - self.title = title - self.submitter = submitter - self.authors = authors - self.message = message - self.topic = topic - self.extra_info = kwargs - - -def publish_to_hermes(message_info, datums, targets=Target.objects.none(), **kwargs): - """ - Submits a typical hermes alert using the photometry and targets supplied to build a photometry table. - -- Stores an AlertStreamMessage connected to each datum to show that the datum has previously been shared. - :param message_info: HERMES Message Object created with BuildHermesMessage - :param datums: Queryset of Reduced Datums to be built into table. (Will also pull in targets) - :param targets: Queryset of Targets to be built into table. - :return: response - """ - if 'BASE_URL' not in settings.DATA_SHARING['hermes']: - return {'message': 'BASE_URL is not set for hermes in the settings.py DATA_SHARING section'} - if 'HERMES_API_KEY' not in settings.DATA_SHARING['hermes']: - return {'message': 'HERMES_API_KEY is not set for hermes in the settings.py DATA_SHARING section'} - - stream_base_url = settings.DATA_SHARING['hermes']['BASE_URL'] - submit_url = stream_base_url + 'api/v0/' + 'submit_message/' - # You will need your Hermes API key. This can be found on your Hermes profile page. 
- headers = {'Authorization': f"Token {settings.DATA_SHARING['hermes']['HERMES_API_KEY']}"} - try: - alert = create_hermes_alert(message_info, datums, targets, **kwargs) - except HermesMessageException as e: - # We have failed in building a valid hermes message, so report that error back - return {'message': 'ERROR: ' + str(e)} - - try: - response = requests.post(url=submit_url, json=alert, headers=headers) - response.raise_for_status() - # Only mark the datums as shared if the sharing was successful - hermes_alert = AlertStreamMessage( - topic=message_info.topic, message_id=response.json().get('uuid'), exchange_status='published') - hermes_alert.save() - for tomtoolkit_photometry in datums: - tomtoolkit_photometry.message.add(hermes_alert) - except Exception as ex: - logger.error(repr(ex)) - logger.error(response.content) - return response - - return response - - -def preload_to_hermes(message_info, reduced_datums, targets): - stream_base_url = settings.DATA_SHARING['hermes']['BASE_URL'] - preload_url = stream_base_url + 'api/v0/submit_message/preload/' - # You will need your Hermes API key. This can be found on your Hermes profile page. 
- headers = {'Authorization': f"Token {settings.DATA_SHARING['hermes']['HERMES_API_KEY']}"} - - alert = create_hermes_alert(message_info, reduced_datums, targets) - try: - response = requests.post(url=preload_url, json=alert, headers=headers) - response.raise_for_status() - return response.json()['key'] - except Exception as ex: - logger.error(repr(ex)) - logger.error(response.content) - - return '' - - -def create_hermes_alert(message_info, datums, targets=Target.objects.none(), **kwargs): - hermes_photometry_data = [] - hermes_spectroscopy_data = [] - hermes_target_dict = {} - - hermes_data_converter = get_hermes_data_converter_class()(validate=True) - for datum in datums: - if datum.target.name not in hermes_target_dict: - hermes_target_dict[datum.target.name] = hermes_data_converter.get_hermes_target(datum.target) - if datum.data_type == 'photometry': - hermes_photometry_data.append(hermes_data_converter.get_hermes_photometry(datum)) - elif datum.data_type == 'spectroscopy': - hermes_spectroscopy_data.append(hermes_data_converter.get_hermes_spectroscopy(datum)) - - # Now go through the targets queryset and ensure we have all of them in the table - # This is needed since some targets may have no corresponding photometry datums but that is still valid to share - for target in targets: - if target.name not in hermes_target_dict: - hermes_target_dict[target.name] = hermes_data_converter.get_hermes_target(target) - - alert = { - 'topic': message_info.topic, - 'title': message_info.title, - 'submitter': message_info.submitter, - 'authors': message_info.authors, - 'data': { - 'targets': list(hermes_target_dict.values()), - 'photometry': hermes_photometry_data, - 'spectroscopy': hermes_spectroscopy_data, - 'extra_data': message_info.extra_info - }, - 'message_text': message_info.message, - } - return alert - - -def get_hermes_topics(**kwargs): - """ - Method to retrieve a list of available topics from HOP. - Intended to be called from forms when building topic list. 
- Extend this method to restrict topics for individual users.
-    :return: List of writable topics available for TOM.
-    """
-    topics = cache.get('hermes_writable_topics', [])
-    if not topics:
-        try:
-            stream_base_url = settings.DATA_SHARING['hermes']['BASE_URL']
-            submit_url = stream_base_url + "api/v0/profile/"
-            headers = {'Authorization': f"Token {settings.DATA_SHARING['hermes']['HERMES_API_KEY']}"}
-            response = requests.get(url=submit_url, headers=headers)
-            topics = response.json()['writable_topics']
-            cache.set('hermes_writable_topics', topics, 86400)
-        except (KeyError, requests.exceptions.JSONDecodeError):
-            pass
-    return topics

From a1988d1c3bbc680183891893006f1ee6a7696268 Mon Sep 17 00:00:00 2001
From: "William (Lindy) Lindstrom"
Date: Fri, 1 May 2026 15:35:25 -0700
Subject: [PATCH 11/22] use HermesProfile.hermes_api_key; handle to_target returning None
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

DataService instances need access to the request user so they can authenticate against per-user credentials.
HermesDataService's `/api/v0/topics/` and `/api/v0/query/` endpoints both reject anonymous requests. So, the
form (when populating topic choices) and the service (when running queries) derive an
`Authorization: Token <api_key>` header from the logged-in user's `HermesProfile.hermes_api_key`.

- `DataService.__init__` accepts `user=None` and stashes it on `self.user`.
- `BaseQueryForm.__init__` pops a `user` kwarg into `self.user` (silently ignored by concrete forms that
  don't use it).
- `DataServiceQueryCreateView.get_form_kwargs` and `DataServiceQueryUpdateView.get_form_kwargs` inject
  `user=self.request.user`.
- `RunQueryView` and `CreateTargetFromQueryView` instantiate the DataService class with
  `user=self.request.user`.
Also fixes `CreateTargetFromQueryView.post()` crashing with `'NoneType' object has no attribute 'save'` when a DataService's `to_target` returns `(None, {}, [])` — which can happen legitimately when a second-stage fetch fails (HermesDataService's archive-query → full-message-body flow). The view now records a per-row error and continues to the next result. --- tom_dataservices/dataservices.py | 7 ++++- tom_dataservices/forms.py | 6 ++++ tom_dataservices/views.py | 51 ++++++++++++++++++++++++++++---- 3 files changed, 57 insertions(+), 7 deletions(-) diff --git a/tom_dataservices/dataservices.py b/tom_dataservices/dataservices.py index 655c33b59..a525ba36e 100644 --- a/tom_dataservices/dataservices.py +++ b/tom_dataservices/dataservices.py @@ -96,7 +96,7 @@ class DataService(ABC): # Link to app github repo app_link = None - def __init__(self, query_parameters=None, *args, **kwargs): + def __init__(self, query_parameters=None, user=None, *args, **kwargs): super().__init__(*args, **kwargs) # Instance variable that can store target query results if necessary self.target_results = {} @@ -106,6 +106,11 @@ def __init__(self, query_parameters=None, *args, **kwargs): self.query_results = {} # Instance variable that can store query parameters if necessary self.query_parameters = query_parameters or {} + # The logged-in User who triggered the query, or None for background / + # anonymous callers. Subclasses that authenticate to a per-user external + # service (e.g. HermesDataService) use this to resolve credentials. + # RunQueryView / CreateTargetFromQueryView pass request.user here. 
+ self.user = user @abstractmethod def query_service(self, query_parameters, **kwargs): diff --git a/tom_dataservices/forms.py b/tom_dataservices/forms.py index 754ede33f..2e1fb012c 100644 --- a/tom_dataservices/forms.py +++ b/tom_dataservices/forms.py @@ -22,6 +22,12 @@ class BaseQueryForm(forms.Form): ) def __init__(self, *args, **kwargs): + # The DataServiceQueryCreateView / DataServiceQueryUpdateView pass the + # logged-in user via ``get_form_kwargs`` so a DataService subclass can + # use per-user credentials when fetching choices (e.g. HermesForm uses + # it to authenticate the /topics/ lookup). Pop here so forms that do + # not care still work — ``self.user`` is ``None`` in that case. + self.user = kwargs.pop('user', None) super().__init__(*args, **kwargs) self.helper = FormHelper() self.helper.form_tag = False diff --git a/tom_dataservices/views.py b/tom_dataservices/views.py index 3fc535045..6bcdc674c 100644 --- a/tom_dataservices/views.py +++ b/tom_dataservices/views.py @@ -100,6 +100,15 @@ def get_form(self, form_class=None): form.helper.form_action = reverse('tom_dataservices:create') return form + def get_form_kwargs(self): + """Add ``user`` to the form kwargs so DataService-specific forms can populate + choices that depend on per-user credentials (e.g. authenticated topic lookups). + BaseQueryForm swallows the kwarg when the concrete form does not use it. + """ + kwargs = super().get_form_kwargs() + kwargs['user'] = self.request.user + return kwargs + def get_initial(self): """ Returns the initial data to use for forms on this view. @@ -168,19 +177,29 @@ def get_context_data(self, *args, **kwargs): # get the DataService class. Pull saved query if PK available, otherwise use session data. 
if self.kwargs.get('pk', None) is not None: query = get_object_or_404(DataServiceQuery, pk=self.kwargs['pk']) - data_service_class = get_data_service_class(query.data_service)() + data_service_class = get_data_service_class(query.data_service)(user=self.request.user) query_parameters = data_service_class.build_query_parameters(query.parameters) query.last_run = timezone.now() query.save() else: input_parameters = self.request.session.get('query_parameters', {}) - data_service_class = get_data_service_class(input_parameters['data_service'])() + data_service_class = get_data_service_class(input_parameters['data_service'])(user=self.request.user) query_parameters = data_service_class.build_query_parameters(input_parameters) - # Check cached query is the same and pull cache if needed. + # Re-use cached rows when the same query parameters were just + # run. ``CreateTargetFromQueryView`` reads the row dicts back + # via ``cache.get(f'result_{i}')`` to dispatch ``to_target``, + # so we keep both sides of the cache in sync. + # + # On a cache miss (different parameters), drop just our own + # keys rather than ``cache.clear()`` — the latter would wipe + # every other app's cached entries (e.g. HERMES topic lists, + # template-fragment caches, rate-limit counters). if query_parameters == cache.get('query_params'): - cached_results = cache.get_many([f'result_{result_id}' for result_id in range(0, 99)]) + cached_results = cache.get_many( + [f'result_{result_id}' for result_id in range(0, 99)]) else: - cache.clear() + cache.delete('query_params') + cache.delete_many([f'result_{result_id}' for result_id in range(0, 99)]) if cached_results: results = [cached_results[key] for key in cached_results] else: @@ -274,6 +293,15 @@ def get_form(self, form_class=None): ) return form + def get_form_kwargs(self): + """Same ``user`` injection as DataServiceQueryCreateView. See that method + for the rationale (DataService forms may need per-user credentials to + populate choices). 
+ """ + kwargs = super().get_form_kwargs() + kwargs['user'] = self.request.user + return kwargs + def get_initial(self): """ Returns the initial data to use for forms on this view. Initial data for this form consists of the name of @@ -327,7 +355,7 @@ def post(self, request, *args, **kwargs): """ query_id = self.request.POST['query_id'] data_service_name = self.request.POST['data_service'] - data_service_class = get_data_service_class(data_service_name)() + data_service_class = get_data_service_class(data_service_name)(user=self.request.user) results = self.request.POST.getlist('selected_results') errors = [] target = None @@ -347,6 +375,17 @@ def post(self, request, *args, **kwargs): else: return redirect(reverse('dataservices:run')) target, extras, aliases = data_service_class.to_target(cached_result) + # ``to_target`` may legitimately return ``(None, {}, [])`` when + # the selected row cannot be materialized into a Target — for + # instance when a DataService has to follow up with a + # second-stage fetch (HermesDataService does this for the + # archive-query → full-message-body step) and that fetch fails. + # Skip this row, record an error for the summary message + # below, and keep going so the other selected rows still + # get a chance to create. + if target is None: + errors.append(f'(result {result_id})') + continue try: target.save(extras=extras, names=aliases) # Give the user access to the target they created From 4b26724ac459dcd8c856933bee389f26e651a2d7 Mon Sep 17 00:00:00 2001 From: "William (Lindy) Lindstrom" Date: Fri, 1 May 2026 16:00:31 -0700 Subject: [PATCH 12/22] Update stream-pub-sub guide for TOMToolkit v3 MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Brings the guide in line with the new tom_base/tom_hermes split: - Top note reframed from "deprecation shims will keep old paths working for one release" to "this release is a major version bump; old import paths are removed; see Migration summary." 
- Subscribe-to-Kafka section's TOPIC_HANDLERS guidance no longer promises a backward-compatible shim — the old dotted path no longer resolves and must be updated. - Migration summary expanded with full lists of names that moved out of tom_dataproducts.alertstreams.hermes_publisher (now in tom_hermes.sharing) and tom_dataproducts.alertstreams.hermes_ingester (now in tom_hermes.alertstreams.ingester), plus the share_*_with_* functions that were removed entirely from tom_dataproducts.sharing in favor of get_sharing_backend(name)().share(...). --- docs/managing_data/stream_pub_sub.rst | 168 +++++++++++++++++++++----- 1 file changed, 136 insertions(+), 32 deletions(-) diff --git a/docs/managing_data/stream_pub_sub.rst b/docs/managing_data/stream_pub_sub.rst index 08cb0c040..c91825de5 100644 --- a/docs/managing_data/stream_pub_sub.rst +++ b/docs/managing_data/stream_pub_sub.rst @@ -3,6 +3,37 @@ Publish and Subscribe to a Kafka Stream Publishing data to a stream and subscribing to a stream are handled independently and we describe each below. +.. note:: + + For TOMToolkit version 3, all HERMES-specific code has moved from ``tom_base`` into the + ``tom_hermes`` package and the old import paths in ``tom_base`` are removed. If you are upgrading from a + pre-refactor ``tom_base`` / ``tom_hermes``, see the *Migration summary* at the bottom of this page for the + list of dotted paths that need to be updated. + + +SharingBackend integration point +################################ + +Data-sharing destinations are discovered by plug-in rather than by hardcoded string match. An app advertises +its SharingBackend classes by adding a ``sharing_backends()`` method (i.e. integration point) to its AppConfig; +``tom_common.sharing`` iterates installed apps, imports the listed class paths, and builds a registry keyed by +each backend's ``name``. Existing backends: + +* **``TomToolkitSharingBackend``** (``tom_common.sharing``) — "share with another TOM Toolkit-based TOM." 
+  Registered by ``tom_common.apps.TomCommonConfig``. ``name = 'tom'``.
+* **``HermesSharingBackend``** (``tom_hermes.sharing``) — "publish to HERMES." Registered by
+  ``tom_hermes.apps.TomHermesConfig``. ``name = 'hermes'``.
+
+The share-destination form field's value is formatted as ``'<backend name>:<destination>'`` (for example
+``'hermes:hermes.test'`` or ``'tom:tom_b'``); ``DataShareView.post`` parses the prefix and dispatches to the
+matching backend's ``share()`` method. A future publisher is registered by writing a ``SharingBackend``
+subclass in its app and returning it from that app's AppConfig ``sharing_backends()`` hook — no changes to
+``tom_base`` required.
+
+To write a new SharingBackend, subclass ``tom_common.sharing.SharingBackend`` and implement ``share()`` and
+``get_destination_choices()``. See the ``tom_common.sharing.SharingBackend`` docstring and the two included
+subclasses for reference.
+
 Publish Data to a Kafka Topic
 #############################
 
@@ -11,64 +42,99 @@
 TOM Toolkit supports publishing data to a Kafka stream such as `Hermes `_)
 and `GCNClassicOverKafka `_.
 
 When sharing photometry data via Hermes, the TOM publishes the data to be shared to a topic on the HOPSKOTCH
-Kafka stream. At this time, only photometry data is supported by TOM Toolkit. To submit via the Hermes API, you will
-need to copy your Hermes API Key from your Hermes profile page. When hermes sharing is configured, you will also see
-buttons to open your data in hermes with the form pre-filled - this is a good option if you want to make slight changes
-to your message or data before sharing.
+Kafka stream. To submit via the Hermes API, you will need a HERMES API Key. You can store it either per-user
+on the ``HermesProfile`` (see *Per-user HERMES credentials* below) or TOM-wide in ``settings.DATA_SHARING``.
+``HermesProfile`` credentials take precedence when set.
+ +To customize what data is sent to HERMES from your ReducedDatum or Target models, subclass +``tom_hermes.sharing.HermesDataConverter`` and override the ``get_hermes_*`` methods to pull the data out of +your TOM's model fields. Provide the class dotpath in ``settings.DATA_SHARING['hermes']['DATA_CONVERTER_CLASS']``. +For more information on the structure HERMES expects, see the +`API Schema Registry here `_. -To customize what data is sent to hermes from your ReducedDatum or Target models, please re-implement your own -``tom_dataproducts.alertstreams.hermes_publisher.HermesDataConverter`` and customize the `get_hermes_*` methods to pull out -the proper data you want to share. You then provide the class dotpath to your custom class in your TOM's settings -for hermes ``DATA_SHARING`` in the `DATA_CONVERTER_CLASS` key. This is especially useful if you store extra target -or datum information in custom associated models in your TOM or with custom model field keys. For more information on -the structure of data HERMES expects, check the `API Schema Registry here `_. This is -the structure you should be mapping your ReducedDatum values to in the Data Converter Class. +Configuring your TOM to publish data +************************************ -Configuring your TOM to Publish Data to a stream: -************************************************* +``settings.DATA_SHARING`` is a dict keyed by destination name. Multiple TOM-to-TOM destinations are supported +(add one entry per destination TOM); the HERMES destination is identified by the presence of +``HERMES_API_KEY``. -You will need to add a ``DATA_SHARING`` configuration dictionary to your ``settings.py`` that gives the credentials -for the various streams with which you wish to share data. 
+Authentication for TOM-to-TOM destinations: prefer a DRF API key via the ``API_KEY`` key (TOM Toolkit +auto-generates a DRF token per user, and a service-account token can be created on the destination TOM); +fall back to HTTP Basic via ``USERNAME`` and ``PASSWORD``. .. code:: python - # Define the valid data sharing destinations for your TOM. DATA_SHARING = { + # One or more TOM-to-TOM destinations. This configuration is used by + # tom_common.sharing.TomToolkitSharingBackend. + 'tom_alice': { + 'DISPLAY_NAME': 'TOM Alice', + 'BASE_URL': 'https://tom-alice.example.org/', + 'API_KEY': os.getenv('TOM_ALICE_API_KEY', ''), # preferred; Token auth + }, + 'tom_bob': { + 'DISPLAY_NAME': 'TOM Bob', + 'BASE_URL': 'https://tom-bob.example.org/', + 'USERNAME': os.getenv('TOM_BOB_USERNAME', ''), # fallback: HTTP Basic; not preferred + 'PASSWORD': os.getenv('TOM_BOB_PASSWORD', ''), + }, + + # HERMES destination. This configuration used by tom_hermes.sharing.HermesSharingBackend. 'hermes': { - 'DISPLAY_NAME': os.getenv('HERMES_DISPLAY_NAME', 'Hermes'), - 'BASE_URL': os.getenv('HERMES_BASE_URL', 'https://hermes.lco.global/'), - 'HERMES_API_KEY': os.getenv('HERMES_API_KEY', 'set HERMES_API_KEY value in environment'), - 'DEFAULT_AUTHORS': os.getenv('HERMES_DEFAULT_AUTHORS', 'set your default authors here'), - 'USER_TOPICS': ['hermes.test', 'tomtoolkit.test'] # You must have write permissions on these topics - 'DATA_CONVERTER_CLASS': 'tom_dataproducts.alertstreams.hermes_publisher.HermesDataConverter' + 'DISPLAY_NAME': os.getenv('HERMES_DISPLAY_NAME', 'Hermes'), + 'BASE_URL': os.getenv('HERMES_BASE_URL', 'https://hermes.lco.global/'), + 'HERMES_API_KEY': os.getenv('HERMES_API_KEY', ''), + 'DEFAULT_AUTHORS': os.getenv('HERMES_DEFAULT_AUTHORS', ''), + 'USER_TOPICS': ['hermes.test', 'tomtoolkit.test'], + 'DATA_CONVERTER_CLASS': 'tom_hermes.sharing.HermesDataConverter', }, } + +Per-user HERMES credentials +*************************** + +Instead of (or in addition to) a TOM-wide HERMES 
API key in ``settings.DATA_SHARING``, each TOM user can +store their own HERMES credentials on the user profile page. Visit the profile page and click the pencil +icon on the "HERMES Credentials" card to set: + +* ``HERMES API Key`` — the user's HERMES submit API key. +* ``Hopskotch Username`` / ``Hopskotch Password`` — SCRAM credentials for reading from Hopskotch. + +Lookup order when publishing: the user's ``HermesProfile`` first; if unset, the TOM-wide +``settings.DATA_SHARING['hermes']`` fallback. + + Subscribe to a Kafka Topic ########################## TOM Toolkit allows a TOM to subscribe to a topic on a Kafka stream, ingesting messages from that topic and handling the data. This could involve simply logging the message or extracting the data from the message and saving it if it is properly formatted. -Configuring your TOM to subscribe to a stream: + +Configuring your TOM to subscribe to a stream ********************************************** -First you will need to add ``tom_alertstreams`` to your list of ``INSTALLED_APPS`` in your ``settings.py``. +First add ``tom_alertstreams`` (and ``tom_hermes``, if you plan to subscribe to HERMES topics) to your +``INSTALLED_APPS``: .. code:: python INSTALLED_APPS = [ ... 'tom_alertstreams', + 'tom_hermes', ] -Then you will need to add an ``ALERT_STREAMS`` configuration dictionary to your ``settings.py``. This gives the credentials -for the various streams to which you wish to subscribe. Additionally, the ``TOPIC_HANDLERS`` section of the stream ``OPTIONS`` -will include a list of handlers for each topic. +Then add an ``ALERT_STREAMS`` configuration dictionary. This gives the credentials for the various streams +and maps each subscribed topic to the dotted path of its handler callable. -Some alert handlers are included as examples. Below we demonstrate how to connect to a Hermes Topic. You'll want to check -out the ``tom-alertstreams`` `README `_ for more details. 
+A HERMES alert handler lives at ``tom_hermes.alertstreams.ingester.hermes_alert_handler``. If you are +upgrading from a pre-refactor TOM, update any ``TOPIC_HANDLERS`` dotted paths that pointed at the old +location (``tom_dataproducts.alertstreams.hermes_ingester.hermes_alert_handler``) — the old path no longer +exists. .. code:: python @@ -78,11 +144,49 @@ out the ``tom-alertstreams`` `README Date: Thu, 7 May 2026 12:22:26 -0700 Subject: [PATCH 13/22] Switch check_for_share_safe_datums to a source_name filter The previous filter joined ReducedDatum.message (an M2M to AlertStreamMessage) on (exchange_status='published', topic=...) to prevent re-publishing a datum to a topic it had previously been sent to. That model and that field are being removed. The new filter reads source_name directly: queryset.exclude(source_name=f'Hermes:{message_topic}') --- tom_dataproducts/sharing.py | 33 ++++++++++++++++++++++----------- 1 file changed, 22 insertions(+), 11 deletions(-) diff --git a/tom_dataproducts/sharing.py b/tom_dataproducts/sharing.py index cdec44e2c..9e9578421 100644 --- a/tom_dataproducts/sharing.py +++ b/tom_dataproducts/sharing.py @@ -23,7 +23,6 @@ from astropy.io import ascii from astropy.table import Table from django.contrib import messages -from django.db.models import Q from django.http import StreamingHttpResponse from django.utils.text import slugify @@ -72,22 +71,34 @@ def get_sharing_destination_options(include_download=True, user=None): # --------------------------------------------------------------------------- def check_for_share_safe_datums(destination, reduced_datums, **kwargs): - """Drop ReducedDatums that have already been published to the given destination+topic. + """Drop ReducedDatums whose origin is the destination topic (round-trip guard). Generic hook — subclassable / replaceable by a TOM operator for a different selection experience. 
Today has one built-in rule: for - HERMES, exclude datums already linked to an ``AlertStreamMessage`` - with ``exchange_status='published'`` on the same topic. For other - destinations, it is a no-op. - - Called by ``HermesSharingBackend.share`` (lazy import) and by - ``TomToolkitSharingBackend.share`` (lazy import). + HERMES, exclude datums whose ``source_name`` already names this + HERMES topic — i.e. datums that came *from* the topic we're about + to publish *to*. For other destinations, no-op. + + Earlier versions of this filter joined on ``ReducedDatum.message`` + (a M2M to ``AlertStreamMessage``) keyed by + ``(exchange_status='published', topic=...)``; that model and field + have been removed in the larger refactor. The new rule reads + ``source_name`` directly: ingestion in + ``tom_hermes.alertstreams.ingester.ingest_hermes_alert`` writes + ``source_name = f'Hermes:{topic}'`` on every ReducedDatum it + creates, so a round-trip is detectable from that field alone. + + **Semantic narrowing.** The previous implementation also caught the + "this datum was previously published to this topic" case, even if + the datum hadn't originated there. That tracking ability is gone + along with the AlertStreamMessage model — TOMs that need it should + re-introduce a per-datum publication log of their own. + + Called by ``HermesSharingBackend.share`` (lazy import). 
""" if 'hermes' in destination: message_topic = kwargs.get('topic', None) - filtered_datums = reduced_datums.exclude( - Q(message__exchange_status='published') & Q(message__topic=message_topic), - ) + filtered_datums = reduced_datums.exclude(source_name=f'Hermes:{message_topic}') else: filtered_datums = reduced_datums return filtered_datums From 3623efdb9c9f8e43f232819bc21eca4988ae6494 Mon Sep 17 00:00:00 2001 From: "William (Lindy) Lindstrom" Date: Thu, 7 May 2026 17:10:02 -0700 Subject: [PATCH 14/22] Anchor tom_dataproducts test data paths on __file__, not CWD --- tom_dataproducts/tests/test_api.py | 17 +++++++++++++---- tom_dataproducts/tests/test_atlas.py | 11 +++++++++-- 2 files changed, 22 insertions(+), 6 deletions(-) diff --git a/tom_dataproducts/tests/test_api.py b/tom_dataproducts/tests/test_api.py index a8ad6dc69..2b5171a2d 100644 --- a/tom_dataproducts/tests/test_api.py +++ b/tom_dataproducts/tests/test_api.py @@ -1,3 +1,5 @@ +import os + from django.contrib.auth.models import Group, User from django.core.files.uploadedfile import SimpleUploadedFile from django.urls import reverse @@ -11,6 +13,13 @@ from tom_targets.tests.factories import SiderealTargetFactory +# Directory holding fixture CSVs / FITS files for these tests. Anchored on +# ``__file__`` so the tests work regardless of CWD — running them from the +# tom_base repo root, from a parent directory, or from an integration TOM +# all resolve the test_data path correctly. 
+TEST_DATA_DIR = os.path.join(os.path.dirname(__file__), 'test_data') + + class TestDataProductViewset(APITestCase): def setUp(self): self.user = User.objects.create(username='testuser') @@ -34,7 +43,7 @@ def test_data_product_upload_for_target(self): group.user_set.add(self.user) group.user_set.add(collaborator) - with open('tom_dataproducts/tests/test_data/test_lightcurve.csv', 'rb') as lightcurve_file: + with open(os.path.join(TEST_DATA_DIR, 'test_lightcurve.csv'), 'rb') as lightcurve_file: self.dp_data['file'] = lightcurve_file response = self.client.post(reverse('api:dataproducts-list'), self.dp_data, format='multipart') self.assertEqual(response.status_code, status.HTTP_201_CREATED) @@ -50,7 +59,7 @@ def test_data_product_upload_for_target(self): def test_data_product_upload_for_observation(self): self.dp_data['observation_record'] = self.obsr.id - with open('tom_dataproducts/tests/test_data/test_lightcurve.csv', 'rb') as lightcurve_file: + with open(os.path.join(TEST_DATA_DIR, 'test_lightcurve.csv'), 'rb') as lightcurve_file: self.dp_data['file'] = lightcurve_file response = self.client.post(reverse('api:dataproducts-list'), self.dp_data, format='multipart') @@ -64,7 +73,7 @@ def test_data_product_upload_for_observation(self): def test_data_product_upload_invalid_type(self): self.dp_data['data_product_type'] = 'invalid' - with open('tom_dataproducts/tests/test_data/test_lightcurve.csv', 'rb') as lightcurve_file: + with open(os.path.join(TEST_DATA_DIR, 'test_lightcurve.csv'), 'rb') as lightcurve_file: self.dp_data['file'] = lightcurve_file response = self.client.post(reverse('api:dataproducts-list'), self.dp_data, format='multipart') @@ -73,7 +82,7 @@ def test_data_product_upload_invalid_type(self): def test_data_product_upload_failed_processing(self): self.dp_data['data_product_type'] = 'spectroscopy' - with open('tom_dataproducts/tests/test_data/test_lightcurve.csv', 'rb') as lightcurve_file: + with open(os.path.join(TEST_DATA_DIR, 'test_lightcurve.csv'), 
'rb') as lightcurve_file:
             self.dp_data['file'] = lightcurve_file
 
             response = self.client.post(reverse('api:dataproducts-list'), self.dp_data, format='multipart')
diff --git a/tom_dataproducts/tests/test_atlas.py b/tom_dataproducts/tests/test_atlas.py
index 6101fa5f1..60117c07a 100644
--- a/tom_dataproducts/tests/test_atlas.py
+++ b/tom_dataproducts/tests/test_atlas.py
@@ -1,5 +1,6 @@
-from unittest.mock import patch
 import logging
+import os
+from unittest.mock import patch
 
 from django.core.files.uploadedfile import SimpleUploadedFile
 from django.test import TestCase  # , override_settings
@@ -13,6 +14,12 @@
 logger = logging.getLogger(__name__)
 logger.setLevel(logging.DEBUG)
 
+# Directory holding fixture CSVs for these tests. Anchored on ``__file__``
+# so the tests work regardless of CWD — running them from the tom_base
+# repo root, from a parent directory, or from an integration TOM all
+# resolve the test_data path correctly.
+TEST_DATA_DIR = os.path.join(os.path.dirname(__file__), 'test_data')
+
 
 class TestAtlasProcessor(TestCase):
     """Test the AtlasProcessor(DataProcessor) class.
@@ -41,7 +48,7 @@ def test_mags_under_SN_cutoff_become_limits(self):
         shown in the csv file.
         """
         # read the test data in as a data_product's data
-        with open('tom_dataproducts/tests/test_data/test_atlas_fp.csv') as atlas_fp_file:
+        with open(os.path.join(TEST_DATA_DIR, 'test_atlas_fp.csv')) as atlas_fp_file:
             self.data_product.data.save('test_data.csv', atlas_fp_file)
 
         # this is the call under test

From ca53315557bf744bce4e72856229946818e59017 Mon Sep 17 00:00:00 2001
From: "William (Lindy) Lindstrom" 
Date: Thu, 7 May 2026 17:24:31 -0700
Subject: [PATCH 15/22] Add dep-group in PEP 621 format so pip install -e ".[test]" works

"works" means that the testing dependencies get installed and you don't
get Module not found errors when you run the tom_base tests.
--- pyproject.toml | 9 +++++++++ tom_dataproducts/tests/tests.py | 11 ++++++++--- 2 files changed, 17 insertions(+), 3 deletions(-) diff --git a/pyproject.toml b/pyproject.toml index f7c2bac79..3bf9b020b 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -72,6 +72,15 @@ dependencies = [ "django-tasks (>=0.6.1,<0.7.0)", ] +# Optional dependency groups, in the standard PEP 621 location so tooling +# other than poetry can install them. ``pip install -e ".[test]"`` +[project.optional-dependencies] +test = [ + "responses >=0.23,<0.26", + "factory_boy >3.2.1,<3.4", + "psycopg2-binary", # postgres test backend +] + [tool.poetry] packages = [ # tell poetry build that the repo is named tom_base (which != name, above) { include="tom_alerts"}, diff --git a/tom_dataproducts/tests/tests.py b/tom_dataproducts/tests/tests.py index b53fd58e9..afa616527 100644 --- a/tom_dataproducts/tests/tests.py +++ b/tom_dataproducts/tests/tests.py @@ -27,6 +27,11 @@ from tom_dataproducts.processors.photometry_processor import PhotometryProcessor from tom_dataproducts.processors.spectroscopy_processor import SpectroscopyProcessor from tom_dataproducts.utils import create_image_dataproduct + + +# Directory holding fixture CSVs / FITS files for these tests. Anchored +# on ``__file__`` so the tests work regardless of CWD. 
+TEST_DATA_DIR = os.path.join(os.path.dirname(__file__), 'test_data') from tom_observations.tests.utils import FakeRoboticFacility from tom_observations.tests.factories import SiderealTargetFactory, ObservingRecordFactory @@ -481,7 +486,7 @@ def test_process_spectroscopy_with_invalid_file_type(self): self.spectrum_data_processor.process_data(self.data_product) def test_process_spectrum_from_fits(self): - with open('tom_dataproducts/tests/test_data/test_spectrum.fits', 'rb') as spectrum_file: + with open(os.path.join(TEST_DATA_DIR, 'test_spectrum.fits'), 'rb') as spectrum_file: self.data_product.data.save('spectrum.fits', spectrum_file) spectrum, _, _ = self.spectrum_data_processor._process_spectrum_from_fits(self.data_product) self.assertTrue(isinstance(spectrum, Spectrum)) @@ -489,7 +494,7 @@ def test_process_spectrum_from_fits(self): self.assertAlmostEqual(spectrum.wavelength.mean().value, 6600.478789, places=5) def test_process_spectrum_from_plaintext(self): - with open('tom_dataproducts/tests/test_data/test_spectrum.csv', 'rb') as spectrum_file: + with open(os.path.join(TEST_DATA_DIR, 'test_spectrum.csv'), 'rb') as spectrum_file: self.data_product.data.save('spectrum.csv', spectrum_file) spectrum, _, _ = self.spectrum_data_processor._process_spectrum_from_plaintext(self.data_product) self.assertTrue(isinstance(spectrum, Spectrum)) @@ -508,7 +513,7 @@ def test_process_photometry_with_invalid_file_type(self): self.photometry_data_processor.process_data(self.data_product) def test_process_photometry_from_plaintext(self): - with open('tom_dataproducts/tests/test_data/test_lightcurve.csv', 'rb') as lightcurve_file: + with open(os.path.join(TEST_DATA_DIR, 'test_lightcurve.csv'), 'rb') as lightcurve_file: self.data_product.data.save('lightcurve.csv', lightcurve_file) lightcurve = self.photometry_data_processor._process_photometry_from_plaintext(self.data_product) self.assertTrue(isinstance(lightcurve, list)) From b7d44b21d2656ed4ed3d6dfdf47eb0fa3f3123f0 Mon Sep 17 
00:00:00 2001 From: "William (Lindy) Lindstrom" Date: Fri, 8 May 2026 09:59:20 -0700 Subject: [PATCH 16/22] update sharing tests for new SharingBackend infrastructure --- tom_common/sharing.py | 2 +- tom_dataproducts/tests/tests.py | 36 ++++++++++++++++++++++----------- tom_dataproducts/views.py | 7 +++++-- tom_targets/tests/tests.py | 16 +++++++-------- 4 files changed, 38 insertions(+), 23 deletions(-) diff --git a/tom_common/sharing.py b/tom_common/sharing.py index 38bc2cebe..3633dc4cf 100644 --- a/tom_common/sharing.py +++ b/tom_common/sharing.py @@ -426,7 +426,7 @@ def _share_reduced_datums(reduced_datums: QuerySet, targets_url: str, reduced_da return {'message': 'ERROR: Multiple targets with matching name found in destination TOM.'} target_dict[target.name] = destination_target_id if all(value is None for value in target_dict.values()): - return {'message': 'ERROR: No matching targets found.'} + return {'message': 'ERROR: No matching target found.'} # Run datums through the existing sharing-protocol filter so a # datum already published to this destination is not re-sent. diff --git a/tom_dataproducts/tests/tests.py b/tom_dataproducts/tests/tests.py index afa616527..2430134bf 100644 --- a/tom_dataproducts/tests/tests.py +++ b/tom_dataproducts/tests/tests.py @@ -27,13 +27,13 @@ from tom_dataproducts.processors.photometry_processor import PhotometryProcessor from tom_dataproducts.processors.spectroscopy_processor import SpectroscopyProcessor from tom_dataproducts.utils import create_image_dataproduct +from tom_observations.tests.utils import FakeRoboticFacility +from tom_observations.tests.factories import SiderealTargetFactory, ObservingRecordFactory # Directory holding fixture CSVs / FITS files for these tests. Anchored # on ``__file__`` so the tests work regardless of CWD. 
TEST_DATA_DIR = os.path.join(os.path.dirname(__file__), 'test_data') -from tom_observations.tests.utils import FakeRoboticFacility -from tom_observations.tests.factories import SiderealTargetFactory, ObservingRecordFactory def mock_fits2image(file1, file2, width, height): @@ -685,13 +685,17 @@ def test_share_dataproduct_no_valid_responses(self): 'share_authors': ['test_author'], 'target': self.target.id, 'submitter': ['test_submitter'], - 'share_destination': [share_destination], + 'share_destination': [f'tom:{share_destination}'], 'share_title': ['Updated data for thingy.'], 'share_message': ['test_message'] }, follow=True ) - self.assertContains(response, 'ERROR: No matching target found.') + # Substring match so the test stays robust to cosmetic wording + # changes (singular vs plural, exact phrasing) — the substantive + # assertion is "an error citing 'no matching target' surfaced to + # the user", not the exact string. + self.assertContains(response, 'No matching target') @responses.activate def test_share_reduceddatums_target_no_valid_responses(self): @@ -718,13 +722,17 @@ def test_share_reduceddatums_target_no_valid_responses(self): 'share_authors': ['test_author'], 'target': self.target.id, 'submitter': ['test_submitter'], - 'share_destination': [share_destination], + 'share_destination': [f'tom:{share_destination}'], 'share_title': ['Updated data for thingy.'], 'share_message': ['test_message'] }, follow=True ) - self.assertContains(response, 'ERROR: No matching target found.') + # Substring match so the test stays robust to cosmetic wording + # changes (singular vs plural, exact phrasing) — the substantive + # assertion is "an error citing 'no matching target' surfaced to + # the user", not the exact string. 
+ self.assertContains(response, 'No matching target') @responses.activate def test_share_reduced_datums_no_valid_responses(self): @@ -751,14 +759,18 @@ def test_share_reduced_datums_no_valid_responses(self): 'share_authors': ['test_author'], 'target': self.target.id, 'submitter': ['test_submitter'], - 'share_destination': [share_destination], + 'share_destination': [f'tom:{share_destination}'], 'share_title': ['Updated data for thingy.'], 'share_message': ['test_message'], 'share-box': [1, 2] }, follow=True ) - self.assertContains(response, 'ERROR: No matching targets found.') + # Substring match so the test stays robust to cosmetic wording + # changes (singular vs plural, exact phrasing) — the substantive + # assertion is "an error citing 'no matching target' surfaced to + # the user", not the exact string. + self.assertContains(response, 'No matching target') @responses.activate def test_share_dataproduct_valid_target_found(self): @@ -791,7 +803,7 @@ def test_share_dataproduct_valid_target_found(self): 'share_authors': ['test_author'], 'target': self.target.id, 'submitter': ['test_submitter'], - 'share_destination': [share_destination], + 'share_destination': [f'tom:{share_destination}'], 'share_title': ['Updated data for thingy.'], 'share_message': ['test_message'] }, @@ -830,7 +842,7 @@ def test_share_reduceddatums_target_valid_responses(self): 'share_authors': ['test_author'], 'target': self.target.id, 'submitter': ['test_submitter'], - 'share_destination': [share_destination], + 'share_destination': [f'tom:{share_destination}'], 'share_title': ['Updated data for thingy.'], 'share_message': ['test_message'] }, @@ -869,7 +881,7 @@ def test_share_reduced_datums_valid_responses(self): 'share_authors': ['test_author'], 'target': self.target.id, 'submitter': ['test_submitter'], - 'share_destination': [share_destination], + 'share_destination': [f'tom:{share_destination}'], 'share_title': ['Updated data for thingy.'], 'share_message': ['test_message'], 'share-box': 
[1, 2] @@ -901,7 +913,7 @@ def test_share_reduced_datums_invalid_responses(self): 'share_authors': ['test_author'], 'target': self.target.id, 'submitter': ['test_submitter'], - 'share_destination': [share_destination], + 'share_destination': [f'tom:{share_destination}'], 'share_title': ['Updated data for thingy.'], 'share_message': ['test_message'], 'share-box': [1, 2] diff --git a/tom_dataproducts/views.py b/tom_dataproducts/views.py index 65da99c9e..d577d3c11 100644 --- a/tom_dataproducts/views.py +++ b/tom_dataproducts/views.py @@ -441,8 +441,11 @@ def post(self, request, *args, **kwargs): # Dispatch via the SharingBackend registry. The backend name is # the prefix before the ':' in share_destination. A missing ':' - # (legacy TOM-to-TOM 'mytom' form) is treated as backend 'tom'. - backend_name = share_destination.partition(':')[0] or 'tom' + # (legacy TOM-to-TOM 'mytom' form, where the form value is the + # bare destination key) is treated as backend 'tom' — that + # backend's ``_split_destination`` tolerates the bare form. 
+ prefix, sep, _sub = share_destination.partition(':') + backend_name = prefix if sep else 'tom' backend = get_sharing_backend(backend_name)() response = backend.share( form_data, diff --git a/tom_targets/tests/tests.py b/tom_targets/tests/tests.py index 522b09b49..b5bbde7ac 100644 --- a/tom_targets/tests/tests.py +++ b/tom_targets/tests/tests.py @@ -1506,7 +1506,7 @@ def test_share_target_no_valid_responses(self): { 'submitter': ['test_submitter'], 'target': self.target.id, - 'share_destination': [share_destination], + 'share_destination': [f'tom:{share_destination}'], }, follow=True ) @@ -1541,7 +1541,7 @@ def test_share_target_valid_connection_no_target_found(self): { 'submitter': ['test_submitter'], 'target': self.target.id, - 'share_destination': [share_destination], + 'share_destination': [f'tom:{share_destination}'], }, follow=True ) @@ -1572,7 +1572,7 @@ def test_share_target_valid_connection_multiple_target_found(self): { 'submitter': ['test_submitter'], 'target': self.target.id, - 'share_destination': [share_destination], + 'share_destination': [f'tom:{share_destination}'], }, follow=True ) @@ -1615,7 +1615,7 @@ def test_share_reduceddatums_target_valid_responses(self): { 'submitter': ['test_submitter'], 'target': self.target.id, - 'share_destination': [share_destination], + 'share_destination': [f'tom:{share_destination}'], 'share-box': [1, 2] }, follow=True @@ -1691,7 +1691,7 @@ def test_share_group_no_valid_responses(self): { 'submitter': ['test_submitter'], 'target_list': self.target_list.id, - 'share_destination': [share_destination], + 'share_destination': [f'tom:{share_destination}'], 'selected-target': [self.target.id, self.target2.id] }, follow=True @@ -1727,7 +1727,7 @@ def test_share_group_valid_connection_selected_target_not_found(self): { 'submitter': ['test_submitter'], 'target_list': self.target_list.id, - 'share_destination': [share_destination], + 'share_destination': [f'tom:{share_destination}'], 'selected-target': [self.target.id, 
self.target2.id] }, follow=True @@ -1771,7 +1771,7 @@ def test_share_reduceddatums_group_valid_responses(self): { 'submitter': ['test_submitter'], 'target_list': self.target_list.id, - 'share_destination': [share_destination], + 'share_destination': [f'tom:{share_destination}'], 'dataSwitch': 'on', 'selected-target': [self.target.id, self.target2.id] }, @@ -1796,7 +1796,7 @@ def test_share_empty_group(self): { 'submitter': ['test_submitter'], 'target_list': self.target_list.id, - 'share_destination': [share_destination], + 'share_destination': [f'tom:{share_destination}'], 'dataSwitch': 'on', 'selected-target': [] }, From beabc33af4ca13c39e9454d08985d513e6dad7ed Mon Sep 17 00:00:00 2001 From: "William (Lindy) Lindstrom" Date: Fri, 8 May 2026 10:18:36 -0700 Subject: [PATCH 17/22] Mask the DRF API Token on the User Info card with click-to-reveal --- .../templates/tom_common/partials/user_data.html | 15 +++++++++++++-- 1 file changed, 13 insertions(+), 2 deletions(-) diff --git a/tom_common/templates/tom_common/partials/user_data.html b/tom_common/templates/tom_common/partials/user_data.html index 2bfefaef5..e24617a1f 100644 --- a/tom_common/templates/tom_common/partials/user_data.html +++ b/tom_common/templates/tom_common/partials/user_data.html @@ -25,8 +25,19 @@

User Info

{% endif %} {% endfor %} {% if drf_api_token %} -
API Token
-
{{ drf_api_token }}
+
API Token
+
+ {# Render the DRF API token as a click-to-reveal masked password #} + +
{% endif %} From 4283cc1ce16ccdc9e511aeaa70634ae0a4122f4f Mon Sep 17 00:00:00 2001 From: "William (Lindy) Lindstrom" Date: Fri, 8 May 2026 13:16:38 -0700 Subject: [PATCH 18/22] clean up comments et al --- tom_common/sharing.py | 51 +++++++++++----------------- tom_dataproducts/sharing.py | 6 ++-- tom_dataproducts/tests/test_api.py | 7 ++-- tom_dataproducts/tests/test_atlas.py | 4 --- tom_dataservices/dataservices.py | 7 ++-- 5 files changed, 30 insertions(+), 45 deletions(-) diff --git a/tom_common/sharing.py b/tom_common/sharing.py index 3633dc4cf..0dd1780cb 100644 --- a/tom_common/sharing.py +++ b/tom_common/sharing.py @@ -48,8 +48,6 @@ from django.db.models import QuerySet from django.utils.module_loading import import_string -# Local (non-TOM) imports above; TOM Toolkit imports below follow the project -# convention of grouping stdlib / third-party / Django / TOM Toolkit. from tom_dataproducts.models import ReducedDatum from tom_dataproducts.serializers import DataProductSerializer, ReducedDatumSerializer from tom_targets.models import Target @@ -66,8 +64,8 @@ class SharingBackend(ABC): """ # ``name`` is: - # - the lookup key in the dict returned by ``get_sharing_backends()``; - # - the prefix of the form's ``share_destination`` value, which is + # - the key in the dict returned by ``get_sharing_backends()``; + # - used as the prefix of the form's ``share_destination`` value, which is # formatted as the string ``':'`` # (e.g. ``'hermes:gw.lvk.public'``, or ``'tom:tom_b'``); # - the value that ``DataShareView.post()`` parses out of @@ -77,9 +75,7 @@ class SharingBackend(ABC): name: str = '' # ``verbose_name`` is the human-readable label shown as the heading above - # this backend's destinations in the share-destination dropdown. For a - # backend with multiple sub-destinations (e.g., several HERMES topics), - # the dropdown groups them under this heading. + # this backend's destinations in the share-destination dropdown. # Required: set in every subclass. 
verbose_name: str = '' @@ -110,6 +106,7 @@ def share(self, form_data: dict, *, Called by ``DataShareView.post()`` (and by the shims in ``tom_dataproducts.sharing``) after a successful form submission. + Returns a feedback dict with at least ``{'status': 'success'|'error', 'message': str}`` (older code returns just ``{'message': str}``; both are tolerated by the @@ -177,8 +174,10 @@ def get_sharing_backend(name: str) -> type: ``tom_dataservices.dataservices.get_data_service_class`` so the behavior is consistent across integration points. """ + # build the registry registry = get_sharing_backends() try: + # look up the given SharingBackend by name return registry[name] except KeyError: raise ImportError( @@ -242,7 +241,8 @@ class TomToolkitSharingBackend(SharingBackend): @classmethod def get_destination_choices(cls, user: User | None = None) -> list: - """Enumerate ``settings.DATA_SHARING`` entries that look like TOM destinations. + """Enumerate ``settings.DATA_SHARING`` entries that look like TOM destinations and + return list of tuples to populate ChoiceField choices. A "TOM destination" entry is one whose value dict has a ``BASE_URL`` and does NOT have a ``HERMES_API_KEY``. Each such @@ -258,21 +258,18 @@ def get_destination_choices(cls, user: User | None = None) -> list: # Skip HERMES entries; they belong to HermesSharingBackend. if not isinstance(cfg, dict) or cfg.get('HERMES_API_KEY'): continue + display_name = cfg.get('DISPLAY_NAME', key) if not cfg.get('BASE_URL'): # Not enough info to publish — skip quietly rather than erroring # at form-render time. + logger.warning(f'No BASE_URL found in DATA_SHARING config for {display_name}') continue - display = cfg.get('DISPLAY_NAME', key) - choices.append((f'{cls.name}:{key}', display)) + choices.append((f'{cls.name}:{key}', display_name)) return choices @staticmethod def _split_destination(share_destination: str) -> str: """Return the ```` half of a ``'tom:'`` share-destination string. 
- - Tolerant of the legacy format where the form field carries just the - bare settings-key (e.g. ``'tom_b'`` with no ``'tom:'`` prefix) so - that older callers still work during the deprecation window. """ prefix, sep, sub = share_destination.partition(':') if sep: @@ -308,14 +305,13 @@ def share(self, form_data: dict, *, **kwargs) -> dict: """POST the share payload to the destination TOM's HTTP API. - Behavior preserved from ``tom_dataproducts.sharing.share_data_with_tom`` - with one addition: authentication now prefers a DRF API token if + Authentication now prefers a DRF API token if ``settings.DATA_SHARING[]['API_KEY']`` is set, falling back to HTTP Basic if only ``USERNAME`` / ``PASSWORD`` are configured. The caller passes whichever of ``reduced_datums`` / ``targets`` / ``data_products`` is relevant to the share action; the three are - mutually-exclusive in practice (matches the existing behavior). + mutually-exclusive in practice. """ # Parse the destination sub-key out of the form value (e.g. 'tom:tom_b' -> 'tom_b'). share_destination = form_data.get('share_destination', '') if form_data else '' @@ -336,7 +332,7 @@ def share(self, form_data: dict, *, )} # Build the base headers and the appropriate auth. Auth is chosen - # per-destination: API_KEY wins; otherwise USERNAME/PASSWORD. + # per-destination: API_KEY takes precedence over USERNAME/PASSWORD. base_headers = {'Content-Type': 'application/json', 'Accept': 'application/json'} auth_headers, auth = self._build_auth(cfg) headers = {**base_headers, **auth_headers} @@ -346,10 +342,8 @@ def share(self, form_data: dict, *, targets_url = destination_tom_base_url + 'api/targets/' reduced_datums_url = destination_tom_base_url + 'api/reduceddatums/' - # Dispatch on which of the three querysets was supplied. Mirrors the - # original ``share_data_with_tom`` shape; one and only one of - # ``data_products`` / ``reduced_datums`` / ``targets`` is typically - # populated per call. 
+ # Dispatch on which of the three querysets was supplied: + # ``data_products`` / ``reduced_datums`` / ``targets`` if data_products is not None and data_products.exists(): return self._share_data_products( data_products, targets_url, dataproducts_url, headers, auth, @@ -361,8 +355,7 @@ def share(self, form_data: dict, *, if targets is not None and targets.exists(): # A Target-only share: push every ReducedDatum that belongs to # the target. We do NOT create the Target on the destination - # TOM — that target must already exist there. This mirrors the - # original behavior. + # TOM — that target must already exist there. target = targets.first() owned_datums = ReducedDatum.objects.filter(target=target) return self._share_reduced_datums( @@ -379,9 +372,8 @@ def _share_data_products(data_products: QuerySet, targets_url: str, dataproducts target names and aliases (``get_destination_target``), then POSTs the serialized DataProduct plus its file to ``api/dataproducts/``. """ - # We currently support one DataProduct per call (matches the - # existing view contract). If there are multiple, only the first - # is processed; others are ignored. That is the existing behavior. + # We currently support one DataProduct per call. + # If there are multiple, only the first is processed; others are ignored. product = data_products.first() target = product.target serialized_data = DataProductSerializer(product).data @@ -394,7 +386,7 @@ def _share_data_products(data_products: QuerySet, targets_url: str, dataproducts serialized_data['target'] = destination_target_id # TODO: this path join should be replaced once tom_dataproducts uses - # django.core.files.storage (pre-existing TODO from the original code). 
+ # django.core.files.storage dataproduct_filename = os.path.join(settings.MEDIA_ROOT, product.data.name) with open(dataproduct_filename, 'rb') as dataproduct_filep: files = {'file': (product.data.name, dataproduct_filep, 'text/csv')} @@ -430,9 +422,6 @@ def _share_reduced_datums(reduced_datums: QuerySet, targets_url: str, reduced_da # Run datums through the existing sharing-protocol filter so a # datum already published to this destination is not re-sent. - # ``check_for_share_safe_datums`` lives in tom_dataproducts.sharing - # for now; we import lazily to avoid a circular import at module load - # (tom_dataproducts.sharing depends on tom_common at other points). from tom_dataproducts.sharing import check_for_share_safe_datums reduced_datums = check_for_share_safe_datums(destination_key, reduced_datums) if not reduced_datums: diff --git a/tom_dataproducts/sharing.py b/tom_dataproducts/sharing.py index 9e9578421..50d06ce4b 100644 --- a/tom_dataproducts/sharing.py +++ b/tom_dataproducts/sharing.py @@ -82,13 +82,13 @@ def check_for_share_safe_datums(destination, reduced_datums, **kwargs): Earlier versions of this filter joined on ``ReducedDatum.message`` (a M2M to ``AlertStreamMessage``) keyed by ``(exchange_status='published', topic=...)``; that model and field - have been removed in the larger refactor. The new rule reads + have been removed. The new rule reads ``source_name`` directly: ingestion in ``tom_hermes.alertstreams.ingester.ingest_hermes_alert`` writes ``source_name = f'Hermes:{topic}'`` on every ReducedDatum it creates, so a round-trip is detectable from that field alone. - **Semantic narrowing.** The previous implementation also caught the + The previous implementation also caught the "this datum was previously published to this topic" case, even if the datum hadn't originated there. 
That tracking ability is gone along with the AlertStreamMessage model — TOMs that need it should @@ -96,9 +96,11 @@ Called by ``HermesSharingBackend.share`` (lazy import). """ + # first filtering rule: if 'hermes' in destination: message_topic = kwargs.get('topic', None) filtered_datums = reduced_datums.exclude(source_name=f'Hermes:{message_topic}') + # add additional filtering rules as needed here: else: filtered_datums = reduced_datums return filtered_datums diff --git a/tom_dataproducts/tests/test_api.py b/tom_dataproducts/tests/test_api.py index 2b5171a2d..5b32a8b04 100644 --- a/tom_dataproducts/tests/test_api.py +++ b/tom_dataproducts/tests/test_api.py @@ -13,10 +13,9 @@ from tom_targets.tests.factories import SiderealTargetFactory -# Directory holding fixture CSVs / FITS files for these tests. Anchored on -# ``__file__`` so the tests work regardless of CWD — running them from the -# tom_base repo root, from a parent directory, or from an integration TOM -# all resolve the test_data path correctly. +# TEST_DATA_DIR is the directory holding fixture CSVs/FITS files for these tests. +# It is derived from ``__file__`` so the tests work regardless of CWD and resolve +# the test_data path correctly. TEST_DATA_DIR = os.path.join(os.path.dirname(__file__), 'test_data') diff --git a/tom_dataproducts/tests/test_atlas.py b/tom_dataproducts/tests/test_atlas.py index 60117c07a..0f32480f4 100644 --- a/tom_dataproducts/tests/test_atlas.py +++ b/tom_dataproducts/tests/test_atlas.py @@ -14,10 +14,6 @@ logger = logging.getLogger(__name__) logger.setLevel(logging.DEBUG) -# Directory holding fixture CSVs for these tests. Anchored on ``__file__`` -# so the tests work regardless of CWD — running them from the tom_base -# repo root, from a parent directory, or from an integration TOM all -# resolve the test_data path correctly. 
TEST_DATA_DIR = os.path.join(os.path.dirname(__file__), 'test_data') diff --git a/tom_dataservices/dataservices.py b/tom_dataservices/dataservices.py index a525ba36e..80ea0aae2 100644 --- a/tom_dataservices/dataservices.py +++ b/tom_dataservices/dataservices.py @@ -106,10 +106,9 @@ def __init__(self, query_parameters=None, user=None, *args, **kwargs): self.query_results = {} # Instance variable that can store query parameters if necessary self.query_parameters = query_parameters or {} - # The logged-in User who triggered the query, or None for background / - # anonymous callers. Subclasses that authenticate to a per-user external - # service (e.g. HermesDataService) use this to resolve credentials. - # RunQueryView / CreateTargetFromQueryView pass request.user here. + + # self.user is the logged-in User who triggered the query, or None for + # background / anonymous callers. Use this to resolve credentials. self.user = user @abstractmethod From b335e9ea441dccc5ad24a44e752733c4095de270 Mon Sep 17 00:00:00 2001 From: "William (Lindy) Lindstrom" Date: Fri, 8 May 2026 13:18:42 -0700 Subject: [PATCH 19/22] update lock file --- poetry.lock | 69 ++++++++++++++++++++++++++++------------------------- 1 file changed, 36 insertions(+), 33 deletions(-) diff --git a/poetry.lock b/poetry.lock index 72d7a5cea..1f2e7e348 100644 --- a/poetry.lock +++ b/poetry.lock @@ -1,4 +1,4 @@ -# This file is automatically @generated by Poetry 2.1.1 and should not be changed by hand. 
[[package]] name = "alabaster" @@ -49,7 +49,7 @@ semantic_version = ">=2.8" [package.extras] all = ["asdf[http]", "asdf[lz4]"] benchmark = ["asdf[tests]", "pytest-benchmark"] -docs = ["furo", "graphviz", "sphinx-asdf (>=0.2.2)", "sphinx-inline-tabs", "tomli ; python_version < \"3.11\""] +docs = ["furo", "graphviz", "sphinx-asdf (>=0.2.2)", "sphinx-inline-tabs", "tomli"] http = ["fsspec[http] (>=2022.8.2)"] lz4 = ["lz4 (>=0.10)"] test = ["asdf[tests]"] @@ -113,7 +113,7 @@ files = [ ] [package.extras] -docs = ["docutils", "furo", "graphviz", "sphinx (>=4.0)", "sphinx-asdf (>=0.1.3)", "tomli ; python_version < \"3.11\""] +docs = ["docutils", "furo", "graphviz", "sphinx (>=4.0)", "sphinx-asdf (>=0.1.3)", "tomli"] test = ["asdf (>=3.0.0)", "packaging (>=16.0)", "pytest", "pytest-asdf-plugin", "pyyaml"] [[package]] @@ -245,7 +245,7 @@ PyYAML = ">=3.13" [package.extras] all = ["asdf-astropy (>=0.3)", "astropy[recommended]", "astropy[typing]", "beautifulsoup4", "bleach", "bottleneck", "certifi", "dask[array]", "fsspec[http] (>=2023.4.0)", "h5py", "html5lib", "ipython (>=4.2)", "jplephem", "mpmath", "pandas", "pre-commit", "pyarrow (>=7.0.0)", "pytest (>=7.0)", "pytz", "s3fs (>=2023.4.0)", "sortedcontainers"] -docs = ["Jinja2 (>=3.1.3)", "astropy[recommended]", "matplotlib (>=3.9.1)", "numpy (<2.0)", "pytest (>=7.0)", "sphinx", "sphinx-astropy[confv2] (>=1.9.1)", "sphinx-changelog (>=1.2.0)", "sphinx_design", "sphinxcontrib-globalsubs (>=0.1.1)", "tomli ; python_version < \"3.11\""] +docs = ["Jinja2 (>=3.1.3)", "astropy[recommended]", "matplotlib (>=3.9.1)", "numpy (<2.0)", "pytest (>=7.0)", "sphinx", "sphinx-astropy[confv2] (>=1.9.1)", "sphinx-changelog (>=1.2.0)", "sphinx_design", "sphinxcontrib-globalsubs (>=0.1.1)", "tomli"] recommended = ["matplotlib (>=3.5.0,!=3.5.2)", "scipy (>=1.8)"] test = ["pytest (>=7.0)", "pytest-astropy (>=0.10)", "pytest-astropy-header (>=0.2.1)", "pytest-doctestplus (>=0.12)", "pytest-xdist", "threadpoolctl"] test-all = ["array-api-strict", 
"astropy[test]", "coverage[toml]", "ipython (>=4.2)", "objgraph", "sgp4 (>=2.3)", "skyfield (>=1.20)"] @@ -318,7 +318,7 @@ files = [ ] [package.extras] -dev = ["backports.zoneinfo ; python_version < \"3.9\"", "freezegun (>=1.0,<2.0)", "jinja2 (>=3.0)", "pytest (>=6.0)", "pytest-cov", "pytz", "setuptools", "tzdata ; sys_platform == \"win32\""] +dev = ["backports.zoneinfo", "freezegun (>=1.0,<2.0)", "jinja2 (>=3.0)", "pytest (>=6.0)", "pytest-cov", "pytz", "setuptools", "tzdata"] [[package]] name = "backports-tarfile" @@ -772,7 +772,7 @@ files = [ tomli = {version = "*", optional = true, markers = "python_full_version <= \"3.11.0a6\" and extra == \"toml\""} [package.extras] -toml = ["tomli ; python_full_version <= \"3.11.0a6\""] +toml = ["tomli"] [[package]] name = "coveralls" @@ -870,8 +870,8 @@ files = [ ] [package.dependencies] -cffi = {version = ">=2.0.0", markers = "python_full_version >= \"3.9.0\" and platform_python_implementation != \"PyPy\""} -typing-extensions = {version = ">=4.13.2", markers = "python_full_version < \"3.11.0\""} +cffi = {version = ">=2.0.0", markers = "python_full_version >= \"3.9\" and platform_python_implementation != \"PyPy\""} +typing-extensions = {version = ">=4.13.2", markers = "python_full_version < \"3.11\""} [package.extras] docs = ["sphinx (>=5.3.0)", "sphinx-inline-tabs", "sphinx-rtd-theme (>=3.0.0)"] @@ -1112,7 +1112,7 @@ version = "3.3.3" description = "A versatile test fixtures replacement based on thoughtbot's factory_bot for Ruby." optional = false python-versions = ">=3.8" -groups = ["test"] +groups = ["main", "test"] files = [ {file = "factory_boy-3.3.3-py2.py3-none-any.whl", hash = "sha256:1c39e3289f7e667c4285433f305f8d506efc2fe9c73aaea4151ebd5cdea394fc"}, {file = "factory_boy-3.3.3.tar.gz", hash = "sha256:866862d226128dfac7f2b4160287e899daf54f2612778327dd03d0e2cb1e3d03"}, @@ -1131,7 +1131,7 @@ version = "40.13.0" description = "Faker is a Python package that generates fake data for you." 
optional = false python-versions = ">=3.10" -groups = ["test"] +groups = ["main", "test"] files = [ {file = "faker-40.13.0-py3-none-any.whl", hash = "sha256:c1298fd0d819b3688fb5fd358c4ba8f56c7c8c740b411fd3dbd8e30bf2c05019"}, {file = "faker-40.13.0.tar.gz", hash = "sha256:a0751c84c3abac17327d7bb4c98e8afe70ebf7821e01dd7d0b15cd8856415525"}, @@ -1198,7 +1198,7 @@ numpy = ">=1.24" scipy = ">=1.14.1" [package.extras] -docs = ["pydata-sphinx-theme", "sphinx", "sphinx-asdf", "sphinx-astropy", "sphinx-automodapi", "sphinx-copybutton", "sphinx-rtd-theme", "stsci-rtd-theme", "tomli ; python_version < \"3.11\""] +docs = ["pydata-sphinx-theme", "sphinx", "sphinx-asdf", "sphinx-astropy", "sphinx-automodapi", "sphinx-copybutton", "sphinx-rtd-theme", "stsci-rtd-theme", "tomli"] test = ["ci-watson (>=0.3.0)", "pytest (>=8.0.0)", "pytest-astropy (>=0.11.0)"] [[package]] @@ -1218,10 +1218,10 @@ six = ">=1.9" webencodings = "*" [package.extras] -all = ["chardet (>=2.2)", "genshi", "lxml ; platform_python_implementation == \"CPython\""] +all = ["chardet (>=2.2)", "genshi", "lxml"] chardet = ["chardet (>=2.2)"] genshi = ["genshi"] -lxml = ["lxml ; platform_python_implementation == \"CPython\""] +lxml = ["lxml"] [[package]] name = "idna" @@ -1267,13 +1267,13 @@ files = [ zipp = ">=3.20" [package.extras] -check = ["pytest-checkdocs (>=2.14)", "pytest-ruff (>=0.2.1) ; sys_platform != \"cygwin\""] +check = ["pytest-checkdocs (>=2.14)", "pytest-ruff (>=0.2.1)"] cover = ["pytest-cov"] doc = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"] enabler = ["pytest-enabler (>=3.4)"] perf = ["ipython"] test = ["packaging", "pyfakefs", "pytest (>=6,!=8.1.*)", "pytest-perf (>=0.9.2)"] -type = ["pytest-mypy (>=1.0.1) ; platform_python_implementation != \"PyPy\""] +type = ["pytest-mypy (>=1.0.1)"] [[package]] name = "importlib-resources" @@ -1288,7 +1288,7 @@ files = [ ] [package.extras] -check = ["pytest-checkdocs (>=2.4)", 
"pytest-ruff (>=0.2.1) ; sys_platform != \"cygwin\""] +check = ["pytest-checkdocs (>=2.4)", "pytest-ruff (>=0.2.1)"] cover = ["pytest-cov"] doc = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"] enabler = ["pytest-enabler (>=2.2)"] @@ -1330,12 +1330,12 @@ files = [ "backports.tarfile" = {version = "*", markers = "python_version < \"3.12\""} [package.extras] -check = ["pytest-checkdocs (>=2.14)", "pytest-ruff (>=0.2.1) ; sys_platform != \"cygwin\""] +check = ["pytest-checkdocs (>=2.14)", "pytest-ruff (>=0.2.1)"] cover = ["pytest-cov"] doc = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"] enabler = ["pytest-enabler (>=3.4)"] test = ["jaraco.test (>=5.6.0)", "portend", "pytest (>=6,!=8.1.*)"] -type = ["pytest-mypy (>=1.0.1) ; platform_python_implementation != \"PyPy\""] +type = ["pytest-mypy (>=1.0.1)"] [[package]] name = "jaraco-functools" @@ -1353,12 +1353,12 @@ files = [ more_itertools = "*" [package.extras] -check = ["pytest-checkdocs (>=2.4)", "pytest-ruff (>=0.2.1) ; sys_platform != \"cygwin\""] +check = ["pytest-checkdocs (>=2.4)", "pytest-ruff (>=0.2.1)"] cover = ["pytest-cov"] doc = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"] enabler = ["pytest-enabler (>=3.4)"] test = ["jaraco.classes", "pytest (>=6,!=8.1.*)"] -type = ["mypy (<1.19) ; platform_python_implementation == \"PyPy\"", "pytest-mypy (>=1.0.1)"] +type = ["mypy (<1.19)", "pytest-mypy (>=1.0.1)"] [[package]] name = "jeepney" @@ -1374,7 +1374,7 @@ files = [ ] [package.extras] -test = ["async-timeout ; python_version < \"3.11\"", "pytest", "pytest-asyncio (>=0.17)", "pytest-trio", "testpath", "trio"] +test = ["async-timeout", "pytest", "pytest-asyncio (>=0.17)", "pytest-trio", "testpath", "trio"] trio = ["trio"] [[package]] @@ -1429,7 +1429,7 @@ pywin32-ctypes = {version = ">=0.2.0", markers 
= "sys_platform == \"win32\""} SecretStorage = {version = ">=3.2", markers = "sys_platform == \"linux\""} [package.extras] -check = ["pytest-checkdocs (>=2.4)", "pytest-ruff (>=0.2.1) ; sys_platform != \"cygwin\""] +check = ["pytest-checkdocs (>=2.4)", "pytest-ruff (>=0.2.1)"] completion = ["shtab (>=1.1.0)"] cover = ["pytest-cov"] doc = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"] @@ -1636,7 +1636,7 @@ dev = ["ndcube[docs,plotting,reproject,tests]"] docs = ["matplotlib", "mpl-animators (>=1.0)", "packaging", "sphinx", "sphinx-automodapi", "sphinx-changelog (>=1.1.0)", "sphinx-gallery", "sphinxext-opengraph", "sunpy (>=5.0.0)", "sunpy-sphinx-theme"] plotting = ["matplotlib (>=3.5.0)", "mpl_animators (>=1.0)"] reproject = ["reproject (>=0.7.1)"] -tests = ["dask", "pytest", "pytest-astropy", "pytest-cov", "pytest-doctestplus", "pytest-memray ; sys_platform != \"win32\"", "pytest-mpl (>=0.12)", "pytest-xdist", "scipy", "specutils", "sunpy (>=5.0.0)"] +tests = ["dask", "pytest", "pytest-astropy", "pytest-cov", "pytest-doctestplus", "pytest-memray", "pytest-mpl (>=0.12)", "pytest-xdist", "scipy", "specutils", "sunpy (>=5.0.0)"] [[package]] name = "numpy" @@ -1846,7 +1846,7 @@ version = "2.9.11" description = "psycopg2 - Python-PostgreSQL Database Adapter" optional = false python-versions = ">=3.9" -groups = ["test"] +groups = ["main", "test"] files = [ {file = "psycopg2-binary-2.9.11.tar.gz", hash = "sha256:b6aed9e096bf63f9e75edf2581aa9a7e7186d97ab5c177aa6c87797cd591236c"}, {file = "psycopg2_binary-2.9.11-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:d6fe6b47d0b42ce1c9f1fa3e35bb365011ca22e39db37074458f27921dca40f2"}, @@ -2174,7 +2174,7 @@ version = "0.25.8" description = "A utility library for mocking out the `requests` Python library." 
optional = false python-versions = ">=3.8" -groups = ["test"] +groups = ["main", "test"] files = [ {file = "responses-0.25.8-py3-none-any.whl", hash = "sha256:0c710af92def29c8352ceadff0c3fe340ace27cf5af1bbe46fb71275bcd2831c"}, {file = "responses-0.25.8.tar.gz", hash = "sha256:9374d047a575c8f781b94454db5cab590b6029505f488d12899ddb10a4af1cf4"}, @@ -2186,7 +2186,7 @@ requests = ">=2.30.0,<3.0" urllib3 = ">=1.25.10,<3.0" [package.extras] -tests = ["coverage (>=6.0.0)", "flake8", "mypy", "pytest (>=7.0.0)", "pytest-asyncio", "pytest-cov", "pytest-httpserver", "tomli ; python_version < \"3.11\"", "tomli-w", "types-PyYAML", "types-requests"] +tests = ["coverage (>=6.0.0)", "flake8", "mypy", "pytest (>=7.0.0)", "pytest-asyncio", "pytest-cov", "pytest-httpserver", "tomli", "tomli-w", "types-PyYAML", "types-requests"] [[package]] name = "rich" @@ -2269,7 +2269,7 @@ numpy = ">=1.23.5,<2.5" [package.extras] dev = ["cython-lint (>=0.12.2)", "doit (>=0.36.0)", "mypy (==1.10.0)", "pycodestyle", "pydevtool", "rich-click", "ruff (>=0.0.292)", "types-psutil", "typing_extensions"] doc = ["intersphinx_registry", "jupyterlite-pyodide-kernel", "jupyterlite-sphinx (>=0.19.1)", "jupytext", "matplotlib (>=3.5)", "myst-nb", "numpydoc", "pooch", "pydata-sphinx-theme (>=0.15.2)", "sphinx (>=5.0.0,<8.0.0)", "sphinx-copybutton", "sphinx-design (>=0.4.0)"] -test = ["Cython", "array-api-strict (>=2.0,<2.1.1)", "asv", "gmpy2", "hypothesis (>=6.30)", "meson", "mpmath", "ninja ; sys_platform != \"emscripten\"", "pooch", "pytest", "pytest-cov", "pytest-timeout", "pytest-xdist", "scikit-umfpack", "threadpoolctl"] +test = ["Cython", "array-api-strict (>=2.0,<2.1.1)", "asv", "gmpy2", "hypothesis (>=6.30)", "meson", "mpmath", "ninja", "pooch", "pytest", "pytest-cov", "pytest-timeout", "pytest-xdist", "scikit-umfpack", "threadpoolctl"] [[package]] name = "secretstorage" @@ -2301,7 +2301,7 @@ files = [ ] [package.extras] -dev = ["Django (>=1.11)", "check-manifest", "colorama (<=0.4.1) ; python_version == 
\"3.4\"", "coverage", "flake8", "nose2", "readme-renderer (<25.0) ; python_version == \"3.4\"", "tox", "wheel", "zest.releaser[recommended]"] +dev = ["Django (>=1.11)", "check-manifest", "colorama (<=0.4.1)", "coverage", "flake8", "nose2", "readme-renderer (<25.0)", "tox", "wheel", "zest.releaser[recommended]"] doc = ["Sphinx", "sphinx-rtd-theme"] [[package]] @@ -2725,7 +2725,7 @@ files = [ {file = "tzdata-2026.1-py2.py3-none-any.whl", hash = "sha256:4b1d2be7ac37ceafd7327b961aa3a54e467efbdb563a23655fbfe0d39cfc42a9"}, {file = "tzdata-2026.1.tar.gz", hash = "sha256:67658a1903c75917309e753fdc349ac0efd8c27db7a0cb406a25be4840f87f98"}, ] -markers = {main = "sys_platform == \"win32\"", test = "platform_system == \"Windows\""} +markers = {main = "sys_platform == \"win32\" or platform_system == \"Windows\"", test = "platform_system == \"Windows\""} [[package]] name = "urllib3" @@ -2740,10 +2740,10 @@ files = [ ] [package.extras] -brotli = ["brotli (>=1.2.0) ; platform_python_implementation == \"CPython\"", "brotlicffi (>=1.2.0.0) ; platform_python_implementation != \"CPython\""] +brotli = ["brotli (>=1.2.0)", "brotlicffi (>=1.2.0.0)"] h2 = ["h2 (>=4,<5)"] socks = ["pysocks (>=1.5.6,!=1.5.7,<2.0)"] -zstd = ["backports-zstd (>=1.0.0) ; python_version < \"3.14\""] +zstd = ["backports-zstd (>=1.0.0)"] [[package]] name = "webencodings" @@ -2771,14 +2771,17 @@ files = [ ] [package.extras] -check = ["pytest-checkdocs (>=2.4)", "pytest-ruff (>=0.2.1) ; sys_platform != \"cygwin\""] +check = ["pytest-checkdocs (>=2.4)", "pytest-ruff (>=0.2.1)"] cover = ["pytest-cov"] doc = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"] enabler = ["pytest-enabler (>=2.2)"] test = ["big-O", "jaraco.functools", "jaraco.itertools", "jaraco.test", "more_itertools", "pytest (>=6,!=8.1.*)", "pytest-ignore-flaky"] type = ["pytest-mypy"] +[extras] +test = ["factory_boy", "psycopg2-binary", "responses"] + [metadata] lock-version = "2.1" 
python-versions = ">=3.10.0,<3.14" -content-hash = "d20f1afe99aee56b9fec3bb1a36e8ac2db563df7a4315426388a81ab8191e9d1" +content-hash = "6b2b1e268aec304acdcbba99c5f138c6af9ba74a8775d22fb9e08577b202d46d" From 5fc1fa4295d69574408dd33043da62717a68d1a7 Mon Sep 17 00:00:00 2001 From: "William (Lindy) Lindstrom" Date: Fri, 8 May 2026 15:27:27 -0700 Subject: [PATCH 20/22] Move HERMES preload views out of tom_targets Drop the two preload views and their URL patterns; their replacements now live in tom_hermes. Update three template {% url %} refs to the tom_hermes: namespace. Removes the last hard import of tom_hermes from tom_base. --- docs/managing_data/stream_pub_sub.rst | 9 +++ .../photometry_datalist_for_target.html | 2 +- .../tom_targets/target_group_share.html | 2 +- .../templates/tom_targets/target_share.html | 2 +- tom_targets/urls.py | 5 +- tom_targets/views.py | 65 +------------------ 6 files changed, 15 insertions(+), 70 deletions(-) diff --git a/docs/managing_data/stream_pub_sub.rst b/docs/managing_data/stream_pub_sub.rst index c91825de5..79caeeae1 100644 --- a/docs/managing_data/stream_pub_sub.rst +++ b/docs/managing_data/stream_pub_sub.rst @@ -190,3 +190,12 @@ j * The ``share_data_with_hermes`` / ``share_target_list_with_hermes`` / ``share_data_with_tom`` functions in ``tom_dataproducts.sharing`` are removed. Replace call sites with ``tom_common.sharing.get_sharing_backend(name)().share(...)``. +* The HERMES "preload" views (the ``Open in Hermes 🗗`` button) have moved from + ``tom_targets`` to ``tom_hermes``. URL names change accordingly: any custom + template referencing ``{% url 'tom_targets:hermes-preload' %}`` / + ``{% url 'targets:hermes-preload' %}`` must use + ``{% url 'tom_hermes:target-preload' %}``, and ``targets:group-hermes-preload`` + becomes ``tom_hermes:target-grouping-preload``. 
The URL paths themselves + also change (``/targets//hermes-preload/`` → ``/hermes/targets//preload/``), + but unless a TOM operator hardcoded those paths instead of using ``{% url %}``, + no further action is required. diff --git a/tom_dataproducts/templates/tom_dataproducts/partials/photometry_datalist_for_target.html b/tom_dataproducts/templates/tom_dataproducts/partials/photometry_datalist_for_target.html index cd40de7b7..d831e5476 100644 --- a/tom_dataproducts/templates/tom_dataproducts/partials/photometry_datalist_for_target.html +++ b/tom_dataproducts/templates/tom_dataproducts/partials/photometry_datalist_for_target.html @@ -78,7 +78,7 @@ or
- +
{% endif %} diff --git a/tom_targets/templates/tom_targets/target_group_share.html b/tom_targets/templates/tom_targets/target_group_share.html index 15144cf1b..78c4c1808 100644 --- a/tom_targets/templates/tom_targets/target_group_share.html +++ b/tom_targets/templates/tom_targets/target_group_share.html @@ -27,7 +27,7 @@

Share or
- +
{% endif %} diff --git a/tom_targets/templates/tom_targets/target_share.html b/tom_targets/templates/tom_targets/target_share.html index 95cf43d06..d19eeb4b8 100644 --- a/tom_targets/templates/tom_targets/target_share.html +++ b/tom_targets/templates/tom_targets/target_share.html @@ -40,7 +40,7 @@

Share {{ tar or
-
{% endif %} diff --git a/tom_targets/urls.py b/tom_targets/urls.py index 4086bb6b5..01c138553 100644 --- a/tom_targets/urls.py +++ b/tom_targets/urls.py @@ -5,7 +5,7 @@ from .views import (TargetGroupingView, TargetGroupingDeleteView, TargetGroupingCreateView, TargetAddRemoveGroupingView, TargetMergeView, TargetPersistentShareManageFormView, PersistentShareManageFormView, TargetPersistentShareManageTable, PersistentShareManageTable) -from .views import TargetGroupingShareView, TargetHermesPreloadView, TargetGroupingHermesPreloadView +from .views import TargetGroupingShareView from .views import TargetFacilitySelectionView from .views import TargetSeedView from .viewsets import PersistentShareViewSet @@ -34,14 +34,11 @@ path('/update/', TargetUpdateView.as_view(), name='update'), path('/delete/', TargetDeleteView.as_view(), name='delete'), path('/share/', TargetShareView.as_view(), name='share'), - path('/hermes-preload/', TargetHermesPreloadView.as_view(), name='hermes-preload'), path('/', TargetDetailView.as_view(), name='detail'), path('/observation-list/', render_observation_table, name='render-observation-table'), path('targetgrouping//delete/', TargetGroupingDeleteView.as_view(), name='delete-group'), path('targetgrouping/create/', TargetGroupingCreateView.as_view(), name='create-group'), path('targetgrouping//share/', TargetGroupingShareView.as_view(), name='share-group'), - path('targetgrouping//hermes-preload/', TargetGroupingHermesPreloadView.as_view(), - name='group-hermes-preload'), path('persistentshare/manage/', PersistentShareManageFormView.as_view(), name='persistent-share-manage-form'), path('/persistentshare/manage/', TargetPersistentShareManageFormView.as_view(), name='target-persistent-share-manage-form'), diff --git a/tom_targets/views.py b/tom_targets/views.py index 52cff4141..10d894360 100644 --- a/tom_targets/views.py +++ b/tom_targets/views.py @@ -16,7 +16,7 @@ from django.db import transaction from django_filters.views import FilterView 
from django.http import HttpResponse -from django.http import HttpResponseRedirect, QueryDict, StreamingHttpResponse, HttpResponseBadRequest +from django.http import HttpResponseRedirect, QueryDict, StreamingHttpResponse from django.forms import HiddenInput from django.shortcuts import redirect, render from django.template.loader import render_to_string @@ -24,7 +24,7 @@ from django.utils.text import slugify from django.utils.safestring import mark_safe from django.views.generic.edit import CreateView, UpdateView, DeleteView, FormView -from django.views.generic.detail import DetailView, SingleObjectMixin +from django.views.generic.detail import DetailView from django.views.generic import RedirectView, TemplateView, View from rest_framework.views import APIView @@ -62,7 +62,6 @@ from tom_observations.utils import get_sidereal_visibility from tom_targets.seed import seed_messier_targets from tom_targets.tables import TargetTable, TargetGroupTable -from tom_hermes.sharing import BuildHermesMessage, preload_to_hermes logger = logging.getLogger(__name__) logger.setLevel(logging.DEBUG) @@ -559,34 +558,6 @@ def render_observation_table(request, pk): return render(request, 'tom_targets/partials/observation_table.html', context={'object': Target.objects.get(id=pk)}) -class TargetHermesPreloadView(SingleObjectMixin, View): - model = Target - # Set app_name for Django-Guardian Permissions in case of Custom Target Model - permission_required = f'{Target._meta.app_label}.change_target' - - def post(self, request, *args, **kwargs): - target = self.get_object() - sharing = getattr(settings, "DATA_SHARING", None) - if sharing and sharing.get('hermes', {}).get('HERMES_API_KEY'): - topic = request.POST.get('share_destination', '').split(':')[-1] - title = request.POST.get('share_title', '') - if not title: - title = f'Updated data for {target.name}' - hermes_message = BuildHermesMessage( - title=title, - topic=topic, - submitter=request.POST.get('submitter'), - 
message=request.POST.get('share_message', ''), - authors=sharing['hermes'].get('DEFAULT_AUTHORS') - ) - reduced_datums = ReducedDatum.objects.filter(pk__in=request.POST.getlist('share-box', [])) - preload_key = preload_to_hermes(hermes_message, reduced_datums, [target]) - load_url = sharing['hermes']['BASE_URL'] + f'submit-message?id={preload_key}' - return HttpResponseRedirect(load_url) - else: - return HttpResponseBadRequest("Must have hermes section with HERMES_API_KEY set in DATA_SHARING settings") - - class TargetImportView(LoginRequiredMixin, TemplateView): """ View that handles the import of targets from a CSV. Requires authentication. @@ -959,38 +930,6 @@ def form_valid(self, form): return redirect(self.get_success_url()) -class TargetGroupingHermesPreloadView(SingleObjectMixin, View): - model = TargetList - # Set app_name for Django-Guardian Permissions in case of Custom Target Model - permission_required = f'{Target._meta.app_label}.change_target' - - def post(self, request, *args, **kwargs): - targetlist = self.get_object() - sharing = getattr(settings, "DATA_SHARING", None) - if sharing and sharing.get('hermes', {}).get('HERMES_API_KEY'): - topic = request.POST.get('share_destination', '').split(':')[-1] - title = request.POST.get('share_title', '') - if not title: - title = f'Updated targets for group {targetlist.name}.' 
- hermes_message = BuildHermesMessage( - title=title, - topic=topic, - submitter=request.POST.get('submitter'), - message=request.POST.get('share_message', ''), - authors=sharing['hermes'].get('DEFAULT_AUTHORS') - ) - targets = Target.objects.filter(pk__in=request.POST.getlist('selected-target', [])) - if request.POST.get('dataSwitch', '') == 'on': - reduced_datums = ReducedDatum.objects.filter(target__in=targets, data_type='photometry') - else: - reduced_datums = ReducedDatum.objects.none() - preload_key = preload_to_hermes(hermes_message, reduced_datums, targets) - load_url = sharing['hermes']['BASE_URL'] + f'submit-message?id={preload_key}' - return HttpResponseRedirect(load_url) - else: - return HttpResponseBadRequest("Must have hermes section with HERMES_API_KEY set in DATA_SHARING settings") - - class TargetFacilitySelectionView(Raise403PermissionRequiredMixin, FormView): """ View to select targets suitable to observe from a specific facility/location, taking into account target visibility From c8f292fbfc77107c59cdc76219242b04325c1019 Mon Sep 17 00:00:00 2001 From: "William (Lindy) Lindstrom" Date: Fri, 8 May 2026 15:29:29 -0700 Subject: [PATCH 21/22] Gate hermes_sharing on apps.is_installed('tom_hermes') The "Open in Hermes" button's formaction resolves to a URL owned by tom_hermes. So, hide the button when tom_hermes is absent. 
--- .../templatetags/dataproduct_extras.py | 10 +++++++- tom_targets/views.py | 24 +++++++++++-------- 2 files changed, 23 insertions(+), 11 deletions(-) diff --git a/tom_dataproducts/templatetags/dataproduct_extras.py b/tom_dataproducts/templatetags/dataproduct_extras.py index dff8328b9..a461afce0 100644 --- a/tom_dataproducts/templatetags/dataproduct_extras.py +++ b/tom_dataproducts/templatetags/dataproduct_extras.py @@ -3,6 +3,7 @@ from django import template from django import forms +from django.apps import apps from django.conf import settings from django.contrib.auth.models import Group from django.core.paginator import Paginator @@ -210,8 +211,15 @@ def get_photometry_data(context, target, target_share=False): form = DataShareForm(initial=initial, user=context['request'].user) form.fields['data_type'].widget = forms.HiddenInput() + # ``hermes_sharing`` gates the "Open in Hermes" button in the share dialog. + # The button's formaction resolves a URL that lives in tom_hermes, so the + # button must be hidden when tom_hermes is not installed (otherwise + # ``{% url 'tom_hermes:target-preload' %}`` raises NoReverseMatch). 
sharing = getattr(settings, "DATA_SHARING", None) - hermes_sharing = sharing and sharing.get('hermes', {}).get('HERMES_API_KEY') + hermes_sharing = ( + apps.is_installed('tom_hermes') + and sharing and sharing.get('hermes', {}).get('HERMES_API_KEY') + ) context = {'data': data, 'target': target, diff --git a/tom_targets/views.py b/tom_targets/views.py index 10d894360..7390b4a29 100644 --- a/tom_targets/views.py +++ b/tom_targets/views.py @@ -6,6 +6,7 @@ from urllib.parse import urlencode import numpy as np +from django.apps import apps from django.conf import settings from django.contrib import messages from django.contrib.auth.mixins import LoginRequiredMixin @@ -407,12 +408,15 @@ def get_context_data(self, *args, **kwargs): form = TargetShareForm(initial=initial) context['form'] = form - # Add into the context whether hermes-sharing is setup or not + # Add into the context whether hermes-sharing is setup or not. + # Both halves matter: the "Open in Hermes" button needs (a) a HERMES + # API key configured AND (b) tom_hermes installed, since the button's + # formaction resolves a URL name owned by tom_hermes. sharing = getattr(settings, "DATA_SHARING", None) - if sharing and sharing.get('hermes', {}).get('HERMES_API_KEY'): - context['hermes_sharing'] = True - else: - context['hermes_sharing'] = False + context['hermes_sharing'] = bool( + apps.is_installed('tom_hermes') + and sharing and sharing.get('hermes', {}).get('HERMES_API_KEY') + ) return context @@ -849,12 +853,12 @@ def get_context_data(self, *args, **kwargs): form = TargetListShareForm(initial=initial) context['form'] = form - # Add into the context whether hermes-sharing is setup or not + # See TargetShareView.get_context_data for why both halves are required. 
sharing = getattr(settings, "DATA_SHARING", None) - if sharing and sharing.get('hermes', {}).get('HERMES_API_KEY'): - context['hermes_sharing'] = True - else: - context['hermes_sharing'] = False + context['hermes_sharing'] = bool( + apps.is_installed('tom_hermes') + and sharing and sharing.get('hermes', {}).get('HERMES_API_KEY') + ) return context From efc1f3e261e35f753b45453aca534afab72b9080 Mon Sep 17 00:00:00 2001 From: "William (Lindy) Lindstrom" Date: Fri, 8 May 2026 15:56:47 -0700 Subject: [PATCH 22/22] make pyproject.toml self-consistent and update lock file --- poetry.lock | 791 +++++++++---------------------------------------- pyproject.toml | 2 +- 2 files changed, 149 insertions(+), 644 deletions(-) diff --git a/poetry.lock b/poetry.lock index 3c786c823..f88a4b0b1 100644 --- a/poetry.lock +++ b/poetry.lock @@ -7,7 +7,7 @@ description = "A light, configurable Sphinx theme" optional = false python-versions = ">=3.9" groups = ["docs"] -markers = "sys_platform == \"win32\" or sys_platform == \"emscripten\" or sys_platform != \"win32\" and sys_platform != \"emscripten\"" +markers = "(sys_platform == \"win32\" or sys_platform == \"emscripten\" or sys_platform != \"win32\" and sys_platform != \"emscripten\") and (python_version <= \"3.11\" or python_version >= \"3.12\")" files = [ {file = "alabaster-0.7.16-py3-none-any.whl", hash = "sha256:b46733c07dce03ae4e150330b975c75737fa60f0a7c591b6c8bf4928a28e2c92"}, {file = "alabaster-0.7.16.tar.gz", hash = "sha256:75a8b99c28a5dad50dd7f8ccdd447a121ddb3892da9e53d1ca5cca3106d58d65"}, @@ -20,7 +20,7 @@ description = "ALeRCE Client" optional = false python-versions = ">=3.10" groups = ["main"] -markers = "sys_platform == \"win32\" or sys_platform == \"emscripten\" or sys_platform != \"win32\" and sys_platform != \"emscripten\"" +markers = "(sys_platform == \"win32\" or sys_platform == \"emscripten\" or sys_platform != \"win32\" and sys_platform != \"emscripten\") and (python_version <= \"3.11\" or python_version >= \"3.12\")" 
files = [ {file = "alerce-2.3.0-py3-none-any.whl", hash = "sha256:b35bc4136d484bcee9179043aa8dfeaba22f29aba93420798f2ff1451a969cdf"}, {file = "alerce-2.3.0.tar.gz", hash = "sha256:8dead5d06a544294f24c6b1213080adc01205c8cc584e03a654178e13cc5dc36"}, @@ -40,7 +40,7 @@ description = "Document parameters, class attributes, return types, and variable optional = false python-versions = ">=3.8" groups = ["coverage"] -markers = "sys_platform == \"win32\" or sys_platform == \"emscripten\" or sys_platform != \"win32\" and sys_platform != \"emscripten\"" +markers = "(sys_platform == \"win32\" or sys_platform == \"emscripten\" or sys_platform != \"win32\" and sys_platform != \"emscripten\") and (python_version <= \"3.11\" or python_version >= \"3.12\")" files = [ {file = "annotated_doc-0.0.4-py3-none-any.whl", hash = "sha256:571ac1dc6991c450b25a9c2d84a3705e2ae7a53467b5d111c24fa8baabbed320"}, {file = "annotated_doc-0.0.4.tar.gz", hash = "sha256:fbcda96e87e9c92ad167c2e53839e57503ecfda18804ea28102353485033faa4"}, @@ -53,7 +53,7 @@ description = "Python implementation of the ASDF Standard" optional = false python-versions = ">=3.10" groups = ["main"] -markers = "sys_platform == \"win32\" or sys_platform == \"emscripten\" or sys_platform != \"win32\" and sys_platform != \"emscripten\"" +markers = "(sys_platform == \"win32\" or sys_platform == \"emscripten\" or sys_platform != \"win32\" and sys_platform != \"emscripten\") and (python_version <= \"3.11\" or python_version >= \"3.12\")" files = [ {file = "asdf-5.3.0-py3-none-any.whl", hash = "sha256:013ed65bd7b7440be3f9f175ec583734fd08ee5b84ad4a072d1a6b847634f26c"}, {file = "asdf-5.3.0.tar.gz", hash = "sha256:7f03fd1ea17b1c6f78599e5325673284a466a428732f11eba1b950658fded7d2"}, @@ -85,7 +85,7 @@ description = "ASDF serialization support for astropy" optional = false python-versions = ">=3.10" groups = ["main"] -markers = "(sys_platform == \"win32\" or sys_platform == \"emscripten\" or sys_platform != \"win32\" and sys_platform != 
\"emscripten\") and python_version < \"3.11\"" +markers = "(sys_platform == \"win32\" or sys_platform == \"emscripten\" or sys_platform != \"win32\" and sys_platform != \"emscripten\") and (python_version <= \"3.11\" or python_version >= \"3.12\")" files = [ {file = "asdf_astropy-0.7.1-py3-none-any.whl", hash = "sha256:003b6969e066b3bae07027c724fe3e6434c49c3ba9d09f889bfecdc587086ba0"}, {file = "asdf_astropy-0.7.1.tar.gz", hash = "sha256:5aa5a448ee0945bd834a9ba8fb86cf43b39e85d24260e1339b734173ab6024c7"}, @@ -104,32 +104,6 @@ packaging = ">=19" docs = ["docutils", "graphviz", "matplotlib", "sphinx", "sphinx-asdf", "sphinx-astropy", "sphinx-automodapi", "tomli"] test = ["coverage", "pytest", "pytest-astropy", "scipy"] -[[package]] -name = "asdf-astropy" -version = "0.11.0" -description = "ASDF serialization support for astropy" -optional = false -python-versions = ">=3.11" -groups = ["main"] -markers = "(sys_platform == \"win32\" or sys_platform == \"emscripten\" or sys_platform != \"win32\" and sys_platform != \"emscripten\") and python_version >= \"3.11\"" -files = [ - {file = "asdf_astropy-0.11.0-py3-none-any.whl", hash = "sha256:10ff554382bb10b1bc931159d2c91e4399487c27afc0051c90a062112e9f0b7f"}, - {file = "asdf_astropy-0.11.0.tar.gz", hash = "sha256:6944700e3394a324a23772bdf97abb9803cd66a86b095101a548e9dfc650e2c0"}, -] - -[package.dependencies] -asdf = ">=3.3.0" -asdf-coordinates-schemas = ">=0.4" -asdf-standard = ">=1.1.0" -asdf-transform-schemas = ">=0.6" -astropy = ">=6.0" -numpy = ">=1.26.4" -packaging = ">=19" - -[package.extras] -docs = ["docutils", "graphviz", "matplotlib", "sphinx", "sphinx-asdf", "sphinx-astropy", "sphinx-automodapi", "tomli"] -test = ["coverage", "gwcs (>=0.22)", "pytest", "pytest-asdf-plugin", "pytest-astropy", "scipy (>=1.14.1)"] - [[package]] name = "asdf-coordinates-schemas" version = "0.5.1" @@ -137,7 +111,7 @@ description = "ASDF schemas for coordinates" optional = false python-versions = ">=3.10" groups = ["main"] -markers = 
"sys_platform == \"win32\" or sys_platform == \"emscripten\" or sys_platform != \"win32\" and sys_platform != \"emscripten\"" +markers = "(sys_platform == \"win32\" or sys_platform == \"emscripten\" or sys_platform != \"win32\" and sys_platform != \"emscripten\") and (python_version <= \"3.11\" or python_version >= \"3.12\")" files = [ {file = "asdf_coordinates_schemas-0.5.1-py3-none-any.whl", hash = "sha256:6c80cf928fd2de7cbc7c1dd003809fca2678c90c8843586f5670f0cf99bb48a7"}, {file = "asdf_coordinates_schemas-0.5.1.tar.gz", hash = "sha256:d9cf72fc312f27cb8f2b9e6ce10c38ebf826e2e6b80ba1f591f75bb0eaf734e2"}, @@ -158,7 +132,7 @@ description = "The ASDF Standard schemas" optional = false python-versions = ">=3.10" groups = ["main"] -markers = "sys_platform == \"win32\" or sys_platform == \"emscripten\" or sys_platform != \"win32\" and sys_platform != \"emscripten\"" +markers = "(sys_platform == \"win32\" or sys_platform == \"emscripten\" or sys_platform != \"win32\" and sys_platform != \"emscripten\") and (python_version <= \"3.11\" or python_version >= \"3.12\")" files = [ {file = "asdf_standard-1.5.0-py3-none-any.whl", hash = "sha256:487d192c5eb1335a62c07a88d82503e1fe57911c0876ee183cf572d385f97213"}, {file = "asdf_standard-1.5.0.tar.gz", hash = "sha256:5942caf7d143f39f72f634484373c7f40ce48571d1db3c271e13858e33fe5966"}, @@ -175,7 +149,7 @@ description = "ASDF schemas for transforms" optional = false python-versions = ">=3.9" groups = ["main"] -markers = "sys_platform == \"win32\" or sys_platform == \"emscripten\" or sys_platform != \"win32\" and sys_platform != \"emscripten\"" +markers = "(sys_platform == \"win32\" or sys_platform == \"emscripten\" or sys_platform != \"win32\" and sys_platform != \"emscripten\") and (python_version <= \"3.11\" or python_version >= \"3.12\")" files = [ {file = "asdf_transform_schemas-0.6.0-py3-none-any.whl", hash = "sha256:f63a5cc90c421209fd8e3b064c9cca8372220aa1c932676cad558942dc6153b0"}, {file = "asdf_transform_schemas-0.6.0.tar.gz", 
hash = "sha256:0f50f8e096fffd2d14b9c82995901266ef25b23d0dffc30ad41bba46851a9732"}, @@ -195,7 +169,7 @@ description = "ASDF WCS schemas" optional = false python-versions = ">=3.9" groups = ["main"] -markers = "sys_platform == \"win32\" or sys_platform == \"emscripten\" or sys_platform != \"win32\" and sys_platform != \"emscripten\"" +markers = "(sys_platform == \"win32\" or sys_platform == \"emscripten\" or sys_platform != \"win32\" and sys_platform != \"emscripten\") and (python_version <= \"3.11\" or python_version >= \"3.12\")" files = [ {file = "asdf_wcs_schemas-0.5.0-py3-none-any.whl", hash = "sha256:7050c7dfd252f2aa9c32e4fe3711c1823336d923f631816f8f3e9f27c95491f7"}, {file = "asdf_wcs_schemas-0.5.0.tar.gz", hash = "sha256:af7bdda46c20195b97272d8d3fdc7d9286beb143dbd69503f59c5ee1fe1559b5"}, @@ -217,7 +191,7 @@ description = "ASGI specs, helper code, and adapters" optional = false python-versions = ">=3.9" groups = ["main"] -markers = "sys_platform == \"win32\" or sys_platform == \"emscripten\" or sys_platform != \"win32\" and sys_platform != \"emscripten\"" +markers = "(sys_platform == \"win32\" or sys_platform == \"emscripten\" or sys_platform != \"win32\" and sys_platform != \"emscripten\") and (python_version <= \"3.11\" or python_version >= \"3.12\")" files = [ {file = "asgiref-3.11.1-py3-none-any.whl", hash = "sha256:e8667a091e69529631969fd45dc268fa79b99c92c5fcdda727757e52146ec133"}, {file = "asgiref-3.11.1.tar.gz", hash = "sha256:5f184dc43b7e763efe848065441eac62229c9f7b0475f41f80e207a114eda4ce"}, @@ -236,7 +210,7 @@ description = "Observation planning package for astronomers" optional = false python-versions = ">=3.7" groups = ["main"] -markers = "sys_platform == \"win32\" or sys_platform == \"emscripten\" or sys_platform != \"win32\" and sys_platform != \"emscripten\"" +markers = "(sys_platform == \"win32\" or sys_platform == \"emscripten\" or sys_platform != \"win32\" and sys_platform != \"emscripten\") and (python_version <= \"3.11\" or python_version >= 
\"3.12\")" files = [ {file = "astroplan-0.10.1.tar.gz", hash = "sha256:39d97c3377e1630abff3a94d8c956980f77a3e809e27a0376dd7d30abe3b6959"}, ] @@ -260,7 +234,7 @@ description = "Astronomy and astrophysics core library" optional = false python-versions = ">=3.10" groups = ["main"] -markers = "(sys_platform == \"win32\" or sys_platform == \"emscripten\" or sys_platform != \"win32\" and sys_platform != \"emscripten\") and python_version < \"3.11\"" +markers = "(sys_platform == \"win32\" or sys_platform == \"emscripten\" or sys_platform != \"win32\" and sys_platform != \"emscripten\") and (python_version <= \"3.11\" or python_version >= \"3.12\")" files = [ {file = "astropy-6.1.7-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:be954c5f7707a089609053665aeb76493b79e5c4753c39486761bc6d137bf040"}, {file = "astropy-6.1.7-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:b5e48df5ab2e3e521e82a7233a4b1159d071e64e6cbb76c45415dc68d3b97af1"}, @@ -308,45 +282,6 @@ test = ["pytest (>=7.0)", "pytest-astropy (>=0.10)", "pytest-astropy-header (>=0 test-all = ["array-api-strict", "astropy[test]", "coverage[toml]", "ipython (>=4.2)", "objgraph", "sgp4 (>=2.3)", "skyfield (>=1.20)"] typing = ["typing_extensions (>=4.0.0)"] -[[package]] -name = "astropy" -version = "7.2.0" -description = "Astronomy and astrophysics core library" -optional = false -python-versions = ">=3.11" -groups = ["main"] -markers = "(sys_platform == \"win32\" or sys_platform == \"emscripten\" or sys_platform != \"win32\" and sys_platform != \"emscripten\") and python_version >= \"3.11\"" -files = [ - {file = "astropy-7.2.0-cp311-abi3-macosx_10_9_x86_64.whl", hash = "sha256:efac04df4cc488efe630c2fff1992d6516dfb16a06e197fb68bc9e8e3b85def1"}, - {file = "astropy-7.2.0-cp311-abi3-macosx_11_0_arm64.whl", hash = "sha256:52e9a7d9c86b21f1af911a2930cd0c4a275fb302d455c89e11eedaffef6f2ad0"}, - {file = "astropy-7.2.0-cp311-abi3-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = 
"sha256:97c370421b9bb13d4c762c7af06d172bad7c01bd5bcf88314f6913c3c235b770"}, - {file = "astropy-7.2.0-cp311-abi3-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:2f39ce2c80211fbceb005d377a5478cd0d66c42aa1498d252f2239fe5a025c24"}, - {file = "astropy-7.2.0-cp311-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:ad4d71db994d45f046a1a5449000cf0f88ab6367cb67658500654a0586d6ab19"}, - {file = "astropy-7.2.0-cp311-abi3-win32.whl", hash = "sha256:95161f26602433176483e8bde8ab1a8ca09148f5b4bf5190569a26d381091598"}, - {file = "astropy-7.2.0-cp311-abi3-win_amd64.whl", hash = "sha256:dc7c340ba1713e55c93071b32033f3153470a0f663a4d539c03a7c9b44020790"}, - {file = "astropy-7.2.0-cp311-abi3-win_arm64.whl", hash = "sha256:0c428735a3f15b05c2095bc6ccb5f98a64bc99fb7015866af19ff8492420ddaf"}, - {file = "astropy-7.2.0.tar.gz", hash = "sha256:ae48bc26b1feaeb603cd94bd1fa1aa39137a115fe931b7f13787ab420e8c3070"}, -] - -[package.dependencies] -astropy-iers-data = ">=0.2025.10.27.0.39.10" -numpy = ">=1.24" -packaging = ">=22.0.0" -pyerfa = ">=2.0.1.1" -PyYAML = ">=6.0.0" - -[package.extras] -all = ["asdf-astropy (>=0.3)", "astropy[ipython]", "astropy[jupyter]", "astropy[recommended]", "beautifulsoup4 (>=4.9.3)", "bleach (>=3.2.1)", "bottleneck (>=1.3.3)", "certifi (>=2022.6.15.1)", "dask[dataframe] (>=2024.8.0)", "fsspec[http] (>=2023.4.0)", "h5py (>=3.9.0)", "html5lib (>=1.1)", "jplephem (>=2.17.0)", "mpmath (>=1.2.1)", "pyarrow (>=14.0.2)", "pytz (>=2016.10)", "s3fs (>=2023.4.0)", "sortedcontainers (>=2.1.0)", "uncompresspy (>=0.4.0)"] -dev = ["astropy[docs]", "astropy[recommended]", "astropy[test]", "astropy[typing]"] -dev-all = ["astropy[dev]", "astropy[test-all]", "tox (>=4.22.0)"] -docs = ["Jinja2 (>=3.1.3)", "astropy[recommended]", "matplotlib (>=3.9.1)", "pytest (>=8.0.0)", "sphinx (>=8.2.0)", "sphinx-astropy[confv2] (>=1.9.1)", "sphinx-changelog (>=1.2.0)", "sphinx_design (>=0.6.1)", "sphinxcontrib-globalsubs (>=0.1.1)"] -ipython = ["ipython (>=8.0.0)"] 
-jupyter = ["astropy[ipython]", "ipydatagrid (>=1.1.13)", "ipykernel (>=6.16.0)", "ipywidgets (>=7.7.3)", "jupyter-core (>=4.11.2)", "pandas (>=2.0.0)"] -recommended = ["matplotlib (>=3.8.0)", "narwhals (>=1.42.0)", "scipy (>=1.9.2)"] -test = ["coverage (>=6.4.4)", "pre-commit (>=2.9.3)", "pytest (>=8.0.0)", "pytest-astropy (>=0.10.0)", "pytest-astropy-header (>=0.2.1)", "pytest-doctestplus (>=1.4.0)", "pytest-xdist (>=2.5.0)", "threadpoolctl (>=3.0.0)"] -test-all = ["array-api-strict (<2.4) ; python_version < \"3.12\"", "array-api-strict (>=1.0)", "astropy[all]", "astropy[test]", "objgraph (>=3.1.2)", "sgp4 (>=2.3)", "skyfield (>=1.42.0)"] -typing = ["narwhals (>=1.42.0)", "pandas-stubs (>=2.0.0)"] - [[package]] name = "astropy-iers-data" version = "0.2026.4.27.1.3.2" @@ -354,7 +289,7 @@ description = "IERS Earth Rotation and Leap Second tables for the astropy core p optional = false python-versions = ">=3.10" groups = ["main"] -markers = "sys_platform == \"win32\" or sys_platform == \"emscripten\" or sys_platform != \"win32\" and sys_platform != \"emscripten\"" +markers = "(sys_platform == \"win32\" or sys_platform == \"emscripten\" or sys_platform != \"win32\" and sys_platform != \"emscripten\") and (python_version <= \"3.11\" or python_version >= \"3.12\")" files = [ {file = "astropy_iers_data-0.2026.4.27.1.3.2-py3-none-any.whl", hash = "sha256:3c09006b1b7c369a4dd9ba7e395b04cbfedb41a9253013b3bf7e5b8ac53a7699"}, {file = "astropy_iers_data-0.2026.4.27.1.3.2.tar.gz", hash = "sha256:fc71b5b2e601afb1b8c4f22a35161c551d67469ec65502123591dae6a87d453b"}, @@ -371,7 +306,7 @@ description = "Functions and classes to access online astronomical data resource optional = false python-versions = ">=3.9" groups = ["main"] -markers = "sys_platform == \"win32\" or sys_platform == \"emscripten\" or sys_platform != \"win32\" and sys_platform != \"emscripten\"" +markers = "(sys_platform == \"win32\" or sys_platform == \"emscripten\" or sys_platform != \"win32\" and sys_platform != 
\"emscripten\") and (python_version <= \"3.11\" or python_version >= \"3.12\")" files = [ {file = "astroquery-0.4.11-py3-none-any.whl", hash = "sha256:e34f114b285dd07a10ddb2065ebce829b01b0e740fd89dbc81a3077808e24b2d"}, {file = "astroquery-0.4.11.tar.gz", hash = "sha256:5537529bddc7fa07e773d5cd9baca593e3f5d93474edd1914f68e89506042b33"}, @@ -398,7 +333,7 @@ description = "Annotate AST trees with source code positions" optional = false python-versions = ">=3.8" groups = ["main"] -markers = "sys_platform == \"win32\" or sys_platform == \"emscripten\" or sys_platform != \"win32\" and sys_platform != \"emscripten\"" +markers = "(sys_platform == \"win32\" or sys_platform == \"emscripten\" or sys_platform != \"win32\" and sys_platform != \"emscripten\") and (python_version <= \"3.11\" or python_version >= \"3.12\")" files = [ {file = "asttokens-3.0.1-py3-none-any.whl", hash = "sha256:15a3ebc0f43c2d0a50eeafea25e19046c68398e487b9f1f5b517f7c0f40f976a"}, {file = "asttokens-3.0.1.tar.gz", hash = "sha256:71a4ee5de0bde6a31d64f6b13f2293ac190344478f081c3d1bccfcf5eacb0cb7"}, @@ -415,7 +350,7 @@ description = "Classes Without Boilerplate" optional = false python-versions = ">=3.9" groups = ["main"] -markers = "sys_platform == \"win32\" or sys_platform == \"emscripten\" or sys_platform != \"win32\" and sys_platform != \"emscripten\"" +markers = "(sys_platform == \"win32\" or sys_platform == \"emscripten\" or sys_platform != \"win32\" and sys_platform != \"emscripten\") and (python_version <= \"3.11\" or python_version >= \"3.12\")" files = [ {file = "attrs-26.1.0-py3-none-any.whl", hash = "sha256:c647aa4a12dfbad9333ca4e71fe62ddc36f4e63b2d260a37a8b83d2f043ac309"}, {file = "attrs-26.1.0.tar.gz", hash = "sha256:d03ceb89cb322a8fd706d4fb91940737b6642aa36998fe130a9bc96c985eff32"}, @@ -428,7 +363,7 @@ description = "Internationalization utilities" optional = false python-versions = ">=3.8" groups = ["docs"] -markers = "sys_platform == \"win32\" or sys_platform == \"emscripten\" or 
sys_platform != \"win32\" and sys_platform != \"emscripten\"" +markers = "(sys_platform == \"win32\" or sys_platform == \"emscripten\" or sys_platform != \"win32\" and sys_platform != \"emscripten\") and (python_version <= \"3.11\" or python_version >= \"3.12\")" files = [ {file = "babel-2.18.0-py3-none-any.whl", hash = "sha256:e2b422b277c2b9a9630c1d7903c2a00d0830c409c59ac8cae9081c92f1aeba35"}, {file = "babel-2.18.0.tar.gz", hash = "sha256:b80b99a14bd085fcacfa15c9165f651fbb3406e66cc603abf11c5750937c992d"}, @@ -461,7 +396,7 @@ description = "Screen-scraping library" optional = false python-versions = ">=3.7.0" groups = ["main"] -markers = "sys_platform == \"win32\" or sys_platform == \"emscripten\" or sys_platform != \"win32\" and sys_platform != \"emscripten\"" +markers = "(sys_platform == \"win32\" or sys_platform == \"emscripten\" or sys_platform != \"win32\" and sys_platform != \"emscripten\") and (python_version <= \"3.11\" or python_version >= \"3.12\")" files = [ {file = "beautifulsoup4-4.14.3-py3-none-any.whl", hash = "sha256:0918bfe44902e6ad8d57732ba310582e98da931428d231a5ecb9e7c703a735bb"}, {file = "beautifulsoup4-4.14.3.tar.gz", hash = "sha256:6292b1c5186d356bba669ef9f7f051757099565ad9ada5dd630bd9de5fa7fb86"}, @@ -485,7 +420,7 @@ description = "Python package for providing Mozilla's CA Bundle." 
optional = false python-versions = ">=3.7" groups = ["main", "coverage", "docs", "test"] -markers = "sys_platform == \"win32\" or sys_platform == \"emscripten\" or sys_platform != \"win32\" and sys_platform != \"emscripten\"" +markers = "(sys_platform == \"win32\" or sys_platform == \"emscripten\" or sys_platform != \"win32\" and sys_platform != \"emscripten\") and (python_version <= \"3.11\" or python_version >= \"3.12\")" files = [ {file = "certifi-2026.4.22-py3-none-any.whl", hash = "sha256:3cb2210c8f88ba2318d29b0388d1023c8492ff72ecdde4ebdaddbb13a31b1c4a"}, {file = "certifi-2026.4.22.tar.gz", hash = "sha256:8d455352a37b71bf76a79caa83a3d6c25afee4a385d632127b6afb3963f1c580"}, @@ -498,7 +433,7 @@ description = "Foreign Function Interface for Python calling C code." optional = false python-versions = ">=3.9" groups = ["main"] -markers = "platform_python_implementation != \"PyPy\" and (sys_platform == \"win32\" or sys_platform == \"emscripten\" or sys_platform != \"win32\" and sys_platform != \"emscripten\")" +markers = "(sys_platform == \"win32\" or sys_platform == \"emscripten\" or sys_platform != \"win32\" and sys_platform != \"emscripten\") and (python_version <= \"3.11\" or python_version >= \"3.12\") and platform_python_implementation != \"PyPy\"" files = [ {file = "cffi-2.0.0-cp310-cp310-macosx_10_13_x86_64.whl", hash = "sha256:0cf2d91ecc3fcc0625c2c530fe004f82c110405f101548512cce44322fa8ac44"}, {file = "cffi-2.0.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:f73b96c41e3b2adedc34a7356e64c8eb96e03a3782b535e043a986276ce12a49"}, @@ -596,7 +531,7 @@ description = "The Real First Universal Charset Detector. 
Open, modern and activ optional = false python-versions = ">=3.7" groups = ["main", "coverage", "docs", "test"] -markers = "sys_platform == \"win32\" or sys_platform == \"emscripten\" or sys_platform != \"win32\" and sys_platform != \"emscripten\"" +markers = "(sys_platform == \"win32\" or sys_platform == \"emscripten\" or sys_platform != \"win32\" and sys_platform != \"emscripten\") and (python_version <= \"3.11\" or python_version >= \"3.12\")" files = [ {file = "charset_normalizer-3.4.7-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:cdd68a1fb318e290a2077696b7eb7a21a49163c455979c639bf5a5dcdc46617d"}, {file = "charset_normalizer-3.4.7-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:e17b8d5d6a8c47c85e68ca8379def1303fd360c3e22093a807cd34a71cd082b8"}, @@ -736,7 +671,7 @@ description = "Composable command line interface toolkit" optional = false python-versions = ">=3.10" groups = ["coverage"] -markers = "sys_platform == \"win32\" or sys_platform == \"emscripten\" or sys_platform != \"win32\" and sys_platform != \"emscripten\"" +markers = "(sys_platform == \"win32\" or sys_platform == \"emscripten\" or sys_platform != \"win32\" and sys_platform != \"emscripten\") and (python_version <= \"3.11\" or python_version >= \"3.12\")" files = [ {file = "click-8.3.3-py3-none-any.whl", hash = "sha256:a2bf429bb3033c89fa4936ffb35d5cb471e3719e1f3c8a7c3fff0b8314305613"}, {file = "click-8.3.3.tar.gz", hash = "sha256:398329ad4837b2ff7cbe1dd166a4c0f8900c3ca3a218de04466f38f6497f18a2"}, @@ -756,7 +691,7 @@ files = [ {file = "colorama-0.4.6-py2.py3-none-any.whl", hash = "sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6"}, {file = "colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44"}, ] -markers = {main = "sys_platform == \"win32\"", coverage = "(sys_platform == \"win32\" or sys_platform == \"emscripten\" or sys_platform != \"win32\" and sys_platform != 
\"emscripten\") and platform_system == \"Windows\"", docs = "sys_platform == \"win32\""} +markers = {main = "sys_platform == \"win32\" and (python_version <= \"3.11\" or python_version >= \"3.12\")", coverage = "(sys_platform == \"win32\" or sys_platform == \"emscripten\" or sys_platform != \"win32\" and sys_platform != \"emscripten\") and (python_version <= \"3.11\" or python_version >= \"3.12\") and platform_system == \"Windows\"", docs = "sys_platform == \"win32\" and (python_version <= \"3.11\" or python_version >= \"3.12\")"} [[package]] name = "commonmark" @@ -765,7 +700,7 @@ description = "Python parser for the CommonMark Markdown spec" optional = false python-versions = "*" groups = ["docs"] -markers = "sys_platform == \"win32\" or sys_platform == \"emscripten\" or sys_platform != \"win32\" and sys_platform != \"emscripten\"" +markers = "(sys_platform == \"win32\" or sys_platform == \"emscripten\" or sys_platform != \"win32\" and sys_platform != \"emscripten\") and (python_version <= \"3.11\" or python_version >= \"3.12\")" files = [ {file = "commonmark-0.9.1-py2.py3-none-any.whl", hash = "sha256:da2f38c92590f83de410ba1a3cbceafbc74fee9def35f9251ba9a971d6d66fd9"}, {file = "commonmark-0.9.1.tar.gz", hash = "sha256:452f9dc859be7f06631ddcb328b6919c67984aca654e5fefb3914d54691aed60"}, @@ -781,7 +716,7 @@ description = "Python library for calculating contours of 2D quadrilateral grids optional = false python-versions = ">=3.10" groups = ["main"] -markers = "(sys_platform == \"win32\" or sys_platform == \"emscripten\" or sys_platform != \"win32\" and sys_platform != \"emscripten\") and python_version < \"3.11\"" +markers = "(sys_platform == \"win32\" or sys_platform == \"emscripten\" or sys_platform != \"win32\" and sys_platform != \"emscripten\") and (python_version <= \"3.11\" or python_version >= \"3.12\")" files = [ {file = "contourpy-1.3.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:ba38e3f9f330af820c4b27ceb4b9c7feee5fe0493ea53a8720f4792667465934"}, 
{file = "contourpy-1.3.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:dc41ba0714aa2968d1f8674ec97504a8f7e334f48eeacebcaa6256213acb0989"}, @@ -852,99 +787,6 @@ mypy = ["bokeh", "contourpy[bokeh,docs]", "docutils-stubs", "mypy (==1.15.0)", " test = ["Pillow", "contourpy[test-no-images]", "matplotlib"] test-no-images = ["pytest", "pytest-cov", "pytest-rerunfailures", "pytest-xdist", "wurlitzer"] -[[package]] -name = "contourpy" -version = "1.3.3" -description = "Python library for calculating contours of 2D quadrilateral grids" -optional = false -python-versions = ">=3.11" -groups = ["main"] -markers = "(sys_platform == \"win32\" or sys_platform == \"emscripten\" or sys_platform != \"win32\" and sys_platform != \"emscripten\") and python_version >= \"3.11\"" -files = [ - {file = "contourpy-1.3.3-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:709a48ef9a690e1343202916450bc48b9e51c049b089c7f79a267b46cffcdaa1"}, - {file = "contourpy-1.3.3-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:23416f38bfd74d5d28ab8429cc4d63fa67d5068bd711a85edb1c3fb0c3e2f381"}, - {file = "contourpy-1.3.3-cp311-cp311-manylinux_2_26_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:929ddf8c4c7f348e4c0a5a3a714b5c8542ffaa8c22954862a46ca1813b667ee7"}, - {file = "contourpy-1.3.3-cp311-cp311-manylinux_2_26_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:9e999574eddae35f1312c2b4b717b7885d4edd6cb46700e04f7f02db454e67c1"}, - {file = "contourpy-1.3.3-cp311-cp311-manylinux_2_26_s390x.manylinux_2_28_s390x.whl", hash = "sha256:0bf67e0e3f482cb69779dd3061b534eb35ac9b17f163d851e2a547d56dba0a3a"}, - {file = "contourpy-1.3.3-cp311-cp311-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:51e79c1f7470158e838808d4a996fa9bac72c498e93d8ebe5119bc1e6becb0db"}, - {file = "contourpy-1.3.3-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:598c3aaece21c503615fd59c92a3598b428b2f01bfb4b8ca9c4edeecc2438620"}, - {file = "contourpy-1.3.3-cp311-cp311-musllinux_1_2_x86_64.whl", hash = 
"sha256:322ab1c99b008dad206d406bb61d014cf0174df491ae9d9d0fac6a6fda4f977f"}, - {file = "contourpy-1.3.3-cp311-cp311-win32.whl", hash = "sha256:fd907ae12cd483cd83e414b12941c632a969171bf90fc937d0c9f268a31cafff"}, - {file = "contourpy-1.3.3-cp311-cp311-win_amd64.whl", hash = "sha256:3519428f6be58431c56581f1694ba8e50626f2dd550af225f82fb5f5814d2a42"}, - {file = "contourpy-1.3.3-cp311-cp311-win_arm64.whl", hash = "sha256:15ff10bfada4bf92ec8b31c62bf7c1834c244019b4a33095a68000d7075df470"}, - {file = "contourpy-1.3.3-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:b08a32ea2f8e42cf1d4be3169a98dd4be32bafe4f22b6c4cb4ba810fa9e5d2cb"}, - {file = "contourpy-1.3.3-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:556dba8fb6f5d8742f2923fe9457dbdd51e1049c4a43fd3986a0b14a1d815fc6"}, - {file = "contourpy-1.3.3-cp312-cp312-manylinux_2_26_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:92d9abc807cf7d0e047b95ca5d957cf4792fcd04e920ca70d48add15c1a90ea7"}, - {file = "contourpy-1.3.3-cp312-cp312-manylinux_2_26_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:b2e8faa0ed68cb29af51edd8e24798bb661eac3bd9f65420c1887b6ca89987c8"}, - {file = "contourpy-1.3.3-cp312-cp312-manylinux_2_26_s390x.manylinux_2_28_s390x.whl", hash = "sha256:626d60935cf668e70a5ce6ff184fd713e9683fb458898e4249b63be9e28286ea"}, - {file = "contourpy-1.3.3-cp312-cp312-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:4d00e655fcef08aba35ec9610536bfe90267d7ab5ba944f7032549c55a146da1"}, - {file = "contourpy-1.3.3-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:451e71b5a7d597379ef572de31eeb909a87246974d960049a9848c3bc6c41bf7"}, - {file = "contourpy-1.3.3-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:459c1f020cd59fcfe6650180678a9993932d80d44ccde1fa1868977438f0b411"}, - {file = "contourpy-1.3.3-cp312-cp312-win32.whl", hash = "sha256:023b44101dfe49d7d53932be418477dba359649246075c996866106da069af69"}, - {file = "contourpy-1.3.3-cp312-cp312-win_amd64.whl", hash = 
"sha256:8153b8bfc11e1e4d75bcb0bff1db232f9e10b274e0929de9d608027e0d34ff8b"}, - {file = "contourpy-1.3.3-cp312-cp312-win_arm64.whl", hash = "sha256:07ce5ed73ecdc4a03ffe3e1b3e3c1166db35ae7584be76f65dbbe28a7791b0cc"}, - {file = "contourpy-1.3.3-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:177fb367556747a686509d6fef71d221a4b198a3905fe824430e5ea0fda54eb5"}, - {file = "contourpy-1.3.3-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:d002b6f00d73d69333dac9d0b8d5e84d9724ff9ef044fd63c5986e62b7c9e1b1"}, - {file = "contourpy-1.3.3-cp313-cp313-manylinux_2_26_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:348ac1f5d4f1d66d3322420f01d42e43122f43616e0f194fc1c9f5d830c5b286"}, - {file = "contourpy-1.3.3-cp313-cp313-manylinux_2_26_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:655456777ff65c2c548b7c454af9c6f33f16c8884f11083244b5819cc214f1b5"}, - {file = "contourpy-1.3.3-cp313-cp313-manylinux_2_26_s390x.manylinux_2_28_s390x.whl", hash = "sha256:644a6853d15b2512d67881586bd03f462c7ab755db95f16f14d7e238f2852c67"}, - {file = "contourpy-1.3.3-cp313-cp313-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:4debd64f124ca62069f313a9cb86656ff087786016d76927ae2cf37846b006c9"}, - {file = "contourpy-1.3.3-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:a15459b0f4615b00bbd1e91f1b9e19b7e63aea7483d03d804186f278c0af2659"}, - {file = "contourpy-1.3.3-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:ca0fdcd73925568ca027e0b17ab07aad764be4706d0a925b89227e447d9737b7"}, - {file = "contourpy-1.3.3-cp313-cp313-win32.whl", hash = "sha256:b20c7c9a3bf701366556e1b1984ed2d0cedf999903c51311417cf5f591d8c78d"}, - {file = "contourpy-1.3.3-cp313-cp313-win_amd64.whl", hash = "sha256:1cadd8b8969f060ba45ed7c1b714fe69185812ab43bd6b86a9123fe8f99c3263"}, - {file = "contourpy-1.3.3-cp313-cp313-win_arm64.whl", hash = "sha256:fd914713266421b7536de2bfa8181aa8c699432b6763a0ea64195ebe28bff6a9"}, - {file = "contourpy-1.3.3-cp313-cp313t-macosx_10_13_x86_64.whl", hash = 
"sha256:88df9880d507169449d434c293467418b9f6cbe82edd19284aa0409e7fdb933d"}, - {file = "contourpy-1.3.3-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:d06bb1f751ba5d417047db62bca3c8fde202b8c11fb50742ab3ab962c81e8216"}, - {file = "contourpy-1.3.3-cp313-cp313t-manylinux_2_26_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:e4e6b05a45525357e382909a4c1600444e2a45b4795163d3b22669285591c1ae"}, - {file = "contourpy-1.3.3-cp313-cp313t-manylinux_2_26_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:ab3074b48c4e2cf1a960e6bbeb7f04566bf36b1861d5c9d4d8ac04b82e38ba20"}, - {file = "contourpy-1.3.3-cp313-cp313t-manylinux_2_26_s390x.manylinux_2_28_s390x.whl", hash = "sha256:6c3d53c796f8647d6deb1abe867daeb66dcc8a97e8455efa729516b997b8ed99"}, - {file = "contourpy-1.3.3-cp313-cp313t-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:50ed930df7289ff2a8d7afeb9603f8289e5704755c7e5c3bbd929c90c817164b"}, - {file = "contourpy-1.3.3-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:4feffb6537d64b84877da813a5c30f1422ea5739566abf0bd18065ac040e120a"}, - {file = "contourpy-1.3.3-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:2b7e9480ffe2b0cd2e787e4df64270e3a0440d9db8dc823312e2c940c167df7e"}, - {file = "contourpy-1.3.3-cp313-cp313t-win32.whl", hash = "sha256:283edd842a01e3dcd435b1c5116798d661378d83d36d337b8dde1d16a5fc9ba3"}, - {file = "contourpy-1.3.3-cp313-cp313t-win_amd64.whl", hash = "sha256:87acf5963fc2b34825e5b6b048f40e3635dd547f590b04d2ab317c2619ef7ae8"}, - {file = "contourpy-1.3.3-cp313-cp313t-win_arm64.whl", hash = "sha256:3c30273eb2a55024ff31ba7d052dde990d7d8e5450f4bbb6e913558b3d6c2301"}, - {file = "contourpy-1.3.3-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:fde6c716d51c04b1c25d0b90364d0be954624a0ee9d60e23e850e8d48353d07a"}, - {file = "contourpy-1.3.3-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:cbedb772ed74ff5be440fa8eee9bd49f64f6e3fc09436d9c7d8f1c287b121d77"}, - {file = 
"contourpy-1.3.3-cp314-cp314-manylinux_2_26_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:22e9b1bd7a9b1d652cd77388465dc358dafcd2e217d35552424aa4f996f524f5"}, - {file = "contourpy-1.3.3-cp314-cp314-manylinux_2_26_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:a22738912262aa3e254e4f3cb079a95a67132fc5a063890e224393596902f5a4"}, - {file = "contourpy-1.3.3-cp314-cp314-manylinux_2_26_s390x.manylinux_2_28_s390x.whl", hash = "sha256:afe5a512f31ee6bd7d0dda52ec9864c984ca3d66664444f2d72e0dc4eb832e36"}, - {file = "contourpy-1.3.3-cp314-cp314-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:f64836de09927cba6f79dcd00fdd7d5329f3fccc633468507079c829ca4db4e3"}, - {file = "contourpy-1.3.3-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:1fd43c3be4c8e5fd6e4f2baeae35ae18176cf2e5cced681cca908addf1cdd53b"}, - {file = "contourpy-1.3.3-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:6afc576f7b33cf00996e5c1102dc2a8f7cc89e39c0b55df93a0b78c1bd992b36"}, - {file = "contourpy-1.3.3-cp314-cp314-win32.whl", hash = "sha256:66c8a43a4f7b8df8b71ee1840e4211a3c8d93b214b213f590e18a1beca458f7d"}, - {file = "contourpy-1.3.3-cp314-cp314-win_amd64.whl", hash = "sha256:cf9022ef053f2694e31d630feaacb21ea24224be1c3ad0520b13d844274614fd"}, - {file = "contourpy-1.3.3-cp314-cp314-win_arm64.whl", hash = "sha256:95b181891b4c71de4bb404c6621e7e2390745f887f2a026b2d99e92c17892339"}, - {file = "contourpy-1.3.3-cp314-cp314t-macosx_10_13_x86_64.whl", hash = "sha256:33c82d0138c0a062380332c861387650c82e4cf1747aaa6938b9b6516762e772"}, - {file = "contourpy-1.3.3-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:ea37e7b45949df430fe649e5de8351c423430046a2af20b1c1961cae3afcda77"}, - {file = "contourpy-1.3.3-cp314-cp314t-manylinux_2_26_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:d304906ecc71672e9c89e87c4675dc5c2645e1f4269a5063b99b0bb29f232d13"}, - {file = "contourpy-1.3.3-cp314-cp314t-manylinux_2_26_ppc64le.manylinux_2_28_ppc64le.whl", hash = 
"sha256:ca658cd1a680a5c9ea96dc61cdbae1e85c8f25849843aa799dfd3cb370ad4fbe"}, - {file = "contourpy-1.3.3-cp314-cp314t-manylinux_2_26_s390x.manylinux_2_28_s390x.whl", hash = "sha256:ab2fd90904c503739a75b7c8c5c01160130ba67944a7b77bbf36ef8054576e7f"}, - {file = "contourpy-1.3.3-cp314-cp314t-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:b7301b89040075c30e5768810bc96a8e8d78085b47d8be6e4c3f5a0b4ed478a0"}, - {file = "contourpy-1.3.3-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:2a2a8b627d5cc6b7c41a4beff6c5ad5eb848c88255fda4a8745f7e901b32d8e4"}, - {file = "contourpy-1.3.3-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:fd6ec6be509c787f1caf6b247f0b1ca598bef13f4ddeaa126b7658215529ba0f"}, - {file = "contourpy-1.3.3-cp314-cp314t-win32.whl", hash = "sha256:e74a9a0f5e3fff48fb5a7f2fd2b9b70a3fe014a67522f79b7cca4c0c7e43c9ae"}, - {file = "contourpy-1.3.3-cp314-cp314t-win_amd64.whl", hash = "sha256:13b68d6a62db8eafaebb8039218921399baf6e47bf85006fd8529f2a08ef33fc"}, - {file = "contourpy-1.3.3-cp314-cp314t-win_arm64.whl", hash = "sha256:b7448cb5a725bb1e35ce88771b86fba35ef418952474492cf7c764059933ff8b"}, - {file = "contourpy-1.3.3-pp311-pypy311_pp73-macosx_10_15_x86_64.whl", hash = "sha256:cd5dfcaeb10f7b7f9dc8941717c6c2ade08f587be2226222c12b25f0483ed497"}, - {file = "contourpy-1.3.3-pp311-pypy311_pp73-macosx_11_0_arm64.whl", hash = "sha256:0c1fc238306b35f246d61a1d416a627348b5cf0648648a031e14bb8705fcdfe8"}, - {file = "contourpy-1.3.3-pp311-pypy311_pp73-manylinux_2_26_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:70f9aad7de812d6541d29d2bbf8feb22ff7e1c299523db288004e3157ff4674e"}, - {file = "contourpy-1.3.3-pp311-pypy311_pp73-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:5ed3657edf08512fc3fe81b510e35c2012fbd3081d2e26160f27ca28affec989"}, - {file = "contourpy-1.3.3-pp311-pypy311_pp73-win_amd64.whl", hash = "sha256:3d1a3799d62d45c18bafd41c5fa05120b96a28079f2393af559b843d1a966a77"}, - {file = "contourpy-1.3.3.tar.gz", hash = 
"sha256:083e12155b210502d0bca491432bb04d56dc3432f95a979b429f2848c3dbe880"}, -] - -[package.dependencies] -numpy = ">=1.25" - -[package.extras] -bokeh = ["bokeh", "selenium"] -docs = ["furo", "sphinx (>=7.2)", "sphinx-copybutton"] -mypy = ["bokeh", "contourpy[bokeh,docs]", "docutils-stubs", "mypy (==1.17.0)", "types-Pillow"] -test = ["Pillow", "contourpy[test-no-images]", "matplotlib"] -test-no-images = ["pytest", "pytest-cov", "pytest-rerunfailures", "pytest-xdist", "wurlitzer"] - [[package]] name = "coverage" version = "7.13.5" @@ -952,7 +794,7 @@ description = "Code coverage measurement for Python" optional = false python-versions = ">=3.10" groups = ["coverage"] -markers = "sys_platform == \"win32\" or sys_platform == \"emscripten\" or sys_platform != \"win32\" and sys_platform != \"emscripten\"" +markers = "(sys_platform == \"win32\" or sys_platform == \"emscripten\" or sys_platform != \"win32\" and sys_platform != \"emscripten\") and (python_version <= \"3.11\" or python_version >= \"3.12\")" files = [ {file = "coverage-7.13.5-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:e0723d2c96324561b9aa76fb982406e11d93cdb388a7a7da2b16e04719cf7ca5"}, {file = "coverage-7.13.5-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:52f444e86475992506b32d4e5ca55c24fc88d73bcbda0e9745095b28ef4dc0cf"}, @@ -1075,7 +917,7 @@ description = "Show coverage stats online via coveralls.io" optional = false python-versions = "<4.0,>=3.10" groups = ["coverage"] -markers = "sys_platform == \"win32\" or sys_platform == \"emscripten\" or sys_platform != \"win32\" and sys_platform != \"emscripten\"" +markers = "(sys_platform == \"win32\" or sys_platform == \"emscripten\" or sys_platform != \"win32\" and sys_platform != \"emscripten\") and (python_version <= \"3.11\" or python_version >= \"3.12\")" files = [ {file = "coveralls-4.1.0-py3-none-any.whl", hash = "sha256:bfacfda2d443c24fc90d67035027cec15015fff2dbd036427e8bf8f4953dda2e"}, {file = "coveralls-4.1.0.tar.gz", hash = 
"sha256:dab364025ba80cbb95ce56c6fc62cd9172d7fd637060ea235dde99d9b46a4494"}, @@ -1096,7 +938,7 @@ description = "Bootstrap4 template pack for django-crispy-forms" optional = false python-versions = ">=3.8" groups = ["main"] -markers = "sys_platform == \"win32\" or sys_platform == \"emscripten\" or sys_platform != \"win32\" and sys_platform != \"emscripten\"" +markers = "(sys_platform == \"win32\" or sys_platform == \"emscripten\" or sys_platform != \"win32\" and sys_platform != \"emscripten\") and (python_version <= \"3.11\" or python_version >= \"3.12\")" files = [ {file = "crispy-bootstrap4-2024.10.tar.gz", hash = "sha256:503e8922b0f3b5262a6fdf303a3a94eb2a07514812f1ca130b88f7c02dd25e2b"}, {file = "crispy_bootstrap4-2024.10-py3-none-any.whl", hash = "sha256:138a97884044ae4c4799c80595b36c42066e4e933431e2e971611e251c84f96c"}, @@ -1113,7 +955,7 @@ description = "cryptography is a package which provides cryptographic recipes an optional = false python-versions = "!=3.9.0,!=3.9.1,>=3.8" groups = ["main"] -markers = "sys_platform == \"win32\" or sys_platform == \"emscripten\" or sys_platform != \"win32\" and sys_platform != \"emscripten\"" +markers = "(sys_platform == \"win32\" or sys_platform == \"emscripten\" or sys_platform != \"win32\" and sys_platform != \"emscripten\") and (python_version <= \"3.11\" or python_version >= \"3.12\")" files = [ {file = "cryptography-47.0.0-cp311-abi3-macosx_10_9_universal2.whl", hash = "sha256:160ad728f128972d362e714054f6ba0067cab7fb350c5202a9ae8ae4ce3ef1a0"}, {file = "cryptography-47.0.0-cp311-abi3-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:b9a8943e359b7615db1a3ba587994618e094ff3d6fa5a390c73d079ce18b3973"}, @@ -1180,7 +1022,7 @@ description = "Composable style cycles" optional = false python-versions = ">=3.8" groups = ["main"] -markers = "sys_platform == \"win32\" or sys_platform == \"emscripten\" or sys_platform != \"win32\" and sys_platform != \"emscripten\"" +markers = "(sys_platform == \"win32\" or 
sys_platform == \"emscripten\" or sys_platform != \"win32\" and sys_platform != \"emscripten\") and (python_version <= \"3.11\" or python_version >= \"3.12\")" files = [ {file = "cycler-0.12.1-py3-none-any.whl", hash = "sha256:85cef7cff222d8644161529808465972e51340599459b8ac3ccbac5a854e0d30"}, {file = "cycler-0.12.1.tar.gz", hash = "sha256:88bb128f02ba341da8ef447245a9e138fae777f6a23943da4540077d3601eb1c"}, @@ -1197,7 +1039,7 @@ description = "Decorators for Humans" optional = false python-versions = ">=3.8" groups = ["main"] -markers = "sys_platform == \"win32\" or sys_platform == \"emscripten\" or sys_platform != \"win32\" and sys_platform != \"emscripten\"" +markers = "(sys_platform == \"win32\" or sys_platform == \"emscripten\" or sys_platform != \"win32\" and sys_platform != \"emscripten\") and (python_version <= \"3.11\" or python_version >= \"3.12\")" files = [ {file = "decorator-5.2.1-py3-none-any.whl", hash = "sha256:d316bb415a2d9e2d2b3abcc4084c6502fc09240e292cd76a76afc106a1c8e04a"}, {file = "decorator-5.2.1.tar.gz", hash = "sha256:65f266143752f734b0a7cc83c46f4618af75b8c5911b00ccb61d0ac9b6da0360"}, @@ -1210,7 +1052,7 @@ description = "A high-level Python web framework that encourages rapid developme optional = false python-versions = ">=3.10" groups = ["main"] -markers = "sys_platform == \"win32\" or sys_platform == \"emscripten\" or sys_platform != \"win32\" and sys_platform != \"emscripten\"" +markers = "(sys_platform == \"win32\" or sys_platform == \"emscripten\" or sys_platform != \"win32\" and sys_platform != \"emscripten\") and (python_version <= \"3.11\" or python_version >= \"3.12\")" files = [ {file = "django-5.2.13-py3-none-any.whl", hash = "sha256:5788fce61da23788a8ce6f02583765ab060d396720924789f97fa42119d37f7a"}, {file = "django-5.2.13.tar.gz", hash = "sha256:a31589db5188d074c63f0945c3888fad104627dfcc236fb2b97f71f89da33bc4"}, @@ -1232,7 +1074,7 @@ description = "Bootstrap 4 for Django" optional = false python-versions = ">=3.8" groups = ["main"] 
-markers = "sys_platform == \"win32\" or sys_platform == \"emscripten\" or sys_platform != \"win32\" and sys_platform != \"emscripten\"" +markers = "(sys_platform == \"win32\" or sys_platform == \"emscripten\" or sys_platform != \"win32\" and sys_platform != \"emscripten\") and (python_version <= \"3.11\" or python_version >= \"3.12\")" files = [ {file = "django_bootstrap4-24.4-py3-none-any.whl", hash = "sha256:90f642d8c9a6622694544a4fdbf8e6734a037b33b5d6b2884cfd79c87c5d610f"}, {file = "django_bootstrap4-24.4.tar.gz", hash = "sha256:b6147eeaa534057f866962d50bcf506b3d378720340d7a8243494a6e33f4e23e"}, @@ -1249,7 +1091,7 @@ description = "The code formerly known as django.contrib.comments." optional = false python-versions = "*" groups = ["main"] -markers = "sys_platform == \"win32\" or sys_platform == \"emscripten\" or sys_platform != \"win32\" and sys_platform != \"emscripten\"" +markers = "(sys_platform == \"win32\" or sys_platform == \"emscripten\" or sys_platform != \"win32\" and sys_platform != \"emscripten\") and (python_version <= \"3.11\" or python_version >= \"3.12\")" files = [ {file = "django-contrib-comments-2.2.0.tar.gz", hash = "sha256:48de00f15677e016a216aeff205d6e00e4391c9a5702136c64119c472b7356da"}, {file = "django_contrib_comments-2.2.0-py3-none-any.whl", hash = "sha256:2ca79060bbc8fc5b636981ef6e50f35ab83649af75fc1be47bf770636be3271c"}, @@ -1265,7 +1107,7 @@ description = "Best way to have Django DRY forms" optional = false python-versions = ">=3.10" groups = ["main"] -markers = "sys_platform == \"win32\" or sys_platform == \"emscripten\" or sys_platform != \"win32\" and sys_platform != \"emscripten\"" +markers = "(sys_platform == \"win32\" or sys_platform == \"emscripten\" or sys_platform != \"win32\" and sys_platform != \"emscripten\") and (python_version <= \"3.11\" or python_version >= \"3.12\")" files = [ {file = "django_crispy_forms-2.6-py3-none-any.whl", hash = "sha256:8ee0ae28b6b0ac41ff48a65944480c049fe8d1b0047086874fd7efabf4ec1374"}, {file 
= "django_crispy_forms-2.6.tar.gz", hash = "sha256:4921a1087c6cd4f9fa3c139654c1de1c1c385f8bd6729aaee530bc0121ab4b93"}, @@ -1281,7 +1123,7 @@ description = "Extensions for Django" optional = false python-versions = ">=3.6" groups = ["main"] -markers = "sys_platform == \"win32\" or sys_platform == \"emscripten\" or sys_platform != \"win32\" and sys_platform != \"emscripten\"" +markers = "(sys_platform == \"win32\" or sys_platform == \"emscripten\" or sys_platform != \"win32\" and sys_platform != \"emscripten\") and (python_version <= \"3.11\" or python_version >= \"3.12\")" files = [ {file = "django-extensions-3.2.3.tar.gz", hash = "sha256:44d27919d04e23b3f40231c4ab7af4e61ce832ef46d610cc650d53e68328410a"}, {file = "django_extensions-3.2.3-py3-none-any.whl", hash = "sha256:9600b7562f79a92cbf1fde6403c04fee314608fefbb595502e34383ae8203401"}, @@ -1297,7 +1139,7 @@ description = "Django-filter is a reusable Django application for allowing users optional = false python-versions = ">=3.8" groups = ["main"] -markers = "sys_platform == \"win32\" or sys_platform == \"emscripten\" or sys_platform != \"win32\" and sys_platform != \"emscripten\"" +markers = "(sys_platform == \"win32\" or sys_platform == \"emscripten\" or sys_platform != \"win32\" and sys_platform != \"emscripten\") and (python_version <= \"3.11\" or python_version >= \"3.12\")" files = [ {file = "django_filter-24.3-py3-none-any.whl", hash = "sha256:c4852822928ce17fb699bcfccd644b3574f1a2d80aeb2b4ff4f16b02dd49dc64"}, {file = "django_filter-24.3.tar.gz", hash = "sha256:d8ccaf6732afd21ca0542f6733b11591030fa98669f8d15599b358e24a2cd9c3"}, @@ -1313,7 +1155,7 @@ description = "Essential Gravatar support for Django. 
Features helper methods, t optional = false python-versions = "*" groups = ["main"] -markers = "sys_platform == \"win32\" or sys_platform == \"emscripten\" or sys_platform != \"win32\" and sys_platform != \"emscripten\"" +markers = "(sys_platform == \"win32\" or sys_platform == \"emscripten\" or sys_platform != \"win32\" and sys_platform != \"emscripten\") and (python_version <= \"3.11\" or python_version >= \"3.12\")" files = [ {file = "django_gravatar2-1.4.5-py2.py3-none-any.whl", hash = "sha256:7e6c8f63f59b0077d42402531684807ea6295867ebd2195a638d87b851f0d41c"}, {file = "django_gravatar2-1.4.5.tar.gz", hash = "sha256:2dbb56465e395dd8b3920d4017e27a4756912cc2ad9b11ba48cf143871a80364"}, @@ -1326,7 +1168,7 @@ description = "Implementation of per object permissions for Django." optional = false python-versions = ">=3.5" groups = ["main"] -markers = "sys_platform == \"win32\" or sys_platform == \"emscripten\" or sys_platform != \"win32\" and sys_platform != \"emscripten\"" +markers = "(sys_platform == \"win32\" or sys_platform == \"emscripten\" or sys_platform != \"win32\" and sys_platform != \"emscripten\") and (python_version <= \"3.11\" or python_version >= \"3.12\")" files = [ {file = "django-guardian-2.4.0.tar.gz", hash = "sha256:c58a68ae76922d33e6bdc0e69af1892097838de56e93e78a8361090bcd9f89a0"}, {file = "django_guardian-2.4.0-py3-none-any.whl", hash = "sha256:440ca61358427e575323648b25f8384739e54c38b3d655c81d75e0cd0d61b697"}, @@ -1342,7 +1184,7 @@ description = "Extensions for using Django with htmx." 
optional = false python-versions = ">=3.10" groups = ["main"] -markers = "sys_platform == \"win32\" or sys_platform == \"emscripten\" or sys_platform != \"win32\" and sys_platform != \"emscripten\"" +markers = "(sys_platform == \"win32\" or sys_platform == \"emscripten\" or sys_platform != \"win32\" and sys_platform != \"emscripten\") and (python_version <= \"3.11\" or python_version >= \"3.12\")" files = [ {file = "django_htmx-1.27.0-py3-none-any.whl", hash = "sha256:13e1e13b87d39b57f95aae6e4987cb3df056d0b1373a41f4a94504a00298ffd8"}, {file = "django_htmx-1.27.0.tar.gz", hash = "sha256:036e5da801bfdf5f1ca815f21592cfb9f004a898f330c842f15e55c70e301a75"}, @@ -1359,7 +1201,7 @@ description = "Monkey-patching and extensions for django-stubs" optional = false python-versions = ">=3.10" groups = ["main"] -markers = "sys_platform == \"win32\" or sys_platform == \"emscripten\" or sys_platform != \"win32\" and sys_platform != \"emscripten\"" +markers = "(sys_platform == \"win32\" or sys_platform == \"emscripten\" or sys_platform != \"win32\" and sys_platform != \"emscripten\") and (python_version <= \"3.11\" or python_version >= \"3.12\")" files = [ {file = "django_stubs_ext-6.0.3-py3-none-any.whl", hash = "sha256:9e4105955419ae310d7da9cfd808e039d4dae3092c628f021057bb4f2c237f8f"}, {file = "django_stubs_ext-6.0.3.tar.gz", hash = "sha256:3307d42132bc295d5744de6276bc5fdf6896efc70f891e21c0ae8bdf529d2762"}, @@ -1376,7 +1218,7 @@ description = "Table/data-grid framework for Django" optional = false python-versions = ">=3.10" groups = ["main"] -markers = "sys_platform == \"win32\" or sys_platform == \"emscripten\" or sys_platform != \"win32\" and sys_platform != \"emscripten\"" +markers = "(sys_platform == \"win32\" or sys_platform == \"emscripten\" or sys_platform != \"win32\" and sys_platform != \"emscripten\") and (python_version <= \"3.11\" or python_version >= \"3.12\")" files = [ {file = "django_tables2-3.0.0-py3-none-any.whl", hash = 
"sha256:2a5b5447f10d7a8cfb7a2e8f0b139d969c7eb2e675079a4b8ba0107956345bfa"}, {file = "django_tables2-3.0.0.tar.gz", hash = "sha256:3c5343f72663b0d6684e8e6cbcb16caae10d7621a51c1cf3c581f57ce605f8ee"}, @@ -1395,7 +1237,7 @@ description = "An implementation and backport of background workers and tasks in optional = false python-versions = ">=3.8" groups = ["main"] -markers = "sys_platform == \"win32\" or sys_platform == \"emscripten\" or sys_platform != \"win32\" and sys_platform != \"emscripten\"" +markers = "(sys_platform == \"win32\" or sys_platform == \"emscripten\" or sys_platform != \"win32\" and sys_platform != \"emscripten\") and (python_version <= \"3.11\" or python_version >= \"3.12\")" files = [ {file = "django_tasks-0.6.1-py3-none-any.whl", hash = "sha256:b3648e28bdcda809cb7831f3aff98aa46c327025447c462b8943cce9dfbb0281"}, {file = "django_tasks-0.6.1.tar.gz", hash = "sha256:4086e7eb9e965f79c4ac76f5c3690ec3bf41c461585237b71b4bde729ced9826"}, @@ -1418,7 +1260,7 @@ description = "Web APIs for Django, made easy." 
optional = false python-versions = ">=3.10" groups = ["main"] -markers = "sys_platform == \"win32\" or sys_platform == \"emscripten\" or sys_platform != \"win32\" and sys_platform != \"emscripten\"" +markers = "(sys_platform == \"win32\" or sys_platform == \"emscripten\" or sys_platform != \"win32\" and sys_platform != \"emscripten\") and (python_version <= \"3.11\" or python_version >= \"3.12\")" files = [ {file = "djangorestframework-3.17.1-py3-none-any.whl", hash = "sha256:c3c74dd3e83a5a3efc37b3c18d92bd6f86a6791c7b7d4dff62bb068500e76457"}, {file = "djangorestframework-3.17.1.tar.gz", hash = "sha256:a6def5f447fe78ff853bff1d47a3c59bf38f5434b031780b351b0c73a62db1a5"}, @@ -1434,7 +1276,7 @@ description = "Docutils -- Python Documentation Utilities" optional = false python-versions = ">=3.9" groups = ["docs"] -markers = "sys_platform == \"win32\" or sys_platform == \"emscripten\" or sys_platform != \"win32\" and sys_platform != \"emscripten\"" +markers = "(sys_platform == \"win32\" or sys_platform == \"emscripten\" or sys_platform != \"win32\" and sys_platform != \"emscripten\") and (python_version <= \"3.11\" or python_version >= \"3.12\")" files = [ {file = "docutils-0.21.2-py3-none-any.whl", hash = "sha256:dafca5b9e384f0e419294eb4d2ff9fa826435bf15f15b7bd45723e8ad76811b2"}, {file = "docutils-0.21.2.tar.gz", hash = "sha256:3a6b18732edf182daa3cd12775bbb338cf5691468f91eeeb109deff6ebfa986f"}, @@ -1466,14 +1308,14 @@ description = "Get the currently executing AST node of a frame, and other inform optional = false python-versions = ">=3.8" groups = ["main"] -markers = "sys_platform == \"win32\" or sys_platform == \"emscripten\" or sys_platform != \"win32\" and sys_platform != \"emscripten\"" +markers = "(sys_platform == \"win32\" or sys_platform == \"emscripten\" or sys_platform != \"win32\" and sys_platform != \"emscripten\") and (python_version <= \"3.11\" or python_version >= \"3.12\")" files = [ {file = "executing-2.2.1-py2.py3-none-any.whl", hash = 
"sha256:760643d3452b4d777d295bb167ccc74c64a81df23fb5e08eff250c425a4b2017"}, {file = "executing-2.2.1.tar.gz", hash = "sha256:3632cc370565f6648cc328b32435bd120a1e4ebb20c77e3fdde9a13cd1e533c4"}, ] [package.extras] -tests = ["asttokens (>=2.1.0)", "coverage", "coverage-enable-subprocess", "ipython", "littleutils", "pytest", "rich ; python_version >= \"3.11\""] +tests = ["asttokens (>=2.1.0)", "coverage", "coverage-enable-subprocess", "ipython", "littleutils", "pytest", "rich"] [[package]] name = "factory-boy" @@ -1481,8 +1323,8 @@ version = "3.3.3" description = "A versatile test fixtures replacement based on thoughtbot's factory_bot for Ruby." optional = false python-versions = ">=3.8" -groups = ["test"] -markers = "sys_platform == \"win32\" or sys_platform == \"emscripten\" or sys_platform != \"win32\" and sys_platform != \"emscripten\"" +groups = ["main", "test"] +markers = "(sys_platform == \"win32\" or sys_platform == \"emscripten\" or sys_platform != \"win32\" and sys_platform != \"emscripten\") and (python_version <= \"3.11\" or python_version >= \"3.12\")" files = [ {file = "factory_boy-3.3.3-py2.py3-none-any.whl", hash = "sha256:1c39e3289f7e667c4285433f305f8d506efc2fe9c73aaea4151ebd5cdea394fc"}, {file = "factory_boy-3.3.3.tar.gz", hash = "sha256:866862d226128dfac7f2b4160287e899daf54f2612778327dd03d0e2cb1e3d03"}, @@ -1501,8 +1343,8 @@ version = "40.15.0" description = "Faker is a Python package that generates fake data for you." 
optional = false python-versions = ">=3.10" -groups = ["test"] -markers = "sys_platform == \"win32\" or sys_platform == \"emscripten\" or sys_platform != \"win32\" and sys_platform != \"emscripten\"" +groups = ["main", "test"] +markers = "(sys_platform == \"win32\" or sys_platform == \"emscripten\" or sys_platform != \"win32\" and sys_platform != \"emscripten\") and (python_version <= \"3.11\" or python_version >= \"3.12\")" files = [ {file = "faker-40.15.0-py3-none-any.whl", hash = "sha256:71ab3c3370da9d2205ab74ffb0fd51273063ad562b3a3bb69d0026a20923e318"}, {file = "faker-40.15.0.tar.gz", hash = "sha256:20f3a6ec8c266b74d4c554e34118b21c3c2056c0b4a519d15c8decb3a4e6e795"}, @@ -1521,7 +1363,7 @@ description = "Common libraries for the conversion and scaling of fits images" optional = false python-versions = "*" groups = ["main"] -markers = "sys_platform == \"win32\" or sys_platform == \"emscripten\" or sys_platform != \"win32\" and sys_platform != \"emscripten\"" +markers = "(sys_platform == \"win32\" or sys_platform == \"emscripten\" or sys_platform != \"win32\" and sys_platform != \"emscripten\") and (python_version <= \"3.11\" or python_version >= \"3.12\")" files = [ {file = "fits2image-0.4.11-py3-none-any.whl", hash = "sha256:ce6047934a570915a7d0a02fc5e83d93f961fc53c620a86aeb69bac1da991a9f"}, {file = "fits2image-0.4.11.tar.gz", hash = "sha256:9ac63a3e01d17a36091e3dff394dd3dfc03acfba4fb69e93b340d958425e5450"}, @@ -1539,7 +1381,7 @@ description = "the modular source code checker: pep8 pyflakes and co" optional = false python-versions = ">=3.9" groups = ["lint"] -markers = "sys_platform == \"win32\" or sys_platform == \"emscripten\" or sys_platform != \"win32\" and sys_platform != \"emscripten\"" +markers = "(sys_platform == \"win32\" or sys_platform == \"emscripten\" or sys_platform != \"win32\" and sys_platform != \"emscripten\") and (python_version <= \"3.11\" or python_version >= \"3.12\")" files = [ {file = "flake8-7.3.0-py2.py3-none-any.whl", hash = 
"sha256:b9696257b9ce8beb888cdbe31cf885c90d31928fe202be0889a7cdafad32f01e"}, {file = "flake8-7.3.0.tar.gz", hash = "sha256:fe044858146b9fc69b551a4b490d69cf960fcb78ad1edcb84e7fbb1b4a8e3872"}, @@ -1557,7 +1399,7 @@ description = "Tools to manipulate font files" optional = false python-versions = ">=3.10" groups = ["main"] -markers = "sys_platform == \"win32\" or sys_platform == \"emscripten\" or sys_platform != \"win32\" and sys_platform != \"emscripten\"" +markers = "(sys_platform == \"win32\" or sys_platform == \"emscripten\" or sys_platform != \"win32\" and sys_platform != \"emscripten\") and (python_version <= \"3.11\" or python_version >= \"3.12\")" files = [ {file = "fonttools-4.62.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:ad5cca75776cd453b1b035b530e943334957ae152a36a88a320e779d61fc980c"}, {file = "fonttools-4.62.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:0b3ae47e8636156a9accff64c02c0924cbebad62854c4a6dbdc110cd5b4b341a"}, @@ -1612,17 +1454,17 @@ files = [ ] [package.extras] -all = ["brotli (>=1.0.1) ; platform_python_implementation == \"CPython\"", "brotlicffi (>=0.8.0) ; platform_python_implementation != \"CPython\"", "lxml (>=4.0)", "lz4 (>=1.7.4.2)", "matplotlib", "munkres ; platform_python_implementation == \"PyPy\"", "pycairo", "scipy ; platform_python_implementation != \"PyPy\"", "skia-pathops (>=0.5.0)", "sympy", "uharfbuzz (>=0.45.0)", "unicodedata2 (>=17.0.0) ; python_version <= \"3.14\"", "xattr ; sys_platform == \"darwin\"", "zopfli (>=0.1.4)"] +all = ["brotli (>=1.0.1)", "brotlicffi (>=0.8.0)", "lxml (>=4.0)", "lz4 (>=1.7.4.2)", "matplotlib", "munkres", "pycairo", "scipy", "skia-pathops (>=0.5.0)", "sympy", "uharfbuzz (>=0.45.0)", "unicodedata2 (>=17.0.0)", "xattr", "zopfli (>=0.1.4)"] graphite = ["lz4 (>=1.7.4.2)"] -interpolatable = ["munkres ; platform_python_implementation == \"PyPy\"", "pycairo", "scipy ; platform_python_implementation != \"PyPy\""] +interpolatable = ["munkres", "pycairo", "scipy"] lxml = ["lxml 
(>=4.0)"] pathops = ["skia-pathops (>=0.5.0)"] plot = ["matplotlib"] repacker = ["uharfbuzz (>=0.45.0)"] symfont = ["sympy"] -type1 = ["xattr ; sys_platform == \"darwin\""] -unicode = ["unicodedata2 (>=17.0.0) ; python_version <= \"3.14\""] -woff = ["brotli (>=1.0.1) ; platform_python_implementation == \"CPython\"", "brotlicffi (>=0.8.0) ; platform_python_implementation != \"CPython\"", "zopfli (>=0.1.4)"] +type1 = ["xattr"] +unicode = ["unicodedata2 (>=17.0.0)"] +woff = ["brotli (>=1.0.1)", "brotlicffi (>=0.8.0)", "zopfli (>=0.1.4)"] [[package]] name = "gwcs" @@ -1631,7 +1473,7 @@ description = "Generalized World Coordinate System" optional = false python-versions = ">=3.10" groups = ["main"] -markers = "(sys_platform == \"win32\" or sys_platform == \"emscripten\" or sys_platform != \"win32\" and sys_platform != \"emscripten\") and python_version < \"3.11\"" +markers = "(sys_platform == \"win32\" or sys_platform == \"emscripten\" or sys_platform != \"win32\" and sys_platform != \"emscripten\") and (python_version <= \"3.11\" or python_version >= \"3.12\")" files = [ {file = "gwcs-0.24.0-py3-none-any.whl", hash = "sha256:19c6f68193bf17468d8f0cbe1ec36235a6c380e766576595f891d6037ef3c694"}, {file = "gwcs-0.24.0.tar.gz", hash = "sha256:6387bd4492ab25e74522b6ead1971602462ed57432e4eb274a3734426d166e60"}, @@ -1649,31 +1491,6 @@ scipy = ">=1.14.1" docs = ["pydata-sphinx-theme", "sphinx", "sphinx-asdf", "sphinx-astropy", "sphinx-automodapi", "sphinx-copybutton", "sphinx-rtd-theme", "stsci-rtd-theme", "tomli"] test = ["ci-watson (>=0.3.0)", "pytest (>=8.0.0)", "pytest-astropy (>=0.11.0)"] -[[package]] -name = "gwcs" -version = "1.0.3" -description = "Generalized World Coordinate System" -optional = false -python-versions = ">=3.11" -groups = ["main"] -markers = "(sys_platform == \"win32\" or sys_platform == \"emscripten\" or sys_platform != \"win32\" and sys_platform != \"emscripten\") and python_version >= \"3.11\"" -files = [ - {file = "gwcs-1.0.3-py3-none-any.whl", hash = 
"sha256:c7bf303695a68e42719e661c91aaf802c46bf62c20a5148522ff3f969778b598"}, - {file = "gwcs-1.0.3.tar.gz", hash = "sha256:c7cc83b0a2faf4433d94a047065e1686ba0e94b736c6453083a82d5111fc114a"}, -] - -[package.dependencies] -asdf = ">=3.3.0" -asdf-astropy = ">=0.8.0" -asdf_wcs_schemas = ">=0.5.0" -astropy = ">=6.0" -numpy = ">=1.25" -scipy = ">=1.14.1" - -[package.extras] -docs = ["furo", "matplotlib", "sphinx", "sphinx-asdf", "sphinx-astropy", "sphinx-automodapi", "sphinx-copybutton", "sphinx-inline-tabs"] -test = ["ci-watson (>=0.3.0)", "pytest (>=9.0)", "pytest-astropy (>=0.11.0)"] - [[package]] name = "html5lib" version = "1.1" @@ -1681,7 +1498,7 @@ description = "HTML parser based on the WHATWG HTML specification" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" groups = ["main"] -markers = "sys_platform == \"win32\" or sys_platform == \"emscripten\" or sys_platform != \"win32\" and sys_platform != \"emscripten\"" +markers = "(sys_platform == \"win32\" or sys_platform == \"emscripten\" or sys_platform != \"win32\" and sys_platform != \"emscripten\") and (python_version <= \"3.11\" or python_version >= \"3.12\")" files = [ {file = "html5lib-1.1-py2.py3-none-any.whl", hash = "sha256:0d78f8fde1c230e99fe37986a60526d7049ed4bf8a9fadbad5f00e22e58e041d"}, {file = "html5lib-1.1.tar.gz", hash = "sha256:b2e5b40261e20f354d198eae92afc10d750afb487ed5e50f9c4eaf07c184146f"}, @@ -1704,7 +1521,7 @@ description = "Internationalized Domain Names in Applications (IDNA)" optional = false python-versions = ">=3.8" groups = ["main", "coverage", "docs", "test"] -markers = "sys_platform == \"win32\" or sys_platform == \"emscripten\" or sys_platform != \"win32\" and sys_platform != \"emscripten\"" +markers = "(sys_platform == \"win32\" or sys_platform == \"emscripten\" or sys_platform != \"win32\" and sys_platform != \"emscripten\") and (python_version <= \"3.11\" or python_version >= \"3.12\")" files = [ {file = "idna-3.13-py3-none-any.whl", hash = 
"sha256:892ea0cde124a99ce773decba204c5552b69c3c67ffd5f232eb7696135bc8bb3"}, {file = "idna-3.13.tar.gz", hash = "sha256:585ea8fe5d69b9181ec1afba340451fba6ba764af97026f92a91d4eef164a242"}, @@ -1720,7 +1537,7 @@ description = "Get image size from headers (BMP/PNG/JPEG/JPEG2000/GIF/TIFF/SVG/N optional = false python-versions = "<3.15,>=3.10" groups = ["docs"] -markers = "sys_platform == \"win32\" or sys_platform == \"emscripten\" or sys_platform != \"win32\" and sys_platform != \"emscripten\"" +markers = "(sys_platform == \"win32\" or sys_platform == \"emscripten\" or sys_platform != \"win32\" and sys_platform != \"emscripten\") and (python_version <= \"3.11\" or python_version >= \"3.12\")" files = [ {file = "imagesize-2.0.0-py2.py3-none-any.whl", hash = "sha256:5667c5bbb57ab3f1fa4bc366f4fbc971db3d5ed011fd2715fd8001f782718d96"}, {file = "imagesize-2.0.0.tar.gz", hash = "sha256:8e8358c4a05c304f1fccf7ff96f036e7243a189e9e42e90851993c558cfe9ee3"}, @@ -1758,7 +1575,7 @@ description = "Read resources from Python packages" optional = false python-versions = ">=3.8" groups = ["main"] -markers = "sys_platform == \"win32\" or sys_platform == \"emscripten\" or sys_platform != \"win32\" and sys_platform != \"emscripten\"" +markers = "(sys_platform == \"win32\" or sys_platform == \"emscripten\" or sys_platform != \"win32\" and sys_platform != \"emscripten\") and (python_version <= \"3.11\" or python_version >= \"3.12\")" files = [ {file = "importlib_resources-6.4.5-py3-none-any.whl", hash = "sha256:ac29d5f956f01d5e4bb63102a5a19957f1b9175e45649977264a1416783bb717"}, {file = "importlib_resources-6.4.5.tar.gz", hash = "sha256:980862a1d16c9e147a59603677fa2aa5fd82b87f223b6cb870695bcfce830065"}, @@ -1779,7 +1596,7 @@ description = "IPython: Productive Interactive Computing" optional = false python-versions = ">=3.10" groups = ["main"] -markers = "(sys_platform == \"win32\" or sys_platform == \"emscripten\" or sys_platform != \"win32\" and sys_platform != \"emscripten\") and 
python_version < \"3.11\"" +markers = "(sys_platform == \"win32\" or sys_platform == \"emscripten\" or sys_platform != \"win32\" and sys_platform != \"emscripten\") and (python_version <= \"3.11\" or python_version >= \"3.12\")" files = [ {file = "ipython-8.39.0-py3-none-any.whl", hash = "sha256:bb3c51c4fa8148ab1dea07a79584d1c854e234ea44aa1283bcb37bc75054651f"}, {file = "ipython-8.39.0.tar.gz", hash = "sha256:4110ae96012c379b8b6db898a07e186c40a2a1ef5d57a7fa83166047d9da7624"}, @@ -1801,7 +1618,7 @@ typing_extensions = {version = ">=4.6", markers = "python_version < \"3.12\""} [package.extras] all = ["ipython[black,doc,kernel,matplotlib,nbconvert,nbformat,notebook,parallel,qtconsole]", "ipython[test,test-extra]"] black = ["black"] -doc = ["docrepr", "exceptiongroup", "intersphinx_registry", "ipykernel", "ipython[test]", "matplotlib", "setuptools (>=18.5)", "sphinx (>=1.3)", "sphinx-rtd-theme", "sphinxcontrib-jquery", "tomli ; python_version < \"3.11\"", "typing_extensions"] +doc = ["docrepr", "exceptiongroup", "intersphinx_registry", "ipykernel", "ipython[test]", "matplotlib", "setuptools (>=18.5)", "sphinx (>=1.3)", "sphinx-rtd-theme", "sphinxcontrib-jquery", "tomli", "typing_extensions"] kernel = ["ipykernel"] matplotlib = ["matplotlib"] nbconvert = ["nbconvert"] @@ -1812,57 +1629,6 @@ qtconsole = ["qtconsole"] test = ["packaging", "pickleshare", "pytest", "pytest-asyncio (<0.22)", "testpath"] test-extra = ["curio", "ipython[test]", "jupyter_ai", "matplotlib (!=3.2.0)", "nbformat", "numpy (>=1.23)", "pandas", "trio"] -[[package]] -name = "ipython" -version = "9.13.0" -description = "IPython: Productive Interactive Computing" -optional = false -python-versions = ">=3.11" -groups = ["main"] -markers = "(sys_platform == \"win32\" or sys_platform == \"emscripten\" or sys_platform != \"win32\" and sys_platform != \"emscripten\") and python_version >= \"3.11\"" -files = [ - {file = "ipython-9.13.0-py3-none-any.whl", hash = 
"sha256:57f9d4639e20818d328d287c7b549af3d05f12486ea8f2e7f73e52a36ec4d201"}, - {file = "ipython-9.13.0.tar.gz", hash = "sha256:7e834b6afc99f020e3f05966ced34792f40267d64cb1ea9043886dab0dde5967"}, -] - -[package.dependencies] -colorama = {version = ">=0.4.4", markers = "sys_platform == \"win32\""} -decorator = ">=5.1.0" -ipython-pygments-lexers = ">=1.0.0" -jedi = ">=0.18.2" -matplotlib-inline = ">=0.1.6" -pexpect = {version = ">4.6", markers = "sys_platform != \"win32\" and sys_platform != \"emscripten\""} -prompt_toolkit = ">=3.0.41,<3.1.0" -psutil = ">=7" -pygments = ">=2.14.0" -stack_data = ">=0.6.0" -traitlets = ">=5.13.0" -typing_extensions = {version = ">=4.6", markers = "python_version < \"3.12\""} - -[package.extras] -all = ["argcomplete (>=3.0)", "ipython[doc,matplotlib,terminal,test,test-extra]", "types-decorator"] -black = ["black"] -doc = ["docrepr", "exceptiongroup", "intersphinx_registry", "ipykernel", "ipython[matplotlib,test]", "setuptools (>=80.0)", "sphinx (>=8.0)", "sphinx-rtd-theme (>=0.1.8)", "sphinx_toml (==0.0.4)", "typing_extensions"] -matplotlib = ["matplotlib (>3.9)"] -test = ["packaging (>=23.0.0)", "pytest (>=7.0.0)", "pytest-asyncio (>=1.0.0)", "setuptools (>=80.0)", "testpath (>=0.2)"] -test-extra = ["curio", "ipykernel (>6.30)", "ipython[matplotlib]", "ipython[test]", "jupyter_ai", "nbclient", "nbformat", "numpy (>=2.0)", "pandas (>2.1)", "trio (>=0.22.0)"] - -[[package]] -name = "ipython-pygments-lexers" -version = "1.1.1" -description = "Defines a variety of Pygments lexers for highlighting IPython code." 
-optional = false -python-versions = ">=3.8" -groups = ["main"] -markers = "(sys_platform == \"win32\" or sys_platform == \"emscripten\" or sys_platform != \"win32\" and sys_platform != \"emscripten\") and python_version >= \"3.11\"" -files = [ - {file = "ipython_pygments_lexers-1.1.1-py3-none-any.whl", hash = "sha256:a9462224a505ade19a605f71f8fa63c2048833ce50abc86768a0d81d876dc81c"}, - {file = "ipython_pygments_lexers-1.1.1.tar.gz", hash = "sha256:09c0138009e56b6854f9535736f4171d855c8c08a563a0dcd8022f78355c7e81"}, -] - -[package.dependencies] -pygments = "*" - [[package]] name = "jaraco-classes" version = "3.4.0" @@ -1870,7 +1636,7 @@ description = "Utility functions for Python class constructs" optional = false python-versions = ">=3.8" groups = ["main"] -markers = "sys_platform == \"win32\" or sys_platform == \"emscripten\" or sys_platform != \"win32\" and sys_platform != \"emscripten\"" +markers = "(sys_platform == \"win32\" or sys_platform == \"emscripten\" or sys_platform != \"win32\" and sys_platform != \"emscripten\") and (python_version <= \"3.11\" or python_version >= \"3.12\")" files = [ {file = "jaraco.classes-3.4.0-py3-none-any.whl", hash = "sha256:f662826b6bed8cace05e7ff873ce0f9283b5c924470fe664fff1c2f00f581790"}, {file = "jaraco.classes-3.4.0.tar.gz", hash = "sha256:47a024b51d0239c0dd8c8540c6c7f484be3b8fcf0b2d85c13825780d3b3f3acd"}, @@ -1890,7 +1656,7 @@ description = "Useful decorators and context managers" optional = false python-versions = ">=3.10" groups = ["main"] -markers = "sys_platform == \"win32\" or sys_platform == \"emscripten\" or sys_platform != \"win32\" and sys_platform != \"emscripten\"" +markers = "(sys_platform == \"win32\" or sys_platform == \"emscripten\" or sys_platform != \"win32\" and sys_platform != \"emscripten\") and (python_version <= \"3.11\" or python_version >= \"3.12\")" files = [ {file = "jaraco_context-6.1.2-py3-none-any.whl", hash = "sha256:bf8150b79a2d5d91ae48629d8b427a8f7ba0e1097dd6202a9059f29a36379535"}, {file = 
"jaraco_context-6.1.2.tar.gz", hash = "sha256:f1a6c9d391e661cc5b8d39861ff077a7dc24dc23833ccee564b234b81c82dfe3"}, @@ -1914,7 +1680,7 @@ description = "Functools like those found in stdlib" optional = false python-versions = ">=3.9" groups = ["main"] -markers = "sys_platform == \"win32\" or sys_platform == \"emscripten\" or sys_platform != \"win32\" and sys_platform != \"emscripten\"" +markers = "(sys_platform == \"win32\" or sys_platform == \"emscripten\" or sys_platform != \"win32\" and sys_platform != \"emscripten\") and (python_version <= \"3.11\" or python_version >= \"3.12\")" files = [ {file = "jaraco_functools-4.4.0-py3-none-any.whl", hash = "sha256:9eec1e36f45c818d9bf307c8948eb03b2b56cd44087b3cdc989abca1f20b9176"}, {file = "jaraco_functools-4.4.0.tar.gz", hash = "sha256:da21933b0417b89515562656547a77b4931f98176eb173644c0d35032a33d6bb"}, @@ -1938,7 +1704,7 @@ description = "An autocompletion tool for Python that can be used for text edito optional = false python-versions = ">=3.10" groups = ["main"] -markers = "sys_platform == \"win32\" or sys_platform == \"emscripten\" or sys_platform != \"win32\" and sys_platform != \"emscripten\"" +markers = "(sys_platform == \"win32\" or sys_platform == \"emscripten\" or sys_platform != \"win32\" and sys_platform != \"emscripten\") and (python_version <= \"3.11\" or python_version >= \"3.12\")" files = [ {file = "jedi-0.20.0-py2.py3-none-any.whl", hash = "sha256:7bdd9c2634f56713299976f4cbd59cb3fa92165cc5e05ea811fb253480728b67"}, {file = "jedi-0.20.0.tar.gz", hash = "sha256:c3f4ccbd276696f4b19c54618d4fb18f9fc24b0aef02acf704b23f487daa1011"}, @@ -1958,7 +1724,7 @@ description = "Low-level, pure Python DBus protocol wrapper." 
optional = false python-versions = ">=3.7" groups = ["main"] -markers = "sys_platform == \"linux\"" +markers = "sys_platform == \"linux\" and python_version <= \"3.11\" or sys_platform == \"linux\" and python_version >= \"3.12\"" files = [ {file = "jeepney-0.9.0-py3-none-any.whl", hash = "sha256:97e5714520c16fc0a45695e5365a2e11b81ea79bba796e26f9f1d178cb182683"}, {file = "jeepney-0.9.0.tar.gz", hash = "sha256:cf0e9e845622b81e4a28df94c40345400256ec608d0e55bb8a3feaa9163f5732"}, @@ -1975,7 +1741,7 @@ description = "A very fast and expressive template engine." optional = false python-versions = ">=3.7" groups = ["docs"] -markers = "sys_platform == \"win32\" or sys_platform == \"emscripten\" or sys_platform != \"win32\" and sys_platform != \"emscripten\"" +markers = "(sys_platform == \"win32\" or sys_platform == \"emscripten\" or sys_platform != \"win32\" and sys_platform != \"emscripten\") and (python_version <= \"3.11\" or python_version >= \"3.12\")" files = [ {file = "jinja2-3.1.6-py3-none-any.whl", hash = "sha256:85ece4451f492d0c13c5dd7c13a64681a86afae63a5f347908daf103ce6d2f67"}, {file = "jinja2-3.1.6.tar.gz", hash = "sha256:0137fb05990d35f1275a587e9aee6d56da821fc83491a0fb838183be43f66d6d"}, @@ -1994,7 +1760,7 @@ description = "JSON Matching Expressions" optional = false python-versions = ">=3.9" groups = ["main"] -markers = "sys_platform == \"win32\" or sys_platform == \"emscripten\" or sys_platform != \"win32\" and sys_platform != \"emscripten\"" +markers = "(sys_platform == \"win32\" or sys_platform == \"emscripten\" or sys_platform != \"win32\" and sys_platform != \"emscripten\") and (python_version <= \"3.11\" or python_version >= \"3.12\")" files = [ {file = "jmespath-1.1.0-py3-none-any.whl", hash = "sha256:a5663118de4908c91729bea0acadca56526eb2698e83de10cd116ae0f4e97c64"}, {file = "jmespath-1.1.0.tar.gz", hash = "sha256:472c87d80f36026ae83c6ddd0f1d05d4e510134ed462851fd5f754c8c3cbb88d"}, @@ -2007,7 +1773,7 @@ description = "Store and access your passwords 
safely." optional = false python-versions = ">=3.9" groups = ["main"] -markers = "sys_platform == \"win32\" or sys_platform == \"emscripten\" or sys_platform != \"win32\" and sys_platform != \"emscripten\"" +markers = "(sys_platform == \"win32\" or sys_platform == \"emscripten\" or sys_platform != \"win32\" and sys_platform != \"emscripten\") and (python_version <= \"3.11\" or python_version >= \"3.12\")" files = [ {file = "keyring-25.7.0-py3-none-any.whl", hash = "sha256:be4a0b195f149690c166e850609a477c532ddbfbaed96a404d4e43f8d5e2689f"}, {file = "keyring-25.7.0.tar.gz", hash = "sha256:fe01bd85eb3f8fb3dd0405defdeac9a5b4f6f0439edbb3149577f244a2e8245b"}, @@ -2038,7 +1804,7 @@ description = "A fast implementation of the Cassowary constraint solver" optional = false python-versions = ">=3.10" groups = ["main"] -markers = "sys_platform == \"win32\" or sys_platform == \"emscripten\" or sys_platform != \"win32\" and sys_platform != \"emscripten\"" +markers = "(sys_platform == \"win32\" or sys_platform == \"emscripten\" or sys_platform != \"win32\" and sys_platform != \"emscripten\") and (python_version <= \"3.11\" or python_version >= \"3.12\")" files = [ {file = "kiwisolver-1.5.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:32cc0a5365239a6ea0c6ed461e8838d053b57e397443c0ca894dcc8e388d4374"}, {file = "kiwisolver-1.5.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:cc0b66c1eec9021353a4b4483afb12dfd50e3669ffbb9152d6842eb34c7e29fd"}, @@ -2166,7 +1932,7 @@ description = "Python implementation of John Gruber's Markdown." 
optional = false python-versions = ">=3.10" groups = ["main"] -markers = "sys_platform == \"win32\" or sys_platform == \"emscripten\" or sys_platform != \"win32\" and sys_platform != \"emscripten\"" +markers = "(sys_platform == \"win32\" or sys_platform == \"emscripten\" or sys_platform != \"win32\" and sys_platform != \"emscripten\") and (python_version <= \"3.11\" or python_version >= \"3.12\")" files = [ {file = "markdown-3.10.2-py3-none-any.whl", hash = "sha256:e91464b71ae3ee7afd3017d9f358ef0baf158fd9a298db92f1d4761133824c36"}, {file = "markdown-3.10.2.tar.gz", hash = "sha256:994d51325d25ad8aa7ce4ebaec003febcce822c3f8c911e3b17c52f7f589f950"}, @@ -2183,7 +1949,7 @@ description = "Python port of markdown-it. Markdown parsing, done right!" optional = false python-versions = ">=3.10" groups = ["coverage"] -markers = "sys_platform == \"win32\" or sys_platform == \"emscripten\" or sys_platform != \"win32\" and sys_platform != \"emscripten\"" +markers = "(sys_platform == \"win32\" or sys_platform == \"emscripten\" or sys_platform != \"win32\" and sys_platform != \"emscripten\") and (python_version <= \"3.11\" or python_version >= \"3.12\")" files = [ {file = "markdown_it_py-4.0.0-py3-none-any.whl", hash = "sha256:87327c59b172c5011896038353a81343b6754500a08cd7a4973bb48c6d578147"}, {file = "markdown_it_py-4.0.0.tar.gz", hash = "sha256:cb0a2b4aa34f932c007117b194e945bd74e0ec24133ceb5bac59009cda1cb9f3"}, @@ -2208,7 +1974,7 @@ description = "Safely add untrusted strings to HTML/XML markup." 
optional = false python-versions = ">=3.9" groups = ["docs"] -markers = "sys_platform == \"win32\" or sys_platform == \"emscripten\" or sys_platform != \"win32\" and sys_platform != \"emscripten\"" +markers = "(sys_platform == \"win32\" or sys_platform == \"emscripten\" or sys_platform != \"win32\" and sys_platform != \"emscripten\") and (python_version <= \"3.11\" or python_version >= \"3.12\")" files = [ {file = "markupsafe-3.0.3-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:2f981d352f04553a7171b8e44369f2af4055f888dfb147d55e42d29e29e74559"}, {file = "markupsafe-3.0.3-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:e1c1493fb6e50ab01d20a22826e57520f1284df32f2d8601fdd90b6304601419"}, @@ -2308,7 +2074,7 @@ description = "Python plotting package" optional = false python-versions = ">=3.10" groups = ["main"] -markers = "sys_platform == \"win32\" or sys_platform == \"emscripten\" or sys_platform != \"win32\" and sys_platform != \"emscripten\"" +markers = "(sys_platform == \"win32\" or sys_platform == \"emscripten\" or sys_platform != \"win32\" and sys_platform != \"emscripten\") and (python_version <= \"3.11\" or python_version >= \"3.12\")" files = [ {file = "matplotlib-3.10.9-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:77210dce9cb8153dffc967efaae990543392563d5a376d4dd8539bebcb0ed217"}, {file = "matplotlib-3.10.9-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:1e7698ac9868428e84d2c967424803b2472ff7167d9d6590d4204ed775343c3b"}, @@ -2388,7 +2154,7 @@ description = "Inline Matplotlib backend for Jupyter" optional = false python-versions = ">=3.9" groups = ["main"] -markers = "sys_platform == \"win32\" or sys_platform == \"emscripten\" or sys_platform != \"win32\" and sys_platform != \"emscripten\"" +markers = "(sys_platform == \"win32\" or sys_platform == \"emscripten\" or sys_platform != \"win32\" and sys_platform != \"emscripten\") and (python_version <= \"3.11\" or python_version >= \"3.12\")" files = [ {file = 
"matplotlib_inline-0.2.1-py3-none-any.whl", hash = "sha256:d56ce5156ba6085e00a9d54fead6ed29a9c47e215cd1bba2e976ef39f5710a76"}, {file = "matplotlib_inline-0.2.1.tar.gz", hash = "sha256:e1ee949c340d771fc39e241ea75683deb94762c8fa5f2927ec57c83c4dffa9fe"}, @@ -2407,7 +2173,7 @@ description = "McCabe checker, plugin for flake8" optional = false python-versions = ">=3.6" groups = ["lint"] -markers = "sys_platform == \"win32\" or sys_platform == \"emscripten\" or sys_platform != \"win32\" and sys_platform != \"emscripten\"" +markers = "(sys_platform == \"win32\" or sys_platform == \"emscripten\" or sys_platform != \"win32\" and sys_platform != \"emscripten\") and (python_version <= \"3.11\" or python_version >= \"3.12\")" files = [ {file = "mccabe-0.7.0-py2.py3-none-any.whl", hash = "sha256:6c2d30ab6be0e4a46919781807b4f0d834ebdd6c6e3dca0bda5a15f863427b6e"}, {file = "mccabe-0.7.0.tar.gz", hash = "sha256:348e0240c33b60bbdf4e523192ef919f28cb2c3d7d5c7794f74009290f236325"}, @@ -2420,7 +2186,7 @@ description = "Markdown URL utilities" optional = false python-versions = ">=3.7" groups = ["coverage"] -markers = "sys_platform == \"win32\" or sys_platform == \"emscripten\" or sys_platform != \"win32\" and sys_platform != \"emscripten\"" +markers = "(sys_platform == \"win32\" or sys_platform == \"emscripten\" or sys_platform != \"win32\" and sys_platform != \"emscripten\") and (python_version <= \"3.11\" or python_version >= \"3.12\")" files = [ {file = "mdurl-0.1.2-py3-none-any.whl", hash = "sha256:84008a41e51615a49fc9966191ff91509e3c40b939176e643fd50a5c2196b8f8"}, {file = "mdurl-0.1.2.tar.gz", hash = "sha256:bb413d29f5eea38f31dd4754dd7377d4465116fb207585f97bf925588687c1ba"}, @@ -2433,7 +2199,7 @@ description = "More routines for operating on iterables, beyond itertools" optional = false python-versions = ">=3.10" groups = ["main"] -markers = "sys_platform == \"win32\" or sys_platform == \"emscripten\" or sys_platform != \"win32\" and sys_platform != \"emscripten\"" +markers = 
"(sys_platform == \"win32\" or sys_platform == \"emscripten\" or sys_platform != \"win32\" and sys_platform != \"emscripten\") and (python_version <= \"3.11\" or python_version >= \"3.12\")" files = [ {file = "more_itertools-11.0.2-py3-none-any.whl", hash = "sha256:6e35b35f818b01f691643c6c611bc0902f2e92b46c18fffa77ae1e7c46e912e4"}, {file = "more_itertools-11.0.2.tar.gz", hash = "sha256:392a9e1e362cbc106a2457d37cabf9b36e5e12efd4ebff1654630e76597df804"}, @@ -2446,7 +2212,7 @@ description = "A package for multi-dimensional contiguous and non-contiguous coo optional = false python-versions = ">=3.10" groups = ["main"] -markers = "(sys_platform == \"win32\" or sys_platform == \"emscripten\" or sys_platform != \"win32\" and sys_platform != \"emscripten\") and python_version < \"3.11\"" +markers = "(sys_platform == \"win32\" or sys_platform == \"emscripten\" or sys_platform != \"win32\" and sys_platform != \"emscripten\") and (python_version <= \"3.11\" or python_version >= \"3.12\")" files = [ {file = "ndcube-2.3.5-py3-none-any.whl", hash = "sha256:5a8bafd09ba54be4207e32dc059d80303c58f2d8128fec8bdd983a2c2ceaa31f"}, {file = "ndcube-2.3.5.tar.gz", hash = "sha256:44f643ca5eaf492d4df6c6129bb49bfdf8d009e31a3968a505f5c9af7eb47567"}, @@ -2466,38 +2232,6 @@ plotting = ["matplotlib (>=3.5.0)", "mpl_animators (>=1.0)"] reproject = ["reproject (>=0.7.1)"] tests = ["dask", "pytest", "pytest-astropy", "pytest-cov", "pytest-doctestplus", "pytest-memray", "pytest-mpl (>=0.12)", "pytest-xdist", "scipy", "specutils", "sunpy (>=5.0.0)"] -[[package]] -name = "ndcube" -version = "2.4.0" -description = "A package for multi-dimensional contiguous and non-contiguous coordinate aware arrays." 
-optional = false -python-versions = ">=3.11" -groups = ["main"] -markers = "(sys_platform == \"win32\" or sys_platform == \"emscripten\" or sys_platform != \"win32\" and sys_platform != \"emscripten\") and python_version >= \"3.11\"" -files = [ - {file = "ndcube-2.4.0-py3-none-any.whl", hash = "sha256:c572c8e03b58ce69e23cce36e08b0c1f285657df38ed0ce37ef34e163af5c0e0"}, - {file = "ndcube-2.4.0.tar.gz", hash = "sha256:1cc77afdf7056c7619d2a973117bd8ea7ae2977ee0b8c9b663498374e5ff848c"}, -] - -[package.dependencies] -astropy = ">=6.1.0" -gwcs = ">=0.21.0" -numpy = ">=1.26.0" -scipy = ">=1.12.0" - -[package.extras] -all = ["ndcube[plotting,reproject]"] -asdf = ["asdf (>=3.1.0)", "asdf-astropy (>=0.7.0)"] -asdf-tests = ["ndcube[asdf]", "pytest-asdf-plugin"] -dev = ["ndcube[asdf,docs,plotting,reproject,tests-only]"] -docs = ["ndcube[all,tests-optional]", "packaging", "sphinx", "sphinx-automodapi", "sphinx-changelog (>=1.5.0)", "sphinx-gallery", "sphinxext-opengraph", "sunpy-sphinx-theme"] -plotting = ["matplotlib (>=3.9.0)", "mpl_animators (>=1.2)"] -reproject = ["reproject (>=0.14)"] -tests = ["ndcube[asdf-tests,plotting,reproject,tests-only,tests-optional]"] -tests-minimal = ["ndcube[plotting,reproject,tests-only]", "sunpy (>=6.1.0)"] -tests-only = ["pytest", "pytest-astropy", "pytest-cov", "pytest-doctestplus", "pytest-memray ; sys_platform != \"win32\"", "pytest-mpl (>=0.12)", "pytest-xdist"] -tests-optional = ["dask (>=2024.1.0)", "specutils (>=1.13.0)", "sunpy (>=6.1.0)"] - [[package]] name = "numpy" version = "2.1.3" @@ -2505,7 +2239,7 @@ description = "Fundamental package for array computing in Python" optional = false python-versions = ">=3.10" groups = ["main"] -markers = "sys_platform == \"win32\" or sys_platform == \"emscripten\" or sys_platform != \"win32\" and sys_platform != \"emscripten\"" +markers = "(sys_platform == \"win32\" or sys_platform == \"emscripten\" or sys_platform != \"win32\" and sys_platform != \"emscripten\") and (python_version <= \"3.11\" 
or python_version >= \"3.12\")" files = [ {file = "numpy-2.1.3-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:c894b4305373b9c5576d7a12b473702afdf48ce5369c074ba304cc5ad8730dff"}, {file = "numpy-2.1.3-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:b47fbb433d3260adcd51eb54f92a2ffbc90a4595f8970ee00e064c644ac788f5"}, @@ -2571,7 +2305,7 @@ description = "Core utilities for Python packages" optional = false python-versions = ">=3.8" groups = ["main", "docs"] -markers = "sys_platform == \"win32\" or sys_platform == \"emscripten\" or sys_platform != \"win32\" and sys_platform != \"emscripten\"" +markers = "(sys_platform == \"win32\" or sys_platform == \"emscripten\" or sys_platform != \"win32\" and sys_platform != \"emscripten\") and (python_version <= \"3.11\" or python_version >= \"3.12\")" files = [ {file = "packaging-26.2-py3-none-any.whl", hash = "sha256:5fc45236b9446107ff2415ce77c807cee2862cb6fac22b8a73826d0693b0980e"}, {file = "packaging-26.2.tar.gz", hash = "sha256:ff452ff5a3e828ce110190feff1178bb1f2ea2281fa2075aadb987c2fb221661"}, @@ -2584,7 +2318,7 @@ description = "Powerful data structures for data analysis, time series, and stat optional = false python-versions = ">=3.9" groups = ["main"] -markers = "(sys_platform == \"win32\" or sys_platform == \"emscripten\" or sys_platform != \"win32\" and sys_platform != \"emscripten\") and python_version < \"3.11\"" +markers = "(sys_platform == \"win32\" or sys_platform == \"emscripten\" or sys_platform != \"win32\" and sys_platform != \"emscripten\") and (python_version <= \"3.11\" or python_version >= \"3.12\")" files = [ {file = "pandas-2.3.3-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:376c6446ae31770764215a6c937f72d917f214b43560603cd60da6408f183b6c"}, {file = "pandas-2.3.3-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:e19d192383eab2f4ceb30b412b22ea30690c9e618f78870357ae1d682912015a"}, @@ -2644,7 +2378,11 @@ files = [ ] [package.dependencies] -numpy = {version = ">=1.22.4", markers = "python_version 
< \"3.11\""} +numpy = [ + {version = ">=1.22.4", markers = "python_version < \"3.11\""}, + {version = ">=1.23.2", markers = "python_version == \"3.11\""}, + {version = ">=1.26.0", markers = "python_version >= \"3.12\""}, +] python-dateutil = ">=2.8.2" pytz = ">=2020.1" tzdata = ">=2022.7" @@ -2674,96 +2412,6 @@ sql-other = ["SQLAlchemy (>=2.0.0)", "adbc-driver-postgresql (>=0.8.0)", "adbc-d test = ["hypothesis (>=6.46.1)", "pytest (>=7.3.2)", "pytest-xdist (>=2.2.0)"] xml = ["lxml (>=4.9.2)"] -[[package]] -name = "pandas" -version = "3.0.2" -description = "Powerful data structures for data analysis, time series, and statistics" -optional = false -python-versions = ">=3.11" -groups = ["main"] -markers = "(sys_platform == \"win32\" or sys_platform == \"emscripten\" or sys_platform != \"win32\" and sys_platform != \"emscripten\") and python_version >= \"3.11\"" -files = [ - {file = "pandas-3.0.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:a727a73cbdba2f7458dc82449e2315899d5140b449015d822f515749a46cbbe0"}, - {file = "pandas-3.0.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:dbbd4aa20ca51e63b53bbde6a0fa4254b1aaabb74d2f542df7a7959feb1d760c"}, - {file = "pandas-3.0.2-cp311-cp311-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:339dda302bd8369dedeae979cb750e484d549b563c3f54f3922cb8ff4978c5eb"}, - {file = "pandas-3.0.2-cp311-cp311-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:61c2fd96d72b983a9891b2598f286befd4ad262161a609c92dc1652544b46b76"}, - {file = "pandas-3.0.2-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:c934008c733b8bbea273ea308b73b3156f0181e5b72960790b09c18a2794fe1e"}, - {file = "pandas-3.0.2-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:60a80bb4feacbef5e1447a3f82c33209c8b7e07f28d805cfd1fb951e5cb443aa"}, - {file = "pandas-3.0.2-cp311-cp311-win_amd64.whl", hash = "sha256:ed72cb3f45190874eb579c64fa92d9df74e98fd63e2be7f62bce5ace0ade61df"}, - {file = "pandas-3.0.2-cp311-cp311-win_arm64.whl", hash = 
"sha256:f12b1a9e332c01e09510586f8ca9b108fd631fd656af82e452d7315ef6df5f9f"}, - {file = "pandas-3.0.2-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:232a70ebb568c0c4d2db4584f338c1577d81e3af63292208d615907b698a0f18"}, - {file = "pandas-3.0.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:970762605cff1ca0d3f71ed4f3a769ea8f85fc8e6348f6e110b8fea7e6eb5a14"}, - {file = "pandas-3.0.2-cp312-cp312-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:aff4e6f4d722e0652707d7bcb190c445fe58428500c6d16005b02401764b1b3d"}, - {file = "pandas-3.0.2-cp312-cp312-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:ef8b27695c3d3dc78403c9a7d5e59a62d5464a7e1123b4e0042763f7104dc74f"}, - {file = "pandas-3.0.2-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:f8d68083e49e16b84734eb1a4dcae4259a75c90fb6e2251ab9a00b61120c06ab"}, - {file = "pandas-3.0.2-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:32cc41f310ebd4a296d93515fcac312216adfedb1894e879303987b8f1e2b97d"}, - {file = "pandas-3.0.2-cp312-cp312-win_amd64.whl", hash = "sha256:a4785e1d6547d8427c5208b748ae2efb64659a21bd82bf440d4262d02bfa02a4"}, - {file = "pandas-3.0.2-cp312-cp312-win_arm64.whl", hash = "sha256:08504503f7101300107ecdc8df73658e4347586db5cfdadabc1592e9d7e7a0fd"}, - {file = "pandas-3.0.2-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:b5918ba197c951dec132b0c5929a00c0bf05d5942f590d3c10a807f6e15a57d3"}, - {file = "pandas-3.0.2-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:d606a041c89c0a474a4702d532ab7e73a14fe35c8d427b972a625c8e46373668"}, - {file = "pandas-3.0.2-cp313-cp313-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:710246ba0616e86891b58ab95f2495143bb2bc83ab6b06747c74216f583a6ac9"}, - {file = "pandas-3.0.2-cp313-cp313-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:5d3cfe227c725b1f3dff4278b43d8c784656a42a9325b63af6b1492a8232209e"}, - {file = "pandas-3.0.2-cp313-cp313-musllinux_1_2_aarch64.whl", hash = 
"sha256:c3b723df9087a9a9a840e263ebd9f88b64a12075d1bf2ea401a5a42f254f084d"}, - {file = "pandas-3.0.2-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:a3096110bf9eac0070b7208465f2740e2d8a670d5cb6530b5bb884eca495fd39"}, - {file = "pandas-3.0.2-cp313-cp313-win_amd64.whl", hash = "sha256:07a10f5c36512eead51bc578eb3354ad17578b22c013d89a796ab5eee90cd991"}, - {file = "pandas-3.0.2-cp313-cp313-win_arm64.whl", hash = "sha256:5fdbfa05931071aba28b408e59226186b01eb5e92bea2ab78b65863ca3228d84"}, - {file = "pandas-3.0.2-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:dbc20dea3b9e27d0e66d74c42b2d0c1bed9c2ffe92adea33633e3bedeb5ac235"}, - {file = "pandas-3.0.2-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:b75c347eff42497452116ce05ef461822d97ce5b9ff8df6edacb8076092c855d"}, - {file = "pandas-3.0.2-cp313-cp313t-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:d1478075142e83a5571782ad007fb201ed074bdeac7ebcc8890c71442e96adf7"}, - {file = "pandas-3.0.2-cp313-cp313t-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:5880314e69e763d4c8b27937090de570f1fb8d027059a7ada3f7f8e98bdcb677"}, - {file = "pandas-3.0.2-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:b5329e26898896f06035241a626d7c335daa479b9bbc82be7c2742d048e41172"}, - {file = "pandas-3.0.2-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:81526c4afd31971f8b62671442a4b2b51e0aa9acc3819c9f0f12a28b6fcf85f1"}, - {file = "pandas-3.0.2-cp313-cp313t-win_amd64.whl", hash = "sha256:7cadd7e9a44ec13b621aec60f9150e744cfc7a3dd32924a7e2f45edff31823b0"}, - {file = "pandas-3.0.2-cp314-cp314-macosx_10_15_x86_64.whl", hash = "sha256:db0dbfd2a6cdf3770aa60464d50333d8f3d9165b2f2671bcc299b72de5a6677b"}, - {file = "pandas-3.0.2-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:0555c5882688a39317179ab4a0ed41d3ebc8812ab14c69364bbee8fb7a3f6288"}, - {file = "pandas-3.0.2-cp314-cp314-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = 
"sha256:01f31a546acd5574ef77fe199bc90b55527c225c20ccda6601cf6b0fd5ed597c"}, - {file = "pandas-3.0.2-cp314-cp314-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:deeca1b5a931fdf0c2212c8a659ade6d3b1edc21f0914ce71ef24456ca7a6535"}, - {file = "pandas-3.0.2-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:0f48afd9bb13300ffb5a3316973324c787054ba6665cda0da3fbd67f451995db"}, - {file = "pandas-3.0.2-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:6c4d8458b97a35717b62469a4ea0e85abd5ed8687277f5ccfc67f8a5126f8c53"}, - {file = "pandas-3.0.2-cp314-cp314-win_amd64.whl", hash = "sha256:b35d14bb5d8285d9494fe93815a9e9307c0876e10f1e8e89ac5b88f728ec8dcf"}, - {file = "pandas-3.0.2-cp314-cp314-win_arm64.whl", hash = "sha256:63d141b56ef686f7f0d714cfb8de4e320475b86bf4b620aa0b7da89af8cbdbbb"}, - {file = "pandas-3.0.2-cp314-cp314t-macosx_10_15_x86_64.whl", hash = "sha256:140f0cffb1fa2524e874dde5b477d9defe10780d8e9e220d259b2c0874c89d9d"}, - {file = "pandas-3.0.2-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:ae37e833ff4fed0ba352f6bdd8b73ba3ab3256a85e54edfd1ab51ae40cca0af8"}, - {file = "pandas-3.0.2-cp314-cp314t-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:4d888a5c678a419a5bb41a2a93818e8ed9fd3172246555c0b37b7cc27027effd"}, - {file = "pandas-3.0.2-cp314-cp314t-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:b444dc64c079e84df91baa8bf613d58405645461cabca929d9178f2cd392398d"}, - {file = "pandas-3.0.2-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:4544c7a54920de8eeacaa1466a6b7268ecfbc9bc64ab4dbb89c6bbe94d5e0660"}, - {file = "pandas-3.0.2-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:734be7551687c00fbd760dc0522ed974f82ad230d4a10f54bf51b80d44a08702"}, - {file = "pandas-3.0.2-cp314-cp314t-win_amd64.whl", hash = "sha256:57a07209bebcbcf768d2d13c9b78b852f9a15978dac41b9e6421a81ad4cdd276"}, - {file = "pandas-3.0.2-cp314-cp314t-win_arm64.whl", hash = 
"sha256:5371b72c2d4d415d08765f32d689217a43227484e81b2305b52076e328f6f482"}, - {file = "pandas-3.0.2.tar.gz", hash = "sha256:f4753e73e34c8d83221ba58f232433fca2748be8b18dbca02d242ed153945043"}, -] - -[package.dependencies] -numpy = {version = ">=1.26.0", markers = "python_version < \"3.14\""} -python-dateutil = ">=2.8.2" -tzdata = {version = "*", markers = "sys_platform == \"win32\" or sys_platform == \"emscripten\""} - -[package.extras] -all = ["PyQt5 (>=5.15.9)", "SQLAlchemy (>=2.0.36)", "adbc-driver-postgresql (>=1.2.0)", "adbc-driver-sqlite (>=1.2.0)", "beautifulsoup4 (>=4.12.3)", "bottleneck (>=1.4.2)", "fastparquet (>=2024.11.0)", "fsspec (>=2024.10.0)", "gcsfs (>=2024.10.0)", "html5lib (>=1.1)", "hypothesis (>=6.116.0)", "jinja2 (>=3.1.5)", "lxml (>=5.3.0)", "matplotlib (>=3.9.3)", "numba (>=0.60.0)", "numexpr (>=2.10.2)", "odfpy (>=1.4.1)", "openpyxl (>=3.1.5)", "psycopg2 (>=2.9.10)", "pyarrow (>=13.0.0)", "pyiceberg (>=0.8.1)", "pymysql (>=1.1.1)", "pyreadstat (>=1.2.8)", "pytest (>=8.3.4)", "pytest-xdist (>=3.6.1)", "python-calamine (>=0.3.0)", "pytz (>=2024.2)", "pyxlsb (>=1.0.10)", "qtpy (>=2.4.2)", "s3fs (>=2024.10.0)", "scipy (>=1.14.1)", "tables (>=3.10.1)", "tabulate (>=0.9.0)", "xarray (>=2024.10.0)", "xlrd (>=2.0.1)", "xlsxwriter (>=3.2.0)", "zstandard (>=0.23.0)"] -aws = ["s3fs (>=2024.10.0)"] -clipboard = ["PyQt5 (>=5.15.9)", "qtpy (>=2.4.2)"] -compression = ["zstandard (>=0.23.0)"] -computation = ["scipy (>=1.14.1)", "xarray (>=2024.10.0)"] -excel = ["odfpy (>=1.4.1)", "openpyxl (>=3.1.5)", "python-calamine (>=0.3.0)", "pyxlsb (>=1.0.10)", "xlrd (>=2.0.1)", "xlsxwriter (>=3.2.0)"] -feather = ["pyarrow (>=13.0.0)"] -fss = ["fsspec (>=2024.10.0)"] -gcp = ["gcsfs (>=2024.10.0)"] -hdf5 = ["tables (>=3.10.1)"] -html = ["beautifulsoup4 (>=4.12.3)", "html5lib (>=1.1)", "lxml (>=5.3.0)"] -iceberg = ["pyiceberg (>=0.8.1)"] -mysql = ["SQLAlchemy (>=2.0.36)", "pymysql (>=1.1.1)"] -output-formatting = ["jinja2 (>=3.1.5)", "tabulate (>=0.9.0)"] -parquet = 
["pyarrow (>=13.0.0)"] -performance = ["bottleneck (>=1.4.2)", "numba (>=0.60.0)", "numexpr (>=2.10.2)"] -plot = ["matplotlib (>=3.9.3)"] -postgresql = ["SQLAlchemy (>=2.0.36)", "adbc-driver-postgresql (>=1.2.0)", "psycopg2 (>=2.9.10)"] -pyarrow = ["pyarrow (>=13.0.0)"] -spss = ["pyreadstat (>=1.2.8)"] -sql-other = ["SQLAlchemy (>=2.0.36)", "adbc-driver-postgresql (>=1.2.0)", "adbc-driver-sqlite (>=1.2.0)"] -test = ["hypothesis (>=6.116.0)", "pytest (>=8.3.4)", "pytest-xdist (>=3.6.1)"] -timezone = ["pytz (>=2024.2)"] -xml = ["lxml (>=5.3.0)"] - [[package]] name = "parso" version = "0.8.7" @@ -2771,7 +2419,7 @@ description = "A Python Parser" optional = false python-versions = ">=3.6" groups = ["main"] -markers = "sys_platform == \"win32\" or sys_platform == \"emscripten\" or sys_platform != \"win32\" and sys_platform != \"emscripten\"" +markers = "(sys_platform == \"win32\" or sys_platform == \"emscripten\" or sys_platform != \"win32\" and sys_platform != \"emscripten\") and (python_version <= \"3.11\" or python_version >= \"3.12\")" files = [ {file = "parso-0.8.7-py2.py3-none-any.whl", hash = "sha256:a8926eb2a1b915486941fdbd31e86a4baf88fe8c210f25f2f35ecec5b574ca1c"}, {file = "parso-0.8.7.tar.gz", hash = "sha256:eaaac4c9fdd5e9e8852dc778d2d7405897ec510f2a298071453e5e3a07914bb1"}, @@ -2788,7 +2436,7 @@ description = "Pexpect allows easy control of interactive console applications." 
optional = false python-versions = "*" groups = ["main"] -markers = "sys_platform != \"win32\" and sys_platform != \"emscripten\"" +markers = "(sys_platform != \"win32\" and sys_platform != \"emscripten\") and python_version <= \"3.11\" or (sys_platform != \"win32\" and sys_platform != \"emscripten\") and python_version >= \"3.12\"" files = [ {file = "pexpect-4.9.0-py2.py3-none-any.whl", hash = "sha256:7236d1e080e4936be2dc3e326cec0af72acf9212a7e1d060210e70a47e253523"}, {file = "pexpect-4.9.0.tar.gz", hash = "sha256:ee7d41123f3c9911050ea2c2dac107568dc43b2d3b0c7557a33212c398ead30f"}, @@ -2804,7 +2452,7 @@ description = "Python Imaging Library (fork)" optional = false python-versions = ">=3.10" groups = ["main"] -markers = "sys_platform == \"win32\" or sys_platform == \"emscripten\" or sys_platform != \"win32\" and sys_platform != \"emscripten\"" +markers = "(sys_platform == \"win32\" or sys_platform == \"emscripten\" or sys_platform != \"win32\" and sys_platform != \"emscripten\") and (python_version <= \"3.11\" or python_version >= \"3.12\")" files = [ {file = "pillow-12.2.0-cp310-cp310-macosx_10_10_x86_64.whl", hash = "sha256:a4e8f36e677d3336f35089648c8955c51c6d386a13cf6ee9c189c5f5bd713a9f"}, {file = "pillow-12.2.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:2e589959f10d9824d39b350472b92f0ce3b443c0a3442ebf41c40cb8361c5b97"}, @@ -2914,7 +2562,7 @@ description = "An open-source, interactive data visualization library for Python optional = false python-versions = ">=3.8" groups = ["main"] -markers = "sys_platform == \"win32\" or sys_platform == \"emscripten\" or sys_platform != \"win32\" and sys_platform != \"emscripten\"" +markers = "(sys_platform == \"win32\" or sys_platform == \"emscripten\" or sys_platform != \"win32\" and sys_platform != \"emscripten\") and (python_version <= \"3.11\" or python_version >= \"3.12\")" files = [ {file = "plotly-5.24.1-py3-none-any.whl", hash = "sha256:f67073a1e637eb0dc3e46324d9d51e2fe76e9727c892dde64ddf1e1b51f29089"}, {file = 
"plotly-5.24.1.tar.gz", hash = "sha256:dbc8ac8339d248a4bcc36e08a5659bacfe1b079390b8953533f4eb22169b4bae"}, @@ -2931,7 +2579,7 @@ description = "Library for building powerful interactive command lines in Python optional = false python-versions = ">=3.8" groups = ["main"] -markers = "sys_platform == \"win32\" or sys_platform == \"emscripten\" or sys_platform != \"win32\" and sys_platform != \"emscripten\"" +markers = "(sys_platform == \"win32\" or sys_platform == \"emscripten\" or sys_platform != \"win32\" and sys_platform != \"emscripten\") and (python_version <= \"3.11\" or python_version >= \"3.12\")" files = [ {file = "prompt_toolkit-3.0.52-py3-none-any.whl", hash = "sha256:9aac639a3bbd33284347de5ad8d68ecc044b91a762dc39b7c21095fcd6a19955"}, {file = "prompt_toolkit-3.0.52.tar.gz", hash = "sha256:28cde192929c8e7321de85de1ddbe736f1375148b02f2e17edd840042b1be855"}, @@ -2940,50 +2588,14 @@ files = [ [package.dependencies] wcwidth = "*" -[[package]] -name = "psutil" -version = "7.2.2" -description = "Cross-platform lib for process and system monitoring." 
-optional = false -python-versions = ">=3.6" -groups = ["main"] -markers = "(sys_platform == \"win32\" or sys_platform == \"emscripten\" or sys_platform != \"win32\" and sys_platform != \"emscripten\") and python_version >= \"3.11\"" -files = [ - {file = "psutil-7.2.2-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:2edccc433cbfa046b980b0df0171cd25bcaeb3a68fe9022db0979e7aa74a826b"}, - {file = "psutil-7.2.2-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:e78c8603dcd9a04c7364f1a3e670cea95d51ee865e4efb3556a3a63adef958ea"}, - {file = "psutil-7.2.2-cp313-cp313t-manylinux2010_x86_64.manylinux_2_12_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:1a571f2330c966c62aeda00dd24620425d4b0cc86881c89861fbc04549e5dc63"}, - {file = "psutil-7.2.2-cp313-cp313t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:917e891983ca3c1887b4ef36447b1e0873e70c933afc831c6b6da078ba474312"}, - {file = "psutil-7.2.2-cp313-cp313t-win_amd64.whl", hash = "sha256:ab486563df44c17f5173621c7b198955bd6b613fb87c71c161f827d3fb149a9b"}, - {file = "psutil-7.2.2-cp313-cp313t-win_arm64.whl", hash = "sha256:ae0aefdd8796a7737eccea863f80f81e468a1e4cf14d926bd9b6f5f2d5f90ca9"}, - {file = "psutil-7.2.2-cp314-cp314t-macosx_10_15_x86_64.whl", hash = "sha256:eed63d3b4d62449571547b60578c5b2c4bcccc5387148db46e0c2313dad0ee00"}, - {file = "psutil-7.2.2-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:7b6d09433a10592ce39b13d7be5a54fbac1d1228ed29abc880fb23df7cb694c9"}, - {file = "psutil-7.2.2-cp314-cp314t-manylinux2010_x86_64.manylinux_2_12_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:1fa4ecf83bcdf6e6c8f4449aff98eefb5d0604bf88cb883d7da3d8d2d909546a"}, - {file = "psutil-7.2.2-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:e452c464a02e7dc7822a05d25db4cde564444a67e58539a00f929c51eddda0cf"}, - {file = "psutil-7.2.2-cp314-cp314t-win_amd64.whl", hash = 
"sha256:c7663d4e37f13e884d13994247449e9f8f574bc4655d509c3b95e9ec9e2b9dc1"}, - {file = "psutil-7.2.2-cp314-cp314t-win_arm64.whl", hash = "sha256:11fe5a4f613759764e79c65cf11ebdf26e33d6dd34336f8a337aa2996d71c841"}, - {file = "psutil-7.2.2-cp36-abi3-macosx_10_9_x86_64.whl", hash = "sha256:ed0cace939114f62738d808fdcecd4c869222507e266e574799e9c0faa17d486"}, - {file = "psutil-7.2.2-cp36-abi3-macosx_11_0_arm64.whl", hash = "sha256:1a7b04c10f32cc88ab39cbf606e117fd74721c831c98a27dc04578deb0c16979"}, - {file = "psutil-7.2.2-cp36-abi3-manylinux2010_x86_64.manylinux_2_12_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:076a2d2f923fd4821644f5ba89f059523da90dc9014e85f8e45a5774ca5bc6f9"}, - {file = "psutil-7.2.2-cp36-abi3-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:b0726cecd84f9474419d67252add4ac0cd9811b04d61123054b9fb6f57df6e9e"}, - {file = "psutil-7.2.2-cp36-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:fd04ef36b4a6d599bbdb225dd1d3f51e00105f6d48a28f006da7f9822f2606d8"}, - {file = "psutil-7.2.2-cp36-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:b58fabe35e80b264a4e3bb23e6b96f9e45a3df7fb7eed419ac0e5947c61e47cc"}, - {file = "psutil-7.2.2-cp37-abi3-win_amd64.whl", hash = "sha256:eb7e81434c8d223ec4a219b5fc1c47d0417b12be7ea866e24fb5ad6e84b3d988"}, - {file = "psutil-7.2.2-cp37-abi3-win_arm64.whl", hash = "sha256:8c233660f575a5a89e6d4cb65d9f938126312bca76d8fe087b947b3a1aaac9ee"}, - {file = "psutil-7.2.2.tar.gz", hash = "sha256:0746f5f8d406af344fd547f1c8daa5f5c33dbc293bb8d6a16d80b4bb88f59372"}, -] - -[package.extras] -dev = ["abi3audit", "black", "check-manifest", "colorama ; os_name == \"nt\"", "coverage", "packaging", "psleak", "pylint", "pyperf", "pypinfo", "pyreadline3 ; os_name == \"nt\"", "pytest", "pytest-cov", "pytest-instafail", "pytest-xdist", "pywin32 ; os_name == \"nt\" and implementation_name != \"pypy\"", "requests", "rstcheck", "ruff", "setuptools", "sphinx", "sphinx_rtd_theme", "toml-sort", "twine", 
"validate-pyproject[all]", "virtualenv", "vulture", "wheel", "wheel ; os_name == \"nt\" and implementation_name != \"pypy\"", "wmi ; os_name == \"nt\" and implementation_name != \"pypy\""] -test = ["psleak", "pytest", "pytest-instafail", "pytest-xdist", "pywin32 ; os_name == \"nt\" and implementation_name != \"pypy\"", "setuptools", "wheel ; os_name == \"nt\" and implementation_name != \"pypy\"", "wmi ; os_name == \"nt\" and implementation_name != \"pypy\""] - [[package]] name = "psycopg2-binary" version = "2.9.12" description = "psycopg2 - Python-PostgreSQL Database Adapter" optional = false python-versions = ">=3.9" -groups = ["test"] -markers = "sys_platform == \"win32\" or sys_platform == \"emscripten\" or sys_platform != \"win32\" and sys_platform != \"emscripten\"" +groups = ["main", "test"] +markers = "(sys_platform == \"win32\" or sys_platform == \"emscripten\" or sys_platform != \"win32\" and sys_platform != \"emscripten\") and (python_version <= \"3.11\" or python_version >= \"3.12\")" files = [ {file = "psycopg2_binary-2.9.12-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:9b818ceff717f98851a64bffd4c5eb5b3059ae280276dcecc52ac658dcf006a4"}, {file = "psycopg2_binary-2.9.12-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:d2fa0d7caca8635c56e373055094eeda3208d901d55dd0ff5abc1d4e47f82b56"}, @@ -3061,7 +2673,7 @@ description = "Run a subprocess in a pseudo terminal" optional = false python-versions = "*" groups = ["main"] -markers = "sys_platform != \"win32\" and sys_platform != \"emscripten\"" +markers = "(sys_platform != \"win32\" and sys_platform != \"emscripten\") and python_version <= \"3.11\" or (sys_platform != \"win32\" and sys_platform != \"emscripten\") and python_version >= \"3.12\"" files = [ {file = "ptyprocess-0.7.0-py2.py3-none-any.whl", hash = "sha256:4b41f3967fce3af57cc7e94b888626c18bf37a083e3651ca8feeb66d492fef35"}, {file = "ptyprocess-0.7.0.tar.gz", hash = "sha256:5c5d0a3b48ceee0b48485e0c26037c0acd7d29765ca3fbb5cb3831d347423220"}, @@ 
-3074,7 +2686,7 @@ description = "Safely evaluate AST nodes without side effects" optional = false python-versions = "*" groups = ["main"] -markers = "sys_platform == \"win32\" or sys_platform == \"emscripten\" or sys_platform != \"win32\" and sys_platform != \"emscripten\"" +markers = "(sys_platform == \"win32\" or sys_platform == \"emscripten\" or sys_platform != \"win32\" and sys_platform != \"emscripten\") and (python_version <= \"3.11\" or python_version >= \"3.12\")" files = [ {file = "pure_eval-0.2.3-py3-none-any.whl", hash = "sha256:1db8e35b67b3d218d818ae653e27f06c3aa420901fa7b081ca98cbedc874e0d0"}, {file = "pure_eval-0.2.3.tar.gz", hash = "sha256:5f4e983f40564c576c7c8635ae88db5956bb2229d7e9237d03b3c0b0190eaf42"}, @@ -3090,7 +2702,7 @@ description = "Python style guide checker" optional = false python-versions = ">=3.9" groups = ["lint"] -markers = "sys_platform == \"win32\" or sys_platform == \"emscripten\" or sys_platform != \"win32\" and sys_platform != \"emscripten\"" +markers = "(sys_platform == \"win32\" or sys_platform == \"emscripten\" or sys_platform != \"win32\" and sys_platform != \"emscripten\") and (python_version <= \"3.11\" or python_version >= \"3.12\")" files = [ {file = "pycodestyle-2.14.0-py2.py3-none-any.whl", hash = "sha256:dd6bf7cb4ee77f8e016f9c8e74a35ddd9f67e1d5fd4184d86c3b98e07099f42d"}, {file = "pycodestyle-2.14.0.tar.gz", hash = "sha256:c4b5b517d278089ff9d0abdec919cd97262a3367449ea1c8b49b91529167b783"}, @@ -3103,7 +2715,7 @@ description = "C parser in Python" optional = false python-versions = ">=3.10" groups = ["main"] -markers = "platform_python_implementation != \"PyPy\" and implementation_name != \"PyPy\" and (sys_platform == \"win32\" or sys_platform == \"emscripten\" or sys_platform != \"win32\" and sys_platform != \"emscripten\")" +markers = "(sys_platform == \"win32\" or sys_platform == \"emscripten\" or sys_platform != \"win32\" and sys_platform != \"emscripten\") and (python_version <= \"3.11\" or python_version >= 
\"3.12\") and platform_python_implementation != \"PyPy\" and implementation_name != \"PyPy\"" files = [ {file = "pycparser-3.0-py3-none-any.whl", hash = "sha256:b727414169a36b7d524c1c3e31839a521725078d7b2ff038656844266160a992"}, {file = "pycparser-3.0.tar.gz", hash = "sha256:600f49d217304a5902ac3c37e1281c9fe94e4d0489de643a9504c5cdfdfc6b29"}, @@ -3116,7 +2728,7 @@ description = "Python bindings for ERFA" optional = false python-versions = ">=3.9" groups = ["main"] -markers = "sys_platform == \"win32\" or sys_platform == \"emscripten\" or sys_platform != \"win32\" and sys_platform != \"emscripten\"" +markers = "(sys_platform == \"win32\" or sys_platform == \"emscripten\" or sys_platform != \"win32\" and sys_platform != \"emscripten\") and (python_version <= \"3.11\" or python_version >= \"3.12\")" files = [ {file = "pyerfa-2.0.1.5-cp39-abi3-macosx_10_9_x86_64.whl", hash = "sha256:b282d7c60c4c47cf629c484c17ac504fcb04abd7b3f4dfcf53ee042afc3a5944"}, {file = "pyerfa-2.0.1.5-cp39-abi3-macosx_11_0_arm64.whl", hash = "sha256:be1aeb70390dd03a34faf96749d5cabc58437410b4aab7213c512323932427df"}, @@ -3145,7 +2757,7 @@ description = "passive checker of Python programs" optional = false python-versions = ">=3.9" groups = ["lint"] -markers = "sys_platform == \"win32\" or sys_platform == \"emscripten\" or sys_platform != \"win32\" and sys_platform != \"emscripten\"" +markers = "(sys_platform == \"win32\" or sys_platform == \"emscripten\" or sys_platform != \"win32\" and sys_platform != \"emscripten\") and (python_version <= \"3.11\" or python_version >= \"3.12\")" files = [ {file = "pyflakes-3.4.0-py2.py3-none-any.whl", hash = "sha256:f742a7dbd0d9cb9ea41e9a24a918996e8170c799fa528688d40dd582c8265f4f"}, {file = "pyflakes-3.4.0.tar.gz", hash = "sha256:b24f96fafb7d2ab0ec5075b7350b3d2d2218eab42003821c06344973d3ea2f58"}, @@ -3158,7 +2770,7 @@ description = "Pygments is a syntax highlighting package written in Python." 
optional = false python-versions = ">=3.9" groups = ["main", "coverage", "docs"] -markers = "sys_platform == \"win32\" or sys_platform == \"emscripten\" or sys_platform != \"win32\" and sys_platform != \"emscripten\"" +markers = "(sys_platform == \"win32\" or sys_platform == \"emscripten\" or sys_platform != \"win32\" and sys_platform != \"emscripten\") and (python_version <= \"3.11\" or python_version >= \"3.12\")" files = [ {file = "pygments-2.20.0-py3-none-any.whl", hash = "sha256:81a9e26dd42fd28a23a2d169d86d7ac03b46e2f8b59ed4698fb4785f946d0176"}, {file = "pygments-2.20.0.tar.gz", hash = "sha256:6757cd03768053ff99f3039c1a36d6c0aa0b263438fcab17520b30a303a82b5f"}, @@ -3174,7 +2786,7 @@ description = "pyparsing - Classes and methods to define and execute parsing gra optional = false python-versions = ">=3.9" groups = ["main"] -markers = "sys_platform == \"win32\" or sys_platform == \"emscripten\" or sys_platform != \"win32\" and sys_platform != \"emscripten\"" +markers = "(sys_platform == \"win32\" or sys_platform == \"emscripten\" or sys_platform != \"win32\" and sys_platform != \"emscripten\") and (python_version <= \"3.11\" or python_version >= \"3.12\")" files = [ {file = "pyparsing-3.3.2-py3-none-any.whl", hash = "sha256:850ba148bd908d7e2411587e247a1e4f0327839c40e2e5e6d05a007ecc69911d"}, {file = "pyparsing-3.3.2.tar.gz", hash = "sha256:c777f4d763f140633dcb6d8a3eda953bf7a214dc4eff598413c070bcdc117cbc"}, @@ -3190,7 +2802,7 @@ description = "Extensions to the standard Python datetime module" optional = false python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,>=2.7" groups = ["main"] -markers = "sys_platform == \"win32\" or sys_platform == \"emscripten\" or sys_platform != \"win32\" and sys_platform != \"emscripten\"" +markers = "(sys_platform == \"win32\" or sys_platform == \"emscripten\" or sys_platform != \"win32\" and sys_platform != \"emscripten\") and (python_version <= \"3.11\" or python_version >= \"3.12\")" files = [ {file = "python-dateutil-2.9.0.post0.tar.gz", 
hash = "sha256:37dd54208da7e1cd875388217d5e00ebd4179249f90fb72437e91a35459a0ad3"}, {file = "python_dateutil-2.9.0.post0-py2.py3-none-any.whl", hash = "sha256:a8b2bc7bffae282281c8140a97d3aa9c14da0b136dfe83f850eea9a5f7470427"}, @@ -3206,7 +2818,7 @@ description = "World timezone definitions, modern and historical" optional = false python-versions = "*" groups = ["main"] -markers = "sys_platform == \"win32\" or sys_platform == \"emscripten\" or sys_platform != \"win32\" and sys_platform != \"emscripten\"" +markers = "(sys_platform == \"win32\" or sys_platform == \"emscripten\" or sys_platform != \"win32\" and sys_platform != \"emscripten\") and (python_version <= \"3.11\" or python_version >= \"3.12\")" files = [ {file = "pytz-2026.1.post1-py2.py3-none-any.whl", hash = "sha256:f2fd16142fda348286a75e1a524be810bb05d444e5a081f37f7affc635035f7a"}, {file = "pytz-2026.1.post1.tar.gz", hash = "sha256:3378dde6a0c3d26719182142c56e60c7f9af7e968076f31aae569d72a0358ee1"}, @@ -3219,7 +2831,7 @@ description = "Astropy affiliated package for accessing Virtual Observatory data optional = false python-versions = ">=3.9" groups = ["main"] -markers = "sys_platform == \"win32\" or sys_platform == \"emscripten\" or sys_platform != \"win32\" and sys_platform != \"emscripten\"" +markers = "(sys_platform == \"win32\" or sys_platform == \"emscripten\" or sys_platform != \"win32\" and sys_platform != \"emscripten\") and (python_version <= \"3.11\" or python_version >= \"3.12\")" files = [ {file = "pyvo-1.8.1-py3-none-any.whl", hash = "sha256:d6531288f928e149dc059fc3ef56cf43bda0acff8edb1b6c61f91113e4c111fd"}, {file = "pyvo-1.8.1.tar.gz", hash = "sha256:d3cc60aa3d3416d22c89e465a04dfa9f521085fdd5228cce2cffd2fee3a9e709"}, @@ -3241,7 +2853,7 @@ description = "A (partial) reimplementation of pywin32 using ctypes/cffi" optional = false python-versions = ">=3.6" groups = ["main"] -markers = "sys_platform == \"win32\"" +markers = "sys_platform == \"win32\" and (python_version <= \"3.11\" or 
python_version >= \"3.12\")" files = [ {file = "pywin32-ctypes-0.2.3.tar.gz", hash = "sha256:d162dc04946d704503b2edc4d55f3dba5c1d539ead017afa00142c38b9885755"}, {file = "pywin32_ctypes-0.2.3-py3-none-any.whl", hash = "sha256:8a1513379d709975552d202d942d9837758905c8d01eb82b8bcc30918929e7b8"}, @@ -3254,7 +2866,7 @@ description = "YAML parser and emitter for Python" optional = false python-versions = ">=3.8" groups = ["main", "test"] -markers = "sys_platform == \"win32\" or sys_platform == \"emscripten\" or sys_platform != \"win32\" and sys_platform != \"emscripten\"" +markers = "(sys_platform == \"win32\" or sys_platform == \"emscripten\" or sys_platform != \"win32\" and sys_platform != \"emscripten\") and (python_version <= \"3.11\" or python_version >= \"3.12\")" files = [ {file = "PyYAML-6.0.3-cp38-cp38-macosx_10_13_x86_64.whl", hash = "sha256:c2514fceb77bc5e7a2f7adfaa1feb2fb311607c9cb518dbc378688ec73d8292f"}, {file = "PyYAML-6.0.3-cp38-cp38-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:9c57bb8c96f6d1808c030b1687b9b5fb476abaa47f0db9c0101f5e9f394e97f4"}, @@ -3338,7 +2950,7 @@ description = "A docutils-compatibility bridge to CommonMark, enabling you to wr optional = false python-versions = "*" groups = ["docs"] -markers = "sys_platform == \"win32\" or sys_platform == \"emscripten\" or sys_platform != \"win32\" and sys_platform != \"emscripten\"" +markers = "(sys_platform == \"win32\" or sys_platform == \"emscripten\" or sys_platform != \"win32\" and sys_platform != \"emscripten\") and (python_version <= \"3.11\" or python_version >= \"3.12\")" files = [ {file = "recommonmark-0.7.1-py2.py3-none-any.whl", hash = "sha256:1b1db69af0231efce3fa21b94ff627ea33dee7079a01dd0a7f8482c3da148b3f"}, {file = "recommonmark-0.7.1.tar.gz", hash = "sha256:bdb4db649f2222dcd8d2d844f0006b958d627f732415d399791ee436a3686d67"}, @@ -3356,7 +2968,7 @@ description = "Python HTTP for Humans." 
optional = false python-versions = ">=3.10" groups = ["main", "coverage", "docs", "test"] -markers = "sys_platform == \"win32\" or sys_platform == \"emscripten\" or sys_platform != \"win32\" and sys_platform != \"emscripten\"" +markers = "(sys_platform == \"win32\" or sys_platform == \"emscripten\" or sys_platform != \"win32\" and sys_platform != \"emscripten\") and (python_version <= \"3.11\" or python_version >= \"3.12\")" files = [ {file = "requests-2.33.1-py3-none-any.whl", hash = "sha256:4e6d1ef462f3626a1f0a0a9c42dd93c63bad33f9f1c1937509b8c5c8718ab56a"}, {file = "requests-2.33.1.tar.gz", hash = "sha256:18817f8c57c6263968bc123d237e3b8b08ac046f5456bd1e307ee8f4250d3517"}, @@ -3378,8 +2990,8 @@ version = "0.26.0" description = "A utility library for mocking out the `requests` Python library." optional = false python-versions = ">=3.8" -groups = ["test"] -markers = "sys_platform == \"win32\" or sys_platform == \"emscripten\" or sys_platform != \"win32\" and sys_platform != \"emscripten\"" +groups = ["main", "test"] +markers = "(sys_platform == \"win32\" or sys_platform == \"emscripten\" or sys_platform != \"win32\" and sys_platform != \"emscripten\") and (python_version <= \"3.11\" or python_version >= \"3.12\")" files = [ {file = "responses-0.26.0-py3-none-any.whl", hash = "sha256:03ec4409088cd5c66b71ecbbbd27fe2c58ddfad801c66203457b3e6a04868c37"}, {file = "responses-0.26.0.tar.gz", hash = "sha256:c7f6923e6343ef3682816ba421c006626777893cb0d5e1434f674b649bac9eb4"}, @@ -3400,7 +3012,7 @@ description = "Render rich text, tables, progress bars, syntax highlighting, mar optional = false python-versions = ">=3.9.0" groups = ["coverage"] -markers = "sys_platform == \"win32\" or sys_platform == \"emscripten\" or sys_platform != \"win32\" and sys_platform != \"emscripten\"" +markers = "(sys_platform == \"win32\" or sys_platform == \"emscripten\" or sys_platform != \"win32\" and sys_platform != \"emscripten\") and (python_version <= \"3.11\" or python_version >= \"3.12\")" 
files = [ {file = "rich-15.0.0-py3-none-any.whl", hash = "sha256:33bd4ef74232fb73fe9279a257718407f169c09b78a87ad3d296f548e27de0bb"}, {file = "rich-15.0.0.tar.gz", hash = "sha256:edd07a4824c6b40189fb7ac9bc4c52536e9780fbbfbddf6f1e2502c31b068c36"}, @@ -3420,7 +3032,7 @@ description = "Fundamental algorithms for scientific computing in Python" optional = false python-versions = ">=3.10" groups = ["main"] -markers = "(sys_platform == \"win32\" or sys_platform == \"emscripten\" or sys_platform != \"win32\" and sys_platform != \"emscripten\") and python_version < \"3.11\"" +markers = "(sys_platform == \"win32\" or sys_platform == \"emscripten\" or sys_platform != \"win32\" and sys_platform != \"emscripten\") and (python_version <= \"3.11\" or python_version >= \"3.12\")" files = [ {file = "scipy-1.15.3-cp310-cp310-macosx_10_13_x86_64.whl", hash = "sha256:a345928c86d535060c9c2b25e71e87c39ab2f22fc96e9636bd74d1dbf9de448c"}, {file = "scipy-1.15.3-cp310-cp310-macosx_12_0_arm64.whl", hash = "sha256:ad3432cb0f9ed87477a8d97f03b763fd1d57709f1bbde3c9369b1dff5503b253"}, @@ -3478,86 +3090,6 @@ dev = ["cython-lint (>=0.12.2)", "doit (>=0.36.0)", "mypy (==1.10.0)", "pycodest doc = ["intersphinx_registry", "jupyterlite-pyodide-kernel", "jupyterlite-sphinx (>=0.19.1)", "jupytext", "matplotlib (>=3.5)", "myst-nb", "numpydoc", "pooch", "pydata-sphinx-theme (>=0.15.2)", "sphinx (>=5.0.0,<8.0.0)", "sphinx-copybutton", "sphinx-design (>=0.4.0)"] test = ["Cython", "array-api-strict (>=2.0,<2.1.1)", "asv", "gmpy2", "hypothesis (>=6.30)", "meson", "mpmath", "ninja", "pooch", "pytest", "pytest-cov", "pytest-timeout", "pytest-xdist", "scikit-umfpack", "threadpoolctl"] -[[package]] -name = "scipy" -version = "1.17.1" -description = "Fundamental algorithms for scientific computing in Python" -optional = false -python-versions = ">=3.11" -groups = ["main"] -markers = "(sys_platform == \"win32\" or sys_platform == \"emscripten\" or sys_platform != \"win32\" and sys_platform != \"emscripten\") and 
python_version >= \"3.11\"" -files = [ - {file = "scipy-1.17.1-cp311-cp311-macosx_10_14_x86_64.whl", hash = "sha256:1f95b894f13729334fb990162e911c9e5dc1ab390c58aa6cbecb389c5b5e28ec"}, - {file = "scipy-1.17.1-cp311-cp311-macosx_12_0_arm64.whl", hash = "sha256:e18f12c6b0bc5a592ed23d3f7b891f68fd7f8241d69b7883769eb5d5dfb52696"}, - {file = "scipy-1.17.1-cp311-cp311-macosx_14_0_arm64.whl", hash = "sha256:a3472cfbca0a54177d0faa68f697d8ba4c80bbdc19908c3465556d9f7efce9ee"}, - {file = "scipy-1.17.1-cp311-cp311-macosx_14_0_x86_64.whl", hash = "sha256:766e0dc5a616d026a3a1cffa379af959671729083882f50307e18175797b3dfd"}, - {file = "scipy-1.17.1-cp311-cp311-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:744b2bf3640d907b79f3fd7874efe432d1cf171ee721243e350f55234b4cec4c"}, - {file = "scipy-1.17.1-cp311-cp311-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:43af8d1f3bea642559019edfe64e9b11192a8978efbd1539d7bc2aaa23d92de4"}, - {file = "scipy-1.17.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:cd96a1898c0a47be4520327e01f874acfd61fb48a9420f8aa9f6483412ffa444"}, - {file = "scipy-1.17.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:4eb6c25dd62ee8d5edf68a8e1c171dd71c292fdae95d8aeb3dd7d7de4c364082"}, - {file = "scipy-1.17.1-cp311-cp311-win_amd64.whl", hash = "sha256:d30e57c72013c2a4fe441c2fcb8e77b14e152ad48b5464858e07e2ad9fbfceff"}, - {file = "scipy-1.17.1-cp311-cp311-win_arm64.whl", hash = "sha256:9ecb4efb1cd6e8c4afea0daa91a87fbddbce1b99d2895d151596716c0b2e859d"}, - {file = "scipy-1.17.1-cp312-cp312-macosx_10_14_x86_64.whl", hash = "sha256:35c3a56d2ef83efc372eaec584314bd0ef2e2f0d2adb21c55e6ad5b344c0dcb8"}, - {file = "scipy-1.17.1-cp312-cp312-macosx_12_0_arm64.whl", hash = "sha256:fcb310ddb270a06114bb64bbe53c94926b943f5b7f0842194d585c65eb4edd76"}, - {file = "scipy-1.17.1-cp312-cp312-macosx_14_0_arm64.whl", hash = "sha256:cc90d2e9c7e5c7f1a482c9875007c095c3194b1cfedca3c2f3291cdc2bc7c086"}, - {file = 
"scipy-1.17.1-cp312-cp312-macosx_14_0_x86_64.whl", hash = "sha256:c80be5ede8f3f8eded4eff73cc99a25c388ce98e555b17d31da05287015ffa5b"}, - {file = "scipy-1.17.1-cp312-cp312-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:e19ebea31758fac5893a2ac360fedd00116cbb7628e650842a6691ba7ca28a21"}, - {file = "scipy-1.17.1-cp312-cp312-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:02ae3b274fde71c5e92ac4d54bc06c42d80e399fec704383dcd99b301df37458"}, - {file = "scipy-1.17.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:8a604bae87c6195d8b1045eddece0514d041604b14f2727bbc2b3020172045eb"}, - {file = "scipy-1.17.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:f590cd684941912d10becc07325a3eeb77886fe981415660d9265c4c418d0bea"}, - {file = "scipy-1.17.1-cp312-cp312-win_amd64.whl", hash = "sha256:41b71f4a3a4cab9d366cd9065b288efc4d4f3c0b37a91a8e0947fb5bd7f31d87"}, - {file = "scipy-1.17.1-cp312-cp312-win_arm64.whl", hash = "sha256:f4115102802df98b2b0db3cce5cb9b92572633a1197c77b7553e5203f284a5b3"}, - {file = "scipy-1.17.1-cp313-cp313-macosx_10_14_x86_64.whl", hash = "sha256:5e3c5c011904115f88a39308379c17f91546f77c1667cea98739fe0fccea804c"}, - {file = "scipy-1.17.1-cp313-cp313-macosx_12_0_arm64.whl", hash = "sha256:6fac755ca3d2c3edcb22f479fceaa241704111414831ddd3bc6056e18516892f"}, - {file = "scipy-1.17.1-cp313-cp313-macosx_14_0_arm64.whl", hash = "sha256:7ff200bf9d24f2e4d5dc6ee8c3ac64d739d3a89e2326ba68aaf6c4a2b838fd7d"}, - {file = "scipy-1.17.1-cp313-cp313-macosx_14_0_x86_64.whl", hash = "sha256:4b400bdc6f79fa02a4d86640310dde87a21fba0c979efff5248908c6f15fad1b"}, - {file = "scipy-1.17.1-cp313-cp313-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:2b64ca7d4aee0102a97f3ba22124052b4bd2152522355073580bf4845e2550b6"}, - {file = "scipy-1.17.1-cp313-cp313-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:581b2264fc0aa555f3f435a5944da7504ea3a065d7029ad60e7c3d1ae09c5464"}, - {file = 
"scipy-1.17.1-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:beeda3d4ae615106d7094f7e7cef6218392e4465cc95d25f900bebabfded0950"}, - {file = "scipy-1.17.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:6609bc224e9568f65064cfa72edc0f24ee6655b47575954ec6339534b2798369"}, - {file = "scipy-1.17.1-cp313-cp313-win_amd64.whl", hash = "sha256:37425bc9175607b0268f493d79a292c39f9d001a357bebb6b88fdfaff13f6448"}, - {file = "scipy-1.17.1-cp313-cp313-win_arm64.whl", hash = "sha256:5cf36e801231b6a2059bf354720274b7558746f3b1a4efb43fcf557ccd484a87"}, - {file = "scipy-1.17.1-cp313-cp313t-macosx_10_14_x86_64.whl", hash = "sha256:d59c30000a16d8edc7e64152e30220bfbd724c9bbb08368c054e24c651314f0a"}, - {file = "scipy-1.17.1-cp313-cp313t-macosx_12_0_arm64.whl", hash = "sha256:010f4333c96c9bb1a4516269e33cb5917b08ef2166d5556ca2fd9f082a9e6ea0"}, - {file = "scipy-1.17.1-cp313-cp313t-macosx_14_0_arm64.whl", hash = "sha256:2ceb2d3e01c5f1d83c4189737a42d9cb2fc38a6eeed225e7515eef71ad301dce"}, - {file = "scipy-1.17.1-cp313-cp313t-macosx_14_0_x86_64.whl", hash = "sha256:844e165636711ef41f80b4103ed234181646b98a53c8f05da12ca5ca289134f6"}, - {file = "scipy-1.17.1-cp313-cp313t-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:158dd96d2207e21c966063e1635b1063cd7787b627b6f07305315dd73d9c679e"}, - {file = "scipy-1.17.1-cp313-cp313t-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:74cbb80d93260fe2ffa334efa24cb8f2f0f622a9b9febf8b483c0b865bfb3475"}, - {file = "scipy-1.17.1-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:dbc12c9f3d185f5c737d801da555fb74b3dcfa1a50b66a1a93e09190f41fab50"}, - {file = "scipy-1.17.1-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:94055a11dfebe37c656e70317e1996dc197e1a15bbcc351bcdd4610e128fe1ca"}, - {file = "scipy-1.17.1-cp313-cp313t-win_amd64.whl", hash = "sha256:e30bdeaa5deed6bc27b4cc490823cd0347d7dae09119b8803ae576ea0ce52e4c"}, - {file = "scipy-1.17.1-cp313-cp313t-win_arm64.whl", hash = 
"sha256:a720477885a9d2411f94a93d16f9d89bad0f28ca23c3f8daa521e2dcc3f44d49"}, - {file = "scipy-1.17.1-cp314-cp314-macosx_10_14_x86_64.whl", hash = "sha256:a48a72c77a310327f6a3a920092fa2b8fd03d7deaa60f093038f22d98e096717"}, - {file = "scipy-1.17.1-cp314-cp314-macosx_12_0_arm64.whl", hash = "sha256:45abad819184f07240d8a696117a7aacd39787af9e0b719d00285549ed19a1e9"}, - {file = "scipy-1.17.1-cp314-cp314-macosx_14_0_arm64.whl", hash = "sha256:3fd1fcdab3ea951b610dc4cef356d416d5802991e7e32b5254828d342f7b7e0b"}, - {file = "scipy-1.17.1-cp314-cp314-macosx_14_0_x86_64.whl", hash = "sha256:7bdf2da170b67fdf10bca777614b1c7d96ae3ca5794fd9587dce41eb2966e866"}, - {file = "scipy-1.17.1-cp314-cp314-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:adb2642e060a6549c343603a3851ba76ef0b74cc8c079a9a58121c7ec9fe2350"}, - {file = "scipy-1.17.1-cp314-cp314-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:eee2cfda04c00a857206a4330f0c5e3e56535494e30ca445eb19ec624ae75118"}, - {file = "scipy-1.17.1-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:d2650c1fb97e184d12d8ba010493ee7b322864f7d3d00d3f9bb97d9c21de4068"}, - {file = "scipy-1.17.1-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:08b900519463543aa604a06bec02461558a6e1cef8fdbb8098f77a48a83c8118"}, - {file = "scipy-1.17.1-cp314-cp314-win_amd64.whl", hash = "sha256:3877ac408e14da24a6196de0ddcace62092bfc12a83823e92e49e40747e52c19"}, - {file = "scipy-1.17.1-cp314-cp314-win_arm64.whl", hash = "sha256:f8885db0bc2bffa59d5c1b72fad7a6a92d3e80e7257f967dd81abb553a90d293"}, - {file = "scipy-1.17.1-cp314-cp314t-macosx_10_14_x86_64.whl", hash = "sha256:1cc682cea2ae55524432f3cdff9e9a3be743d52a7443d0cba9017c23c87ae2f6"}, - {file = "scipy-1.17.1-cp314-cp314t-macosx_12_0_arm64.whl", hash = "sha256:2040ad4d1795a0ae89bfc7e8429677f365d45aa9fd5e4587cf1ea737f927b4a1"}, - {file = "scipy-1.17.1-cp314-cp314t-macosx_14_0_arm64.whl", hash = "sha256:131f5aaea57602008f9822e2115029b55d4b5f7c070287699fe45c661d051e39"}, - {file = 
"scipy-1.17.1-cp314-cp314t-macosx_14_0_x86_64.whl", hash = "sha256:9cdc1a2fcfd5c52cfb3045feb399f7b3ce822abdde3a193a6b9a60b3cb5854ca"}, - {file = "scipy-1.17.1-cp314-cp314t-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:6e3dcd57ab780c741fde8dc68619de988b966db759a3c3152e8e9142c26295ad"}, - {file = "scipy-1.17.1-cp314-cp314t-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:a9956e4d4f4a301ebf6cde39850333a6b6110799d470dbbb1e25326ac447f52a"}, - {file = "scipy-1.17.1-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:a4328d245944d09fd639771de275701ccadf5f781ba0ff092ad141e017eccda4"}, - {file = "scipy-1.17.1-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:a77cbd07b940d326d39a1d1b37817e2ee4d79cb30e7338f3d0cddffae70fcaa2"}, - {file = "scipy-1.17.1-cp314-cp314t-win_amd64.whl", hash = "sha256:eb092099205ef62cd1782b006658db09e2fed75bffcae7cc0d44052d8aa0f484"}, - {file = "scipy-1.17.1-cp314-cp314t-win_arm64.whl", hash = "sha256:200e1050faffacc162be6a486a984a0497866ec54149a01270adc8a59b7c7d21"}, - {file = "scipy-1.17.1.tar.gz", hash = "sha256:95d8e012d8cb8816c226aef832200b1d45109ed4464303e997c5b13122b297c0"}, -] - -[package.dependencies] -numpy = ">=1.26.4,<2.7" - -[package.extras] -dev = ["click (<8.3.0)", "cython-lint (>=0.12.2)", "mypy (==1.10.0)", "pycodestyle", "ruff (>=0.12.0)", "spin", "types-psutil", "typing_extensions"] -doc = ["intersphinx_registry", "jupyterlite-pyodide-kernel", "jupyterlite-sphinx (>=0.19.1)", "jupytext", "linkify-it-py", "matplotlib (>=3.5)", "myst-nb (>=1.2.0)", "numpydoc", "pooch", "pydata-sphinx-theme (>=0.15.2)", "sphinx (>=5.0.0,<8.2.0)", "sphinx-copybutton", "sphinx-design (>=0.4.0)", "tabulate"] -test = ["Cython", "array-api-strict (>=2.3.1)", "asv", "gmpy2", "hypothesis (>=6.30)", "meson", "mpmath", "ninja ; sys_platform != \"emscripten\"", "pooch", "pytest (>=8.0.0)", "pytest-cov", "pytest-timeout", "pytest-xdist", "scikit-umfpack", "threadpoolctl"] - [[package]] name = "secretstorage" version 
= "3.5.0" @@ -3565,7 +3097,7 @@ description = "Python bindings to FreeDesktop.org Secret Service API" optional = false python-versions = ">=3.10" groups = ["main"] -markers = "sys_platform == \"linux\"" +markers = "sys_platform == \"linux\" and python_version <= \"3.11\" or sys_platform == \"linux\" and python_version >= \"3.12\"" files = [ {file = "secretstorage-3.5.0-py3-none-any.whl", hash = "sha256:0ce65888c0725fcb2c5bc0fdb8e5438eece02c523557ea40ce0703c266248137"}, {file = "secretstorage-3.5.0.tar.gz", hash = "sha256:f04b8e4689cbce351744d5537bf6b1329c6fc68f91fa666f60a380edddcd11be"}, @@ -3582,7 +3114,7 @@ description = "A library implementing the 'SemVer' scheme." optional = false python-versions = ">=2.7" groups = ["main"] -markers = "sys_platform == \"win32\" or sys_platform == \"emscripten\" or sys_platform != \"win32\" and sys_platform != \"emscripten\"" +markers = "(sys_platform == \"win32\" or sys_platform == \"emscripten\" or sys_platform != \"win32\" and sys_platform != \"emscripten\") and (python_version <= \"3.11\" or python_version >= \"3.12\")" files = [ {file = "semantic_version-2.10.0-py2.py3-none-any.whl", hash = "sha256:de78a3b8e0feda74cabc54aab2da702113e33ac9d9eb9d2389bcf1f58b7d9177"}, {file = "semantic_version-2.10.0.tar.gz", hash = "sha256:bdabb6d336998cbb378d4b9db3a4b56a1e3235701dc05ea2690d9a997ed5041c"}, @@ -3599,7 +3131,7 @@ description = "Tool to Detect Surrounding Shell" optional = false python-versions = ">=3.7" groups = ["coverage"] -markers = "sys_platform == \"win32\" or sys_platform == \"emscripten\" or sys_platform != \"win32\" and sys_platform != \"emscripten\"" +markers = "(sys_platform == \"win32\" or sys_platform == \"emscripten\" or sys_platform != \"win32\" and sys_platform != \"emscripten\") and (python_version <= \"3.11\" or python_version >= \"3.12\")" files = [ {file = "shellingham-1.5.4-py2.py3-none-any.whl", hash = "sha256:7ecfff8f2fd72616f7481040475a65b2bf8af90a56c89140852d1120324e8686"}, {file = 
"shellingham-1.5.4.tar.gz", hash = "sha256:8dbca0739d487e5bd35ab3ca4b36e11c4078f3a234bfce294b0a0291363404de"}, @@ -3612,7 +3144,7 @@ description = "Python 2 and 3 compatibility utilities" optional = false python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,>=2.7" groups = ["main"] -markers = "sys_platform == \"win32\" or sys_platform == \"emscripten\" or sys_platform != \"win32\" and sys_platform != \"emscripten\"" +markers = "(sys_platform == \"win32\" or sys_platform == \"emscripten\" or sys_platform != \"win32\" and sys_platform != \"emscripten\") and (python_version <= \"3.11\" or python_version >= \"3.12\")" files = [ {file = "six-1.17.0-py2.py3-none-any.whl", hash = "sha256:4721f391ed90541fddacab5acf947aa0d3dc7d27b2e1e8eda2be8970586c3274"}, {file = "six-1.17.0.tar.gz", hash = "sha256:ff70335d468e7eb6ec65b95b99d3a2836546063f63acc5171de367e834932a81"}, @@ -3625,7 +3157,7 @@ description = "This package provides 32 stemmers for 30 languages generated from optional = false python-versions = "!=3.0.*,!=3.1.*,!=3.2.*" groups = ["docs"] -markers = "sys_platform == \"win32\" or sys_platform == \"emscripten\" or sys_platform != \"win32\" and sys_platform != \"emscripten\"" +markers = "(sys_platform == \"win32\" or sys_platform == \"emscripten\" or sys_platform != \"win32\" and sys_platform != \"emscripten\") and (python_version <= \"3.11\" or python_version >= \"3.12\")" files = [ {file = "snowballstemmer-3.0.1-py3-none-any.whl", hash = "sha256:6cd7b3897da8d6c9ffb968a6781fa6532dce9c3618a4b127d920dab764a19064"}, {file = "snowballstemmer-3.0.1.tar.gz", hash = "sha256:6d5eeeec8e9f84d4d56b847692bacf79bc2c8e90c7f80ca4444ff8b6f2e52895"}, @@ -3638,7 +3170,7 @@ description = "A modern CSS selector implementation for Beautiful Soup." 
optional = false python-versions = ">=3.9" groups = ["main"] -markers = "sys_platform == \"win32\" or sys_platform == \"emscripten\" or sys_platform != \"win32\" and sys_platform != \"emscripten\"" +markers = "(sys_platform == \"win32\" or sys_platform == \"emscripten\" or sys_platform != \"win32\" and sys_platform != \"emscripten\") and (python_version <= \"3.11\" or python_version >= \"3.12\")" files = [ {file = "soupsieve-2.8.3-py3-none-any.whl", hash = "sha256:ed64f2ba4eebeab06cc4962affce381647455978ffc1e36bb79a545b91f45a95"}, {file = "soupsieve-2.8.3.tar.gz", hash = "sha256:3267f1eeea4251fb42728b6dfb746edc9acaffc4a45b27e19450b676586e8349"}, @@ -3651,7 +3183,7 @@ description = "Package for spectroscopic astronomical data" optional = false python-versions = ">=3.10" groups = ["main"] -markers = "(sys_platform == \"win32\" or sys_platform == \"emscripten\" or sys_platform != \"win32\" and sys_platform != \"emscripten\") and python_version < \"3.11\"" +markers = "(sys_platform == \"win32\" or sys_platform == \"emscripten\" or sys_platform != \"win32\" and sys_platform != \"emscripten\") and (python_version <= \"3.11\" or python_version >= \"3.12\")" files = [ {file = "specutils-2.1.0-py3-none-any.whl", hash = "sha256:90652041792b236d9e028f74bb4230005ff86f211f6746eaed814e05f1dd5349"}, {file = "specutils-2.1.0.tar.gz", hash = "sha256:4b0189b6d3650a7f49885354487409f992ab68dbf40d0a2c3e1272868106ce0b"}, @@ -3671,33 +3203,6 @@ docs = ["graphviz", "matplotlib", "sphinx-astropy"] jwst = ["stdatamodels (>=1.1.0)"] test = ["matplotlib", "pytest-astropy", "spectral-cube", "tox"] -[[package]] -name = "specutils" -version = "2.3.0" -description = "Package for spectroscopic astronomical data" -optional = false -python-versions = ">=3.11" -groups = ["main"] -markers = "(sys_platform == \"win32\" or sys_platform == \"emscripten\" or sys_platform != \"win32\" and sys_platform != \"emscripten\") and python_version >= \"3.11\"" -files = [ - {file = 
"specutils-2.3.0-py3-none-any.whl", hash = "sha256:d29f3aeb8d1daa579660589b6728ea4da4bcfe5cd4f619a49f11feb5836fc396"}, - {file = "specutils-2.3.0.tar.gz", hash = "sha256:f7986afd835b38b83fd510e9022ce41e576785b48bb83fe3759dedcbf9bcff8f"}, -] - -[package.dependencies] -asdf = ">=3.3.0" -asdf-astropy = ">=0.8" -astropy = ">=7.2" -gwcs = ">=0.25.1" -ndcube = ">=2.0" -numpy = ">=1.24" -scipy = ">=1.14" - -[package.extras] -docs = ["graphviz", "matplotlib", "sphinx (<9)", "sphinx-astropy"] -jwst = ["stdatamodels (>=1.1.0)"] -test = ["matplotlib", "pytest-asdf-plugin", "pytest-astropy", "spectral-cube", "tox"] - [[package]] name = "sphinx" version = "7.4.7" @@ -3705,7 +3210,7 @@ description = "Python documentation generator" optional = false python-versions = ">=3.9" groups = ["docs"] -markers = "sys_platform == \"win32\" or sys_platform == \"emscripten\" or sys_platform != \"win32\" and sys_platform != \"emscripten\"" +markers = "(sys_platform == \"win32\" or sys_platform == \"emscripten\" or sys_platform != \"win32\" and sys_platform != \"emscripten\") and (python_version <= \"3.11\" or python_version >= \"3.12\")" files = [ {file = "sphinx-7.4.7-py3-none-any.whl", hash = "sha256:c2419e2135d11f1951cd994d6eb18a1835bd8fdd8429f9ca375dc1f3281bd239"}, {file = "sphinx-7.4.7.tar.gz", hash = "sha256:242f92a7ea7e6c5b406fdc2615413890ba9f699114a9c09192d7dfead2ee9cfe"}, @@ -3742,7 +3247,7 @@ description = "Add a copy button to each of your code cells." 
optional = false python-versions = ">=3.7" groups = ["docs"] -markers = "sys_platform == \"win32\" or sys_platform == \"emscripten\" or sys_platform != \"win32\" and sys_platform != \"emscripten\"" +markers = "(sys_platform == \"win32\" or sys_platform == \"emscripten\" or sys_platform != \"win32\" and sys_platform != \"emscripten\") and (python_version <= \"3.11\" or python_version >= \"3.12\")" files = [ {file = "sphinx-copybutton-0.5.2.tar.gz", hash = "sha256:4cf17c82fb9646d1bc9ca92ac280813a3b605d8c421225fd9913154103ee1fbd"}, {file = "sphinx_copybutton-0.5.2-py3-none-any.whl", hash = "sha256:fb543fd386d917746c9a2c50360c7905b605726b9355cd26e9974857afeae06e"}, @@ -3762,7 +3267,7 @@ description = "A sphinx extension for designing beautiful, view size responsive optional = false python-versions = ">=3.9" groups = ["docs"] -markers = "sys_platform == \"win32\" or sys_platform == \"emscripten\" or sys_platform != \"win32\" and sys_platform != \"emscripten\"" +markers = "(sys_platform == \"win32\" or sys_platform == \"emscripten\" or sys_platform != \"win32\" and sys_platform != \"emscripten\") and (python_version <= \"3.11\" or python_version >= \"3.12\")" files = [ {file = "sphinx_design-0.6.1-py3-none-any.whl", hash = "sha256:b11f37db1a802a183d61b159d9a202314d4d2fe29c163437001324fe2f19549c"}, {file = "sphinx_design-0.6.1.tar.gz", hash = "sha256:b44eea3719386d04d765c1a8257caca2b3e6f8421d7b3a5e742c0fd45f84e632"}, @@ -3789,7 +3294,7 @@ description = "Read the Docs theme for Sphinx" optional = false python-versions = ">=3.8" groups = ["docs"] -markers = "sys_platform == \"win32\" or sys_platform == \"emscripten\" or sys_platform != \"win32\" and sys_platform != \"emscripten\"" +markers = "(sys_platform == \"win32\" or sys_platform == \"emscripten\" or sys_platform != \"win32\" and sys_platform != \"emscripten\") and (python_version <= \"3.11\" or python_version >= \"3.12\")" files = [ {file = "sphinx_rtd_theme-3.1.0-py2.py3-none-any.whl", hash = 
"sha256:1785824ae8e6632060490f67cf3a72d404a85d2d9fc26bce3619944de5682b89"}, {file = "sphinx_rtd_theme-3.1.0.tar.gz", hash = "sha256:b44276f2c276e909239a4f6c955aa667aaafeb78597923b1c60babc76db78e4c"}, @@ -3810,7 +3315,7 @@ description = "sphinxcontrib-applehelp is a Sphinx extension which outputs Apple optional = false python-versions = ">=3.9" groups = ["docs"] -markers = "sys_platform == \"win32\" or sys_platform == \"emscripten\" or sys_platform != \"win32\" and sys_platform != \"emscripten\"" +markers = "(sys_platform == \"win32\" or sys_platform == \"emscripten\" or sys_platform != \"win32\" and sys_platform != \"emscripten\") and (python_version <= \"3.11\" or python_version >= \"3.12\")" files = [ {file = "sphinxcontrib_applehelp-2.0.0-py3-none-any.whl", hash = "sha256:4cd3f0ec4ac5dd9c17ec65e9ab272c9b867ea77425228e68ecf08d6b28ddbdb5"}, {file = "sphinxcontrib_applehelp-2.0.0.tar.gz", hash = "sha256:2f29ef331735ce958efa4734873f084941970894c6090408b079c61b2e1c06d1"}, @@ -3828,7 +3333,7 @@ description = "sphinxcontrib-devhelp is a sphinx extension which outputs Devhelp optional = false python-versions = ">=3.9" groups = ["docs"] -markers = "sys_platform == \"win32\" or sys_platform == \"emscripten\" or sys_platform != \"win32\" and sys_platform != \"emscripten\"" +markers = "(sys_platform == \"win32\" or sys_platform == \"emscripten\" or sys_platform != \"win32\" and sys_platform != \"emscripten\") and (python_version <= \"3.11\" or python_version >= \"3.12\")" files = [ {file = "sphinxcontrib_devhelp-2.0.0-py3-none-any.whl", hash = "sha256:aefb8b83854e4b0998877524d1029fd3e6879210422ee3780459e28a1f03a8a2"}, {file = "sphinxcontrib_devhelp-2.0.0.tar.gz", hash = "sha256:411f5d96d445d1d73bb5d52133377b4248ec79db5c793ce7dbe59e074b4dd1ad"}, @@ -3846,7 +3351,7 @@ description = "sphinxcontrib-htmlhelp is a sphinx extension which renders HTML h optional = false python-versions = ">=3.9" groups = ["docs"] -markers = "sys_platform == \"win32\" or sys_platform == 
\"emscripten\" or sys_platform != \"win32\" and sys_platform != \"emscripten\"" +markers = "(sys_platform == \"win32\" or sys_platform == \"emscripten\" or sys_platform != \"win32\" and sys_platform != \"emscripten\") and (python_version <= \"3.11\" or python_version >= \"3.12\")" files = [ {file = "sphinxcontrib_htmlhelp-2.1.0-py3-none-any.whl", hash = "sha256:166759820b47002d22914d64a075ce08f4c46818e17cfc9470a9786b759b19f8"}, {file = "sphinxcontrib_htmlhelp-2.1.0.tar.gz", hash = "sha256:c9e2916ace8aad64cc13a0d233ee22317f2b9025b9cf3295249fa985cc7082e9"}, @@ -3864,7 +3369,7 @@ description = "Extension to include jQuery on newer Sphinx releases" optional = false python-versions = ">=2.7" groups = ["docs"] -markers = "sys_platform == \"win32\" or sys_platform == \"emscripten\" or sys_platform != \"win32\" and sys_platform != \"emscripten\"" +markers = "(sys_platform == \"win32\" or sys_platform == \"emscripten\" or sys_platform != \"win32\" and sys_platform != \"emscripten\") and (python_version <= \"3.11\" or python_version >= \"3.12\")" files = [ {file = "sphinxcontrib-jquery-4.1.tar.gz", hash = "sha256:1620739f04e36a2c779f1a131a2dfd49b2fd07351bf1968ced074365933abc7a"}, {file = "sphinxcontrib_jquery-4.1-py2.py3-none-any.whl", hash = "sha256:f936030d7d0147dd026a4f2b5a57343d233f1fc7b363f68b3d4f1cb0993878ae"}, @@ -3880,7 +3385,7 @@ description = "A sphinx extension which renders display math in HTML via JavaScr optional = false python-versions = ">=3.5" groups = ["docs"] -markers = "sys_platform == \"win32\" or sys_platform == \"emscripten\" or sys_platform != \"win32\" and sys_platform != \"emscripten\"" +markers = "(sys_platform == \"win32\" or sys_platform == \"emscripten\" or sys_platform != \"win32\" and sys_platform != \"emscripten\") and (python_version <= \"3.11\" or python_version >= \"3.12\")" files = [ {file = "sphinxcontrib-jsmath-1.0.1.tar.gz", hash = "sha256:a9925e4a4587247ed2191a22df5f6970656cb8ca2bd6284309578f2153e0c4b8"}, {file = 
"sphinxcontrib_jsmath-1.0.1-py2.py3-none-any.whl", hash = "sha256:2ec2eaebfb78f3f2078e73666b1415417a116cc848b72e5172e596c871103178"}, @@ -3896,7 +3401,7 @@ description = "sphinxcontrib-qthelp is a sphinx extension which outputs QtHelp d optional = false python-versions = ">=3.9" groups = ["docs"] -markers = "sys_platform == \"win32\" or sys_platform == \"emscripten\" or sys_platform != \"win32\" and sys_platform != \"emscripten\"" +markers = "(sys_platform == \"win32\" or sys_platform == \"emscripten\" or sys_platform != \"win32\" and sys_platform != \"emscripten\") and (python_version <= \"3.11\" or python_version >= \"3.12\")" files = [ {file = "sphinxcontrib_qthelp-2.0.0-py3-none-any.whl", hash = "sha256:b18a828cdba941ccd6ee8445dbe72ffa3ef8cbe7505d8cd1fa0d42d3f2d5f3eb"}, {file = "sphinxcontrib_qthelp-2.0.0.tar.gz", hash = "sha256:4fe7d0ac8fc171045be623aba3e2a8f613f8682731f9153bb2e40ece16b9bbab"}, @@ -3914,7 +3419,7 @@ description = "sphinxcontrib-serializinghtml is a sphinx extension which outputs optional = false python-versions = ">=3.9" groups = ["docs"] -markers = "sys_platform == \"win32\" or sys_platform == \"emscripten\" or sys_platform != \"win32\" and sys_platform != \"emscripten\"" +markers = "(sys_platform == \"win32\" or sys_platform == \"emscripten\" or sys_platform != \"win32\" and sys_platform != \"emscripten\") and (python_version <= \"3.11\" or python_version >= \"3.12\")" files = [ {file = "sphinxcontrib_serializinghtml-2.0.0-py3-none-any.whl", hash = "sha256:6e2cb0eef194e10c27ec0023bfeb25badbbb5868244cf5bc5bdc04e4464bf331"}, {file = "sphinxcontrib_serializinghtml-2.0.0.tar.gz", hash = "sha256:e9d912827f872c029017a53f0ef2180b327c3f7fd23c87229f7a8e8b70031d4d"}, @@ -3932,7 +3437,7 @@ description = "A non-validating SQL parser." 
optional = false python-versions = ">=3.8" groups = ["main"] -markers = "sys_platform == \"win32\" or sys_platform == \"emscripten\" or sys_platform != \"win32\" and sys_platform != \"emscripten\"" +markers = "(sys_platform == \"win32\" or sys_platform == \"emscripten\" or sys_platform != \"win32\" and sys_platform != \"emscripten\") and (python_version <= \"3.11\" or python_version >= \"3.12\")" files = [ {file = "sqlparse-0.5.5-py3-none-any.whl", hash = "sha256:12a08b3bf3eec877c519589833aed092e2444e68240a3577e8e26148acc7b1ba"}, {file = "sqlparse-0.5.5.tar.gz", hash = "sha256:e20d4a9b0b8585fdf63b10d30066c7c94c5d7a7ec47c889a2d83a3caa93ff28e"}, @@ -3949,7 +3454,7 @@ description = "Extract data from python stack frames and tracebacks for informat optional = false python-versions = "*" groups = ["main"] -markers = "sys_platform == \"win32\" or sys_platform == \"emscripten\" or sys_platform != \"win32\" and sys_platform != \"emscripten\"" +markers = "(sys_platform == \"win32\" or sys_platform == \"emscripten\" or sys_platform != \"win32\" and sys_platform != \"emscripten\") and (python_version <= \"3.11\" or python_version >= \"3.12\")" files = [ {file = "stack_data-0.6.3-py3-none-any.whl", hash = "sha256:d5558e0c25a4cb0853cddad3d77da9891a08cb85dd9f9f91b9f8cd66e511e695"}, {file = "stack_data-0.6.3.tar.gz", hash = "sha256:836a778de4fec4dcd1dcd89ed8abff8a221f58308462e1c4aa2a3cf30148f0b9"}, @@ -3970,7 +3475,7 @@ description = "Retry code until it succeeds" optional = false python-versions = ">=3.10" groups = ["main"] -markers = "sys_platform == \"win32\" or sys_platform == \"emscripten\" or sys_platform != \"win32\" and sys_platform != \"emscripten\"" +markers = "(sys_platform == \"win32\" or sys_platform == \"emscripten\" or sys_platform != \"win32\" and sys_platform != \"emscripten\") and (python_version <= \"3.11\" or python_version >= \"3.12\")" files = [ {file = "tenacity-9.1.4-py3-none-any.whl", hash = 
"sha256:6095a360c919085f28c6527de529e76a06ad89b23659fa881ae0649b867a9d55"}, {file = "tenacity-9.1.4.tar.gz", hash = "sha256:adb31d4c263f2bd041081ab33b498309a57c77f9acf2db65aadf0898179cf93a"}, @@ -3987,7 +3492,6 @@ description = "A lil' TOML parser" optional = false python-versions = ">=3.8" groups = ["coverage", "docs"] -markers = "(sys_platform == \"win32\" or sys_platform == \"emscripten\" or sys_platform != \"win32\" and sys_platform != \"emscripten\") and python_version < \"3.11\"" files = [ {file = "tomli-2.4.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:f8f0fc26ec2cc2b965b7a3b87cd19c5c6b8c5e5f436b984e85f486d652285c30"}, {file = "tomli-2.4.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:4ab97e64ccda8756376892c53a72bd1f964e519c77236368527f758fbc36a53a"}, @@ -4037,6 +3541,7 @@ files = [ {file = "tomli-2.4.1-py3-none-any.whl", hash = "sha256:0d85819802132122da43cb86656f8d1f8c6587d54ae7dcaf30e90533028b49fe"}, {file = "tomli-2.4.1.tar.gz", hash = "sha256:7c7e1a961a0b2f2472c1ac5b69affa0ae1132c39adcb67aba98568702b9cc23f"}, ] +markers = {coverage = "(sys_platform == \"win32\" or sys_platform == \"emscripten\" or sys_platform != \"win32\" and sys_platform != \"emscripten\") and python_full_version <= \"3.11.0a6\"", docs = "(sys_platform == \"win32\" or sys_platform == \"emscripten\" or sys_platform != \"win32\" and sys_platform != \"emscripten\") and python_version < \"3.11\""} [[package]] name = "traitlets" @@ -4045,7 +3550,7 @@ description = "Traitlets Python configuration system" optional = false python-versions = ">=3.8" groups = ["main"] -markers = "sys_platform == \"win32\" or sys_platform == \"emscripten\" or sys_platform != \"win32\" and sys_platform != \"emscripten\"" +markers = "(sys_platform == \"win32\" or sys_platform == \"emscripten\" or sys_platform != \"win32\" and sys_platform != \"emscripten\") and (python_version <= \"3.11\" or python_version >= \"3.12\")" files = [ {file = "traitlets-5.14.3-py3-none-any.whl", hash = 
"sha256:b74e89e397b1ed28cc831db7aea759ba6640cb3de13090ca145426688ff1ac4f"}, {file = "traitlets-5.14.3.tar.gz", hash = "sha256:9ed0579d3502c94b4b3732ac120375cda96f923114522847de4b3bb98b96b6b7"}, @@ -4062,7 +3567,7 @@ description = "Typer, build great CLIs. Easy to code. Based on Python type hints optional = false python-versions = ">=3.10" groups = ["coverage"] -markers = "sys_platform == \"win32\" or sys_platform == \"emscripten\" or sys_platform != \"win32\" and sys_platform != \"emscripten\"" +markers = "(sys_platform == \"win32\" or sys_platform == \"emscripten\" or sys_platform != \"win32\" and sys_platform != \"emscripten\") and (python_version <= \"3.11\" or python_version >= \"3.12\")" files = [ {file = "typer-0.25.1-py3-none-any.whl", hash = "sha256:75caa44ed46a03fb2dab8808753ffacdbfea88495e74c85a28c5eefcf5f39c89"}, {file = "typer-0.25.1.tar.gz", hash = "sha256:9616eb8853a09ffeabab1698952f33c6f29ffdbceb4eaeecf571880e8d7664cc"}, @@ -4081,7 +3586,7 @@ description = "Backported and Experimental Type Hints for Python 3.9+" optional = false python-versions = ">=3.9" groups = ["main"] -markers = "sys_platform == \"win32\" or sys_platform == \"emscripten\" or sys_platform != \"win32\" and sys_platform != \"emscripten\"" +markers = "(sys_platform == \"win32\" or sys_platform == \"emscripten\" or sys_platform != \"win32\" and sys_platform != \"emscripten\") and (python_version <= \"3.11\" or python_version >= \"3.12\")" files = [ {file = "typing_extensions-4.15.0-py3-none-any.whl", hash = "sha256:f0fa19c6845758ab08074a0cfa8b7aecb71c999ca73d62883bc25cc018c4e548"}, {file = "typing_extensions-4.15.0.tar.gz", hash = "sha256:0cea48d173cc12fa28ecabc3b837ea3cf6f38c6d1136f85cbaaf598984861466"}, @@ -4098,7 +3603,7 @@ files = [ {file = "tzdata-2026.2-py2.py3-none-any.whl", hash = "sha256:bbe9af844f658da81a5f95019480da3a89415801f6cc966806612cc7169bffe7"}, {file = "tzdata-2026.2.tar.gz", hash = "sha256:9173fde7d80d9018e02a662e168e5a2d04f87c41ea174b139fbef642eda62d10"}, ] 
-markers = {main = "sys_platform != \"win32\" and sys_platform != \"emscripten\" and python_version < \"3.11\" or sys_platform == \"win32\" or sys_platform == \"emscripten\"", test = "(sys_platform == \"win32\" or sys_platform == \"emscripten\" or sys_platform != \"win32\" and sys_platform != \"emscripten\") and platform_system == \"Windows\""} +markers = {main = "(sys_platform == \"win32\" or sys_platform == \"emscripten\" or sys_platform != \"win32\" and sys_platform != \"emscripten\") and (python_version <= \"3.11\" or python_version >= \"3.12\")", test = "(sys_platform == \"win32\" or sys_platform == \"emscripten\" or sys_platform != \"win32\" and sys_platform != \"emscripten\") and (python_version <= \"3.11\" or python_version >= \"3.12\") and platform_system == \"Windows\""} [[package]] name = "urllib3" @@ -4107,7 +3612,7 @@ description = "HTTP library with thread-safe connection pooling, file post, and optional = false python-versions = ">=3.9" groups = ["main", "coverage", "docs", "test"] -markers = "sys_platform == \"win32\" or sys_platform == \"emscripten\" or sys_platform != \"win32\" and sys_platform != \"emscripten\"" +markers = "(sys_platform == \"win32\" or sys_platform == \"emscripten\" or sys_platform != \"win32\" and sys_platform != \"emscripten\") and (python_version <= \"3.11\" or python_version >= \"3.12\")" files = [ {file = "urllib3-2.6.3-py3-none-any.whl", hash = "sha256:bf272323e553dfb2e87d9bfd225ca7b0f467b919d7bbd355436d3fd37cb0acd4"}, {file = "urllib3-2.6.3.tar.gz", hash = "sha256:1b62b6884944a57dbe321509ab94fd4d3b307075e0c2eae991ac71ee15ad38ed"}, @@ -4126,7 +3631,7 @@ description = "Measures the displayed width of unicode strings in a terminal" optional = false python-versions = ">=3.8" groups = ["main"] -markers = "sys_platform == \"win32\" or sys_platform == \"emscripten\" or sys_platform != \"win32\" and sys_platform != \"emscripten\"" +markers = "(sys_platform == \"win32\" or sys_platform == \"emscripten\" or sys_platform != 
\"win32\" and sys_platform != \"emscripten\") and (python_version <= \"3.11\" or python_version >= \"3.12\")" files = [ {file = "wcwidth-0.6.0-py3-none-any.whl", hash = "sha256:1a3a1e510b553315f8e146c54764f4fb6264ffad731b3d78088cdb1478ffbdad"}, {file = "wcwidth-0.6.0.tar.gz", hash = "sha256:cdc4e4262d6ef9a1a57e018384cbeb1208d8abbc64176027e2c2455c81313159"}, @@ -4139,7 +3644,7 @@ description = "Character encoding aliases for legacy web content" optional = false python-versions = "*" groups = ["main"] -markers = "sys_platform == \"win32\" or sys_platform == \"emscripten\" or sys_platform != \"win32\" and sys_platform != \"emscripten\"" +markers = "(sys_platform == \"win32\" or sys_platform == \"emscripten\" or sys_platform != \"win32\" and sys_platform != \"emscripten\") and (python_version <= \"3.11\" or python_version >= \"3.12\")" files = [ {file = "webencodings-0.5.1-py2.py3-none-any.whl", hash = "sha256:a0af1213f3c2226497a97e2b3aa01a7e4bee4f403f95be16fc9acd2947514a78"}, {file = "webencodings-0.5.1.tar.gz", hash = "sha256:b36a1c245f2d304965eb4e0a82848379241dc04b865afcc4aab16748587e1923"}, @@ -4172,4 +3677,4 @@ test = ["factory_boy", "psycopg2-binary", "responses"] [metadata] lock-version = "2.1" python-versions = ">=3.10.0,<3.14" -content-hash = "2f5acb86aafdc8bf765a7117decd8935b1787241ab28a554d38073ee32774ba0" +content-hash = "876034df05abb39699e98a455d95b465da5885e8a7b5b9d5a8ffc9ee5bfc6c4f" diff --git a/pyproject.toml b/pyproject.toml index 78c386290..c78231011 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -77,7 +77,7 @@ dependencies = [ # other than poetry can install them. ``pip install -e ".[test]"`` [project.optional-dependencies] test = [ - "responses >=0.23,<0.26", + "responses >=0.26.0,<0.27", "factory_boy >3.2.1,<3.4", "psycopg2-binary", # postgres test backend ]