IAMC Data Interface

Datapoints (ixmp4.data.iamc.datapoint)

Service

class ixmp4.data.iamc.datapoint.service.DataPointService(transport: Transport)

Bases: Service

router_prefix: ClassVar[str] = '/iamc/datapoints'
router_tags: ClassVar[Sequence[str]] = ['iamc-datapoints']
http_controller

alias of EnumerationCompatibilityController

executor: SessionExecutor
pandas: PandasRepository
versions: VersionRepository
default_filter: DataPointFilter = {'run': {'default_only': True}}
full_key = {'step_category', 'step_datetime', 'step_year', 'time_series__id', 'type'}
base_columns = {'id', 'step_category', 'step_datetime', 'step_year', 'time_series__id', 'type', 'value'}
ts_columns = {'region', 'unit', 'variable'}
run_columns = {'model', 'scenario', 'version'}
get_columns(*, join_parameters: bool, join_runs: bool, join_run_id: bool) tuple[str, ...] | None
tabulate(join_parameters: bool = False, join_runs: bool = False, join_run_id: bool = False, **kwargs: Unpack[DataPointFilter]) WithJsonSchema(json_schema={'properties': {'index': {'anyOf': [{'items': {'type': 'integer'}, 'type': 'array'}, {'items': {'type': 'string'}, 'type': 'array'}, {'type': 'null'}], 'default': None, 'title': 'Index'}, 'columns': {'anyOf': [{'items': {'type': 'string'}, 'type': 'array'}, {'type': 'null'}], 'default': None, 'title': 'Columns'}, 'dtypes': {'anyOf': [{'items': {'type': 'string'}, 'type': 'array'}, {'type': 'null'}], 'default': None, 'title': 'Dtypes'}, 'data': {'anyOf': [{'items': {'items': {'anyOf': [{'type': 'boolean'}, {'type': 'integer'}, {'type': 'number'}, {'type': 'string'}, {'additionalProperties': True, 'type': 'object'}, {'items': {'type': 'number'}, 'type': 'array'}, {'items': {'type': 'integer'}, 'type': 'array'}, {'items': {'type': 'string'}, 'type': 'array'}, {'format': 'date-time', 'type': 'string'}, {'type': 'null'}]}, 'type': 'array'}, 'type': 'array'}, {'type': 'null'}], 'title': 'Data'}}, 'required': ['data'], 'title': 'DataFrameTypeAdapter', 'type': 'object'}, mode=serialization)]

Tabulates datapoints by specified criteria.

Parameters:
  • join_parameters (bool, optional) – Whether to include region, unit and variable in the data frame. Default: False

  • join_runs (bool, optional) – Whether to include model, scenario and version in the data frame. Default: False

  • join_run_id (bool, optional) – Whether to include run__id in the data frame. Default: False

  • **kwargs (any) – Filter parameters as specified in DataPointFilter.

Returns:

A data frame with the columns:
  • step_year

  • step_category

  • step_datetime

  • type

  • value

if join_parameters is True:
  • region

  • unit

  • variable

if join_runs is True:
  • model

  • scenario

  • version

if join_run_id is True:
  • run__id

Return type:

pandas.DataFrame

tabulate_auth_check(auth_ctx: AuthorizationContext, platform: PlatformProtocol) None
paginated_tabulate(pagination: Pagination, join_parameters: bool = False, join_runs: bool = False, join_run_id: bool = False, **kwargs: Unpack[DataPointFilter]) PaginatedResult[Annotated[DataFrame, PlainValidator, PlainSerializer, WithJsonSchema]]
bulk_upsert(df: WithJsonSchema(json_schema={'properties': {'index': {'anyOf': [{'items': {'type': 'integer'}, 'type': 'array'}, {'items': {'type': 'string'}, 'type': 'array'}, {'type': 'null'}], 'default': None, 'title': 'Index'}, 'columns': {'anyOf': [{'items': {'type': 'string'}, 'type': 'array'}, {'type': 'null'}], 'default': None, 'title': 'Columns'}, 'dtypes': {'anyOf': [{'items': {'type': 'string'}, 'type': 'array'}, {'type': 'null'}], 'default': None, 'title': 'Dtypes'}, 'data': {'anyOf': [{'items': {'items': {'anyOf': [{'type': 'boolean'}, {'type': 'integer'}, {'type': 'number'}, {'type': 'string'}, {'additionalProperties': True, 'type': 'object'}, {'items': {'type': 'number'}, 'type': 'array'}, {'items': {'type': 'integer'}, 'type': 'array'}, {'items': {'type': 'string'}, 'type': 'array'}, {'format': 'date-time', 'type': 'string'}, {'type': 'null'}]}, 'type': 'array'}, 'type': 'array'}, {'type': 'null'}], 'title': 'Data'}}, 'required': ['data'], 'title': 'DataFrameTypeAdapter', 'type': 'object'}, mode=serialization)]) None

Bulk inserts or updates datapoints from a supplied dataframe.

This method accepts a dataframe containing datapoint data and validates it against the upsert schema before inserting or updating records in the database. The upsert operation is keyed on the subset of full key columns present in the dataframe.

Parameters:

df (pandas.DataFrame) –

DataFrame containing rows of datapoint data to upsert. Must conform to UpsertDataPointFrameSchema structure.

Key columns include:
  • time_series__id

  • step_category and/or step_year or step_datetime

  • type, optional

  • value

Raises:

InvalidDataFrame – If the dataframe does not conform to UpsertDataPointFrameSchema.

bulk_upsert_auth_check(auth_ctx: AuthorizationContext, platform: PlatformProtocol, /, df: WithJsonSchema(json_schema={'properties': {'index': {'anyOf': [{'items': {'type': 'integer'}, 'type': 'array'}, {'items': {'type': 'string'}, 'type': 'array'}, {'type': 'null'}], 'default': None, 'title': 'Index'}, 'columns': {'anyOf': [{'items': {'type': 'string'}, 'type': 'array'}, {'type': 'null'}], 'default': None, 'title': 'Columns'}, 'dtypes': {'anyOf': [{'items': {'type': 'string'}, 'type': 'array'}, {'type': 'null'}], 'default': None, 'title': 'Dtypes'}, 'data': {'anyOf': [{'items': {'items': {'anyOf': [{'type': 'boolean'}, {'type': 'integer'}, {'type': 'number'}, {'type': 'string'}, {'additionalProperties': True, 'type': 'object'}, {'items': {'type': 'number'}, 'type': 'array'}, {'items': {'type': 'integer'}, 'type': 'array'}, {'items': {'type': 'string'}, 'type': 'array'}, {'format': 'date-time', 'type': 'string'}, {'type': 'null'}]}, 'type': 'array'}, 'type': 'array'}, {'type': 'null'}], 'title': 'Data'}}, 'required': ['data'], 'title': 'DataFrameTypeAdapter', 'type': 'object'}, mode=serialization)]) None
bulk_delete(df: WithJsonSchema(json_schema={'properties': {'index': {'anyOf': [{'items': {'type': 'integer'}, 'type': 'array'}, {'items': {'type': 'string'}, 'type': 'array'}, {'type': 'null'}], 'default': None, 'title': 'Index'}, 'columns': {'anyOf': [{'items': {'type': 'string'}, 'type': 'array'}, {'type': 'null'}], 'default': None, 'title': 'Columns'}, 'dtypes': {'anyOf': [{'items': {'type': 'string'}, 'type': 'array'}, {'type': 'null'}], 'default': None, 'title': 'Dtypes'}, 'data': {'anyOf': [{'items': {'items': {'anyOf': [{'type': 'boolean'}, {'type': 'integer'}, {'type': 'number'}, {'type': 'string'}, {'additionalProperties': True, 'type': 'object'}, {'items': {'type': 'number'}, 'type': 'array'}, {'items': {'type': 'integer'}, 'type': 'array'}, {'items': {'type': 'string'}, 'type': 'array'}, {'format': 'date-time', 'type': 'string'}, {'type': 'null'}]}, 'type': 'array'}, 'type': 'array'}, {'type': 'null'}], 'title': 'Data'}}, 'required': ['data'], 'title': 'DataFrameTypeAdapter', 'type': 'object'}, mode=serialization)]) None

Bulk deletes datapoints from a supplied dataframe.

This method accepts a dataframe containing datapoint identifiers and deletes the matching records from the database. After deletion, orphaned timeseries (those with no remaining datapoints) are also removed.

Parameters:

df (pandas.DataFrame) –

DataFrame containing rows of datapoint identifiers to delete. Must conform to DeleteDataPointFrameSchema structure.

Key columns include:
  • time_series__id

  • step_category and/or step_year or step_datetime

  • type, optional

Raises:

InvalidDataFrame – If the dataframe does not conform to DeleteDataPointFrameSchema.

bulk_delete_auth_check(auth_ctx: AuthorizationContext, platform: PlatformProtocol, df: WithJsonSchema(json_schema={'properties': {'index': {'anyOf': [{'items': {'type': 'integer'}, 'type': 'array'}, {'items': {'type': 'string'}, 'type': 'array'}, {'type': 'null'}], 'default': None, 'title': 'Index'}, 'columns': {'anyOf': [{'items': {'type': 'string'}, 'type': 'array'}, {'type': 'null'}], 'default': None, 'title': 'Columns'}, 'dtypes': {'anyOf': [{'items': {'type': 'string'}, 'type': 'array'}, {'type': 'null'}], 'default': None, 'title': 'Dtypes'}, 'data': {'anyOf': [{'items': {'items': {'anyOf': [{'type': 'boolean'}, {'type': 'integer'}, {'type': 'number'}, {'type': 'string'}, {'additionalProperties': True, 'type': 'object'}, {'items': {'type': 'number'}, 'type': 'array'}, {'items': {'type': 'integer'}, 'type': 'array'}, {'items': {'type': 'string'}, 'type': 'array'}, {'format': 'date-time', 'type': 'string'}, {'type': 'null'}]}, 'type': 'array'}, 'type': 'array'}, {'type': 'null'}], 'title': 'Data'}}, 'required': ['data'], 'title': 'DataFrameTypeAdapter', 'type': 'object'}, mode=serialization)]) None

Filters

class ixmp4.data.iamc.datapoint.filter.DataPointFilter

Bases: DataPointFilter

region: InstrumentedAttribute object at 0x79d927827ba0>)]
variable: InstrumentedAttribute object at 0x79d927827ce0>)]
unit: InstrumentedAttribute object at 0x79d927827d80>)]
run: InstrumentedAttribute object at 0x79d927827b00>)]
model: InstrumentedAttribute object at 0x79d927323420>)]
scenario: InstrumentedAttribute object at 0x79d9273234c0>)]
id: int
id__in: list[int]
type: str
type__in: list[str]
step_year: int
step_year__lte: int
step_year__lt: int
step_year__gte: int
step_year__gt: int
step_year__in: list[int]
step_category: str
step_category__in: list[str]
time_series__id: int
time_series__id__in: list[int]
value: float
value__lte: float
value__lt: float
value__gte: float
value__gt: float
value__in: list[float]
class ixmp4.data.iamc.datapoint.filter.DataPointVersionFilter

Bases: DataPointFilter, VersionFilter

timeseries: InstrumentedAttribute object at 0x79d92787a8e0>]
id: int
id__in: list[int]
type: str
type__in: list[str]
step_year: int
step_year__lte: int
step_year__lt: int
step_year__gte: int
step_year__gt: int
step_year__in: list[int]
step_category: str
step_category__in: list[str]
time_series__id: int
time_series__id__in: list[int]
value: float
value__lte: float
value__lt: float
value__gte: float
value__gt: float
value__in: list[float]
valid_at_transaction: Annotated[int, <function filter_by_valid_at_transaction at 0x79d925ad8fe0>]
class ixmp4.data.iamc.datapoint.filter.FacadeStepYearFilter

Bases: TypedDict

year: int
year__lte: int
year__lt: int
year__gte: int
year__gt: int
year__in: list[int]
class ixmp4.data.iamc.datapoint.filter.FacadeStepCategoryFilter

Bases: TypedDict

category: str
category__in: list[str]
class ixmp4.data.iamc.datapoint.filter.FacadeDataPointFilter

Bases: DataPointFilter, FacadeStepYearFilter, FacadeStepCategoryFilter

region: RegionFilter | str | Iterable[str]
unit: UnitFilter | str | Iterable[str]
variable: VariableFilter | str | Iterable[str]
model: ModelFilter | str | Iterable[str]
scenario: ScenarioFilter | str | Iterable[str]
run: FacadeRunFilter
id: int
id__in: list[int]
type: str
type__in: list[str]
step_year: int
step_year__lte: int
step_year__lt: int
step_year__gte: int
step_year__gt: int
step_year__in: list[int]
step_category: str
step_category__in: list[str]
time_series__id: int
time_series__id__in: list[int]
value: float
value__lte: float
value__lt: float
value__gte: float
value__gt: float
value__in: list[float]
year: int
year__lte: int
year__lt: int
year__gte: int
year__gt: int
year__in: list[int]
category: str
category__in: list[str]
ixmp4.data.iamc.datapoint.filter.facade_to_data_filter(filter_values: Mapping[str, Any]) DataPointFilter

Measurands (ixmp4.data.iamc.measurand)

TimeSeries (ixmp4.data.iamc.timeseries)

Service

class ixmp4.data.iamc.timeseries.service.TimeSeriesService(transport: Transport)

Bases: Service

router_prefix: ClassVar[str] = '/iamc/timeseries'
router_tags: ClassVar[Sequence[str]] = ['iamc', 'timeseries']
http_controller

alias of EnumerationCompatibilityController

executor: SessionExecutor
pandas: PandasRepository
versions: VersionRepository
measurands: PandasRepository
regions: PandasRepository
units: PandasRepository
variables: PandasRepository
runs: ItemRepository
default_filter: TimeSeriesFilter = {'run': {'default_only': True}}
tabulate_by_df(df: WithJsonSchema(json_schema={'properties': {'index': {'anyOf': [{'items': {'type': 'integer'}, 'type': 'array'}, {'items': {'type': 'string'}, 'type': 'array'}, {'type': 'null'}], 'default': None, 'title': 'Index'}, 'columns': {'anyOf': [{'items': {'type': 'string'}, 'type': 'array'}, {'type': 'null'}], 'default': None, 'title': 'Columns'}, 'dtypes': {'anyOf': [{'items': {'type': 'string'}, 'type': 'array'}, {'type': 'null'}], 'default': None, 'title': 'Dtypes'}, 'data': {'anyOf': [{'items': {'items': {'anyOf': [{'type': 'boolean'}, {'type': 'integer'}, {'type': 'number'}, {'type': 'string'}, {'additionalProperties': True, 'type': 'object'}, {'items': {'type': 'number'}, 'type': 'array'}, {'items': {'type': 'integer'}, 'type': 'array'}, {'items': {'type': 'string'}, 'type': 'array'}, {'format': 'date-time', 'type': 'string'}, {'type': 'null'}]}, 'type': 'array'}, 'type': 'array'}, {'type': 'null'}], 'title': 'Data'}}, 'required': ['data'], 'title': 'DataFrameTypeAdapter', 'type': 'object'}, mode=serialization)]) WithJsonSchema(json_schema={'properties': {'index': {'anyOf': [{'items': {'type': 'integer'}, 'type': 'array'}, {'items': {'type': 'string'}, 'type': 'array'}, {'type': 'null'}], 'default': None, 'title': 'Index'}, 'columns': {'anyOf': [{'items': {'type': 'string'}, 'type': 'array'}, {'type': 'null'}], 'default': None, 'title': 'Columns'}, 'dtypes': {'anyOf': [{'items': {'type': 'string'}, 'type': 'array'}, {'type': 'null'}], 'default': None, 'title': 'Dtypes'}, 'data': {'anyOf': [{'items': {'items': {'anyOf': [{'type': 'boolean'}, {'type': 'integer'}, {'type': 'number'}, {'type': 'string'}, {'additionalProperties': True, 'type': 'object'}, {'items': {'type': 'number'}, 'type': 'array'}, {'items': {'type': 'integer'}, 'type': 'array'}, {'items': {'type': 'string'}, 'type': 'array'}, {'format': 'date-time', 'type': 'string'}, {'type': 'null'}]}, 'type': 'array'}, 'type': 'array'}, {'type': 'null'}], 'title': 'Data'}}, 'required': ['data'], 
'title': 'DataFrameTypeAdapter', 'type': 'object'}, mode=serialization)]

Tabulates timeseries by values in a supplied dataframe.

Parameters:

df (pandas.DataFrame) –

DataFrame containing rows of timeseries keys to tabulate.
  • run__id

  • region

  • variable

  • unit

Returns:

A data frame with the columns:
  • id

  • run__id

  • region

  • variable

  • unit

Return type:

pandas.DataFrame

tabulate_by_df_auth_check(auth_ctx: AuthorizationContext, platform: PlatformProtocol) None
tabulate(join_parameters: bool = False, **kwargs: Unpack[TimeSeriesFilter]) WithJsonSchema(json_schema={'properties': {'index': {'anyOf': [{'items': {'type': 'integer'}, 'type': 'array'}, {'items': {'type': 'string'}, 'type': 'array'}, {'type': 'null'}], 'default': None, 'title': 'Index'}, 'columns': {'anyOf': [{'items': {'type': 'string'}, 'type': 'array'}, {'type': 'null'}], 'default': None, 'title': 'Columns'}, 'dtypes': {'anyOf': [{'items': {'type': 'string'}, 'type': 'array'}, {'type': 'null'}], 'default': None, 'title': 'Dtypes'}, 'data': {'anyOf': [{'items': {'items': {'anyOf': [{'type': 'boolean'}, {'type': 'integer'}, {'type': 'number'}, {'type': 'string'}, {'additionalProperties': True, 'type': 'object'}, {'items': {'type': 'number'}, 'type': 'array'}, {'items': {'type': 'integer'}, 'type': 'array'}, {'items': {'type': 'string'}, 'type': 'array'}, {'format': 'date-time', 'type': 'string'}, {'type': 'null'}]}, 'type': 'array'}, 'type': 'array'}, {'type': 'null'}], 'title': 'Data'}}, 'required': ['data'], 'title': 'DataFrameTypeAdapter', 'type': 'object'}, mode=serialization)]

Tabulates timeseries by specified criteria.

Parameters:
  • join_parameters (bool, optional) – Include names of related region, variable and unit rows.

  • **kwargs (any) – Filter parameters as specified in TimeSeriesFilter.

Returns:

A data frame with the columns:
  • id

  • run__id

if join_parameters is False (default):
  • region__id

  • measurand__id

if join_parameters is True:
  • region

  • variable

  • unit

Return type:

pandas.DataFrame

tabulate_auth_check(auth_ctx: AuthorizationContext, platform: PlatformProtocol) None
paginated_tabulate(pagination: Pagination, join_parameters: bool = False, **kwargs: Unpack[TimeSeriesFilter]) PaginatedResult[Annotated[DataFrame, PlainValidator, PlainSerializer, WithJsonSchema]]
merge_regions(df: DataFrame) DataFrame
merge_units(df: DataFrame) DataFrame
merge_variables(df: DataFrame) DataFrame
merge_measurands(df: DataFrame) DataFrame
bulk_upsert(df: WithJsonSchema(json_schema={'properties': {'index': {'anyOf': [{'items': {'type': 'integer'}, 'type': 'array'}, {'items': {'type': 'string'}, 'type': 'array'}, {'type': 'null'}], 'default': None, 'title': 'Index'}, 'columns': {'anyOf': [{'items': {'type': 'string'}, 'type': 'array'}, {'type': 'null'}], 'default': None, 'title': 'Columns'}, 'dtypes': {'anyOf': [{'items': {'type': 'string'}, 'type': 'array'}, {'type': 'null'}], 'default': None, 'title': 'Dtypes'}, 'data': {'anyOf': [{'items': {'items': {'anyOf': [{'type': 'boolean'}, {'type': 'integer'}, {'type': 'number'}, {'type': 'string'}, {'additionalProperties': True, 'type': 'object'}, {'items': {'type': 'number'}, 'type': 'array'}, {'items': {'type': 'integer'}, 'type': 'array'}, {'items': {'type': 'string'}, 'type': 'array'}, {'format': 'date-time', 'type': 'string'}, {'type': 'null'}]}, 'type': 'array'}, 'type': 'array'}, {'type': 'null'}], 'title': 'Data'}}, 'required': ['data'], 'title': 'DataFrameTypeAdapter', 'type': 'object'}, mode=serialization)]) None

Bulk inserts or updates timeseries from a supplied dataframe.

This method accepts a dataframe containing timeseries data and automatically resolves region, unit, and variable references, upserting them as needed. Measurands (variable/unit pairs) are also created automatically. The method performs validation and merging operations before inserting or updating the timeseries records.

Parameters:

df (pandas.DataFrame) –

DataFrame containing rows of timeseries data to upsert.

Minimum required columns:
  • run__id

Optional columns that will be resolved automatically:
  • region: region name; will be resolved to region__id

  • unit: unit name; will be resolved to unit__id

  • variable: variable name; will be instantiated if missing

To skip resolving related rows, supply ids directly:
  • region__id

  • measurand__id or unit__id and variable__id

Raises:
  • RegionNotFound – If one or more region names in the dataframe do not exist.

  • UnitNotFound – If one or more unit names in the dataframe do not exist.

bulk_upsert_auth_check(auth_ctx: AuthorizationContext, platform: PlatformProtocol, /, df: WithJsonSchema(json_schema={'properties': {'index': {'anyOf': [{'items': {'type': 'integer'}, 'type': 'array'}, {'items': {'type': 'string'}, 'type': 'array'}, {'type': 'null'}], 'default': None, 'title': 'Index'}, 'columns': {'anyOf': [{'items': {'type': 'string'}, 'type': 'array'}, {'type': 'null'}], 'default': None, 'title': 'Columns'}, 'dtypes': {'anyOf': [{'items': {'type': 'string'}, 'type': 'array'}, {'type': 'null'}], 'default': None, 'title': 'Dtypes'}, 'data': {'anyOf': [{'items': {'items': {'anyOf': [{'type': 'boolean'}, {'type': 'integer'}, {'type': 'number'}, {'type': 'string'}, {'additionalProperties': True, 'type': 'object'}, {'items': {'type': 'number'}, 'type': 'array'}, {'items': {'type': 'integer'}, 'type': 'array'}, {'items': {'type': 'string'}, 'type': 'array'}, {'format': 'date-time', 'type': 'string'}, {'type': 'null'}]}, 'type': 'array'}, 'type': 'array'}, {'type': 'null'}], 'title': 'Data'}}, 'required': ['data'], 'title': 'DataFrameTypeAdapter', 'type': 'object'}, mode=serialization)]) None

Filters

class ixmp4.data.iamc.timeseries.filter.TimeSeriesFilter

Bases: TimeSeriesFilter

region: InstrumentedAttribute object at 0x79d927827ba0>]
variable: InstrumentedAttribute object at 0x79d927827ce0>]
unit: InstrumentedAttribute object at 0x79d927827d80>]
run: InstrumentedAttribute object at 0x79d927827b00>]
id: int
id__in: list[int]
run__id: int
run__id__in: list[int]
class ixmp4.data.iamc.timeseries.filter.TimeSeriesVersionFilter

Bases: VersionFilter, TimeSeriesFilter

valid_at_transaction: Annotated[int, <function filter_by_valid_at_transaction at 0x79d925ad8fe0>]
id: int
id__in: list[int]
run__id: int
run__id__in: list[int]
class ixmp4.data.iamc.timeseries.filter.FacadeTimeSeriesFilter

Bases: TimeSeriesFilter

region: RegionFilter
variable: VariableFilter
unit: UnitFilter
run: FacadeRunFilter
id: int
id__in: list[int]
run__id: int
run__id__in: list[int]
ixmp4.data.iamc.timeseries.filter.facade_to_data_filter(filter_values: Mapping[str, Any]) TimeSeriesFilter

Variables (ixmp4.data.iamc.variable)

Data Transfer Object

class ixmp4.data.iamc.variable.dto.Variable(*, created_at: datetime | None, created_by: str | None, id: int, name: str)

Bases: BaseModel, HasCreationInfo

IAMC variable data model.

name: str

Unique name of the variable.

model_config = {'from_attributes': True}

Configuration for the model, should be a dictionary conforming to pydantic.ConfigDict.

Service

class ixmp4.data.iamc.variable.service.VariableService(transport: Transport)

Bases: DocsService, GetByIdService

router_prefix: ClassVar[str] = '/iamc/variables'
router_tags: ClassVar[Sequence[str]] = ['iamc', 'variables']
http_controller

alias of EnumerationCompatibilityController

executor: SessionExecutor
items: ItemRepository
pandas: PandasRepository
versions: VersionRepository
default_filter: VariableFilter = {'run': {'default_only': True}}
create(name: str) Variable

Creates a variable.

Parameters:

name (str) – The name of the variable.

Raises:

VariableNotUnique – If the variable with name is not unique.

Returns:

The created variable.

Return type:

Variable

create_auth_check(auth_ctx: AuthorizationContext, platform: PlatformProtocol, name: str) None
delete_by_id(id: int) None

Deletes a variable.

Parameters:

id (int) – The unique integer id of the variable.

Raises:

VariableNotFound – If the variable with id does not exist.

delete_auth_check(auth_ctx: AuthorizationContext, platform: PlatformProtocol) None
get_by_name(name: str) Variable

Retrieves a variable by its name.

Parameters:

name (str) – The unique name of the variable.

Raises:

VariableNotFound – If the variable with name does not exist.

Returns:

The retrieved variable.

Return type:

ixmp4.data.iamc.variable.dto.Variable

get_by_name_auth_check(auth_ctx: AuthorizationContext, platform: PlatformProtocol) None
get_by_id(id: int) Variable

Retrieves a variable by its id.

Parameters:

id (int) – The integer id of the variable.

Raises:

NotFound – If the variable with id does not exist.

Returns:

The retrieved variable.

Return type:

ixmp4.data.iamc.variable.dto.Variable

get_by_id_auth_check(auth_ctx: AuthorizationContext, platform: PlatformProtocol) None
get_or_create(name: str) Variable
list(**kwargs: Unpack[VariableFilter]) List[Variable]

Lists variables by specified criteria.

Parameters:

**kwargs (any) – Filter parameters as specified in VariableFilter.

Returns:

List of variables.

Return type:

list[ixmp4.data.iamc.variable.dto.Variable]

list_auth_check(auth_ctx: AuthorizationContext, platform: PlatformProtocol) None
paginated_list(pagination: Pagination, **kwargs: Unpack[VariableFilter]) PaginatedResult[List[Variable]]
tabulate(**kwargs: Unpack[VariableFilter]) WithJsonSchema(json_schema={'properties': {'index': {'anyOf': [{'items': {'type': 'integer'}, 'type': 'array'}, {'items': {'type': 'string'}, 'type': 'array'}, {'type': 'null'}], 'default': None, 'title': 'Index'}, 'columns': {'anyOf': [{'items': {'type': 'string'}, 'type': 'array'}, {'type': 'null'}], 'default': None, 'title': 'Columns'}, 'dtypes': {'anyOf': [{'items': {'type': 'string'}, 'type': 'array'}, {'type': 'null'}], 'default': None, 'title': 'Dtypes'}, 'data': {'anyOf': [{'items': {'items': {'anyOf': [{'type': 'boolean'}, {'type': 'integer'}, {'type': 'number'}, {'type': 'string'}, {'additionalProperties': True, 'type': 'object'}, {'items': {'type': 'number'}, 'type': 'array'}, {'items': {'type': 'integer'}, 'type': 'array'}, {'items': {'type': 'string'}, 'type': 'array'}, {'format': 'date-time', 'type': 'string'}, {'type': 'null'}]}, 'type': 'array'}, 'type': 'array'}, {'type': 'null'}], 'title': 'Data'}}, 'required': ['data'], 'title': 'DataFrameTypeAdapter', 'type': 'object'}, mode=serialization)]

Tabulates variables by specified criteria.

Parameters:

**kwargs (any) – Filter parameters as specified in VariableFilter.

Returns:

A data frame with the columns:
  • id

  • name

Return type:

pandas.DataFrame

tabulate_auth_check(auth_ctx: AuthorizationContext, platform: PlatformProtocol) None
paginated_tabulate(pagination: Pagination, **kwargs: Unpack[VariableFilter]) PaginatedResult[Annotated[DataFrame, PlainValidator, PlainSerializer, WithJsonSchema]]

Filters

ixmp4.data.iamc.variable.filter.filter_by_run(exc: Select, value: dict[str, Any] | None, *, schema: type[Any], repo: BaseRepository[Any]) Select
class ixmp4.data.iamc.variable.filter.VariableFilter

Bases: VariableFilter

unit: InstrumentedAttribute object at 0x79d927827d80>)]
region: InstrumentedAttribute object at 0x79d927827ba0>)]
run: InstrumentedAttribute object at 0x79d927827b00>), <function filter_by_run at 0x79d9258f89a0>]
id: int
id__in: list[int]
name: str
name__in: list[str]
name__like: str
name__ilike: str
name__notlike: str
name__notilike: str
class ixmp4.data.iamc.variable.filter.VariableVersionFilter

Bases: VersionFilter, VariableFilter

valid_at_transaction: Annotated[int, <function filter_by_valid_at_transaction at 0x79d925ad8fe0>]
id: int
id__in: list[int]
name: str
name__in: list[str]
name__like: str
name__ilike: str
name__notlike: str
name__notilike: str
class ixmp4.data.iamc.variable.filter.FacadeVariableFilter

Bases: VariableFilter

unit: UnitFilter
region: RegionFilter
run: FacadeRunFilter | None
id: int
id__in: list[int]
name: str
name__in: list[str]
name__like: str
name__ilike: str
name__notlike: str
name__notilike: str
ixmp4.data.iamc.variable.filter.facade_to_data_filter(filter_values: Mapping[str, Any]) VariableFilter

Base Filters

class ixmp4.data.filters.iamc.TimeSeriesIdFilter

Bases: TypedDict

time_series__id: int
time_series__id__in: list[int]
class ixmp4.data.filters.iamc.DataPointTypeFilter

Bases: TypedDict

type: str
type__in: list[str]
class ixmp4.data.filters.iamc.StepYearFilter

Bases: TypedDict

step_year: int
step_year__lte: int
step_year__lt: int
step_year__gte: int
step_year__gt: int
step_year__in: list[int]
class ixmp4.data.filters.iamc.StepCategoryFilter

Bases: TypedDict

step_category: str
step_category__in: list[str]
class ixmp4.data.filters.iamc.UnitIdFilter

Bases: TypedDict

unit__id: int
unit__id__lte: int
unit__id__lt: int
unit__id__gte: int
unit__id__gt: int
unit__id__in: list[int]
class ixmp4.data.filters.iamc.VariableIdFilter

Bases: TypedDict

variable__id: int
variable__id__lte: int
variable__id__lt: int
variable__id__gte: int
variable__id__gt: int
variable__id__in: list[int]
class ixmp4.data.filters.iamc.VariableFilter

Bases: IdFilter, NameFilter

id: int
id__in: list[int]
name: str
name__in: list[str]
name__like: str
name__ilike: str
name__notlike: str
name__notilike: str
class ixmp4.data.filters.iamc.MeasurandFilter

Bases: IdFilter, UnitIdFilter, VariableIdFilter

id: int
id__in: list[int]
unit__id: int
unit__id__lte: int
unit__id__lt: int
unit__id__gte: int
unit__id__gt: int
unit__id__in: list[int]
variable__id: int
variable__id__lte: int
variable__id__lt: int
variable__id__gte: int
variable__id__gt: int
variable__id__in: list[int]
class ixmp4.data.filters.iamc.TimeSeriesFilter

Bases: IdFilter, RunIdFilter

id: int
id__in: list[int]
run__id: int
run__id__in: list[int]
class ixmp4.data.filters.iamc.DataPointFilter

Bases: IdFilter, DataPointTypeFilter, StepYearFilter, StepCategoryFilter, TimeSeriesIdFilter, ValueFilter

id: int
id__in: list[int]
type: str
type__in: list[str]
step_year: int
step_year__lte: int
step_year__lt: int
step_year__gte: int
step_year__gt: int
step_year__in: list[int]
step_category: str
step_category__in: list[str]
time_series__id: int
time_series__id__in: list[int]
value: float
value__lte: float
value__lt: float
value__gte: float
value__gt: float
value__in: list[float]