Python SDK

Classiq SDK.

analyzer special

analyzer

Analyzer module, implementing facilities for analyzing circuits using the Classiq platform.

Analyzer (AnalyzerUtilities)

Analyzer is the wrapper object for all analysis capabilities.

Source code in classiq/analyzer/analyzer.py
class Analyzer(AnalyzerUtilities, metaclass=Asyncify):
    """Analyzer is the wrapper object for all analysis capabilities."""

    def __init__(self, circuit: generator_result.GeneratedCircuit):
        """Init self.

        Args:
            circuit (): The circuit to be analyzed.
        """
        if circuit.qasm is None:
            raise ClassiqAnalyzerError(
                "Analysis requires a circuit with valid QASM code"
            )
        self._params: analysis_params.AnalysisParams = analysis_params.AnalysisParams(
            qasm=circuit.qasm
        )
        self.circuit: generator_result.GeneratedCircuit = circuit
        self.qc_graph: Optional[go.Figure] = None
        self.heatmap: Optional[go.Figure] = None
        self.gate_histogram: Optional[go.Figure] = None
        self.hardware_comparison_table: Optional[go.Figure] = None
        self.available_devices: ProviderAvailableDevices = dict()
        self.hardware_graphs: HardwareGraphs = dict()

    async def analyzer_app_async(self) -> None:
        """Opens the analyzer app with synthesis interactive results.

        Returns:
            None.
        """
        result = await ApiWrapper.call_analyzer_app(self.circuit)
        webbrowser.open_new_tab(
            urljoin(
                client_ide_base_url(),
                circuit_page_uri(
                    circuit_id=result.id, circuit_version=self.circuit.version
                ),
            )
        )

    async def get_available_devices_async(
        self, providers: Optional[List[ProviderNameEnum]] = None
    ) -> Dict[ProviderNameEnum, List[DeviceName]]:
        """Returns dict of the available devices by the providers. only devices
        with sufficient number of qubits are returns

        Args: providers (): List of providers (string or `AnalyzerProviderVendor`).
        if None, the table include all the available hardware.

        Returns:
            available devices (): dict of the available devices (Dict[str,List[str]]).
        """
        if providers is None:
            providers = list(AnalyzerProviderVendor)
        await self.request_available_devices_async(providers=providers)
        return {
            provider: self._filter_devices_by_qubits_count(provider)
            for provider in providers
        }

    async def get_qubits_connectivity_async(self) -> None:
        """create a network connectivity graph of the analysed circuit.

        Returns:
            None.
        """
        result = await ApiWrapper.call_qubits_connectivity_graphs_task(self._params)
        self.qc_graph = go.Figure(json.loads(result.details))

    async def plot_qubits_connectivity_async(self) -> None:
        """plot the connectivity graph. if it has not been created it, it first creates the graph.

        Returns:
            None.
        """
        if self.qc_graph is None:
            await self.get_qubits_connectivity_async()
        self.qc_graph.show()  # type: ignore[union-attr]

    async def plot_hardware_connectivity_async(
        self,
        provider: Optional[ProviderNameEnum] = None,
        device: Optional[DeviceName] = None,
    ) -> VBox:
        """plot the hardware_connectivity graph. It is required to required  install the
        analyzer_sdk extra.

        Args:
            provider (): provider name (optional - string or `AnalyzerProviderVendor`).
            device (): device name (optional - string).
        Returns:
         hardware_connectivity_graph (): interactive graph.
        """

        self._validate_analyzer_extra()
        interactive_hardware = InteractiveHardware(
            circuit=self.circuit,
            params=self._params,
            available_devices=self.available_devices,
            hardware_graphs=self.hardware_graphs,
        )
        await interactive_hardware.enable_interactivity_async()
        if provider is not None:
            interactive_hardware.providers_combobox.value = provider
            if device is not None:
                interactive_hardware.devices_combobox.value = device

        return interactive_hardware.show_interactive_graph()

    async def get_hardware_comparison_table_async(
        self,
        providers: Optional[List[Union[str, AnalyzerProviderVendor]]] = None,
        devices: Optional[List[str]] = None,
    ) -> None:
        """create a comparison table between the transpiled circuits result on different hardware.
        The  comparison table included the depth, multi qubit gates count,and total gates count of the circuits.

        Args: providers (): List of providers (string or `AnalyzerProviderVendor`). if None, the table include all
        the available hardware.
        devices (): List of devices (string). if None, the table include all the available devices of the selected
        providers.
        Returns: None.
        """
        if providers is None:
            providers = list(AnalyzerProviderVendor)
        params = analysis_params.AnalysisHardwareListParams(
            qasm=self._params.qasm, providers=providers, devices=devices
        )
        result = await ApiWrapper.call_table_graphs_task(params=params)
        self.hardware_comparison_table = go.Figure(json.loads(result.details))

    async def plot_hardware_comparison_table_async(
        self,
        providers: Optional[List[Union[str, AnalyzerProviderVendor]]] = None,
        devices: Optional[List[str]] = None,
    ) -> None:
        """plot the comparison table. if it has not been created it, it first creates the table using all the
        available hardware.

        Returns:
            None.
        """
        await self._hardware_comparison_condition_async(
            providers=providers, devices=devices
        )
        self.hardware_comparison_table.show()  # type: ignore[union-attr]

    async def get_heatmap_async(self) -> None:
        """create a heatmap of the analysed circuit.

        Returns:
            None.
        """
        result = await ApiWrapper.call_heatmap_graphs(self._params)
        self.heatmap = _create_heatmap_graph(result, self.circuit.data.width)

    async def plot_heatmap_async(self) -> None:
        """plot the circuit heatmap. if it has not been created it, it will create the graph.

        Returns:
            None.
        """
        if self.heatmap is None:
            await self.get_heatmap_async()
        self.heatmap.show()  # type: ignore[union-attr]

    async def plot_gate_histogram_async(self) -> None:
        """plot the circuit gate histogram. if it has not been created it, it will create the graph.

        Returns:
            None.
        """
        if self.gate_histogram is None:
            await self.get_gate_histogram_async()
        self.gate_histogram.show()  # type: ignore[union-attr]

    async def get_gate_histogram_async(self) -> None:
        """create a gate histogram of the analysed circuit.

        Returns:
            None.
        """
        result = await ApiWrapper.call_gate_histogram_graphs(params=self._params)
        self.gate_histogram = _create_gate_histogram(
            result=result, num_qubits=self.circuit.data.width
        )

    async def hardware_aware_resynthesize_async(
        self, device: str, provider: Union[str, AnalyzerProviderVendor]
    ) -> generator_result.GeneratedCircuit:
        """resynthesize the analyzed circuit using its original model, and a new  backend preferences.

        Args:
            provider (): Provider company or cloud for the requested backend (string or `AnalyzerProviderVendor`).
            device (): Name of the requested backend"
        Returns:
            circuit (): resynthesize circuit (`GeneratedCircuit`).
        """

        update_preferences = self._validated_update_preferences(
            device=device, provider=provider
        )

        model = Model()
        model._model = self.circuit.model.copy(deep=True)  # type: ignore[union-attr]
        return await model.synthesize_async(preferences=update_preferences)

    async def optimized_hardware_resynthesize_async(
        self,
        comparison_property: Union[str, ComparisonProperties],
        providers: Optional[List[Union[str, AnalyzerProviderVendor]]] = None,
        devices: Optional[List[str]] = None,
    ) -> generator_result.GeneratedCircuit:
        """Re-synthesize the analyzed circuit using its original model, and a new backend preferences, which is the
         devices with the best fit to the selected comparison property.

        Args: comparison_property (): A comparison properties using to compare between the devices (string or
        `ComparisonProperties`).
        providers (): List of providers (string or `AnalyzerProviderVendor`). If None, the comparison include all the
        available hardware.
        devices (): List of devices (string). If None, the comparison include all the available devices of the selected
        providers.
        Returns: circuit (): resynthesize circuit (`GeneratedCircuit`).
        """
        optimized_device, optimized_provider = await self._get_optimized_hardware_async(
            providers=providers,
            devices=devices,
            comparison_property=comparison_property,
        )
        return await self.hardware_aware_resynthesize_async(
            provider=optimized_provider, device=optimized_device
        )

    async def _get_optimized_hardware_async(
        self,
        comparison_property: Union[str, ComparisonProperties],
        providers: Optional[List[Union[str, AnalyzerProviderVendor]]] = None,
        devices: Optional[List[str]] = None,
    ) -> Tuple[str, str]:
        await self._hardware_comparison_condition_async(
            providers=providers, devices=devices
        )
        optimized_device, optimized_provider = self._choose_optimized_hardware(
            comparison_property=comparison_property
        )
        return optimized_device, optimized_provider

    def _choose_optimized_hardware(
        self, comparison_property: Union[str, ComparisonProperties]
    ) -> Tuple[str, str]:
        comparison_params = AnalysisComparisonParams(property=comparison_property)
        if not isinstance(self.hardware_comparison_table, go.Figure):
            raise ClassiqAnalyzerError(
                "The analyzer does not contains a valid hardware comparison table"
            )
        column_names = self.hardware_comparison_table.data[0].header.values
        property_index = column_names.index(comparison_params.property.upper())

        sort_button = self.hardware_comparison_table.layout.updatemenus[0]
        sort_data = sort_button.buttons[property_index].args[0]["cells"]["values"]
        return sort_data[0][0], sort_data[1][0]

    def _validated_update_preferences(
        self, device: str, provider: Union[str, AnalyzerProviderVendor]
    ) -> Preferences:
        if not isinstance(self.circuit.model, APIModel):
            raise ClassiqAnalyzerError("The circuit does not contains a valid model")

        preferences_dict = self.circuit.model.preferences.dict()
        preferences_dict.update(
            dict(backend_service_provider=provider, backend_name=device)
        )

        return Preferences.parse_obj(preferences_dict)

    async def _hardware_comparison_condition_async(
        self,
        providers: Optional[List[Union[str, AnalyzerProviderVendor]]] = None,
        devices: Optional[List[str]] = None,
    ) -> None:
        if (
            providers is not None
            or devices is not None
            or self.hardware_comparison_table is None
        ):
            await self.get_hardware_comparison_table_async(
                providers=providers, devices=devices
            )

    @staticmethod
    def _open_route(path: str) -> None:
        backend_uri = client.client().get_backend_uri()
        webbrowser.open_new_tab(f"{backend_uri}{path}")

    @staticmethod
    def _validate_analyzer_extra() -> None:
        if find_ipywidgets is None:
            raise ClassiqAnalyzerError(
                "To use this method, please install the `analyzer sdk`. Run the  \
                following line: - pip install classiq[analyzer_sdk]"
            )
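Example: a minimal usage sketch (not from the SDK reference itself) showing how an Analyzer is typically constructed from a synthesized circuit; the Model workflow and the import paths are assumptions and may differ between SDK versions.

from classiq import Analyzer, Model  # import paths assumed

model = Model()
# ... build the model with quantum function calls ...
circuit = model.synthesize()  # a GeneratedCircuit containing QASM code

analyzer = Analyzer(circuit=circuit)  # raises ClassiqAnalyzerError if circuit.qasm is None
analyzer.analyzer_app()  # opens the interactive analyzer in the browser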
__init__(self, circuit) special

Init self.

Parameters:

Name Type Description Default
circuit

The circuit to be analyzed.

required
Source code in classiq/analyzer/analyzer.py
def __init__(self, circuit: generator_result.GeneratedCircuit):
    """Init self.

    Args:
        circuit (): The circuit to be analyzed.
    """
    if circuit.qasm is None:
        raise ClassiqAnalyzerError(
            "Analysis requires a circuit with valid QASM code"
        )
    self._params: analysis_params.AnalysisParams = analysis_params.AnalysisParams(
        qasm=circuit.qasm
    )
    self.circuit: generator_result.GeneratedCircuit = circuit
    self.qc_graph: Optional[go.Figure] = None
    self.heatmap: Optional[go.Figure] = None
    self.gate_histogram: Optional[go.Figure] = None
    self.hardware_comparison_table: Optional[go.Figure] = None
    self.available_devices: ProviderAvailableDevices = dict()
    self.hardware_graphs: HardwareGraphs = dict()
analyzer_app(self) async

Opens the analyzer app with synthesis interactive results.

Returns:

Type Description
None

None.

Source code in classiq/analyzer/analyzer.py
async def analyzer_app_async(self) -> None:
    """Opens the analyzer app with synthesis interactive results.

    Returns:
        None.
    """
    result = await ApiWrapper.call_analyzer_app(self.circuit)
    webbrowser.open_new_tab(
        urljoin(
            client_ide_base_url(),
            circuit_page_uri(
                circuit_id=result.id, circuit_version=self.circuit.version
            ),
        )
    )
analyzer_app_async(self) async

Opens the analyzer app with synthesis interactive results.

Returns:

Type Description
None

None.

Source code in classiq/analyzer/analyzer.py
async def analyzer_app_async(self) -> None:
    """Opens the analyzer app with synthesis interactive results.

    Returns:
        None.
    """
    result = await ApiWrapper.call_analyzer_app(self.circuit)
    webbrowser.open_new_tab(
        urljoin(
            client_ide_base_url(),
            circuit_page_uri(
                circuit_id=result.id, circuit_version=self.circuit.version
            ),
        )
    )
get_available_devices(self, providers=None) async

Returns a dict of the available devices by provider. Only devices with a sufficient number of qubits are returned.

Args: providers (): List of providers (string or AnalyzerProviderVendor). If None, all available hardware is included.

Returns:

Type Description
available devices ()

dict of the available devices (Dict[str,List[str]]).

Source code in classiq/analyzer/analyzer.py
async def get_available_devices_async(
    self, providers: Optional[List[ProviderNameEnum]] = None
) -> Dict[ProviderNameEnum, List[DeviceName]]:
    """Returns dict of the available devices by the providers. only devices
    with sufficient number of qubits are returns

    Args: providers (): List of providers (string or `AnalyzerProviderVendor`).
    if None, the table include all the available hardware.

    Returns:
        available devices (): dict of the available devices (Dict[str,List[str]]).
    """
    if providers is None:
        providers = list(AnalyzerProviderVendor)
    await self.request_available_devices_async(providers=providers)
    return {
        provider: self._filter_devices_by_qubits_count(provider)
        for provider in providers
    }
get_available_devices_async(self, providers=None) async

Returns a dict of the available devices by provider. Only devices with a sufficient number of qubits are returned.

Args: providers (): List of providers (string or AnalyzerProviderVendor). If None, all available hardware is included.

Returns:

Type Description
available devices ()

dict of the available devices (Dict[str,List[str]]).

Source code in classiq/analyzer/analyzer.py
async def get_available_devices_async(
    self, providers: Optional[List[ProviderNameEnum]] = None
) -> Dict[ProviderNameEnum, List[DeviceName]]:
    """Returns dict of the available devices by the providers. only devices
    with sufficient number of qubits are returns

    Args: providers (): List of providers (string or `AnalyzerProviderVendor`).
    if None, the table include all the available hardware.

    Returns:
        available devices (): dict of the available devices (Dict[str,List[str]]).
    """
    if providers is None:
        providers = list(AnalyzerProviderVendor)
    await self.request_available_devices_async(providers=providers)
    return {
        provider: self._filter_devices_by_qubits_count(provider)
        for provider in providers
    }
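Example: a hedged sketch of querying available devices, assuming `analyzer` is an Analyzer instance; "IBM Quantum" is a placeholder provider name (real values come from AnalyzerProviderVendor).

devices = analyzer.get_available_devices(providers=["IBM Quantum"])  # provider name is a placeholder
for provider, names in devices.items():
    print(provider, names)  # only devices with enough qubits for the analyzed circuit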
get_gate_histogram(self) async

Creates a gate histogram of the analyzed circuit.

Returns:

Type Description
None

None.

Source code in classiq/analyzer/analyzer.py
async def get_gate_histogram_async(self) -> None:
    """create a gate histogram of the analysed circuit.

    Returns:
        None.
    """
    result = await ApiWrapper.call_gate_histogram_graphs(params=self._params)
    self.gate_histogram = _create_gate_histogram(
        result=result, num_qubits=self.circuit.data.width
    )
get_gate_histogram_async(self) async

Creates a gate histogram of the analyzed circuit.

Returns:

Type Description
None

None.

Source code in classiq/analyzer/analyzer.py
async def get_gate_histogram_async(self) -> None:
    """create a gate histogram of the analysed circuit.

    Returns:
        None.
    """
    result = await ApiWrapper.call_gate_histogram_graphs(params=self._params)
    self.gate_histogram = _create_gate_histogram(
        result=result, num_qubits=self.circuit.data.width
    )
get_hardware_comparison_table(self, providers=None, devices=None) async

Creates a comparison table of the circuit transpiled to different hardware. The comparison table includes the depth, multi-qubit gate count, and total gate count of each transpiled circuit.

Args: providers (): List of providers (string or AnalyzerProviderVendor). If None, the table includes all available hardware. devices (): List of devices (string). If None, the table includes all available devices of the selected providers. Returns: None.

Source code in classiq/analyzer/analyzer.py
async def get_hardware_comparison_table_async(
    self,
    providers: Optional[List[Union[str, AnalyzerProviderVendor]]] = None,
    devices: Optional[List[str]] = None,
) -> None:
    """create a comparison table between the transpiled circuits result on different hardware.
    The  comparison table included the depth, multi qubit gates count,and total gates count of the circuits.

    Args: providers (): List of providers (string or `AnalyzerProviderVendor`). if None, the table include all
    the available hardware.
    devices (): List of devices (string). if None, the table include all the available devices of the selected
    providers.
    Returns: None.
    """
    if providers is None:
        providers = list(AnalyzerProviderVendor)
    params = analysis_params.AnalysisHardwareListParams(
        qasm=self._params.qasm, providers=providers, devices=devices
    )
    result = await ApiWrapper.call_table_graphs_task(params=params)
    self.hardware_comparison_table = go.Figure(json.loads(result.details))
get_hardware_comparison_table_async(self, providers=None, devices=None) async

Creates a comparison table of the circuit transpiled to different hardware. The comparison table includes the depth, multi-qubit gate count, and total gate count of each transpiled circuit.

Args: providers (): List of providers (string or AnalyzerProviderVendor). If None, the table includes all available hardware. devices (): List of devices (string). If None, the table includes all available devices of the selected providers. Returns: None.

Source code in classiq/analyzer/analyzer.py
async def get_hardware_comparison_table_async(
    self,
    providers: Optional[List[Union[str, AnalyzerProviderVendor]]] = None,
    devices: Optional[List[str]] = None,
) -> None:
    """create a comparison table between the transpiled circuits result on different hardware.
    The  comparison table included the depth, multi qubit gates count,and total gates count of the circuits.

    Args: providers (): List of providers (string or `AnalyzerProviderVendor`). if None, the table include all
    the available hardware.
    devices (): List of devices (string). if None, the table include all the available devices of the selected
    providers.
    Returns: None.
    """
    if providers is None:
        providers = list(AnalyzerProviderVendor)
    params = analysis_params.AnalysisHardwareListParams(
        qasm=self._params.qasm, providers=providers, devices=devices
    )
    result = await ApiWrapper.call_table_graphs_task(params=params)
    self.hardware_comparison_table = go.Figure(json.loads(result.details))
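Example: a short sketch of building and showing the comparison table, assuming `analyzer` is an Analyzer instance. Leaving both arguments as None compares all available hardware.

analyzer.get_hardware_comparison_table()   # providers=None, devices=None -> all hardware
analyzer.plot_hardware_comparison_table()  # reuses the cached table created above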
get_heatmap(self) async

Creates a heatmap of the analyzed circuit.

Returns:

Type Description
None

None.

Source code in classiq/analyzer/analyzer.py
async def get_heatmap_async(self) -> None:
    """create a heatmap of the analysed circuit.

    Returns:
        None.
    """
    result = await ApiWrapper.call_heatmap_graphs(self._params)
    self.heatmap = _create_heatmap_graph(result, self.circuit.data.width)
get_heatmap_async(self) async

Creates a heatmap of the analyzed circuit.

Returns:

Type Description
None

None.

Source code in classiq/analyzer/analyzer.py
async def get_heatmap_async(self) -> None:
    """create a heatmap of the analysed circuit.

    Returns:
        None.
    """
    result = await ApiWrapper.call_heatmap_graphs(self._params)
    self.heatmap = _create_heatmap_graph(result, self.circuit.data.width)
get_qubits_connectivity(self) async

Creates a network connectivity graph of the analyzed circuit.

Returns:

Type Description
None

None.

Source code in classiq/analyzer/analyzer.py
async def get_qubits_connectivity_async(self) -> None:
    """create a network connectivity graph of the analysed circuit.

    Returns:
        None.
    """
    result = await ApiWrapper.call_qubits_connectivity_graphs_task(self._params)
    self.qc_graph = go.Figure(json.loads(result.details))
get_qubits_connectivity_async(self) async

Creates a network connectivity graph of the analyzed circuit.

Returns:

Type Description
None

None.

Source code in classiq/analyzer/analyzer.py
async def get_qubits_connectivity_async(self) -> None:
    """create a network connectivity graph of the analysed circuit.

    Returns:
        None.
    """
    result = await ApiWrapper.call_qubits_connectivity_graphs_task(self._params)
    self.qc_graph = go.Figure(json.loads(result.details))
hardware_aware_resynthesize(self, device, provider) async

Re-synthesizes the analyzed circuit using its original model and new backend preferences.

Parameters:

Name Type Description Default
provider

Provider company or cloud for the requested backend (string or AnalyzerProviderVendor).

required
device

Name of the requested backend"

required

Returns:

Type Description
circuit ()

The re-synthesized circuit (GeneratedCircuit).

Source code in classiq/analyzer/analyzer.py
async def hardware_aware_resynthesize_async(
    self, device: str, provider: Union[str, AnalyzerProviderVendor]
) -> generator_result.GeneratedCircuit:
    """resynthesize the analyzed circuit using its original model, and a new  backend preferences.

    Args:
        provider (): Provider company or cloud for the requested backend (string or `AnalyzerProviderVendor`).
        device (): Name of the requested backend"
    Returns:
        circuit (): resynthesize circuit (`GeneratedCircuit`).
    """

    update_preferences = self._validated_update_preferences(
        device=device, provider=provider
    )

    model = Model()
    model._model = self.circuit.model.copy(deep=True)  # type: ignore[union-attr]
    return await model.synthesize_async(preferences=update_preferences)
hardware_aware_resynthesize_async(self, device, provider) async

Re-synthesizes the analyzed circuit using its original model and new backend preferences.

Parameters:

Name Type Description Default
provider

Provider company or cloud for the requested backend (string or AnalyzerProviderVendor).

required
device

Name of the requested backend"

required

Returns:

Type Description
circuit ()

The re-synthesized circuit (GeneratedCircuit).

Source code in classiq/analyzer/analyzer.py
async def hardware_aware_resynthesize_async(
    self, device: str, provider: Union[str, AnalyzerProviderVendor]
) -> generator_result.GeneratedCircuit:
    """resynthesize the analyzed circuit using its original model, and a new  backend preferences.

    Args:
        provider (): Provider company or cloud for the requested backend (string or `AnalyzerProviderVendor`).
        device (): Name of the requested backend"
    Returns:
        circuit (): resynthesize circuit (`GeneratedCircuit`).
    """

    update_preferences = self._validated_update_preferences(
        device=device, provider=provider
    )

    model = Model()
    model._model = self.circuit.model.copy(deep=True)  # type: ignore[union-attr]
    return await model.synthesize_async(preferences=update_preferences)
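Example: a sketch of hardware-aware re-synthesis; the provider and device names below are placeholders, not guaranteed backend names.

new_circuit = analyzer.hardware_aware_resynthesize(
    provider="IBM Quantum",      # string or AnalyzerProviderVendor
    device="some_backend_name",  # placeholder device name
)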
optimized_hardware_resynthesize(self, comparison_property, providers=None, devices=None) async

Re-synthesizes the analyzed circuit using its original model and new backend preferences, targeting the device that best fits the selected comparison property.

Args: comparison_property (): The comparison property used to compare devices (string or ComparisonProperties). providers (): List of providers (string or AnalyzerProviderVendor). If None, the comparison includes all available hardware. devices (): List of devices (string). If None, the comparison includes all available devices of the selected providers. Returns: circuit (): The re-synthesized circuit (GeneratedCircuit).

Source code in classiq/analyzer/analyzer.py
async def optimized_hardware_resynthesize_async(
    self,
    comparison_property: Union[str, ComparisonProperties],
    providers: Optional[List[Union[str, AnalyzerProviderVendor]]] = None,
    devices: Optional[List[str]] = None,
) -> generator_result.GeneratedCircuit:
    """Re-synthesize the analyzed circuit using its original model, and a new backend preferences, which is the
     devices with the best fit to the selected comparison property.

    Args: comparison_property (): A comparison properties using to compare between the devices (string or
    `ComparisonProperties`).
    providers (): List of providers (string or `AnalyzerProviderVendor`). If None, the comparison include all the
    available hardware.
    devices (): List of devices (string). If None, the comparison include all the available devices of the selected
    providers.
    Returns: circuit (): resynthesize circuit (`GeneratedCircuit`).
    """
    optimized_device, optimized_provider = await self._get_optimized_hardware_async(
        providers=providers,
        devices=devices,
        comparison_property=comparison_property,
    )
    return await self.hardware_aware_resynthesize_async(
        provider=optimized_provider, device=optimized_device
    )
optimized_hardware_resynthesize_async(self, comparison_property, providers=None, devices=None) async

Re-synthesizes the analyzed circuit using its original model and new backend preferences, targeting the device that best fits the selected comparison property.

Args: comparison_property (): The comparison property used to compare devices (string or ComparisonProperties). providers (): List of providers (string or AnalyzerProviderVendor). If None, the comparison includes all available hardware. devices (): List of devices (string). If None, the comparison includes all available devices of the selected providers. Returns: circuit (): The re-synthesized circuit (GeneratedCircuit).

Source code in classiq/analyzer/analyzer.py
async def optimized_hardware_resynthesize_async(
    self,
    comparison_property: Union[str, ComparisonProperties],
    providers: Optional[List[Union[str, AnalyzerProviderVendor]]] = None,
    devices: Optional[List[str]] = None,
) -> generator_result.GeneratedCircuit:
    """Re-synthesize the analyzed circuit using its original model, and a new backend preferences, which is the
     devices with the best fit to the selected comparison property.

    Args: comparison_property (): A comparison properties using to compare between the devices (string or
    `ComparisonProperties`).
    providers (): List of providers (string or `AnalyzerProviderVendor`). If None, the comparison include all the
    available hardware.
    devices (): List of devices (string). If None, the comparison include all the available devices of the selected
    providers.
    Returns: circuit (): resynthesize circuit (`GeneratedCircuit`).
    """
    optimized_device, optimized_provider = await self._get_optimized_hardware_async(
        providers=providers,
        devices=devices,
        comparison_property=comparison_property,
    )
    return await self.hardware_aware_resynthesize_async(
        provider=optimized_provider, device=optimized_device
    )
plot_gate_histogram(self) async

Plots the circuit gate histogram. If it has not yet been created, it is created first.

Returns:

Type Description
None

None.

Source code in classiq/analyzer/analyzer.py
async def plot_gate_histogram_async(self) -> None:
    """plot the circuit gate histogram. if it has not been created it, it will create the graph.

    Returns:
        None.
    """
    if self.gate_histogram is None:
        await self.get_gate_histogram_async()
    self.gate_histogram.show()  # type: ignore[union-attr]
plot_gate_histogram_async(self) async

Plots the circuit gate histogram. If it has not yet been created, it is created first.

Returns:

Type Description
None

None.

Source code in classiq/analyzer/analyzer.py
async def plot_gate_histogram_async(self) -> None:
    """plot the circuit gate histogram. if it has not been created it, it will create the graph.

    Returns:
        None.
    """
    if self.gate_histogram is None:
        await self.get_gate_histogram_async()
    self.gate_histogram.show()  # type: ignore[union-attr]
plot_hardware_comparison_table(self, providers=None, devices=None) async

Plots the comparison table. If it has not yet been created, it is created first using all available hardware.

Returns:

Type Description
None

None.

Source code in classiq/analyzer/analyzer.py
async def plot_hardware_comparison_table_async(
    self,
    providers: Optional[List[Union[str, AnalyzerProviderVendor]]] = None,
    devices: Optional[List[str]] = None,
) -> None:
    """plot the comparison table. if it has not been created it, it first creates the table using all the
    available hardware.

    Returns:
        None.
    """
    await self._hardware_comparison_condition_async(
        providers=providers, devices=devices
    )
    self.hardware_comparison_table.show()  # type: ignore[union-attr]
plot_hardware_comparison_table_async(self, providers=None, devices=None) async

Plots the comparison table. If it has not yet been created, it is created first using all available hardware.

Returns:

Type Description
None

None.

Source code in classiq/analyzer/analyzer.py
async def plot_hardware_comparison_table_async(
    self,
    providers: Optional[List[Union[str, AnalyzerProviderVendor]]] = None,
    devices: Optional[List[str]] = None,
) -> None:
    """plot the comparison table. if it has not been created it, it first creates the table using all the
    available hardware.

    Returns:
        None.
    """
    await self._hardware_comparison_condition_async(
        providers=providers, devices=devices
    )
    self.hardware_comparison_table.show()  # type: ignore[union-attr]
plot_hardware_connectivity(self, provider=None, device=None) async

Plots the hardware connectivity graph. Requires the analyzer_sdk extra to be installed.

Parameters:

Name Type Description Default
provider

provider name (optional - string or AnalyzerProviderVendor).

None
device

device name (optional - string).

None

Returns:

Type Description
hardware_connectivity_graph ()

interactive graph.

Source code in classiq/analyzer/analyzer.py
async def plot_hardware_connectivity_async(
    self,
    provider: Optional[ProviderNameEnum] = None,
    device: Optional[DeviceName] = None,
) -> VBox:
    """plot the hardware_connectivity graph. It is required to required  install the
    analyzer_sdk extra.

    Args:
        provider (): provider name (optional - string or `AnalyzerProviderVendor`).
        device (): device name (optional - string).
    Returns:
     hardware_connectivity_graph (): interactive graph.
    """

    self._validate_analyzer_extra()
    interactive_hardware = InteractiveHardware(
        circuit=self.circuit,
        params=self._params,
        available_devices=self.available_devices,
        hardware_graphs=self.hardware_graphs,
    )
    await interactive_hardware.enable_interactivity_async()
    if provider is not None:
        interactive_hardware.providers_combobox.value = provider
        if device is not None:
            interactive_hardware.devices_combobox.value = device

    return interactive_hardware.show_interactive_graph()
plot_hardware_connectivity_async(self, provider=None, device=None) async

Plots the hardware connectivity graph. Requires the analyzer_sdk extra to be installed.

Parameters:

Name Type Description Default
provider

provider name (optional - string or AnalyzerProviderVendor).

None
device

device name (optional - string).

None

Returns:

Type Description
hardware_connectivity_graph ()

interactive graph.

Source code in classiq/analyzer/analyzer.py
async def plot_hardware_connectivity_async(
    self,
    provider: Optional[ProviderNameEnum] = None,
    device: Optional[DeviceName] = None,
) -> VBox:
    """plot the hardware_connectivity graph. It is required to required  install the
    analyzer_sdk extra.

    Args:
        provider (): provider name (optional - string or `AnalyzerProviderVendor`).
        device (): device name (optional - string).
    Returns:
     hardware_connectivity_graph (): interactive graph.
    """

    self._validate_analyzer_extra()
    interactive_hardware = InteractiveHardware(
        circuit=self.circuit,
        params=self._params,
        available_devices=self.available_devices,
        hardware_graphs=self.hardware_graphs,
    )
    await interactive_hardware.enable_interactivity_async()
    if provider is not None:
        interactive_hardware.providers_combobox.value = provider
        if device is not None:
            interactive_hardware.devices_combobox.value = device

    return interactive_hardware.show_interactive_graph()
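Example: a sketch for a Jupyter notebook with the analyzer_sdk extra installed; the provider and device strings are placeholders. The returned ipywidgets VBox renders when it is the last expression in a cell.

widget = analyzer.plot_hardware_connectivity(provider="IBM Quantum", device="some_backend_name")
widget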
plot_heatmap(self) async

Plots the circuit heatmap. If it has not yet been created, it is created first.

Returns:

Type Description
None

None.

Source code in classiq/analyzer/analyzer.py
async def plot_heatmap_async(self) -> None:
    """plot the circuit heatmap. if it has not been created it, it will create the graph.

    Returns:
        None.
    """
    if self.heatmap is None:
        await self.get_heatmap_async()
    self.heatmap.show()  # type: ignore[union-attr]
plot_heatmap_async(self) async

Plots the circuit heatmap. If it has not yet been created, it is created first.

Returns:

Type Description
None

None.

Source code in classiq/analyzer/analyzer.py
async def plot_heatmap_async(self) -> None:
    """plot the circuit heatmap. if it has not been created it, it will create the graph.

    Returns:
        None.
    """
    if self.heatmap is None:
        await self.get_heatmap_async()
    self.heatmap.show()  # type: ignore[union-attr]
plot_qubits_connectivity(self) async

Plots the connectivity graph. If it has not yet been created, it is created first.

Returns:

Type Description
None

None.

Source code in classiq/analyzer/analyzer.py
async def plot_qubits_connectivity_async(self) -> None:
    """plot the connectivity graph. if it has not been created it, it first creates the graph.

    Returns:
        None.
    """
    if self.qc_graph is None:
        await self.get_qubits_connectivity_async()
    self.qc_graph.show()  # type: ignore[union-attr]
plot_qubits_connectivity_async(self) async

Plots the connectivity graph. If it has not yet been created, it is created first.

Returns:

Type Description
None

None.

Source code in classiq/analyzer/analyzer.py
async def plot_qubits_connectivity_async(self) -> None:
    """plot the connectivity graph. if it has not been created it, it first creates the graph.

    Returns:
        None.
    """
    if self.qc_graph is None:
        await self.get_qubits_connectivity_async()
    self.qc_graph.show()  # type: ignore[union-attr]
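Example: the circuit-level plots follow the same pattern - a get_* call caches a Plotly figure and the matching plot_* call shows it, creating it first if needed (sketch, assuming `analyzer` is an Analyzer instance).

analyzer.plot_qubits_connectivity()
analyzer.plot_heatmap()
analyzer.plot_gate_histogram()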

rb

RBAnalysis

Source code in classiq/analyzer/rb.py
class RBAnalysis(metaclass=Asyncify):
    def __init__(self, experiments_data: List[AnalysisRBParams]):
        """Init self.

        Args:
            experiments_data: List of results from various RB experiments.
        """

        self.experiments_data = experiments_data
        self._total_results: pd.DataFrame = pd.DataFrame()

    async def _get_multiple_hardware_results_async(self) -> Dict[str, RbResults]:
        total_result: Dict[str, RbResults] = {}
        for batch in self.experiments_data:
            if len(batch.num_clifford) < 5:
                raise ClassiqAnalyzerError(
                    f"An experiment mush contain at least five sequences,"
                    f" this sequence is {len(batch.num_clifford)}"
                )
            rb_result = await ApiWrapper.call_rb_analysis_task(batch)
            total_result[batch.hardware] = rb_result
        return total_result

    @staticmethod
    def _get_df_indices(results) -> List[str]:
        temp_res = results.copy()
        _, rb_result_keys = temp_res.popitem()
        return list(rb_result_keys.__dict__.keys())

    async def show_multiple_hardware_data_async(self) -> pd.DataFrame:
        """Run the RB analysis.

        Returns:
            The RB result.
        """
        results = await self._get_multiple_hardware_results_async()
        indices = RBAnalysis._get_df_indices(results)
        result_df = pd.DataFrame(index=indices)
        for hardware, result in results.items():
            result_df[hardware] = result.__dict__.values()
        self._total_results = result_df
        return result_df

    def plot_multiple_hardware_results(self) -> go.Figure:
        """Plot Bar graph of the results.

        Returns:
            None.
        """
        df = self._total_results.loc[["mean_fidelity", "average_error"]].transpose()
        hardware = list(df.index)
        params = list(df.columns)
        data = []
        for param in params:
            data.append(go.Bar(name=param, x=hardware, y=df[param].values * 100))
        fig = go.Figure(data).update_layout(
            title="RB hardware comparison",
            barmode="group",
            yaxis=dict(title="Fidelity in %"),
            xaxis=dict(title="Hardware"),
        )
        return fig
__init__(self, experiments_data) special

Init self.

Parameters:

Name Type Description Default
experiments_data List[classiq.interface.analyzer.analysis_params.AnalysisRBParams]

List of results from various RB experiments.

required
Source code in classiq/analyzer/rb.py
def __init__(self, experiments_data: List[AnalysisRBParams]):
    """Init self.

    Args:
        experiments_data: List of results from various RB experiments.
    """

    self.experiments_data = experiments_data
    self._total_results: pd.DataFrame = pd.DataFrame()
plot_multiple_hardware_results(self)

Plots a bar graph of the results.

Returns:

Type Description
Figure

The Plotly bar-graph figure.

Source code in classiq/analyzer/rb.py
def plot_multiple_hardware_results(self) -> go.Figure:
    """Plot Bar graph of the results.

    Returns:
        None.
    """
    df = self._total_results.loc[["mean_fidelity", "average_error"]].transpose()
    hardware = list(df.index)
    params = list(df.columns)
    data = []
    for param in params:
        data.append(go.Bar(name=param, x=hardware, y=df[param].values * 100))
    fig = go.Figure(data).update_layout(
        title="RB hardware comparison",
        barmode="group",
        yaxis=dict(title="Fidelity in %"),
        xaxis=dict(title="Hardware"),
    )
    return fig
show_multiple_hardware_data(self) async

Run the RB analysis.

Returns:

Type Description
DataFrame

The RB result.

Source code in classiq/analyzer/rb.py
async def show_multiple_hardware_data_async(self) -> pd.DataFrame:
    """Run the RB analysis.

    Returns:
        The RB result.
    """
    results = await self._get_multiple_hardware_results_async()
    indices = RBAnalysis._get_df_indices(results)
    result_df = pd.DataFrame(index=indices)
    for hardware, result in results.items():
        result_df[hardware] = result.__dict__.values()
    self._total_results = result_df
    return result_df
show_multiple_hardware_data_async(self) async

Run the RB analysis.

Returns:

Type Description
DataFrame

The RB result.

Source code in classiq/analyzer/rb.py
async def show_multiple_hardware_data_async(self) -> pd.DataFrame:
    """Run the RB analysis.

    Returns:
        The RB result.
    """
    results = await self._get_multiple_hardware_results_async()
    indices = RBAnalysis._get_df_indices(results)
    result_df = pd.DataFrame(index=indices)
    for hardware, result in results.items():
        result_df[hardware] = result.__dict__.values()
    self._total_results = result_df
    return result_df
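Example: a sketch of comparing RB results from two hardware runs; `hw_a_params` and `hw_b_params` stand for pre-built AnalysisRBParams objects (their fields are not spelled out here), and the import path is an assumption.

from classiq.analyzer.rb import RBAnalysis  # import path assumed

rb = RBAnalysis(experiments_data=[hw_a_params, hw_b_params])
df = rb.show_multiple_hardware_data()      # DataFrame indexed by RB result fields
fig = rb.plot_multiple_hardware_results()  # grouped bar chart, fidelity in %
fig.show()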

applications special

qnn special

datasets special

datasets_utils
all_bits_to_one(n)

Return an integer of length n bits, where all the bits are 1

Source code in classiq/applications/qnn/datasets/datasets_utils.py
def all_bits_to_one(n: int) -> int:
    """
    Return an integer of length `n` bits, where all the bits are `1`
    """
    return (2**n) - 1
all_bits_to_zero(n)

Return an integer of length n bits, where all the bits are 0

Source code in classiq/applications/qnn/datasets/datasets_utils.py
def all_bits_to_zero(n: int) -> int:
    """
    Return an integer of length `n` bits, where all the bits are `0`
    """
    return 0
state_to_label(pure_state)

input: a Tensor of binary numbers (0 or 1) - the return value of a measurement. output: the probability (from that measurement) of measuring 0. In other words, |0> translates to a 100% chance of measuring |0>, so the return value is 1.0; |1> translates to a 0% chance of measuring |0>, so the return value is 0.0.

Source code in classiq/applications/qnn/datasets/datasets_utils.py
def state_to_label(pure_state: Tensor) -> Tensor:
    """
    input: a `Tensor` of binary numbers (0 or 1) - the return value of a measurement
    output: probability (from that measurement) of measuring 0
    (in other words,
        |0> translates to 100% chance for measuring |0> ==> return value is 1.0
        |1> translates to   0% chance for measuring |0> ==> return value is 0.0
    )
    """
    # |0> means 100% chance to get |0> ==> 100% == 1.0
    # |1> means   0% chance to get |0> ==>   0% == 0.0

    # This line basically does `1 - bool(pure_state)`
    return 1 - pure_state.bool().int()
state_to_weights(pure_state)

input: a Tensor of binary numbers (0 or 1). output: the required angle of rotation for Rx (in other words, |0> translates to no rotation, and |1> translates to pi).

Source code in classiq/applications/qnn/datasets/datasets_utils.py
def state_to_weights(pure_state: Tensor) -> Tensor:
    """
    input: a `Tensor` of binary numbers (0 or 1)
    output: the required angle of rotation for `Rx`
    (in other words, |0> translates to no rotation, and |1> translates to `pi`)
    """
    # |0> requires a rotation by 0
    # |1> requires a rotation by pi
    return pure_state.bool().int() * np.pi
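Example: a small sketch of what these helpers compute, using torch directly with the same expressions as the source above.

import numpy as np
import torch

pure_state = torch.tensor([0, 1, 1, 0])
print((2**4) - 1)                       # all_bits_to_one(4) == 15
print(1 - pure_state.bool().int())      # state_to_label:   |0> -> 1, |1> -> 0
print(pure_state.bool().int() * np.pi)  # state_to_weights: |0> -> 0, |1> -> pi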

qlayer

QLayer (Module)
Source code in classiq/applications/qnn/qlayer.py
class QLayer(nn.Module):
    def __init__(
        self,
        circuit: Circuit,
        execute: ExecuteFunciton,
        post_process: PostProcessFunction,
        # Optional parameters:
        head_start: Union[float, Tensor, None] = None,
        # Experimental parameters:
        calc_num_out_features: CalcNumOutFeatures = calc_num_out_features_single_output,
    ):
        validate_circuit(circuit)

        super().__init__()

        self._execute = execute
        self._post_process = post_process
        self._head_start = head_start

        self.circuit = circuit

        weights, _ = extract_parameters(circuit)
        self.in_features = len(weights)
        self.out_features = calc_num_out_features(circuit)

        self._initialize_parameters()

    def _initialize_parameters(self) -> None:
        shape = (self.out_features, self.in_features)

        if self._head_start is None:
            value = torch.rand(shape)
        elif isinstance(self._head_start, (float, int)):
            value = torch.zeros(shape) + self._head_start
        elif isinstance(self._head_start, Tensor):
            value = self._head_start.clone()
        else:
            raise ClassiqQNNError(
                f"Unsupported feature - head_start of type {type(self._head_start)}"
            )

        self.weight = Parameter(value)

    def forward(self, x: Tensor) -> Tensor:
        return QLayerFunction.apply(
            x, self.weight, self.circuit, self._execute, self._post_process
        )
forward(self, x)

Defines the computation performed at every call.

Should be overridden by all subclasses.

.. note:: Although the recipe for forward pass needs to be defined within this function, one should call the :class:Module instance afterwards instead of this since the former takes care of running the registered hooks while the latter silently ignores them.

Source code in classiq/applications/qnn/qlayer.py
def forward(self, x: Tensor) -> Tensor:
    return QLayerFunction.apply(
        x, self.weight, self.circuit, self._execute, self._post_process
    )
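Example: a sketch of wiring a QLayer into a standard torch model; `my_circuit`, `my_execute` and `my_post_process` are placeholders matching the types described for QLayerFunction below.

import torch
from torch import nn

qlayer = QLayer(my_circuit, my_execute, my_post_process)
net = nn.Sequential(qlayer, nn.Linear(qlayer.out_features, 2))
out = net(torch.rand(8, qlayer.in_features))  # batch of 8 samples -> shape (8, 2)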
QLayerFunction (Function)
Source code in classiq/applications/qnn/qlayer.py
class QLayerFunction(torch.autograd.Function):
    @staticmethod
    def forward(  # type: ignore[override]
        ctx,
        inputs: Tensor,
        weights: Tensor,
        circuit: Circuit,
        execute: ExecuteFunciton,
        post_process: PostProcessFunction,
    ) -> Tensor:
        """
        This function receives:
            inputs: a 2D Tensor of floats - (batch_size, in_features)
            weights: a 2D Tensor of floats - (out_features, num_weights)
            circuit: a `GeneratedCircuit` object
            execute: a function taking a `GeneratedCircuit` and `MultipleArguments`
                and returning `MultipleExecutionDetails`
            post_process: a function taking a single `ExecutionDetails`
                and returning a `Tensor`

        """
        validate_circuit(circuit)

        # save for backward
        ctx.save_for_backward(inputs, weights)
        ctx.circuit = circuit
        ctx.execute = execute
        ctx.post_process = post_process
        ctx.quantum_gradient = SimpleQuantumGradient(circuit, execute, post_process)

        ctx.batch_size, ctx.num_in_features = inputs.shape
        ctx.num_out_features, ctx.num_weights = weights.shape

        # Todo: avoid computing `_get_extracted_parameters` on every `forward`
        extracted_parameters = extract_parameters(circuit)

        # Todo: avoid defining `convert_tensors_to_arguments` on every `forward`
        def convert_tensors_to_arguments(
            inputs_: Tensor, weights_: Tensor
        ) -> MultipleArguments:
            arguments = map_parameters(
                extracted_parameters,
                inputs_,
                weights_,
            )
            return (arguments,)

        return iter_inputs_weights(
            inputs,
            weights,
            convert_tensors_to_arguments,
            functools.partial(execute, circuit),
            post_process,
        )

    @staticmethod
    def backward(  # type: ignore[override]
        ctx, grad_output: Tensor
    ) -> Tuple[Optional[Tensor], Optional[Tensor], None, None, None]:
        """
        grad_output: Tensor
            is of shape (ctx.batch_size, ctx.num_out_features)
        """
        inputs, weights = ctx.saved_tensors

        grad_weights = grad_inputs = None
        grad_circuit = grad_execute = grad_post_process = None

        if ctx.needs_input_grad[1]:
            grad_weights = ctx.quantum_gradient.gradient_weights(inputs, weights)
            grad_weights = einsum_weigths(grad_output, grad_weights)

        if ctx.needs_input_grad[0]:
            grad_inputs = ctx.quantum_gradient.gradient_inputs(inputs, weights)
            grad_inputs = einsum_inputs(grad_output, grad_inputs)

        if any(ctx.needs_input_grad[i] for i in (2, 3, 4)):
            raise ClassiqTorchError(
                f"Grad required for unknown type: {ctx.needs_input_grad}"
            )

        return grad_inputs, grad_weights, grad_circuit, grad_execute, grad_post_process
backward(ctx, grad_output) staticmethod

grad_output: a Tensor of shape (ctx.batch_size, ctx.num_out_features)

Source code in classiq/applications/qnn/qlayer.py
@staticmethod
def backward(  # type: ignore[override]
    ctx, grad_output: Tensor
) -> Tuple[Optional[Tensor], Optional[Tensor], None, None, None]:
    """
    grad_output: Tensor
        is of shape (ctx.batch_size, ctx.num_out_features)
    """
    inputs, weights = ctx.saved_tensors

    grad_weights = grad_inputs = None
    grad_circuit = grad_execute = grad_post_process = None

    if ctx.needs_input_grad[1]:
        grad_weights = ctx.quantum_gradient.gradient_weights(inputs, weights)
        grad_weights = einsum_weigths(grad_output, grad_weights)

    if ctx.needs_input_grad[0]:
        grad_inputs = ctx.quantum_gradient.gradient_inputs(inputs, weights)
        grad_inputs = einsum_inputs(grad_output, grad_inputs)

    if any(ctx.needs_input_grad[i] for i in (2, 3, 4)):
        raise ClassiqTorchError(
            f"Grad required for unknown type: {ctx.needs_input_grad}"
        )

    return grad_inputs, grad_weights, grad_circuit, grad_execute, grad_post_process
forward(ctx, inputs, weights, circuit, execute, post_process) staticmethod

This function receives: inputs: a 2D Tensor of floats - (batch_size, in_features); weights: a 2D Tensor of floats - (out_features, num_weights); circuit: a GeneratedCircuit object; execute: a function taking a GeneratedCircuit and MultipleArguments and returning MultipleExecutionDetails; post_process: a function taking a single ExecutionDetails and returning a Tensor.

Source code in classiq/applications/qnn/qlayer.py
@staticmethod
def forward(  # type: ignore[override]
    ctx,
    inputs: Tensor,
    weights: Tensor,
    circuit: Circuit,
    execute: ExecuteFunciton,
    post_process: PostProcessFunction,
) -> Tensor:
    """
    This function receives:
        inputs: a 2D Tensor of floats - (batch_size, in_features)
        weights: a 2D Tensor of floats - (out_features, num_weights)
        circuit: a `GeneratedCircuit` object
        execute: a function taking a `GeneratedCircuit` and `MultipleArguments`
            and returning `MultipleExecutionDetails`
        post_process: a function taking a single `ExecutionDetails`
            and returning a `Tensor`

    """
    validate_circuit(circuit)

    # save for backward
    ctx.save_for_backward(inputs, weights)
    ctx.circuit = circuit
    ctx.execute = execute
    ctx.post_process = post_process
    ctx.quantum_gradient = SimpleQuantumGradient(circuit, execute, post_process)

    ctx.batch_size, ctx.num_in_features = inputs.shape
    ctx.num_out_features, ctx.num_weights = weights.shape

    # Todo: avoid computing `_get_extracted_parameters` on every `forward`
    extracted_parameters = extract_parameters(circuit)

    # Todo: avoid defining `convert_tensors_to_arguments` on every `forward`
    def convert_tensors_to_arguments(
        inputs_: Tensor, weights_: Tensor
    ) -> MultipleArguments:
        arguments = map_parameters(
            extracted_parameters,
            inputs_,
            weights_,
        )
        return (arguments,)

    return iter_inputs_weights(
        inputs,
        weights,
        convert_tensors_to_arguments,
        functools.partial(execute, circuit),
        post_process,
    )

executor

Executor module, implementing facilities for executing quantum programs using the Classiq platform.

SINGLE_ARGUMENTS_ERROR_MESSAGE

positional arguments, keyword arguments or as a quantum_program.

Executor

Executor is the entry point for executing quantum programs on multiple quantum hardware vendors.
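Example: a hedged sketch of batch execution with explicit preferences; the `num_shots` keyword and the synchronous `batch_execute_quantum_program` wrapper (generated by the Asyncify metaclass from the *_async method below) are assumptions based on the docstrings, and import paths may differ between SDK versions.

from classiq import Executor  # import path assumed

executor = Executor(num_shots=1000)  # kwargs are forwarded to ExecutionPreferences
programs_and_results = executor.batch_execute_quantum_program([program_a, program_b])
for program, result in programs_and_results:
    print(result)  # an execution result, or the raised exception for that program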

Source code in classiq/executor.py
class Executor(metaclass=Asyncify):
    """Executor is the entry point for executing quantum programs on multiple quantum hardware vendors."""

    def __init__(
        self, preferences: Optional[ExecutionPreferences] = None, **kwargs
    ) -> None:
        """Init self.

        Args:
            preferences (): Execution preferences, such as number of shots.
        """
        self._preferences = preferences or ExecutionPreferences(**kwargs)

    @property
    def preferences(self) -> ExecutionPreferences:
        return self._preferences

    def _create_payload(
        self, payload: Union[ExecutionPayloads, dict]
    ) -> ExecutionRequest:
        return ExecutionRequest(
            preferences=self._preferences,
            execution_payload=payload,
        )

    @staticmethod
    def _combine_arguments(
        arguments_list: MultipleArguments,
        arguments: MultipleArguments,
        arguments_from_quantum_program: MultipleArguments,
        is_assert_multiple_definitions: bool = False,
    ) -> MultipleArguments:
        # Allow `arguments` to be a single dict, for backwards compatibility
        arguments_as_tuple = (arguments,) if isinstance(arguments, dict) else arguments
        # Allow a single positional arguments which is a tuple of arguments
        #   (This goes against mypy, since it's parsing `arguments_list` as `Tuple[Tuple[dict]]`, whereas mypy expects `Tuple[dict]`)
        if len(arguments_list) == 1 and isinstance(arguments_list[0], tuple):  # type: ignore[unreachable]
            arguments_list = arguments_list[0]  # type: ignore[unreachable]

        if (
            is_assert_multiple_definitions
            and sum(
                [
                    bool(arguments_list),
                    bool(arguments_as_tuple),
                    bool(arguments_from_quantum_program),
                ]
            )
            > 1
        ):
            raise ClassiqExecutionError(SINGLE_ARGUMENTS_ERROR_MESSAGE)

        return (
            arguments_list or arguments_as_tuple or arguments_from_quantum_program or ()
        )

    def _pre_process_quantum_program_request(
        self,
        quantum_program_like: QuantumProgramLike,
        *arguments_list: Arguments,
        arguments: MultipleArguments = (),
        initial_values: Optional[InitialConditions] = None,
    ) -> ExecutionRequest:
        quantum_program = _convert_to_quantum_program(
            quantum_program_like, initial_values
        )

        quantum_program.arguments = self._combine_arguments(
            arguments_list,
            arguments,
            quantum_program.arguments,
            is_assert_multiple_definitions=True,
        )

        return self._create_payload(quantum_program.dict())

    def _post_process_quantum_program_request(
        self,
        result: MultipleExecutionDetails,
        request: ExecutionRequest,
        arguments_list: MultipleArguments,
        arguments: MultipleArguments,
    ) -> Union[ExecutionDetails, MultipleExecutionDetails]:
        request.execution_payload = cast(
            QuantumProgramExecution, request.execution_payload
        )
        output_qubits_map = request.execution_payload.output_qubits_map
        for res in result.details:
            res.output_qubits_map = output_qubits_map

        if self._should_return_single_item(
            request.execution_payload, result, arguments_list, arguments
        ):
            return result[0]
        else:
            return result

    def _should_return_single_item(
        self,
        execution_payload: QuantumProgramExecution,
        result: MultipleExecutionDetails,
        arguments_list: MultipleArguments,
        arguments: MultipleArguments,
    ) -> bool:
        is_passed_as_single_arguments = (
            len(arguments_list) == 1 and not arguments
        ) or (isinstance(arguments, dict))

        is_no_arguments_at_all = not self._combine_arguments(
            arguments_list, arguments, execution_payload.arguments
        )

        should_return_single_item = len(result.details) == 1 and (
            is_no_arguments_at_all or is_passed_as_single_arguments
        )
        return should_return_single_item

    async def _execute_quantum_program(
        self,
        quantum_program_like: QuantumProgramLike,
        *arguments_list: Arguments,
        arguments: MultipleArguments = (),
        initial_values: Optional[InitialConditions] = None,
    ) -> Union[ExecutionDetails, MultipleExecutionDetails]:
        request = self._pre_process_quantum_program_request(
            quantum_program_like,
            *arguments_list,
            arguments=arguments,
            initial_values=initial_values,
        )

        result = await ApiWrapper.call_execute_quantum_program(request=request)

        return self._post_process_quantum_program_request(
            result,
            request,
            arguments_list,
            arguments,
        )

    async def batch_execute_quantum_program_async(
        self, quantum_programs: Collection[QuantumProgram]
    ) -> List[ProgramAndResult]:
        results = await asyncio.gather(
            *map(self._execute_quantum_program, quantum_programs),
            return_exceptions=True,
        )
        return list(zip(quantum_programs, results))

    async def _execute_amplitude_estimation(
        self,
        quantum_program_like: QuantumProgramLike,
    ) -> ExecutionDetails:
        quantum_base_program = _convert_to_quantum_base_program(quantum_program_like)

        request = self._create_payload(
            execution_request.AmplitudeEstimationExecution(
                **quantum_base_program.dict()
            )
        )

        return await ApiWrapper.call_execute_amplitude_estimation(request=request)

    async def _execute_operators_estimation(
        self, operators_estimation: OperatorsEstimation
    ) -> EstimationResult:
        request = self._create_payload(
            execution_request.EstimateOperatorsExecution.parse_obj(operators_estimation)
        )

        return await ApiWrapper.call_execute_estimate(request)

    async def _execute_hamiltonian_minimization(
        self,
        hamiltonian_minimization_problem: HamiltonianMinimizationProblem,
    ) -> VQESolverResult:
        payload = execution_request.HamiltonianMinimizationProblemExecution(
            **hamiltonian_minimization_problem.dict()
        )
        request = ExecutionRequest(
            preferences=self._preferences,
            execution_payload=payload,
        )
        return await ApiWrapper.call_execute_vqe(request=request)

    @staticmethod
    def _extract_special_execution_params(
        generated_circuit: GeneratedCircuit,
    ) -> Optional[SpecialExecutionParams]:
        if not generated_circuit.model:
            return None
        non_identity_params = [
            call.function_params
            for call in generated_circuit.model.logic_flow
            if not isinstance(call.function_params, identity.Identity)
        ]
        if len(non_identity_params) != 1:
            return None
        params = non_identity_params[0]
        return params if type(params) in _SPECIAL_EXECUTION_METHODS else None  # type: ignore[return-value]

    async def _execute_special_params(
        self, generation_result: GeneratedCircuit
    ) -> SpecialExecutionResult:
        special_params = self._extract_special_execution_params(generation_result)
        assert (
            special_params is not None
        )  # this line is here for mypy, since we're sure
        # to enter this function if this is not None
        api = _SPECIAL_EXECUTION_METHODS[type(special_params)]

        request = self._create_payload(
            execution_request.GeneratedCircuitExecution(**generation_result.dict())
        )

        return await api(request)

    async def _execute_with_qctrl_optimization(
        self,
        quantum_program_like: QuantumProgramLike,
        *arguments_list: Arguments,
        arguments: MultipleArguments = (),
        initial_values: Optional[InitialConditions] = None,
    ) -> Union[ExecutionDetails, MultipleExecutionDetails]:
        if sys.version_info < (3, 8):
            raise ClassiqExecutionError("Cannot run QCtrl with python < 3.8")
        else:
            from classiq import qctrl_execution_tools

            self.preferences.backend_preferences.qctrl_preferences = (
                await qctrl_execution_tools.validate_qctrl(self.preferences)
            )
            return await self._execute_quantum_program(
                quantum_program_like, *arguments_list
            )

    async def execute_async(
        self,
        execution_payload: Union[
            QuantumProgramLike, HamiltonianMinimizationProblem, OperatorsEstimation
        ],
        *args,
        **kwargs,
    ) -> Union[
        VQESolverResult,
        SpecialExecutionResult,
        ExecutionDetails,
        MultipleExecutionDetails,
        EstimationResult,
    ]:
        method: Callable

        if isinstance(execution_payload, HamiltonianMinimizationProblem):
            method = self._execute_hamiltonian_minimization
        elif isinstance(execution_payload, OperatorsEstimation):
            method = self._execute_operators_estimation
        elif (
            isinstance(execution_payload, GeneratedCircuit)
            and self._extract_special_execution_params(execution_payload) is not None
        ):
            method = self._execute_special_params
        elif self._preferences.amplitude_estimation is not None:
            method = self._execute_amplitude_estimation
        elif self.preferences.backend_preferences.qctrl_preferences.use_qctrl:
            method = self._execute_with_qctrl_optimization
        else:
            method = self._execute_quantum_program

        return await method(execution_payload, *args, **kwargs)

__init__(self, preferences=None, **kwargs) special

Init self.

Parameters:

Name         Type                            Description                                       Default
preferences  Optional[ExecutionPreferences]  Execution preferences, such as number of shots.   None
Source code in classiq/executor.py
def __init__(
    self, preferences: Optional[ExecutionPreferences] = None, **kwargs
) -> None:
    """Init self.

    Args:
        preferences (): Execution preferences, such as number of shots.
    """
    self._preferences = preferences or ExecutionPreferences(**kwargs)
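
For orientation, here is a minimal usage sketch (not part of the generated reference). It assumes `circuit` is a GeneratedCircuit produced by the synthesis step, and that ExecutionPreferences exposes a shot-count field named num_shots; adjust names to your installed SDK version.

from classiq.executor import Executor

# `circuit` is assumed to be a GeneratedCircuit produced earlier by synthesis.
# Keyword arguments are forwarded to ExecutionPreferences (see __init__ above);
# the field name `num_shots` is an assumption.
executor = Executor(num_shots=1000)
result = executor.execute(circuit)  # sync wrapper generated by the Asyncify metaclass
print(result)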

batch_execute_multiple_backends(preferences_template, backend_preferences, quantum_programs) async

Execute all the provided quantum programs (n) on all the provided backends (m). In total, m * n executions. The return value is a list of the following tuples:

  • An element from backend_preferences
  • An element from quantum_programs
  • The execution result of the quantum program on the backend. If the execution failed, the value is an exception.

The length of the list is m * n.

The preferences_template argument is used to supplement all other preferences.

The code is equivalent to:

for backend in backend_preferences:
    for program in quantum_programs:
        preferences = preferences_template.copy()
        preferences.backend_preferences = backend
        Executor(preferences).execute(program)

Source code in classiq/executor.py
async def batch_execute_multiple_backends_async(
    preferences_template: ExecutionPreferences,
    backend_preferences: Sequence[BackendPreferencesTypes],
    quantum_programs: Collection[QuantumProgram],
) -> List[BackendPreferencesProgramAndResult]:
    """
    Execute all the provided quantum programs (n) on all the provided backends (m).
    In total, m * n executions.
    The return value is a list of the following tuples:

    - An element from `backend_preferences`
    - An element from `quantum_programs`
    - The execution result of the quantum program on the backend. If the execution failed,
      the value is an exception.

    The length of the list is m * n.

    The `preferences_template` argument is used to supplement all other preferences.

    The code is equivalent to:
    ```
    for backend in backend_preferences:
        for program in quantum_programs:
            preferences = preferences_template.copy()
            preferences.backend_preferences = backend
            Executor(preferences).execute(program)
    ```
    """
    executors = [
        Executor(
            preferences=preferences_template.copy(
                update={"backend_preferences": backend}
            )
        )
        for backend in backend_preferences
    ]
    results = await asyncio.gather(
        *(
            executor.batch_execute_quantum_program_async(quantum_programs)
            for executor in executors
        ),
        return_exceptions=True,
    )

    def map_return_value(
        executor: Executor,
        result: Union[List[ProgramAndResult], BaseException],
    ) -> Iterable[BackendPreferencesProgramAndResult]:
        nonlocal quantum_programs
        preferences = executor.preferences.backend_preferences
        if isinstance(result, BaseException):
            return ((preferences, program, result) for program in quantum_programs)
        else:
            return (
                (preferences, program, single_result)
                for program, single_result in result
            )

    return list(
        itertools.chain.from_iterable(
            map_return_value(executor, result)
            for executor, result in zip(executors, results)
        )
    )

batch_execute_multiple_backends_async(preferences_template, backend_preferences, quantum_programs) async

Execute all the provided quantum programs (n) on all the provided backends (m). In total, m * n executions. The return value is a list of the following tuples:

  • An element from backend_preferences
  • An element from quantum_programs
  • The execution result of the quantum program on the backend. If the execution failed, the value is an exception.

The length of the list is m * n.

The preferences_template argument is used to supplement all other preferences.

The code is equivalent to:

for backend in backend_preferences:
    for program in quantum_programs:
        preferences = preferences_template.copy()
        preferences.backend_preferences = backend
        Executor(preferences).execute(program)

Source code in classiq/executor.py
async def batch_execute_multiple_backends_async(
    preferences_template: ExecutionPreferences,
    backend_preferences: Sequence[BackendPreferencesTypes],
    quantum_programs: Collection[QuantumProgram],
) -> List[BackendPreferencesProgramAndResult]:
    """
    Execute all the provided quantum programs (n) on all the provided backends (m).
    In total, m * n executions.
    The return value is a list of the following tuples:

    - An element from `backend_preferences`
    - An element from `quantum_programs`
    - The execution result of the quantum program on the backend. If the execution failed,
      the value is an exception.

    The length of the list is m * n.

    The `preferences_template` argument is used to supplement all other preferences.

    The code is equivalent to:
    ```
    for backend in backend_preferences:
        for program in quantum_programs:
            preferences = preferences_template.copy()
            preferences.backend_preferences = backend
            Executor(preferences).execute(program)
    ```
    """
    executors = [
        Executor(
            preferences=preferences_template.copy(
                update={"backend_preferences": backend}
            )
        )
        for backend in backend_preferences
    ]
    results = await asyncio.gather(
        *(
            executor.batch_execute_quantum_program_async(quantum_programs)
            for executor in executors
        ),
        return_exceptions=True,
    )

    def map_return_value(
        executor: Executor,
        result: Union[List[ProgramAndResult], BaseException],
    ) -> Iterable[BackendPreferencesProgramAndResult]:
        nonlocal quantum_programs
        preferences = executor.preferences.backend_preferences
        if isinstance(result, BaseException):
            return ((preferences, program, result) for program in quantum_programs)
        else:
            return (
                (preferences, program, single_result)
                for program, single_result in result
            )

    return list(
        itertools.chain.from_iterable(
            map_return_value(executor, result)
            for executor, result in zip(executors, results)
        )
    )
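
A usage sketch (not part of the generated reference), assuming `program_1` and `program_2` are QuantumProgram objects prepared elsewhere, that ExecutionPreferences is re-exported from the top-level classiq package, and that it exposes a num_shots field:

from classiq import ExecutionPreferences  # assumed top-level re-export
from classiq.executor import batch_execute_multiple_backends
from classiq.interface.backend.backend_preferences import IBMBackendPreferences

# One preferences template, two backends, two programs -> four executions.
template = ExecutionPreferences(num_shots=1000)  # field name is an assumption
backends = [
    IBMBackendPreferences(backend_name="aer_simulator"),
    IBMBackendPreferences(backend_name="aer_simulator_statevector"),
]
for backend, program, result in batch_execute_multiple_backends(
    template, backends, [program_1, program_2]
):
    print(backend.backend_name, result)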

interface special

analyzer special

analysis_params

AnalysisComparisonParams (BaseModel) pydantic-model
Source code in classiq/interface/analyzer/analysis_params.py
class AnalysisComparisonParams(pydantic.BaseModel):
    property: ComparisonProperties = pydantic.Field(
        default=...,
        description="The comparison property used to select the best devices",
    )
property: ComparisonProperties pydantic-field required

The comparison property used to select the best devices
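
For illustration, a construction sketch selecting the property by which devices are compared (ComparisonProperties is documented below in the same module):

from classiq.interface.analyzer.analysis_params import (
    AnalysisComparisonParams,
    ComparisonProperties,
)

# Rank devices by circuit depth; MULTI_QUBIT_GATE_COUNT and TOTAL_GATE_COUNT are the alternatives.
params = AnalysisComparisonParams(property=ComparisonProperties.DEPTH)
print(params.property.value)  # "depth"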

AnalysisOptionalDevicesParams (HardwareListParams) pydantic-model
Source code in classiq/interface/analyzer/analysis_params.py
class AnalysisOptionalDevicesParams(HardwareListParams):
    qubit_count: int = pydantic.Field(
        default=..., description="number of qubits in the data"
    )
qubit_count: int pydantic-field required

number of qubits in the data

ComparisonProperties (str, Enum)

An enumeration.

Source code in classiq/interface/analyzer/analysis_params.py
class ComparisonProperties(str, Enum):
    DEPTH = "depth"
    MULTI_QUBIT_GATE_COUNT = "multi_qubit_gate_count"
    TOTAL_GATE_COUNT = "total_gate_count"
HardwareListParams (BaseModel) pydantic-model
Source code in classiq/interface/analyzer/analysis_params.py
class HardwareListParams(pydantic.BaseModel):
    devices: Optional[List[PydanticNonEmptyString]] = pydantic.Field(
        default=None, description="Devices"
    )
    providers: List[AnalyzerProviderVendor]

    @pydantic.validator("providers", always=True)
    def set_default_providers(cls, providers: Optional[List[AnalyzerProviderVendor]]):
        if providers is None:
            providers = list(AnalyzerProviderVendor)
        return providers
devices: List[classiq.interface.helpers.custom_pydantic_types.ConstrainedStrValue] pydantic-field

Devices

HardwareParams (BaseModel) pydantic-model
Source code in classiq/interface/analyzer/analysis_params.py
class HardwareParams(pydantic.BaseModel):
    device: PydanticNonEmptyString = pydantic.Field(default=None, description="Devices")
    provider: AnalyzerProviderVendor
device: ConstrainedStrValue pydantic-field

Devices

cytoscape_graph

CytoScapeEdge (BaseModel) pydantic-model
Source code in classiq/interface/analyzer/cytoscape_graph.py
class CytoScapeEdge(pydantic.BaseModel):
    data: CytoScapeEdgeData = pydantic.Field(
        default=..., description="Edge's Data, mainly the source and target of the Edge"
    )
data: CytoScapeEdgeData pydantic-field required

Edge's Data, mainly the source and target of the Edge

CytoScapeEdgeData (BaseModel) pydantic-model
Source code in classiq/interface/analyzer/cytoscape_graph.py
class CytoScapeEdgeData(pydantic.BaseModel):
    source: str = pydantic.Field(
        default=..., description="the Id of the Node that is the Source of the edge"
    )
    target: str = pydantic.Field(
        default=..., description="the Id of the Node that is the Target the edge"
    )
source: str pydantic-field required

the Id of the Node that is the Source of the edge

target: str pydantic-field required

the Id of the Node that is the Target of the edge

CytoScapeGraph (BaseModel) pydantic-model
Source code in classiq/interface/analyzer/cytoscape_graph.py
class CytoScapeGraph(pydantic.BaseModel):
    nodes: List[CytoScapeNode] = pydantic.Field(
        default_factory=list,
        description="Nodes of the Graph",
    )
    edges: List[CytoScapeEdge] = pydantic.Field(
        default_factory=list,
        description="Edges of the Graph",
    )
edges: List[classiq.interface.analyzer.cytoscape_graph.CytoScapeEdge] pydantic-field

Edges of the Graph

nodes: List[classiq.interface.analyzer.cytoscape_graph.CytoScapeNode] pydantic-field

Nodes of the Graph

CytoScapeNode (BaseModel) pydantic-model
Source code in classiq/interface/analyzer/cytoscape_graph.py
class CytoScapeNode(pydantic.BaseModel):
    data: Dict[str, Any] = pydantic.Field(
        default=...,
        description="Data of the Node, such as label, and color, can be of free form",
    )
    position: Optional[CytoScapePosition] = pydantic.Field(
        default=..., description="Position of the Node to be rendered in Cytocape"
    )
data: Dict[str, Any] pydantic-field required

Data of the Node, such as label and color; can be of free form

position: CytoScapePosition pydantic-field required

Position of the Node to be rendered in Cytoscape

CytoScapePosition (BaseModel) pydantic-model
Source code in classiq/interface/analyzer/cytoscape_graph.py
class CytoScapePosition(pydantic.BaseModel):
    x: int = pydantic.Field(
        default=..., description="X coordinate in the Cytoscape View"
    )
    y: int = pydantic.Field(
        default=..., description="Y coordinate in the Cytoscape View"
    )
x: int pydantic-field required

X coordinate in the Cytoscape View

y: int pydantic-field required

Y coordinate in the Cytoscape View

HardwareConnectivityGraphResult (VersionedModel) pydantic-model
Source code in classiq/interface/analyzer/cytoscape_graph.py
class HardwareConnectivityGraphResult(VersionedModel):
    graph: Optional[CytoScapeGraph] = pydantic.Field(
        default=...,
        description="The Cytoscape graph in the desired Structure for the FE",
    )
    error: str = pydantic.Field(
        default="",
        description="Any errors encountered while generating the graph",
    )
error: str pydantic-field

Any errors encountered while generating the graph

graph: CytoScapeGraph pydantic-field required

The Cytoscape graph in the desired structure for the front end

result

Analysis (VersionedModel) pydantic-model
Source code in classiq/interface/analyzer/result.py
class Analysis(VersionedModel):
    input_properties: QuantumCircuitProperties = pydantic.Field(
        default=..., description="Input circuit properties"
    )
    native_properties: NativeQuantumCircuitProperties = pydantic.Field(
        default=..., description="Transpiled circuit properties"
    )
    pattern_analysis: Optional[PatternAnalysis] = pydantic.Field(
        default=None,
        description="Pattern analysis, including pattern matching and pattern recognition",
    )
input_properties: QuantumCircuitProperties pydantic-field required

Input circuit properties

native_properties: NativeQuantumCircuitProperties pydantic-field required

Transpiled circuit properties

pattern_analysis: PatternAnalysis pydantic-field

Pattern analysis, including pattern matching and pattern recognition

AnalysisStatus (str, Enum)

An enumeration.

Source code in classiq/interface/analyzer/result.py
class AnalysisStatus(str, Enum):
    NONE = "none"
    SUCCESS = "success"
    CANCELLED = "cancelled"
    ERROR = "error"
AvailableHardware (BaseModel) pydantic-model
Source code in classiq/interface/analyzer/result.py
class AvailableHardware(pydantic.BaseModel):
    ibm_quantum: Optional[Dict[PydanticNonEmptyString, bool]] = pydantic.Field(
        default=None,
        description="available IBM Quantum devices with a boolean indicating whether a given device has enough qubits.",
    )
    azure_quantum: Optional[Dict[PydanticNonEmptyString, bool]] = pydantic.Field(
        default=None,
        description="available Azure Quantum devices with a boolean indicating whether a given device has enough qubits.",
    )
    amazon_braket: Optional[Dict[PydanticNonEmptyString, bool]] = pydantic.Field(
        default=None,
        description="available AWS Braket devices with a boolean indicating whether a given device has enough qubits.",
    )
amazon_braket: Dict[classiq.interface.helpers.custom_pydantic_types.ConstrainedStrValue, bool] pydantic-field

available AWS Braket devices with a boolean indicating whether a given device has enough qubits.

azure_quantum: Dict[classiq.interface.helpers.custom_pydantic_types.ConstrainedStrValue, bool] pydantic-field

available Azure Quantum devices with a boolean indicating whether a given device has enough qubits.

ibm_quantum: Dict[classiq.interface.helpers.custom_pydantic_types.ConstrainedStrValue, bool] pydantic-field

available IBM Quantum devices with a boolean indicating whether a given device has enough qubits.

BasisGates (str, Enum)

An enumeration.

Source code in classiq/interface/analyzer/result.py
class BasisGates(str, Enum):
    CX = "cx"
    CY = "cy"
    CZ = "cz"
    U = "u"
    U2 = "u2"
    P = "p"
EntanglementAnalysisStatus (str, Enum)

An enumeration.

Source code in classiq/interface/analyzer/result.py
class EntanglementAnalysisStatus(str, Enum):
    SUCCESS = "success"
    TIMEOUT = "timeout"
    ERROR = "error"
GraphStatus (str, Enum)

An enumeration.

Source code in classiq/interface/analyzer/result.py
class GraphStatus(str, Enum):
    SUCCESS = "success"
    ERROR = "error"
HardwareComparisonInformation (BaseModel) pydantic-model
Source code in classiq/interface/analyzer/result.py
class HardwareComparisonInformation(pydantic.BaseModel):
    devices: List[PydanticNonEmptyString] = pydantic.Field(
        default=..., description="Device which is used for the transpilation."
    )
    providers: List[PydanticNonEmptyString] = pydantic.Field(
        default=..., description="Provider cloud of the device."
    )
    depth: List[pydantic.NonNegativeInt] = pydantic.Field(
        default=..., description="Circuit depth."
    )
    multi_qubit_gate_count: List[pydantic.NonNegativeInt] = pydantic.Field(
        default=..., description="Number of multi qubit gates."
    )
    total_gate_count: List[pydantic.NonNegativeInt] = pydantic.Field(
        default=..., description="Number of total gates."
    )
depth: List[pydantic.types.NonNegativeInt] pydantic-field required

Circuit depth.

devices: List[classiq.interface.helpers.custom_pydantic_types.ConstrainedStrValue] pydantic-field required

Device which is used for the transpilation.

multi_qubit_gate_count: List[pydantic.types.NonNegativeInt] pydantic-field required

Number of multi qubit gates.

providers: List[classiq.interface.helpers.custom_pydantic_types.ConstrainedStrValue] pydantic-field required

Provider cloud of the device.

total_gate_count: List[pydantic.types.NonNegativeInt] pydantic-field required

Number of total gates.

NativeQuantumCircuitProperties (QuantumCircuitProperties) pydantic-model
Source code in classiq/interface/analyzer/result.py
class NativeQuantumCircuitProperties(QuantumCircuitProperties):
    native_gates: Set[BasisGates] = pydantic.Field(
        default=..., description="Native gates used for decomposition"
    )
native_gates: Set[classiq.interface.analyzer.result.BasisGates] pydantic-field required

Native gates used for decomposition

PatternAnalysis (BaseModel) pydantic-model
Source code in classiq/interface/analyzer/result.py
class PatternAnalysis(pydantic.BaseModel):
    pattern_matching: Optional[PatternMatchingResult] = pydantic.Field(
        default=..., description="Pattern matching algorithm"
    )
    pattern_recognition: Optional[PatternRecognitionResult] = pydantic.Field(
        default=..., description="Find unknown patterns"
    )
    circuit: Circuit = pydantic.Field(
        default=..., description="Quantum circuit after pattern analysis"
    )
circuit: Circuit pydantic-field required

Quantum circuit after pattern analysis

pattern_matching: PatternMatchingResult pydantic-field required

Pattern matching algorithm

pattern_recognition: PatternRecognitionResult pydantic-field required

Find unknown patterns

QuantumCircuitProperties (BaseModel) pydantic-model
Source code in classiq/interface/analyzer/result.py
class QuantumCircuitProperties(pydantic.BaseModel):
    depth: pydantic.NonNegativeInt = pydantic.Field(
        default=..., description="Circuit depth"
    )
    auxiliary_qubits: pydantic.NonNegativeInt = pydantic.Field(
        default=..., description="Number of Auxiliary qubits"
    )
    classical_bits: pydantic.NonNegativeInt = pydantic.Field(
        default=..., description="Number of classical bits"
    )
    gates_count: pydantic.NonNegativeInt = pydantic.Field(
        default=..., description="Total number of gates in the circuit"
    )
    multi_qubit_gates_count: pydantic.NonNegativeInt = pydantic.Field(
        default=..., description="Number of multi-qubit gates in circuit"
    )
    non_entangled_subcircuits_count: pydantic.NonNegativeInt = pydantic.Field(
        default=..., description="Number of non-entangled sub-circuit "
    )
    entanglement_upper_bound: EntanglementAnalysisResult = pydantic.Field(
        default=...,
        description="An upper bound to the entanglement (measured by the Schmidt rank width) of states that can"
        "generated by the circuit. None is returned if the entanglement analysis took too long to complete",
    )
auxiliary_qubits: NonNegativeInt pydantic-field required

Number of Auxiliary qubits

classical_bits: NonNegativeInt pydantic-field required

Number of classical bits

depth: NonNegativeInt pydantic-field required

Circuit depth

entanglement_upper_bound: EntanglementAnalysisResult pydantic-field required

An upper bound to the entanglement (measured by the Schmidt rank width) of states that can be generated by the circuit. None is returned if the entanglement analysis took too long to complete

gates_count: NonNegativeInt pydantic-field required

Total number of gates in the circuit

multi_qubit_gates_count: NonNegativeInt pydantic-field required

Number of multi-qubit gates in circuit

non_entangled_subcircuits_count: NonNegativeInt pydantic-field required

Number of non-entangled sub-circuits

applications special

qsvm

QSVMFeatureMapEntanglement (str, Enum)

An enumeration.

Source code in classiq/interface/applications/qsvm.py
class QSVMFeatureMapEntanglement(str, Enum):
    FULL = "full"
    LINEAR = "linear"
    CIRCULAR = "circular"
    SCA = "sca"
    PAIRWISE = "pairwise"

backend special

backend_preferences

AwsBackendPreferences (BackendPreferences) pydantic-model
Source code in classiq/interface/backend/backend_preferences.py
class AwsBackendPreferences(BackendPreferences):
    backend_service_provider: ProviderTypeVendor.AWS_BRAKET = (
        ProviderVendor.AMAZON_BRAKET
    )
    # Allow running any backend supported by the vendor
    backend_name: Union[AWSBackendNames, str]
    aws_role_arn: pydantic_backend.PydanticAwsRoleArn = pydantic.Field(
        description="ARN of the role to be assumed for execution on your Braket account."
    )
    s3_bucket_name: pydantic_backend.PydanticS3BucketName = pydantic.Field(
        description="S3 Bucket Name"
    )
    s3_bucket_key: pydantic_backend.PydanticS3BucketKey = pydantic.Field(
        description="S3 Bucket Key"
    )
    job_timeout: pydantic_backend.PydanticExecutionTimeout = pydantic.Field(
        description="Timeout for Jobs sent for execution in seconds.",
        default=AWS_DEFAULT_JOB_TIMEOUT_SECONDS,
    )
aws_role_arn: ConstrainedStrValue pydantic-field required

ARN of the role to be assumed for execution on your Braket account.

job_timeout: ConstrainedIntValue pydantic-field

Timeout for Jobs sent for execution in seconds.

s3_bucket_key: ConstrainedStrValue pydantic-field required

S3 Bucket Key

s3_bucket_name: ConstrainedStrValue pydantic-field required

S3 Bucket Name
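
A construction sketch with placeholder values (not working credentials), using the fields listed above:

from classiq.interface.backend.backend_preferences import AwsBackendPreferences

aws_preferences = AwsBackendPreferences(
    backend_name="SV1",  # any backend name supported by the vendor is allowed
    aws_role_arn="arn:aws:iam::123456789012:role/my-braket-role",  # placeholder ARN
    s3_bucket_name="my-braket-results",  # placeholder bucket name
    s3_bucket_key="classiq-executions",  # placeholder bucket key
)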

AzureBackendPreferences (BackendPreferences) pydantic-model
Source code in classiq/interface/backend/backend_preferences.py
class AzureBackendPreferences(BackendPreferences):
    backend_service_provider: ProviderTypeVendor.AZURE_QUANTUM = (
        ProviderVendor.AZURE_QUANTUM
    )
    # Allow running any backend supported by the vendor
    backend_name: Union[AzureQuantumBackendNames, str]

    location: str = pydantic.Field(
        default="East US", description="Azure personal resource region"
    )

    run_through_classiq: bool = pydantic.Field(
        default=True,
        description="When set to True backend will run through classiq resources",
    )

    credentials: Optional[AzureCredential] = pydantic.Field(
        default=None,
        description="The service principal credential to access personal quantum workspace",
    )

    @validator("credentials", pre=True, always=True)
    def set_default_credentials(
        cls, credentials: Optional[AzureCredential], values: Dict[str, Any]
    ):
        run_through_classiq: bool = values["run_through_classiq"]
        if run_through_classiq:
            if credentials is not None:
                raise ValueError(
                    "No need to provide credentials when running through classiq"
                )
        else:
            if credentials is None:
                credentials = AzureCredential()
        return credentials
credentials: AzureCredential pydantic-field

The service principal credential to access personal quantum workspace

location: str pydantic-field

Azure personal resource region

run_through_classiq: bool pydantic-field

When set to True backend will run through classiq resources

AzureCredential (BaseSettings) pydantic-model
Source code in classiq/interface/backend/backend_preferences.py
class AzureCredential(pydantic.BaseSettings):
    tenant_id: str = pydantic.Field(description="Azure Tenant ID")
    client_id: str = pydantic.Field(description="Azure Client ID")
    client_secret: str = pydantic.Field(description="Azure Client Secret")
    resource_id: pydantic_backend.PydanticAzureResourceIDType = pydantic.Field(
        description="Azure Resource ID (including Azure subscription ID, resource "
        "group and workspace), for personal resource",
    )

    class Config:
        title = "Azure Service Principal Credential"
        env_prefix = "AZURE_"
        case_sensitive = False
client_id: str pydantic-field required

Azure Client ID

client_secret: str pydantic-field required

Azure Client Secret

resource_id: ConstrainedStrValue pydantic-field required

Azure Resource ID (including Azure subscription ID, resource group and workspace), for personal resource

tenant_id: str pydantic-field required

Azure Tenant ID

BackendPreferences (BaseModel) pydantic-model
Source code in classiq/interface/backend/backend_preferences.py
class BackendPreferences(BaseModel):
    # Due to the way the field is currently implemented, i.e. it is redefined with different types
    # in the subclass, it shouldn't be dumped with exclude_*. This causes this field not to appear.
    # For example: don't use obj.dict(exclude_unset=True).
    backend_service_provider: str = pydantic.Field(
        ..., description="Provider company or cloud for the requested backend."
    )
    backend_name: str = pydantic.Field(
        ..., description="Name of the requested backend or target."
    )
    qctrl_preferences: QctrlOptimizationPreferences = pydantic.Field(
        default=QctrlOptimizationPreferences(use_qctrl=False),
        description="QCtrl preferences.",
    )

    @classmethod
    def batch_preferences(
        cls, *, backend_names: Iterable[str], **kwargs
    ) -> List[BackendPreferences]:
        return [cls(backend_name=name, **kwargs) for name in backend_names]
backend_name: str pydantic-field required

Name of the requested backend or target.

backend_service_provider: str pydantic-field required

Provider company or cloud for the requested backend.

qctrl_preferences: QctrlOptimizationPreferences pydantic-field

QCtrl preferences.
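
The batch_preferences classmethod builds one preferences object per backend name, which pairs naturally with batch_execute_multiple_backends above. A sketch using the IBM simulator names listed under quantum_backend_providers below:

from classiq.interface.backend.backend_preferences import IBMBackendPreferences

backends = IBMBackendPreferences.batch_preferences(
    backend_names=["aer_simulator", "aer_simulator_statevector"]
)
print([b.backend_name for b in backends])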

IBMBackendPreferences (BackendPreferences) pydantic-model
Source code in classiq/interface/backend/backend_preferences.py
class IBMBackendPreferences(BackendPreferences):
    backend_service_provider: ProviderTypeVendor.IBM_QUANTUM = (
        ProviderVendor.IBM_QUANTUM
    )
    backend_name: Union[IBMQBackendNames, str]
    access_token: Optional[str] = pydantic.Field(
        default=None,
        description="IBM Quantum access token to be used"
        " with IBM Quantum hosted backends",
    )
    provider: IBMBackendProvider = pydantic.Field(
        default_factory=IBMBackendProvider,
        description="Provider specs. for identifying a single IBM Quantum provider.",
    )
    use_ibm_runtime: bool = pydantic.Field(
        default=False,
        description="Whether to execute using IBM runtime. Ignored if not applicable.",
    )
access_token: str pydantic-field

IBM Quantum access token to be used with IBM Quantum hosted backends

provider: IBMBackendProvider pydantic-field

Provider specs. for identifying a single IBM Quantum provider.

use_ibm_runtime: bool pydantic-field

Whether to execute using IBM runtime. Ignored if not applicable.

IonqBackendPreferences (BackendPreferences) pydantic-model
Source code in classiq/interface/backend/backend_preferences.py
class IonqBackendPreferences(BackendPreferences):
    backend_service_provider: ProviderTypeVendor.IONQ = ProviderVendor.IONQ
    backend_name: IonqBackendNames = pydantic.Field(
        default=IonqBackendNames.SIMULATOR,
        description="IonQ backend for quantum programs execution.",
    )
    api_key: pydantic_backend.PydanticIonQApiKeyType = pydantic.Field(
        ..., description="IonQ API key"
    )
api_key: ConstrainedStrValue pydantic-field required

IonQ API key

QctrlOptimizationPreferences (BaseModel) pydantic-model
Source code in classiq/interface/backend/backend_preferences.py
class QctrlOptimizationPreferences(BaseModel):
    use_qctrl: bool = pydantic.Field(
        False,
        description="Execute using QCtrl FireOpal (requires Qctrl authentication).",
    )
    qctrl_access_token: Union[dict, None] = pydantic.Field(
        None, description="Qctrl access token for FireOpal backend."
    )
qctrl_access_token: dict pydantic-field

Qctrl access token for FireOpal backend.

use_qctrl: bool pydantic-field

Execute using QCtrl FireOpal (requires Qctrl authentication).

quantum_backend_providers

AWSBackendNames (str, Enum)

An enumeration.

Source code in classiq/interface/backend/quantum_backend_providers.py
class AWSBackendNames(str, Enum):
    AMAZON_BRAKET_SV1 = "SV1"
    AMAZON_BRAKET_TN1 = "TN1"
    AMAZON_BRAKET_DM1 = "dm1"
    AMAZON_BRAKET_ASPEN_11 = "Aspen-11"
    AMAZON_BRAKET_M_1 = "Aspen-M-1"
    AMAZON_BRAKET_IONQ = "IonQ Device"
    AMAZON_BRAKET_LUCY = "Lucy"
AnalyzerProviderVendor (str, Enum)

An enumeration.

Source code in classiq/interface/backend/quantum_backend_providers.py
class AnalyzerProviderVendor(str, Enum):
    IBM_QUANTUM = "IBM Quantum"
    AZURE_QUANTUM = "Azure Quantum"
    AMAZON_BRAKET = "Amazon Braket"
AzureQuantumBackendNames (str, Enum)

An enumeration.

Source code in classiq/interface/backend/quantum_backend_providers.py
class AzureQuantumBackendNames(str, Enum):
    IONQ_ARIA = "ionq.qpu.aria-1"
    IONQ_QPU = "ionq.qpu"
    IONQ_SIMULATOR = "ionq.simulator"
    MICROSOFT_ESTIMATOR = "microsoft.estimator"
    MICROSOFT_FULLSTATE_SIMULATOR = "microsoft.simulator.fullstate"
    RIGETTI_ASPEN1 = "rigetti.qpu.aspen-11"
    RIGETTI_ASPEN2 = "rigetti.qpu.aspen-m-2"
    RIGETTI_SIMULATOR = "rigetti.sim.qvm"
    QCI_MACHINE1 = "qci.machine1"
    QCI_NOISY_SIMULATOR = "qci.simulator.noisy"
    QCI_SIMULATOR = "qci.simulator"
    QUANTINUUM_API_VALIDATOR1 = "quantinuum.sim.h1-1sc"
    QUANTINUUM_API_VALIDATOR1_OLD = "quantinuum.hqs-lt-s1-apival"
    QUANTINUUM_API_VALIDATOR2 = "quantinuum.sim.h1-2sc"
    QUANTINUUM_API_VALIDATOR2_OLD = "quantinuum.hqs-lt-s2-apival"
    QUANTINUUM_QPU1 = "quantinuum.qpu.h1-1"
    QUANTINUUM_QPU1_OLD = "quantinuum.hqs-lt-s1"
    QUANTINUUM_QPU2 = "quantinuum.qpu.h1-2"
    QUANTINUUM_QPU2_OLD = "quantinuum.hqs-lt-s2"
    QUANTINUUM_SIMULATOR1 = "quantinuum.sim.h1-1e"
    QUANTINUUM_SIMULATOR1_OLD = "quantinuum.hqs-lt-s1-sim"
    QUANTINUUM_SIMULATOR2 = "quantinuum.sim.h1-2e"
    QUANTINUUM_SIMULATOR2_OLD = "quantinuum.hqs-lt-s2-sim"
IBMQBackendNames (str, Enum)

An enumeration.

Source code in classiq/interface/backend/quantum_backend_providers.py
class IBMQBackendNames(str, Enum):
    IBMQ_AER_SIMULATOR = "aer_simulator"
    IBMQ_AER_SIMULATOR_STATEVECTOR = "aer_simulator_statevector"
    IBMQ_AER_SIMULATOR_DENSITY_MATRIX = "aer_simulator_density_matrix"
    IBMQ_AER_SIMULATOR_MATRIX_PRODUCT_STATE = "aer_simulator_matrix_product_state"
IonqBackendNames (str, Enum)

An enumeration.

Source code in classiq/interface/backend/quantum_backend_providers.py
class IonqBackendNames(str, Enum):
    SIMULATOR = "simulator"
    HARMONY = "qpu.harmony"
    ARIA = "qpu.aria-1"
    S11 = "qpu.s11"
NvidiaBackendNames (str, Enum)

An enumeration.

Source code in classiq/interface/backend/quantum_backend_providers.py
class NvidiaBackendNames(str, Enum):
    STATEVECTOR = "statevector"
ProviderVendor (str, Enum)

An enumeration.

Source code in classiq/interface/backend/quantum_backend_providers.py
class ProviderVendor(str, Enum):
    IBM_QUANTUM = "IBM Quantum"
    AZURE_QUANTUM = "Azure Quantum"
    AMAZON_BRAKET = "Amazon Braket"
    IONQ = "IonQ"
    NVIDIA = "Nvidia"

chemistry special

fermionic_operator

FermionicOperator (HashablePydanticBaseModel) pydantic-model

Specification of a Fermionic operator. Input: List of ladder operators; each ladder operator is described by a tuple of a character indicating if it's a creation ('+') or annihilation ('-') operator, and its index.

Source code in classiq/interface/chemistry/fermionic_operator.py
class FermionicOperator(HashablePydanticBaseModel):
    """
    Specification of a Fermionic operator.
    Input:
    List of ladder operators; each ladder operator is described by a tuple of a
    character indicating if it's a creation ('+') or annihilation ('-') operator, and its index.
    """

    op_list: list = pydantic.Field(
        description="A list of tuples each containing an index and a character; for example [('+', 0), ('-', 1)].",
    )

    @staticmethod
    def _validate_single_op(op: tuple) -> LadderOperator:
        if not isinstance(op, tuple):
            try:  # type: ignore[unreachable] # it is reachable...
                op = tuple(op)
            except Exception as exc:
                raise ValueError("Ladder operator should be a tuple.") from exc
        if len(op) != 2:
            raise ValueError(
                "Ladder operator tuple should be of length two; for example (1, '+')."
            )

        if op[0] not in ("+", "-"):
            raise ValueError(
                "The first term in a ladder operator tuple indicates if its a raising ('+')"
                " or lowering ('-') operator. Allowed input is: '+' or '-'."
            )
        if not isinstance(op[1], int):
            raise ValueError(
                "The second term in a ladder operator tuple indicates its index and should be of type int"
            )

        return op  # type: ignore[return-value] # mypy thinks that it is `Tuple[Any, ...]`, though the asserts here tell otherwise..

    @pydantic.validator("op_list")
    def _validate_op_list(cls, op_list: list) -> list:
        return list(map(cls._validate_single_op, op_list))

    def __mul__(self, coeff: Union[float, int]) -> SummedFermionicOperator:
        if isinstance(coeff, (float, int)):
            return SummedFermionicOperator(op_list=[(self, float(coeff))])
        raise ValueError(
            "The coefficient multiplying Fermionic Operator should be of type float"
        )

    __rmul__ = __mul__

    def __add__(
        self, other: Union[SummedFermionicOperator, FermionicOperator]
    ) -> SummedFermionicOperator:
        if isinstance(other, SummedFermionicOperator):
            return SummedFermionicOperator(op_list=[(self, 1.0)] + other.op_list)
        elif isinstance(other, FermionicOperator):
            return SummedFermionicOperator(op_list=[(self, 1.0)] + [(other, 1.0)])
        raise ValueError(
            "FermionicOperator can be summed together only with type FermionicOperator or SummedFermionicOperator"
        )

    class Config:
        frozen = True

    @staticmethod
    def _to_ladder_op(char: str) -> str:
        return "a" + _SUPERSCRIPT_PLUS if char == "+" else "a"

    @staticmethod
    def _to_subscript(num: int) -> str:
        return "".join(_SUBSCRIPT_UNICODE_CHARS[digit] for digit in str(num))

    def __str__(self) -> str:
        return "".join(
            f"{self._to_ladder_op(char)}{self._to_subscript(index)}"
            for (char, index) in self.op_list
        )

    @property
    def all_indices(self) -> Set[int]:
        return {op[1] for op in self.op_list}
op_list: list pydantic-field required

A list of tuples each containing an index and a character; for example [('+', 0), ('-', 1)].

__str__(self) special

Return str(self).

Source code in classiq/interface/chemistry/fermionic_operator.py
def __str__(self) -> str:
    return "".join(
        f"{self._to_ladder_op(char)}{self._to_subscript(index)}"
        for (char, index) in self.op_list
    )
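
An illustrative sketch of building and combining operators with the arithmetic shown above; each ladder operator is a (character, index) tuple:

from classiq.interface.chemistry.fermionic_operator import FermionicOperator

op1 = FermionicOperator(op_list=[("+", 0), ("-", 1)])
op2 = FermionicOperator(op_list=[("-", 0), ("-", 1)])

# Multiplying by a number and adding operators both return a SummedFermionicOperator.
summed = 0.2 * op1 + op2
print(op1)     # ladder-operator notation via __str__
print(summed)
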
SummedFermionicOperator (HashablePydanticBaseModel) pydantic-model

Specification of a summed Fermionic operator. Input: List of fermionic operator tuples; the first term in each tuple is the FermionicOperator and the second term is its coefficient. For example: op1 = FermionicOperator(op_list=[('+', 0), ('-', 1)]) op2 = FermionicOperator(op_list=[('-', 0), ('-', 1)]) summed_operator = SummedFermionicOperator(op_list=[(op1, 0.2), (op2, 6.7)])

Source code in classiq/interface/chemistry/fermionic_operator.py
class SummedFermionicOperator(HashablePydanticBaseModel):
    """
    Specification of a summed Fermionic operator.
    Input:
    List of fermionic operator tuples; the first term in each tuple is the FermionicOperator and the second term is its coefficient.
    For example:
    op1 = FermionicOperator(op_list=[('+', 0), ('-', 1)])
    op2 = FermionicOperator(op_list=[('-', 0), ('-', 1)])
    summed_operator = SummedFermionicOperator(op_list=[(op1, 0.2), (op2, 6.7)])
    """

    op_list: list = pydantic.Field(
        description="A list of tuples each containing a FermionicOperator and a coefficient.",
    )

    class Config:
        frozen = True

    @staticmethod
    def _validate_single_op(op: tuple) -> FermionicOperatorTuple:
        # is it tuple - if not, convert to tuple
        if not isinstance(op, tuple):
            try:  # type: ignore[unreachable] # it is reachable...
                op = tuple(op)
            except Exception as exc:
                raise ValueError("Operator should be a tuple.") from exc
        if len(op) != 2:
            raise ValueError("Operator tuple should be of length two.")

        # is it FermionicOperator - if not, convert to FermionicOperator
        if type(op[0]) != FermionicOperator:
            try:
                op = (FermionicOperator(**op[0]), op[1])
            except Exception as exc:
                raise ValueError(
                    "The first term in the operator tuple should be an instance of the FermionicOperator class"
                ) from exc

        if type(op[1]) != float:
            raise ValueError(
                "The second term in the operator tuple indicates its coefficient and should be of type float"
            )

        return op  # type: ignore[return-value] # mypy thinks that it is `Tuple[Any, ...]`, though the asserts here tell otherwise..

    @pydantic.validator("op_list")
    def _validate_op_list(cls, op_list: list) -> list:
        return list(map(cls._validate_single_op, op_list))

    def __add__(
        self, other: Union[SummedFermionicOperator, FermionicOperator]
    ) -> SummedFermionicOperator:
        if isinstance(other, SummedFermionicOperator):
            return SummedFermionicOperator(op_list=self.op_list + other.op_list)
        elif isinstance(other, FermionicOperator):
            return SummedFermionicOperator(op_list=self.op_list + [(other, 1.0)])
        raise ValueError(
            "FermionicOperator can be summed together only with type FermionicOperator or SummedFermionicOperator"
        )

    def is_close(self, other: SummedFermionicOperator) -> bool:
        if not isinstance(other, SummedFermionicOperator):
            return False  # type: ignore[unreachable]

        if len(self.op_list) != len(other.op_list):
            return False

        for (op1, coeff1), (op2, coeff2) in zip(self.op_list, other.op_list):
            if op1 != op2 or not np.isclose(coeff1, coeff2):
                return False

        return True

    @property
    def _all_indices(self) -> Set[int]:
        return set(
            itertools.chain.from_iterable(op.all_indices for op, _ in self.op_list)
        )

    @property
    def num_qubits(self) -> int:
        return len(self._all_indices)

    def __str__(self) -> str:
        return " + \n".join(str(op[1]) + " * " + str(op[0]) for op in self.op_list)
op_list: list pydantic-field required

A list of tuples each containing a FermionicOperator and a coefficient.

__str__(self) special

Return str(self).

Source code in classiq/interface/chemistry/fermionic_operator.py
def __str__(self) -> str:
    return " + \n".join(str(op[1]) + " * " + str(op[0]) for op in self.op_list)

ground_state_problem

FermionMapping (str, Enum)

An enumeration.

Source code in classiq/interface/chemistry/ground_state_problem.py
class FermionMapping(str, Enum):
    JORDAN_WIGNER = "jordan_wigner"
    PARITY = "parity"
    BRAVYI_KITAEV = "bravyi_kitaev"
    FAST_BRAVYI_KITAEV = "fast_bravyi_kitaev"
GroundStateProblem (HashablePydanticBaseModel) pydantic-model
Source code in classiq/interface/chemistry/ground_state_problem.py
class GroundStateProblem(HashablePydanticBaseModel):
    mapping: FermionMapping = pydantic.Field(
        default=FermionMapping.JORDAN_WIGNER, description="Fermionic mapping type"
    )
    z2_symmetries: bool = pydantic.Field(
        default=False,
        description="whether to perform z2 symmetries reduction",
    )
    num_qubits: Optional[int] = pydantic.Field(default=None)

    @pydantic.validator("z2_symmetries")
    def _validate_z2_symmetries(
        cls, z2_symmetries: bool, values: Dict[str, Any]
    ) -> bool:
        if z2_symmetries and values.get("mapping") == FermionMapping.FAST_BRAVYI_KITAEV:
            raise ValueError(
                "z2 symmetries reduction can not be used for fast_bravyi_kitaev mapping"
            )
        return z2_symmetries

    class Config:
        frozen = True
mapping: FermionMapping pydantic-field

Fermionic mapping type

z2_symmetries: bool pydantic-field

whether to perform z2 symmetries reduction

HamiltonianProblem (GroundStateProblem) pydantic-model
Source code in classiq/interface/chemistry/ground_state_problem.py
class HamiltonianProblem(GroundStateProblem):
    hamiltonian: SummedFermionicOperator = pydantic.Field(
        description="Hamiltonian as a fermionic operator"
    )
    num_particles: List[pydantic.PositiveInt] = pydantic.Field(
        description="Tuple containing the numbers of alpha particles and beta particles"
    )

    @pydantic.validator("num_particles")
    def _validate_num_particles(cls, num_particles: List[int]) -> List[int]:
        assert isinstance(num_particles, list)
        assert len(num_particles) == 2

        # This probably will never happen, since pydantic automatically converts
        #   floats to ints
        assert isinstance(num_particles[0], int)
        assert num_particles[0] >= 1

        assert isinstance(num_particles[1], int)
        assert num_particles[1] >= 1

        return num_particles
hamiltonian: SummedFermionicOperator pydantic-field required

Hamiltonian as a fermionic operator

num_particles: List[pydantic.types.PositiveInt] pydantic-field required

Tuple containing the numbers of alpha particles and beta particles
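
A construction sketch combining the fermionic-operator classes documented above; num_particles lists the numbers of alpha and beta particles:

from classiq.interface.chemistry.fermionic_operator import (
    FermionicOperator,
    SummedFermionicOperator,
)
from classiq.interface.chemistry.ground_state_problem import (
    FermionMapping,
    HamiltonianProblem,
)

op1 = FermionicOperator(op_list=[("+", 0), ("-", 1)])
op2 = FermionicOperator(op_list=[("-", 0), ("-", 1)])
hamiltonian = SummedFermionicOperator(op_list=[(op1, 0.2), (op2, 6.7)])

problem = HamiltonianProblem(
    hamiltonian=hamiltonian,
    num_particles=[1, 1],
    mapping=FermionMapping.BRAVYI_KITAEV,  # the default is JORDAN_WIGNER
)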

MoleculeProblem (GroundStateProblem) pydantic-model
Source code in classiq/interface/chemistry/ground_state_problem.py
class MoleculeProblem(GroundStateProblem):
    molecule: molecule.Molecule
    basis: str = pydantic.Field(default="sto3g", description="Molecular basis set")
    freeze_core: bool = pydantic.Field(default=False)
    remove_orbitals: List[int] = pydantic.Field(
        default_factory=list, description="list of orbitals to remove"
    )
basis: str pydantic-field

Molecular basis set

remove_orbitals: List[int] pydantic-field

list of orbitals to remove
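
A construction sketch using the Molecule class documented in the molecule module below:

from classiq.interface.chemistry.ground_state_problem import MoleculeProblem
from classiq.interface.chemistry.molecule import Molecule

h2 = Molecule(atoms=[("H", (0.0, 0.0, 0.0)), ("H", (0.0, 0.0, 0.735))])
problem = MoleculeProblem(
    molecule=h2,
    basis="sto3g",        # default basis set
    freeze_core=True,
    z2_symmetries=True,   # inherited from GroundStateProblem; not allowed with FAST_BRAVYI_KITAEV
)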

ground_state_solver

GroundStateSolver (BaseModel) pydantic-model
Source code in classiq/interface/chemistry/ground_state_solver.py
class GroundStateSolver(BaseModel):
    ground_state_problem: CHEMISTRY_PROBLEMS_TYPE = pydantic.Field(
        description=f"{CHEMISTRY_PROBLEMS} object"
    )
    ansatz: AnsatzType = pydantic.Field(
        description="GeneratedCircuit object or a str of the ansatz circuit"
    )
    optimizer_preferences: Optional[GroundStateOptimizer] = pydantic.Field(
        description="GroundStateOptimizer object"
    )
    backend_preferences: Optional[BackendPreferencesTypes] = backend_preferences_field()
    hamiltonian: Optional[PauliOperator] = pydantic.Field(
        description="A direct input of the Hamiltonian as a PauliOperator object"
    )
ansatz: Union[str, classiq.interface.generator.result.GeneratedCircuit] pydantic-field

GeneratedCircuit object or a str of the ansatz circuit

backend_preferences: Union[classiq.interface.backend.backend_preferences.AzureBackendPreferences, classiq.interface.backend.backend_preferences.IBMBackendPreferences, classiq.interface.backend.backend_preferences.AwsBackendPreferences, classiq.interface.backend.backend_preferences.IonqBackendPreferences, classiq.interface.backend.backend_preferences.NvidiaBackendPreferences] pydantic-field

Preferences for the requested backend to run the quantum circuit.

ground_state_problem: Union[classiq.interface.chemistry.ground_state_problem.MoleculeProblem, classiq.interface.chemistry.ground_state_problem.HamiltonianProblem] pydantic-field required

A MoleculeProblem or HamiltonianProblem object

hamiltonian: PauliOperator pydantic-field

A direct input of the Hamiltonian as a PauliOperator object

optimizer_preferences: GroundStateOptimizer pydantic-field

GroundStateOptimizer object
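
A high-level sketch (placeholders, not a definitive recipe): `problem` is assumed to be a MoleculeProblem or HamiltonianProblem as above, and `ansatz_circuit` a GeneratedCircuit (a QASM string is also accepted). Optimizer and backend preferences are optional and omitted here.

from classiq.interface.chemistry.ground_state_solver import GroundStateSolver

solver = GroundStateSolver(
    ground_state_problem=problem,
    ansatz=ansatz_circuit,
)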

molecule

Molecule (HashablePydanticBaseModel) pydantic-model
Source code in classiq/interface/chemistry/molecule.py
class Molecule(HashablePydanticBaseModel):
    atoms: List[AtomType] = pydantic.Field(
        description="A list of atoms each containing the string of the atom's symbol and a list of its (x,y,z) location; for example [('H', (0.0, 0.0, 0.0)), ('H', (0.0, 0.0, 0.735))]."
    )
    spin: pydantic.NonNegativeInt = pydantic.Field(
        default=1, description="spin of the molecule"
    )
    charge: pydantic.NonNegativeInt = pydantic.Field(
        default=0, description="charge of the molecule"
    )

    @pydantic.validator("atoms", each_item=True)
    def _validate_atoms(cls, atom: AtomType) -> AtomType:
        if len(atom) != 2:
            raise ValueError(
                "each atom should be a list of two entries: 1) name pf the elemnt (str) 2) list of its (x,y,z) location"
            )
        if type(atom[0]) != str:
            raise ValueError(
                f"atom name should be a string. unknown element: {atom[0]}."
            )
        if atom[0] not in ELEMENTS:
            raise ValueError(f"unknown element: {atom[0]}.")
        if len(atom[1]) != 3:
            raise ValueError(
                f"location of the atom is of length three, representing the (x,y,z) coordinates of the atom, error value: {atom[1]}"
            )
        for idx in atom[1]:
            if type(idx) != float and type(idx) != int:
                raise ValueError(
                    f"coordinates of the atom should be of type float. error value: {idx}"
                )
        return atom

    class Config:
        frozen = True
atoms: List[types.ConstrainedListValue] pydantic-field required

A list of atoms each containing the string of the atom's symbol and a list of its (x,y,z) location; for example [('H', (0.0, 0.0, 0.0)), ('H', (0.0, 0.0, 0.735))].

charge: NonNegativeInt pydantic-field

charge of the molecule

spin: NonNegativeInt pydantic-field

spin of the molecule

operator

PauliOperator (HashablePydanticBaseModel, VersionedModel) pydantic-model

Specification of a Pauli sum operator.

Source code in classiq/interface/chemistry/operator.py
class PauliOperator(HashablePydanticBaseModel, VersionedModel):
    """
    Specification of a Pauli sum operator.
    """

    pauli_list: PydanticPauliList = pydantic.Field(
        description="A list of tuples each containing a pauli string comprised of I,X,Y,Z characters and a complex coefficient; for example [('IZ', 0.1), ('XY', 0.2)].",
    )
    is_hermitian: bool = pydantic.Field(default=False)

    def show(self) -> str:
        if self.is_hermitian:
            return "\n".join(
                f"{summand[1].real:+.3f} * {summand[0]}" for summand in self.pauli_list
            )
        return "\n".join(
            f"+({summand[1]:+.3f}) * {summand[0]}" for summand in self.pauli_list
        )

    @pydantic.validator("pauli_list", each_item=True)
    def _validate_pauli_monomials(
        cls, monomial: Tuple[PydanticPauliMonomialStr, complex]
    ) -> Tuple[PydanticPauliMonomialStr, complex]:
        _PauliMonomialLengthValidator(  # type: ignore[call-arg]
            monomial=monomial
        )  # Validate the length of the monomial.
        parsed_monomial = _PauliMonomialParser(string=monomial[0], coeff=monomial[1])  # type: ignore[call-arg]
        return (parsed_monomial.string, parsed_monomial.coeff)

    @pydantic.validator("pauli_list")
    def _validate_pauli_list(cls, pauli_list: PydanticPauliList) -> PydanticPauliList:
        if not all_equal(len(summand[0]) for summand in pauli_list):
            raise ValueError("Pauli strings have incompatible lengths.")
        return pauli_list

    @pydantic.root_validator
    def _validate_hermitianity(cls, values: Dict[str, Any]) -> Dict[str, Any]:
        pauli_list = values.get("pauli_list", [])
        values["is_hermitian"] = all(
            np.isclose(complex(summand[1]).real, summand[1]) for summand in pauli_list
        )
        if values.get("is_hermitian", False):
            values["pauli_list"] = [
                (summand[0], complex(summand[1].real)) for summand in pauli_list
            ]
        return values

    def __mul__(self, coefficient: complex) -> "PauliOperator":
        multiplied_ising = [
            (monomial[0], monomial[1] * coefficient) for monomial in self.pauli_list
        ]
        return self.__class__(pauli_list=multiplied_ising)

    @property
    def num_qubits(self) -> int:
        return len(self.pauli_list[0][0])

    def to_matrix(self) -> np.ndarray:
        return sum(
            summand[1] * to_pauli_matrix(summand[0]) for summand in self.pauli_list
        )  # type: ignore[return-value]

    @staticmethod
    def _extend_pauli_string(
        pauli_string: PydanticPauliMonomialStr, num_extra_qubits: int
    ) -> PydanticPauliMonomialStr:
        return "I" * num_extra_qubits + pauli_string

    def extend(self, num_extra_qubits: int) -> "PauliOperator":
        new_pauli_list = [
            (self._extend_pauli_string(pauli_string, num_extra_qubits), coeff)
            for (pauli_string, coeff) in self.pauli_list
        ]
        return self.copy(update={"pauli_list": new_pauli_list}, deep=True)

    class Config:
        frozen = True
pauli_list: ConstrainedListValue pydantic-field required

A list of tuples each containing a pauli string comprised of I,X,Y,Z characters and a complex coefficient; for example [('IZ', 0.1), ('XY', 0.2)].
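
An illustrative sketch of the operator arithmetic shown above:

from classiq.interface.chemistry.operator import PauliOperator

operator = PauliOperator(pauli_list=[("IZ", 0.1), ("XY", 0.2)])
print(operator.num_qubits)     # 2
print(operator.show())         # one summand per line
scaled = operator * 2.0        # __mul__ returns a new PauliOperator
extended = operator.extend(1)  # prepends "I" to every Pauli string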

combinatorial_optimization special

encoding_types

EncodingType (str, Enum)

An enumeration.

Source code in classiq/interface/combinatorial_optimization/encoding_types.py
class EncodingType(str, enum.Enum):
    BINARY = "BINARY"
    ONE_HOT = "ONE_HOT"

mht_qaoa_input

MhtQaoaInput (BaseModel) pydantic-model
Source code in classiq/interface/combinatorial_optimization/mht_qaoa_input.py
class MhtQaoaInput(BaseModel):
    reps: pydantic.PositiveInt = pydantic.Field(
        default=3, description="Number of QAOA layers."
    )
    plot_list: List[PlotData] = pydantic.Field(
        description="The list of (x,y,t) plots of the MHT problem."
    )
    misdetection_maximum_time_steps: pydantic.NonNegativeInt = pydantic.Field(
        default=0,
        description="The maximum number of time steps a target might be misdetected.",
    )
    penalty_energy: float = pydantic.Field(
        default=2,
        description="Penalty energy for invalid solutions. The value affects "
        "the converges rate. Small positive values are preferred",
    )
    three_local_coeff: float = pydantic.Field(
        default=0,
        description="Coefficient for the 3-local terms in the Hamiltonian. It is related to the angular acceleration.",
    )
    one_local_coeff: float = pydantic.Field(
        default=0, description="Coefficient for the 1-local terms in the Hamiltonian."
    )
    is_penalty: bool = pydantic.Field(
        default=True, description="Build Pubo using penalty terms"
    )
    max_velocity: float = pydantic.Field(
        default=0, description="Max allowed velocity for a segment"
    )

    def is_valid_cost(self, cost: float) -> bool:
        return True

    @pydantic.validator("plot_list")
    def round_plot_list_times_and_validate(cls, plot_list):
        MhtQaoaInput._check_all_ids_are_distinct(plot_list)
        MhtQaoaInput._round_to_tolerance_decimals(plot_list)

        time_stamps = sorted({plot.t for plot in plot_list})
        time_diff_set = {
            np.round(time_stamps[i] - time_stamps[i - 1], decimals=_TOLERANCE_DECIMALS)
            for i in range(1, len(time_stamps))
        }

        if len(time_diff_set) != 1:
            raise ValueError("The time difference between each time stamp is not equal")

        return plot_list

    @staticmethod
    def _round_to_tolerance_decimals(plot_list: List[PlotData]):
        for plot in plot_list:
            plot.t = np.round(plot.t, decimals=_TOLERANCE_DECIMALS)

    @staticmethod
    def _check_all_ids_are_distinct(plot_list: List[PlotData]):
        if not more_itertools.all_unique(plot.plot_id for plot in plot_list):
            raise ValueError("Plot IDs should be unique.")
is_penalty: bool pydantic-field

Build the PUBO (polynomial unconstrained binary optimization) objective using penalty terms.

max_velocity: float pydantic-field

Maximum allowed velocity for a segment.

misdetection_maximum_time_steps: NonNegativeInt pydantic-field

The maximum number of time steps a target might be misdetected.

one_local_coeff: float pydantic-field

Coefficient for the 1-local terms in the Hamiltonian.

penalty_energy: float pydantic-field

Penalty energy for invalid solutions. The value affects the convergence rate. Small positive values are preferred.

plot_list: List[classiq.interface.combinatorial_optimization.mht_qaoa_input.PlotData] pydantic-field required

The list of (x,y,t) plots of the MHT problem.

reps: PositiveInt pydantic-field

Number of QAOA layers.

three_local_coeff: float pydantic-field

Coefficient for the 3-local terms in the Hamiltonian. It is related to the angular acceleration.

PlotData (BaseModel) pydantic-model
Source code in classiq/interface/combinatorial_optimization/mht_qaoa_input.py
class PlotData(BaseModel):
    # We are currently ignoring units. This might need to be handled in the future
    x: float = pydantic.Field(description="The X coordinate of this plot")
    y: float = pydantic.Field(description="The Y coordinate of this plot")
    t: float = pydantic.Field(description="The time stamp of this plot")
    plot_id: pydantic.NonNegativeInt = pydantic.Field(
        description="The plot ID of this plot"
    )
plot_id: NonNegativeInt pydantic-field required

The plot ID of this plot

t: float pydantic-field required

The time stamp of this plot

x: float pydantic-field required

The X coordinate of this plot

y: float pydantic-field required

The Y coordinate of this plot
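
A minimal construction sketch, assuming MhtQaoaInput and PlotData are imported from classiq.interface.combinatorial_optimization.mht_qaoa_input (the source path shown above); the coordinates, times, and IDs are illustrative only. The plot_list validator requires distinct plot IDs and equally spaced time stamps:

plots = [
    PlotData(x=0.0, y=0.0, t=0.0, plot_id=0),
    PlotData(x=1.0, y=0.5, t=1.0, plot_id=1),
    PlotData(x=2.0, y=1.0, t=2.0, plot_id=2),
]

# Time stamps are equally spaced (0, 1, 2) and plot IDs are unique,
# so the plot_list validator accepts the input.
mht_input = MhtQaoaInput(reps=3, plot_list=plots)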

model_io_comon

Functions for saving and loading Pyomo objects to json

StoreSpec

A StoreSpec object tells the serializer functions what to read or write. The default settings will produce a StoreSpec configured to load/save the typical attributes required to load/save a model state.

Parameters:

Name Type Description Default
classes

A list of classes to save. Each class is represented by a list (or tuple) containing the following elements: (1) the class (compared using isinstance); (2) an attribute list or None, where an empty list stores the object but none of its attributes and None does not store objects of this class type; (3) an optional load filter function. The load filter function returns a list of attributes to read based on the state of an object and its saved state. This allows, for example, loading values only for unfixed variables, or loading only values whose current value is less than one. The filter function applies only to loading, not saving. Filter functions take two arguments: (a) the object (current state) and (b) the dictionary containing the saved state of the object. More specific classes should come before more general classes. For example, if an object is both a HeatExchanger and a UnitModel and HeatExchanger is listed first, it will follow the HeatExchanger settings; if UnitModel is listed first in the classes list, it will follow the UnitModel settings.

((<class 'pyomo.core.base.param.Param'>, ('_mutable',)), (<class 'pyomo.core.base.var.Var'>, ()), (<class 'pyomo.core.base.expression.Expression'>, ()), (<class 'pyomo.core.base.component.Component'>, ('active',)), (<class 'pyomo.core.base.objective.Objective'>, ('sense',)), (<class 'pyomo.core.base.indexed_component.IndexedComponent'>, ('index',)))
data_classes

This takes the same form as the classes argument. This is for component data classes.

((<class 'pyomo.core.base.var._VarData'>, ('fixed', 'stale', 'value', 'lb', 'ub', 'domain')), (<class 'pyomo.core.base.param._ParamData'>, ('value',)), (<class 'int'>, ('value',)), (<class 'float'>, ('value',)), (<class 'pyomo.core.base.expression._ExpressionData'>, ()), (<class 'pyomo.core.base.component.ComponentData'>, ('active',)), (<class 'pyomo.core.base.constraint._GeneralConstraintData'>, ()), (<class 'pyomo.core.expr.numvalue.NumericConstant'>, ('value',)), (<class 'pyomo.core.expr.logical_expr.InequalityExpression'>, ('strict',)), (<class 'pyomo.core.base.objective.ScalarObjective'>, ('sense',)), (<class 'pyomo.core.base.set.RangeSet'>, ('_init_data',)))
skip_classes

This is a list of classes to skip. If a class appears in the skip list, but also appears in the classes argument, the classes argument will override skip_classes. The use for this is to specifically exclude certain classes that would get caught by more general classes (e.g. UnitModel is in the class list, but you want to exclude HeatExchanger which is derived from UnitModel).

(<class 'pyomo.core.base.external.ExternalFunction'>, <class 'pyomo.network.port.Port'>, <class 'pyomo.core.base.expression.Expression'>, <class 'pyomo.core.base.set.RangeSet'>)
ignore_missing

If True, ignore a component or attribute that exists in the model but not in the stored state. If False, an exception will be raised for items in the model that should be loaded but are missing from the stored state. Extra items in the stored state will not raise an exception regardless of this argument.

True
suffix

If True, store suffixes and component IDs. If False, don't store suffixes.

True
suffix_filter

If suffix=True, None stores all suffixes; otherwise, a list of the specific suffixes to store.

None
Source code in classiq/interface/combinatorial_optimization/model_io_comon.py
class StoreSpec:
    """
    A StoreSpec object tells the serializer functions what to read or write.
    The default settings will produce a StoreSpec configured to load/save the
    typical attributes required to load/save a model state.
    Args:
        classes: A list of classes to save.  Each class is represented by a
            list (or tuple) containing the following elements: (1) class
            (compared using isinstance) (2) attribute list or None, an empty
            list store the object, but none of its attributes, None will not
            store objects of this class type (3) optional load filter function.
            The load filter function returns a list of attributes to read based
            on the state of an object and its saved state. The allows, for
            example, loading values for unfixed variables, or only loading
            values whose current value is less than one. The filter function
            only applies to load not save. Filter functions take two arguments
            (a) the object (current state) and (b) the dictionary containing the
            saved state of an object.  More specific classes should come before
            more general classes.  For example if an object is a HeatExchanger
            and a UnitModel, and HeatExchanger is listed first, it will follow
            the HeatExchanger settings.  If UnitModel is listed first in the
            classes list, it will follow the UnitModel settings.
        data_classes: This takes the same form as the classes argument.
            This is for component data classes.
        skip_classes: This is a list of classes to skip.  If a class appears
            in the skip list, but also appears in the classes argument, the
            classes argument will override skip_classes. The use for this is to
            specifically exclude certain classes that would get caught by more
            general classes (e.g. UnitModel is in the class list, but you want
            to exclude HeatExchanger which is derived from UnitModel).
        ignore_missing: If True will ignore a component or attribute that exists
            in the model, but not in the stored state. If false an exception
            will be raised for things in the model that should be loaded but
            aren't in the stored state. Extra items in the stored state will not
            raise an exception regardless of this argument.
        suffix: If True store suffixes and component ids.  If false, don't store
            suffixes.
        suffix_filter: None to store all suffixes if suffix=True, or a list of
            suffixes to store if suffix=True
    """

    def __init__(
        self,
        classes=(
            (Param, ("_mutable",)),
            (Var, ()),
            (Expression, ()),
            (Component, ("active",)),
            (pyomo.core.base.objective.Objective, ("sense",)),
            (pyomo.core.base.indexed_component.IndexedComponent, ("index",)),
        ),
        data_classes=(
            (
                pyomo.core.base.var._VarData,
                ("fixed", "stale", "value", "lb", "ub", "domain"),
            ),
            (pyomo.core.base.param._ParamData, ("value",)),
            (int, ("value",)),
            (float, ("value",)),
            (pyomo.core.base.expression._ExpressionData, ()),
            (pyomo.core.base.component.ComponentData, ("active",)),
            (pyomo.core.base.constraint._GeneralConstraintData, ()),
            (pyomo.core.expr.numvalue.NumericConstant, ("value",)),
            (pyomo.core.expr.logical_expr.InequalityExpression, ("strict",)),
            (pyomo.core.base.objective.ScalarObjective, ("sense",)),
            (pyomo.core.base.set.RangeSet, ("_init_data",)),
        ),
        skip_classes=(ExternalFunction, Port, Expression, RangeSet),
        ignore_missing=True,
        suffix=True,
        suffix_filter=None,
    ) -> None:
        """
        (see above)
        """
        # Callbacks are used for attributes that cannot be directly get or set
        self.get_functions = {
            "value": _get_value,
            "strict": _get_strict,
            "domain": _get_domain,
            "index": _get_index_name,
        }
        self.set_functions = {
            "_mutable": lambda *args: None,
            "active": _set_active,
            "fixed": _set_fixed,
            "lb": _set_lb,
            "ub": _set_ub,
            "value": _set_value,
            "strict": _set_strict,
            "sense": _set_sense,
        }

        skip_with_classes: List[Any] = [
            (i, []) for i in skip_classes if i not in classes
        ] + list(classes)
        self.classes = [i[0] for i in skip_with_classes]
        # Add skip classes to classes list, with None as attr list to skip
        self.class_attrs = [i[1] for i in skip_with_classes]
        self.data_classes = [i[0] for i in data_classes]
        self.data_class_attrs = [i[1] for i in data_classes]
        # Create filter function lists, use None if not supplied
        self.class_filter = [i[2] if len(i) > 2 else None for i in skip_with_classes]
        self.data_class_filter = [i[2] if len(i) > 2 else None for i in data_classes]
        self.ignore_missing = ignore_missing
        self.include_suffix = suffix
        self.suffix_filter = suffix_filter

    def set_read_callback(self, attr, cb=None):
        """
        Set a callback to set an attribute, when reading from json or dict.
        """
        self.set_functions[attr] = cb

    def set_write_callback(self, attr, cb=None):
        """
        Set a callback to get an attribute, when writing to json or dict.
        """
        self.get_functions[attr] = cb

    def get_class_attr_list(self, obj) -> Tuple[List[Any], Any]:
        """
        Look up what attributes to save/load for an Component object.
        Args:
            obj: Object to look up attribute list for.
        Return:
            A list of attributes and a filter function for object type
        """
        attr_list = []  # Attributes to store
        filter_function = None  # Load filter function
        for i, cl in enumerate(self.classes):
            if isinstance(obj, cl) or (isinstance(obj, type) and issubclass(obj, cl)):
                attr_list += list(self.class_attrs[i])
                filter_function = self.class_filter[i]  # this does not make sense
        return attr_list, filter_function

    def get_data_class_attr_list(self, obj) -> Tuple[List[Any], Any]:
        """
        Look up what attributes to save/load for an ComponentData object.
        Args:
            obj: Object or type to look up attribute list for.
        Return:
            A list of attributes and a filter function for object type
        """
        attr_list = []  # Attributes to store
        filter_function = None  # Load filter function
        for i, cl in enumerate(self.data_classes):
            if isinstance(obj, cl) or (isinstance(obj, type) and issubclass(obj, cl)):
                attr_list += list(self.data_class_attrs[i])
                filter_function = self.data_class_filter[
                    i
                ]  # TODO: this does not make sense
        return attr_list, filter_function

    @classmethod
    def bound(cls):
        """Returns a StoreSpec object to store variable bounds only."""
        return cls(
            classes=((Var, ()),),
            data_classes=((pyomo.core.base.var._VarData, ("lb", "ub")),),
            suffix=False,
        )

    @classmethod
    def value(cls):
        """Returns a StoreSpec object to store variable values only."""
        return cls(
            classes=((Var, ()),),
            data_classes=((pyomo.core.base.var._VarData, ("value",)),),
            suffix=False,
        )

    @classmethod
    def isfixed(cls):
        """Returns a StoreSpec object to store if variables are fixed."""
        return cls(
            classes=((Var, ()),),
            data_classes=((pyomo.core.base.var._VarData, ("fixed",)),),
            suffix=False,
        )

    @classmethod
    def suffix(cls, suffix_filter=None):
        return cls(
            classes=((Suffix, ()),),
            data_classes=(),
            suffix=True,
            suffix_filter=suffix_filter,
        )

    @classmethod
    def value_isfixed(cls, only_fixed):
        """
        Return a StoreSpec object to store variable values and if fixed.
        Args:
            only_fixed: Only load fixed variable values
        """
        if only_fixed:
            return cls(
                classes=((Var, ()),),
                data_classes=(
                    (pyomo.core.base.var._VarData, ("value", "fixed"), _only_fixed),
                ),
                suffix=False,
            )
        else:
            return cls(
                classes=((Var, ()),),
                data_classes=((pyomo.core.base.var._VarData, ("value", "fixed")),),
                suffix=False,
            )

    @classmethod
    def value_isfixed_isactive(cls, only_fixed):
        """
        Return a StoreSpec object to store variable values, if variables are
        fixed and if components are active.
        Args:
            only_fixed: Only load fixed variable values
        """
        if only_fixed:
            return cls(
                classes=((Var, ()), (Param, ()), (Component, ("active",))),
                data_classes=(
                    (pyomo.core.base.var._VarData, ("value", "fixed"), _only_fixed),
                    (pyomo.core.base.param._ParamData, ("value",)),
                    (pyomo.core.base.component.ComponentData, ("active",)),
                ),
                suffix=False,
            )
        else:
            return cls(
                classes=((Var, ()), (Param, ()), (Component, ("active",))),
                data_classes=(
                    (pyomo.core.base.var._VarData, ("value", "fixed")),
                    (pyomo.core.base.param._ParamData, ("value",)),
                    (pyomo.core.base.component.ComponentData, ("active",)),
                ),
                suffix=False,
            )
__init__(self, classes=((<class 'pyomo.core.base.param.Param'>, ('_mutable',)), (<class 'pyomo.core.base.var.Var'>, ()), (<class 'pyomo.core.base.expression.Expression'>, ()), (<class 'pyomo.core.base.component.Component'>, ('active',)), (<class 'pyomo.core.base.objective.Objective'>, ('sense',)), (<class 'pyomo.core.base.indexed_component.IndexedComponent'>, ('index',))), data_classes=((<class 'pyomo.core.base.var._VarData'>, ('fixed', 'stale', 'value', 'lb', 'ub', 'domain')), (<class 'pyomo.core.base.param._ParamData'>, ('value',)), (<class 'int'>, ('value',)), (<class 'float'>, ('value',)), (<class 'pyomo.core.base.expression._ExpressionData'>, ()), (<class 'pyomo.core.base.component.ComponentData'>, ('active',)), (<class 'pyomo.core.base.constraint._GeneralConstraintData'>, ()), (<class 'pyomo.core.expr.numvalue.NumericConstant'>, ('value',)), (<class 'pyomo.core.expr.logical_expr.InequalityExpression'>, ('strict',)), (<class 'pyomo.core.base.objective.ScalarObjective'>, ('sense',)), (<class 'pyomo.core.base.set.RangeSet'>, ('_init_data',))), skip_classes=(<class 'pyomo.core.base.external.ExternalFunction'>, <class 'pyomo.network.port.Port'>, <class 'pyomo.core.base.expression.Expression'>, <class 'pyomo.core.base.set.RangeSet'>), ignore_missing=True, suffix=True, suffix_filter=None) special

(see above)

Source code in classiq/interface/combinatorial_optimization/model_io_comon.py
def __init__(
    self,
    classes=(
        (Param, ("_mutable",)),
        (Var, ()),
        (Expression, ()),
        (Component, ("active",)),
        (pyomo.core.base.objective.Objective, ("sense",)),
        (pyomo.core.base.indexed_component.IndexedComponent, ("index",)),
    ),
    data_classes=(
        (
            pyomo.core.base.var._VarData,
            ("fixed", "stale", "value", "lb", "ub", "domain"),
        ),
        (pyomo.core.base.param._ParamData, ("value",)),
        (int, ("value",)),
        (float, ("value",)),
        (pyomo.core.base.expression._ExpressionData, ()),
        (pyomo.core.base.component.ComponentData, ("active",)),
        (pyomo.core.base.constraint._GeneralConstraintData, ()),
        (pyomo.core.expr.numvalue.NumericConstant, ("value",)),
        (pyomo.core.expr.logical_expr.InequalityExpression, ("strict",)),
        (pyomo.core.base.objective.ScalarObjective, ("sense",)),
        (pyomo.core.base.set.RangeSet, ("_init_data",)),
    ),
    skip_classes=(ExternalFunction, Port, Expression, RangeSet),
    ignore_missing=True,
    suffix=True,
    suffix_filter=None,
) -> None:
    """
    (see above)
    """
    # Callbacks are used for attributes that cannot be directly get or set
    self.get_functions = {
        "value": _get_value,
        "strict": _get_strict,
        "domain": _get_domain,
        "index": _get_index_name,
    }
    self.set_functions = {
        "_mutable": lambda *args: None,
        "active": _set_active,
        "fixed": _set_fixed,
        "lb": _set_lb,
        "ub": _set_ub,
        "value": _set_value,
        "strict": _set_strict,
        "sense": _set_sense,
    }

    skip_with_classes: List[Any] = [
        (i, []) for i in skip_classes if i not in classes
    ] + list(classes)
    self.classes = [i[0] for i in skip_with_classes]
    # Add skip classes to classes list, with None as attr list to skip
    self.class_attrs = [i[1] for i in skip_with_classes]
    self.data_classes = [i[0] for i in data_classes]
    self.data_class_attrs = [i[1] for i in data_classes]
    # Create filter function lists, use None if not supplied
    self.class_filter = [i[2] if len(i) > 2 else None for i in skip_with_classes]
    self.data_class_filter = [i[2] if len(i) > 2 else None for i in data_classes]
    self.ignore_missing = ignore_missing
    self.include_suffix = suffix
    self.suffix_filter = suffix_filter
bound() classmethod

Returns a StoreSpec object to store variable bounds only.

Source code in classiq/interface/combinatorial_optimization/model_io_comon.py
@classmethod
def bound(cls):
    """Returns a StoreSpec object to store variable bounds only."""
    return cls(
        classes=((Var, ()),),
        data_classes=((pyomo.core.base.var._VarData, ("lb", "ub")),),
        suffix=False,
    )
get_class_attr_list(self, obj)

Look up what attributes to save/load for an Component object.

Parameters:

Name Type Description Default
obj

Object to look up attribute list for.

required

Returns:

Type Description
Tuple[List[Any], Any]

A list of attributes and a filter function for object type

Source code in classiq/interface/combinatorial_optimization/model_io_comon.py
def get_class_attr_list(self, obj) -> Tuple[List[Any], Any]:
    """
    Look up what attributes to save/load for an Component object.
    Args:
        obj: Object to look up attribute list for.
    Return:
        A list of attributes and a filter function for object type
    """
    attr_list = []  # Attributes to store
    filter_function = None  # Load filter function
    for i, cl in enumerate(self.classes):
        if isinstance(obj, cl) or (isinstance(obj, type) and issubclass(obj, cl)):
            attr_list += list(self.class_attrs[i])
            filter_function = self.class_filter[i]  # this does not make sense
    return attr_list, filter_function
get_data_class_attr_list(self, obj)

Look up what attributes to save/load for an ComponentData object.

Parameters:

Name Type Description Default
obj

Object or type to look up attribute list for.

required

Returns:

Type Description
Tuple[List[Any], Any]

A list of attributes and a filter function for object type

Source code in classiq/interface/combinatorial_optimization/model_io_comon.py
def get_data_class_attr_list(self, obj) -> Tuple[List[Any], Any]:
    """
    Look up what attributes to save/load for an ComponentData object.
    Args:
        obj: Object or type to look up attribute list for.
    Return:
        A list of attributes and a filter function for object type
    """
    attr_list = []  # Attributes to store
    filter_function = None  # Load filter function
    for i, cl in enumerate(self.data_classes):
        if isinstance(obj, cl) or (isinstance(obj, type) and issubclass(obj, cl)):
            attr_list += list(self.data_class_attrs[i])
            filter_function = self.data_class_filter[
                i
            ]  # TODO: this does not make sense
    return attr_list, filter_function
isfixed() classmethod

Returns a StoreSpec object to store if variables are fixed.

Source code in classiq/interface/combinatorial_optimization/model_io_comon.py
@classmethod
def isfixed(cls):
    """Returns a StoreSpec object to store if variables are fixed."""
    return cls(
        classes=((Var, ()),),
        data_classes=((pyomo.core.base.var._VarData, ("fixed",)),),
        suffix=False,
    )
set_read_callback(self, attr, cb=None)

Set a callback to set an attribute, when reading from json or dict.

Source code in classiq/interface/combinatorial_optimization/model_io_comon.py
def set_read_callback(self, attr, cb=None):
    """
    Set a callback to set an attribute, when reading from json or dict.
    """
    self.set_functions[attr] = cb
set_write_callback(self, attr, cb=None)

Set a callback to get an attribute, when writing to json or dict.

Source code in classiq/interface/combinatorial_optimization/model_io_comon.py
def set_write_callback(self, attr, cb=None):
    """
    Set a callback to get an attribute, when writing to json or dict.
    """
    self.get_functions[attr] = cb
value() classmethod

Returns a StoreSpec object to store variable values only.

Source code in classiq/interface/combinatorial_optimization/model_io_comon.py
@classmethod
def value(cls):
    """Returns a StoreSpec object to store variable values only."""
    return cls(
        classes=((Var, ()),),
        data_classes=((pyomo.core.base.var._VarData, ("value",)),),
        suffix=False,
    )
value_isfixed(only_fixed) classmethod

Return a StoreSpec object to store variable values and if fixed.

Parameters:

Name Type Description Default
only_fixed

Only load fixed variable values

required
Source code in classiq/interface/combinatorial_optimization/model_io_comon.py
@classmethod
def value_isfixed(cls, only_fixed):
    """
    Return a StoreSpec object to store variable values and if fixed.
    Args:
        only_fixed: Only load fixed variable values
    """
    if only_fixed:
        return cls(
            classes=((Var, ()),),
            data_classes=(
                (pyomo.core.base.var._VarData, ("value", "fixed"), _only_fixed),
            ),
            suffix=False,
        )
    else:
        return cls(
            classes=((Var, ()),),
            data_classes=((pyomo.core.base.var._VarData, ("value", "fixed")),),
            suffix=False,
        )
value_isfixed_isactive(only_fixed) classmethod

Return a StoreSpec object to store variable values, if variables are fixed and if components are active.

Parameters:

Name Type Description Default
only_fixed

Only load fixed variable values

required
Source code in classiq/interface/combinatorial_optimization/model_io_comon.py
@classmethod
def value_isfixed_isactive(cls, only_fixed):
    """
    Return a StoreSpec object to store variable values, if variables are
    fixed and if components are active.
    Args:
        only_fixed: Only load fixed variable values
    """
    if only_fixed:
        return cls(
            classes=((Var, ()), (Param, ()), (Component, ("active",))),
            data_classes=(
                (pyomo.core.base.var._VarData, ("value", "fixed"), _only_fixed),
                (pyomo.core.base.param._ParamData, ("value",)),
                (pyomo.core.base.component.ComponentData, ("active",)),
            ),
            suffix=False,
        )
    else:
        return cls(
            classes=((Var, ()), (Param, ()), (Component, ("active",))),
            data_classes=(
                (pyomo.core.base.var._VarData, ("value", "fixed")),
                (pyomo.core.base.param._ParamData, ("value",)),
                (pyomo.core.base.component.ComponentData, ("active",)),
            ),
            suffix=False,
        )
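
A short sketch of the constructors above, assuming StoreSpec is imported from classiq.interface.combinatorial_optimization.model_io_comon (the source path shown above):

full_spec = StoreSpec()            # default: the complete model state
values_only = StoreSpec.value()    # variable values only
bounds_only = StoreSpec.bound()    # variable lower/upper bounds only
fixed_flags = StoreSpec.isfixed()  # whether variables are fixed
fixed_values = StoreSpec.value_isfixed(only_fixed=True)  # values of fixed variables only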

model_serializer

Functions for saving and loading Pyomo objects to json

Counter

This is a counter object, which is an easy way to pass an integer pointer around between methods.

Source code in classiq/interface/combinatorial_optimization/model_serializer.py
class Counter:
    """
    This is a counter object, which is an easy way to pass an integer pointer
    around between methods.
    """

    def __init__(self) -> None:
        self.count = 0
to_json(obj, file_name=None, human_read=False, store_spec=None, metadata=None, gz=None, return_dict=False, return_json_string=False)

Save the state of a model to a Python dictionary, and optionally dump it to a json file. To load a model state, a model with the same structure must exist. The model itself cannot be recreated from this.

Parameters:

Name Type Description Default
obj

The Pyomo component object to save. Usually a Pyomo model, but could also be a subcomponent of a model (usually a sub-block).

required
file_name

json file name in which to save the model state; if None, only create the Python dict.

None
gz

If file_name is given and gz is True, gzip the json file. The default is True if the file name ends with '.gz', otherwise False.

None
human_read

If True, add indentation and spacing to make the json file more readable; if False, strip whitespace to make it as compact as possible.

False
metadata

A dictionary of additional metadata to save beyond the standard format_version, date, and time.

None
store_spec

A StoreSpec object that specifies what object types and attributes to save. If None, the default is used, which saves the complete model state.

None
return_dict

If True, return a dictionary representation of the Pyomo component; default is False.

False
return_json_string

If True (and return_dict is False), return a json string dump of the dictionary; default is False.

False

Returns:

Type Description
Optional[Dict[Any, Any]]

If return_dict is True, returns a dictionary serialization of the Pyomo component. If return_dict is False and return_json_string is True, returns a json string dump of the dict. If file_name is given, the dictionary is also written to a json file; if gz is also True, the file is gzipped.

Source code in classiq/interface/combinatorial_optimization/model_serializer.py
def to_json(
    obj,
    file_name=None,
    human_read=False,
    store_spec=None,
    metadata=None,
    gz=None,
    return_dict=False,
    return_json_string=False,
) -> Optional[Dict[Any, Any]]:
    """
    Save the state of a model to a Python dictionary, and optionally dump it
    to a json file.  To load a model state, a model with the same structure must
    exist.  The model itself cannot be recreated from this.
    Args:
        obj: The Pyomo component object to save.  Usually a Pyomo model, but could
            also be a subcomponent of a model (usually a sub-block).
        file_name: json file name to save model state, if None only create
            python dict
        gz: If file_name is given and gv is True gzip the json file. The default is
            True if the file name ends with '.gz' otherwise False.
        human_read: if True, add indents and spacing to make the json file more
            readable, if false cut out whitespace and make as compact as
            possible
        metadata: A dictionary of additional metadata to add.
        store_spec: is What To Save, this is a StoreSpec object that specifies what
            object types and attributes to save.  If None, the default is used
            which saves the state of the complete model state.
        metadata: additional metadata to save beyond the standard format_version,
            date, and time.
        return_dict: default is False if true returns a dictionary representation
        return_json_string: default is False returns a json string
    Returns:
        If return_dict is True returns a dictionary serialization of the Pyomo
        component.  If return_dict is False and return_json_string is True
        returns a json string dump of the dict.  If file_name is given the dictionary
        is also written to a json file.  If gz is True and file_name is given, writes
        a gzipped json file.
    """
    if gz is None:
        if isinstance(file_name, str):
            gz = file_name.endswith(".gz")
        else:
            gz = False
    if metadata is None:
        metadata = {}

    suffixes: List[dict] = list()
    lookup: Dict[int, int] = dict()
    count: Counter = Counter()
    start_time = time.time()
    if store_spec is None:
        store_spec = StoreSpec()

    now = datetime.datetime.now()
    obj_dict = {
        "__metadata__": {
            "format_version": __format_version__,
            "date": datetime.date.isoformat(now.date()),
            "time": datetime.time.isoformat(now.time()),
            "other": metadata,
        }
    }

    _write_component(obj_dict, obj, store_spec, count, suffixes=suffixes, lookup=lookup)
    for s in suffixes:
        _write_component_data(**s)

    obj_dict["__metadata__"]["__performance__"] = {}
    performance_dict = obj_dict["__metadata__"]["__performance__"]
    performance_dict["n_components"] = count.count
    dict_time = time.time()
    performance_dict["time_to_make_dict"] = dict_time - start_time
    dump_kw: Dict[str, Any] = (
        {"indent": 2} if human_read else {"separators": (",", ":")}
    )
    if file_name is not None:
        if gz:
            with gzip.open(file_name, "w") as f:
                json.dump(obj_dict, f, **dump_kw)
        else:
            with open(file_name, "w") as f:
                json.dump(obj_dict, f, **dump_kw)
    file_time = time.time()
    performance_dict["time_to_write_file"] = file_time - dict_time

    if return_dict:
        return obj_dict
    elif return_json_string:
        return json.dumps(obj_dict, **dump_kw)  # type: ignore[return-value]
    else:
        return None
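
A minimal usage sketch, assuming to_json and StoreSpec are imported from the model_serializer and model_io_comon modules shown above; the Pyomo model is illustrative:

import pyomo.environ as pyo

model = pyo.ConcreteModel()
model.x = pyo.Var(initialize=1.0)
model.obj = pyo.Objective(expr=model.x ** 2)

state = to_json(model, return_dict=True)                        # in-memory dict
to_json(model, file_name="model_state.json", human_read=True)   # readable json file
to_json(model, return_dict=True, store_spec=StoreSpec.value())  # variable values only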

optimization_problem

OptimizationProblem (BaseModel) pydantic-model
Source code in classiq/interface/combinatorial_optimization/optimization_problem.py
class OptimizationProblem(BaseModel):
    qsolver_preferences: QSolverPreferences = pydantic.Field(
        default=...,
        description="preferences for the QSolver: QAOAMixer, QAOAPenalty or GAS",
    )
    optimizer_preferences: CombinatorialOptimizer = pydantic.Field(
        default_factory=CombinatorialOptimizer,
        description="preferences for the VQE execution",
    )
    serialized_model: Optional[Dict[str, Any]] = None
    backend_preferences: BackendPreferencesTypes = backend_preferences_field()
    encoding_type: Optional[EncodingType] = pydantic.Field(
        default=EncodingType.BINARY,
        description="encoding scheme for integer variables",
    )
    ansatz: Optional[GeneratedCircuit] = pydantic.Field(
        default=None, description="GeneratedCircuit object of the ansatz circuit"
    )

    class Config:
        smart_union = True
        extra = "forbid"
        validate_assignment = True

    @pydantic.validator("serialized_model", pre=True)
    def serialize_model(cls, model: Any):
        if isinstance(model, pyo.ConcreteModel):
            return model_serializer.to_json(model, return_dict=True)

        return model

    @pydantic.root_validator()
    def set_should_check_valid_solutions(cls, values):
        qsolver_preferences = values.get("qsolver_preferences")
        backend_preferences = values.get("backend_preferences")
        optimizer_preferences = values.get("optimizer_preferences")

        if qsolver_preferences.qsolver == QSolver.Custom:
            pass

        elif qsolver_preferences.qsolver == QSolver.QAOAMixer and is_exact_simulator(
            backend_preferences
        ):
            optimizer_preferences.should_check_valid_solutions = True

        else:
            optimizer_preferences.should_check_valid_solutions = False

        return values
ansatz: GeneratedCircuit pydantic-field

GeneratedCircuit object of the ansatz circuit

backend_preferences: Union[classiq.interface.backend.backend_preferences.AzureBackendPreferences, classiq.interface.backend.backend_preferences.IBMBackendPreferences, classiq.interface.backend.backend_preferences.AwsBackendPreferences, classiq.interface.backend.backend_preferences.IonqBackendPreferences, classiq.interface.backend.backend_preferences.NvidiaBackendPreferences] pydantic-field

Preferences for the requested backend to run the quantum circuit.

encoding_type: EncodingType pydantic-field

encoding scheme for integer variables

optimizer_preferences: CombinatorialOptimizer pydantic-field

preferences for the VQE execution

qsolver_preferences: Union[classiq.interface.combinatorial_optimization.preferences.QAOAPreferences, classiq.interface.combinatorial_optimization.preferences.GASPreferences] pydantic-field required

preferences for the QSolver: QAOAMixer, QAOAPenalty or GAS
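
A minimal construction sketch, assuming OptimizationProblem, QAOAPreferences, and QSolver are imported from the combinatorial_optimization modules shown in this section, and reusing a Pyomo ConcreteModel like the one built in the model_serializer example above. Note that the serialize_model pre-validator accepts the model object directly and serializes it to a dict:

problem = OptimizationProblem(
    qsolver_preferences=QAOAPreferences(qaoa_reps=2),  # defaults to QSolver.QAOAPenalty
    serialized_model=model,  # a pyo.ConcreteModel; converted by the serialize_model validator
)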

preferences

GASPreferences (BaseModel) pydantic-model
Source code in classiq/interface/combinatorial_optimization/preferences.py
class GASPreferences(pydantic.BaseModel):
    qsolver: Literal[QSolver.GAS] = pydantic.Field(
        default=QSolver.GAS,
        description="Indicates the qsolver type.",
    )
    num_result_qubits: int = 0
    preferences: Preferences = pydantic.Field(
        default=Preferences(transpilation_option=TranspilationOption.NONE)
    )
qsolver: Literal[<QSolver.GAS: 'GAS'>] pydantic-field

Indicates the qsolver type.

QAOAPreferences (BaseModel) pydantic-model
Source code in classiq/interface/combinatorial_optimization/preferences.py
class QAOAPreferences(BaseModel):
    qsolver: Literal[
        QSolver.QAOAPenalty, QSolver.QAOAMixer, QSolver.Custom
    ] = pydantic.Field(
        default=QSolver.QAOAPenalty,
        description="Indicates whether to use QAOA with penalty terms (QAOAPenalty), "
        "constraints-preserving QAOA (QAOAMixer) or a user-defined ansatz.",
    )
    qaoa_reps: pydantic.PositiveInt = pydantic.Field(
        default=1, description="Number of layers in qaoa ansatz."
    )
    penalty_energy: float = pydantic.Field(
        default=None,
        description="Penalty energy for invalid solutions. The value affects "
        "the converges rate. Small positive values are preferred",
    )
    initial_state: Optional[List[int]] = pydantic.Field(
        default=None,
        description="Initial state in QAOA ansatz. The state should be a single basis state in the "
        "computational basis. For problems with binary or integer variables the string "
        "consists of binary or integer values respectively.",
    )

    constraints: Constraints = pydantic.Field(default_factory=get_default_constraints)
    preferences: Preferences = pydantic.Field(default_factory=get_default_preferences)

    @pydantic.validator("penalty_energy", pre=True, always=True)
    def check_penalty_energy(cls, penalty_energy, values):
        qsolver = values.get("qsolver")
        if penalty_energy is not None and qsolver not in (
            QSolver.QAOAPenalty,
            QSolver.Custom,
        ):
            raise ValueError(
                "Use penalty_energy only for QSolver.QAOAPenalty or QSolver.Custom."
            )

        if penalty_energy is None and qsolver == QSolver.QAOAPenalty:
            penalty_energy = 2

        return penalty_energy
initial_state: List[int] pydantic-field

Initial state of the QAOA ansatz. The state should be a single basis state in the computational basis. For problems with binary or integer variables, the list consists of binary or integer values, respectively.

penalty_energy: float pydantic-field

Penalty energy for invalid solutions. The value affects the convergence rate. Small positive values are preferred.

qaoa_reps: PositiveInt pydantic-field

Number of layers in qaoa ansatz.

qsolver: Literal[<QSolver.QAOAPenalty: 'QAOAPenalty'>, <QSolver.QAOAMixer: 'QAOAMixer'>, <QSolver.Custom: 'Custom'>] pydantic-field

Indicates whether to use QAOA with penalty terms (QAOAPenalty), constraints-preserving QAOA (QAOAMixer) or a user-defined ansatz.
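
A short sketch of the penalty_energy rules enforced by the validator above, assuming QAOAPreferences and QSolver are imported from the preferences and solver_types modules shown in this section:

default_prefs = QAOAPreferences()                         # QAOAPenalty; penalty_energy defaults to 2
mixer_prefs = QAOAPreferences(qsolver=QSolver.QAOAMixer)  # penalty_energy must be left as None
custom_prefs = QAOAPreferences(qsolver=QSolver.Custom, penalty_energy=1.5)
# QAOAPreferences(qsolver=QSolver.QAOAMixer, penalty_energy=1.0) raises a ValueError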

solver_types

QSolver (str, Enum)

An enumeration.

Source code in classiq/interface/combinatorial_optimization/solver_types.py
class QSolver(str, enum.Enum):
    QAOAPenalty = "QAOAPenalty"
    QAOAMixer = "QAOAMixer"
    Custom = "Custom"
    GAS = "GAS"

executor special

aws_execution_cost

CostScope (str, Enum)

An enumeration.

Source code in classiq/interface/executor/aws_execution_cost.py
class CostScope(str, Enum):
    user = "user"
    organization = "organization"
ExecutionCostForTimePeriod (BaseModel) pydantic-model
Source code in classiq/interface/executor/aws_execution_cost.py
class ExecutionCostForTimePeriod(pydantic.BaseModel):
    start: date = pydantic.Field(
        description="The beginning of the time period for tasks usage and cost ("
        "inclusive).",
    )
    end: date = pydantic.Field<