Skip to content

Python SDK

Classiq SDK.

analyzer special

analyzer

Analyzer module, implementing facilities for analyzing circuits using Classiq platform.

Analyzer (AnalyzerUtilities)

Analyzer is the wrapper object for all analysis capabilities.

Source code in classiq/analyzer/analyzer.py
class Analyzer(AnalyzerUtilities, metaclass=Asyncify):
    """Analyzer is the wrapper object for all analysis capabilities."""

    def __init__(self, circuit: generator_result.GeneratedCircuit) -> None:
        """Init self.

        Args:
            circuit (): The circuit to be analyzed; must contain valid QASM code.

        Raises:
            ClassiqAnalyzerError: If the circuit carries no QASM code.
        """
        if circuit.qasm is None:
            raise ClassiqAnalyzerError(
                "Analysis requires a circuit with valid QASM code"
            )
        # Base parameters reused by every analysis request issued from this object.
        self._params: analysis_params.AnalysisParams = analysis_params.AnalysisParams(
            qasm=circuit.qasm
        )
        self.circuit: generator_result.GeneratedCircuit = circuit
        # Caches populated lazily by the async methods below.
        self.hardware_comparison_table: Optional[go.Figure] = None
        self.available_devices: ProviderAvailableDevices = dict()
        self.hardware_graphs: HardwareGraphs = dict()

        self.transpilation_params = analysis_params.AnalysisHardwareTranspilationParams(
            hardware_data=self.circuit.hardware_data,
            model_preferences=self.circuit.model.preferences,
        )

    async def analyzer_app_async(self) -> None:
        """Opens the analyzer app with synthesis interactive results.

        Returns:
            None.
        """
        result = await ApiWrapper.call_analyzer_app(self.circuit)
        # Open the IDE page of the uploaded circuit in the default browser.
        webbrowser.open_new_tab(
            urljoin(
                client_ide_base_url(),
                circuit_page_uri(
                    circuit_id=result.id, circuit_version=self.circuit.version
                ),
            )
        )

    async def get_available_devices_async(
        self, providers: Optional[List[ProviderNameEnum]] = None
    ) -> Dict[ProviderNameEnum, List[DeviceName]]:
        """Returns a dict of the available devices by provider. Only devices
        with a sufficient number of qubits are returned.

        Args: providers (): List of providers (string or `AnalyzerProviderVendor`).
        If None, all the available hardware is included.

        Returns:
            available devices (): dict of the available devices (Dict[str,List[str]]).
        """
        if providers is None:
            providers = list(AnalyzerProviderVendor)
        await self.request_available_devices_async(providers=providers)
        # Keep only the devices that have enough qubits for this circuit.
        return {
            provider: self._filter_devices_by_qubits_count(provider)
            for provider in providers
        }

    async def plot_hardware_connectivity_async(
        self,
        provider: Optional[ProviderNameEnum] = None,
        device: Optional[DeviceName] = None,
    ) -> VBox:
        """Plot the hardware_connectivity graph. Installing the
        analyzer_sdk extra is required.

        Args:
            provider (): provider name (optional - string or `AnalyzerProviderVendor`).
            device (): device name (optional - string).
        Returns:
         hardware_connectivity_graph (): interactive graph.
        """

        self._validate_analyzer_extra()
        interactive_hardware = InteractiveHardware(
            circuit=self.circuit,
            params=self._params,
            available_devices=self.available_devices,
            hardware_graphs=self.hardware_graphs,
        )
        await interactive_hardware.enable_interactivity_async()
        # Pre-select the requested provider/device in the widgets, if given.
        if provider is not None:
            interactive_hardware.providers_combobox.value = provider
            if device is not None:
                interactive_hardware.devices_combobox.value = device

        return interactive_hardware.show_interactive_graph()

    async def get_hardware_comparison_table_async(
        self,
        providers: Optional[Sequence[Union[str, AnalyzerProviderVendor]]] = None,
        devices: Optional[List[str]] = None,
    ) -> None:
        """Create a comparison table of the transpiled circuit results on different hardware.
        The comparison table includes the depth, multi-qubit gate count, and total gate count of the circuits.

        Args: providers (): List of providers (string or `AnalyzerProviderVendor`). If None, all
        the available hardware is included.
        devices (): List of devices (string). If None, all the available devices of the selected
        providers are included.
        Returns: None.
        """
        if providers is None:
            providers = list(AnalyzerProviderVendor)
        params = analysis_params.AnalysisHardwareListParams(
            qasm=self._params.qasm,
            providers=providers,
            devices=devices,
            transpilation_params=self.transpilation_params,
        )
        result = await ApiWrapper.call_table_graphs_task(params=params)
        # Cache the resulting figure so subsequent plot calls can reuse it.
        self.hardware_comparison_table = go.Figure(json.loads(result.details))

    async def plot_hardware_comparison_table_async(
        self,
        providers: Optional[List[Union[str, AnalyzerProviderVendor]]] = None,
        devices: Optional[List[str]] = None,
    ) -> None:
        """Plot the comparison table. If it has not been created yet, the table
        is first created using all the available hardware.

        Returns:
            None.
        """
        await self._hardware_comparison_condition_async(
            providers=providers, devices=devices
        )
        self.hardware_comparison_table.show()  # type: ignore[union-attr]

    async def _hardware_comparison_condition_async(
        self,
        providers: Optional[Sequence[Union[str, AnalyzerProviderVendor]]] = None,
        devices: Optional[List[str]] = None,
    ) -> None:
        # Regenerate the table when explicit filters are requested or when no
        # table has been computed yet; otherwise reuse the cached figure.
        if (
            providers is not None
            or devices is not None
            or self.hardware_comparison_table is None
        ):
            await self.get_hardware_comparison_table_async(
                providers=providers, devices=devices
            )

    @staticmethod
    def _open_route(path: str) -> None:
        # Open a backend-relative path in the default web browser.
        backend_uri = client.client().get_backend_uri()
        webbrowser.open_new_tab(f"{backend_uri}{path}")

    @staticmethod
    def _validate_analyzer_extra() -> None:
        # ipywidgets is an optional dependency; raise a helpful error if missing.
        if find_ipywidgets is None:
            raise ClassiqAnalyzerError(
                "To use this method, please install the `analyzer sdk`. Run the  \
                following line: - pip install classiq[analyzer_sdk]"
            )
__init__(self, circuit) special

Init self.

Parameters:

Name Type Description Default
circuit

The circuit to be analyzed.

required
Source code in classiq/analyzer/analyzer.py
def __init__(self, circuit: generator_result.GeneratedCircuit) -> None:
    """Init self.

    Args:
        circuit (): The circuit to be analyzed.
    """
    if circuit.qasm is None:
        raise ClassiqAnalyzerError(
            "Analysis requires a circuit with valid QASM code"
        )
    self._params: analysis_params.AnalysisParams = analysis_params.AnalysisParams(
        qasm=circuit.qasm
    )
    self.circuit: generator_result.GeneratedCircuit = circuit
    self.hardware_comparison_table: Optional[go.Figure] = None
    self.available_devices: ProviderAvailableDevices = dict()
    self.hardware_graphs: HardwareGraphs = dict()

    self.transpilation_params = analysis_params.AnalysisHardwareTranspilationParams(
        hardware_data=self.circuit.hardware_data,
        model_preferences=self.circuit.model.preferences,
    )
analyzer_app(self) async

Opens the analyzer app with synthesis interactive results.

Returns:

Type Description
None

None.

Source code in classiq/analyzer/analyzer.py
async def analyzer_app_async(self) -> None:
    """Opens the analyzer app with synthesis interactive results.

    Returns:
        None.
    """
    result = await ApiWrapper.call_analyzer_app(self.circuit)
    webbrowser.open_new_tab(
        urljoin(
            client_ide_base_url(),
            circuit_page_uri(
                circuit_id=result.id, circuit_version=self.circuit.version
            ),
        )
    )
analyzer_app_async(self) async

Opens the analyzer app with synthesis interactive results.

Returns:

Type Description
None

None.

Source code in classiq/analyzer/analyzer.py
async def analyzer_app_async(self) -> None:
    """Opens the analyzer app with synthesis interactive results.

    Returns:
        None.
    """
    result = await ApiWrapper.call_analyzer_app(self.circuit)
    webbrowser.open_new_tab(
        urljoin(
            client_ide_base_url(),
            circuit_page_uri(
                circuit_id=result.id, circuit_version=self.circuit.version
            ),
        )
    )
get_available_devices(self, providers=None) async

Returns a dict of the available devices by provider. Only devices with a sufficient number of qubits are returned.

Args: providers (): List of providers (string or AnalyzerProviderVendor). if None, the table include all the available hardware.

Returns:

Type Description
available devices ()

dict of the available devices (Dict[str,List[str]]).

Source code in classiq/analyzer/analyzer.py
async def get_available_devices_async(
    self, providers: Optional[List[ProviderNameEnum]] = None
) -> Dict[ProviderNameEnum, List[DeviceName]]:
    """Returns dict of the available devices by the providers. only devices
    with sufficient number of qubits are returns

    Args: providers (): List of providers (string or `AnalyzerProviderVendor`).
    if None, the table include all the available hardware.

    Returns:
        available devices (): dict of the available devices (Dict[str,List[str]]).
    """
    if providers is None:
        providers = list(AnalyzerProviderVendor)
    await self.request_available_devices_async(providers=providers)
    return {
        provider: self._filter_devices_by_qubits_count(provider)
        for provider in providers
    }
get_available_devices_async(self, providers=None) async

Returns a dict of the available devices by provider. Only devices with a sufficient number of qubits are returned.

Args: providers (): List of providers (string or AnalyzerProviderVendor). if None, the table include all the available hardware.

Returns:

Type Description
available devices ()

dict of the available devices (Dict[str,List[str]]).

Source code in classiq/analyzer/analyzer.py
async def get_available_devices_async(
    self, providers: Optional[List[ProviderNameEnum]] = None
) -> Dict[ProviderNameEnum, List[DeviceName]]:
    """Returns dict of the available devices by the providers. only devices
    with sufficient number of qubits are returns

    Args: providers (): List of providers (string or `AnalyzerProviderVendor`).
    if None, the table include all the available hardware.

    Returns:
        available devices (): dict of the available devices (Dict[str,List[str]]).
    """
    if providers is None:
        providers = list(AnalyzerProviderVendor)
    await self.request_available_devices_async(providers=providers)
    return {
        provider: self._filter_devices_by_qubits_count(provider)
        for provider in providers
    }
get_hardware_comparison_table(self, providers=None, devices=None) async

Create a comparison table of the transpiled circuit results on different hardware. The comparison table includes the depth, multi-qubit gate count, and total gate count of the circuits.

Args: providers (): List of providers (string or AnalyzerProviderVendor). if None, the table include all the available hardware. devices (): List of devices (string). if None, the table include all the available devices of the selected providers. Returns: None.

Source code in classiq/analyzer/analyzer.py
async def get_hardware_comparison_table_async(
    self,
    providers: Optional[Sequence[Union[str, AnalyzerProviderVendor]]] = None,
    devices: Optional[List[str]] = None,
) -> None:
    """create a comparison table between the transpiled circuits result on different hardware.
    The  comparison table included the depth, multi qubit gates count,and total gates count of the circuits.

    Args: providers (): List of providers (string or `AnalyzerProviderVendor`). if None, the table include all
    the available hardware.
    devices (): List of devices (string). if None, the table include all the available devices of the selected
    providers.
    Returns: None.
    """
    if providers is None:
        providers = list(AnalyzerProviderVendor)
    params = analysis_params.AnalysisHardwareListParams(
        qasm=self._params.qasm,
        providers=providers,
        devices=devices,
        transpilation_params=self.transpilation_params,
    )
    result = await ApiWrapper.call_table_graphs_task(params=params)
    self.hardware_comparison_table = go.Figure(json.loads(result.details))
get_hardware_comparison_table_async(self, providers=None, devices=None) async

Create a comparison table of the transpiled circuit results on different hardware. The comparison table includes the depth, multi-qubit gate count, and total gate count of the circuits.

Args: providers (): List of providers (string or AnalyzerProviderVendor). if None, the table include all the available hardware. devices (): List of devices (string). if None, the table include all the available devices of the selected providers. Returns: None.

Source code in classiq/analyzer/analyzer.py
async def get_hardware_comparison_table_async(
    self,
    providers: Optional[Sequence[Union[str, AnalyzerProviderVendor]]] = None,
    devices: Optional[List[str]] = None,
) -> None:
    """create a comparison table between the transpiled circuits result on different hardware.
    The  comparison table included the depth, multi qubit gates count,and total gates count of the circuits.

    Args: providers (): List of providers (string or `AnalyzerProviderVendor`). if None, the table include all
    the available hardware.
    devices (): List of devices (string). if None, the table include all the available devices of the selected
    providers.
    Returns: None.
    """
    if providers is None:
        providers = list(AnalyzerProviderVendor)
    params = analysis_params.AnalysisHardwareListParams(
        qasm=self._params.qasm,
        providers=providers,
        devices=devices,
        transpilation_params=self.transpilation_params,
    )
    result = await ApiWrapper.call_table_graphs_task(params=params)
    self.hardware_comparison_table = go.Figure(json.loads(result.details))
plot_hardware_comparison_table(self, providers=None, devices=None) async

Plot the comparison table. If it has not been created yet, the table is first created using all the available hardware.

Returns:

Type Description
None

None.

Source code in classiq/analyzer/analyzer.py
async def plot_hardware_comparison_table_async(
    self,
    providers: Optional[List[Union[str, AnalyzerProviderVendor]]] = None,
    devices: Optional[List[str]] = None,
) -> None:
    """plot the comparison table. if it has not been created it, it first creates the table using all the
    available hardware.

    Returns:
        None.
    """
    await self._hardware_comparison_condition_async(
        providers=providers, devices=devices
    )
    self.hardware_comparison_table.show()  # type: ignore[union-attr]
plot_hardware_comparison_table_async(self, providers=None, devices=None) async

Plot the comparison table. If it has not been created yet, the table is first created using all the available hardware.

Returns:

Type Description
None

None.

Source code in classiq/analyzer/analyzer.py
async def plot_hardware_comparison_table_async(
    self,
    providers: Optional[List[Union[str, AnalyzerProviderVendor]]] = None,
    devices: Optional[List[str]] = None,
) -> None:
    """plot the comparison table. if it has not been created it, it first creates the table using all the
    available hardware.

    Returns:
        None.
    """
    await self._hardware_comparison_condition_async(
        providers=providers, devices=devices
    )
    self.hardware_comparison_table.show()  # type: ignore[union-attr]
plot_hardware_connectivity(self, provider=None, device=None) async

Plot the hardware_connectivity graph. Installing the analyzer_sdk extra is required.

Parameters:

Name Type Description Default
provider

provider name (optional - string or AnalyzerProviderVendor).

None
device

device name (optional - string).

None

Returns:

Type Description
hardware_connectivity_graph ()

interactive graph.

Source code in classiq/analyzer/analyzer.py
async def plot_hardware_connectivity_async(
    self,
    provider: Optional[ProviderNameEnum] = None,
    device: Optional[DeviceName] = None,
) -> VBox:
    """plot the hardware_connectivity graph. It is required to required  install the
    analyzer_sdk extra.

    Args:
        provider (): provider name (optional - string or `AnalyzerProviderVendor`).
        device (): device name (optional - string).
    Returns:
     hardware_connectivity_graph (): interactive graph.
    """

    self._validate_analyzer_extra()
    interactive_hardware = InteractiveHardware(
        circuit=self.circuit,
        params=self._params,
        available_devices=self.available_devices,
        hardware_graphs=self.hardware_graphs,
    )
    await interactive_hardware.enable_interactivity_async()
    if provider is not None:
        interactive_hardware.providers_combobox.value = provider
        if device is not None:
            interactive_hardware.devices_combobox.value = device

    return interactive_hardware.show_interactive_graph()
plot_hardware_connectivity_async(self, provider=None, device=None) async

Plot the hardware_connectivity graph. Installing the analyzer_sdk extra is required.

Parameters:

Name Type Description Default
provider

provider name (optional - string or AnalyzerProviderVendor).

None
device

device name (optional - string).

None

Returns:

Type Description
hardware_connectivity_graph ()

interactive graph.

Source code in classiq/analyzer/analyzer.py
async def plot_hardware_connectivity_async(
    self,
    provider: Optional[ProviderNameEnum] = None,
    device: Optional[DeviceName] = None,
) -> VBox:
    """plot the hardware_connectivity graph. It is required to required  install the
    analyzer_sdk extra.

    Args:
        provider (): provider name (optional - string or `AnalyzerProviderVendor`).
        device (): device name (optional - string).
    Returns:
     hardware_connectivity_graph (): interactive graph.
    """

    self._validate_analyzer_extra()
    interactive_hardware = InteractiveHardware(
        circuit=self.circuit,
        params=self._params,
        available_devices=self.available_devices,
        hardware_graphs=self.hardware_graphs,
    )
    await interactive_hardware.enable_interactivity_async()
    if provider is not None:
        interactive_hardware.providers_combobox.value = provider
        if device is not None:
            interactive_hardware.devices_combobox.value = device

    return interactive_hardware.show_interactive_graph()

rb

RBAnalysis

Source code in classiq/analyzer/rb.py
class RBAnalysis(metaclass=Asyncify):
    """Randomized-benchmarking (RB) analysis across one or more hardware experiments."""

    def __init__(self, experiments_data: List[AnalysisRBParams]) -> None:
        """Init self.

        Args:
            experiments_data: List of results from various RB experiments.
        """

        self.experiments_data = experiments_data
        # Populated by show_multiple_hardware_data_async; one column per hardware.
        self._total_results: pd.DataFrame = pd.DataFrame()

    async def _get_multiple_hardware_results_async(self) -> Dict[str, RbResults]:
        """Run the RB analysis task per experiment batch, keyed by hardware name.

        Raises:
            ClassiqAnalyzerError: If a batch holds fewer than five sequences.
        """
        total_result: Dict[str, RbResults] = {}
        for batch in self.experiments_data:
            # A meaningful RB fit needs at least five Clifford sequence lengths.
            if len(batch.num_clifford) < 5:
                raise ClassiqAnalyzerError(
                    f"An experiment must contain at least five sequences;"
                    f" this one contains {len(batch.num_clifford)}"
                )
            rb_result = await ApiWrapper.call_rb_analysis_task(batch)
            total_result[batch.hardware] = rb_result
        return total_result

    @staticmethod
    def _get_df_indices(results: Dict[str, RbResults]) -> List[str]:
        """Return the RB result field names, used as the DataFrame row index."""
        temp_res = results.copy()
        _, rb_result_keys = temp_res.popitem()
        return list(rb_result_keys.__dict__.keys())

    async def show_multiple_hardware_data_async(self) -> pd.DataFrame:
        """Run the RB analysis.

        Returns:
            The RB results as a DataFrame with one column per hardware.
        """
        results = await self._get_multiple_hardware_results_async()
        indices = RBAnalysis._get_df_indices(results)
        result_df = pd.DataFrame(index=indices)
        for hardware, result in results.items():
            result_df[hardware] = result.__dict__.values()
        self._total_results = result_df
        return result_df

    def plot_multiple_hardware_results(self) -> go.Figure:
        """Plot a bar graph comparing the RB results per hardware.

        Returns:
            The bar-graph figure.
        """
        df = self._total_results.loc[["mean_fidelity", "average_error"]].transpose()
        hardware = list(df.index)
        params = list(df.columns)
        # One bar trace per parameter, values converted to percentages.
        data = [
            go.Bar(name=param, x=hardware, y=df[param].values * 100)
            for param in params
        ]
        fig = go.Figure(data).update_layout(
            title="RB hardware comparison",
            barmode="group",
            yaxis=dict(title="Fidelity in %"),
            xaxis=dict(title="Hardware"),
        )
        return fig
__init__(self, experiments_data) special

Init self.

Parameters:

Name Type Description Default
experiments_data List[classiq.interface.analyzer.analysis_params.AnalysisRBParams]

List of results from various RB experiments.

required
Source code in classiq/analyzer/rb.py
def __init__(self, experiments_data: List[AnalysisRBParams]) -> None:
    """Init self.

    Args:
        experiments_data: List of results from varius RB experiments.
    """

    self.experiments_data = experiments_data
    self._total_results: pd.DataFrame = pd.DataFrame()
plot_multiple_hardware_results(self)

Plot Bar graph of the results.

Returns:

Type Description
Figure

None.

Source code in classiq/analyzer/rb.py
def plot_multiple_hardware_results(self) -> go.Figure:
    """Plot Bar graph of the results.

    Returns:
        None.
    """
    df = self._total_results.loc[["mean_fidelity", "average_error"]].transpose()
    hardware = list(df.index)
    params = list(df.columns)
    data = []
    for param in params:
        data.append(go.Bar(name=param, x=hardware, y=df[param].values * 100))
    fig = go.Figure(data).update_layout(
        title="RB hardware comparison",
        barmode="group",
        yaxis=dict(title="Fidelity in %"),
        xaxis=dict(title="Hardware"),
    )
    return fig
show_multiple_hardware_data(self) async

Run the RB analysis.

Returns:

Type Description
DataFrame

The RB result.

Source code in classiq/analyzer/rb.py
async def show_multiple_hardware_data_async(self) -> pd.DataFrame:
    """Run the RB analysis.

    Returns:
        The RB result.
    """
    results = await self._get_multiple_hardware_results_async()
    indices = RBAnalysis._get_df_indices(results)
    result_df = pd.DataFrame(index=indices)
    for hardware, result in results.items():
        result_df[hardware] = result.__dict__.values()
    self._total_results = result_df
    return result_df
show_multiple_hardware_data_async(self) async

Run the RB analysis.

Returns:

Type Description
DataFrame

The RB result.

Source code in classiq/analyzer/rb.py
async def show_multiple_hardware_data_async(self) -> pd.DataFrame:
    """Run the RB analysis.

    Returns:
        The RB result.
    """
    results = await self._get_multiple_hardware_results_async()
    indices = RBAnalysis._get_df_indices(results)
    result_df = pd.DataFrame(index=indices)
    for hardware, result in results.items():
        result_df[hardware] = result.__dict__.values()
    self._total_results = result_df
    return result_df

applications special

chemistry special

ansatz_parameters

HEAParameters dataclass

HEAParameters(reps: int, num_qubits: int, connectivity_map: List[Tuple[int, int]], one_qubit_gates: List[str], two_qubit_gates: List[str])

Source code in classiq/applications/chemistry/ansatz_parameters.py
@dataclasses.dataclass
class HEAParameters:
    """Parameters of a hardware-efficient ansatz (HEA)."""

    # Number of ansatz layer repetitions.
    reps: int
    # Number of qubits the ansatz acts on.
    num_qubits: int
    # Pairs of qubit indices on which two-qubit gates may act.
    connectivity_map: List[Tuple[int, int]]
    # Names of the one-qubit gates used in each layer.
    one_qubit_gates: List[str]
    # Names of the two-qubit gates used in each layer.
    two_qubit_gates: List[str]
HVAParameters dataclass

HVAParameters(reps: int)

Source code in classiq/applications/chemistry/ansatz_parameters.py
@dataclasses.dataclass
class HVAParameters:
    """Parameters of an HVA ansatz."""

    # Number of ansatz layer repetitions.
    reps: int
UCCParameters dataclass

UCCParameters(excitations: List[int] = )

Source code in classiq/applications/chemistry/ansatz_parameters.py
@dataclasses.dataclass
class UCCParameters:
    """Parameters of a UCC (unitary coupled cluster) ansatz."""

    # Excitations to include; default supplied by `default_excitation_factory`.
    excitations: EXCITATIONS_TYPE_EXACT = dataclasses.field(
        default_factory=default_excitation_factory
    )

chemistry_execution_parameters

ChemistryExecutionParameters dataclass

ChemistryExecutionParameters(optimizer: classiq.interface.executor.optimizer_preferences.OptimizerType, max_iteration: int, initial_point: Optional[numpy.ndarray] = None, tolerance: float = 0.0, step_size: float = 0.0, skip_compute_variance: bool = False)

Source code in classiq/applications/chemistry/chemistry_execution_parameters.py
@dataclasses.dataclass
class ChemistryExecutionParameters:
    """Execution hyper-parameters for a chemistry run."""

    # Classical optimizer driving the execution loop.
    optimizer: OptimizerType
    # Maximal number of optimizer iterations.
    max_iteration: int
    # NOTE: `dataclasses.field(default=...)` is redundant for immutable defaults
    # (None, float, bool); plain defaults below are behaviorally identical.
    # Optional starting point for the optimizer; None lets the backend choose.
    initial_point: Optional[np.ndarray] = None
    # Optimizer convergence tolerance.
    tolerance: float = 0.0
    # Optimizer step size.
    step_size: float = 0.0
    # Whether to skip the variance computation.
    skip_compute_variance: bool = False

combinatorial_optimization special

combinatorial_optimization_config

OptimizerConfig dataclass

OptimizerConfig(opt_type: classiq.interface.executor.optimizer_preferences.OptimizerType = , max_iteration: Optional[int] = None, tolerance: float = 0.0, step_size: float = 0.0, skip_compute_variance: bool = False, cost_type: classiq.interface.executor.optimizer_preferences.CostType = , alpha_cvar: float = 1.0, initial_point: Optional[List[float]] = None)

Source code in classiq/applications/combinatorial_optimization/combinatorial_optimization_config.py
@dataclass
class OptimizerConfig:
    """Configuration of the classical optimizer used in combinatorial optimization."""

    # Optimizer algorithm to use.
    opt_type: OptimizerType = OptimizerType.COBYLA
    # Maximal number of iterations; None means the backend default.
    max_iteration: Optional[int] = None
    # Convergence tolerance.
    tolerance: float = 0.0
    # Optimizer step size.
    step_size: float = 0.0
    # Whether to skip the variance computation.
    skip_compute_variance: bool = False
    # Cost aggregation strategy.
    cost_type: CostType = CostType.CVAR
    # CVaR confidence level (1.0 means plain expectation value).
    alpha_cvar: float = 1.0
    # Optional starting point; a plain `None` default is equivalent to the
    # redundant `dataclasses.field(default=None)` used previously.
    initial_point: Optional[List[float]] = None
QAOAConfig dataclass

QAOAConfig(num_layers: int = 2, penalty_energy: float = 2.0)

Source code in classiq/applications/combinatorial_optimization/combinatorial_optimization_config.py
@dataclass
class QAOAConfig:
    """Configuration of the QAOA ansatz."""

    # Number of QAOA layers.
    num_layers: int = 2
    # Penalty energy coefficient — presumably used to encode constraints into
    # the objective; confirm against the synthesis documentation.
    penalty_energy: float = 2.0

qnn special

datasets special

datasets_utils
all_bits_to_one(n)

Return an integer of length n bits, where all the bits are 1

Source code in classiq/applications/qnn/datasets/datasets_utils.py
def all_bits_to_one(n: int) -> int:
    """
    Return an integer of length `n` bits, where all the bits are `1`.
    """
    # 2**n is a one followed by n zero bits; subtracting one sets them all.
    full_width = 2**n
    return full_width - 1
all_bits_to_zero(n)

Return an integer of length n bits, where all the bits are 0

Source code in classiq/applications/qnn/datasets/datasets_utils.py
def all_bits_to_zero(n: int) -> int:
    """
    Return an integer of length `n` bits, where all the bits are `0`.
    """
    # An integer with every bit cleared is zero regardless of the width `n`.
    return 0
state_to_label(pure_state)

input: a Tensor of binary numbers (0 or 1) - the return value of a measurement output: probability (from that measurement) of measuring 0 (in other words, |0> translates to 100% chance for measuring |0> ==> return value is 1.0 |1> translates to 0% chance for measuring |0> ==> return value is 0.0 )

Source code in classiq/applications/qnn/datasets/datasets_utils.py
def state_to_label(pure_state: Tensor) -> Tensor:
    """
    Map measured basis states to the probability of measuring |0>.

    input: a `Tensor` of binary numbers (0 or 1) - the return value of a measurement
    output: probability (from that measurement) of measuring 0
    (|0> gives a 100% chance of measuring |0>, hence 1.0;
     |1> gives a   0% chance of measuring |0>, hence 0.0)
    """
    # Normalize each entry to an exact 0/1 integer, then invert it:
    # a measured 0 becomes probability 1, a measured 1 becomes probability 0.
    as_bits = pure_state.bool().int()
    return 1 - as_bits
state_to_weights(pure_state)

input: a Tensor of binary numbers (0 or 1) output: the required angle of rotation for Rx (in other words, |0> translates to no rotation, and |1> translates to pi)

Source code in classiq/applications/qnn/datasets/datasets_utils.py
def state_to_weights(pure_state: Tensor) -> Tensor:
    """
    input: a `Tensor` of binary numbers (0 or 1)
    output: the required angle of rotation for `Rx`
    (in other words, |0> translates to no rotation, and |1> translates to `pi`)
    """
    # |0> requires a rotation by 0
    # |1> requires a rotation by pi
    return pure_state.bool().int() * np.pi

qlayer

QLayer (Module)
Source code in classiq/applications/qnn/qlayer.py
class QLayer(nn.Module):
    """A PyTorch module whose trainable weights parametrize a quantum program.

    The forward pass delegates to `QLayerFunction`, which executes the program
    via the supplied `execute`/`post_process` callables.
    """

    def __init__(
        self,
        quantum_program: SerializedQuantumProgram,
        execute: ExecuteFunction,
        post_process: PostProcessFunction,
        # Optional parameters:
        head_start: Union[float, Tensor, None] = None,
        # Experimental parameters:
        calc_num_out_features: CalcNumOutFeatures = calc_num_out_features_single_output,
    ) -> None:
        """Init self.

        Args:
            quantum_program: The serialized quantum program to execute.
            execute: Callable that runs the program with given arguments.
            post_process: Callable that converts execution results to a `Tensor`.
            head_start: Optional initial weight value; a scalar fills all
                weights with that value, a `Tensor` is used as-is (cloned).
            calc_num_out_features: Callable computing the number of output
                features from the program (experimental).
        """
        circuit = Circuit.parse_raw(quantum_program)
        validate_circuit(circuit)

        super().__init__()

        self._execute = execute
        self._post_process = post_process
        self._head_start = head_start

        self.quantum_program = quantum_program

        # The program's extracted weight parameters define the input dimension.
        weights, _ = extract_parameters(circuit)
        self.in_features: int = len(weights)
        self.out_features: int = calc_num_out_features(quantum_program)

        self._initialize_parameters()

    def _initialize_parameters(self) -> None:
        """Create the trainable weight `Parameter` according to `head_start`."""
        # 1D weights for a single output feature, 2D otherwise.
        shape: Tuple[int, ...] = (
            (self.out_features, self.in_features)
            if self.out_features > 1
            else (self.in_features,)
        )

        if self._head_start is None:
            # Default: uniform random initialization in [0, 1).
            value = torch.rand(shape)
        elif isinstance(self._head_start, (float, int)):
            value = torch.zeros(shape) + self._head_start
        elif isinstance(self._head_start, Tensor):
            # Clone so later in-place updates don't mutate the caller's tensor.
            value = self._head_start.clone()
        else:
            raise ClassiqQNNError(
                f"Unsupported feature - head_start of type {type(self._head_start)}"
            )

        self.weight = Parameter(value)

    def forward(self, x: Tensor) -> Tensor:
        """Execute the quantum program on `x` using the current weights."""
        return QLayerFunction.apply(  # type: ignore[no-untyped-call]
            x, self.weight, self.quantum_program, self._execute, self._post_process
        )
forward(self, x)

Define the computation performed at every call.

Should be overridden by all subclasses.

.. note:: Although the recipe for forward pass needs to be defined within this function, one should call the :class:Module instance afterwards instead of this since the former takes care of running the registered hooks while the latter silently ignores them.

Source code in classiq/applications/qnn/qlayer.py
def forward(self, x: Tensor) -> Tensor:
    return QLayerFunction.apply(  # type: ignore[no-untyped-call]
        x, self.weight, self.quantum_program, self._execute, self._post_process
    )
QLayerFunction (Function)
Source code in classiq/applications/qnn/qlayer.py
class QLayerFunction(torch.autograd.Function):
    # Custom autograd Function bridging torch and quantum execution:
    # `forward` runs the quantum program over the batched inputs/weights and
    # `backward` obtains gradients from a `SimpleQuantumGradient` object
    # instead of autograd tracing.
    @staticmethod
    def forward(  # type: ignore[override]
        ctx: Any,
        inputs: Tensor,
        weights: Tensor,
        quantum_program: SerializedQuantumProgram,
        execute: ExecuteFunction,
        post_process: PostProcessFunction,
    ) -> Tensor:
        """
        This function receives:
            inputs: a 2D Tensor of floats - (batch_size, in_features)
            weights: a 2D Tensor of floats - (out_features, num_weights)
            circuit: a `GeneratedCircuit` object
            execute: a function taking a `GeneratedCircuit` and `MultipleArguments`
                and returning `MultipleExecutionDetails`
            post_process: a function taking a single `ExecutionDetails`
                and returning a `Tensor`

        """
        # Deserialize and sanity-check the program before any execution.
        circuit = Circuit.parse_raw(quantum_program)
        validate_circuit(circuit)

        # save for backward
        ctx.save_for_backward(inputs, weights)
        ctx.quantum_program = quantum_program
        ctx.execute = execute
        ctx.post_process = post_process
        ctx.quantum_gradient = SimpleQuantumGradient(
            quantum_program, execute, post_process
        )

        ctx.batch_size, ctx.num_in_features = inputs.shape
        # NOTE(review): in the single-layer case `ctx.num_weights` is stored
        # as a shape tuple, while in the multi-layer case it is an int —
        # confirm downstream consumers expect this asymmetry.
        if is_single_layer_circuit(weights):
            ctx.num_weights = weights.shape
        else:
            ctx.num_out_features, ctx.num_weights = weights.shape

        # Todo: avoid computing `_get_extracted_parameters` on every `forward`
        extracted_parameters = extract_parameters(circuit)

        # Todo: avoid defining `convert_tensors_to_arguments` on every `forward`
        def convert_tensors_to_arguments(
            inputs_: Tensor, weights_: Tensor
        ) -> MultipleArguments:
            # Map one (inputs, weights) pair onto the circuit's extracted
            # parameters; returned as a 1-tuple of argument mappings.
            arguments = map_parameters(
                extracted_parameters,
                inputs_,
                weights_,
            )
            return (arguments,)

        # Execute the program for each inputs/weights combination and
        # post-process the execution results into the output tensor.
        return iter_inputs_weights(
            inputs,
            weights,
            convert_tensors_to_arguments,
            functools.partial(execute, quantum_program),
            post_process,
        )

    @staticmethod
    def backward(  # type: ignore[override]
        ctx: Any, grad_output: Tensor
    ) -> Tuple[Optional[Tensor], Optional[Tensor], None, None, None]:
        """
        grad_output: Tensor
            is of shape (ctx.batch_size, ctx.num_out_features)
        """
        inputs, weights = ctx.saved_tensors

        # One gradient slot per `forward` argument; the program/execute/
        # post_process slots are non-differentiable and remain None.
        grad_weights = grad_inputs = None
        grad_circuit = grad_execute = grad_post_process = None
        is_single_layer = is_single_layer_circuit(weights)

        # needs_input_grad[1] corresponds to the `weights` argument.
        if ctx.needs_input_grad[1]:
            grad_weights = ctx.quantum_gradient.gradient_weights(inputs, weights)
            grad_weights = einsum_weigths(grad_output, grad_weights, is_single_layer)

        # needs_input_grad[0] corresponds to the `inputs` argument.
        if ctx.needs_input_grad[0]:
            grad_inputs = ctx.quantum_gradient.gradient_inputs(inputs, weights)
            grad_inputs = einsum_inputs(grad_output, grad_inputs, is_single_layer)

        # Gradients cannot be propagated to the non-tensor arguments
        # (program, execute, post_process).
        if any(ctx.needs_input_grad[i] for i in (2, 3, 4)):
            raise ClassiqTorchError(
                f"Grad required for unknown type: {ctx.needs_input_grad}"
            )

        return grad_inputs, grad_weights, grad_circuit, grad_execute, grad_post_process
backward(ctx, grad_output) staticmethod

Tensor

is of shape (ctx.batch_size, ctx.num_out_features)

Source code in classiq/applications/qnn/qlayer.py
@staticmethod
def backward(  # type: ignore[override]
    ctx: Any, grad_output: Tensor
) -> Tuple[Optional[Tensor], Optional[Tensor], None, None, None]:
    """
    grad_output: Tensor
        is of shape (ctx.batch_size, ctx.num_out_features)
    """
    inputs, weights = ctx.saved_tensors

    grad_weights = grad_inputs = None
    grad_circuit = grad_execute = grad_post_process = None
    is_single_layer = is_single_layer_circuit(weights)

    if ctx.needs_input_grad[1]:
        grad_weights = ctx.quantum_gradient.gradient_weights(inputs, weights)
        grad_weights = einsum_weigths(grad_output, grad_weights, is_single_layer)

    if ctx.needs_input_grad[0]:
        grad_inputs = ctx.quantum_gradient.gradient_inputs(inputs, weights)
        grad_inputs = einsum_inputs(grad_output, grad_inputs, is_single_layer)

    if any(ctx.needs_input_grad[i] for i in (2, 3, 4)):
        raise ClassiqTorchError(
            f"Grad required for unknown type: {ctx.needs_input_grad}"
        )

    return grad_inputs, grad_weights, grad_circuit, grad_execute, grad_post_process
forward(ctx, inputs, weights, quantum_program, execute, post_process) staticmethod

This function receives: inputs — a 2D Tensor of floats of shape (batch_size, in_features); weights — a 2D Tensor of floats of shape (out_features, num_weights); circuit — a GeneratedCircuit object; execute — a function taking a GeneratedCircuit and MultipleArguments and returning MultipleExecutionDetails; post_process — a function taking a single ExecutionDetails and returning a Tensor.

Source code in classiq/applications/qnn/qlayer.py
@staticmethod
def forward(  # type: ignore[override]
    ctx: Any,
    inputs: Tensor,
    weights: Tensor,
    quantum_program: SerializedQuantumProgram,
    execute: ExecuteFunction,
    post_process: PostProcessFunction,
) -> Tensor:
    """
    This function receives:
        inputs: a 2D Tensor of floats - (batch_size, in_features)
        weights: a 2D Tensor of floats - (out_features, num_weights)
        circuit: a `GeneratedCircuit` object
        execute: a function taking a `GeneratedCircuit` and `MultipleArguments`
            and returning `MultipleExecutionDetails`
        post_process: a function taking a single `ExecutionDetails`
            and returning a `Tensor`

    """
    circuit = Circuit.parse_raw(quantum_program)
    validate_circuit(circuit)

    # save for backward
    ctx.save_for_backward(inputs, weights)
    ctx.quantum_program = quantum_program
    ctx.execute = execute
    ctx.post_process = post_process
    ctx.quantum_gradient = SimpleQuantumGradient(
        quantum_program, execute, post_process
    )

    ctx.batch_size, ctx.num_in_features = inputs.shape
    if is_single_layer_circuit(weights):
        ctx.num_weights = weights.shape
    else:
        ctx.num_out_features, ctx.num_weights = weights.shape

    # Todo: avoid computing `_get_extracted_parameters` on every `forward`
    extracted_parameters = extract_parameters(circuit)

    # Todo: avoid defining `convert_tensors_to_arguments` on every `forward`
    def convert_tensors_to_arguments(
        inputs_: Tensor, weights_: Tensor
    ) -> MultipleArguments:
        arguments = map_parameters(
            extracted_parameters,
            inputs_,
            weights_,
        )
        return (arguments,)

    return iter_inputs_weights(
        inputs,
        weights,
        convert_tensors_to_arguments,
        functools.partial(execute, quantum_program),
        post_process,
    )

executor

Executor module, implementing facilities for executing quantum programs using Classiq platform.

interface special

analyzer special

analysis_params

AnalysisOptionalDevicesParams (HardwareListParams) pydantic-model
Source code in classiq/interface/analyzer/analysis_params.py
class AnalysisOptionalDevicesParams(HardwareListParams):
    qubit_count: int = pydantic.Field(
        default=..., description="number of qubits in the data"
    )
qubit_count: int pydantic-field required

number of qubits in the data

ChemistryGenerationParams (BaseModel) pydantic-model
Source code in classiq/interface/analyzer/analysis_params.py
class ChemistryGenerationParams(pydantic.BaseModel):
    class Config:
        title = "Chemistry"

    molecule: MoleculeProblem = pydantic.Field(
        title="Molecule",
        default=...,
        description="The molecule to generate the VQE ansatz for",
    )
    optimizer_preferences: OptimizerPreferences = pydantic.Field(
        default=..., description="Execution options for the classical Optimizer"
    )

    def initial_point(self) -> Optional[numpy.ndarray]:
        if self.optimizer_preferences.initial_point is not None:
            return numpy.ndarray(
                self.optimizer_preferences.initial_point  # type: ignore
            )
        else:
            return None
molecule: MoleculeProblem pydantic-field required

The molecule to generate the VQE ansatz for

optimizer_preferences: OptimizerPreferences pydantic-field required

Execution options for the classical Optimizer

HardwareListParams (BaseModel) pydantic-model
Source code in classiq/interface/analyzer/analysis_params.py
class HardwareListParams(pydantic.BaseModel):
    devices: Optional[List[PydanticNonEmptyString]] = pydantic.Field(
        default=None, description="Devices"
    )
    providers: List[Provider]
    from_ide: bool = Field(default=False)

    @pydantic.validator("providers", always=True)
    def set_default_providers(
        cls, providers: Optional[List[AnalyzerProviderVendor]]
    ) -> List[AnalyzerProviderVendor]:
        if providers is None:
            providers = list(AnalyzerProviderVendor)
        return providers
devices: List[classiq.interface.helpers.custom_pydantic_types.ConstrainedStrValue] pydantic-field

Devices

HardwareParams (BaseModel) pydantic-model
Source code in classiq/interface/analyzer/analysis_params.py
class HardwareParams(pydantic.BaseModel):
    device: PydanticNonEmptyString = pydantic.Field(default=None, description="Devices")
    provider: AnalyzerProviderVendor
device: ConstrainedStrValue pydantic-field

Devices

cytoscape_graph

CytoScapeEdge (BaseModel) pydantic-model
Source code in classiq/interface/analyzer/cytoscape_graph.py
class CytoScapeEdge(pydantic.BaseModel):
    data: CytoScapeEdgeData = pydantic.Field(
        default=..., description="Edge's Data, mainly the source and target of the Edge"
    )
data: CytoScapeEdgeData pydantic-field required

Edge's Data, mainly the source and target of the Edge

CytoScapeEdgeData (BaseModel) pydantic-model
Source code in classiq/interface/analyzer/cytoscape_graph.py
class CytoScapeEdgeData(pydantic.BaseModel):
    source: str = pydantic.Field(
        default=..., description="the Id of the Node that is the Source of the edge"
    )
    target: str = pydantic.Field(
        default=..., description="the Id of the Node that is the Target the edge"
    )
source: str pydantic-field required

the Id of the Node that is the Source of the edge

target: str pydantic-field required

the Id of the Node that is the Target of the edge

CytoScapeGraph (BaseModel) pydantic-model
Source code in classiq/interface/analyzer/cytoscape_graph.py
class CytoScapeGraph(pydantic.BaseModel):
    nodes: List[CytoScapeNode] = pydantic.Field(
        default_factory=list,
        description="Nodes of the Graph",
    )
    edges: List[CytoScapeEdge] = pydantic.Field(
        default_factory=list,
        description="Edges of the Graph",
    )
edges: List[classiq.interface.analyzer.cytoscape_graph.CytoScapeEdge] pydantic-field

Edges of the Graph

nodes: List[classiq.interface.analyzer.cytoscape_graph.CytoScapeNode] pydantic-field

Nodes of the Graph

CytoScapeNode (BaseModel) pydantic-model
Source code in classiq/interface/analyzer/cytoscape_graph.py
class CytoScapeNode(pydantic.BaseModel):
    data: Dict[str, Any] = pydantic.Field(
        default=...,
        description="Data of the Node, such as label, and color, can be of free form",
    )
    position: Optional[CytoScapePosition] = pydantic.Field(
        default=..., description="Position of the Node to be rendered in Cytocape"
    )
data: Dict[str, Any] pydantic-field required

Data of the Node, such as label, and color, can be of free form

position: CytoScapePosition pydantic-field required

Position of the Node to be rendered in Cytoscape

CytoScapePosition (BaseModel) pydantic-model
Source code in classiq/interface/analyzer/cytoscape_graph.py
class CytoScapePosition(pydantic.BaseModel):
    x: int = pydantic.Field(
        default=..., description="X coordinate in the Cytoscape View"
    )
    y: int = pydantic.Field(
        default=..., description="Y coordinate in the Cytoscape View"
    )
x: int pydantic-field required

X coordinate in the Cytoscape View

y: int pydantic-field required

Y coordinate in the Cytoscape View

HardwareConnectivityGraphResult (VersionedModel) pydantic-model
Source code in classiq/interface/analyzer/cytoscape_graph.py
class HardwareConnectivityGraphResult(VersionedModel):
    graph: Optional[CytoScapeGraph] = pydantic.Field(
        default=...,
        description="The Cytoscape graph in the desired Structure for the FE",
    )
    error: ConnectivityErrors = pydantic.Field(
        default=ConnectivityErrors.EMPTY,
        description="Any errors encountered while generating the graph",
    )
error: ConnectivityErrors pydantic-field

Any errors encountered while generating the graph

graph: CytoScapeGraph pydantic-field required

The Cytoscape graph in the desired Structure for the FE

result

Analysis (VersionedModel) pydantic-model
Source code in classiq/interface/analyzer/result.py
class Analysis(VersionedModel):
    input_properties: QuantumCircuitProperties = pydantic.Field(
        default=..., description="Input circuit properties"
    )
    native_properties: NativeQuantumCircuitProperties = pydantic.Field(
        default=..., description="Transpiled circuit properties"
    )
input_properties: QuantumCircuitProperties pydantic-field required

Input circuit properties

native_properties: NativeQuantumCircuitProperties pydantic-field required

Transpiled circuit properties

AvailableHardware (BaseModel) pydantic-model
Source code in classiq/interface/analyzer/result.py
class AvailableHardware(pydantic.BaseModel):
    ibm_quantum: Optional[Dict[PydanticNonEmptyString, bool]] = pydantic.Field(
        default=None,
        description="available IBM Quantum devices with boolean indicates if a given device has enough qubits.",
    )
    azure_quantum: Optional[Dict[PydanticNonEmptyString, bool]] = pydantic.Field(
        default=None,
        description="available Azure Quantum devices with boolean indicates if a given device has enough qubits.",
    )
    amazon_braket: Optional[Dict[PydanticNonEmptyString, bool]] = pydantic.Field(
        default=None,
        description="available Amazon Braket devices with boolean indicates if a given device has enough qubits.",
    )
amazon_braket: Dict[classiq.interface.helpers.custom_pydantic_types.ConstrainedStrValue, bool] pydantic-field

available Amazon Braket devices with boolean indicates if a given device has enough qubits.

azure_quantum: Dict[classiq.interface.helpers.custom_pydantic_types.ConstrainedStrValue, bool] pydantic-field

available Azure Quantum devices with boolean indicates if a given device has enough qubits.

ibm_quantum: Dict[classiq.interface.helpers.custom_pydantic_types.ConstrainedStrValue, bool] pydantic-field

available IBM Quantum devices with boolean indicates if a given device has enough qubits.

HardwareComparisonInformation (BaseModel) pydantic-model
Source code in classiq/interface/analyzer/result.py
class HardwareComparisonInformation(pydantic.BaseModel):
    devices: List[str] = pydantic.Field(
        default=..., description="Device which is used for the transpilation."
    )
    providers: List[str] = pydantic.Field(
        default=..., description="Provider cloud of the device."
    )
    depth: List[pydantic.NonNegativeInt] = pydantic.Field(
        default=..., description="Circuit depth."
    )
    multi_qubit_gate_count: List[pydantic.NonNegativeInt] = pydantic.Field(
        default=..., description="Number of multi qubit gates."
    )
    total_gate_count: List[pydantic.NonNegativeInt] = pydantic.Field(
        default=..., description="Number of total gates."
    )

    @pydantic.root_validator
    def validate_equal_length(cls, values: Dict[str, list]) -> Dict[str, list]:
        lengths = list(map(len, values.values()))
        if len(set(lengths)) != 1:
            raise ValueError("All lists should have the same length")
        return values
depth: List[pydantic.types.NonNegativeInt] pydantic-field required

Circuit depth.

devices: List[str] pydantic-field required

Device which is used for the transpilation.

multi_qubit_gate_count: List[pydantic.types.NonNegativeInt] pydantic-field required

Number of multi qubit gates.

providers: List[str] pydantic-field required

Provider cloud of the device.

total_gate_count: List[pydantic.types.NonNegativeInt] pydantic-field required

Number of total gates.

NativeQuantumCircuitProperties (QuantumCircuitProperties) pydantic-model
Source code in classiq/interface/analyzer/result.py
class NativeQuantumCircuitProperties(QuantumCircuitProperties):
    native_gates: Set[BasisGates] = pydantic.Field(
        default=..., description="Native gates used for decomposition"
    )
native_gates: Set[classiq.interface.analyzer.result.BasisGates] pydantic-field required

Native gates used for decomposition

QuantumCircuitProperties (BaseModel) pydantic-model
Source code in classiq/interface/analyzer/result.py
class QuantumCircuitProperties(pydantic.BaseModel):
    depth: pydantic.NonNegativeInt = pydantic.Field(
        default=..., description="Circuit depth"
    )
    auxiliary_qubits: pydantic.NonNegativeInt = pydantic.Field(
        default=..., description="Number of Auxiliary qubits"
    )
    classical_bits: pydantic.NonNegativeInt = pydantic.Field(
        default=..., description="Number of classical bits"
    )
    gates_count: pydantic.NonNegativeInt = pydantic.Field(
        default=..., description="Total number of gates in the circuit"
    )
    multi_qubit_gates_count: pydantic.NonNegativeInt = pydantic.Field(
        default=..., description="Number of multi-qubit gates in circuit"
    )
    non_entangled_subcircuits_count: pydantic.NonNegativeInt = pydantic.Field(
        default=..., description="Number of non-entangled sub-circuit "
    )
auxiliary_qubits: NonNegativeInt pydantic-field required

Number of Auxiliary qubits

classical_bits: NonNegativeInt pydantic-field required

Number of classical bits

depth: NonNegativeInt pydantic-field required

Circuit depth

gates_count: NonNegativeInt pydantic-field required

Total number of gates in the circuit

multi_qubit_gates_count: NonNegativeInt pydantic-field required

Number of multi-qubit gates in circuit

non_entangled_subcircuits_count: NonNegativeInt pydantic-field required

Number of non-entangled sub-circuits

SingleHardwareInformation (BaseModel) pydantic-model
Source code in classiq/interface/analyzer/result.py
class SingleHardwareInformation(pydantic.BaseModel):
    devices: str = pydantic.Field(
        default=..., description="Device which is used for the transpilation."
    )
    providers: str = pydantic.Field(
        default=..., description="Provider cloud of the device."
    )
    depth: pydantic.NonNegativeInt = pydantic.Field(
        default=..., description="Circuit depth."
    )
    multi_qubit_gate_count: pydantic.NonNegativeInt = pydantic.Field(
        default=..., description="Number of multi qubit gates."
    )
    total_gate_count: pydantic.NonNegativeInt = pydantic.Field(
        default=..., description="Number of total gates."
    )
depth: NonNegativeInt pydantic-field required

Circuit depth.

devices: str pydantic-field required

Device which is used for the transpilation.

multi_qubit_gate_count: NonNegativeInt pydantic-field required

Number of multi qubit gates.

providers: str pydantic-field required

Provider cloud of the device.

total_gate_count: NonNegativeInt pydantic-field required

Number of total gates.

backend special

backend_preferences

AliceBobBackendPreferences (BackendPreferences) pydantic-model
Source code in classiq/interface/backend/backend_preferences.py
class AliceBobBackendPreferences(BackendPreferences):
    backend_service_provider: ProviderTypeVendor.ALICE_BOB
    api_key: pydantic_backend.PydanticAliceBobApiKeyType = pydantic.Field(
        ..., description="AliceBob API key"
    )

    @pydantic.root_validator(pre=True)
    def _set_backend_service_provider(cls, values: Dict[str, Any]) -> Dict[str, Any]:
        return values_with_discriminator(
            values, "backend_service_provider", ProviderVendor.ALICE_AND_BOB
        )
api_key: ConstrainedStrValue pydantic-field required

AliceBob API key

AwsBackendPreferences (BackendPreferences) pydantic-model
Source code in classiq/interface/backend/backend_preferences.py
class AwsBackendPreferences(BackendPreferences):
    backend_service_provider: ProviderTypeVendor.AMAZON_BRAKET
    aws_role_arn: pydantic_backend.PydanticAwsRoleArn = pydantic.Field(
        description="ARN of the role to be assumed for execution on your Braket account."
    )
    s3_bucket_name: str = pydantic.Field(description="S3 Bucket Name")
    s3_folder: pydantic_backend.PydanticS3BucketKey = pydantic.Field(
        description="S3 Folder Path Within The S3 Bucket"
    )
    job_timeout: pydantic_backend.PydanticExecutionTimeout = pydantic.Field(
        description="Timeout for Jobs sent for execution in seconds.",
        default=AWS_DEFAULT_JOB_TIMEOUT_SECONDS,
    )

    @validator("s3_bucket_name")
    def _validate_s3_bucket_name(
        cls, s3_bucket_name: str, values: Dict[str, Any]
    ) -> str:
        s3_bucket_name = s3_bucket_name.strip()
        if not s3_bucket_name.startswith("amazon-braket-"):
            raise ValueError('S3 bucket name should start with "amazon-braket-"')
        return s3_bucket_name

    @pydantic.root_validator(pre=True)
    def _set_backend_service_provider(cls, values: Dict[str, Any]) -> Dict[str, Any]:
        return values_with_discriminator(
            values, "backend_service_provider", ProviderVendor.AMAZON_BRAKET
        )
aws_role_arn: ConstrainedStrValue pydantic-field required

ARN of the role to be assumed for execution on your Braket account.

job_timeout: ConstrainedIntValue pydantic-field

Timeout for Jobs sent for execution in seconds.

s3_bucket_name: str pydantic-field required

S3 Bucket Name

s3_folder: ConstrainedStrValue pydantic-field required

S3 Folder Path Within The S3 Bucket

AzureBackendPreferences (BackendPreferences) pydantic-model
Source code in classiq/interface/backend/backend_preferences.py
class AzureBackendPreferences(BackendPreferences):
    backend_service_provider: ProviderTypeVendor.AZURE_QUANTUM

    location: str = pydantic.Field(
        default="East US", description="Azure personal resource region"
    )

    credentials: Optional[AzureCredential] = pydantic.Field(
        default=None,
        description="The service principal credential to access personal quantum workspace",
    )

    @property
    def run_through_classiq(self) -> bool:
        return self.credentials is None

    @pydantic.root_validator(pre=True)
    def _set_backend_service_provider(cls, values: Dict[str, Any]) -> Dict[str, Any]:
        return values_with_discriminator(
            values, "backend_service_provider", ProviderVendor.AZURE_QUANTUM
        )
credentials: AzureCredential pydantic-field

The service principal credential to access personal quantum workspace

location: str pydantic-field

Azure personal resource region

AzureCredential (BaseSettings) pydantic-model
Source code in classiq/interface/backend/backend_preferences.py
class AzureCredential(pydantic.BaseSettings):
    tenant_id: str = pydantic.Field(description="Azure Tenant ID")
    client_id: str = pydantic.Field(description="Azure Client ID")
    client_secret: str = pydantic.Field(description="Azure Client Secret")
    resource_id: pydantic_backend.PydanticAzureResourceIDType = pydantic.Field(
        description="Azure Resource ID (including Azure subscription ID, resource "
        "group and workspace), for personal resource",
    )

    class Config:
        title = "Azure Service Principal Credential"
        env_prefix = "AZURE_"
        case_sensitive = False
client_id: str pydantic-field required

Azure Client ID

client_secret: str pydantic-field required

Azure Client Secret

resource_id: ConstrainedStrValue pydantic-field required

Azure Resource ID (including Azure subscription ID, resource group and workspace), for personal resource

tenant_id: str pydantic-field required

Azure Tenant ID

BackendPreferences (BaseModel) pydantic-model
Source code in classiq/interface/backend/backend_preferences.py
class BackendPreferences(BaseModel):
    # Due to the way the field is currently implemented, i.e. it redefined with different types
    # in the subclass, it shouldn't be dumped with exclude_unset. This causes this field not to appear.
    # For example: don't use obj.dict(exclude_unset=True).
    backend_service_provider: str = pydantic.Field(
        ..., description="Provider company or cloud for the requested backend."
    )
    backend_name: str = pydantic.Field(
        ..., description="Name of the requested backend or target."
    )

    @property
    def hw_provider(self) -> Provider:
        return Provider(self.backend_service_provider)

    @pydantic.validator("backend_service_provider", pre=True)
    def validate_backend_service_provider(
        cls, backend_service_provider: Any
    ) -> Provider:
        return validate_backend_service_provider(backend_service_provider)

    @classmethod
    def batch_preferences(
        cls, *, backend_names: Iterable[str], **kwargs: Any
    ) -> List[BackendPreferences]:
        return [cls(backend_name=name, **kwargs) for name in backend_names]

    def is_nvidia_backend(self) -> bool:
        return False
backend_name: str pydantic-field required

Name of the requested backend or target.

backend_service_provider: str pydantic-field required

Provider company or cloud for the requested backend.

IBMBackendPreferences (BackendPreferences) pydantic-model
Source code in classiq/interface/backend/backend_preferences.py
class IBMBackendPreferences(BackendPreferences):
    backend_service_provider: ProviderTypeVendor.IBM_QUANTUM
    access_token: Optional[str] = pydantic.Field(
        default=None,
        description="IBM Quantum access token to be used"
        " with IBM Quantum hosted backends",
    )
    provider: IBMBackendProvider = pydantic.Field(
        default_factory=IBMBackendProvider,
        description="Provider specs. for identifying a single IBM Quantum provider.",
    )

    @pydantic.root_validator(pre=True)
    def _set_backend_service_provider(cls, values: Dict[str, Any]) -> Dict[str, Any]:
        return values_with_discriminator(
            values, "backend_service_provider", ProviderVendor.IBM_QUANTUM
        )
access_token: str pydantic-field

IBM Quantum access token to be used with IBM Quantum hosted backends

provider: IBMBackendProvider pydantic-field

Provider specs. for identifying a single IBM Quantum provider.

IonqBackendPreferences (BackendPreferences) pydantic-model
Source code in classiq/interface/backend/backend_preferences.py
class IonqBackendPreferences(BackendPreferences):
    backend_service_provider: ProviderTypeVendor.IONQ
    api_key: pydantic_backend.PydanticIonQApiKeyType = pydantic.Field(
        ..., description="IonQ API key"
    )

    @pydantic.root_validator(pre=True)
    def _set_backend_service_provider(cls, values: Dict[str, Any]) -> Dict[str, Any]:
        return values_with_discriminator(
            values, "backend_service_provider", ProviderVendor.IONQ
        )
api_key: ConstrainedStrValue pydantic-field required

IonQ API key

OQCBackendPreferences (BackendPreferences) pydantic-model
Source code in classiq/interface/backend/backend_preferences.py
class OQCBackendPreferences(BackendPreferences):
    backend_service_provider: ProviderTypeVendor.OQC
    username: str = pydantic.Field(description="OQC username")
    password: str = pydantic.Field(description="OQC password")

    @pydantic.root_validator(pre=True)
    def _set_backend_service_provider(cls, values: Dict[str, Any]) -> Dict[str, Any]:
        return values_with_discriminator(
            values, "backend_service_provider", ProviderVendor.OQC
        )
password: str pydantic-field required

OQC password

username: str pydantic-field required

OQC username

chemistry special

fermionic_operator

FermionicOperator (HashablePydanticBaseModel) pydantic-model

Specification of a Fermionic operator. Input: a list of ladder operators, where each ladder operator is described by a tuple of a character indicating whether it is a creation ('+') or annihilation ('-') operator, followed by its index — for example ('+', 0).

Source code in classiq/interface/chemistry/fermionic_operator.py
class FermionicOperator(HashablePydanticBaseModel):
    """
    Specification of a Fermionic operator.
    Input:
    List of ladder operators, each ladder operator is described by a tuple of its
    index and a character indicating if it's a creation ('+') or annihilation operator ('-').
    """

    op_list: list = pydantic.Field(
        description="A list of tuples each containing an index and a character; for example [('+', 0), ('-', 1)].",
    )

    @staticmethod
    def _validate_single_op(op: tuple) -> LadderOperator:
        if not isinstance(op, tuple):
            try:  # type: ignore[unreachable] # it is reachable...
                op = tuple(op)
            except Exception as exc:
                raise ValueError("Ladder operator should be a tuple.") from exc
        if len(op) != 2:
            raise ValueError(
                "Ladder operator tuple should be of length two; for example (1, '+')."
            )

        if op[0] not in ("+", "-"):
            raise ValueError(
                "The first term in a ladder operator tuple indicates if its a raising ('+')"
                f" or lowering ('-') operator. Allowed input is: '+' or '-', received {op[0]}"
            )
        if not isinstance(op[1], int):
            raise ValueError(
                "The second term in a ladder operator tuple indicates its index and should be of type int"
            )

        return op  # type: ignore[return-value] # mypy thinks that it is `Tuple[Any, ...]`, though the asserts here tell otherwise..

    @pydantic.validator("op_list")
    def _validate_op_list(cls, op_list: list) -> list:
        return list(map(cls._validate_single_op, op_list))

    def __mul__(self, coeff: Union[float, int]) -> SummedFermionicOperator:
        if isinstance(coeff, (float, int)):
            return SummedFermionicOperator(op_list=[(self, float(coeff))])
        raise ValueError(
            "The coefficient multiplying Fermionic Operator should be of type float"
        )

    __rmul__ = __mul__

    def __add__(
        self, other: Union[SummedFermionicOperator, FermionicOperator]
    ) -> SummedFermionicOperator:
        if isinstance(other, SummedFermionicOperator):
            return SummedFermionicOperator(op_list=[(self, 1.0)] + other.op_list)
        elif isinstance(other, FermionicOperator):
            return SummedFermionicOperator(op_list=[(self, 1.0)] + [(other, 1.0)])
        raise ValueError(
            "FermionicOperator can be summed together only with type FermionicOperator or SummedFermionicOperator"
        )

    class Config:
        frozen = True

    @staticmethod
    def _to_ladder_op(char: str) -> str:
        return "a" + _SUPERSCRIPT_PLUS if char == "+" else "a"

    @staticmethod
    def _to_subscript(num: int) -> str:
        return "".join(_SUBSCRIPT_UNICODE_CHARS[digit] for digit in str(num))

    def __str__(self) -> str:
        return "".join(
            f"{self._to_ladder_op(char)}{self._to_subscript(index)}"
            for (char, index) in self.op_list
        )

    @property
    def all_indices(self) -> Set[int]:
        return {op[1] for op in self.op_list}
op_list: list pydantic-field required

A list of tuples each containing an index and a character; for example [('+', 0), ('-', 1)].

__str__(self) special

Return str(self).

Source code in classiq/interface/chemistry/fermionic_operator.py
def __str__(self) -> str:
    return "".join(
        f"{self._to_ladder_op(char)}{self._to_subscript(index)}"
        for (char, index) in self.op_list
    )
SummedFermionicOperator (HashablePydanticBaseModel) pydantic-model

Specification of a summed Fermionic operator. Input: List of fermionic operators tuples, The first term in the tuple is the FermionicOperator and the second term is its coefficient. For example: op1 = FermionicOperator(op_list=[('+', 0), ('-', 1)]) op2 = FermionicOperator(op_list=[('-', 0), ('-', 1)]) summed_operator = SummedFermionicOperator(op_list=[(op1, 0.2), (op2, 6.7)])

Source code in classiq/interface/chemistry/fermionic_operator.py
class SummedFermionicOperator(HashablePydanticBaseModel):
    """
    Specification of a summed Fermionic operator.
    Input:
    List of fermionic operators tuples, The first term in the tuple is the FermionicOperator and the second term is its coefficient.
    For example:
    op1 = FermionicOperator(op_list=[('+', 0), ('-', 1)])
    op2 = FermionicOperator(op_list=[('-', 0), ('-', 1)])
    summed_operator = SummedFermionicOperator(op_list=[(op1, 0.2), (op2, 6.7)])
    """

    # Each entry is coerced by the validators below into a (FermionicOperator, float) tuple.
    op_list: list = pydantic.Field(
        description="A list of tuples each containing a FermionicOperator and a coefficient.",
    )

    class Config:
        # Frozen pydantic model: instances are immutable and therefore hashable.
        frozen = True

    @staticmethod
    def _validate_single_op(op: tuple) -> FermionicOperatorTuple:
        """Coerce one op_list entry into a (FermionicOperator, float) tuple, or raise ValueError."""
        # is it tuple - if not, convert to tuple
        if not isinstance(op, tuple):
            try:  # type: ignore[unreachable] # it is reachable...
                op = tuple(op)
            except Exception as exc:
                raise ValueError("Operator should be a tuple.") from exc
        if len(op) != 2:
            raise ValueError("Operator tuple should be of length two.")

        # is it FermionicOperator - if not, convert to FermionicOperator
        if type(op[0]) is not FermionicOperator:
            try:
                # Assumes op[0] is a mapping of FermionicOperator's field names -- any failure
                # (wrong type, bad fields) is reported uniformly below.
                op = (FermionicOperator(**op[0]), op[1])
            except Exception as exc:
                raise ValueError(
                    "The first term in the operator tuple should be an instance of the FermionicOperator class"
                ) from exc

        # Note: exact-type check, so ints (and bools) are rejected as coefficients.
        if type(op[1]) is not float:
            raise ValueError(
                "The second term in the operator tuple indicates its coefficient and should be of type float"
            )

        return op  # type: ignore[return-value] # mypy thinks that it is `Tuple[Any, ...]`, though the asserts here tell otherwise..

    @pydantic.validator("op_list")
    def _validate_op_list(cls, op_list: list) -> list:
        # Normalize every entry through the single-operator validator.
        return list(map(cls._validate_single_op, op_list))

    def __add__(
        self, other: Union[SummedFermionicOperator, FermionicOperator]
    ) -> SummedFermionicOperator:
        """Concatenate terms; a bare FermionicOperator is added with coefficient 1.0."""
        if isinstance(other, SummedFermionicOperator):
            return SummedFermionicOperator(op_list=self.op_list + other.op_list)
        elif isinstance(other, FermionicOperator):
            return SummedFermionicOperator(op_list=self.op_list + [(other, 1.0)])
        raise ValueError(
            "FermionicOperator can be summed together only with type FermionicOperator or SummedFermionicOperator"
        )

    def is_close(self, other: SummedFermionicOperator) -> bool:
        """Term-by-term comparison: same operators in the same order, coefficients within np.isclose."""
        if not isinstance(other, SummedFermionicOperator):
            return False  # type: ignore[unreachable]

        if len(self.op_list) != len(other.op_list):
            return False

        for (op1, coeff1), (op2, coeff2) in zip(self.op_list, other.op_list):
            if op1 != op2 or not np.isclose(coeff1, coeff2):
                return False

        return True

    @property
    def _all_indices(self) -> Set[int]:
        # Union of the mode indices used by every term.
        return set(
            itertools.chain.from_iterable(op.all_indices for op, _ in self.op_list)
        )

    @property
    def num_qubits(self) -> int:
        # One qubit per distinct fermionic mode index.
        return len(self._all_indices)

    def __str__(self) -> str:
        return " + \n".join(str(op[1]) + " * " + str(op[0]) for op in self.op_list)
op_list: list pydantic-field required

A list of tuples each containing a FermionicOperator and a coefficient.

__str__(self) special

Return str(self).

Source code in classiq/interface/chemistry/fermionic_operator.py
def __str__(self) -> str:
    return " + \n".join(str(op[1]) + " * " + str(op[0]) for op in self.op_list)

ground_state_problem

GroundStateProblem (HashablePydanticBaseModel) pydantic-model
Source code in classiq/interface/chemistry/ground_state_problem.py
class GroundStateProblem(HashablePydanticBaseModel):
    # Base model for ground-state problem specifications (mapping + symmetry options).
    mapping: FermionMapping = pydantic.Field(
        default=FermionMapping.JORDAN_WIGNER,
        description="Fermionic mapping type",
        title="Fermion Mapping",
    )
    z2_symmetries: bool = pydantic.Field(
        default=False,
        description="whether to perform z2 symmetries reduction",
    )
    num_qubits: Optional[int] = pydantic.Field(default=None)

    @pydantic.validator("z2_symmetries")
    def _validate_z2_symmetries(
        cls, z2_symmetries: bool, values: Dict[str, Any]
    ) -> bool:
        # Z2-symmetry reduction is incompatible with the fast Bravyi-Kitaev mapping.
        requested_mapping = values.get("mapping")
        if z2_symmetries and requested_mapping == FermionMapping.FAST_BRAVYI_KITAEV:
            raise ValueError(
                "z2 symmetries reduction can not be used for fast_bravyi_kitaev mapping"
            )
        return z2_symmetries

    class Config:
        # Frozen pydantic model: instances are immutable and therefore hashable.
        frozen = True
mapping: FermionMapping pydantic-field

Fermionic mapping type

z2_symmetries: bool pydantic-field

whether to perform z2 symmetries reduction

HamiltonianProblem (GroundStateProblem) pydantic-model
Source code in classiq/interface/chemistry/ground_state_problem.py
class HamiltonianProblem(GroundStateProblem):
    # Ground-state problem defined directly by a fermionic Hamiltonian.
    hamiltonian: SummedFermionicOperator = pydantic.Field(
        description="Hamiltonian as a fermionic operator"
    )
    num_particles: List[pydantic.PositiveInt] = pydantic.Field(
        description="Tuple containing the numbers of alpha particles and beta particles"
    )

    @pydantic.validator("num_particles")
    def _validate_num_particles(cls, num_particles: List[int]) -> List[int]:
        # Raise ValueError (which pydantic wraps into a ValidationError) instead of
        # using `assert`, which is silently stripped when running with `python -O`.
        if not isinstance(num_particles, list) or len(num_particles) != 2:
            raise ValueError(
                "num_particles should be a list of two entries: the numbers of "
                "alpha particles and beta particles"
            )

        # This probably will never happen, since pydantic automatically converts
        #   floats to ints and enforces PositiveInt, but keep an explicit check.
        for num in num_particles:
            if not isinstance(num, int) or num < 1:
                raise ValueError(
                    "The numbers of alpha and beta particles should be positive integers"
                )

        return num_particles
hamiltonian: SummedFermionicOperator pydantic-field required

Hamiltonian as a fermionic operator

num_particles: List[pydantic.types.PositiveInt] pydantic-field required

Tuple containing the numbers of alpha particles and beta particles

MoleculeProblem (GroundStateProblem) pydantic-model
Source code in classiq/interface/chemistry/ground_state_problem.py
class MoleculeProblem(GroundStateProblem):
    # Ground-state problem specified by a molecule and an electronic basis set.
    molecule: Molecule
    basis: str = pydantic.Field(default="sto3g", description="Molecular basis set")
    freeze_core: bool = pydantic.Field(default=False)  # presumably freezes core orbitals -- confirm with backend
    remove_orbitals: List[int] = pydantic.Field(
        default_factory=list, description="list of orbitals to remove"
    )
basis: str pydantic-field

Molecular basis set

remove_orbitals: List[int] pydantic-field

list of orbitals to remove

molecule

Atom (HashablePydanticBaseModel) pydantic-model
Source code in classiq/interface/chemistry/molecule.py
class Atom(HashablePydanticBaseModel):
    # A single atom: chemical symbol restricted to the known elements, plus Cartesian coordinates.
    symbol: Literal[tuple(ELEMENTS)] = pydantic.Field(description="The atom symbol")  # type: ignore[valid-type]
    x: float = pydantic.Field(description="The x coordinate of the atom")
    y: float = pydantic.Field(description="The y coordinate of the atom")
    z: float = pydantic.Field(description="The z coordinate of the atom")
symbol: Literal['H', 'He', 'Li', 'Be', 'B', 'C', 'N', 'O', 'F', 'Ne', 'Na', 'Mg', 'Al', 'Si', 'P', 'S', 'Cl', 'Ar', 'K', 'Ca', 'Sc', 'Ti', 'V', 'Cr', 'Mn', 'Fe', 'Co', 'Ni', 'Cu', 'Zn', 'Ga', 'Ge', 'As', 'Se', 'Br', 'Kr', 'Rb', 'Sr', 'Y', 'Zr', 'Nb', 'Mo', 'Tc', 'Ru', 'Rh', 'Pd', 'Ag', 'Cd', 'In', 'Sn', 'Sb', 'Te', 'I', 'Xe', 'Cs', 'Ba', 'La', 'Ce', 'Pr', 'Nd', 'Pm', 'Sm', 'Eu', 'Gd', 'Tb', 'Dy', 'Ho', 'Er', 'Tm', 'Yb', 'Lu', 'Hf', 'Ta', 'W', 'Re', 'Os', 'Ir', 'Pt', 'Au', 'Hg', 'Tl', 'Pb', 'Bi', 'Po', 'At', 'Rn', 'Fr', 'Ra', 'Ac', 'Th', 'Pa', 'U', 'Np', 'Pu', 'Am', 'Cm', 'Bk', 'Cf', 'Es', 'Fm', 'Md', 'No', 'Lr', 'Rf', 'Db', 'Sg', 'Bh', 'Hs', 'Mt', 'Ds', 'Rg', 'Cn', 'Nh', 'Fl', 'Mc', 'Lv', 'Ts', 'Og'] pydantic-field required

The atom symbol

x: float pydantic-field required

The x coordinate of the atom

y: float pydantic-field required

The y coordinate of the atom

z: float pydantic-field required

The z coordinate of the atom

Molecule (HashablePydanticBaseModel) pydantic-model
Source code in classiq/interface/chemistry/molecule.py
class Molecule(HashablePydanticBaseModel):
    # A molecule: a non-empty list of atoms plus its spin and charge.
    atoms: List[Atom] = pydantic.Field(
        description="A list of atoms each containing the atoms symbol and  its (x,y,z) location",
        min_items=1,
    )
    spin: pydantic.NonNegativeInt = pydantic.Field(
        default=1, description="spin of the molecule"
    )
    charge: pydantic.NonNegativeInt = pydantic.Field(
        default=0, description="charge of the molecule"
    )

    @property
    def atoms_type(self) -> List[AtomType]:
        """The atoms rendered as legacy (symbol, [x, y, z]) tuples."""
        return [(atom.symbol, [atom.x, atom.y, atom.z]) for atom in self.atoms]

    @pydantic.validator("atoms", each_item=True, pre=True)
    def _validate_atoms(cls, atom: Union[AtomType, Atom]) -> Atom:
        # Accept the legacy (symbol, [x, y, z]) representation in addition to Atom instances.
        if isinstance(atom, (list, tuple)):
            return cls._validate_old_atoms_type(atom)
        return atom

    @staticmethod
    def _validate_old_atoms_type(atom: AtomType) -> Atom:
        """Convert a legacy (symbol, [x, y, z]) pair into an Atom, validating its shape.

        Raises:
            ValueError: if the pair is malformed (wrapped by pydantic into a ValidationError).
        """
        if len(atom) != 2:
            # Fixed message typos ("pf the elemnt" -> "of the element").
            raise ValueError(
                "each atom should be a list of two entries: 1) name of the element (str) 2) list of its (x,y,z) location"
            )
        if type(atom[0]) is not str:
            raise ValueError(
                f"atom name should be a string. unknown element: {atom[0]}."
            )
        if len(atom[1]) != 3:
            raise ValueError(
                f"location of the atom is of length three, representing the (x,y,z) coordinates of the atom, error value: {atom[1]}"
            )
        for idx in atom[1]:
            # Exact-type check: bools (a subclass of int) are intentionally rejected.
            if type(idx) is not float and type(idx) is not int:
                raise ValueError(
                    f"coordinates of the atom should be of type float. error value: {idx}"
                )
        symbol, coordinate = atom

        return Atom(symbol=symbol, x=coordinate[0], y=coordinate[1], z=coordinate[2])

    class Config:
        # Frozen pydantic model: instances are immutable and therefore hashable.
        frozen = True
atoms: ConstrainedListValue pydantic-field required

A list of atoms each containing the atoms symbol and its (x,y,z) location

charge: NonNegativeInt pydantic-field

charge of the molecule

spin: NonNegativeInt pydantic-field

spin of the molecule

operator

PauliOperator (HashablePydanticBaseModel, VersionedModel) pydantic-model

Specification of a Pauli sum operator.

Source code in classiq/interface/chemistry/operator.py
class PauliOperator(HashablePydanticBaseModel, VersionedModel):
    """
    Specification of a Pauli sum operator.
    """

    pauli_list: PydanticPauliList = pydantic.Field(
        description="A list of tuples each containing a pauli string comprised of I,X,Y,Z characters and a complex coefficient; for example [('IZ', 0.1), ('XY', 0.2)].",
    )
    is_hermitian: bool = pydantic.Field(default=False)
    has_complex_coefficients: bool = pydantic.Field(default=True)

    def show(self) -> str:
        """Return a human-readable, one-line-per-summand rendering of the operator."""
        if self.is_hermitian:
            # If the operator is hermitian then the coefficients must be numeric
            return "\n".join(
                f"{summand[1].real:+.3f} * {summand[0]}" for summand in self.pauli_list  # type: ignore[union-attr]
            )
        return "\n".join(
            f"+({summand[1]:+.3f}) * {summand[0]}" for summand in self.pauli_list
        )

    @pydantic.validator("pauli_list", each_item=True, pre=True)
    def _validate_pauli_monomials(
        cls, monomial: Tuple[PydanticPauliMonomialStr, ParameterComplexType]
    ) -> Tuple[PydanticPauliMonomialStr, ParameterComplexType]:
        """Validate and parse a single (pauli string, coefficient) monomial."""
        _PauliMonomialLengthValidator(  # type: ignore[call-arg]
            monomial=monomial
        )  # Validate the length of the monomial.
        coeff = cls._validate_monomial_coefficient(monomial[1])
        parsed_monomial = _PauliMonomialParser(string=monomial[0], coeff=coeff)  # type: ignore[call-arg]
        return (parsed_monomial.string, parsed_monomial.coeff)

    @staticmethod
    def _validate_monomial_coefficient(
        coeff: Union[sympy.Expr, ParameterComplexType]
    ) -> ParameterComplexType:
        # Symbolic coefficients are kept as validated expression strings.
        if isinstance(coeff, str):
            validate_expression_str(coeff)
        elif isinstance(coeff, sympy.Expr):
            coeff = str(coeff)
        return coeff

    @pydantic.validator("pauli_list")
    def _validate_pauli_list(cls, pauli_list: PydanticPauliList) -> PydanticPauliList:
        # All pauli strings must act on the same number of qubits.
        if not all_equal(len(summand[0]) for summand in pauli_list):
            raise ValueError("Pauli strings have incompatible lengths.")
        return pauli_list

    @pydantic.root_validator
    def _validate_hermitianity(cls, values: Dict[str, Any]) -> Dict[str, Any]:
        """Infer hermiticity from numeric coefficients and normalize them when hermitian."""
        pauli_list = values.get("pauli_list", [])
        if all(isinstance(summand[1], complex) for summand in pauli_list):
            values["is_hermitian"] = all(
                np.isclose(complex(summand[1]).real, summand[1])
                for summand in pauli_list
            )
        if values.get("is_hermitian", False):
            values["has_complex_coefficients"] = False
            # Drop the (near-zero) imaginary parts of a hermitian operator.
            values["pauli_list"] = [
                (summand[0], complex(summand[1].real)) for summand in pauli_list
            ]
        else:
            values["has_complex_coefficients"] = not all(
                np.isclose(complex(summand[1]).real, summand[1])
                for summand in pauli_list
                if isinstance(summand[1], complex)
            )
        return values

    def __mul__(self, coefficient: complex) -> "PauliOperator":
        """Return a new operator with every coefficient multiplied by `coefficient`."""
        multiplied_ising = [
            (monomial[0], self._multiply_monomial_coefficient(monomial[1], coefficient))
            for monomial in self.pauli_list
        ]
        return self.__class__(pauli_list=multiplied_ising)

    @staticmethod
    def _multiply_monomial_coefficient(
        monomial_coefficient: ParameterComplexType, coefficient: complex
    ) -> ParameterComplexType:
        # Symbolic (string) coefficients are multiplied via sympy and re-stringified.
        if isinstance(monomial_coefficient, ParameterType):
            return str(sympy.sympify(monomial_coefficient) * coefficient)
        return monomial_coefficient * coefficient

    @property
    def is_commutative(self) -> bool:
        """True if, per qubit, all summands use at most one non-identity Pauli."""
        return all(
            self._is_sub_pauli_commutative(
                [summand[0][qubit_num] for summand in self.pauli_list]
            )
            for qubit_num in range(self.num_qubits)
        )

    @staticmethod
    def _is_sub_pauli_commutative(qubit_pauli_string: Union[List[str], str]) -> bool:
        unique_paulis = set(qubit_pauli_string) - {"I"}
        return len(unique_paulis) <= 1

    @property
    def num_qubits(self) -> int:
        # All pauli strings share the same length (enforced by _validate_pauli_list).
        return len(self.pauli_list[0][0])

    def to_matrix(self) -> np.ndarray:
        """Return the dense matrix of the operator; requires numeric coefficients."""
        if not all(isinstance(summand[1], complex) for summand in self.pauli_list):
            raise ClassiqValueError(
                "Supporting only Hamiltonian with numeric coefficients."
            )
        return sum(
            cast(complex, summand[1]) * to_pauli_matrix(summand[0])
            for summand in self.pauli_list
        )  # type: ignore[return-value]

    @staticmethod
    def _extend_pauli_string(
        pauli_string: PydanticPauliMonomialStr, num_extra_qubits: int
    ) -> PydanticPauliMonomialStr:
        # New qubits are prepended as identities.
        return "I" * num_extra_qubits + pauli_string

    def extend(self, num_extra_qubits: int) -> "PauliOperator":
        """Return a copy of the operator extended by identity-acting qubits."""
        new_pauli_list = [
            (self._extend_pauli_string(pauli_string, num_extra_qubits), coeff)
            for (pauli_string, coeff) in self.pauli_list
        ]
        return self.copy(update={"pauli_list": new_pauli_list}, deep=True)

    @staticmethod
    def _reorder_pauli_string(
        pauli_string: PydanticPauliMonomialStr,
        order: Collection[int],
        new_num_qubits: int,
    ) -> PydanticPauliMonomialStr:
        # Work on the reversed string so position i corresponds to qubit i.
        reversed_pauli_string = pauli_string[::-1]
        reversed_new_pauli_string = ["I"] * new_num_qubits

        for logical_pos, actual_pos in enumerate(order):
            reversed_new_pauli_string[actual_pos] = reversed_pauli_string[logical_pos]

        return "".join(reversed(reversed_new_pauli_string))

    @staticmethod
    def _validate_reorder(
        order: Collection[int],
        num_qubits: int,
        num_extra_qubits: int,
    ) -> None:
        """Validate a qubit reordering; raises ValueError on any inconsistency."""
        if num_extra_qubits < 0:
            raise ValueError("Number of extra qubits cannot be negative")

        if len(order) != num_qubits:
            raise ValueError("The qubits order doesn't match the Pauli operator")

        if len(order) != len(set(order)):
            raise ValueError("The qubits order is not one-to-one")

        if not all(pos < num_qubits + num_extra_qubits for pos in order):
            # Fixed message typo ("do no exist" -> "do not exist").
            raise ValueError("The qubits order contains qubits which do not exist")

    @classmethod
    def reorder(
        cls,
        operator: "PauliOperator",
        order: Collection[int],
        num_extra_qubits: int = 0,
    ) -> "PauliOperator":
        """Return `operator` with its qubits permuted by `order`, optionally padded."""
        cls._validate_reorder(order, operator.num_qubits, num_extra_qubits)

        new_num_qubits = operator.num_qubits + num_extra_qubits
        new_pauli_list = [
            (cls._reorder_pauli_string(pauli_string, order, new_num_qubits), coeff)
            for pauli_string, coeff in operator.pauli_list
        ]
        return cls(pauli_list=new_pauli_list)

    @classmethod
    def from_unzipped_lists(
        cls,
        operators: List[List[Pauli]],
        coefficients: Optional[List[complex]] = None,
    ) -> "PauliOperator":
        """Build an operator from parallel lists of Pauli sequences and coefficients."""
        if coefficients is None:
            coefficients = [1] * len(operators)

        if len(operators) != len(coefficients):
            raise ValueError(
                f"The number of coefficients ({len(coefficients)}) must be equal to the number of pauli operators ({len(operators)})"
            )

        return cls(
            pauli_list=[
                (pauli_integers_to_str(op), coeff)
                for op, coeff in zip(operators, coefficients)
            ]
        )

    class Config:
        # Frozen pydantic model: instances are immutable and therefore hashable.
        frozen = True
pauli_list: ConstrainedListValue pydantic-field required

A list of tuples each containing a pauli string comprised of I,X,Y,Z characters and a complex coefficient; for example [('IZ', 0.1), ('XY', 0.2)].

PauliOperatorV1 (HashablePydanticBaseModel) pydantic-model

Specification of a Pauli sum operator.

Source code in classiq/interface/chemistry/operator.py
class PauliOperatorV1(HashablePydanticBaseModel):
    """
    Specification of a Pauli sum operator.
    """

    pauli_list: PydanticPauliList = pydantic.Field(
        description="A list of tuples each containing a pauli string comprised of I,X,Y,Z characters and a complex coefficient; for example [('IZ', 0.1), ('XY', 0.2)].",
    )
    is_hermitian: bool = pydantic.Field(default=False)
    has_complex_coefficients: bool = pydantic.Field(default=True)

    def show(self) -> str:
        """Return a human-readable, one-line-per-summand rendering of the operator."""
        if self.is_hermitian:
            # If the operator is hermitian then the coefficients must be numeric
            return "\n".join(
                f"{summand[1].real:+.3f} * {summand[0]}" for summand in self.pauli_list  # type: ignore[union-attr]
            )
        return "\n".join(
            f"+({summand[1]:+.3f}) * {summand[0]}" for summand in self.pauli_list
        )

    @pydantic.validator("pauli_list", each_item=True, pre=True)
    def _validate_pauli_monomials(
        cls, monomial: Tuple[PydanticPauliMonomialStr, ParameterComplexType]
    ) -> Tuple[PydanticPauliMonomialStr, ParameterComplexType]:
        """Validate and parse a single (pauli string, coefficient) monomial."""
        _PauliMonomialLengthValidator(  # type: ignore[call-arg]
            monomial=monomial
        )  # Validate the length of the monomial.
        coeff = cls._validate_monomial_coefficient(monomial[1])
        parsed_monomial = _PauliMonomialParser(string=monomial[0], coeff=coeff)  # type: ignore[call-arg]
        return (parsed_monomial.string, parsed_monomial.coeff)

    @staticmethod
    def _validate_monomial_coefficient(
        coeff: Union[sympy.Expr, ParameterComplexType]
    ) -> ParameterComplexType:
        # Symbolic coefficients are kept as validated expression strings.
        if isinstance(coeff, str):
            validate_expression_str(coeff)
        elif isinstance(coeff, sympy.Expr):
            coeff = str(coeff)
        return coeff

    @pydantic.validator("pauli_list")
    def _validate_pauli_list(cls, pauli_list: PydanticPauliList) -> PydanticPauliList:
        # All pauli strings must act on the same number of qubits.
        if not all_equal(len(summand[0]) for summand in pauli_list):
            raise ValueError("Pauli strings have incompatible lengths.")
        return pauli_list

    @pydantic.root_validator
    def _validate_hermitianity(cls, values: Dict[str, Any]) -> Dict[str, Any]:
        """Infer hermiticity from numeric coefficients and normalize them when hermitian."""
        pauli_list = values.get("pauli_list", [])
        if all(isinstance(summand[1], complex) for summand in pauli_list):
            values["is_hermitian"] = all(
                np.isclose(complex(summand[1]).real, summand[1])
                for summand in pauli_list
            )
        if values.get("is_hermitian", False):
            values["has_complex_coefficients"] = False
            # Drop the (near-zero) imaginary parts of a hermitian operator.
            values["pauli_list"] = [
                (summand[0], complex(summand[1].real)) for summand in pauli_list
            ]
        else:
            values["has_complex_coefficients"] = not all(
                np.isclose(complex(summand[1]).real, summand[1])
                for summand in pauli_list
                if isinstance(summand[1], complex)
            )
        return values

    def __mul__(self, coefficient: complex) -> "PauliOperatorV1":
        """Return a new operator with every coefficient multiplied by `coefficient`."""
        multiplied_ising = [
            (monomial[0], self._multiply_monomial_coefficient(monomial[1], coefficient))
            for monomial in self.pauli_list
        ]
        return self.__class__(pauli_list=multiplied_ising)

    @staticmethod
    def _multiply_monomial_coefficient(
        monomial_coefficient: ParameterComplexType, coefficient: complex
    ) -> ParameterComplexType:
        # Symbolic (string) coefficients are multiplied via sympy and re-stringified.
        if isinstance(monomial_coefficient, ParameterType):
            return str(sympy.sympify(monomial_coefficient) * coefficient)
        return monomial_coefficient * coefficient

    @property
    def is_commutative(self) -> bool:
        """True if, per qubit, all summands use at most one non-identity Pauli."""
        return all(
            self._is_sub_pauli_commutative(
                [summand[0][qubit_num] for summand in self.pauli_list]
            )
            for qubit_num in range(self.num_qubits)
        )

    @staticmethod
    def _is_sub_pauli_commutative(qubit_pauli_string: Union[List[str], str]) -> bool:
        unique_paulis = set(qubit_pauli_string) - {"I"}
        return len(unique_paulis) <= 1

    @property
    def num_qubits(self) -> int:
        # All pauli strings share the same length (enforced by _validate_pauli_list).
        return len(self.pauli_list[0][0])

    def to_matrix(self) -> np.ndarray:
        """Return the dense matrix of the operator; requires numeric coefficients."""
        if not all(isinstance(summand[1], complex) for summand in self.pauli_list):
            raise ClassiqValueError(
                "Supporting only Hamiltonian with numeric coefficients."
            )
        return sum(
            cast(complex, summand[1]) * to_pauli_matrix(summand[0])
            for summand in self.pauli_list
        )  # type: ignore[return-value]

    @staticmethod
    def _extend_pauli_string(
        pauli_string: PydanticPauliMonomialStr, num_extra_qubits: int
    ) -> PydanticPauliMonomialStr:
        # New qubits are prepended as identities.
        return "I" * num_extra_qubits + pauli_string

    def extend(self, num_extra_qubits: int) -> "PauliOperatorV1":
        """Return a copy of the operator extended by identity-acting qubits."""
        new_pauli_list = [
            (self._extend_pauli_string(pauli_string, num_extra_qubits), coeff)
            for (pauli_string, coeff) in self.pauli_list
        ]
        return self.copy(update={"pauli_list": new_pauli_list}, deep=True)

    @staticmethod
    def _reorder_pauli_string(
        pauli_string: PydanticPauliMonomialStr,
        order: Collection[int],
        new_num_qubits: int,
    ) -> PydanticPauliMonomialStr:
        # Work on the reversed string so position i corresponds to qubit i.
        reversed_pauli_string = pauli_string[::-1]
        reversed_new_pauli_string = ["I"] * new_num_qubits

        for logical_pos, actual_pos in enumerate(order):
            reversed_new_pauli_string[actual_pos] = reversed_pauli_string[logical_pos]

        return "".join(reversed(reversed_new_pauli_string))

    @staticmethod
    def _validate_reorder(
        order: Collection[int],
        num_qubits: int,
        num_extra_qubits: int,
    ) -> None:
        """Validate a qubit reordering; raises ValueError on any inconsistency."""
        if num_extra_qubits < 0:
            raise ValueError("Number of extra qubits cannot be negative")

        if len(order) != num_qubits:
            raise ValueError("The qubits order doesn't match the Pauli operator")

        if len(order) != len(set(order)):
            raise ValueError("The qubits order is not one-to-one")

        if not all(pos < num_qubits + num_extra_qubits for pos in order):
            # Fixed message typo ("do no exist" -> "do not exist").
            raise ValueError("The qubits order contains qubits which do not exist")

    @classmethod
    def reorder(
        cls,
        operator: "PauliOperatorV1",
        order: Collection[int],
        num_extra_qubits: int = 0,
    ) -> "PauliOperatorV1":
        """Return `operator` with its qubits permuted by `order`, optionally padded."""
        cls._validate_reorder(order, operator.num_qubits, num_extra_qubits)

        new_num_qubits = operator.num_qubits + num_extra_qubits
        new_pauli_list = [
            (cls._reorder_pauli_string(pauli_string, order, new_num_qubits), coeff)
            for pauli_string, coeff in operator.pauli_list
        ]
        return cls(pauli_list=new_pauli_list)

    @classmethod
    def from_unzipped_lists(
        cls,
        operators: List[List[Pauli]],
        coefficients: Optional[List[complex]] = None,
    ) -> "PauliOperatorV1":
        """Build an operator from parallel lists of Pauli sequences and coefficients."""
        if coefficients is None:
            coefficients = [1] * len(operators)

        if len(operators) != len(coefficients):
            raise ValueError(
                f"The number of coefficients ({len(coefficients)}) must be equal to the number of pauli operators ({len(operators)})"
            )

        return cls(
            pauli_list=[
                (pauli_integers_to_str(op), coeff)
                for op, coeff in zip(operators, coefficients)
            ]
        )

    class Config:
        # Frozen pydantic model: instances are immutable and therefore hashable.
        frozen = True
pauli_list: ConstrainedListValue pydantic-field required

A list of tuples each containing a pauli string comprised of I,X,Y,Z characters and a complex coefficient; for example [('IZ', 0.1), ('XY', 0.2)].

combinatorial_optimization special

mht_qaoa_input

MhtQaoaInput (BaseModel) pydantic-model
Source code in classiq/interface/combinatorial_optimization/mht_qaoa_input.py
class MhtQaoaInput(BaseModel):
    # Input for the multi-hypothesis-tracking (MHT) QAOA problem.
    reps: pydantic.PositiveInt = pydantic.Field(
        default=3, description="Number of QAOA layers."
    )
    plot_list: List[PlotData] = pydantic.Field(
        description="The list of (x,y,t) plots of the MHT problem."
    )
    misdetection_maximum_time_steps: pydantic.NonNegativeInt = pydantic.Field(
        default=0,
        description="The maximum number of time steps a target might be misdetected.",
    )
    penalty_energy: float = pydantic.Field(
        default=2,
        description="Penalty energy for invalid solutions. The value affects "
        "the converges rate. Small positive values are preferred",
    )
    three_local_coeff: float = pydantic.Field(
        default=0,
        description="Coefficient for the 3-local terms in the Hamiltonian. It is related to the angular acceleration.",
    )
    one_local_coeff: float = pydantic.Field(
        default=0, description="Coefficient for the 1-local terms in the Hamiltonian."
    )
    is_penalty: bool = pydantic.Field(
        default=True, description="Build Pubo using penalty terms"
    )
    max_velocity: float = pydantic.Field(
        default=0, description="Max allowed velocity for a segment"
    )

    def is_valid_cost(self, cost: float) -> bool:
        # Every cost is considered valid for this problem type.
        return True

    @pydantic.validator("plot_list")
    def round_plot_list_times_and_validate(cls, plot_list):
        """Check plot IDs are unique and time stamps form a uniformly spaced grid.

        Time stamps are rounded in place to _TOLERANCE_DECIMALS before comparison.
        """
        MhtQaoaInput._check_all_ids_are_distinct(plot_list)
        MhtQaoaInput._round_to_tolerance_decimals(plot_list)

        time_stamps = sorted({plot.t for plot in plot_list})
        # Set of consecutive time differences; exactly one element means uniform spacing.
        time_diff_set = {
            np.round(time_stamps[i] - time_stamps[i - 1], decimals=_TOLERANCE_DECIMALS)
            for i in range(1, len(time_stamps))
        }

        # NOTE(review): with fewer than two distinct time stamps this set is empty,
        # so such inputs are rejected with the message below -- confirm this is intended.
        if len(time_diff_set) != 1:
            raise ValueError("The time difference between each time stamp is not equal")

        return plot_list

    @staticmethod
    def _round_to_tolerance_decimals(plot_list: List[PlotData]) -> None:
        # Mutates the plots in place, rounding each time stamp to the shared tolerance.
        for plot in plot_list:
            plot.t = np.round(plot.t, decimals=_TOLERANCE_DECIMALS)

    @staticmethod
    def _check_all_ids_are_distinct(plot_list: List[PlotData]) -> None:
        # Duplicate plot IDs would make track hypotheses ambiguous.
        if not more_itertools.all_unique(plot.plot_id for plot in plot_list):
            raise ValueError("Plot IDs should be unique.")
is_penalty: bool pydantic-field

Build Pubo using penalty terms

max_velocity: float pydantic-field

Max allowed velocity for a segment

misdetection_maximum_time_steps: NonNegativeInt pydantic-field

The maximum number of time steps a target might be misdetected.

one_local_coeff: float pydantic-field

Coefficient for the 1-local terms in the Hamiltonian.

penalty_energy: float pydantic-field

Penalty energy for invalid solutions. The value affects the convergence rate. Small positive values are preferred.

plot_list: List[classiq.interface.combinatorial_optimization.mht_qaoa_input.PlotData] pydantic-field required

The list of (x,y,t) plots of the MHT problem.

reps: PositiveInt pydantic-field

Number of QAOA layers.

three_local_coeff: float pydantic-field

Coefficient for the 3-local terms in the Hamiltonian. It is related to the angular acceleration.

PlotData (BaseModel) pydantic-model
Source code in classiq/interface/combinatorial_optimization/mht_qaoa_input.py
class PlotData(BaseModel):
    # A single (x, y, t) detection ("plot") used as MHT problem input.
    # We are currently ignoring units. This might need to be handled in the future
    x: float = pydantic.Field(description="The X coordinate of this plot")
    y: float = pydantic.Field(description="The Y coordinate of this plot")
    t: float = pydantic.Field(description="The time stamp of this plot")
    plot_id: pydantic.NonNegativeInt = pydantic.Field(
        description="The plot ID of this plot"
    )
plot_id: NonNegativeInt pydantic-field required

The plot ID of this plot

t: float pydantic-field required

The time stamp of this plot

x: float pydantic-field required

The X coordinate of this plot

y: float pydantic-field required

The Y coordinate of this plot

optimization_problem

MaxCutProblem (BaseModel) pydantic-model
Source code in classiq/interface/combinatorial_optimization/optimization_problem.py
class MaxCutProblem(BaseModel):
    # Max-cut combinatorial optimization problem, solved with a QAOA ansatz via VQE.
    qaoa_reps: pydantic.PositiveInt = pydantic.Field(
        default=1, description="Number of layers in qaoa ansatz."
    )
    optimizer_preferences: CombinatorialOptimizer = pydantic.Field(
        default_factory=CombinatorialOptimizer,
        description="preferences for the VQE execution",
    )
    # Graph encoded as a JSON-like mapping -- presumably networkx node-link data; confirm with callers.
    serialized_graph: Dict[str, Any]
optimizer_preferences: CombinatorialOptimizer pydantic-field

preferences for the VQE execution

qaoa_reps: PositiveInt pydantic-field

Number of layers in qaoa ansatz.

executor special

aws_execution_cost

ExecutionCostForTimePeriod (BaseModel) pydantic-model
Source code in classiq/interface/executor/aws_execution_cost.py
class ExecutionCostForTimePeriod(pydantic.BaseModel):
    """Parameters selecting the time window, resolution and scope of an AWS
    task usage/cost report."""

    # First day of the report window (included).
    start: date = pydantic.Field(
        description="The beginning of the time period for tasks usage and cost (inclusive).",
    )
    # Day the report window stops at (excluded).
    end: date = pydantic.Field(
        description="The end of the time period for tasks usage and cost (exclusive).",
    )
    # Reporting resolution; daily buckets by default.
    granularity: Granularity = pydantic.Field(
        default=Granularity.daily, description="Either MONTHLY or DAILY, or HOURLY."
    )
    # Aggregate costs per user or per organization.
    cost_scope: CostScope = pydantic.Field(
        default=CostScope.user, description="Either user or organization"
    )

    class Config:
        # Serialize dates as plain YYYY-MM-DD strings.
        json_encoders = {date: lambda d: d.isoformat()}

    @validator("end")
    def date_order(cls, end_date, values, **kwargs):
        """Reject windows whose end does not fall strictly after their start."""
        start_date = values.get("start")
        if start_date is not None and end_date <= start_date:
            raise ValueError('"end" date should be after "start" date')
        return end_date
cost_scope: CostScope pydantic-field

Either user or organization

end: date pydantic-field required

The end of the time period for tasks usage and cost (exclusive).

granularity: Granularity pydantic-field

Either MONTHLY or DAILY, or HOURLY.

start: date pydantic-field required

The beginning of the time period for tasks usage and cost (inclusive).

__json_encoder__(obj) special staticmethod

partial(func, *args, **keywords) - new function with partial application of the given arguments and keywords.

estimation

OperatorsEstimation (BaseModel) pydantic-model

Estimate the expectation value of a list of Pauli operators on a quantum state given by a quantum program.

Source code in classiq/interface/executor/estimation.py
class OperatorsEstimation(pydantic.BaseModel):
    """
    Estimate the expectation value of a list of Pauli operators on a quantum state given
    by a quantum program.
    """

    # Program that prepares the quantum state to be measured.
    quantum_program: QuantumProgram
    # Pauli operators whose expectation values are to be estimated.
    operators: PauliOperators

execution_preferences

AmplitudeAmplification (BaseModel) pydantic-model
Source code in classiq/interface/executor/execution_preferences.py
class AmplitudeAmplification(pydantic.BaseModel):
    """Settings for the amplitude-amplification (Grover) iterations of an execution."""

    iterations: List[int] = pydantic.Field(
        default_factory=list,
        description="Number or list of numbers of iteration to use",
    )
    growth_rate: float = pydantic.Field(
        default=1.25,
        description="Number of iteration used is set to round(growth_rate**iterations)",
    )
    sample_from_iterations: bool = pydantic.Field(
        default=False,
        description="If True, number of iterations used is picked randomly from "
        "[1, iteration] range",
    )
    # Typo fixed in the description ("Then" -> "The").
    num_of_highest_probability_states_to_check: pydantic.PositiveInt = pydantic.Field(
        default=1, description="The number of highest probability states to check"
    )

    # Fix: pre=True is required for this coercion to ever run. Without it,
    # the field's List[int] validation executes first and rejects a bare int
    # before the validator can wrap it in a list.
    @pydantic.validator("iterations", pre=True)
    def _validate_iterations(cls, iterations: Union[List[int], int]) -> List[int]:
        """Allow callers to pass a single int; normalize it to a one-element list."""
        if isinstance(iterations, int):
            return [iterations]
        return iterations
growth_rate: float pydantic-field

Number of iteration used is set to round(growth_rate**iterations)

iterations: List[int] pydantic-field

Number or list of numbers of iteration to use

num_of_highest_probability_states_to_check: PositiveInt pydantic-field

The number of highest probability states to check

sample_from_iterations: bool pydantic-field

If True, number of iterations used is picked randomly from [1, iteration] range

ExecutionPreferences (BaseModel) pydantic-model
Source code in classiq/interface/executor/execution_preferences.py
class ExecutionPreferences(pydantic.BaseModel):
    """User preferences controlling how a quantum program is executed."""

    timeout_sec: Optional[pydantic.PositiveInt] = pydantic.Field(
        default=None,
        description="If set, limits the execution runtime. Value is in seconds. "
        "Not supported on all platforms.",
    )
    amplitude_amplification: AmplitudeAmplification = pydantic.Field(
        default_factory=AmplitudeAmplification,
        description="Settings related to amplitude amplification execution, used during the grover execution.",
    )
    # Fix: the original passed `default_factory=None`, a misuse of
    # default_factory (it expects a callable); `default=None` states the intent.
    optimizer_preferences: Optional[OptimizerPreferences] = pydantic.Field(
        default=None,
        description="Settings related to VQE execution.",
    )
    error_mitigation_method: Optional[ErrorMitigationMethod] = pydantic.Field(
        default=None,
        description="Error mitigation method. Currently supports complete and tensored "
        "measurement calibration.",
    )
    noise_properties: Optional[NoiseProperties] = pydantic.Field(
        default=None, description="Properties of the noise in the circuit"
    )
    # Declared as plain int: a None default is replaced with a fresh seed by
    # validate_random_seed (always=True), so instances always carry an int.
    random_seed: int = pydantic.Field(
        default=None,
        description="The random seed used for the execution",
    )
    backend_preferences: BackendPreferencesTypes = backend_preferences_field(
        backend_name=ClassiqAerBackendNames.AER_SIMULATOR
    )
    num_shots: Optional[pydantic.PositiveInt] = pydantic.Field(default=None)
    transpile_to_hardware: TranspilationOption = pydantic.Field(
        default=TranspilationOption.DECOMPOSE,
        description="Transpile the circuit to the hardware basis gates before execution",
        title="Transpilation Option",
    )
    job_name: Optional[str] = pydantic.Field(
        default=None,
        min_length=1,
        description="The job name",
    )

    @pydantic.validator("num_shots", always=True)
    def validate_num_shots(
        cls, original_num_shots: Optional[pydantic.PositiveInt], values: Dict[str, Any]
    ) -> Optional[pydantic.PositiveInt]:
        """Fall back to the optimizer preferences' num_shots when not set here."""
        return _choose_original_or_optimizer_attribute(
            original_num_shots, "num_shots", None, values
        )

    @pydantic.validator("backend_preferences", always=True)
    def validate_timeout_for_aws(
        cls, backend_preferences: BackendPreferencesTypes, values: Dict[str, Any]
    ) -> BackendPreferencesTypes:
        """Reconcile the global timeout_sec with the AWS job timeout.

        Raises:
            ValueError: if both timeouts are set to conflicting values, or if
                the timeout exceeds the AWS maximum. Otherwise timeout_sec is
                propagated into the AWS job timeout.
        """
        timeout = values.get("timeout_sec", None)
        if (
            not isinstance(backend_preferences, AwsBackendPreferences)
            or timeout is None
        ):
            return backend_preferences
        # A non-default AWS job_timeout that differs from timeout_sec is ambiguous.
        if (
            timeout != backend_preferences.job_timeout
            and backend_preferences.job_timeout != AWS_DEFAULT_JOB_TIMEOUT_SECONDS
        ):
            raise ValueError(DIFFERENT_TIMEOUT_MSG)
        if timeout > MAX_EXECUTION_TIMEOUT_SECONDS:
            raise ValueError(TIMEOUT_LARGE_FOR_AWS_MSG)

        backend_preferences.job_timeout = timeout
        return backend_preferences

    @pydantic.validator("random_seed", always=True)
    def validate_random_seed(
        cls, original_random_seed: Optional[int], values: Dict[str, Any]
    ) -> int:
        """Use the optimizer preferences' seed, or generate one, when not set here."""
        return _choose_original_or_optimizer_attribute(
            original_random_seed, "random_seed", create_random_seed(), values
        )
amplitude_amplification: AmplitudeAmplification pydantic-field

Settings related to amplitude amplification execution, used during the grover execution.

backend_preferences: Union[classiq.interface.backend.backend_preferences.AzureBackendPreferences, classiq.interface.backend.backend_preferences.ClassiqBackendPreferences, classiq.interface.backend.backend_preferences.IBMBackendPreferences, classiq.interface.backend.backend_preferences.AwsBackendPreferences, classiq.interface.backend.backend_preferences.IonqBackendPreferences, classiq.interface.backend.backend_preferences.GCPBackendPreferences, classiq.interface.backend.backend_preferences.AliceBobBackendPreferences, classiq.interface.backend.backend_preferences.OQCBackendPreferences] pydantic-field

Preferences for the requested backend to run the quantum circuit.

error_mitigation_method: ErrorMitigationMethod pydantic-field

Error mitigation method. Currently supports complete and tensored measurement calibration.

job_name: ConstrainedStrValue pydantic-field

The job name

noise_properties: NoiseProperties pydantic-field

Properties of the noise in the circuit

optimizer_preferences: OptimizerPreferences pydantic-field

Settings related to VQE execution.

random_seed: int pydantic-field

The random seed used for the execution

timeout_sec: PositiveInt pydantic-field

If set, limits the execution runtime. Value is in seconds. Not supported on all platforms.

transpile_to_hardware: TranspilationOption pydantic-field

Transpile the circuit to the hardware basis gates before execution

execution_request

ExecutionRequest (BaseModel) pydantic-model
Source code in classiq/interface/executor/execution_request.py
class ExecutionRequest(BaseModel):
    """A payload to execute together with the preferences to execute it under."""

    execution_payload: ExecutionPayloads
    preferences: ExecutionPreferences = pydantic.Field(
        default_factory=ExecutionPreferences,
        description="preferences for the execution",
    )

    @pydantic.validator("preferences")
    def validate_ionq_backend(
        cls, preferences: ExecutionPreferences, values: Dict[str, Any]
    ) -> ExecutionPreferences:
        """Ensure the payload type and the chosen backend agree.

        An IonQ backend accepts only QuantumPrograms (IonQ or QASM syntax),
        and an IonQ-syntax program can run only on an IonQ backend.
        """
        payload = values.get("execution_payload")
        on_ionq_backend = isinstance(
            preferences.backend_preferences, IonqBackendPreferences
        )
        if not isinstance(payload, QuantumProgram):
            # Non-program payloads cannot run on IonQ hardware at all.
            if on_ionq_backend:
                raise ValueError(
                    "IonQ backend supports only execution of QuantumPrograms"
                )
            return preferences
        if payload.syntax == QuantumInstructionSet.IONQ and not on_ionq_backend:
            raise ValueError("Can only execute IonQ code on IonQ backend.")
        return preferences
preferences: ExecutionPreferences pydantic-field

preferences for the execution

validate_ionq_backend(preferences, values) classmethod

This function implements the following compatibility check between backend and payload: an IonQ backend accepts IonQ programs and Qasm programs but no other payloads, while any other backend accepts Qasm programs and other payloads but not IonQ programs. This is because we can't execute non-programs on the IonQ backends, and we can't execute IonQ programs on non-IonQ backends.

Source code in classiq/interface/executor/execution_request.py
@pydantic.validator("preferences")
def validate_ionq_backend(
    cls, preferences: ExecutionPreferences, values: Dict[str, Any]
) -> ExecutionPreferences:
    """
    This function implements the following check:
    BE \\ payload | IonQ program | Qasm program | Other
    --------------|--------------|--------------|------
    IonQ backend  |       V      |      V       |   X
    Other backend |       X      |      V       |   V
    Since:
    - We can't execute non-programs on the IonQ backends
    - We can't execute IonQ programs on non-IonQ backends
    """
    # "preferences" is validated after "execution_payload", so the payload is
    # available (if it passed its own validation) via `values`.
    quantum_program = values.get("execution_payload")
    is_ionq_backend = isinstance(
        preferences.backend_preferences, IonqBackendPreferences
    )
    if isinstance(quantum_program, QuantumProgram):
        if (
            quantum_program.syntax == QuantumInstructionSet.IONQ
            and not is_ionq_backend
        ):
            raise ValueError("Can only execute IonQ code on IonQ backend.")
    else:
        # If we handle anything other than a program.
        if is_ionq_backend:
            raise ValueError(
                "IonQ backend supports only execution of QuantumPrograms"
            )
    return preferences

optimizer_preferences

CombinatorialOptimizer (OptimizerPreferences) pydantic-model
Source code in classiq/interface/executor/optimizer_preferences.py
class CombinatorialOptimizer(OptimizerPreferences):
    """Optimizer preferences specialized for combinatorial optimization runs."""

    # How measured bit strings are summarized into a single cost value.
    cost_type: CostType = pydantic.Field(
        default=CostType.CVAR,
        description="Summarizing method of the measured bit strings",
    )
    # Only meaningful for CostType.CVAR; see check_alpha_cvar below.
    alpha_cvar: PydanticAlphaParamCVAR = pydantic.Field(
        default=None, description="Parameter for the CVAR summarizing method"
    )
    is_maximization: bool = pydantic.Field(
        default=False,
        description="Whether the optimization goal is to maximize",
    )
    should_check_valid_solutions: bool = pydantic.Field(
        default=False,
        description="Whether to check if all the solutions satisfy the constraints",
    )

    @pydantic.validator("alpha_cvar", pre=True, always=True)
    def check_alpha_cvar(cls, alpha_cvar, values):
        """Forbid alpha_cvar for non-CVAR cost types; default it to 0.2 for CVAR."""
        uses_cvar = values.get("cost_type") == CostType.CVAR
        if alpha_cvar is not None and not uses_cvar:
            raise ValueError("Use CVAR params only for CostType.CVAR.")
        if alpha_cvar is None and uses_cvar:
            return 0.2
        return alpha_cvar
alpha_cvar: PydanticAlphaParamCVAR pydantic-field

Parameter for the CVAR summarizing method

cost_type: CostType pydantic-field

Summarizing method of the measured bit strings

is_maximization: bool pydantic-field

Whether the optimization goal is to maximize

should_check_valid_solutions: bool pydantic-field

Whether to check if all the solutions satisfy the constraints

OptimizerPreferences (BaseModel) pydantic-model
Source code in classiq/interface/executor/optimizer_preferences.py
class OptimizerPreferences(BaseModel):
    """Preferences for the classical optimizer driving a variational execution."""

    name: OptimizerType = pydantic.Field(
        default=OptimizerType.COBYLA, description="Classical optimization algorithm."
    )
    num_shots: Optional[pydantic.PositiveInt] = pydantic.Field(
        default=None,
        description="Number of repetitions of the quantum ansatz.",
    )
    max_iteration: pydantic.PositiveInt = pydantic.Field(
        default=100, description="Maximal number of optimizer iterations"
    )
    # None is accepted at construction time; check_tolerance fills in 0.001
    # for every optimizer except SPSA.
    tolerance: pydantic.PositiveFloat = pydantic.Field(
        default=None, description="Final accuracy in the optimization"
    )
    # None is accepted at construction time; check_step_size fills in 0.05
    # for the gradient-based optimizers (L_BFGS_B, ADAM).
    step_size: pydantic.PositiveFloat = pydantic.Field(
        default=None,
        description="step size for numerically " "calculating the gradient",
    )
    random_seed: Optional[int] = pydantic.Field(
        default=None,
        description="The random seed used for the generation",
    )
    initial_point: Optional[List[float]] = pydantic.Field(
        default=None,
        description="Initial values for the ansatz parameters",
    )
    skip_compute_variance: bool = pydantic.Field(
        default=False,
        description="If True, the optimizer will not compute the variance of the ansatz.",
    )

    @pydantic.validator("tolerance", pre=True, always=True)
    def check_tolerance(cls, tolerance, values):
        """Forbid tolerance for SPSA; default it to 0.001 for all other optimizers."""
        # Bug fix: the optimizer field is named "name", not "type" (compare
        # check_step_size). The old values.get("type") always returned None,
        # so the SPSA-specific branches below never ran.
        optimizer_type = values.get("name")
        if tolerance is not None and optimizer_type == OptimizerType.SPSA:
            raise ValueError("No tolerance param for SPSA optimizer")

        if tolerance is None and optimizer_type != OptimizerType.SPSA:
            tolerance = 0.001

        return tolerance

    @pydantic.validator("step_size", pre=True, always=True)
    def check_step_size(cls, step_size, values):
        """Allow step_size only for L_BFGS_B/ADAM; default it to 0.05 there."""
        optimizer_type = values.get("name")
        if step_size is not None and optimizer_type not in (
            OptimizerType.L_BFGS_B,
            OptimizerType.ADAM,
        ):
            raise ValueError("Use step_size only for L_BFGS_B or ADAM optimizers.")

        if step_size is None and optimizer_type in (
            OptimizerType.L_BFGS_B,
            OptimizerType.ADAM,
        ):
            step_size = 0.05

        return step_size
initial_point: List[float] pydantic-field

Initial values for the ansatz parameters

max_iteration: PositiveInt pydantic-field

Maximal number of optimizer iterations

name: OptimizerType pydantic-field

Classical optimization algorithm.

num_shots: PositiveInt pydantic-field

Number of repetitions of the quantum ansatz.

random_seed: int pydantic-field

The random seed used for the generation

skip_compute_variance: bool pydantic-field

If True, the optimizer will not compute the variance of the ansatz.

step_size: PositiveFloat pydantic-field

step size for numerically calculating the gradient

tolerance: PositiveFloat pydantic-field

Final accuracy in the optimization

quantum_program

QuantumBaseProgram (BaseModel) pydantic-model
Source code in classiq/interface/executor/quantum_program.py
class QuantumBaseProgram(BaseModel):
    """A quantum program expressed as code text in a given instruction set."""

    syntax: QuantumInstructionSet = pydantic.Field(
        default=QuantumInstructionSet.QASM, description="The syntax of the program."
    )
    code: CodeType = pydantic.Field(
        ..., description="The textual representation of the program"
    )

    @pydantic.validator("code")
    def load_quantum_program(cls, code: CodeType, values: Dict[str, Any]) -> CodeType:
        """Parse IonQ source strings into circuit objects; pass everything else through."""
        is_ionq_source = (
            isinstance(code, str)
            and values.get("syntax") == QuantumInstructionSet.IONQ
        )
        if is_ionq_source:
            return ionq_quantum_program.IonqQuantumCircuit.from_string(code)
        return code
code: Union[str, classiq.interface.backend.ionq.ionq_quantum_program.IonqQuantumCircuit] pydantic-field required

The textual representation of the program

syntax: QuantumInstructionSet pydantic-field

The syntax of the program.