Python SDK
Classiq SDK.
analyzer
special
¶
analyzer
¶
Analyzer module, implementing facilities for analyzing circuits using Classiq platform.
Analyzer (AnalyzerUtilities)
¶
Analyzer is the wrapper object for all analysis capabilities.
Source code in classiq/analyzer/analyzer.py
class Analyzer(AnalyzerUtilities, metaclass=Asyncify):
"""Analyzer is the wrapper object for all analysis capabilities."""
def __init__(self, circuit: generator_result.GeneratedCircuit) -> None:
"""Init self.
Args:
circuit (): The circuit to be analyzed.
"""
if circuit.qasm is None:
raise ClassiqAnalyzerError(
"Analysis requires a circuit with valid QASM code"
)
self._params: analysis_params.AnalysisParams = analysis_params.AnalysisParams(
qasm=circuit.qasm
)
self.circuit: generator_result.GeneratedCircuit = circuit
self.hardware_comparison_table: Optional[go.Figure] = None
self.available_devices: ProviderAvailableDevices = dict()
self.hardware_graphs: HardwareGraphs = dict()
async def analyzer_app_async(self) -> None:
"""Opens the analyzer app with synthesis interactive results.
Returns:
None.
"""
result = await ApiWrapper.call_analyzer_app(self.circuit)
webbrowser.open_new_tab(
urljoin(
client_ide_base_url(),
circuit_page_uri(
circuit_id=result.id, circuit_version=self.circuit.version
),
)
)
async def get_available_devices_async(
self, providers: Optional[List[ProviderNameEnum]] = None
) -> Dict[ProviderNameEnum, List[DeviceName]]:
"""Returns dict of the available devices by the providers. only devices
with sufficient number of qubits are returns
Args: providers (): List of providers (string or `AnalyzerProviderVendor`).
if None, the table include all the available hardware.
Returns:
available devices (): dict of the available devices (Dict[str,List[str]]).
"""
if providers is None:
providers = list(AnalyzerProviderVendor)
await self.request_available_devices_async(providers=providers)
return {
provider: self._filter_devices_by_qubits_count(provider)
for provider in providers
}
async def plot_hardware_connectivity_async(
self,
provider: Optional[ProviderNameEnum] = None,
device: Optional[DeviceName] = None,
) -> VBox:
"""plot the hardware_connectivity graph. It is required to required install the
analyzer_sdk extra.
Args:
provider (): provider name (optional - string or `AnalyzerProviderVendor`).
device (): device name (optional - string).
Returns:
hardware_connectivity_graph (): interactive graph.
"""
self._validate_analyzer_extra()
interactive_hardware = InteractiveHardware(
circuit=self.circuit,
params=self._params,
available_devices=self.available_devices,
hardware_graphs=self.hardware_graphs,
)
await interactive_hardware.enable_interactivity_async()
if provider is not None:
interactive_hardware.providers_combobox.value = provider
if device is not None:
interactive_hardware.devices_combobox.value = device
return interactive_hardware.show_interactive_graph()
async def get_hardware_comparison_table_async(
self,
providers: Optional[List[Union[str, AnalyzerProviderVendor]]] = None,
devices: Optional[List[str]] = None,
) -> None:
"""create a comparison table between the transpiled circuits result on different hardware.
The comparison table included the depth, multi qubit gates count,and total gates count of the circuits.
Args: providers (): List of providers (string or `AnalyzerProviderVendor`). if None, the table include all
the available hardware.
devices (): List of devices (string). if None, the table include all the available devices of the selected
providers.
Returns: None.
"""
if providers is None:
providers = list(AnalyzerProviderVendor)
params = analysis_params.AnalysisHardwareListParams(
qasm=self._params.qasm, providers=providers, devices=devices
)
result = await ApiWrapper.call_table_graphs_task(params=params)
self.hardware_comparison_table = go.Figure(json.loads(result.details))
async def plot_hardware_comparison_table_async(
self,
providers: Optional[List[Union[str, AnalyzerProviderVendor]]] = None,
devices: Optional[List[str]] = None,
) -> None:
"""plot the comparison table. if it has not been created it, it first creates the table using all the
available hardware.
Returns:
None.
"""
await self._hardware_comparison_condition_async(
providers=providers, devices=devices
)
self.hardware_comparison_table.show() # type: ignore[union-attr]
async def hardware_aware_resynthesize_async(
self, device: str, provider: Union[str, AnalyzerProviderVendor]
) -> generator_result.GeneratedCircuit:
"""resynthesize the analyzed circuit using its original model, and a new backend preferences.
Args:
provider (): Provider company or cloud for the requested backend (string or `AnalyzerProviderVendor`).
device (): Name of the requested backend"
Returns:
circuit (): resynthesize circuit (`GeneratedCircuit`).
"""
update_preferences = self._validated_update_preferences(
device=device, provider=provider
)
model = Model()
model._model = self.circuit.model.copy(deep=True)
model._model.preferences = update_preferences
return GeneratedCircuit.parse_raw(
await synthesize_async(model._model.get_model())
)
async def optimized_hardware_resynthesize_async(
self,
comparison_property: Union[str, ComparisonProperties],
providers: Optional[List[Union[str, AnalyzerProviderVendor]]] = None,
devices: Optional[List[str]] = None,
) -> generator_result.GeneratedCircuit:
"""Re-synthesize the analyzed circuit using its original model, and a new backend preferences, which is the
devices with the best fit to the selected comparison property.
Args: comparison_property (): A comparison properties using to compare between the devices (string or
`ComparisonProperties`).
providers (): List of providers (string or `AnalyzerProviderVendor`). If None, the comparison include all the
available hardware.
devices (): List of devices (string). If None, the comparison include all the available devices of the selected
providers.
Returns: circuit (): resynthesize circuit (`GeneratedCircuit`).
"""
optimized_device, optimized_provider = await self._get_optimized_hardware_async(
providers=providers,
devices=devices,
comparison_property=comparison_property,
)
return await self.hardware_aware_resynthesize_async(
provider=optimized_provider, device=optimized_device
)
async def _get_optimized_hardware_async(
self,
comparison_property: Union[str, ComparisonProperties],
providers: Optional[List[Union[str, AnalyzerProviderVendor]]] = None,
devices: Optional[List[str]] = None,
) -> Tuple[str, str]:
await self._hardware_comparison_condition_async(
providers=providers, devices=devices
)
optimized_device, optimized_provider = self._choose_optimized_hardware(
comparison_property=comparison_property
)
return optimized_device, optimized_provider
def _choose_optimized_hardware(
self, comparison_property: Union[str, ComparisonProperties]
) -> Tuple[str, str]:
comparison_params = AnalysisComparisonParams(property=comparison_property)
if not isinstance(self.hardware_comparison_table, go.Figure):
raise ClassiqAnalyzerError(
"The analyzer does not contains a valid hardware comparison table"
)
column_names = self.hardware_comparison_table.data[0].header.values
param = self._get_right_form_of_comparison_params(
comparison_params=comparison_params
)
property_index = column_names.index(param)
sort_button = self.hardware_comparison_table.layout.updatemenus[0]
sort_data = sort_button.buttons[property_index].args[0]["cells"]["values"]
return sort_data[0][0], sort_data[1][0]
@staticmethod
def _get_right_form_of_comparison_params(
comparison_params: AnalysisComparisonParams,
) -> str:
return comparison_params.property.upper().replace("_", " ")
def _validated_update_preferences(
self, device: str, provider: Union[str, AnalyzerProviderVendor]
) -> Preferences:
if not isinstance(self.circuit.model, APIModel):
raise ClassiqAnalyzerError("The circuit does not contains a valid model")
preferences_dict = self.circuit.model.preferences.dict()
preferences_dict.update(
dict(backend_service_provider=provider, backend_name=device)
)
return Preferences.parse_obj(preferences_dict)
async def _hardware_comparison_condition_async(
self,
providers: Optional[List[Union[str, AnalyzerProviderVendor]]] = None,
devices: Optional[List[str]] = None,
) -> None:
if (
providers is not None
or devices is not None
or self.hardware_comparison_table is None
):
await self.get_hardware_comparison_table_async(
providers=providers, devices=devices
)
@staticmethod
def _open_route(path: str) -> None:
backend_uri = client.client().get_backend_uri()
webbrowser.open_new_tab(f"{backend_uri}{path}")
@staticmethod
def _validate_analyzer_extra() -> None:
if find_ipywidgets is None:
raise ClassiqAnalyzerError(
"To use this method, please install the `analyzer sdk`. Run the \
following line: - pip install classiq[analyzer_sdk]"
)
__init__(self, circuit)
special
¶
Init self.
Parameters:
Name | Type | Description | Default |
---|---|---|---|
circuit |
The circuit to be analyzed. |
required |
Source code in classiq/analyzer/analyzer.py
def __init__(self, circuit: generator_result.GeneratedCircuit) -> None:
"""Init self.
Args:
circuit (): The circuit to be analyzed.
"""
if circuit.qasm is None:
raise ClassiqAnalyzerError(
"Analysis requires a circuit with valid QASM code"
)
self._params: analysis_params.AnalysisParams = analysis_params.AnalysisParams(
qasm=circuit.qasm
)
self.circuit: generator_result.GeneratedCircuit = circuit
self.hardware_comparison_table: Optional[go.Figure] = None
self.available_devices: ProviderAvailableDevices = dict()
self.hardware_graphs: HardwareGraphs = dict()
analyzer_app(self)
async
¶
Opens the analyzer app with synthesis interactive results.
Returns:
Type | Description |
---|---|
None |
None. |
Source code in classiq/analyzer/analyzer.py
async def analyzer_app_async(self) -> None:
"""Opens the analyzer app with synthesis interactive results.
Returns:
None.
"""
result = await ApiWrapper.call_analyzer_app(self.circuit)
webbrowser.open_new_tab(
urljoin(
client_ide_base_url(),
circuit_page_uri(
circuit_id=result.id, circuit_version=self.circuit.version
),
)
)
analyzer_app_async(self)
async
¶
Opens the analyzer app with synthesis interactive results.
Returns:
Type | Description |
---|---|
None |
None. |
Source code in classiq/analyzer/analyzer.py
async def analyzer_app_async(self) -> None:
"""Opens the analyzer app with synthesis interactive results.
Returns:
None.
"""
result = await ApiWrapper.call_analyzer_app(self.circuit)
webbrowser.open_new_tab(
urljoin(
client_ide_base_url(),
circuit_page_uri(
circuit_id=result.id, circuit_version=self.circuit.version
),
)
)
get_available_devices(self, providers=None)
async
¶
Returns a dict of the available devices by provider. Only devices with a sufficient number of qubits are returned.
Args: providers (): List of providers (string or AnalyzerProviderVendor
).
If None, the table includes all the available hardware.
Returns:
Type | Description |
---|---|
available devices () |
dict of the available devices (Dict[str,List[str]]). |
Source code in classiq/analyzer/analyzer.py
async def get_available_devices_async(
self, providers: Optional[List[ProviderNameEnum]] = None
) -> Dict[ProviderNameEnum, List[DeviceName]]:
"""Returns dict of the available devices by the providers. only devices
with sufficient number of qubits are returns
Args: providers (): List of providers (string or `AnalyzerProviderVendor`).
if None, the table include all the available hardware.
Returns:
available devices (): dict of the available devices (Dict[str,List[str]]).
"""
if providers is None:
providers = list(AnalyzerProviderVendor)
await self.request_available_devices_async(providers=providers)
return {
provider: self._filter_devices_by_qubits_count(provider)
for provider in providers
}
get_available_devices_async(self, providers=None)
async
¶
Returns a dict of the available devices by provider. Only devices with a sufficient number of qubits are returned.
Args: providers (): List of providers (string or AnalyzerProviderVendor
).
If None, the table includes all the available hardware.
Returns:
Type | Description |
---|---|
available devices () |
dict of the available devices (Dict[str,List[str]]). |
Source code in classiq/analyzer/analyzer.py
async def get_available_devices_async(
self, providers: Optional[List[ProviderNameEnum]] = None
) -> Dict[ProviderNameEnum, List[DeviceName]]:
"""Returns dict of the available devices by the providers. only devices
with sufficient number of qubits are returns
Args: providers (): List of providers (string or `AnalyzerProviderVendor`).
if None, the table include all the available hardware.
Returns:
available devices (): dict of the available devices (Dict[str,List[str]]).
"""
if providers is None:
providers = list(AnalyzerProviderVendor)
await self.request_available_devices_async(providers=providers)
return {
provider: self._filter_devices_by_qubits_count(provider)
for provider in providers
}
get_hardware_comparison_table(self, providers=None, devices=None)
async
¶
Creates a comparison table of the transpiled circuit results on different hardware. The comparison table includes the depth, multi-qubit gate count, and total gate count of the circuits.
Args: providers (): List of providers (string or AnalyzerProviderVendor
). If None, the table includes all
the available hardware.
devices (): List of devices (string). if None, the table include all the available devices of the selected
providers.
Returns: None.
Source code in classiq/analyzer/analyzer.py
async def get_hardware_comparison_table_async(
self,
providers: Optional[List[Union[str, AnalyzerProviderVendor]]] = None,
devices: Optional[List[str]] = None,
) -> None:
"""create a comparison table between the transpiled circuits result on different hardware.
The comparison table included the depth, multi qubit gates count,and total gates count of the circuits.
Args: providers (): List of providers (string or `AnalyzerProviderVendor`). if None, the table include all
the available hardware.
devices (): List of devices (string). if None, the table include all the available devices of the selected
providers.
Returns: None.
"""
if providers is None:
providers = list(AnalyzerProviderVendor)
params = analysis_params.AnalysisHardwareListParams(
qasm=self._params.qasm, providers=providers, devices=devices
)
result = await ApiWrapper.call_table_graphs_task(params=params)
self.hardware_comparison_table = go.Figure(json.loads(result.details))
get_hardware_comparison_table_async(self, providers=None, devices=None)
async
¶
Creates a comparison table of the transpiled circuit results on different hardware. The comparison table includes the depth, multi-qubit gate count, and total gate count of the circuits.
Args: providers (): List of providers (string or AnalyzerProviderVendor
). If None, the table includes all
the available hardware.
devices (): List of devices (string). if None, the table include all the available devices of the selected
providers.
Returns: None.
Source code in classiq/analyzer/analyzer.py
async def get_hardware_comparison_table_async(
self,
providers: Optional[List[Union[str, AnalyzerProviderVendor]]] = None,
devices: Optional[List[str]] = None,
) -> None:
"""create a comparison table between the transpiled circuits result on different hardware.
The comparison table included the depth, multi qubit gates count,and total gates count of the circuits.
Args: providers (): List of providers (string or `AnalyzerProviderVendor`). if None, the table include all
the available hardware.
devices (): List of devices (string). if None, the table include all the available devices of the selected
providers.
Returns: None.
"""
if providers is None:
providers = list(AnalyzerProviderVendor)
params = analysis_params.AnalysisHardwareListParams(
qasm=self._params.qasm, providers=providers, devices=devices
)
result = await ApiWrapper.call_table_graphs_task(params=params)
self.hardware_comparison_table = go.Figure(json.loads(result.details))
hardware_aware_resynthesize(self, device, provider)
async
¶
Re-synthesizes the analyzed circuit using its original model and new backend preferences.
Parameters:
Name | Type | Description | Default |
---|---|---|---|
provider |
Provider company or cloud for the requested backend (string or |
required | |
device |
Name of the requested backend. |
required |
Returns:
Type | Description |
---|---|
circuit () |
resynthesize circuit ( |
Source code in classiq/analyzer/analyzer.py
async def hardware_aware_resynthesize_async(
self, device: str, provider: Union[str, AnalyzerProviderVendor]
) -> generator_result.GeneratedCircuit:
"""resynthesize the analyzed circuit using its original model, and a new backend preferences.
Args:
provider (): Provider company or cloud for the requested backend (string or `AnalyzerProviderVendor`).
device (): Name of the requested backend"
Returns:
circuit (): resynthesize circuit (`GeneratedCircuit`).
"""
update_preferences = self._validated_update_preferences(
device=device, provider=provider
)
model = Model()
model._model = self.circuit.model.copy(deep=True)
model._model.preferences = update_preferences
return GeneratedCircuit.parse_raw(
await synthesize_async(model._model.get_model())
)
hardware_aware_resynthesize_async(self, device, provider)
async
¶
Re-synthesizes the analyzed circuit using its original model and new backend preferences.
Parameters:
Name | Type | Description | Default |
---|---|---|---|
provider |
Provider company or cloud for the requested backend (string or |
required | |
device |
Name of the requested backend. |
required |
Returns:
Type | Description |
---|---|
circuit () |
resynthesize circuit ( |
Source code in classiq/analyzer/analyzer.py
async def hardware_aware_resynthesize_async(
self, device: str, provider: Union[str, AnalyzerProviderVendor]
) -> generator_result.GeneratedCircuit:
"""resynthesize the analyzed circuit using its original model, and a new backend preferences.
Args:
provider (): Provider company or cloud for the requested backend (string or `AnalyzerProviderVendor`).
device (): Name of the requested backend"
Returns:
circuit (): resynthesize circuit (`GeneratedCircuit`).
"""
update_preferences = self._validated_update_preferences(
device=device, provider=provider
)
model = Model()
model._model = self.circuit.model.copy(deep=True)
model._model.preferences = update_preferences
return GeneratedCircuit.parse_raw(
await synthesize_async(model._model.get_model())
)
optimized_hardware_resynthesize(self, comparison_property, providers=None, devices=None)
async
¶
Re-synthesizes the analyzed circuit using its original model and new backend preferences: the device with the best fit to the selected comparison property.
Args: comparison_property (): A comparison property used to compare the devices (string or
ComparisonProperties
).
providers (): List of providers (string or AnalyzerProviderVendor
). If None, the comparison includes all the
available hardware.
devices (): List of devices (string). If None, the comparison includes all the available devices of the selected
providers.
Returns: circuit (): resynthesize circuit (GeneratedCircuit
).
Source code in classiq/analyzer/analyzer.py
async def optimized_hardware_resynthesize_async(
self,
comparison_property: Union[str, ComparisonProperties],
providers: Optional[List[Union[str, AnalyzerProviderVendor]]] = None,
devices: Optional[List[str]] = None,
) -> generator_result.GeneratedCircuit:
"""Re-synthesize the analyzed circuit using its original model, and a new backend preferences, which is the
devices with the best fit to the selected comparison property.
Args: comparison_property (): A comparison properties using to compare between the devices (string or
`ComparisonProperties`).
providers (): List of providers (string or `AnalyzerProviderVendor`). If None, the comparison include all the
available hardware.
devices (): List of devices (string). If None, the comparison include all the available devices of the selected
providers.
Returns: circuit (): resynthesize circuit (`GeneratedCircuit`).
"""
optimized_device, optimized_provider = await self._get_optimized_hardware_async(
providers=providers,
devices=devices,
comparison_property=comparison_property,
)
return await self.hardware_aware_resynthesize_async(
provider=optimized_provider, device=optimized_device
)
optimized_hardware_resynthesize_async(self, comparison_property, providers=None, devices=None)
async
¶
Re-synthesizes the analyzed circuit using its original model and new backend preferences: the device with the best fit to the selected comparison property.
Args: comparison_property (): A comparison property used to compare the devices (string or
ComparisonProperties
).
providers (): List of providers (string or AnalyzerProviderVendor
). If None, the comparison includes all the
available hardware.
devices (): List of devices (string). If None, the comparison includes all the available devices of the selected
providers.
Returns: circuit (): resynthesize circuit (GeneratedCircuit
).
Source code in classiq/analyzer/analyzer.py
async def optimized_hardware_resynthesize_async(
self,
comparison_property: Union[str, ComparisonProperties],
providers: Optional[List[Union[str, AnalyzerProviderVendor]]] = None,
devices: Optional[List[str]] = None,
) -> generator_result.GeneratedCircuit:
"""Re-synthesize the analyzed circuit using its original model, and a new backend preferences, which is the
devices with the best fit to the selected comparison property.
Args: comparison_property (): A comparison properties using to compare between the devices (string or
`ComparisonProperties`).
providers (): List of providers (string or `AnalyzerProviderVendor`). If None, the comparison include all the
available hardware.
devices (): List of devices (string). If None, the comparison include all the available devices of the selected
providers.
Returns: circuit (): resynthesize circuit (`GeneratedCircuit`).
"""
optimized_device, optimized_provider = await self._get_optimized_hardware_async(
providers=providers,
devices=devices,
comparison_property=comparison_property,
)
return await self.hardware_aware_resynthesize_async(
provider=optimized_provider, device=optimized_device
)
plot_hardware_comparison_table(self, providers=None, devices=None)
async
¶
Plots the comparison table. If the table has not yet been created, it is first created using all the available hardware.
Returns:
Type | Description |
---|---|
None |
None. |
Source code in classiq/analyzer/analyzer.py
async def plot_hardware_comparison_table_async(
self,
providers: Optional[List[Union[str, AnalyzerProviderVendor]]] = None,
devices: Optional[List[str]] = None,
) -> None:
"""plot the comparison table. if it has not been created it, it first creates the table using all the
available hardware.
Returns:
None.
"""
await self._hardware_comparison_condition_async(
providers=providers, devices=devices
)
self.hardware_comparison_table.show() # type: ignore[union-attr]
plot_hardware_comparison_table_async(self, providers=None, devices=None)
async
¶
Plots the comparison table. If the table has not yet been created, it is first created using all the available hardware.
Returns:
Type | Description |
---|---|
None |
None. |
Source code in classiq/analyzer/analyzer.py
async def plot_hardware_comparison_table_async(
self,
providers: Optional[List[Union[str, AnalyzerProviderVendor]]] = None,
devices: Optional[List[str]] = None,
) -> None:
"""plot the comparison table. if it has not been created it, it first creates the table using all the
available hardware.
Returns:
None.
"""
await self._hardware_comparison_condition_async(
providers=providers, devices=devices
)
self.hardware_comparison_table.show() # type: ignore[union-attr]
plot_hardware_connectivity(self, provider=None, device=None)
async
¶
Plots the hardware_connectivity graph. It is required to install the analyzer_sdk extra.
Parameters:
Name | Type | Description | Default |
---|---|---|---|
provider |
provider name (optional - string or |
None |
|
device |
device name (optional - string). |
None |
Returns:
Type | Description |
---|---|
hardware_connectivity_graph () |
interactive graph. |
Source code in classiq/analyzer/analyzer.py
async def plot_hardware_connectivity_async(
self,
provider: Optional[ProviderNameEnum] = None,
device: Optional[DeviceName] = None,
) -> VBox:
"""plot the hardware_connectivity graph. It is required to required install the
analyzer_sdk extra.
Args:
provider (): provider name (optional - string or `AnalyzerProviderVendor`).
device (): device name (optional - string).
Returns:
hardware_connectivity_graph (): interactive graph.
"""
self._validate_analyzer_extra()
interactive_hardware = InteractiveHardware(
circuit=self.circuit,
params=self._params,
available_devices=self.available_devices,
hardware_graphs=self.hardware_graphs,
)
await interactive_hardware.enable_interactivity_async()
if provider is not None:
interactive_hardware.providers_combobox.value = provider
if device is not None:
interactive_hardware.devices_combobox.value = device
return interactive_hardware.show_interactive_graph()
plot_hardware_connectivity_async(self, provider=None, device=None)
async
¶
Plots the hardware_connectivity graph. It is required to install the analyzer_sdk extra.
Parameters:
Name | Type | Description | Default |
---|---|---|---|
provider |
provider name (optional - string or |
None |
|
device |
device name (optional - string). |
None |
Returns:
Type | Description |
---|---|
hardware_connectivity_graph () |
interactive graph. |
Source code in classiq/analyzer/analyzer.py
async def plot_hardware_connectivity_async(
self,
provider: Optional[ProviderNameEnum] = None,
device: Optional[DeviceName] = None,
) -> VBox:
"""plot the hardware_connectivity graph. It is required to required install the
analyzer_sdk extra.
Args:
provider (): provider name (optional - string or `AnalyzerProviderVendor`).
device (): device name (optional - string).
Returns:
hardware_connectivity_graph (): interactive graph.
"""
self._validate_analyzer_extra()
interactive_hardware = InteractiveHardware(
circuit=self.circuit,
params=self._params,
available_devices=self.available_devices,
hardware_graphs=self.hardware_graphs,
)
await interactive_hardware.enable_interactivity_async()
if provider is not None:
interactive_hardware.providers_combobox.value = provider
if device is not None:
interactive_hardware.devices_combobox.value = device
return interactive_hardware.show_interactive_graph()
rb
¶
RBAnalysis
¶
Source code in classiq/analyzer/rb.py
class RBAnalysis(metaclass=Asyncify):
def __init__(self, experiments_data: List[AnalysisRBParams]) -> None:
"""Init self.
Args:
experiments_data: List of results from varius RB experiments.
"""
self.experiments_data = experiments_data
self._total_results: pd.DataFrame = pd.DataFrame()
async def _get_multiple_hardware_results_async(self) -> Dict[str, RbResults]:
total_result: Dict[str, RbResults] = {}
for batch in self.experiments_data:
if len(batch.num_clifford) < 5:
raise ClassiqAnalyzerError(
f"An experiment mush contain at least five sequences,"
f" this sequence is {len(batch.num_clifford)}"
)
rb_result = await ApiWrapper.call_rb_analysis_task(batch)
total_result[batch.hardware] = rb_result
return total_result
@staticmethod
def _get_df_indices(results) -> List[str]:
temp_res = results.copy()
_, rb_result_keys = temp_res.popitem()
return list(rb_result_keys.__dict__.keys())
async def show_multiple_hardware_data_async(self) -> pd.DataFrame:
"""Run the RB analysis.
Returns:
The RB result.
"""
results = await self._get_multiple_hardware_results_async()
indices = RBAnalysis._get_df_indices(results)
result_df = pd.DataFrame(index=indices)
for hardware, result in results.items():
result_df[hardware] = result.__dict__.values()
self._total_results = result_df
return result_df
def plot_multiple_hardware_results(self) -> go.Figure:
"""Plot Bar graph of the results.
Returns:
None.
"""
df = self._total_results.loc[["mean_fidelity", "average_error"]].transpose()
hardware = list(df.index)
params = list(df.columns)
data = []
for param in params:
data.append(go.Bar(name=param, x=hardware, y=df[param].values * 100))
fig = go.Figure(data).update_layout(
title="RB hardware comparison",
barmode="group",
yaxis=dict(title="Fidelity in %"),
xaxis=dict(title="Hardware"),
)
return fig
__init__(self, experiments_data)
special
¶
Init self.
Parameters:
Name | Type | Description | Default |
---|---|---|---|
experiments_data |
List[classiq.interface.analyzer.analysis_params.AnalysisRBParams] |
List of results from various RB experiments. |
required |
Source code in classiq/analyzer/rb.py
def __init__(self, experiments_data: List[AnalysisRBParams]) -> None:
"""Init self.
Args:
experiments_data: List of results from varius RB experiments.
"""
self.experiments_data = experiments_data
self._total_results: pd.DataFrame = pd.DataFrame()
plot_multiple_hardware_results(self)
¶
Plot Bar graph of the results.
Returns:
Type | Description |
---|---|
Figure |
None. |
Source code in classiq/analyzer/rb.py
def plot_multiple_hardware_results(self) -> go.Figure:
"""Plot Bar graph of the results.
Returns:
None.
"""
df = self._total_results.loc[["mean_fidelity", "average_error"]].transpose()
hardware = list(df.index)
params = list(df.columns)
data = []
for param in params:
data.append(go.Bar(name=param, x=hardware, y=df[param].values * 100))
fig = go.Figure(data).update_layout(
title="RB hardware comparison",
barmode="group",
yaxis=dict(title="Fidelity in %"),
xaxis=dict(title="Hardware"),
)
return fig
show_multiple_hardware_data(self)
async
¶
Run the RB analysis.
Returns:
Type | Description |
---|---|
DataFrame |
The RB result. |
Source code in classiq/analyzer/rb.py
async def show_multiple_hardware_data_async(self) -> pd.DataFrame:
"""Run the RB analysis.
Returns:
The RB result.
"""
results = await self._get_multiple_hardware_results_async()
indices = RBAnalysis._get_df_indices(results)
result_df = pd.DataFrame(index=indices)
for hardware, result in results.items():
result_df[hardware] = result.__dict__.values()
self._total_results = result_df
return result_df
show_multiple_hardware_data_async(self)
async
¶
Run the RB analysis.
Returns:
Type | Description |
---|---|
DataFrame |
The RB result. |
Source code in classiq/analyzer/rb.py
async def show_multiple_hardware_data_async(self) -> pd.DataFrame:
"""Run the RB analysis.
Returns:
The RB result.
"""
results = await self._get_multiple_hardware_results_async()
indices = RBAnalysis._get_df_indices(results)
result_df = pd.DataFrame(index=indices)
for hardware, result in results.items():
result_df[hardware] = result.__dict__.values()
self._total_results = result_df
return result_df
applications
special
¶
chemistry
special
¶
ansatz_parameters
¶
HEAParameters
dataclass
¶
HEAParameters(reps: int, num_qubits: int, connectivity_map: List[Tuple[int, int]], one_qubit_gates: List[str], two_qubit_gates: List[str])
Source code in classiq/applications/chemistry/ansatz_parameters.py
@dataclasses.dataclass
class HEAParameters:
reps: int
num_qubits: int
connectivity_map: List[Tuple[int, int]]
one_qubit_gates: List[str]
two_qubit_gates: List[str]
HVAParameters
dataclass
¶
HVAParameters(reps: int)
Source code in classiq/applications/chemistry/ansatz_parameters.py
@dataclasses.dataclass
class HVAParameters:
reps: int
UCCParameters
dataclass
¶
UCCParameters(excitations: List[int] =
Source code in classiq/applications/chemistry/ansatz_parameters.py
@dataclasses.dataclass
class UCCParameters:
excitations: EXCITATIONS_TYPE_EXACT = dataclasses.field(
default_factory=default_excitation_factory
)
chemistry_execution_parameters
¶
ChemistryExecutionParameters
dataclass
¶
ChemistryExecutionParameters(optimizer: classiq.interface.executor.optimizer_preferences.OptimizerType, max_iteration: int, initial_point: Optional[numpy.ndarray] = None, tolerance: float = 0.0, step_size: float = 0.0, skip_compute_variance: bool = False)
Source code in classiq/applications/chemistry/chemistry_execution_parameters.py
@dataclasses.dataclass
class ChemistryExecutionParameters:
optimizer: OptimizerType
max_iteration: int
initial_point: Optional[np.ndarray] = dataclasses.field(default=None)
tolerance: float = dataclasses.field(default=0.0)
step_size: float = dataclasses.field(default=0.0)
skip_compute_variance: bool = dataclasses.field(default=False)
combinatorial_optimization
special
¶
combinatorial_optimization_config
¶
OptimizerConfig
dataclass
¶
OptimizerConfig(opt_type: classiq.interface.executor.optimizer_preferences.OptimizerType = OptimizerType.COBYLA, max_iteration: Optional[int] = None, tolerance: float = 0.0, step_size: float = 0.0, skip_compute_variance: bool = False, cost_type: CostType = CostType.CVAR, alpha_cvar: float = 1.0, initial_point: Optional[List[float]] = None)
Source code in classiq/applications/combinatorial_optimization/combinatorial_optimization_config.py
@dataclass
class OptimizerConfig:
opt_type: OptimizerType = OptimizerType.COBYLA
max_iteration: Optional[int] = None
tolerance: float = 0.0
step_size: float = 0.0
skip_compute_variance: bool = False
cost_type: CostType = CostType.CVAR
alpha_cvar: float = 1.0
initial_point: Optional[List[float]] = dataclasses.field(default=None)
QAOAConfig
dataclass
¶
QAOAConfig(num_layers: int = 2, penalty_energy: float = 2.0)
Source code in classiq/applications/combinatorial_optimization/combinatorial_optimization_config.py
@dataclass
class QAOAConfig:
num_layers: int = 2
penalty_energy: float = 2.0
qnn
special
¶
datasets
special
¶
datasets_utils
¶
all_bits_to_one(n)
¶
Return an integer of length n
bits, where all the bits are 1
Source code in classiq/applications/qnn/datasets/datasets_utils.py
def all_bits_to_one(n: int) -> int:
"""
Return an integer of length `n` bits, where all the bits are `1`
"""
return (2**n) - 1
all_bits_to_zero(n)
¶
Return an integer of length n
bits, where all the bits are 0
Source code in classiq/applications/qnn/datasets/datasets_utils.py
def all_bits_to_zero(n: int) -> int:
"""
Return an integer of length `n` bits, where all the bits are `0`
"""
return 0
state_to_label(pure_state)
¶
input: a Tensor
of binary numbers (0 or 1) - the return value of a measurement
output: probability (from that measurement) of measuring 0
(in other words,
|0> translates to 100% chance for measuring |0> ==> return value is 1.0
|1> translates to 0% chance for measuring |0> ==> return value is 0.0
)
Source code in classiq/applications/qnn/datasets/datasets_utils.py
def state_to_label(pure_state: Tensor) -> Tensor:
"""
input: a `Tensor` of binary numbers (0 or 1) - the return value of a measurement
output: probability (from that measurement) of measuring 0
(in other words,
|0> translates to 100% chance for measuring |0> ==> return value is 1.0
|1> translates to 0% chance for measuring |0> ==> return value is 0.0
)
"""
# |0> means 100% chance to get |0> ==> 100% == 1.0
# |1> means 0% chance to get |0> ==> 0% == 0.0
# This line basically does `1 - bool(pure_state)`
return 1 - pure_state.bool().int()
state_to_weights(pure_state)
¶
input: a Tensor
of binary numbers (0 or 1)
output: the required angle of rotation for Rx
(in other words, |0> translates to no rotation, and |1> translates to pi
)
Source code in classiq/applications/qnn/datasets/datasets_utils.py
def state_to_weights(pure_state: Tensor) -> Tensor:
"""
input: a `Tensor` of binary numbers (0 or 1)
output: the required angle of rotation for `Rx`
(in other words, |0> translates to no rotation, and |1> translates to `pi`)
"""
# |0> requires a rotation by 0
# |1> requires a rotation by pi
return pure_state.bool().int() * np.pi
qlayer
¶
QLayer (Module)
¶
Source code in classiq/applications/qnn/qlayer.py
class QLayer(nn.Module):
def __init__(
self,
quantum_program: SerializedQuantumProgram,
execute: ExecuteFunction,
post_process: PostProcessFunction,
# Optional parameters:
head_start: Union[float, Tensor, None] = None,
# Experimental parameters:
calc_num_out_features: CalcNumOutFeatures = calc_num_out_features_single_output,
) -> None:
circuit = Circuit.parse_raw(quantum_program)
validate_circuit(circuit)
super().__init__()
self._execute = execute
self._post_process = post_process
self._head_start = head_start
self.quantum_program = quantum_program
weights, _ = extract_parameters(circuit)
self.in_features: int = len(weights)
self.out_features: int = calc_num_out_features(quantum_program)
self._initialize_parameters()
def _initialize_parameters(self) -> None:
shape: Tuple[int, ...] = (
(self.out_features, self.in_features)
if self.out_features > 1
else (self.in_features,)
)
if self._head_start is None:
value = torch.rand(shape)
elif isinstance(self._head_start, (float, int)):
value = torch.zeros(shape) + self._head_start
elif isinstance(self._head_start, Tensor):
value = self._head_start.clone()
else:
raise ClassiqQNNError(
f"Unsupported feature - head_start of type {type(self._head_start)}"
)
self.weight = Parameter(value)
def forward(self, x: Tensor) -> Tensor:
return QLayerFunction.apply( # type: ignore[no-untyped-call]
x, self.weight, self.quantum_program, self._execute, self._post_process
)
forward(self, x)
¶
Defines the computation performed at every call.
Should be overridden by all subclasses.
.. note::
Although the recipe for forward pass needs to be defined within
this function, one should call the :class:Module
instance afterwards
instead of this since the former takes care of running the
registered hooks while the latter silently ignores them.
Source code in classiq/applications/qnn/qlayer.py
def forward(self, x: Tensor) -> Tensor:
return QLayerFunction.apply( # type: ignore[no-untyped-call]
x, self.weight, self.quantum_program, self._execute, self._post_process
)
QLayerFunction (Function)
¶
Source code in classiq/applications/qnn/qlayer.py
class QLayerFunction(torch.autograd.Function):
@staticmethod
def forward( # type: ignore[override]
ctx,
inputs: Tensor,
weights: Tensor,
quantum_program: SerializedQuantumProgram,
execute: ExecuteFunction,
post_process: PostProcessFunction,
) -> Tensor:
"""
This function receives:
inputs: a 2D Tensor of floats - (batch_size, in_features)
weights: a 2D Tensor of floats - (out_features, num_weights)
circuit: a `GeneratedCircuit` object
execute: a function taking a `GeneratedCircuit` and `MultipleArguments`
and returning `MultipleExecutionDetails`
post_process: a function taking a single `ExecutionDetails`
and returning a `Tensor`
"""
circuit = Circuit.parse_raw(quantum_program)
validate_circuit(circuit)
# save for backward
ctx.save_for_backward(inputs, weights)
ctx.quantum_program = quantum_program
ctx.execute = execute
ctx.post_process = post_process
ctx.quantum_gradient = SimpleQuantumGradient(
quantum_program, execute, post_process
)
ctx.batch_size, ctx.num_in_features = inputs.shape
if is_single_layer_circuit(weights):
ctx.num_weights = weights.shape
else:
ctx.num_out_features, ctx.num_weights = weights.shape
# Todo: avoid computing `_get_extracted_parameters` on every `forward`
extracted_parameters = extract_parameters(circuit)
# Todo: avoid defining `convert_tensors_to_arguments` on every `forward`
def convert_tensors_to_arguments(
inputs_: Tensor, weights_: Tensor
) -> MultipleArguments:
arguments = map_parameters(
extracted_parameters,
inputs_,
weights_,
)
return (arguments,)
return iter_inputs_weights(
inputs,
weights,
convert_tensors_to_arguments,
functools.partial(execute, quantum_program),
post_process,
)
@staticmethod
def backward( # type: ignore[override]
ctx, grad_output: Tensor
) -> Tuple[Optional[Tensor], Optional[Tensor], None, None, None]:
"""
grad_output: Tensor
is of shape (ctx.batch_size, ctx.num_out_features)
"""
inputs, weights = ctx.saved_tensors
grad_weights = grad_inputs = None
grad_circuit = grad_execute = grad_post_process = None
is_single_layer = is_single_layer_circuit(weights)
if ctx.needs_input_grad[1]:
grad_weights = ctx.quantum_gradient.gradient_weights(inputs, weights)
grad_weights = einsum_weigths(grad_output, grad_weights, is_single_layer)
if ctx.needs_input_grad[0]:
grad_inputs = ctx.quantum_gradient.gradient_inputs(inputs, weights)
grad_inputs = einsum_inputs(grad_output, grad_inputs, is_single_layer)
if any(ctx.needs_input_grad[i] for i in (2, 3, 4)):
raise ClassiqTorchError(
f"Grad required for unknown type: {ctx.needs_input_grad}"
)
return grad_inputs, grad_weights, grad_circuit, grad_execute, grad_post_process
backward(ctx, grad_output)
staticmethod
¶
grad_output: a Tensor of shape (ctx.batch_size, ctx.num_out_features)
Source code in classiq/applications/qnn/qlayer.py
@staticmethod
def backward( # type: ignore[override]
ctx, grad_output: Tensor
) -> Tuple[Optional[Tensor], Optional[Tensor], None, None, None]:
"""
grad_output: Tensor
is of shape (ctx.batch_size, ctx.num_out_features)
"""
inputs, weights = ctx.saved_tensors
grad_weights = grad_inputs = None
grad_circuit = grad_execute = grad_post_process = None
is_single_layer = is_single_layer_circuit(weights)
if ctx.needs_input_grad[1]:
grad_weights = ctx.quantum_gradient.gradient_weights(inputs, weights)
grad_weights = einsum_weigths(grad_output, grad_weights, is_single_layer)
if ctx.needs_input_grad[0]:
grad_inputs = ctx.quantum_gradient.gradient_inputs(inputs, weights)
grad_inputs = einsum_inputs(grad_output, grad_inputs, is_single_layer)
if any(ctx.needs_input_grad[i] for i in (2, 3, 4)):
raise ClassiqTorchError(
f"Grad required for unknown type: {ctx.needs_input_grad}"
)
return grad_inputs, grad_weights, grad_circuit, grad_execute, grad_post_process
forward(ctx, inputs, weights, quantum_program, execute, post_process)
staticmethod
¶
This function receives:
inputs: a 2D Tensor of floats - (batch_size, in_features)
weights: a 2D Tensor of floats - (out_features, num_weights)
circuit: a GeneratedCircuit object
execute: a function taking a GeneratedCircuit and MultipleArguments
and returning MultipleExecutionDetails
post_process: a function taking a single ExecutionDetails
and returning a Tensor
Source code in classiq/applications/qnn/qlayer.py
@staticmethod
def forward( # type: ignore[override]
ctx,
inputs: Tensor,
weights: Tensor,
quantum_program: SerializedQuantumProgram,
execute: ExecuteFunction,
post_process: PostProcessFunction,
) -> Tensor:
"""
This function receives:
inputs: a 2D Tensor of floats - (batch_size, in_features)
weights: a 2D Tensor of floats - (out_features, num_weights)
circuit: a `GeneratedCircuit` object
execute: a function taking a `GeneratedCircuit` and `MultipleArguments`
and returning `MultipleExecutionDetails`
post_process: a function taking a single `ExecutionDetails`
and returning a `Tensor`
"""
circuit = Circuit.parse_raw(quantum_program)
validate_circuit(circuit)
# save for backward
ctx.save_for_backward(inputs, weights)
ctx.quantum_program = quantum_program
ctx.execute = execute
ctx.post_process = post_process
ctx.quantum_gradient = SimpleQuantumGradient(
quantum_program, execute, post_process
)
ctx.batch_size, ctx.num_in_features = inputs.shape
if is_single_layer_circuit(weights):
ctx.num_weights = weights.shape
else:
ctx.num_out_features, ctx.num_weights = weights.shape
# Todo: avoid computing `_get_extracted_parameters` on every `forward`
extracted_parameters = extract_parameters(circuit)
# Todo: avoid defining `convert_tensors_to_arguments` on every `forward`
def convert_tensors_to_arguments(
inputs_: Tensor, weights_: Tensor
) -> MultipleArguments:
arguments = map_parameters(
extracted_parameters,
inputs_,
weights_,
)
return (arguments,)
return iter_inputs_weights(
inputs,
weights,
convert_tensors_to_arguments,
functools.partial(execute, quantum_program),
post_process,
)
executor
¶
Executor module, implementing facilities for executing quantum programs using Classiq platform.
interface
special
¶
analyzer
special
¶
analysis_params
¶
AnalysisComparisonParams (BaseModel)
pydantic-model
¶
Source code in classiq/interface/analyzer/analysis_params.py
class AnalysisComparisonParams(pydantic.BaseModel):
property: ComparisonProperties = pydantic.Field(
default=...,
description="The comparison property used to select the best devices",
)
property: ComparisonProperties
pydantic-field
required
¶
The comparison property used to select the best devices
AnalysisOptionalDevicesParams (HardwareListParams)
pydantic-model
¶
Source code in classiq/interface/analyzer/analysis_params.py
class AnalysisOptionalDevicesParams(HardwareListParams):
qubit_count: int = pydantic.Field(
default=..., description="number of qubits in the data"
)
qubit_count: int
pydantic-field
required
¶
number of qubits in the data
ChemistryGenerationParams (BaseModel)
pydantic-model
¶
Source code in classiq/interface/analyzer/analysis_params.py
class ChemistryGenerationParams(pydantic.BaseModel):
molecule: MoleculeProblem = pydantic.Field(
default=...,
description="The molecule to generate the VQE ansatz for",
)
optimizer_preferences: OptimizerPreferences = pydantic.Field(
default=..., description="Execution options for the classical Optimizer"
)
def initial_point(self) -> Optional[numpy.ndarray]:
if self.optimizer_preferences.initial_point is not None:
return numpy.ndarray(self.optimizer_preferences.initial_point) # type: ignore
else:
return None
HardwareListParams (BaseModel)
pydantic-model
¶
Source code in classiq/interface/analyzer/analysis_params.py
class HardwareListParams(pydantic.BaseModel):
devices: Optional[List[PydanticNonEmptyString]] = pydantic.Field(
default=None, description="Devices"
)
providers: List[AnalyzerProviderVendor]
from_ide: bool = Field(default=False)
@pydantic.validator("providers", always=True)
def set_default_providers(cls, providers: Optional[List[AnalyzerProviderVendor]]):
if providers is None:
providers = list(AnalyzerProviderVendor)
return providers
devices: List[classiq.interface.helpers.custom_pydantic_types.ConstrainedStrValue]
pydantic-field
¶
Devices
HardwareParams (BaseModel)
pydantic-model
¶
Source code in classiq/interface/analyzer/analysis_params.py
class HardwareParams(pydantic.BaseModel):
device: PydanticNonEmptyString = pydantic.Field(default=None, description="Devices")
provider: AnalyzerProviderVendor
device: ConstrainedStrValue
pydantic-field
¶
Devices
cytoscape_graph
¶
CytoScapeEdge (BaseModel)
pydantic-model
¶
Source code in classiq/interface/analyzer/cytoscape_graph.py
class CytoScapeEdge(pydantic.BaseModel):
data: CytoScapeEdgeData = pydantic.Field(
default=..., description="Edge's Data, mainly the source and target of the Edge"
)
data: CytoScapeEdgeData
pydantic-field
required
¶
Edge's Data, mainly the source and target of the Edge
CytoScapeEdgeData (BaseModel)
pydantic-model
¶
Source code in classiq/interface/analyzer/cytoscape_graph.py
class CytoScapeEdgeData(pydantic.BaseModel):
source: str = pydantic.Field(
default=..., description="the Id of the Node that is the Source of the edge"
)
target: str = pydantic.Field(
default=..., description="the Id of the Node that is the Target the edge"
)
CytoScapeGraph (BaseModel)
pydantic-model
¶
Source code in classiq/interface/analyzer/cytoscape_graph.py
class CytoScapeGraph(pydantic.BaseModel):
nodes: List[CytoScapeNode] = pydantic.Field(
default_factory=list,
description="Nodes of the Graph",
)
edges: List[CytoScapeEdge] = pydantic.Field(
default_factory=list,
description="Edges of the Graph",
)
CytoScapeNode (BaseModel)
pydantic-model
¶
Source code in classiq/interface/analyzer/cytoscape_graph.py
class CytoScapeNode(pydantic.BaseModel):
data: Dict[str, Any] = pydantic.Field(
default=...,
description="Data of the Node, such as label, and color, can be of free form",
)
position: Optional[CytoScapePosition] = pydantic.Field(
default=..., description="Position of the Node to be rendered in Cytocape"
)
CytoScapePosition (BaseModel)
pydantic-model
¶
Source code in classiq/interface/analyzer/cytoscape_graph.py
class CytoScapePosition(pydantic.BaseModel):
x: int = pydantic.Field(
default=..., description="X coordinate in the Cytoscape View"
)
y: int = pydantic.Field(
default=..., description="Y coordinate in the Cytoscape View"
)
HardwareConnectivityGraphResult (VersionedModel)
pydantic-model
¶
Source code in classiq/interface/analyzer/cytoscape_graph.py
class HardwareConnectivityGraphResult(VersionedModel):
graph: Optional[CytoScapeGraph] = pydantic.Field(
default=...,
description="The Cytoscape graph in the desired Structure for the FE",
)
error: ConnectivityErrors = pydantic.Field(
default=ConnectivityErrors.EMPTY,
description="Any errors encountered while generating the graph",
)
result
¶
Analysis (VersionedModel)
pydantic-model
¶
Source code in classiq/interface/analyzer/result.py
class Analysis(VersionedModel):
input_properties: QuantumCircuitProperties = pydantic.Field(
default=..., description="Input circuit properties"
)
native_properties: NativeQuantumCircuitProperties = pydantic.Field(
default=..., description="Transpiled circuit properties"
)
pattern_analysis: Optional[PatternAnalysis] = pydantic.Field(
default=None,
description="Pattern analysis, including pattern matching and pattern recognition",
)
input_properties: QuantumCircuitProperties
pydantic-field
required
¶
Input circuit properties
native_properties: NativeQuantumCircuitProperties
pydantic-field
required
¶
Transpiled circuit properties
pattern_analysis: PatternAnalysis
pydantic-field
¶
Pattern analysis, including pattern matching and pattern recognition
AvailableHardware (BaseModel)
pydantic-model
¶
Source code in classiq/interface/analyzer/result.py
class AvailableHardware(pydantic.BaseModel):
ibm_quantum: Optional[Dict[PydanticNonEmptyString, bool]] = pydantic.Field(
default=None,
description="available IBM Quantum devices with boolean indicates if a given device has enough qubits.",
)
azure_quantum: Optional[Dict[PydanticNonEmptyString, bool]] = pydantic.Field(
default=None,
description="available Azure Quantum devices with boolean indicates if a given device has enough qubits.",
)
amazon_braket: Optional[Dict[PydanticNonEmptyString, bool]] = pydantic.Field(
default=None,
description="available Amazon Braket devices with boolean indicates if a given device has enough qubits.",
)
amazon_braket: Dict[classiq.interface.helpers.custom_pydantic_types.ConstrainedStrValue, bool]
pydantic-field
¶
available Amazon Braket devices with boolean indicates if a given device has enough qubits.
azure_quantum: Dict[classiq.interface.helpers.custom_pydantic_types.ConstrainedStrValue, bool]
pydantic-field
¶
available Azure Quantum devices with boolean indicates if a given device has enough qubits.
ibm_quantum: Dict[classiq.interface.helpers.custom_pydantic_types.ConstrainedStrValue, bool]
pydantic-field
¶
available IBM Quantum devices with boolean indicates if a given device has enough qubits.
HardwareComparisonInformation (BaseModel)
pydantic-model
¶
Source code in classiq/interface/analyzer/result.py
class HardwareComparisonInformation(pydantic.BaseModel):
devices: List[PydanticNonEmptyString] = pydantic.Field(
default=..., description="Device which is used for the transpilation."
)
providers: List[PydanticNonEmptyString] = pydantic.Field(
default=..., description="Provider cloud of the device."
)
depth: List[pydantic.NonNegativeInt] = pydantic.Field(
default=..., description="Circuit depth."
)
multi_qubit_gate_count: List[pydantic.NonNegativeInt] = pydantic.Field(
default=..., description="Number of multi qubit gates."
)
total_gate_count: List[pydantic.NonNegativeInt] = pydantic.Field(
default=..., description="Number of total gates."
)
@pydantic.root_validator
def validate_equal_length(cls, values: Dict[str, list]) -> Dict[str, list]:
lengths = list(map(len, values.values()))
if len(set(lengths)) != 1:
raise ValueError("All lists should have the same length")
return values
depth: List[pydantic.types.NonNegativeInt]
pydantic-field
required
¶
Circuit depth.
devices: List[classiq.interface.helpers.custom_pydantic_types.ConstrainedStrValue]
pydantic-field
required
¶
Device which is used for the transpilation.
multi_qubit_gate_count: List[pydantic.types.NonNegativeInt]
pydantic-field
required
¶
Number of multi qubit gates.
providers: List[classiq.interface.helpers.custom_pydantic_types.ConstrainedStrValue]
pydantic-field
required
¶
Provider cloud of the device.
total_gate_count: List[pydantic.types.NonNegativeInt]
pydantic-field
required
¶
Number of total gates.
NativeQuantumCircuitProperties (QuantumCircuitProperties)
pydantic-model
¶
Source code in classiq/interface/analyzer/result.py
class NativeQuantumCircuitProperties(QuantumCircuitProperties):
native_gates: Set[BasisGates] = pydantic.Field(
default=..., description="Native gates used for decomposition"
)
native_gates: Set[classiq.interface.analyzer.result.BasisGates]
pydantic-field
required
¶
Native gates used for decomposition
PatternAnalysis (BaseModel)
pydantic-model
¶
Source code in classiq/interface/analyzer/result.py
class PatternAnalysis(pydantic.BaseModel):
pattern_matching: Optional[PatternMatchingResult] = pydantic.Field(
default=..., description="Pattern matching algorithm"
)
pattern_recognition: Optional[PatternRecognitionResult] = pydantic.Field(
default=..., description="Find unknown patterns"
)
circuit: Circuit = pydantic.Field(
default=..., description="Quantum circuit after pattern analysis"
)
QuantumCircuitProperties (BaseModel)
pydantic-model
¶
Source code in classiq/interface/analyzer/result.py
class QuantumCircuitProperties(pydantic.BaseModel):
depth: pydantic.NonNegativeInt = pydantic.Field(
default=..., description="Circuit depth"
)
auxiliary_qubits: pydantic.NonNegativeInt = pydantic.Field(
default=..., description="Number of Auxiliary qubits"
)
classical_bits: pydantic.NonNegativeInt = pydantic.Field(
default=..., description="Number of classical bits"
)
gates_count: pydantic.NonNegativeInt = pydantic.Field(
default=..., description="Total number of gates in the circuit"
)
multi_qubit_gates_count: pydantic.NonNegativeInt = pydantic.Field(
default=..., description="Number of multi-qubit gates in circuit"
)
non_entangled_subcircuits_count: pydantic.NonNegativeInt = pydantic.Field(
default=..., description="Number of non-entangled sub-circuit "
)
entanglement_upper_bound: EntanglementAnalysisResult = pydantic.Field(
default=...,
description="An upper bound to the entanglement (measured by the Schmidt rank width) of states that can"
"generated by the circuit. None is returned if the entanglement analysis took too long to complete",
)
auxiliary_qubits: NonNegativeInt
pydantic-field
required
¶
Number of Auxiliary qubits
classical_bits: NonNegativeInt
pydantic-field
required
¶
Number of classical bits
depth: NonNegativeInt
pydantic-field
required
¶
Circuit depth
entanglement_upper_bound: EntanglementAnalysisResult
pydantic-field
required
¶
An upper bound to the entanglement (measured by the Schmidt rank width) of states that can be generated by the circuit. None is returned if the entanglement analysis took too long to complete
gates_count: NonNegativeInt
pydantic-field
required
¶
Total number of gates in the circuit
multi_qubit_gates_count: NonNegativeInt
pydantic-field
required
¶
Number of multi-qubit gates in circuit
non_entangled_subcircuits_count: NonNegativeInt
pydantic-field
required
¶
Number of non-entangled sub-circuits
SingleHardwareInformation (BaseModel)
pydantic-model
¶
Source code in classiq/interface/analyzer/result.py
class SingleHardwareInformation(pydantic.BaseModel):
devices: PydanticNonEmptyString = pydantic.Field(
default=..., description="Device which is used for the transpilation."
)
providers: PydanticNonEmptyString = pydantic.Field(
default=..., description="Provider cloud of the device."
)
depth: pydantic.NonNegativeInt = pydantic.Field(
default=..., description="Circuit depth."
)
multi_qubit_gate_count: pydantic.NonNegativeInt = pydantic.Field(
default=..., description="Number of multi qubit gates."
)
total_gate_count: pydantic.NonNegativeInt = pydantic.Field(
default=..., description="Number of total gates."
)
depth: NonNegativeInt
pydantic-field
required
¶
Circuit depth.
devices: ConstrainedStrValue
pydantic-field
required
¶
Device which is used for the transpilation.
multi_qubit_gate_count: NonNegativeInt
pydantic-field
required
¶
Number of multi qubit gates.
providers: ConstrainedStrValue
pydantic-field
required
¶
Provider cloud of the device.
total_gate_count: NonNegativeInt
pydantic-field
required
¶
Number of total gates.
backend
special
¶
backend_preferences
¶
AwsBackendPreferences (BackendPreferences)
pydantic-model
¶
Source code in classiq/interface/backend/backend_preferences.py
class AwsBackendPreferences(BackendPreferences):
backend_service_provider: ProviderTypeVendor.AMAZON_BRAKET
# Allow running any backend supported by the vendor
backend_name: Union[AmazonBraketBackendNames, str]
aws_role_arn: pydantic_backend.PydanticAwsRoleArn = pydantic.Field(
description="ARN of the role to be assumed for execution on your Braket account."
)
s3_bucket_name: str = pydantic.Field(description="S3 Bucket Name")
s3_folder: pydantic_backend.PydanticS3BucketKey = pydantic.Field(
description="S3 Folder Path Within The S3 Bucket"
)
job_timeout: pydantic_backend.PydanticExecutionTimeout = pydantic.Field(
description="Timeout for Jobs sent for execution in seconds.",
default=AWS_DEFAULT_JOB_TIMEOUT_SECONDS,
)
@validator("s3_bucket_name")
def _validate_s3_bucket_name(
cls, s3_bucket_name: str, values: Dict[str, Any]
) -> str:
s3_bucket_name = s3_bucket_name.strip()
if not s3_bucket_name.startswith("amazon-braket-"):
raise ValueError('S3 bucket name should start with "amazon-braket-"')
return s3_bucket_name
@pydantic.root_validator(pre=True)
def _set_backend_service_provider(cls, values: Dict[str, Any]) -> Dict[str, Any]:
return values_with_discriminator(
values, "backend_service_provider", ProviderVendor.AMAZON_BRAKET
)
aws_role_arn: ConstrainedStrValue
pydantic-field
required
¶
ARN of the role to be assumed for execution on your Braket account.
job_timeout: ConstrainedIntValue
pydantic-field
¶
Timeout for Jobs sent for execution in seconds.
s3_bucket_name: str
pydantic-field
required
¶
S3 Bucket Name
s3_folder: ConstrainedStrValue
pydantic-field
required
¶
S3 Folder Path Within The S3 Bucket
AzureBackendPreferences (BackendPreferences)
pydantic-model
¶
Source code in classiq/interface/backend/backend_preferences.py
class AzureBackendPreferences(BackendPreferences):
backend_service_provider: ProviderTypeVendor.AZURE_QUANTUM
# Allow running any backend supported by the vendor
backend_name: Union[AzureQuantumBackendNames, str]
location: str = pydantic.Field(
default="East US", description="Azure personal resource region"
)
credentials: Optional[AzureCredential] = pydantic.Field(
default=None,
description="The service principal credential to access personal quantum workspace",
)
@property
def run_through_classiq(self) -> bool:
return self.credentials is None
@pydantic.root_validator(pre=True)
def _set_backend_service_provider(cls, values: Dict[str, Any]) -> Dict[str, Any]:
return values_with_discriminator(
values, "backend_service_provider", ProviderVendor.AZURE_QUANTUM
)
AzureCredential (BaseSettings)
pydantic-model
¶
Source code in classiq/interface/backend/backend_preferences.py
class AzureCredential(pydantic.BaseSettings):
tenant_id: str = pydantic.Field(description="Azure Tenant ID")
client_id: str = pydantic.Field(description="Azure Client ID")
client_secret: str = pydantic.Field(description="Azure Client Secret")
resource_id: pydantic_backend.PydanticAzureResourceIDType = pydantic.Field(
description="Azure Resource ID (including Azure subscription ID, resource "
"group and workspace), for personal resource",
)
class Config:
title = "Azure Service Principal Credential"
env_prefix = "AZURE_"
case_sensitive = False
client_id: str
pydantic-field
required
¶
Azure Client ID
client_secret: str
pydantic-field
required
¶
Azure Client Secret
resource_id: ConstrainedStrValue
pydantic-field
required
¶
Azure Resource ID (including Azure subscription ID, resource group and workspace), for personal resource
tenant_id: str
pydantic-field
required
¶
Azure Tenant ID
BackendPreferences (BaseModel)
pydantic-model
¶
Source code in classiq/interface/backend/backend_preferences.py
class BackendPreferences(BaseModel):
# Due to the way the field is currently implemented, i.e. it redefined with different types
# in the subclass, it shouldn't be dumped with exclude_unset. This causes this field not to appear.
# For example: don't use obj.dict(exclude_unset=True).
backend_service_provider: str = pydantic.Field(
..., description="Provider company or cloud for the requested backend."
)
backend_name: str = pydantic.Field(
..., description="Name of the requested backend or target."
)
@classmethod
def batch_preferences(
cls, *, backend_names: Iterable[str], **kwargs
) -> List[BackendPreferences]:
return [cls(backend_name=name, **kwargs) for name in backend_names]
IBMBackendPreferences (BackendPreferences)
pydantic-model
¶
Source code in classiq/interface/backend/backend_preferences.py
class IBMBackendPreferences(BackendPreferences):
backend_service_provider: ProviderTypeVendor.IBM_QUANTUM
backend_name: Union[AllIBMQBackendNames, str]
access_token: Optional[str] = pydantic.Field(
default=None,
description="IBM Quantum access token to be used"
" with IBM Quantum hosted backends",
)
provider: IBMBackendProvider = pydantic.Field(
default_factory=IBMBackendProvider,
description="Provider specs. for identifying a single IBM Quantum provider.",
)
@pydantic.root_validator(pre=True)
def _set_backend_service_provider(cls, values: Dict[str, Any]) -> Dict[str, Any]:
return values_with_discriminator(
values, "backend_service_provider", ProviderVendor.IBM_QUANTUM
)
IonqBackendPreferences (BackendPreferences)
pydantic-model
¶
Source code in classiq/interface/backend/backend_preferences.py
class IonqBackendPreferences(BackendPreferences):
backend_service_provider: ProviderTypeVendor.IONQ
backend_name: IonqBackendNames = pydantic.Field(
default=IonqBackendNames.SIMULATOR,
description="IonQ backend for quantum programs execution.",
)
api_key: pydantic_backend.PydanticIonQApiKeyType = pydantic.Field(
..., description="IonQ API key"
)
@pydantic.root_validator(pre=True)
def _set_backend_service_provider(cls, values: Dict[str, Any]) -> Dict[str, Any]:
return values_with_discriminator(
values, "backend_service_provider", ProviderVendor.IONQ
)
api_key: ConstrainedStrValue
pydantic-field
required
¶
IonQ API key
chemistry
special
¶
fermionic_operator
¶
FermionicOperator (HashablePydanticBaseModel)
pydantic-model
¶
Specification of a Fermionic operator. Input: List of ladder operators, each ladder operator is described by a tuple of its index and a character indicating if it's a creation ('+') or annihilation operator ('-').
Source code in classiq/interface/chemistry/fermionic_operator.py
class FermionicOperator(HashablePydanticBaseModel):
    """
    Specification of a Fermionic operator (a product of ladder operators).

    Input:
        List of ladder operators. Each ladder operator is described by a tuple
        whose first element is a character indicating whether it is a creation
        ('+') or annihilation ('-') operator, and whose second element is its
        integer index; for example [('+', 0), ('-', 1)].
    """
    op_list: list = pydantic.Field(
        description="A list of tuples each containing an index and a character; for example [('+', 0), ('-', 1)].",
    )
    @staticmethod
    def _validate_single_op(op: tuple) -> LadderOperator:
        """Validate one ladder-operator entry, coercing other sequences to tuples.

        Raises:
            ValueError: if the entry is not a 2-tuple of ('+' or '-', int).
        """
        if not isinstance(op, tuple):
            # Accept any iterable (e.g. a list coming from JSON) by converting it.
            try: # type: ignore[unreachable] # it is reachable...
                op = tuple(op)
            except Exception as exc:
                raise ValueError("Ladder operator should be a tuple.") from exc
        if len(op) != 2:
            raise ValueError(
                "Ladder operator tuple should be of length two; for example (1, '+')."
            )
        if op[0] not in ("+", "-"):
            raise ValueError(
                "The first term in a ladder operator tuple indicates if its a raising ('+')"
                f" or lowering ('-') operator. Allowed input is: '+' or '-', received {op[0]}"
            )
        if not isinstance(op[1], int):
            raise ValueError(
                "The second term in a ladder operator tuple indicates its index and should be of type int"
            )
        return op # type: ignore[return-value] # mypy thinks that it is `Tuple[Any, ...]`, though the checks above tell otherwise..
    @pydantic.validator("op_list")
    def _validate_op_list(cls, op_list: list) -> list:
        # Normalize and validate every entry of op_list.
        return list(map(cls._validate_single_op, op_list))
    def __mul__(self, coeff: Union[float, int]) -> SummedFermionicOperator:
        """Multiplying by a number yields a single-term SummedFermionicOperator."""
        if isinstance(coeff, (float, int)):
            return SummedFermionicOperator(op_list=[(self, float(coeff))])
        raise ValueError(
            "The coefficient multiplying Fermionic Operator should be of type float"
        )
    # Scalar multiplication is commutative, so `coeff * op` reuses __mul__.
    __rmul__ = __mul__
    def __add__(
        self, other: Union[SummedFermionicOperator, FermionicOperator]
    ) -> SummedFermionicOperator:
        """Adding operators always produces a SummedFermionicOperator (coeff 1.0)."""
        if isinstance(other, SummedFermionicOperator):
            return SummedFermionicOperator(op_list=[(self, 1.0)] + other.op_list)
        elif isinstance(other, FermionicOperator):
            return SummedFermionicOperator(op_list=[(self, 1.0)] + [(other, 1.0)])
        raise ValueError(
            "FermionicOperator can be summed together only with type FermionicOperator or SummedFermionicOperator"
        )
    class Config:
        frozen = True  # immutable, hence hashable, model
    @staticmethod
    def _to_ladder_op(char: str) -> str:
        # Render 'a' with a superscript '+' for creation, plain 'a' otherwise.
        return "a" + _SUPERSCRIPT_PLUS if char == "+" else "a"
    @staticmethod
    def _to_subscript(num: int) -> str:
        # Convert each decimal digit of the index to its unicode subscript form.
        return "".join(_SUBSCRIPT_UNICODE_CHARS[digit] for digit in str(num))
    def __str__(self) -> str:
        """Return str(self): a unicode rendering of the ladder-operator product."""
        return "".join(
            f"{self._to_ladder_op(char)}{self._to_subscript(index)}"
            for (char, index) in self.op_list
        )
    @property
    def all_indices(self) -> Set[int]:
        # Set of all mode indices appearing in this operator.
        return {op[1] for op in self.op_list}
op_list: list
pydantic-field
required
¶
A list of tuples each containing an index and a character; for example [('+', 0), ('-', 1)].
__str__(self)
special
¶
Return str(self).
Source code in classiq/interface/chemistry/fermionic_operator.py
def __str__(self) -> str:
    """Return str(self): a unicode rendering of the ladder-operator product."""
    return "".join(
        f"{self._to_ladder_op(char)}{self._to_subscript(index)}"
        for (char, index) in self.op_list
    )
SummedFermionicOperator (HashablePydanticBaseModel)
pydantic-model
¶
Specification of a summed Fermionic operator. Input: List of fermionic operator tuples; the first term in each tuple is the FermionicOperator and the second term is its coefficient. For example: op1 = FermionicOperator(op_list=[('+', 0), ('-', 1)]) op2 = FermionicOperator(op_list=[('-', 0), ('-', 1)]) summed_operator = SummedFermionicOperator(op_list=[(op1, 0.2), (op2, 6.7)])
Source code in classiq/interface/chemistry/fermionic_operator.py
class SummedFermionicOperator(HashablePydanticBaseModel):
    """
    Specification of a summed Fermionic operator.

    Input:
        List of fermionic operator tuples; the first term in each tuple is the
        FermionicOperator and the second term is its (float) coefficient.
    For example:
        op1 = FermionicOperator(op_list=[('+', 0), ('-', 1)])
        op2 = FermionicOperator(op_list=[('-', 0), ('-', 1)])
        summed_operator = SummedFermionicOperator(op_list=[(op1, 0.2), (op2, 6.7)])
    """
    op_list: list = pydantic.Field(
        description="A list of tuples each containing a FermionicOperator and a coefficient.",
    )
    class Config:
        frozen = True  # immutable, hence hashable, model
    @staticmethod
    def _validate_single_op(op: tuple) -> FermionicOperatorTuple:
        """Validate one (operator, coefficient) entry, coercing where possible.

        Raises:
            ValueError: if the entry is not a (FermionicOperator, float) pair.
        """
        # is it tuple - if not, convert to tuple
        if not isinstance(op, tuple):
            try: # type: ignore[unreachable] # it is reachable...
                op = tuple(op)
            except Exception as exc:
                raise ValueError("Operator should be a tuple.") from exc
        if len(op) != 2:
            raise ValueError("Operator tuple should be of length two.")
        # is it FermionicOperator - if not, convert to FermionicOperator
        if type(op[0]) is not FermionicOperator:
            try:
                # Assumes the raw value is a mapping of constructor kwargs
                # (e.g. the dict form produced by serialization).
                op = (FermionicOperator(**op[0]), op[1])
            except Exception as exc:
                raise ValueError(
                    "The first term in the operator tuple should be an instance of the FermionicOperator class"
                ) from exc
        if type(op[1]) is not float:
            raise ValueError(
                "The second term in the operator tuple indicates its coefficient and should be of type float"
            )
        return op # type: ignore[return-value] # mypy thinks that it is `Tuple[Any, ...]`, though the checks above tell otherwise..
    @pydantic.validator("op_list")
    def _validate_op_list(cls, op_list: list) -> list:
        # Validate/normalize every (operator, coefficient) entry.
        return list(map(cls._validate_single_op, op_list))
    def __add__(
        self, other: Union[SummedFermionicOperator, FermionicOperator]
    ) -> SummedFermionicOperator:
        """Concatenate terms; a bare FermionicOperator joins with coefficient 1.0."""
        if isinstance(other, SummedFermionicOperator):
            return SummedFermionicOperator(op_list=self.op_list + other.op_list)
        elif isinstance(other, FermionicOperator):
            return SummedFermionicOperator(op_list=self.op_list + [(other, 1.0)])
        raise ValueError(
            "FermionicOperator can be summed together only with type FermionicOperator or SummedFermionicOperator"
        )
    def is_close(self, other: SummedFermionicOperator) -> bool:
        """Term-by-term comparison, with np.isclose on the coefficients.

        Note: terms are compared positionally, not as multisets.
        """
        if not isinstance(other, SummedFermionicOperator):
            return False  # type: ignore[unreachable]
        if len(self.op_list) != len(other.op_list):
            return False
        for (op1, coeff1), (op2, coeff2) in zip(self.op_list, other.op_list):
            if op1 != op2 or not np.isclose(coeff1, coeff2):
                return False
        return True
    @property
    def _all_indices(self) -> Set[int]:
        # Union of the mode indices used by all terms.
        return set(
            itertools.chain.from_iterable(op.all_indices for op, _ in self.op_list)
        )
    @property
    def num_qubits(self) -> int:
        # Number of distinct mode indices appearing in the operator.
        return len(self._all_indices)
    def __str__(self) -> str:
        # One line per term: "<coefficient> * <operator>", joined with " + ".
        return " + \n".join(str(op[1]) + " * " + str(op[0]) for op in self.op_list)
op_list: list
pydantic-field
required
¶
A list of tuples each containing a FermionicOperator and a coefficient.
__str__(self)
special
¶
Return str(self).
Source code in classiq/interface/chemistry/fermionic_operator.py
def __str__(self) -> str:
    """Return str(self): one "<coefficient> * <operator>" per line, joined with " + "."""
    return " + \n".join(str(op[1]) + " * " + str(op[0]) for op in self.op_list)
ground_state_problem
¶
GroundStateProblem (HashablePydanticBaseModel)
pydantic-model
¶
Source code in classiq/interface/chemistry/ground_state_problem.py
class GroundStateProblem(HashablePydanticBaseModel):
    """Base class for ground-state problems: fermion mapping and symmetry options."""
    mapping: FermionMapping = pydantic.Field(
        default=FermionMapping.JORDAN_WIGNER, description="Fermionic mapping type"
    )
    z2_symmetries: bool = pydantic.Field(
        default=False,
        description="whether to perform z2 symmetries reduction",
    )
    # Optional explicit qubit count; None means it is determined elsewhere.
    num_qubits: Optional[int] = pydantic.Field(default=None)
    @pydantic.validator("z2_symmetries")
    def _validate_z2_symmetries(
        cls, z2_symmetries: bool, values: Dict[str, Any]
    ) -> bool:
        # Z2 symmetry reduction is incompatible with the fast Bravyi-Kitaev
        # mapping; `values` holds the previously validated fields ("mapping").
        if z2_symmetries and values.get("mapping") == FermionMapping.FAST_BRAVYI_KITAEV:
            raise ValueError(
                "z2 symmetries reduction can not be used for fast_bravyi_kitaev mapping"
            )
        return z2_symmetries
    class Config:
        frozen = True  # immutable, hence hashable, model
HamiltonianProblem (GroundStateProblem)
pydantic-model
¶
Source code in classiq/interface/chemistry/ground_state_problem.py
class HamiltonianProblem(GroundStateProblem):
    """Ground-state problem defined directly by a fermionic Hamiltonian."""
    hamiltonian: SummedFermionicOperator = pydantic.Field(
        description="Hamiltonian as a fermionic operator"
    )
    num_particles: List[pydantic.PositiveInt] = pydantic.Field(
        description="Tuple containing the numbers of alpha particles and beta particles"
    )
    @pydantic.validator("num_particles")
    def _validate_num_particles(cls, num_particles: List[int]) -> List[int]:
        """Ensure num_particles is a pair of positive integers.

        Raises ValueError instead of using `assert`, so the validation still
        runs under `python -O` and pydantic reports it as a regular
        validation error.
        """
        if not isinstance(num_particles, list):
            raise ValueError("num_particles must be a list")
        if len(num_particles) != 2:
            raise ValueError(
                "num_particles must contain exactly two entries: "
                "the numbers of alpha particles and beta particles"
            )
        # This probably will never happen, since pydantic automatically converts
        # floats to ints
        for count in num_particles:
            if not isinstance(count, int) or count < 1:
                raise ValueError("particle numbers must be positive integers")
        return num_particles
MoleculeProblem (GroundStateProblem)
pydantic-model
¶
Source code in classiq/interface/chemistry/ground_state_problem.py
class MoleculeProblem(GroundStateProblem):
    """Ground-state problem specified by a molecule and a basis set."""
    molecule: molecule.Molecule
    basis: str = pydantic.Field(default="sto3g", description="Molecular basis set")
    # NOTE(review): presumably freezes core orbitals when True — confirm backend semantics.
    freeze_core: bool = pydantic.Field(default=False)
    remove_orbitals: List[int] = pydantic.Field(
        default_factory=list, description="list of orbitals to remove"
    )
molecule
¶
Molecule (HashablePydanticBaseModel)
pydantic-model
¶
Source code in classiq/interface/chemistry/molecule.py
class Molecule(HashablePydanticBaseModel):
    """A molecule: a list of atoms plus its total spin and charge."""
    atoms: List[AtomType] = pydantic.Field(
        description="A list of atoms each containing the string of the atom's symbol and a list of its (x,y,z) location; for example [('H', (0.0, 0.0, 0.0)), ('H', (0.0, 0.0, 0.735))]."
    )
    spin: pydantic.NonNegativeInt = pydantic.Field(
        default=1, description="spin of the molecule"
    )
    charge: pydantic.NonNegativeInt = pydantic.Field(
        default=0, description="charge of the molecule"
    )
    @pydantic.validator("atoms", each_item=True)
    def _validate_atoms(cls, atom: AtomType) -> AtomType:
        """Validate a single (symbol, (x, y, z)) atom entry.

        Raises:
            ValueError: if the entry is malformed, the element symbol is
                unknown, or any coordinate is not numeric.
        """
        if len(atom) != 2:
            raise ValueError(
                # Fixed typos in the user-facing message ("pf the elemnt").
                "each atom should be a list of two entries: 1) name of the element (str) 2) list of its (x,y,z) location"
            )
        if type(atom[0]) is not str:
            raise ValueError(
                f"atom name should be a string. unknown element: {atom[0]}."
            )
        if atom[0] not in ELEMENTS:
            raise ValueError(f"unknown element: {atom[0]}.")
        if len(atom[1]) != 3:
            raise ValueError(
                f"location of the atom is of length three, representing the (x,y,z) coordinates of the atom, error value: {atom[1]}"
            )
        # Deliberate exact type checks: bool (a subclass of int) is rejected.
        for idx in atom[1]:
            if type(idx) is not float and type(idx) is not int:
                raise ValueError(
                    f"coordinates of the atom should be of type float. error value: {idx}"
                )
        return atom
    class Config:
        frozen = True  # immutable, hence hashable, model
atoms: List[types.ConstrainedListValue]
pydantic-field
required
¶
A list of atoms each containing the string of the atom's symbol and a list of its (x,y,z) location; for example [('H', (0.0, 0.0, 0.0)), ('H', (0.0, 0.0, 0.735))].
charge: NonNegativeInt
pydantic-field
¶
charge of the molecule
spin: NonNegativeInt
pydantic-field
¶
spin of the molecule
operator
¶
PauliOperator (HashablePydanticBaseModel, VersionedModel)
pydantic-model
¶
Specification of a Pauli sum operator.
Source code in classiq/interface/chemistry/operator.py
class PauliOperator(HashablePydanticBaseModel, VersionedModel):
    """
    Specification of a Pauli sum operator.

    Each summand is a tuple of a Pauli string (characters I, X, Y, Z) and a
    complex coefficient; all strings must have the same length (= num_qubits).
    """
    pauli_list: PydanticPauliList = pydantic.Field(
        description="A list of tuples each containing a pauli string comprised of I,X,Y,Z characters and a complex coefficient; for example [('IZ', 0.1), ('XY', 0.2)].",
    )
    # Set by the root validator: True iff every coefficient is numerically real.
    is_hermitian: bool = pydantic.Field(default=False)
    def show(self) -> str:
        """Return a human-readable, one-summand-per-line representation."""
        if self.is_hermitian:
            # Hermitian operators have real coefficients; print them as reals.
            return "\n".join(
                f"{summand[1].real:+.3f} * {summand[0]}" for summand in self.pauli_list
            )
        return "\n".join(
            f"+({summand[1]:+.3f}) * {summand[0]}" for summand in self.pauli_list
        )
    @pydantic.validator("pauli_list", each_item=True)
    def _validate_pauli_monomials(
        cls, monomial: Tuple[PydanticPauliMonomialStr, complex]
    ) -> Tuple[PydanticPauliMonomialStr, complex]:
        """Parse and validate a single (pauli-string, coefficient) monomial."""
        _PauliMonomialLengthValidator(  # type: ignore[call-arg]
            monomial=monomial
        )  # Validate the length of the monomial.
        parsed_monomial = _PauliMonomialParser(string=monomial[0], coeff=monomial[1])  # type: ignore[call-arg]
        return (parsed_monomial.string, parsed_monomial.coeff)
    @pydantic.validator("pauli_list")
    def _validate_pauli_list(cls, pauli_list: PydanticPauliList) -> PydanticPauliList:
        # All Pauli strings must act on the same number of qubits.
        if not all_equal(len(summand[0]) for summand in pauli_list):
            raise ValueError("Pauli strings have incompatible lengths.")
        return pauli_list
    @pydantic.root_validator
    def _validate_hermitianity(cls, values: Dict[str, Any]) -> Dict[str, Any]:
        # Hermitian iff every coefficient is (numerically) real; in that case
        # drop the imaginary parts so the stored coefficients are exact reals.
        pauli_list = values.get("pauli_list", [])
        values["is_hermitian"] = all(
            np.isclose(complex(summand[1]).real, summand[1]) for summand in pauli_list
        )
        if values.get("is_hermitian", False):
            values["pauli_list"] = [
                (summand[0], complex(summand[1].real)) for summand in pauli_list
            ]
        return values
    def __mul__(self, coefficient: complex) -> "PauliOperator":
        """Return a new operator with every coefficient scaled by `coefficient`."""
        multiplied_ising = [
            (monomial[0], monomial[1] * coefficient) for monomial in self.pauli_list
        ]
        return self.__class__(pauli_list=multiplied_ising)
    @property
    def is_commutative(self) -> bool:
        # True iff, per qubit, all summands use at most one non-identity Pauli.
        return all(
            self._is_sub_pauli_commutative(
                [summand[0][qubit_num] for summand in self.pauli_list]
            )
            for qubit_num in range(self.num_qubits)
        )
    @staticmethod
    def _is_sub_pauli_commutative(qubit_pauli_string: Union[List[str], str]) -> bool:
        # A single-qubit column commutes with itself iff it contains at most
        # one distinct non-identity Pauli.
        unique_paulis = set(qubit_pauli_string) - {"I"}
        return len(unique_paulis) <= 1
    @property
    def num_qubits(self) -> int:
        # Length of any Pauli string (all have equal length after validation).
        return len(self.pauli_list[0][0])
    def to_matrix(self) -> np.ndarray:
        """Return the dense matrix representation (2**n x 2**n)."""
        return sum(
            summand[1] * to_pauli_matrix(summand[0]) for summand in self.pauli_list
        )  # type: ignore[return-value]
    @staticmethod
    def _extend_pauli_string(
        pauli_string: PydanticPauliMonomialStr, num_extra_qubits: int
    ) -> PydanticPauliMonomialStr:
        # Prepend identities, i.e. the new qubits act trivially.
        return "I" * num_extra_qubits + pauli_string
    def extend(self, num_extra_qubits: int) -> "PauliOperator":
        """Return a copy acting with identity on `num_extra_qubits` added qubits."""
        new_pauli_list = [
            (self._extend_pauli_string(pauli_string, num_extra_qubits), coeff)
            for (pauli_string, coeff) in self.pauli_list
        ]
        return self.copy(update={"pauli_list": new_pauli_list}, deep=True)
    @staticmethod
    def _reorder_pauli_string(
        pauli_string: PydanticPauliMonomialStr,
        order: Collection[int],
        new_num_qubits: int,
    ) -> PydanticPauliMonomialStr:
        # Qubit 0 is the last character of the string (hence the reversals):
        # reverse, scatter by `order`, then reverse back.
        reversed_pauli_string = pauli_string[::-1]
        reversed_new_pauli_string = ["I"] * new_num_qubits
        for logical_pos, actual_pos in enumerate(order):
            reversed_new_pauli_string[actual_pos] = reversed_pauli_string[logical_pos]
        return "".join(reversed(reversed_new_pauli_string))
    @staticmethod
    def _validate_reorder(
        order: Collection[int],
        num_qubits: int,
        num_extra_qubits: int,
    ) -> None:
        """Check that `order` is a one-to-one map of the existing qubits into
        the (possibly enlarged) qubit range.

        Raises:
            ValueError: on any inconsistency.
        """
        if num_extra_qubits < 0:
            raise ValueError("Number of extra qubits cannot be negative")
        if len(order) != num_qubits:
            raise ValueError("The qubits order doesn't match the Pauli operator")
        if len(order) != len(set(order)):
            raise ValueError("The qubits order is not one-to-one")
        if not all(pos < num_qubits + num_extra_qubits for pos in order):
            # Fixed typo in the user-facing message ("do no exist").
            raise ValueError("The qubits order contains qubits which do not exist")
    @classmethod
    def reorder(
        cls,
        operator: "PauliOperator",
        order: Collection[int],
        num_extra_qubits: int = 0,
    ) -> "PauliOperator":
        """Return `operator` with its qubits permuted by `order`, optionally padded."""
        cls._validate_reorder(order, operator.num_qubits, num_extra_qubits)
        new_num_qubits = operator.num_qubits + num_extra_qubits
        new_pauli_list = [
            (cls._reorder_pauli_string(pauli_string, order, new_num_qubits), coeff)
            for pauli_string, coeff in operator.pauli_list
        ]
        return cls(pauli_list=new_pauli_list)
    @classmethod
    def from_unzipped_lists(
        cls,
        operators: List[List[Pauli]],
        coefficients: Optional[List[complex]] = None,
    ) -> "PauliOperator":
        """Build an operator from parallel lists of Pauli terms and coefficients.

        Raises:
            ValueError: if the two lists have different lengths.
        """
        if coefficients is None:
            coefficients = [1] * len(operators)
        if len(operators) != len(coefficients):
            raise ValueError(
                f"The number of coefficients ({len(coefficients)}) must be equal to the number of pauli operators ({len(operators)})"
            )
        return cls(
            pauli_list=[
                (pauli_integers_to_str(op), coeff)
                for op, coeff in zip(operators, coefficients)
            ]
        )
    class Config:
        frozen = True  # immutable, hence hashable, model
pauli_list: ConstrainedListValue
pydantic-field
required
¶
A list of tuples each containing a pauli string comprised of I,X,Y,Z characters and a complex coefficient; for example [('IZ', 0.1), ('XY', 0.2)].
combinatorial_optimization
special
¶
mht_qaoa_input
¶
MhtQaoaInput (BaseModel)
pydantic-model
¶
Source code in classiq/interface/combinatorial_optimization/mht_qaoa_input.py
class MhtQaoaInput(BaseModel):
    """QAOA input for a Multiple-Hypothesis-Tracking (MHT) problem."""
    reps: pydantic.PositiveInt = pydantic.Field(
        default=3, description="Number of QAOA layers."
    )
    plot_list: List[PlotData] = pydantic.Field(
        description="The list of (x,y,t) plots of the MHT problem."
    )
    misdetection_maximum_time_steps: pydantic.NonNegativeInt = pydantic.Field(
        default=0,
        description="The maximum number of time steps a target might be misdetected.",
    )
    penalty_energy: float = pydantic.Field(
        default=2,
        description="Penalty energy for invalid solutions. The value affects "
        "the converges rate. Small positive values are preferred",
    )
    three_local_coeff: float = pydantic.Field(
        default=0,
        description="Coefficient for the 3-local terms in the Hamiltonian. It is related to the angular acceleration.",
    )
    one_local_coeff: float = pydantic.Field(
        default=0, description="Coefficient for the 1-local terms in the Hamiltonian."
    )
    is_penalty: bool = pydantic.Field(
        default=True, description="Build Pubo using penalty terms"
    )
    max_velocity: float = pydantic.Field(
        default=0, description="Max allowed velocity for a segment"
    )
    def is_valid_cost(self, cost: float) -> bool:
        # Every cost is considered valid for this problem type.
        return True
    @pydantic.validator("plot_list")
    def round_plot_list_times_and_validate(cls, plot_list):
        """Round time stamps in place, then require unique plot IDs and a
        uniform spacing between consecutive time stamps."""
        MhtQaoaInput._check_all_ids_are_distinct(plot_list)
        MhtQaoaInput._round_to_tolerance_decimals(plot_list)
        time_stamps = sorted({plot.t for plot in plot_list})
        # Set of consecutive time differences; must collapse to a single value.
        time_diff_set = {
            np.round(time_stamps[i] - time_stamps[i - 1], decimals=_TOLERANCE_DECIMALS)
            for i in range(1, len(time_stamps))
        }
        # NOTE(review): fewer than two distinct time stamps also fails this
        # check (empty diff set) — confirm that is intended.
        if len(time_diff_set) != 1:
            raise ValueError("The time difference between each time stamp is not equal")
        return plot_list
    @staticmethod
    def _round_to_tolerance_decimals(plot_list: List[PlotData]) -> None:
        # In-place rounding of every plot's time stamp.
        for plot in plot_list:
            plot.t = np.round(plot.t, decimals=_TOLERANCE_DECIMALS)
    @staticmethod
    def _check_all_ids_are_distinct(plot_list: List[PlotData]) -> None:
        # Plot IDs must be pairwise distinct.
        if not more_itertools.all_unique(plot.plot_id for plot in plot_list):
            raise ValueError("Plot IDs should be unique.")
is_penalty: bool
pydantic-field
¶
Build Pubo using penalty terms
max_velocity: float
pydantic-field
¶
Max allowed velocity for a segment
misdetection_maximum_time_steps: NonNegativeInt
pydantic-field
¶
The maximum number of time steps a target might be misdetected.
one_local_coeff: float
pydantic-field
¶
Coefficient for the 1-local terms in the Hamiltonian.
penalty_energy: float
pydantic-field
¶
Penalty energy for invalid solutions. The value affects the converges rate. Small positive values are preferred
plot_list: List[classiq.interface.combinatorial_optimization.mht_qaoa_input.PlotData]
pydantic-field
required
¶
The list of (x,y,t) plots of the MHT problem.
reps: PositiveInt
pydantic-field
¶
Number of QAOA layers.
three_local_coeff: float
pydantic-field
¶
Coefficient for the 3-local terms in the Hamiltonian. It is related to the angular acceleration.
PlotData (BaseModel)
pydantic-model
¶
Source code in classiq/interface/combinatorial_optimization/mht_qaoa_input.py
class PlotData(BaseModel):
    """A single detection ("plot"): a 2-D position, a time stamp, and an ID."""
    # We are currently ignoring units. This might need to be handled in the future
    x: float = pydantic.Field(description="The X coordinate of this plot")
    y: float = pydantic.Field(description="The Y coordinate of this plot")
    t: float = pydantic.Field(description="The time stamp of this plot")
    plot_id: pydantic.NonNegativeInt = pydantic.Field(
        description="The plot ID of this plot"
    )
model_io_comon
¶
Functions for saving and loading Pyomo objects to json
StoreSpec
¶
A StoreSpec object tells the serializer functions what to read or write. The default settings will produce a StoreSpec configured to load/save the typical attributes required to load/save a model state.
Parameters:
Name | Type | Description | Default |
---|---|---|---|
classes |
A list of classes to save. Each class is represented by a list (or tuple) containing the following elements: (1) class (compared using isinstance) (2) attribute list or None, an empty list stores the object, but none of its attributes, None will not store objects of this class type (3) optional load filter function. The load filter function returns a list of attributes to read based on the state of an object and its saved state. This allows, for example, loading values for unfixed variables, or only loading values whose current value is less than one. The filter function only applies to load not save. Filter functions take two arguments (a) the object (current state) and (b) the dictionary containing the saved state of an object. More specific classes should come before more general classes. For example if an object is a HeatExchanger and a UnitModel, and HeatExchanger is listed first, it will follow the HeatExchanger settings. If UnitModel is listed first in the classes list, it will follow the UnitModel settings. |
((<class 'pyomo.core.base.param.Param'>, ('_mutable',)), (<class 'pyomo.core.base.var.Var'>, ()), (<class 'pyomo.core.base.expression.Expression'>, ()), (<class 'pyomo.core.base.component.Component'>, ('active',)), (<class 'pyomo.core.base.objective.Objective'>, ('sense',)), (<class 'pyomo.core.base.indexed_component.IndexedComponent'>, ('index',))) |
|
data_classes |
This takes the same form as the classes argument. This is for component data classes. |
((<class 'pyomo.core.base.var._VarData'>, ('fixed', 'domain', 'value', 'stale', 'lb', 'ub')), (<class 'pyomo.core.base.param._ParamData'>, ('value',)), (<class 'int'>, ('value',)), (<class 'float'>, ('value',)), (<class 'pyomo.core.base.expression._ExpressionData'>, ()), (<class 'pyomo.core.base.component.ComponentData'>, ('active', '_index')), (<class 'pyomo.core.base.constraint._GeneralConstraintData'>, ()), (<class 'pyomo.core.expr.numvalue.NumericConstant'>, ('value',)), (<class 'pyomo.core.expr.relational_expr.InequalityExpression'>, ('strict',)), (<class 'pyomo.core.base.objective.ScalarObjective'>, ('sense',)), (<class 'pyomo.core.base.set.RangeSet'>, ('_init_data',))) |
|
skip_classes |
This is a list of classes to skip. If a class appears in the skip list, but also appears in the classes argument, the classes argument will override skip_classes. The use for this is to specifically exclude certain classes that would get caught by more general classes (e.g. UnitModel is in the class list, but you want to exclude HeatExchanger which is derived from UnitModel). |
(<class 'pyomo.core.base.external.ExternalFunction'>, <class 'pyomo.network.port.Port'>, <class 'pyomo.core.base.expression.Expression'>, <class 'pyomo.core.base.set.RangeSet'>) |
|
ignore_missing |
bool |
If True will ignore a component or attribute that exists in the model, but not in the stored state. If false an exception will be raised for things in the model that should be loaded but aren't in the stored state. Extra items in the stored state will not raise an exception regardless of this argument. |
True |
suffix |
bool |
If True store suffixes and component ids. If false, don't store suffixes. |
True |
suffix_filter |
None to store all suffixes if suffix=True, or a list of suffixes to store if suffix=True |
None |
Source code in classiq/interface/combinatorial_optimization/model_io_comon.py
class StoreSpec:
    """
    A StoreSpec object tells the serializer functions what to read or write.
    The default settings will produce a StoreSpec configured to load/save the
    typical attributes required to load/save a model state.
    Args:
        classes: A list of classes to save. Each class is represented by a
            list (or tuple) containing the following elements: (1) class
            (compared using isinstance) (2) attribute list or None, an empty
            list stores the object, but none of its attributes, None will not
            store objects of this class type (3) optional load filter function.
            The load filter function returns a list of attributes to read based
            on the state of an object and its saved state. This allows, for
            example, loading values for unfixed variables, or only loading
            values whose current value is less than one. The filter function
            only applies to load not save. Filter functions take two arguments
            (a) the object (current state) and (b) the dictionary containing the
            saved state of an object. More specific classes should come before
            more general classes. For example if an object is a HeatExchanger
            and a UnitModel, and HeatExchanger is listed first, it will follow
            the HeatExchanger settings. If UnitModel is listed first in the
            classes list, it will follow the UnitModel settings.
        data_classes: This takes the same form as the classes argument.
            This is for component data classes.
        skip_classes: This is a list of classes to skip. If a class appears
            in the skip list, but also appears in the classes argument, the
            classes argument will override skip_classes. The use for this is to
            specifically exclude certain classes that would get caught by more
            general classes (e.g. UnitModel is in the class list, but you want
            to exclude HeatExchanger which is derived from UnitModel).
        ignore_missing: If True will ignore a component or attribute that exists
            in the model, but not in the stored state. If false an exception
            will be raised for things in the model that should be loaded but
            aren't in the stored state. Extra items in the stored state will not
            raise an exception regardless of this argument.
        suffix: If True store suffixes and component ids. If false, don't store
            suffixes.
        suffix_filter: None to store all suffixes if suffix=True, or a list of
            suffixes to store if suffix=True
    """
    def __init__(
        self,
        classes=(
            (Param, ("_mutable",)),
            (Var, ()),
            (Expression, ()),
            (Component, ("active",)),
            (pyomo.core.base.objective.Objective, ("sense",)),
            (pyomo.core.base.indexed_component.IndexedComponent, ("index",)),
        ),
        data_classes=(
            (
                pyomo.core.base.var._VarData,
                (
                    "fixed",
                    "domain",
                    "value",
                    "stale",
                    "lb",
                    "ub",
                ),  # The order is important here. for example, domain attr might be needed in order to set value.
            ),
            (pyomo.core.base.param._ParamData, ("value",)),
            (int, ("value",)),
            (float, ("value",)),
            (pyomo.core.base.expression._ExpressionData, ()),
            (pyomo.core.base.component.ComponentData, ("active", "_index")),
            (pyomo.core.base.constraint._GeneralConstraintData, ()),
            (pyomo.core.expr.numvalue.NumericConstant, ("value",)),
            (pyomo.core.expr.relational_expr.InequalityExpression, ("strict",)),
            (pyomo.core.base.objective.ScalarObjective, ("sense",)),
            (pyomo.core.base.set.RangeSet, ("_init_data",)),
        ),
        skip_classes=(ExternalFunction, Port, Expression, RangeSet),
        ignore_missing: bool = True,
        suffix: bool = True,
        suffix_filter=None,
    ) -> None:
        """
        (see class docstring above for the meaning of each argument)
        """
        # Callbacks are used for attributes that cannot be directly get or set
        self.get_functions = {
            "value": _get_value,
            "strict": _get_strict,
            "domain": _get_domain,
            "index": _get_index_name,
        }
        self.set_functions = {
            "_mutable": lambda *args: None,
            "active": _set_active,
            "fixed": _set_fixed,
            "lb": _set_lb,
            "ub": _set_ub,
            "value": _set_value,
            "strict": _set_strict,
            "sense": _set_sense,
        }
        # Prepend skip classes (with an empty attr list, so matching objects
        # store no attributes); explicit `classes` entries take precedence.
        skip_with_classes: List[Any] = [
            (i, []) for i in skip_classes if i not in classes
        ] + list(classes)
        self.classes = [i[0] for i in skip_with_classes]
        # Parallel list of attribute lists, aligned with self.classes.
        self.class_attrs = [i[1] for i in skip_with_classes]
        self.data_classes = [i[0] for i in data_classes]
        self.data_class_attrs = [i[1] for i in data_classes]
        # Create filter function lists, use None if not supplied
        self.class_filter = [i[2] if len(i) > 2 else None for i in skip_with_classes]
        self.data_class_filter = [i[2] if len(i) > 2 else None for i in data_classes]
        self.ignore_missing = ignore_missing
        self.include_suffix = suffix
        self.suffix_filter = suffix_filter
    def set_read_callback(self, attr, cb=None) -> None:
        """
        Set a callback to set an attribute, when reading from json or dict.
        """
        self.set_functions[attr] = cb
    def set_write_callback(self, attr, cb=None) -> None:
        """
        Set a callback to get an attribute, when writing to json or dict.
        """
        self.get_functions[attr] = cb
    def get_class_attr_list(self, obj) -> Tuple[List[Any], Any]:
        """
        Look up what attributes to save/load for an Component object.
        Args:
            obj: Object to look up attribute list for.
        Return:
            A list of attributes and a filter function for object type
        """
        attr_list = []  # Attributes to store
        filter_function = None  # Load filter function
        for i, cl in enumerate(self.classes):
            if isinstance(obj, cl) or (isinstance(obj, type) and issubclass(obj, cl)):
                attr_list += list(self.class_attrs[i])
                # NOTE(review): only the LAST matching class's filter survives
                # while attr lists accumulate — looks suspect; confirm intent.
                filter_function = self.class_filter[i]  # this does not make sense
        return attr_list, filter_function
    def get_data_class_attr_list(self, obj) -> Tuple[List[Any], Any]:
        """
        Look up what attributes to save/load for an ComponentData object.
        Args:
            obj: Object or type to look up attribute list for.
        Return:
            A list of attributes and a filter function for object type
        """
        attr_list = []  # Attributes to store
        filter_function = None  # Load filter function
        for i, cl in enumerate(self.data_classes):
            if isinstance(obj, cl) or (isinstance(obj, type) and issubclass(obj, cl)):
                attr_list += list(self.data_class_attrs[i])
                # NOTE(review): same last-match-wins behavior as
                # get_class_attr_list — confirm intent.
                filter_function = self.data_class_filter[
                    i
                ]  # TODO: this does not make sense
        return attr_list, filter_function
    @classmethod
    def bound(cls):
        """Returns a StoreSpec object to store variable bounds only."""
        return cls(
            classes=((Var, ()),),
            data_classes=((pyomo.core.base.var._VarData, ("lb", "ub")),),
            suffix=False,
        )
    @classmethod
    def value(cls):
        """Returns a StoreSpec object to store variable values only."""
        return cls(
            classes=((Var, ()),),
            data_classes=((pyomo.core.base.var._VarData, ("value",)),),
            suffix=False,
        )
    @classmethod
    def isfixed(cls):
        """Returns a StoreSpec object to store if variables are fixed."""
        return cls(
            classes=((Var, ()),),
            data_classes=((pyomo.core.base.var._VarData, ("fixed",)),),
            suffix=False,
        )
    @classmethod
    def suffix(cls, suffix_filter=None):
        """Returns a StoreSpec object to store suffixes only."""
        return cls(
            classes=((Suffix, ()),),
            data_classes=(),
            suffix=True,
            suffix_filter=suffix_filter,
        )
    @classmethod
    def value_isfixed(cls, only_fixed):
        """
        Return a StoreSpec object to store variable values and if fixed.
        Args:
            only_fixed: Only load fixed variable values
        """
        if only_fixed:
            return cls(
                classes=((Var, ()),),
                data_classes=(
                    (pyomo.core.base.var._VarData, ("value", "fixed"), _only_fixed),
                ),
                suffix=False,
            )
        else:
            return cls(
                classes=((Var, ()),),
                data_classes=((pyomo.core.base.var._VarData, ("value", "fixed")),),
                suffix=False,
            )
    @classmethod
    def value_isfixed_isactive(cls, only_fixed):
        """
        Return a StoreSpec object to store variable values, if variables are
        fixed and if components are active.
        Args:
            only_fixed: Only load fixed variable values
        """
        if only_fixed:
            return cls(
                classes=((Var, ()), (Param, ()), (Component, ("active",))),
                data_classes=(
                    (pyomo.core.base.var._VarData, ("value", "fixed"), _only_fixed),
                    (pyomo.core.base.param._ParamData, ("value",)),
                    (pyomo.core.base.component.ComponentData, ("active",)),
                ),
                suffix=False,
            )
        else:
            return cls(
                classes=((Var, ()), (Param, ()), (Component, ("active",))),
                data_classes=(
                    (pyomo.core.base.var._VarData, ("value", "fixed")),
                    (pyomo.core.base.param._ParamData, ("value",)),
                    (pyomo.core.base.component.ComponentData, ("active",)),
                ),
                suffix=False,
            )
__init__(self, classes=((<class 'pyomo.core.base.param.Param'>, ('_mutable',)), (<class 'pyomo.core.base.var.Var'>, ()), (<class 'pyomo.core.base.expression.Expression'>, ()), (<class 'pyomo.core.base.component.Component'>, ('active',)), (<class 'pyomo.core.base.objective.Objective'>, ('sense',)), (<class 'pyomo.core.base.indexed_component.IndexedComponent'>, ('index',))), data_classes=((<class 'pyomo.core.base.var._VarData'>, ('fixed', 'domain', 'value', 'stale', 'lb', 'ub')), (<class 'pyomo.core.base.param._ParamData'>, ('value',)), (<class 'int'>, ('value',)), (<class 'float'>, ('value',)), (<class 'pyomo.core.base.expression._ExpressionData'>, ()), (<class 'pyomo.core.base.component.ComponentData'>, ('active', '_index')), (<class 'pyomo.core.base.constraint._GeneralConstraintData'>, ()), (<class 'pyomo.core.expr.numvalue.NumericConstant'>, ('value',)), (<class 'pyomo.core.expr.relational_expr.InequalityExpression'>, ('strict',)), (<class 'pyomo.core.base.objective.ScalarObjective'>, ('sense',)), (<class 'pyomo.core.base.set.RangeSet'>, ('_init_data',))), skip_classes=(<class 'pyomo.core.base.external.ExternalFunction'>, <class 'pyomo.network.port.Port'>, <class 'pyomo.core.base.expression.Expression'>, <class 'pyomo.core.base.set.RangeSet'>), ignore_missing=True, suffix=True, suffix_filter=None)
special
¶
(see above)
Source code in classiq/interface/combinatorial_optimization/model_io_comon.py
def __init__(
    self,
    classes=(
        (Param, ("_mutable",)),
        (Var, ()),
        (Expression, ()),
        (Component, ("active",)),
        (pyomo.core.base.objective.Objective, ("sense",)),
        (pyomo.core.base.indexed_component.IndexedComponent, ("index",)),
    ),
    data_classes=(
        (
            pyomo.core.base.var._VarData,
            (
                "fixed",
                "domain",
                "value",
                "stale",
                "lb",
                "ub",
            ),  # The order is important here. for example, domain attr might be needed in order to set value.
        ),
        (pyomo.core.base.param._ParamData, ("value",)),
        (int, ("value",)),
        (float, ("value",)),
        (pyomo.core.base.expression._ExpressionData, ()),
        (pyomo.core.base.component.ComponentData, ("active", "_index")),
        (pyomo.core.base.constraint._GeneralConstraintData, ()),
        (pyomo.core.expr.numvalue.NumericConstant, ("value",)),
        (pyomo.core.expr.relational_expr.InequalityExpression, ("strict",)),
        (pyomo.core.base.objective.ScalarObjective, ("sense",)),
        (pyomo.core.base.set.RangeSet, ("_init_data",)),
    ),
    skip_classes=(ExternalFunction, Port, Expression, RangeSet),
    ignore_missing: bool = True,
    suffix: bool = True,
    suffix_filter=None,
) -> None:
    """
    (see the StoreSpec class docstring for the meaning of each argument)
    """
    # Callbacks are used for attributes that cannot be directly get or set
    self.get_functions = {
        "value": _get_value,
        "strict": _get_strict,
        "domain": _get_domain,
        "index": _get_index_name,
    }
    self.set_functions = {
        "_mutable": lambda *args: None,
        "active": _set_active,
        "fixed": _set_fixed,
        "lb": _set_lb,
        "ub": _set_ub,
        "value": _set_value,
        "strict": _set_strict,
        "sense": _set_sense,
    }
    # Prepend skip classes (with an empty attr list, so matching objects
    # store no attributes); explicit `classes` entries take precedence.
    skip_with_classes: List[Any] = [
        (i, []) for i in skip_classes if i not in classes
    ] + list(classes)
    self.classes = [i[0] for i in skip_with_classes]
    # Parallel list of attribute lists, aligned with self.classes.
    self.class_attrs = [i[1] for i in skip_with_classes]
    self.data_classes = [i[0] for i in data_classes]
    self.data_class_attrs = [i[1] for i in data_classes]
    # Create filter function lists, use None if not supplied
    self.class_filter = [i[2] if len(i) > 2 else None for i in skip_with_classes]
    self.data_class_filter = [i[2] if len(i) > 2 else None for i in data_classes]
    self.ignore_missing = ignore_missing
    self.include_suffix = suffix
    self.suffix_filter = suffix_filter
bound()
classmethod
¶
Returns a StoreSpec object to store variable bounds only.
Source code in classiq/interface/combinatorial_optimization/model_io_comon.py
@classmethod
def bound(cls):
    """Create a StoreSpec that stores only variable lower/upper bounds.

    Only ``Var`` components are visited; for each ``_VarData`` just the
    ``lb`` and ``ub`` attributes are recorded, and suffixes are skipped.
    """
    bounds_spec = (pyomo.core.base.var._VarData, ("lb", "ub"))
    return cls(
        classes=((Var, ()),),
        data_classes=(bounds_spec,),
        suffix=False,
    )
get_class_attr_list(self, obj)
¶
Look up what attributes to save/load for an Component object.
Parameters:
Name | Type | Description | Default |
---|---|---|---|
obj |
Object to look up attribute list for. |
required |
Returns:
Type | Description |
---|---|
Tuple[List[Any], Any] |
A list of attributes and a filter function for object type |
Source code in classiq/interface/combinatorial_optimization/model_io_comon.py
def get_class_attr_list(self, obj) -> Tuple[List[Any], Any]:
    """Look up which attributes to save/load for a Component object.

    Args:
        obj: Object (or type) to look up the attribute list for.

    Returns:
        A tuple of (attribute list, load-filter function) for the object's
        type.
    """
    attrs: List[Any] = []  # attributes to store
    filter_function = None  # load filter function
    for klass, klass_attrs, klass_filter in zip(
        self.classes, self.class_attrs, self.class_filter
    ):
        matches = isinstance(obj, klass) or (
            isinstance(obj, type) and issubclass(obj, klass)
        )
        if matches:
            attrs.extend(klass_attrs)
            # NOTE(review): each matching class overwrites the previous
            # filter, so the last match wins (pre-existing behavior).
            filter_function = klass_filter
    return attrs, filter_function
get_data_class_attr_list(self, obj)
¶
Look up what attributes to save/load for an ComponentData object.
Parameters:
Name | Type | Description | Default |
---|---|---|---|
obj |
Object or type to look up attribute list for. |
required |
Returns:
Type | Description |
---|---|
Tuple[List[Any], Any] |
A list of attributes and a filter function for object type |
Source code in classiq/interface/combinatorial_optimization/model_io_comon.py
def get_data_class_attr_list(self, obj) -> Tuple[List[Any], Any]:
    """Look up which attributes to save/load for a ComponentData object.

    Args:
        obj: Object (or type) to look up the attribute list for.

    Returns:
        A tuple of (attribute list, load-filter function) for the object's
        type.
    """
    attrs: List[Any] = []  # attributes to store
    filter_function = None  # load filter function
    for klass, klass_attrs, klass_filter in zip(
        self.data_classes, self.data_class_attrs, self.data_class_filter
    ):
        matches = isinstance(obj, klass) or (
            isinstance(obj, type) and issubclass(obj, klass)
        )
        if matches:
            attrs.extend(klass_attrs)
            # NOTE(review): the last matching class wins the filter slot
            # (pre-existing behavior, flagged by the original TODO).
            filter_function = klass_filter
    return attrs, filter_function
isfixed()
classmethod
¶
Returns a StoreSpec object to store if variables are fixed.
Source code in classiq/interface/combinatorial_optimization/model_io_comon.py
@classmethod
def isfixed(cls):
    """Create a StoreSpec that stores only whether variables are fixed."""
    fixed_flag_spec = (pyomo.core.base.var._VarData, ("fixed",))
    return cls(
        classes=((Var, ()),),
        data_classes=(fixed_flag_spec,),
        suffix=False,
    )
set_read_callback(self, attr, cb=None)
¶
Set a callback to set an attribute, when reading from json or dict.
Source code in classiq/interface/combinatorial_optimization/model_io_comon.py
def set_read_callback(self, attr, cb=None) -> None:
    """Register *cb* as the setter for *attr* when reading from json/dict.

    Passing ``cb=None`` registers no-op handling for the attribute.
    """
    self.set_functions.update({attr: cb})
set_write_callback(self, attr, cb=None)
¶
Set a callback to get an attribute, when writing to json or dict.
Source code in classiq/interface/combinatorial_optimization/model_io_comon.py
def set_write_callback(self, attr, cb=None) -> None:
    """Register *cb* as the getter for *attr* when writing to json/dict.

    Passing ``cb=None`` registers no-op handling for the attribute.
    """
    self.get_functions.update({attr: cb})
value()
classmethod
¶
Returns a StoreSpec object to store variable values only.
Source code in classiq/interface/combinatorial_optimization/model_io_comon.py
@classmethod
def value(cls):
    """Create a StoreSpec that stores variable values only."""
    value_spec = (pyomo.core.base.var._VarData, ("value",))
    return cls(
        classes=((Var, ()),),
        data_classes=(value_spec,),
        suffix=False,
    )
value_isfixed(only_fixed)
classmethod
¶
Return a StoreSpec object to store variable values and if fixed.
Parameters:
Name | Type | Description | Default |
---|---|---|---|
only_fixed |
Only load fixed variable values |
required |
Source code in classiq/interface/combinatorial_optimization/model_io_comon.py
@classmethod
def value_isfixed(cls, only_fixed):
    """Create a StoreSpec storing variable values and their fixed flag.

    Args:
        only_fixed: If truthy, only load values of fixed variables (the
            ``_only_fixed`` filter is attached to the data-class spec).
    """
    var_spec = (pyomo.core.base.var._VarData, ("value", "fixed"))
    if only_fixed:
        # Append the load filter so only fixed variables are read back.
        var_spec = var_spec + (_only_fixed,)
    return cls(
        classes=((Var, ()),),
        data_classes=(var_spec,),
        suffix=False,
    )
value_isfixed_isactive(only_fixed)
classmethod
¶
Return a StoreSpec object to store variable values, if variables are fixed and if components are active.
Parameters:
Name | Type | Description | Default |
---|---|---|---|
only_fixed |
Only load fixed variable values |
required |
Source code in classiq/interface/combinatorial_optimization/model_io_comon.py
@classmethod
def value_isfixed_isactive(cls, only_fixed):
    """Create a StoreSpec storing variable values, fixed flags, and whether
    components are active.

    Args:
        only_fixed: If truthy, only load fixed variable values (the
            ``_only_fixed`` filter is attached to the var data spec).
    """
    var_spec = (pyomo.core.base.var._VarData, ("value", "fixed"))
    if only_fixed:
        # Append the load filter so only fixed variables are read back.
        var_spec = var_spec + (_only_fixed,)
    return cls(
        classes=((Var, ()), (Param, ()), (Component, ("active",))),
        data_classes=(
            var_spec,
            (pyomo.core.base.param._ParamData, ("value",)),
            (pyomo.core.base.component.ComponentData, ("active",)),
        ),
        suffix=False,
    )
model_serializer
¶
Functions for saving and loading Pyomo objects to json
Counter
¶
This is a counter object, which is an easy way to pass an integer pointer around between methods.
Source code in classiq/interface/combinatorial_optimization/model_serializer.py
class Counter:
    """A tiny mutable integer holder.

    Passing a Counter instance between methods lets them share and update a
    single running count (an easy "integer pointer").
    """

    def __init__(self) -> None:
        # Start counting from zero; callers mutate `count` directly.
        self.count: int = 0
to_json(obj, file_name=None, human_read=False, store_spec=None, metadata=None, gz=None, return_dict=False, return_json_string=False)
¶
Save the state of a model to a Python dictionary, and optionally dump it to a json file. To load a model state, a model with the same structure must exist. The model itself cannot be recreated from this.
Parameters:
Name | Type | Description | Default |
---|---|---|---|
obj |
The Pyomo component object to save. Usually a Pyomo model, but could also be a subcomponent of a model (usually a sub-block). |
required | |
file_name |
json file name to save model state, if None only create python dict |
None |
|
gz |
If file_name is given and gz is True, gzip the json file. The default is True if the file name ends with '.gz', otherwise False. |
None |
|
human_read |
if True, add indents and spacing to make the json file more readable, if false cut out whitespace and make as compact as possible |
False |
|
metadata |
A dictionary of additional metadata to add. |
None |
|
store_spec |
What to save; this is a StoreSpec object that specifies which object types and attributes to save. If None, the default is used, which saves the complete model state. |
None |
|
metadata |
additional metadata to save beyond the standard format_version, date, and time. |
None |
|
return_dict |
default is False if true returns a dictionary representation |
False |
|
return_json_string |
default is False returns a json string |
False |
Returns:
Type | Description |
---|---|
Optional[Dict[Any, Any]] |
If return_dict is True returns a dictionary serialization of the Pyomo component. If return_dict is False and return_json_string is True returns a json string dump of the dict. If file_name is given the dictionary is also written to a json file. If gz is True and file_name is given, writes a gzipped json file. |
Source code in classiq/interface/combinatorial_optimization/model_serializer.py
def to_json(
    obj,
    file_name=None,
    human_read=False,
    store_spec=None,
    metadata=None,
    gz=None,
    return_dict=False,
    return_json_string=False,
) -> Optional[Dict[Any, Any]]:
    """
    Save the state of a model to a Python dictionary, and optionally dump it
    to a json file. To load a model state, a model with the same structure must
    exist. The model itself cannot be recreated from this.
    Args:
        obj: The Pyomo component object to save. Usually a Pyomo model, but could
            also be a subcomponent of a model (usually a sub-block).
        file_name: json file name to save model state, if None only create
            python dict
        gz: If file_name is given and gz is True gzip the json file. The default is
            True if the file name ends with '.gz' otherwise False.
        human_read: if True, add indents and spacing to make the json file more
            readable, if false cut out whitespace and make as compact as
            possible
        store_spec: is What To Save, this is a StoreSpec object that specifies what
            object types and attributes to save. If None, the default is used
            which saves the state of the complete model state.
        metadata: additional metadata to save beyond the standard format_version,
            date, and time.
        return_dict: default is False if true returns a dictionary representation
        return_json_string: default is False returns a json string
    Returns:
        If return_dict is True returns a dictionary serialization of the Pyomo
        component. If return_dict is False and return_json_string is True
        returns a json string dump of the dict. If file_name is given the dictionary
        is also written to a json file. If gz is True and file_name is given, writes
        a gzipped json file.
    """
    # Infer gzip from the file extension when not explicitly requested.
    if gz is None:
        gz = isinstance(file_name, str) and file_name.endswith(".gz")
    if metadata is None:
        metadata = {}
    suffixes: List[dict] = []
    lookup: Dict[int, int] = {}
    count: Counter = Counter()
    start_time = time.time()
    if store_spec is None:
        store_spec = StoreSpec()
    now = datetime.datetime.now()
    obj_dict = {
        "__metadata__": {
            "format_version": __format_version__,
            "date": datetime.date.isoformat(now.date()),
            "time": datetime.time.isoformat(now.time()),
            "other": metadata,
        }
    }
    _write_component(obj_dict, obj, store_spec, count, suffixes=suffixes, lookup=lookup)
    # Suffix data is written last, after every referenced component has an id.
    for s in suffixes:
        _write_component_data(**s)
    obj_dict["__metadata__"]["__performance__"] = {}
    performance_dict = obj_dict["__metadata__"]["__performance__"]
    performance_dict["n_components"] = count.count
    dict_time = time.time()
    performance_dict["time_to_make_dict"] = dict_time - start_time
    dump_kw: Dict[str, Any] = (
        {"indent": 2} if human_read else {"separators": (",", ":")}
    )
    if file_name is not None:
        if gz:
            # BUG FIX: gzip.open defaults to binary mode, but json.dump emits
            # str objects, which raises TypeError on a binary file object.
            # Open in text mode ("wt") so the dump succeeds.
            with gzip.open(file_name, "wt") as f:
                json.dump(obj_dict, f, **dump_kw)
        else:
            with open(file_name, "w") as f:
                json.dump(obj_dict, f, **dump_kw)
        file_time = time.time()
        performance_dict["time_to_write_file"] = file_time - dict_time
    if return_dict:
        return obj_dict
    elif return_json_string:
        return json.dumps(obj_dict, **dump_kw)  # type: ignore[return-value]
    else:
        return None
optimization_problem
¶
MaxCutProblem (BaseModel)
pydantic-model
¶
Source code in classiq/interface/combinatorial_optimization/optimization_problem.py
class MaxCutProblem(BaseModel):
    """A max-cut problem instance together with its QAOA/VQE solver settings."""

    # Depth of the QAOA ansatz (number of repeated layers).
    qaoa_reps: pydantic.PositiveInt = pydantic.Field(
        default=1, description="Number of layers in qaoa ansatz."
    )
    # Classical-optimizer/VQE settings used when solving the problem.
    optimizer_preferences: CombinatorialOptimizer = pydantic.Field(
        default_factory=CombinatorialOptimizer,
        description="preferences for the VQE execution",
    )
    # Graph serialized as a dict — presumably networkx node-link data; TODO confirm format.
    serialized_graph: Dict[str, Any]
executor
special
¶
aws_execution_cost
¶
ExecutionCostForTimePeriod (BaseModel)
pydantic-model
¶
Source code in classiq/interface/executor/aws_execution_cost.py
class ExecutionCostForTimePeriod(pydantic.BaseModel):
    """Time period and scope for querying execution task usage and cost."""

    start: date = pydantic.Field(
        description="The beginning of the time period for tasks usage and cost ("
        "inclusive).",
    )
    end: date = pydantic.Field(
        description="The end of the time period for tasks usage and cost (exclusive).",
    )
    granularity: Granularity = pydantic.Field(
        description="Either MONTHLY or DAILY, or HOURLY.", default=Granularity.daily
    )
    cost_scope: CostScope = pydantic.Field(
        description="Either user or organization", default=CostScope.user
    )

    class Config:
        # Serialize date fields as "YYYY-MM-DD" strings in json output.
        json_encoders = {date: lambda v: v.strftime("%Y-%m-%d")}

    @validator("end")
    def date_order(cls, v, values, **kwargs):
        # Reject periods where "end" is not strictly after "start".
        # "start" may be missing from values if it failed its own validation.
        if "start" in values and v <= values["start"]:
            raise ValueError('"end" date should be after "start" date')
        return v
cost_scope: CostScope
pydantic-field
¶
Either user or organization
end: date
pydantic-field
required
¶
The end of the time period for tasks usage and cost (exclusive).
granularity: Granularity
pydantic-field
¶
Either MONTHLY or DAILY, or HOURLY.
start: date
pydantic-field
required
¶
The beginning of the time period for tasks usage and cost (inclusive).
__json_encoder__(obj)
special
staticmethod
¶
partial(func, *args, **keywords) - new function with partial application of the given arguments and keywords.
estimation
¶
OperatorsEstimation (BaseModel)
pydantic-model
¶
Estimate the expectation value of a list of Pauli operators on a quantum state given by a quantum program.
Source code in classiq/interface/executor/estimation.py
class OperatorsEstimation(pydantic.BaseModel):
    """
    Estimate the expectation value of a list of Pauli operators on a quantum state given
    by a quantum program.
    """

    # The program preparing the quantum state to measure.
    quantum_program: QuantumProgram
    # The Pauli operators whose expectation values are to be estimated.
    operators: PauliOperators
execution_preferences
¶
AmplitudeAmplification (BaseModel)
pydantic-model
¶
Source code in classiq/interface/executor/execution_preferences.py
class AmplitudeAmplification(pydantic.BaseModel):
    """Settings controlling amplitude-amplification iterations during Grover execution."""

    iterations: List[int] = pydantic.Field(
        default_factory=list,
        description="Number or list of numbers of iteration to use",
    )
    growth_rate: float = pydantic.Field(
        default=1.25,
        description="Number of iteration used is set to round(growth_rate**iterations)",
    )
    sample_from_iterations: bool = pydantic.Field(
        default=False,
        description="If True, number of iterations used is picked randomly from "
        "[1, iteration] range",
    )
    num_of_highest_probability_states_to_check: pydantic.PositiveInt = pydantic.Field(
        default=1, description="Then number of highest probability states to check"
    )

    @pydantic.validator("iterations")
    def _validate_iterations(cls, iterations: Union[List[int], int]) -> List[int]:
        # A bare int is accepted for convenience and normalized to a
        # one-element list, so downstream code can rely on List[int].
        if isinstance(iterations, int):
            return [iterations]
        return iterations
growth_rate: float
pydantic-field
¶
Number of iteration used is set to round(growth_rate**iterations)
iterations: List[int]
pydantic-field
¶
Number or list of numbers of iteration to use
num_of_highest_probability_states_to_check: PositiveInt
pydantic-field
¶
The number of highest probability states to check
sample_from_iterations: bool
pydantic-field
¶
If True, number of iterations used is picked randomly from [1, iteration] range
ExecutionPreferences (BaseModel)
pydantic-model
¶
Source code in classiq/interface/executor/execution_preferences.py
class ExecutionPreferences(pydantic.BaseModel):
    """User preferences controlling how a quantum program is executed."""

    timeout_sec: Optional[pydantic.PositiveInt] = pydantic.Field(
        default=None,
        description="If set, limits the execution runtime. Value is in seconds. "
        "Not supported on all platforms.",
    )
    amplitude_amplification: AmplitudeAmplification = pydantic.Field(
        default_factory=AmplitudeAmplification,
        description="Settings related to amplitude amplification execution, used during the grover execution.",
    )
    # NOTE(review): `default_factory=None` means "no factory" in pydantic, so
    # this field just defaults to None — `default=None` was probably intended.
    optimizer_preferences: Optional[OptimizerPreferences] = pydantic.Field(
        default_factory=None,
        description="Settings related to VQE execution.",
    )
    error_mitigation_method: Optional[ErrorMitigationMethod] = pydantic.Field(
        default=None,
        description="Error mitigation method. Currently supports complete and tensored "
        "measurement calibration.",
    )
    noise_properties: Optional[NoiseProperties] = pydantic.Field(
        default=None, description="Properties of the noise in the circuit"
    )
    # Declared int but defaulted to None: validate_random_seed (always=True)
    # below replaces a missing value with create_random_seed().
    random_seed: int = pydantic.Field(
        default=None,
        description="The random seed used for the execution",
    )
    backend_preferences: BackendPreferencesTypes = backend_preferences_field(
        backend_name=IBMQBackendNames.IBMQ_AER_SIMULATOR
    )
    num_shots: Optional[pydantic.PositiveInt] = pydantic.Field(default=None)
    transpile_to_hardware: TranspilationOption = pydantic.Field(
        default=TranspilationOption.DECOMPOSE,
        description="Transpile the circuit to the hardware basis gates before execution",
        title="Transpilation Option",
    )

    # NOTE(review): this __init__ only forwards to the parent; it could be removed.
    def __init__(self, **kwargs: Any) -> None:
        super().__init__(**kwargs)

    @pydantic.validator("num_shots", always=True)
    def validate_num_shots(
        cls, original_num_shots: Optional[pydantic.PositiveInt], values: Dict[str, Any]
    ) -> Optional[pydantic.PositiveInt]:
        # Fall back to the optimizer-preferences value when not set explicitly.
        return _choose_original_or_optimizer_attribute(
            original_num_shots, "num_shots", None, values
        )

    @pydantic.validator("backend_preferences", always=True)
    def validate_timeout_for_aws(
        cls, backend_preferences: BackendPreferencesTypes, values: Dict[str, Any]
    ) -> BackendPreferencesTypes:
        """Keep the global timeout_sec and the AWS job_timeout consistent."""
        timeout = values.get("timeout_sec", None)
        # Only relevant for AWS backends when a global timeout is set.
        if (
            not isinstance(backend_preferences, AwsBackendPreferences)
            or timeout is None
        ):
            return backend_preferences
        # Reject conflicting timeouts unless job_timeout is still the default.
        if (
            timeout != backend_preferences.job_timeout
            and backend_preferences.job_timeout != AWS_DEFAULT_JOB_TIMEOUT_SECONDS
        ):
            raise ValueError(DIFFERENT_TIMEOUT_MSG)
        if timeout > MAX_EXECUTION_TIMEOUT_SECONDS:
            raise ValueError(TIMEOUT_LARGE_FOR_AWS_MSG)
        # Propagate the global timeout to the AWS-specific setting.
        backend_preferences.job_timeout = timeout
        return backend_preferences

    @pydantic.validator("random_seed", always=True)
    def validate_random_seed(
        cls, original_random_seed: Optional[int], values: Dict[str, Any]
    ) -> int:
        # Prefer an explicit seed, then the optimizer's, else a fresh random one.
        return _choose_original_or_optimizer_attribute(
            original_random_seed, "random_seed", create_random_seed(), values
        )
amplitude_amplification: AmplitudeAmplification
pydantic-field
¶
Settings related to amplitude amplification execution, used during the grover execution.
backend_preferences: Union[classiq.interface.backend.backend_preferences.AzureBackendPreferences, classiq.interface.backend.backend_preferences.IBMBackendPreferences, classiq.interface.backend.backend_preferences.AwsBackendPreferences, classiq.interface.backend.backend_preferences.IonqBackendPreferences, classiq.interface.backend.backend_preferences.NvidiaBackendPreferences]
pydantic-field
¶
Preferences for the requested backend to run the quantum circuit.
error_mitigation_method: ErrorMitigationMethod
pydantic-field
¶
Error mitigation method. Currently supports complete and tensored measurement calibration.
noise_properties: NoiseProperties
pydantic-field
¶
Properties of the noise in the circuit
optimizer_preferences: OptimizerPreferences
pydantic-field
¶
Settings related to VQE execution.
random_seed: int
pydantic-field
¶
The random seed used for the execution
timeout_sec: PositiveInt
pydantic-field
¶
If set, limits the execution runtime. Value is in seconds. Not supported on all platforms.
transpile_to_hardware: TranspilationOption
pydantic-field
¶
Transpile the circuit to the hardware basis gates before execution
execution_request
¶
ExecutionRequest (BaseModel)
pydantic-model
¶
Source code in classiq/interface/executor/execution_request.py
class ExecutionRequest(BaseModel):
    """A payload to execute together with the preferences to execute it with."""

    execution_payload: ExecutionPayloads
    preferences: ExecutionPreferences = pydantic.Field(
        default_factory=ExecutionPreferences,
        description="preferences for the execution",
    )

    @pydantic.validator("preferences")
    def validate_ionq_backend(
        cls, preferences: ExecutionPreferences, values: Dict[str, Any]
    ):
        """
        This function implements the following check:
        BE \\ payload | IonQ program | Qasm program | Other
        --------------|--------------|--------------|------
        IonQ backend  | V            | V            | X
        Other backend | X            | V            | V
        Since:
        - We can't execute non-programs on the IonQ backends
        - We can't execute IonQ programs on non-IonQ backends
        """
        # "execution_payload" may be absent if it failed its own validation.
        quantum_program = values.get("execution_payload")
        is_ionq_backend = isinstance(
            preferences.backend_preferences, IonqBackendPreferences
        )
        if isinstance(quantum_program, QuantumProgram):
            # IonQ-syntax programs can only run on IonQ backends.
            if (
                quantum_program.syntax == QuantumInstructionSet.IONQ
                and not is_ionq_backend
            ):
                raise ValueError("Can only execute IonQ code on IonQ backend.")
        else:
            # If we handle anything other than a program.
            if is_ionq_backend:
                raise ValueError(
                    "IonQ backend supports only execution of QuantumPrograms"
                )
        return preferences
preferences: ExecutionPreferences
pydantic-field
¶
preferences for the execution
validate_ionq_backend(preferences, values)
classmethod
¶
This function implements the following check: BE \ payload | IonQ program | Qasm program | Other --------------|--------------|--------------|------ IonQ backend | V | V | X Other backend | X | V | V Since: - We can't execute non-programs on the IonQ backends - We can't execute IonQ programs on non-IonQ backends
Source code in classiq/interface/executor/execution_request.py
@pydantic.validator("preferences")
def validate_ionq_backend(
    cls, preferences: ExecutionPreferences, values: Dict[str, Any]
):
    """
    This function implements the following check:
    BE \\ payload | IonQ program | Qasm program | Other
    --------------|--------------|--------------|------
    IonQ backend  | V            | V            | X
    Other backend | X            | V            | V
    Since:
    - We can't execute non-programs on the IonQ backends
    - We can't execute IonQ programs on non-IonQ backends
    """
    # "execution_payload" may be absent if it failed its own validation.
    quantum_program = values.get("execution_payload")
    is_ionq_backend = isinstance(
        preferences.backend_preferences, IonqBackendPreferences
    )
    if isinstance(quantum_program, QuantumProgram):
        # IonQ-syntax programs can only run on IonQ backends.
        if (
            quantum_program.syntax == QuantumInstructionSet.IONQ
            and not is_ionq_backend
        ):
            raise ValueError("Can only execute IonQ code on IonQ backend.")
    else:
        # If we handle anything other than a program.
        if is_ionq_backend:
            raise ValueError(
                "IonQ backend supports only execution of QuantumPrograms"
            )
    return preferences
information_param
¶
ExecutionDevicesInform (VersionedModel)
pydantic-model
¶
Source code in classiq/interface/executor/information_param.py
class ExecutionDevicesInform(VersionedModel):
    """Versioned response carrying execution information for all devices."""

    informs_params: List[ExecutionInform] = pydantic.Field(
        default=...,
        description="List of execution Information of all devices",
    )
informs_params: List[classiq.interface.executor.information_param.ExecutionInform]
pydantic-field
required
¶
List of execution Information of all devices
ExecutionInform (BaseModel)
pydantic-model
¶
Source code in classiq/interface/executor/information_param.py
class ExecutionInform(pydantic.BaseModel):
    """Availability and queue information for a single execution device."""

    backend_name: str = pydantic.Field(
        default=...,
        description="The name of the device",
    )
    backend_service_provider: ProviderVendor = pydantic.Field(
        default=...,
        description="The name of the provider",
    )
    # MissingData marks fields the provider did not report.
    status: Union[AvailabilityStatus, MissingData] = pydantic.Field(
        default=...,
        description="availability status of the hardware",
    )
    type: DeviceType = pydantic.Field(
        default=...,
        description="The type of the device",
    )
    max_qubits: Union[int, MissingData] = pydantic.Field(
        default=...,
        description="number of qubits in the hardware",
    )
    average_queue_time: Optional[int] = pydantic.Field(
        default=None,
        description="how many seconds recently run jobs waited in the queue",
    )
    pending_jobs: Optional[int] = pydantic.Field(
        default=None,
        description="number of waiting jobs",
    )
average_queue_time: int
pydantic-field
¶
how many seconds recently run jobs waited in the queue
backend_name: str
pydantic-field
required
¶
The name of the device
backend_service_provider: ProviderVendor
pydantic-field
required
¶
The name of the provider
max_qubits: Union[int, classiq.interface.executor.information_param.MissingData]
pydantic-field
required
¶
number of qubits in the hardware
pending_jobs: int
pydantic-field
¶
number of waiting jobs
status: Union[classiq.interface.executor.information_param.AvailabilityStatus, classiq.interface.executor.information_param.MissingData]
pydantic-field
required
¶
availability status of the hardware
type: DeviceType
pydantic-field
required
¶
The type of the device
ExecutionInformRequestParams (BaseModel)
pydantic-model
¶
Source code in classiq/interface/executor/information_param.py
class ExecutionInformRequestParams(pydantic.BaseModel):
    """Request parameters for querying device execution information."""

    # NOTE(review): the description says "List of vendor providers" but the
    # field holds a single provider — confirm which is intended.
    provider: ProviderVendor = pydantic.Field(
        default=..., description="List of vendor providers"
    )
provider: ProviderVendor
pydantic-field
required
¶
List of vendor providers
optimizer_preferences
¶
CombinatorialOptimizer (OptimizerPreferences)
pydantic-model
¶
Source code in classiq/interface/executor/optimizer_preferences.py
class CombinatorialOptimizer(OptimizerPreferences):
    """Optimizer preferences specific to combinatorial-optimization execution."""

    cost_type: CostType = pydantic.Field(
        default=CostType.CVAR,
        description="Summarizing method of the measured bit strings",
    )
    # Only meaningful when cost_type is CVAR (enforced by check_alpha_cvar).
    alpha_cvar: PydanticAlphaParamCVAR = pydantic.Field(
        default=None, description="Parameter for the CVAR summarizing method"
    )
    is_maximization: bool = pydantic.Field(
        default=False,
        description="Whether the optimization goal is to maximize",
    )
    should_check_valid_solutions: bool = pydantic.Field(
        default=False,
        description="Whether to check if all the solutions satisfy the constraints",
    )

    @pydantic.validator("alpha_cvar", pre=True, always=True)
    def check_alpha_cvar(cls, alpha_cvar, values):
        # alpha_cvar must only be set together with CVAR cost; when CVAR is
        # used without an explicit alpha, default to 0.2.
        cost_type = values.get("cost_type")
        if alpha_cvar is not None and cost_type != CostType.CVAR:
            raise ValueError("Use CVAR params only for CostType.CVAR.")
        if alpha_cvar is None and cost_type == CostType.CVAR:
            alpha_cvar = 0.2
        return alpha_cvar
alpha_cvar: PydanticAlphaParamCVAR
pydantic-field
¶
Parameter for the CVAR summarizing method
cost_type: CostType
pydantic-field
¶
Summarizing method of the measured bit strings
is_maximization: bool
pydantic-field
¶
Whether the optimization goal is to maximize
should_check_valid_solutions: bool
pydantic-field
¶
Whether to check if all the solutions satisfy the constraints
OptimizerPreferences (BaseModel)
pydantic-model
¶
Source code in classiq/interface/executor/optimizer_preferences.py
class OptimizerPreferences(BaseModel):
name: OptimizerType = pydantic.Field(
default=OptimizerType.COBYLA, description="Classical optimization algorithm."
)
num_shots: Optional[pydantic.PositiveInt] = pydantic.Field(
default=None,
description="Number of repetitions of th