Skip to content

Commit

Permalink
Merge branch 'microsoft:main' into add-ankaa-9q-3
Browse files Browse the repository at this point in the history
  • Loading branch information
Shadow53 authored Oct 2, 2024
2 parents fe6547d + e25d185 commit 46876a8
Show file tree
Hide file tree
Showing 169 changed files with 122,082 additions and 98,602 deletions.
44 changes: 27 additions & 17 deletions .ado/publish.yml
Original file line number Diff line number Diff line change
Expand Up @@ -54,15 +54,16 @@ extends:
image: windows-latest
os: windows
stages:
- stage: stage
- stage: PublishToPyPi
displayName: "\U0001F6EB Build and Publish to PyPi"
jobs:
- job: "Build_Azure_Quantum_Python"
displayName: Build "azure-quantum" package
templateContext:
outputs:
- output: pipelineArtifact
displayName: 'Upload "azure-quantum" artifacts'
targetPath: $(Build.SourcesDirectory)/azure-quantum/target/wheels/
targetPath: $(Build.SourcesDirectory)/azure-quantum/artifacts/
artifactName: azure-quantum-wheels

steps:
Expand All @@ -88,10 +89,14 @@ extends:
- script: |
cd $(Build.SourcesDirectory)/azure-quantum
python setup.py sdist --dist-dir=target/wheels
python setup.py bdist_wheel --dist-dir=target/wheels
python setup.py sdist --dist-dir=artifacts/wheels
python setup.py bdist_wheel --dist-dir=artifacts/wheels
displayName: Build "azure-quantum" package
- script: |
copy set_version.py "$(Build.SourcesDirectory)/azure-quantum/artifacts"
displayName: Copy "set_version.py" to artifacts
- job: "Test_Azure_Quantum_Python"
displayName: Test "azure-quantum" package
steps:
Expand Down Expand Up @@ -147,7 +152,7 @@ extends:
inputs:
- input: pipelineArtifact
displayName: 'Download azure-quantum artifacts'
targetPath: $(System.DefaultWorkingDirectory)/drop
targetPath: $(Pipeline.Workspace)/azure-quantum-wheels
artifactName: azure-quantum-wheels
steps:
- task: UsePythonVersion@0
Expand All @@ -156,7 +161,7 @@ extends:
displayName: Set Python version

- script: |
python set_version.py
python $(Pipeline.Workspace)/azure-quantum-wheels/set_version.py
env:
BUILD_TYPE: ${{ parameters.Build_Type }}
RELEASE_TYPE: ${{ parameters.Release_Type }}
Expand All @@ -170,12 +175,12 @@ extends:
)
displayName: Copy built "azure-quantum" package artifacts
inputs:
SourceFolder: '$(Pipeline.Workspace)/azure-quantum-wheels'
SourceFolder: '$(Pipeline.Workspace)/azure-quantum-wheels/wheels'
Contents: '**'
TargetFolder: '$(Build.ArtifactStagingDirectory)/target/wheels'
TargetFolder: '$(Build.ArtifactStagingDirectory)/artifacts/wheels'

- script: |
ls $(Build.ArtifactStagingDirectory)/target/wheels/*
ls $(Build.ArtifactStagingDirectory)/artifacts/wheels/*
displayName: List Py Artifacts to publish
- task: GitHubRelease@1
Expand All @@ -192,18 +197,23 @@ extends:
target: $(Build.SourceVersion)
addChangeLog: False
assets: |
$(Build.ArtifactStagingDirectory)/target/wheels/*
$(Build.ArtifactStagingDirectory)/artifacts/wheels/*
- task: EsrpRelease@4
- task: EsrpRelease@7
condition: ${{ parameters.Publish_Python_Package_To_PyPi }}
displayName: Publish "azure-quantum" package to PyPi
displayName: Sign and publish "azure-quantum" package to PyPi
inputs:
ConnectedServiceName: 'ESRP_Release'
ConnectedServiceName: 'ESRP Signing Connection'
KeyVaultName: 'kv-aqua-esrp-001'
AuthCertName: 'EsrpAuthCert'
SignCertName: 'EsrpSignCert'
ClientId: '832c049d-cd07-4c1c-bfa5-c07250d190cb'
Intent: 'PackageDistribution'
ContentType: 'PyPi'
FolderLocation: '$(Build.ArtifactStagingDirectory)/target/wheels'
Owners: '$(OwnerPersonalAlias)@microsoft.com' # NB: Group email here fails the task with non-actionable output.
FolderLocation: '$(Build.ArtifactStagingDirectory)/artifacts/wheels'
WaitForReleaseCompletion: true
Owners: '$(OwnerPersonalAlias)@microsoft.com' # Group email here fails the task with non-actionable output.
Approvers: '[email protected]'
ServiceEndpointUrl: 'https://api.esrp.microsoft.com'
MainPublisher: 'QuantumDevelpmentKit'
DomainTenantId: '72f988bf-86f1-41af-91ab-2d7cd011db47'
MainPublisher: 'ESRPRELPACMAN' # Default ESRP v7 publisher. Do not change.
DomainTenantId: '72f988bf-86f1-41af-91ab-2d7cd011db47'
200 changes: 198 additions & 2 deletions azure-quantum/azure/quantum/job/job.py
Original file line number Diff line number Diff line change
Expand Up @@ -108,6 +108,8 @@ def get_results(self, timeout_secs: float = DEFAULT_TIMEOUT):
Raises :class:`RuntimeError` if job execution fails.
Raises :class:`ValueError` if job output is malformed or output format is not compatible.
Raises :class:`azure.quantum.job.JobFailedWithResultsError` if job execution fails,
but failure results could still be retrieved (e.g. for jobs submitted against "microsoft.dft" target).
Expand Down Expand Up @@ -142,21 +144,215 @@ def get_results(self, timeout_secs: float = DEFAULT_TIMEOUT):

if self.details.output_data_format == "microsoft.quantum-results.v1":
if "Histogram" not in results:
raise f"\"Histogram\" array was expected to be in the Job results for \"{self.details.output_data_format}\" output format."
raise ValueError(f"\"Histogram\" array was expected to be in the Job results for \"{self.details.output_data_format}\" output format.")

histogram_values = results["Histogram"]

if len(histogram_values) % 2 == 0:
# Re-mapping {'Histogram': ['[0]', 0.50, '[1]', 0.50] } to {'[0]': 0.50, '[1]': 0.50}
return {histogram_values[i]: histogram_values[i + 1] for i in range(0, len(histogram_values), 2)}
else:
raise f"\"Histogram\" array has invalid format. Even number of items is expected."
raise ValueError(f"\"Histogram\" array has invalid format. Even number of items is expected.")
elif self.details.output_data_format == "microsoft.quantum-results.v2":
if "DataFormat" not in results or results["DataFormat"] != "microsoft.quantum-results.v2":
raise ValueError(f"\"DataFormat\" was expected to be \"microsoft.quantum-results.v2\" in the Job results for \"{self.details.output_data_format}\" output format.")

if "Results" not in results:
raise ValueError(f"\"Results\" field was expected to be in the Job results for \"{self.details.output_data_format}\" output format.")

if len(results["Results"]) < 1:
raise ValueError("\"Results\" array was expected to contain at least one item")

results = results["Results"][0]

if "Histogram" not in results:
raise ValueError(f"\"Histogram\" array was expected to be in the Job results for \"{self.details.output_data_format}\" output format.")

if "Shots" not in results:
raise ValueError(f"\"Shots\" array was expected to be in the Job results for \"{self.details.output_data_format}\" output format.")

histogram_values = results["Histogram"]

total_count = len(results["Shots"])

# Re-mapping object {'Histogram': [{"Outcome": [0], "Display": '[0]', "Count": 500}, {"Outcome": [1], "Display": '[1]', "Count": 500}]} to {'[0]': 0.50, '[1]': 0.50}
return {outcome["Display"]: outcome["Count"] / total_count for outcome in histogram_values}

return results
except:
# If errors decoding the data, return the raw payload:
return payload

def get_results_histogram(self, timeout_secs: float = DEFAULT_TIMEOUT):
    """Get the job's results as a histogram of outcomes with absolute counts.

    Downloads the results blob from the storage container linked via the
    workspace and re-maps the "microsoft.quantum-results.v2" payload from
    {'Histogram': [{"Outcome": ..., "Display": ..., "Count": ...}, ...]}
    into {display_label: {"outcome": <converted outcome>, "count": <count>}}.
    For batch jobs (more than one entry in "Results") a list of such
    dictionaries is returned, one per batch entry.

    Raises :class:`RuntimeError` if job execution fails.
    Raises :class:`ValueError` if job output is malformed or the output
    format is not "microsoft.quantum-results.v2".
    Raises :class:`azure.quantum.job.JobFailedWithResultsError` if job execution fails,
    but failure results could still be retrieved (e.g. for jobs submitted against "microsoft.dft" target).

    :param timeout_secs: Timeout in seconds, defaults to 300
    :type timeout_secs: float
    :return: Histogram dictionary, or a list of histogram dictionaries for batch jobs.
    :rtype: typing.Any
    """
    if self.results is not None:
        return self.results

    if not self.has_completed():
        self.wait_until_completed(timeout_secs=timeout_secs)

    if not self.details.status == "Succeeded":
        if self.details.status == "Failed" and self._allow_failure_results():
            job_blob_properties = self.download_blob_properties(self.details.output_data_uri)
            if job_blob_properties.size > 0:
                job_failure_data = self.download_data(self.details.output_data_uri)
                raise JobFailedWithResultsError("An error occurred during job execution.", job_failure_data)

        raise RuntimeError(
            "Cannot retrieve results as job execution failed"
            + f"(status: {self.details.status}."
            + f"error: {self.details.error_data})"
        )

    payload = self.download_data(self.details.output_data_uri)
    # NOTE: no blanket try/except here — the original wrapped this in
    # `except Exception as e: raise e`, which is a no-op re-raise; decode
    # and validation errors propagate to the caller either way.
    payload = payload.decode("utf8")
    results = json.loads(payload)

    if self.details.output_data_format != "microsoft.quantum-results.v2":
        raise ValueError(f"Getting a results histogram with counts instead of probabilities is not a supported feature for jobs using the \"{self.details.output_data_format}\" output format.")

    if "DataFormat" not in results or results["DataFormat"] != "microsoft.quantum-results.v2":
        raise ValueError(f"\"DataFormat\" was expected to be \"microsoft.quantum-results.v2\" in the Job results for \"{self.details.output_data_format}\" output format.")
    if "Results" not in results:
        raise ValueError(f"\"Results\" field was expected to be in the Job results for \"{self.details.output_data_format}\" output format.")

    if len(results["Results"]) < 1:
        raise ValueError("\"Results\" array was expected to contain at least one item")

    results = results["Results"]

    if len(results) == 1:
        results = results[0]
        if "Histogram" not in results:
            raise ValueError(f"\"Histogram\" array was expected to be in the Job results for \"{self.details.output_data_format}\" output format.")

        histogram_values = results["Histogram"]
        outcome_keys = self._process_outcome(histogram_values)

        # Re-mapping object {'Histogram': [{"Outcome": [0], "Display": '[0]', "Count": 500}, {"Outcome": [1], "Display": '[1]', "Count": 500}]} to {'[0]': {"Outcome": [0], "Count": 500}, '[1]': {"Outcome": [1], "Count": 500}}
        return {hist_val["Display"]: {"outcome": outcome, "count": hist_val["Count"]} for outcome, hist_val in zip(outcome_keys, histogram_values)}

    # This is handling the BatchResults edge case: one histogram per batch entry.
    resultsArray = []
    for i, result in enumerate(results):
        if "Histogram" not in result:
            raise ValueError(f"\"Histogram\" array was expected to be in the Job results for result {i} for \"{self.details.output_data_format}\" output format.")

        histogram_values = result["Histogram"]
        outcome_keys = self._process_outcome(histogram_values)

        # Same re-mapping as the single-result case, applied per batch entry.
        resultsArray.append({hist_val["Display"]: {"outcome": outcome, "count": hist_val["Count"]} for outcome, hist_val in zip(outcome_keys, histogram_values)})

    return resultsArray

def get_results_shots(self, timeout_secs: float = DEFAULT_TIMEOUT):
    """Get the job's per-shot result data.

    Downloads the results blob from the storage container linked via the
    workspace and returns the "Shots" array of the
    "microsoft.quantum-results.v2" payload, with serialized tuples
    converted to Python tuples via ``self._convert_tuples``. For batch
    jobs (more than one entry in "Results") a list of shot lists is
    returned, one per batch entry.

    Raises :class:`RuntimeError` if job execution fails.
    Raises :class:`ValueError` if job output is malformed or the output
    format is not "microsoft.quantum-results.v2".
    Raises :class:`azure.quantum.job.JobFailedWithResultsError` if job execution fails,
    but failure results could still be retrieved (e.g. for jobs submitted against "microsoft.dft" target).

    :param timeout_secs: Timeout in seconds, defaults to 300
    :type timeout_secs: float
    :return: List of per-shot results, or a list of such lists for batch jobs.
    :rtype: typing.Any
    """
    if self.results is not None:
        return self.results

    if not self.has_completed():
        self.wait_until_completed(timeout_secs=timeout_secs)

    if not self.details.status == "Succeeded":
        if self.details.status == "Failed" and self._allow_failure_results():
            job_blob_properties = self.download_blob_properties(self.details.output_data_uri)
            if job_blob_properties.size > 0:
                job_failure_data = self.download_data(self.details.output_data_uri)
                raise JobFailedWithResultsError("An error occurred during job execution.", job_failure_data)

        raise RuntimeError(
            "Cannot retrieve results as job execution failed"
            + f"(status: {self.details.status}."
            + f"error: {self.details.error_data})"
        )

    payload = self.download_data(self.details.output_data_uri)
    # NOTE: no blanket try/except here — the original wrapped this in
    # `except Exception as e: raise e`, which is a no-op re-raise; decode
    # and validation errors propagate to the caller either way.
    payload = payload.decode("utf8")
    results = json.loads(payload)

    if self.details.output_data_format != "microsoft.quantum-results.v2":
        raise ValueError(f"Individual shot results are not supported for jobs using the \"{self.details.output_data_format}\" output format.")

    if "DataFormat" not in results or results["DataFormat"] != "microsoft.quantum-results.v2":
        raise ValueError(f"\"DataFormat\" was expected to be \"microsoft.quantum-results.v2\" in the Job results for \"{self.details.output_data_format}\" output format.")
    if "Results" not in results:
        raise ValueError(f"\"Results\" field was expected to be in the Job results for \"{self.details.output_data_format}\" output format.")

    results = results["Results"]

    if len(results) < 1:
        raise ValueError("\"Results\" array was expected to contain at least one item")

    if len(results) == 1:
        result = results[0]
        if "Shots" not in result:
            raise ValueError(f"\"Shots\" array was expected to be in the Job results for \"{self.details.output_data_format}\" output format.")

        return [self._convert_tuples(shot) for shot in result["Shots"]]

    # This is handling the BatchResults edge case: one shots list per batch entry.
    shotsArray = []
    for i, result in enumerate(results):
        if "Shots" not in result:
            raise ValueError(f"\"Shots\" array was expected to be in the Job results for result {i} of \"{self.details.output_data_format}\" output format.")
        shotsArray.append([self._convert_tuples(shot) for shot in result["Shots"]])

    return shotsArray

def _process_outcome(self, histogram_results):
return [self._convert_tuples(v['Outcome']) for v in histogram_results]

def _convert_tuples(self, data):
if isinstance(data, dict):
# Check if the dictionary represents a tuple
if all(isinstance(k, str) and k.startswith("Item") for k in data.keys()):
# Convert the dictionary to a tuple
return tuple(self._convert_tuples(data[f"Item{i+1}"]) for i in range(len(data)))
else:
raise "Malformed tuple output"
elif isinstance(data, list):
# Recursively process list elements
return [self._convert_tuples(item) for item in data]
else:
# Return the data as is (int, string, etc.)
return data

@classmethod
def _allow_failure_results(cls) -> bool:
Expand Down
2 changes: 0 additions & 2 deletions azure-quantum/azure/quantum/qiskit/backends/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -7,10 +7,8 @@

from azure.quantum.qiskit.backends.ionq import (
IonQBackend,
IonQQPUBackend,
IonQAriaBackend,
IonQSimulatorBackend,
IonQQPUQirBackend,
IonQAriaQirBackend,
IonQForteBackend,
IonQForteQirBackend,
Expand Down
Loading

0 comments on commit 46876a8

Please sign in to comment.