
Commit

Merge branch 'main' into feat/uninitialized-array
mark-koch authored Nov 14, 2024
2 parents 9c91f8b + e63878f commit f346512
Showing 35 changed files with 607 additions and 209 deletions.
18 changes: 13 additions & 5 deletions .github/workflows/python-wheels.yml
@@ -9,6 +9,11 @@ name: Build and publish python wheels
# this workflow will publish the wheels to pypi.
# If the version is already published, pypi just ignores it.


permissions:
# This permission is required for trusted publishing on pypi
id-token: write

on:
workflow_dispatch:
push:
@@ -22,6 +27,7 @@ jobs:
build-publish:
name: Build and publish wheels
runs-on: ubuntu-latest
environment: pypi
strategy:
matrix:
package:
@@ -67,8 +73,10 @@ jobs:
echo " - event_name: ${{ github.event_name }}"
echo " - ref_type: ${{ github.ref_type }}"
echo " - ref: ${{ github.ref }}"
uvx twine upload --skip-existing --verbose dist/*
env:
TWINE_NON_INTERACTIVE: 1
TWINE_USERNAME: __token__
TWINE_PASSWORD: ${{ secrets.PYPI_PUBLISH }}
- name: Publish package distributions to PyPI
# This workflow is a trusted publisher on PyPI for this package.
if: ${{ (github.event_name == 'release' && github.ref_type == 'tag' && startsWith(github.ref, format('refs/tags/{0}-v', matrix.package.dir))) || (github.event_name == 'workflow_dispatch' && github.ref_type == 'tag' && startsWith(github.ref, format('refs/tags/{0}-v', matrix.package.dir))) }}
uses: pypa/gh-action-pypi-publish@release/v1
with:
verbose: true
skip-existing: true
6 changes: 3 additions & 3 deletions .github/workflows/unsoundness.yml
@@ -24,7 +24,6 @@ jobs:
miri:
name: "Miri"
runs-on: ubuntu-latest
continue-on-error: true
steps:
- uses: actions/checkout@v4
- name: Install Miri
@@ -44,11 +43,12 @@
notify-slack:
uses: CQCL/hugrverse-actions/.github/workflows/slack-notifier.yml@main
needs: miri
if: ${{ needs.miri.result == 'failure' && ( github.event_name == 'workflow_dispatch' || github.event_name == 'push' ) }}
if: always() && needs.miri.result == 'failure' && github.event_name == 'push'
with:
channel-id: 'C04SHCL4FKP'
slack-message: |
💥 The unsoundness check for `CQCL/hugr` failed. <https://github.com/CQCL/hugr/actions/runs/${{ github.run_id }}|Please investigate>.
💥 The unsoundness check for `CQCL/hugr` failed.
<https://github.com/CQCL/hugr/actions/runs/${{ github.run_id }}|Please investigate>.
# Rate-limit the message to once per day
timeout-minutes: 1440
# A repository variable used to store the last message timestamp.
2 changes: 1 addition & 1 deletion .pre-commit-config.yaml
@@ -81,7 +81,7 @@ repos:
- id: cargo-clippy
name: cargo clippy
description: Run clippy lints with `cargo clippy`.
entry: cargo clippy --all-features --workspace -- -D warnings
entry: cargo clippy --all-targets --all-features --workspace -- -D warnings
language: system
files: \.rs$
pass_filenames: false
8 changes: 6 additions & 2 deletions hugr-cli/Cargo.toml
@@ -1,6 +1,6 @@
[package]
name = "hugr-cli"
version = "0.13.2"
version = "0.13.3"
edition = { workspace = true }
rust-version = { workspace = true }
license = { workspace = true }
@@ -12,11 +12,14 @@ description = "Compiler passes for Quantinuum's HUGR"
keywords = ["Quantum", "Quantinuum"]
categories = ["compilers"]

[lib]
bench = false

[dependencies]
clap = { workspace = true, features = ["derive"] }
clap-verbosity-flag.workspace = true
derive_more = { workspace = true, features = ["display", "error", "from"] }
hugr = { path = "../hugr", version = "0.13.2" }
hugr = { path = "../hugr", version = "0.13.3" }
serde_json.workspace = true
serde.workspace = true
thiserror.workspace = true
@@ -42,3 +45,4 @@ rstest.workspace = true
name = "hugr"
path = "src/main.rs"
doc = false
bench = false
12 changes: 12 additions & 0 deletions hugr-core/CHANGELOG.md
@@ -1,5 +1,17 @@
# Changelog

## [0.13.3](https://github.com/CQCL/hugr/compare/hugr-core-v0.13.2...hugr-core-v0.13.3) - 2024-11-06

### Bug Fixes

- Insert DFG directly as a funcdefn in `Package::from_hugr` ([#1621](https://github.com/CQCL/hugr/pull/1621))

### New Features

- `HugrMut::remove_metadata` ([#1619](https://github.com/CQCL/hugr/pull/1619))
- Operation and constructor declarations in `hugr-model` ([#1605](https://github.com/CQCL/hugr/pull/1605))
- Add TailLoop::BREAK_TAG and CONTINUE_TAG ([#1626](https://github.com/CQCL/hugr/pull/1626))

## [0.13.2](https://github.com/CQCL/hugr/compare/hugr-core-v0.13.1...hugr-core-v0.13.2) - 2024-10-22

### Bug Fixes
7 changes: 5 additions & 2 deletions hugr-core/Cargo.toml
@@ -1,6 +1,6 @@
[package]
name = "hugr-core"
version = "0.13.2"
version = "0.13.3"
edition = { workspace = true }
rust-version = { workspace = true }

@@ -21,6 +21,9 @@ extension_inference = []
declarative = ["serde_yaml"]
model_unstable = ["hugr-model"]

[lib]
bench = false

[[test]]
name = "model"
required-features = ["model_unstable"]
@@ -51,7 +54,7 @@ paste = { workspace = true }
strum = { workspace = true }
strum_macros = { workspace = true }
semver = { version = "1.0.23", features = ["serde"] }
hugr-model = { version = "0.13.2", path = "../hugr-model", optional = true }
hugr-model = { version = "0.14.0", path = "../hugr-model", optional = true }
indexmap.workspace = true
fxhash.workspace = true
bumpalo = { workspace = true, features = ["collections"] }
63 changes: 58 additions & 5 deletions hugr-core/src/export.rs
@@ -1,7 +1,7 @@
//! Exporting HUGR graphs to their `hugr-model` representation.
use crate::{
extension::{ExtensionId, ExtensionSet, OpDef, SignatureFunc},
hugr::IdentList,
hugr::{IdentList, NodeMetadataMap},
ops::{DataflowBlock, OpName, OpTrait, OpType},
types::{
type_param::{TypeArgVariable, TypeParam},
@@ -21,6 +21,8 @@ type FxIndexSet<T> = IndexSet<T, fxhash::FxBuildHasher>;

pub(crate) const OP_FUNC_CALL_INDIRECT: &str = "func.call-indirect";
const TERM_PARAM_TUPLE: &str = "param.tuple";
const TERM_JSON: &str = "prelude.json";
const META_DESCRIPTION: &str = "docs.description";

/// Export a [`Hugr`] graph to its representation in the model.
pub fn export_hugr<'a>(hugr: &'a Hugr, bump: &'a Bump) -> model::Module<'a> {
@@ -392,14 +394,19 @@ impl<'a> Context<'a> {
let inputs = self.make_ports(node, Direction::Incoming, num_inputs);
let outputs = self.make_ports(node, Direction::Outgoing, num_outputs);

let meta = match self.hugr.get_node_metadata(node) {
Some(metadata_map) => self.export_node_metadata(metadata_map),
None => &[],
};

// Replace the placeholder node with the actual node.
*self.module.get_node_mut(node_id).unwrap() = model::Node {
operation,
inputs,
outputs,
params,
regions,
meta: &[], // TODO: Export metadata
meta,
signature,
};

@@ -435,7 +442,7 @@ impl<'a> Context<'a> {
outputs: &[],
params: &[],
regions: &[],
meta: &[], // TODO: Metadata
meta: &[],
signature: None,
}))
}
@@ -452,8 +459,29 @@ impl<'a> Context<'a> {
decl
});

self.module.get_node_mut(node).unwrap().operation =
model::Operation::DeclareOperation { decl };
let meta = {
let description = Some(opdef.description()).filter(|d| !d.is_empty());
let meta_len = opdef.iter_misc().len() + description.is_some() as usize;
let mut meta = BumpVec::with_capacity_in(meta_len, self.bump);

if let Some(description) = description {
let name = META_DESCRIPTION;
let value = self.make_term(model::Term::Str(self.bump.alloc_str(description)));
meta.push(model::MetaItem { name, value })
}

for (name, value) in opdef.iter_misc() {
let name = self.bump.alloc_str(name);
let value = self.export_json(value);
meta.push(model::MetaItem { name, value });
}

self.bump.alloc_slice_copy(&meta)
};

let node_data = self.module.get_node_mut(node).unwrap();
node_data.operation = model::Operation::DeclareOperation { decl };
node_data.meta = meta;

model::GlobalRef::Direct(node)
}
@@ -843,6 +871,31 @@ impl<'a> Context<'a> {

self.make_term(model::Term::ExtSet { extensions, rest })
}

pub fn export_node_metadata(
&mut self,
metadata_map: &NodeMetadataMap,
) -> &'a [model::MetaItem<'a>] {
let mut meta = BumpVec::with_capacity_in(metadata_map.len(), self.bump);

for (name, value) in metadata_map {
let name = self.bump.alloc_str(name);
let value = self.export_json(value);
meta.push(model::MetaItem { name, value });
}

meta.into_bump_slice()
}

pub fn export_json(&mut self, value: &serde_json::Value) -> model::TermId {
let value = serde_json::to_string(value).expect("json values are always serializable");
let value = self.make_term(model::Term::Str(self.bump.alloc_str(&value)));
let value = self.bump.alloc_slice_copy(&[value]);
self.make_term(model::Term::ApplyFull {
global: model::GlobalRef::Named(TERM_JSON),
args: value,
})
}
}

#[cfg(test)]
5 changes: 5 additions & 0 deletions hugr-core/src/extension/op_def.rs
@@ -435,6 +435,11 @@ impl OpDef {
self.misc.insert(k.to_string(), v)
}

/// Iterate over all miscellaneous data in the [OpDef].
pub(crate) fn iter_misc(&self) -> impl ExactSizeIterator<Item = (&str, &serde_json::Value)> {
self.misc.iter().map(|(k, v)| (k.as_str(), v))
}

/// Set the constant folding function for this Op, which can evaluate it
/// given constant inputs.
pub fn set_constant_folder(&mut self, fold: impl ConstFold + 'static) {
28 changes: 22 additions & 6 deletions hugr-core/src/hugr/views/sibling_subgraph.rs
@@ -9,6 +9,7 @@
//! while the former provide views for subgraphs within a single level of the
//! hierarchy.
use std::cell::OnceCell;
use std::collections::HashSet;
use std::mem;

@@ -453,15 +454,24 @@ fn combine_in_out<'a>(
///
/// This can be used when constructing multiple sibling subgraphs to speed up
/// convexity checking.
pub struct TopoConvexChecker<'g, Base: 'g + HugrView>(
portgraph::algorithms::TopoConvexChecker<Base::Portgraph<'g>>,
);
pub struct TopoConvexChecker<'g, Base: 'g + HugrView> {
base: &'g Base,
checker: OnceCell<portgraph::algorithms::TopoConvexChecker<Base::Portgraph<'g>>>,
}

impl<'g, Base: HugrView> TopoConvexChecker<'g, Base> {
/// Create a new convexity checker.
pub fn new(base: &'g Base) -> Self {
let pg = base.portgraph();
Self(portgraph::algorithms::TopoConvexChecker::new(pg))
Self {
base,
checker: OnceCell::new(),
}
}

/// Returns the portgraph convexity checker, initializing it if necessary.
fn get_checker(&self) -> &portgraph::algorithms::TopoConvexChecker<Base::Portgraph<'g>> {
self.checker
.get_or_init(|| portgraph::algorithms::TopoConvexChecker::new(self.base.portgraph()))
}
}

@@ -472,7 +482,13 @@ impl<'g, Base: HugrView> ConvexChecker for TopoConvexChecker<'g, Base> {
inputs: impl IntoIterator<Item = portgraph::PortIndex>,
outputs: impl IntoIterator<Item = portgraph::PortIndex>,
) -> bool {
self.0.is_convex(nodes, inputs, outputs)
let mut nodes = nodes.into_iter().multipeek();
// If the node iterator contains less than two nodes, the subgraph is
// trivially convex.
if nodes.peek().is_none() || nodes.peek().is_none() {
return true;
};
self.get_checker().is_convex(nodes, inputs, outputs)
}
}

39 changes: 39 additions & 0 deletions hugr-core/src/import.rs
@@ -26,6 +26,8 @@ use itertools::Either;
use smol_str::{SmolStr, ToSmolStr};
use thiserror::Error;

const TERM_JSON: &str = "prelude.json";

type FxIndexMap<K, V> = IndexMap<K, V, fxhash::FxBuildHasher>;

/// Error during import.
@@ -184,6 +186,14 @@ impl<'a> Context<'a> {
let node_data = self.get_node(node_id)?;
self.record_links(node, Direction::Incoming, node_data.inputs);
self.record_links(node, Direction::Outgoing, node_data.outputs);

for meta_item in node_data.meta {
// TODO: For now we expect all metadata to be JSON since this is how
// it is handled in `hugr-core`.
let value = self.import_json_value(meta_item.value)?;
self.hugr.set_metadata(node, meta_item.name, value);
}

Ok(node)
}

@@ -1200,6 +1210,35 @@ impl<'a> Context<'a> {
}
}
}

fn import_json_value(
&mut self,
term_id: model::TermId,
) -> Result<serde_json::Value, ImportError> {
let (global, args) = match self.get_term(term_id)? {
model::Term::Apply { global, args } | model::Term::ApplyFull { global, args } => {
(global, args)
}
_ => return Err(model::ModelError::TypeError(term_id).into()),
};

if global != &GlobalRef::Named(TERM_JSON) {
return Err(model::ModelError::TypeError(term_id).into());
}

let [json_arg] = args else {
return Err(model::ModelError::TypeError(term_id).into());
};

let model::Term::Str(json_str) = self.get_term(*json_arg)? else {
return Err(model::ModelError::TypeError(term_id).into());
};

let json_value =
serde_json::from_str(json_str).map_err(|_| model::ModelError::TypeError(term_id))?;

Ok(json_value)
}
}

#[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)]
1 change: 1 addition & 0 deletions hugr-core/src/package.rs
@@ -402,6 +402,7 @@ mod test {
#[case::dfg("dfg", simple_dfg_hugr(), false)]
#[case::cfg("cfg", simple_cfg_hugr(), false)]
#[case::unsupported_input("input", simple_input_node(), true)]
#[cfg_attr(miri, ignore)] // Opening files is not supported in (isolated) miri
fn hugr_to_package(#[case] test_name: &str, #[case] hugr: Hugr, #[case] errors: bool) {
match (&Package::from_hugr(hugr), errors) {
(Ok(package), false) => {
Expand Down
