Integrate unit examples into codegen stack (#2590)
**Easier to review commit by commit**

This PR makes it so that our nice examples in docs/code-examples
automagically end up in docstrings for both Python & Rust.

Fixes #2509 

UPDATE: This also fixes a pretty serious bug in the versioning code... I'm not entirely sure how anything worked at all before, actually (??).

TL;DR: given this:

https://github.com/rerun-io/rerun/blob/0fc2a9fad2616bb3580176df37c488fc76a9a2ca/crates/re_types/definitions/rerun/archetypes/points2d.fbs#L16-L33

You get this

![image](https://github.com/rerun-io/rerun/assets/2910679/e36af2b6-5ae7-4790-9048-8816c140b8d0)

and this

![image](https://github.com/rerun-io/rerun/assets/2910679/feaf0b17-bc38-47c9-8f4f-0b476b54315b)


### What

### Checklist
* [x] I have read and agree to the [Contributor Guide](https://github.com/rerun-io/rerun/blob/main/CONTRIBUTING.md) and the [Code of Conduct](https://github.com/rerun-io/rerun/blob/main/CODE_OF_CONDUCT.md)
* [x] I've included a screenshot or gif (if applicable)
* [x] I have tested [demo.rerun.io](https://demo.rerun.io/pr/2590) (if applicable)

- [PR Build Summary](https://build.rerun.io/pr/2590)
- [Docs preview](https://rerun.io/preview/pr%3Acmc%2Fhope_docstring_examples/docs)
- [Examples preview](https://rerun.io/preview/pr%3Acmc%2Fhope_docstring_examples/examples)
teh-cmc authored Jul 5, 2023
1 parent 52f51c1 commit 2569eb7
Showing 14 changed files with 348 additions and 44 deletions.
15 changes: 11 additions & 4 deletions crates/re_types/build.rs
@@ -16,6 +16,7 @@ use re_build_tools::{

const SOURCE_HASH_PATH: &str = "./source_hash.txt";
const DEFINITIONS_DIR_PATH: &str = "./definitions";
const DOC_EXAMPLES_DIR_PATH: &str = "../../docs/code-examples";
const RUST_OUTPUT_DIR_PATH: &str = ".";
const PYTHON_OUTPUT_DIR_PATH: &str = "../../rerun_py/rerun_sdk/rerun/_rerun2";

@@ -36,22 +37,28 @@ fn main() {
}

rerun_if_changed_or_doesnt_exist(SOURCE_HASH_PATH);
for path in iter_dir(DEFINITIONS_DIR_PATH, Some(&[".fbs"])) {
for path in iter_dir(DEFINITIONS_DIR_PATH, Some(&["fbs"])) {
rerun_if_changed(&path);
}

// NOTE: We need to hash both the flatbuffers definitions as well as the source code of the
// code generator itself!
let cur_hash = read_versioning_hash(SOURCE_HASH_PATH);
let re_types_builder_hash = compute_crate_hash("re_types_builder");
let definitions_hash = compute_dir_hash(DEFINITIONS_DIR_PATH, Some(&[".fbs"]));
let new_hash = compute_strings_hash(&[&re_types_builder_hash, &definitions_hash]);
let definitions_hash = compute_dir_hash(DEFINITIONS_DIR_PATH, Some(&["fbs"]));
let doc_examples_hash = compute_dir_hash(DOC_EXAMPLES_DIR_PATH, Some(&["rs", "py"]));
let new_hash = compute_strings_hash(&[
&re_types_builder_hash,
&definitions_hash,
&doc_examples_hash,
]);

// Leave these be please, very useful when debugging.
eprintln!("re_types_builder_hash: {re_types_builder_hash:?}");
eprintln!("cur_hash: {cur_hash:?}");
eprintln!("definitions_hash: {definitions_hash:?}");
eprintln!("doc_examples_hash: {doc_examples_hash:?}");
eprintln!("new_hash: {new_hash:?}");
eprintln!("cur_hash: {cur_hash:?}");

if let Some(cur_hash) = cur_hash {
if cur_hash == new_hash {
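Note: the pattern in `build.rs` above — hash every codegen input, compare against the hash stored in `source_hash.txt`, and only regenerate when something changed — boils down to the following self-contained sketch. It uses the standard library's non-cryptographic hasher purely for illustration; the actual build script uses sha256-based helpers from `re_build_tools` (`compute_dir_hash`, `compute_strings_hash`, `read_versioning_hash`).

```rust
use std::collections::hash_map::DefaultHasher;
use std::hash::{Hash, Hasher};
use std::{fs, path::Path};

/// Hash the contents of every file directly inside `dir` that has one of the given
/// extensions. (Non-recursive and non-cryptographic: just enough to illustrate the idea.)
fn hash_dir(dir: &Path, extensions: &[&str]) -> std::io::Result<u64> {
    let mut paths: Vec<_> = fs::read_dir(dir)?
        .filter_map(Result::ok)
        .map(|entry| entry.path())
        .filter(|p| {
            p.extension()
                .and_then(|e| e.to_str())
                .map_or(false, |e| extensions.contains(&e))
        })
        .collect();
    paths.sort(); // deterministic ordering, so the hash is stable across runs

    let mut hasher = DefaultHasher::new();
    for path in paths {
        fs::read(&path)?.hash(&mut hasher);
    }
    Ok(hasher.finish())
}

fn main() -> std::io::Result<()> {
    let definitions_hash = hash_dir(Path::new("./definitions"), &["fbs"])?;
    let doc_examples_hash = hash_dir(Path::new("../../docs/code-examples"), &["rs", "py"])?;
    let new_hash = format!("{definitions_hash:016x}{doc_examples_hash:016x}");

    // Compare against the hash written out by the previous run; if the inputs are
    // untouched, the (expensive) code generation step can be skipped entirely.
    let cur_hash = fs::read_to_string("./source_hash.txt").ok();
    if cur_hash.as_deref().map(str::trim) == Some(new_hash.as_str()) {
        return Ok(()); // nothing changed
    }

    // ... run the actual code generator here ...

    fs::write("./source_hash.txt", &new_hash)?;
    Ok(())
}
```

Hashing `docs/code-examples` alongside the `.fbs` definitions is what makes an edit to an example re-trigger codegen, so the generated docstrings can never silently go stale.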
13 changes: 13 additions & 0 deletions crates/re_types/definitions/rerun/archetypes/points2d.fbs
@@ -14,6 +14,19 @@ namespace rerun.archetypes;
// TODO(#2521): always derive debug & clone for rust backend

/// A 2D point cloud with positions and optional colors, radii, labels, etc.
///
/// \py Example
/// \py -------
/// \py
/// \py ```python
/// \py \include:../../../../../docs/code-examples/point2d_simple_v2.py
/// \py ```
///
/// \rs ## Example
/// \rs
/// \rs ```ignore
/// \rs \include:../../../../../docs/code-examples/point2d_simple_v2.rs
/// \rs ```
table Points2D (
"attr.rust.derive": "Debug, Clone, PartialEq",
order: 100
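Note: conceptually, resolving an `\include:` directive is just "read the referenced example file and splice its lines into the doc comment". A simplified sketch of that expansion is below — this is not the actual `re_types_builder` implementation, which additionally handles the `\py` / `\rs` tag filtering shown above.

```rust
use std::{fs, path::Path};

/// Expand one doc line: if it is an `\include:<path>` directive, replace it with the
/// contents of the referenced example file; otherwise keep it untouched.
fn expand_doc_line(fbs_dir: &Path, line: &str) -> Vec<String> {
    if let Some(rel_path) = line.trim().strip_prefix(r"\include:") {
        let path = fbs_dir.join(rel_path);
        let contents = fs::read_to_string(&path)
            .unwrap_or_else(|err| panic!("failed to read {}: {err}", path.display()));
        contents.lines().map(ToOwned::to_owned).collect()
    } else {
        vec![line.to_owned()]
    }
}

fn main() {
    // Directory containing the .fbs file, so relative include paths resolve the same
    // way they do in the definition above.
    let fbs_dir = Path::new("crates/re_types/definitions/rerun/archetypes");

    let doc_lines = [
        r"```ignore",
        r"\include:../../../../../docs/code-examples/point2d_simple_v2.rs",
        r"```",
    ];

    for line in doc_lines {
        for expanded in expand_doc_line(fbs_dir, line) {
            println!("/// {expanded}"); // Rust doc comment; the Python pass emits a docstring instead
        }
    }
}
```

The Rust pass emits the spliced lines as `#[doc = ...]` attributes (see the generated `points2d.rs` below), while the Python pass folds them into the class docstring.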
2 changes: 1 addition & 1 deletion crates/re_types/source_hash.txt
@@ -1,4 +1,4 @@
# This is a sha256 hash for all direct and indirect dependencies of this crate's build script.
# It can be safely removed at anytime to force the build script to run again.
# Check out build.rs to see how it's computed.
5cac23400a6084eaaa53fba54885d12daefa92fdbdb057c8221ec343dcdd9586
d828fdb05c35a01b92dd30b7ce40987b87c5dfe8d1bb8f729004ae88b62c830c
30 changes: 30 additions & 0 deletions crates/re_types/src/archetypes/points2d.rs
@@ -9,6 +9,36 @@
#![allow(clippy::unnecessary_cast)]

#[doc = "A 2D point cloud with positions and optional colors, radii, labels, etc."]
#[doc = ""]
#[doc = "## Example"]
#[doc = ""]
#[doc = "```ignore"]
#[doc = "//! Log some very simple points."]
#[doc = ""]
#[doc = "use rerun::{"]
#[doc = " components::{Rect2D, Vec4D},"]
#[doc = " experimental::archetypes::Points2D,"]
#[doc = " MsgSender, RecordingStreamBuilder,"]
#[doc = "};"]
#[doc = ""]
#[doc = "fn main() -> Result<(), Box<dyn std::error::Error>> {"]
#[doc = " let (rec_stream, storage) = RecordingStreamBuilder::new(\"points\").memory()?;"]
#[doc = ""]
#[doc = " MsgSender::from_archetype(\"points\", &Points2D::new([(0.0, 0.0), (1.0, 1.0)]))?"]
#[doc = " .send(&rec_stream)?;"]
#[doc = ""]
#[doc = " // Log an extra rect to set the view bounds"]
#[doc = " MsgSender::new(\"bounds\")"]
#[doc = " .with_component(&[Rect2D::XCYCWH(Vec4D([0.0, 0.0, 4.0, 3.0]))])?"]
#[doc = " .send(&rec_stream)?;"]
#[doc = ""]
#[doc = " rec_stream.flush_blocking();"]
#[doc = ""]
#[doc = " rerun::native_viewer::show(storage.take())?;"]
#[doc = ""]
#[doc = " Ok(())"]
#[doc = "}"]
#[doc = "```"]
#[derive(Debug, Clone, PartialEq)]
pub struct Points2D {
#[doc = "All the actual 2D points that make up the point cloud."]
25 changes: 18 additions & 7 deletions crates/re_types_builder/src/codegen/python.rs
@@ -1,6 +1,7 @@
//! Implements the Python codegen pass.
use anyhow::Context as _;
use itertools::Itertools;
use std::{
collections::{BTreeMap, HashMap, HashSet},
io::Write,
@@ -279,7 +280,8 @@ impl QuotedObject {
assert!(obj.is_struct());

let Object {
filepath,
virtpath,
filepath: _,
fqname: _,
pkg_name: _,
name,
@@ -320,6 +322,7 @@
.chain(fields.iter().filter(|field| field.is_nullable));
for field in fields_in_order {
let ObjectField {
virtpath: _,
filepath: _,
fqname: _,
pkg_name: _,
@@ -370,7 +373,7 @@

code.push_text(quote_arrow_support_from_obj(arrow_registry, obj), 1, 0);

let mut filepath = PathBuf::from(filepath);
let mut filepath = PathBuf::from(virtpath);
filepath.set_extension("py");

Self {
@@ -384,7 +387,8 @@
assert!(!obj.is_struct());

let Object {
filepath,
virtpath,
filepath: _,
fqname: _,
pkg_name: _,
name,
@@ -413,6 +417,7 @@

for field in fields {
let ObjectField {
virtpath: _,
filepath: _,
fqname: _,
pkg_name: _,
@@ -439,7 +444,7 @@
code.push_text(quote_aliases_from_object(obj), 1, 0);
code.push_text(quote_arrow_support_from_obj(arrow_registry, obj), 1, 0);

let mut filepath = PathBuf::from(filepath);
let mut filepath = PathBuf::from(virtpath);
filepath.set_extension("py");

Self {
@@ -469,7 +474,13 @@ fn quote_doc_from_docs(docs: &Docs) -> String {
return String::new();
}

let doc = lines.join("\n");
// NOTE: Filter out docstrings within docstrings, it just gets crazy otherwise...
let doc = lines
.into_iter()
.filter(|line| !line.starts_with(r#"""""#))
.collect_vec()
.join("\n");

format!("\"\"\"\n{doc}\n\"\"\"\n\n")
}

@@ -762,7 +773,7 @@ fn quote_arrow_support_from_obj(arrow_registry: &ArrowRegistry, obj: &Object) ->
fqname,
name,
kind,
filepath,
virtpath,
..
} = obj;

@@ -776,7 +787,7 @@ fn quote_arrow_support_from_obj(arrow_registry: &ArrowRegistry, obj: &Object) ->
let many_aliases = format!("{name}ArrayLike");
let arrow = format!("{name}Type");

let mut filepath = PathBuf::from(filepath);
let mut filepath = PathBuf::from(virtpath);
filepath.set_extension("py");
let filename = filepath.file_stem().unwrap().to_string_lossy();

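Note: the `quote_doc_from_docs` tweak above exists because an included Python example may itself start with a module-level docstring; copying its `"""` line verbatim into the generated docstring would terminate that docstring early. A minimal sketch of the guard (the `rr.log_points(...)` line is just a hypothetical stand-in for the example body):

```rust
/// Wrap doc lines into a Python docstring, dropping any line that itself starts with
/// `"""` — a nested triple-quote would otherwise close the outer docstring early.
fn quote_docstring(lines: &[&str]) -> String {
    let body: Vec<&str> = lines
        .iter()
        .copied()
        .filter(|line| !line.starts_with(r#"""""#))
        .collect();
    format!("\"\"\"\n{}\n\"\"\"\n", body.join("\n"))
}

fn main() {
    let lines = [
        "A 2D point cloud with positions and optional colors, radii, labels, etc.",
        "",
        "Example",
        "-------",
        "```python",
        r#""""Log some very simple points.""""#, // module docstring of the included example
        "rr.log_points(...)",
        "```",
    ];
    print!("{}", quote_docstring(&lines));
}
```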
47 changes: 40 additions & 7 deletions crates/re_types_builder/src/codegen/rust.rs
@@ -128,11 +128,38 @@ fn create_files(
code.push_text("#![allow(clippy::unnecessary_cast)]", 2, 0);

for obj in objs {
let tokens_str = obj.tokens.to_string();
let mut acc = TokenStream::new();

// NOTE: `TokenStream`s discard whitespacing information by definition, so we need to
// inject some of our own when writing to file.
let tokens_str = tokens_str
// inject some of our own when writing to file... while making sure that don't inject
// random spacing into doc comments that look like code!

let mut tokens = obj.tokens.into_iter();
while let Some(token) = tokens.next() {
match &token {
// If this is a doc-comment block, be smart about it.
proc_macro2::TokenTree::Punct(punct) if punct.as_char() == '#' => {
let tokens_str = acc
.to_string()
.replace('}', "}\n\n")
.replace("] ;", "];\n\n")
.replace("# [doc", "\n\n#[doc")
.replace("impl ", "\n\nimpl ");
code.push_text(tokens_str, 1, 0);
acc = TokenStream::new();

acc.extend([token, tokens.next().unwrap()]);
code.push_text(acc.to_string(), 1, 0);
acc = TokenStream::new();
}
_ => {
acc.extend([token]);
}
}
}

let tokens_str = acc
.to_string()
.replace('}', "}\n\n")
.replace("] ;", "];\n\n")
.replace("# [doc", "\n\n#[doc")
@@ -195,7 +222,8 @@ impl QuotedObject {
assert!(obj.is_struct());

let Object {
filepath,
virtpath,
filepath: _,
fqname: _,
pkg_name: _,
name,
@@ -241,7 +269,7 @@

Self {
filepath: {
let mut filepath = PathBuf::from(filepath);
let mut filepath = PathBuf::from(virtpath);
filepath.set_extension("rs");
filepath
},
@@ -254,7 +282,8 @@
assert!(!obj.is_struct());

let Object {
filepath,
virtpath,
filepath: _,
fqname: _,
pkg_name: _,
name,
Expand All @@ -274,6 +303,7 @@ impl QuotedObject {

let quoted_fields = fields.iter().map(|obj_field| {
let ObjectField {
virtpath: _,
filepath: _,
fqname: _,
pkg_name: _,
@@ -312,7 +342,7 @@

Self {
filepath: {
let mut filepath = PathBuf::from(filepath);
let mut filepath = PathBuf::from(virtpath);
filepath.set_extension("rs");
filepath
},
@@ -331,6 +361,7 @@ impl quote::ToTokens for ObjectFieldTokenizer<'_> {
let Self(obj, obj_field) = self;

let ObjectField {
virtpath: _,
filepath: _,
pkg_name: _,
fqname: _,
@@ -474,6 +505,7 @@ fn quote_trait_impls_from_obj(
obj: &Object,
) -> TokenStream {
let Object {
virtpath: _,
filepath: _,
fqname,
pkg_name: _,
@@ -790,6 +822,7 @@ fn quote_builder_from_obj(obj: &Object) -> TokenStream {
}

let Object {
virtpath: _,
filepath: _,
fqname: _,
pkg_name: _,
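Note: the reworked loop in `create_files` is needed because the pretty-printing pass operates on `TokenStream::to_string()` output with blunt string replacements; once doc comments embed whole code examples, those replacements would also fire inside the quoted example text. A tiny sketch of the failure mode (assuming the `quote` / `proc-macro2` crates as dependencies):

```rust
use quote::quote; // proc-macro2 token streams, usable outside of proc-macros

fn main() {
    // A #[doc] attribute whose text itself looks like code, which is exactly what the
    // generated archetypes now contain once examples are embedded in their docstrings.
    let tokens = quote! {
        #[doc = "fn main() { do_thing(); }"]
        pub struct Points2D;
    };

    // `TokenStream::to_string()` flattens everything onto one line...
    let flat = tokens.to_string();

    // ...and the naive whitespace injection happily rewrites the *contents* of the doc
    // string literal too, sprinkling newlines into the middle of the docstring:
    let mangled = flat.replace('}', "}\n\n").replace("# [doc", "\n\n#[doc");
    println!("{mangled}");

    // Hence the new code path: doc-comment attribute tokens are split off and emitted
    // verbatim, and only the remaining (real code) tokens go through the replacements.
}
```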
11 changes: 10 additions & 1 deletion crates/re_types_builder/src/lib.rs
@@ -113,6 +113,8 @@
)]
mod reflection;

use anyhow::Context;

pub use self::reflection::reflection::{
root_as_schema, BaseType as FbsBaseType, Enum as FbsEnum, EnumVal as FbsEnumVal,
Field as FbsField, KeyValue as FbsKeyValue, Object as FbsObject, Schema as FbsSchema,
@@ -212,14 +214,21 @@ fn generate_lang_agnostic(
let entrypoint_path = entrypoint_path.as_ref();
let entrypoint_filename = entrypoint_path.file_name().unwrap();

let include_dir_path = include_dir_path.as_ref();
let include_dir_path = include_dir_path
.canonicalize()
.with_context(|| format!("failed to canonicalize include path: {include_dir_path:?}"))
.unwrap();

// generate bfbs definitions
compile_binary_schemas(include_dir_path, tmp.path(), entrypoint_path);
compile_binary_schemas(&include_dir_path, tmp.path(), entrypoint_path);

let mut binary_entrypoint_path = PathBuf::from(entrypoint_filename);
binary_entrypoint_path.set_extension("bfbs");

// semantic pass: high level objects from low-level reflection data
let mut objects = Objects::from_buf(
include_dir_path,
sh.read_binary_file(tmp.path().join(binary_entrypoint_path))
.unwrap()
.as_slice(),