From 60cdf44493f4c6d2bff22fc9ef96cf76c8b1c38c Mon Sep 17 00:00:00 2001 From: Cameron Taggart Date: Thu, 29 Oct 2020 23:03:30 -0600 Subject: [PATCH] create identifier module (#74) * add error report * parse Ident * fix merge * create identifier mod * begin redis testing * fmt --- Cargo.lock | 11 +- codegen/Cargo.toml | 7 +- codegen/examples/gen_mgmt.rs | 82 +++++--- codegen/src/codegen.rs | 271 ++++++++++++-------------- codegen/src/identifier.rs | 223 +++++++++++++++++++++ codegen/src/lib.rs | 30 ++- codegen/src/lib_rs.rs | 34 ++-- codegen/src/path.rs | 1 - codegen/src/reference.rs | 13 ++ codegen/src/spec.rs | 42 ++-- codegen/tests/azure_rest_api_specs.rs | 29 ++- codegen/tests/redis_spec.rs | 75 +++++++ 12 files changed, 587 insertions(+), 231 deletions(-) create mode 100644 codegen/src/identifier.rs create mode 100644 codegen/tests/redis_spec.rs diff --git a/Cargo.lock b/Cargo.lock index 4e43ee5..25d2044 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -62,12 +62,13 @@ dependencies = [ "serde_json", "serde_yaml", "snafu", + "syn", ] [[package]] name = "autorust_openapi" version = "0.2.0" -source = "git+https://github.com/ctaggart/autorust_openapi#181baf2b89d32c013258e40b59d3e00bb501c9c4" +source = "git+https://github.com/ctaggart/autorust_openapi?rev=181baf2b89d32c013258e40b59d3e00bb501c9c4#181baf2b89d32c013258e40b59d3e00bb501c9c4" dependencies = [ "indexmap", "serde", @@ -293,9 +294,9 @@ checksum = "e2abad23fbc42b3700f2f279844dc832adb2b2eb069b2df918f455c4e18cc646" [[package]] name = "libc" -version = "0.2.79" +version = "0.2.80" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2448f6066e80e3bfc792e9c98bf705b4b0fc6e8ef5b43e5889aff0eaa9c58743" +checksum = "4d58d1b70b004888f764dfbf6a26a3b0342a1632d33968e4a179d8011c760614" [[package]] name = "linked-hash-map" @@ -311,9 +312,9 @@ checksum = "3e2e65a1a2e43cfcb47a895c4c8b10d1f4a61097f9f254f183aee60cad9c651d" [[package]] name = "memchr" -version = "2.3.3" +version = "2.3.4" source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "3728d817d99e5ac407411fa471ff9800a778d88a24685968b36824eaf4bee400" +checksum = "0ee1c47aaa256ecabcaea351eae4a9b01ef39ed810004e298d2511ed284b1525" [[package]] name = "opaque-debug" diff --git a/codegen/Cargo.toml b/codegen/Cargo.toml index bcdb5fb..1899446 100644 --- a/codegen/Cargo.toml +++ b/codegen/Cargo.toml @@ -7,7 +7,7 @@ edition = "2018" [lib] [dependencies] -autorust_openapi = { git = "https://github.com/ctaggart/autorust_openapi" } +autorust_openapi = { rev = "181baf2b89d32c013258e40b59d3e00bb501c9c4", git = "https://github.com/ctaggart/autorust_openapi" } # autorust_openapi = { path = "../autorust_openapi" } quote = "*" proc-macro2 = { version = "*", default-features = false } @@ -15,10 +15,11 @@ serde_json = "*" serde_yaml = "*" heck = "*" regex = "*" -indexmap = {version = "*", features = ["serde-1"]} +indexmap = { version = "*", features = ["serde-1"] } path_abs = "*" comrak = "0.8" serde = "1.0" snafu = "0.6" http = "0.2" -lazy_static = "1.4" \ No newline at end of file +lazy_static = "1.4" +syn = { version = "1.0", features = ["parsing"] } diff --git a/codegen/examples/gen_mgmt.rs b/codegen/examples/gen_mgmt.rs index 86f26ac..b5c137d 100644 --- a/codegen/examples/gen_mgmt.rs +++ b/codegen/examples/gen_mgmt.rs @@ -1,20 +1,17 @@ // cargo run --example gen_mgmt // https://github.com/Azure/azure-rest-api-specs/blob/master/specification/compute/resource-manager - use autorust_codegen::{ - cargo_toml, + self, cargo_toml, config_parser::{self, to_api_version, to_mod_name}, - lib_rs, path, run, Config, + lib_rs, path, Config, }; use heck::SnakeCase; +use snafu::{OptionExt, ResultExt, Snafu}; use std::{ collections::{HashMap, HashSet}, fs, }; -pub type Error = Box; -pub type Result = std::result::Result; - const SPEC_FOLDER: &str = "../azure-rest-api-specs/specification"; const OUTPUT_FOLDER: &str = "../azure-sdk-for-rust/services/mgmt"; @@ -24,15 +21,16 @@ const SERVICE_NAMES: 
&[(&str, &str)] = &[ ]; const ONLY_SERVICES: &[&str] = &[ - // "datafactory", - ]; + // "network", + // "redis", +]; const SKIP_SERVICES: &[&str] = &[ "apimanagement", // missing properties, all preview apis "automation", // Error: Error("data did not match any variant of untagged enum ReferenceOr", line: 90, column: 5) "cosmos-db", // get_gremlin_graph_throughput defined twice "cost-management", // use of undeclared crate or module `definition` - "databox", // recursive type has infinite size + "databox", // TODO #73 recursive types "databoxedge", // duplicate model pub struct SkuCost { "datamigration", // Error: "schema not found ../azure-rest-api-specs/specification/datamigration/resource-manager/Microsoft.DataMigration/preview/2018-07-15-preview/definitions/MigrateSqlServerSqlDbTask.json ValidationStatus" "deploymentmanager", // missing params @@ -41,14 +39,14 @@ const SKIP_SERVICES: &[&str] = &[ "hardwaresecuritymodules", // recursive without indirection on Error "healthcareapis", // Error: "schema not found ../azure-rest-api-specs/specification/common-types/resource-management/v1/types.json Resource" "hybridcompute", // use of undeclared crate or module `status` - "logic", // recursive type has infinite size + "logic", // TODO #73 recursive types "machinelearning", // missing params "managedservices", // registration_definition "mediaservices", // Error: Error("invalid unicode code point", line: 1380, column: 289) "migrateprojects", // recursive type has infinite size "mixedreality", // &AccountKeyRegenerateRequest not found in scope "netapp", // codegen wrong, missing operation params in function - "network", // thread 'main' panicked at 'called `Option::unwrap()` on a `None` value', codegen/src/codegen.rs:419:42 + "network", // TODO #73 recursive types "powerplatform", // Error: "parameter not found ../azure-rest-api-specs/specification/powerplatform/resource-manager/Microsoft.PowerPlatform/common/v1/definitions.json ResourceGroupNameParameter" 
"recoveryservicessiterecovery", // duplicate package-2016-08 https://github.com/Azure/azure-rest-api-specs/pull/11287 "redis", // map_type @@ -63,16 +61,40 @@ const SKIP_SERVICE_TAGS: &[(&str, &str)] = &[ ("azureactivedirectory", "package-preview-2020-07"), ("resources", "package-policy-2020-03"), ("recoveryservicesbackup", "package-2020-07"), // duplicate fn get_operation_status + ("network", "package-2017-03-30-only"), // SchemaNotFound 2017-09-01/network.json SubResource ]; +pub type Result = std::result::Result; + +#[derive(Debug, Snafu)] +pub enum Error { + #[snafu(display("file name was not utf-8"))] + FileNameNotUtf8Error {}, + IoError { + source: std::io::Error, + }, + PathError { + source: path::Error, + }, + CodegenError { + source: autorust_codegen::Error, + }, + CargoTomlError { + source: cargo_toml::Error, + }, + LibRsError { + source: lib_rs::Error, + }, +} + fn main() -> Result<()> { - let paths = fs::read_dir(SPEC_FOLDER)?; + let paths = fs::read_dir(SPEC_FOLDER).context(IoError)?; let mut spec_folders = Vec::new(); for path in paths { - let path = path?; - if path.file_type()?.is_dir() { + let path = path.context(IoError)?; + if path.file_type().context(IoError)?.is_dir() { let file_name = path.file_name(); - let spec_folder = file_name.to_str().ok_or("file name was not utf-8")?; + let spec_folder = file_name.to_str().context(FileNameNotUtf8Error)?; spec_folders.push(spec_folder.to_owned()); } } @@ -95,8 +117,8 @@ fn main() -> Result<()> { } fn gen_crate(spec_folder: &str) -> Result<()> { - let spec_folder_full = path::join(SPEC_FOLDER, spec_folder)?; - let readme = &path::join(spec_folder_full, "resource-manager/readme.md")?; + let spec_folder_full = path::join(SPEC_FOLDER, spec_folder).context(PathError)?; + let readme = &path::join(spec_folder_full, "resource-manager/readme.md").context(PathError)?; if !readme.exists() { println!("readme not found at {:?}", readme); return Ok(()); @@ -105,14 +127,13 @@ fn gen_crate(spec_folder: &str) -> 
Result<()> { let service_name = &get_service_name(spec_folder); // println!("{} -> {}", spec_folder, service_name); let crate_name = &format!("azure_mgmt_{}", service_name); - let output_folder = &path::join(OUTPUT_FOLDER, service_name)?; + let output_folder = &path::join(OUTPUT_FOLDER, service_name).context(PathError)?; - let src_folder = path::join(output_folder, "src")?; + let src_folder = path::join(output_folder, "src").context(PathError)?; if src_folder.exists() { - fs::remove_dir_all(&src_folder)?; + fs::remove_dir_all(&src_folder).context(IoError)?; } - // fs::create_dir_all(&src_folder)?; let packages = config_parser::parse_configurations_from_autorest_config_file(&readme); let mut feature_mod_names = Vec::new(); let skip_service_tags: HashSet<&(&str, &str)> = SKIP_SERVICE_TAGS.iter().collect(); @@ -128,24 +149,26 @@ fn gen_crate(spec_folder: &str) -> Result<()> { let mod_name = &to_mod_name(tag); feature_mod_names.push((tag.to_string(), mod_name.clone())); // println!(" {}", mod_name); - let mod_output_folder = path::join(&src_folder, mod_name)?; + let mod_output_folder = path::join(&src_folder, mod_name).context(PathError)?; // println!(" {:?}", mod_output_folder); // for input_file in &package.input_files { // println!(" {}", input_file); // } - let input_files: Vec<_> = package + let input_files: Result> = package .input_files .iter() - .map(|input_file| path::join(readme, input_file).unwrap()) + .map(|input_file| Ok(path::join(readme, input_file).context(PathError)?)) .collect(); + let input_files = input_files?; // for input_file in &input_files { // println!(" {:?}", input_file); // } - run(Config { + autorust_codegen::run(Config { api_version: Some(api_version), output_folder: mod_output_folder.into(), input_files, - })?; + }) + .context(CodegenError)?; } } if feature_mod_names.len() == 0 { @@ -154,9 +177,10 @@ fn gen_crate(spec_folder: &str) -> Result<()> { cargo_toml::create( crate_name, &feature_mod_names, - &path::join(output_folder, 
"Cargo.toml").map_err(|_| "Cargo.toml")?, - )?; - lib_rs::create(&feature_mod_names, &path::join(src_folder, "lib.rs").map_err(|_| "lib.rs")?)?; + &path::join(output_folder, "Cargo.toml").context(PathError)?, + ) + .context(CargoTomlError)?; + lib_rs::create(&feature_mod_names, &path::join(src_folder, "lib.rs").context(PathError)?).context(LibRsError)?; Ok(()) } diff --git a/codegen/src/codegen.rs b/codegen/src/codegen.rs index dc3283c..8895084 100644 --- a/codegen/src/codegen.rs +++ b/codegen/src/codegen.rs @@ -1,5 +1,5 @@ -#![allow(unused_variables, dead_code)] use crate::{ + identifier::{ident, CamelCaseIdent}, spec, status_codes::{get_error_responses, get_response_type_name, get_status_code_name, get_success_responses, has_default_response}, Config, OperationVerb, Reference, ResolvedSchema, Spec, @@ -8,7 +8,7 @@ use autorust_openapi::{CollectionFormat, DataType, Parameter, ParameterType, Pat use heck::{CamelCase, SnakeCase}; use indexmap::IndexMap; use proc_macro2::TokenStream; -use quote::{format_ident, quote, ToTokens}; +use quote::quote; use regex::Regex; use serde_json::Value; use snafu::{OptionExt, ResultExt, Snafu}; @@ -21,9 +21,25 @@ use std::{ pub type Result = std::result::Result; #[derive(Debug, Snafu)] pub enum Error { - SpecError { source: spec::Error }, + SpecError { + source: spec::Error, + }, ArrayExpectedToHaveItems, NoNameForRef, + #[snafu(display("IdentError at {}:{} {} ", file, line, source))] + IdentError { + // #[snafu(source(from(Error, Box::new)))] + source: crate::identifier::Error, + file: &'static str, + line: u32, + }, + #[snafu(display("CreateEnumIdentError {} {} {}", property_name, enum_value, source))] + CreateEnumIdentError { + #[snafu(source(from(Error, Box::new)))] + source: Box, + property_name: String, + enum_value: String, + }, } /// Whether or not to pass a type is a reference. 
@@ -96,7 +112,7 @@ impl CodeGen { for (ref_key, schema) in &all_schemas { let doc_file = &ref_key.file; let schema_name = &ref_key.name; - if let Some(first_doc_file) = schema_names.insert(schema_name, doc_file) { + if let Some(_first_doc_file) = schema_names.insert(schema_name, doc_file) { // eprintln!( // "WARN schema {} already created from {:?}, duplicate from {:?}", // schema_name, first_doc_file, doc_file @@ -106,7 +122,7 @@ impl CodeGen { file.extend(self.create_vec_alias(doc_file, schema_name, schema)?); } else if is_local_enum(schema) { let no_namespace = TokenStream::new(); - let (_tp_name, tp) = create_enum(&no_namespace, schema_name, schema); + let (_tp_name, tp) = create_enum(&no_namespace, schema_name, schema)?; file.extend(tp); } else { for stream in self.create_struct(doc_file, schema_name, schema)? { @@ -156,7 +172,10 @@ impl CodeGen { for (module_name, module) in modules { match module_name { Some(module_name) => { - let name = ident(&module_name); + let name = ident(&module_name).context(IdentError { + file: file!(), + line: line!(), + })?; file.extend(quote! { pub mod #name { use crate::models::*; @@ -191,9 +210,12 @@ impl CodeGen { Ok(()) } - fn create_vec_alias(&self, doc_file: &Path, alias_name: &str, schema: &ResolvedSchema) -> Result { + fn create_vec_alias(&self, _doc_file: &Path, alias_name: &str, schema: &ResolvedSchema) -> Result { let items = get_schema_array_items(&schema.schema.common)?; - let typ = ident(&alias_name.to_camel_case()); + let typ = ident(&alias_name.to_camel_case()).context(IdentError { + file: file!(), + line: line!(), + })?; let items_typ = get_type_name_for_schema_ref(&items, AsReference::False)?; Ok(quote! 
{ pub type #typ = Vec<#items_typ>; }) } @@ -203,13 +225,22 @@ impl CodeGen { let mut streams = Vec::new(); let mut local_types = Vec::new(); let mut props = TokenStream::new(); - let ns = ident(&struct_name.to_snake_case()); - let nm = ident(&struct_name.to_camel_case()); + let ns = ident(&struct_name.to_snake_case()).context(IdentError { + file: file!(), + line: line!(), + })?; + let nm = ident(&struct_name.to_camel_case()).context(IdentError { + file: file!(), + line: line!(), + })?; let required: HashSet<&str> = schema.schema.required.iter().map(String::as_str).collect(); for schema in &schema.schema.all_of { let type_name = get_type_name_for_schema_ref(schema, AsReference::False)?; - let field_name = ident(&type_name.to_string().to_snake_case()); + let field_name = ident(&type_name.to_string().to_snake_case()).context(IdentError { + file: file!(), + line: line!(), + })?; props.extend(quote! { #[serde(flatten)] pub #field_name: #type_name, @@ -221,7 +252,10 @@ impl CodeGen { .resolve_schema_map(doc_file, &schema.schema.properties) .context(SpecError)?; for (property_name, property) in &properties { - let nm = ident(&property_name.to_snake_case()); + let nm = ident(&property_name.to_snake_case()).context(IdentError { + file: file!(), + line: line!(), + })?; let (mut field_tp_name, field_tp) = self.create_struct_field_type(doc_file, &ns, property_name, property)?; let is_required = required.contains(property_name.as_str()); let is_vec = is_vec(&field_tp_name); @@ -288,15 +322,21 @@ impl CodeGen { ) -> Result<(TokenStream, Vec)> { match &property.ref_key { Some(ref_key) => { - let tp = ident(&ref_key.name.to_camel_case()); + let tp = ident(&ref_key.name.to_camel_case()).context(IdentError { + file: file!(), + line: line!(), + })?; Ok((tp, Vec::new())) } None => { if is_local_enum(property) { - let (tp_name, tp) = create_enum(namespace, property_name, property); + let (tp_name, tp) = create_enum(namespace, property_name, property)?; Ok((tp_name, vec![tp])) } else if 
is_local_struct(property) { - let id = ident(&property_name.to_camel_case()); + let id = ident(&property_name.to_camel_case()).context(IdentError { + file: file!(), + line: line!(), + })?; let tp_name = quote! {#namespace::#id}; let tps = self.create_struct(doc_file, property_name, property)?; // println!("creating local struct {:?} {}", tp_name, tps.len()); @@ -327,65 +367,6 @@ pub fn create_generated_by_header() -> TokenStream { quote! { #![doc = #comment] } } -fn is_keyword(word: &str) -> bool { - matches!( - word, - // https://doc.rust-lang.org/grammar.html#keywords - "abstract" - | "alignof" - | "as" - | "become" - | "box" - | "break" - | "const" - | "continue" - | "crate" - | "do" - | "else" - | "enum" - | "extern" - | "false" - | "final" - | "fn" - | "for" - | "if" - | "impl" - | "in" - | "let" - | "loop" - | "macro" - | "match" - | "mod" - | "move" - | "mut" - | "offsetof" - | "override" - | "priv" - | "proc" - | "pub" - | "pure" - | "ref" - | "return" - | "Self" - | "self" - | "sizeof" - | "static" - | "struct" - | "super" - | "trait" - | "true" - | "type" - | "typeof" - | "unsafe" - | "unsized" - | "use" - | "virtual" - | "where" - | "while" - | "yield" - ) -} - fn is_local_enum(property: &ResolvedSchema) -> bool { property.schema.common.enum_.len() > 0 } @@ -394,13 +375,24 @@ fn is_local_struct(property: &ResolvedSchema) -> bool { property.schema.properties.len() > 0 } -fn create_enum(namespace: &TokenStream, property_name: &str, property: &ResolvedSchema) -> (TokenStream, TokenStream) { - let schema_type = property.schema.common.type_.as_ref(); +fn create_enum(namespace: &TokenStream, property_name: &str, property: &ResolvedSchema) -> Result<(TokenStream, TokenStream)> { let enum_values = enum_values_as_strings(&property.schema.common.enum_); - let id = ident(&property_name.to_camel_case()); + let id = ident(&property_name.to_camel_case()).context(IdentError { + file: file!(), + line: line!(), + })?; let mut values = TokenStream::new(); - 
enum_values.iter().for_each(|name| { - let nm = ident(&name.to_camel_case()); + for name in enum_values { + let nm = name + .to_camel_case_ident() + .context(IdentError { + file: file!(), + line: line!(), + }) + .context(CreateEnumIdentError { + property_name: property_name.to_owned(), + enum_value: name.to_owned(), + })?; let rename = if &nm.to_string() == name { quote! {} } else { @@ -411,8 +403,11 @@ fn create_enum(namespace: &TokenStream, property_name: &str, property: &Resolved #nm, }; values.extend(value); - }); - let nm = ident(&property_name.to_camel_case()); + } + let nm = ident(&property_name.to_camel_case()).context(IdentError { + file: file!(), + line: line!(), + })?; let tp = quote! { #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)] pub enum #nm { @@ -420,7 +415,7 @@ fn create_enum(namespace: &TokenStream, property_name: &str, property: &Resolved } }; let tp_name = quote! {#namespace::#id}; - (tp_name, tp) + Ok((tp_name, tp)) } /// Wraps a type in an Option if is not required. 
@@ -432,21 +427,6 @@ fn require(is_required: bool, tp: TokenStream) -> TokenStream { } } -pub fn ident(text: &str) -> TokenStream { - let text = text.replace(".", "_"); - // prefix with underscore if starts with invalid character - let text = match text.chars().next().unwrap() { - '1' | '2' | '3' | '4' | '5' | '6' | '7' | '8' | '9' | '0' => format!("_{}", text), - _ => text.to_owned(), - }; - let idt = if is_keyword(&text) { - format_ident!("{}_", text) - } else { - format_ident!("{}", text) - }; - idt.into_token_stream() -} - fn enum_values_as_strings(values: &Vec) -> Vec<&str> { values .iter() @@ -457,17 +437,10 @@ fn enum_values_as_strings(values: &Vec) -> Vec<&str> { .collect() } -/// example: pub type Pets = Vec; -fn trim_ref(path: &str) -> String { - let pos = path.rfind('/').map_or(0, |i| i + 1); - path[pos..].to_string() -} - fn get_param_type(param: &Parameter) -> Result { let is_required = param.required.unwrap_or(false); let is_array = is_array(¶m.common); - let format = param.common.format.as_deref(); - let tp = if let Some(param_type) = ¶m.common.type_ { + let tp = if let Some(_param_type) = ¶m.common.type_ { get_type_name_for_schema(¶m.common, AsReference::True)? } else if let Some(schema) = ¶m.schema { get_type_name_for_schema_ref(schema, AsReference::True)? 
@@ -478,8 +451,11 @@ fn get_param_type(param: &Parameter) -> Result { Ok(require(is_required || is_array, tp)) } -fn get_param_name(param: &Parameter) -> TokenStream { - ident(¶m.name.to_snake_case()) +fn get_param_name(param: &Parameter) -> Result { + ident(¶m.name.to_snake_case()).context(IdentError { + file: file!(), + line: line!(), + }) } fn parse_params(param_re: &Regex, path: &str) -> Vec { @@ -491,10 +467,10 @@ fn format_path(param_re: &Regex, path: &str) -> String { param_re.replace_all(path, "{}").to_string() } -fn create_function_params(cg: &CodeGen, doc_file: &Path, parameters: &Vec) -> Result { +fn create_function_params(_cg: &CodeGen, _doc_file: &Path, parameters: &Vec) -> Result { let mut params: Vec = Vec::new(); for param in parameters { - let name = get_param_name(param); + let name = get_param_name(param)?; let tp = get_param_type(param)?; params.push(quote! { #name: #tp }); } @@ -557,7 +533,10 @@ fn get_type_name_for_schema_ref(schema: &ReferenceOr, as_ref: AsReferenc ReferenceOr::Reference { reference, .. } => { let rf = Reference::parse(&reference); let name = &rf.name.context(NoNameForRef)?; - let idt = ident(&name.to_camel_case()); + let idt = ident(&name.to_camel_case()).context(IdentError { + file: file!(), + line: line!(), + })?; match as_ref { AsReference::True => Ok(quote! { &#idt }), AsReference::False => Ok(quote! 
{ #idt }), @@ -579,16 +558,28 @@ fn create_function( cg: &CodeGen, doc_file: &Path, path: &str, - item: &PathItem, + _item: &PathItem, operation_verb: &OperationVerb, param_re: &Regex, function_name: &str, ) -> Result { - let fname = ident(function_name); + let fname = ident(function_name).context(IdentError { + file: file!(), + line: line!(), + })?; let params = parse_params(param_re, path); // println!("path params {:#?}", params); - let params: Vec<_> = params.iter().map(|s| ident(&s.to_snake_case())).collect(); + let params: Result> = params + .iter() + .map(|s| { + Ok(ident(&s.to_snake_case()).context(IdentError { + file: file!(), + line: line!(), + })?) + }) + .collect(); + let params = params?; let uri_str_args = quote! { #(#params),* }; let fpath = format!("{{}}{}", &format_path(param_re, path)); @@ -634,7 +625,7 @@ fn create_function( // api-version param if has_param_api_version { - if let Some(api_version) = cg.api_version() { + if let Some(_api_version) = cg.api_version() { ts_request_builder.extend(quote! { req_builder = req_builder.query(&[("api-version", &operation_config.api_version)]); }); @@ -644,7 +635,7 @@ fn create_function( // params for param in ¶meters { let param_name = ¶m.name; - let param_name_var = get_param_name(¶m); + let param_name_var = get_param_name(¶m)?; let required = param.required.unwrap_or(false); match param.in_ { ParameterType::Path => {} // handled above @@ -744,7 +735,10 @@ fn create_function( Some(tp) => quote! { (#tp) }, None => quote! {}, }; - let enum_type_name = ident(&get_response_type_name(status_code)); + let enum_type_name = ident(&get_response_type_name(status_code)).context(IdentError { + file: file!(), + line: line!(), + })?; success_responses_ts.extend(quote! { #enum_type_name#tp, }) } response_enum.extend(quote! { @@ -766,7 +760,10 @@ fn create_function( if response_type == "DefaultResponse" { error_responses_ts.extend(quote! 
{ DefaultResponse { status_code: StatusCode, #tp }, }); } else { - let response_type = ident(response_type); + let response_type = ident(response_type).context(IdentError { + file: file!(), + line: line!(), + })?; error_responses_ts.extend(quote! { #response_type { #tp }, }); } } @@ -779,8 +776,14 @@ fn create_function( match status_code { autorust_openapi::StatusCode::Code(_) => { let tp = create_response_type(rsp)?; - let status_code_name = ident(&get_status_code_name(status_code)); - let response_type_name = ident(&get_response_type_name(status_code)); + let status_code_name = ident(&get_status_code_name(status_code)).context(IdentError { + file: file!(), + line: line!(), + })?; + let response_type_name = ident(&get_response_type_name(status_code)).context(IdentError { + file: file!(), + line: line!(), + })?; if is_single_response { match tp { Some(tp) => { @@ -828,8 +831,14 @@ fn create_function( match status_code { autorust_openapi::StatusCode::Code(_) => { let tp = create_response_type(rsp)?; - let status_code_name = ident(&get_status_code_name(status_code)); - let response_type_name = ident(&get_response_type_name(status_code)); + let status_code_name = ident(&get_status_code_name(status_code)).context(IdentError { + file: file!(), + line: line!(), + })?; + let response_type_name = ident(&get_response_type_name(status_code)).context(IdentError { + file: file!(), + line: line!(), + })?; match tp { Some(tp) => { match_status.extend(quote! { @@ -859,7 +868,6 @@ fn create_function( autorust_openapi::StatusCode::Code(_) => {} autorust_openapi::StatusCode::Default => { let tp = create_response_type(rsp)?; - let response_type_name = ident(&get_response_type_name(status_code)); match tp { Some(tp) => { match_status.extend(quote! 
{ @@ -931,22 +939,3 @@ pub fn create_mod(api_version: &str) -> TokenStream { pub const API_VERSION: &str = #api_version; } } - -#[cfg(test)] -mod tests { - use super::*; - - #[test] - fn test_ident_odata_next_link() { - let idt = "odata.nextLink".to_snake_case(); - assert_eq!(idt, "odata.next_link"); - let idt = ident(&idt); - assert_eq!(idt.to_string(), "odata_next_link"); - } - - #[test] - fn test_ident_three_dot_two() { - let idt = ident("3.2"); - assert_eq!(idt.to_string(), "_3_2"); - } -} diff --git a/codegen/src/identifier.rs b/codegen/src/identifier.rs new file mode 100644 index 0000000..0c6bcf2 --- /dev/null +++ b/codegen/src/identifier.rs @@ -0,0 +1,223 @@ +use heck::CamelCase; +use proc_macro2::TokenStream; +use quote::ToTokens; +use snafu::{ResultExt, Snafu}; + +pub type Result = std::result::Result; +#[derive(Debug, Snafu)] +pub enum Error { + #[snafu(display("ParseIdentError {} {}", text, source))] + ParseIdentError { source: syn::Error, text: String }, +} + +pub trait CamelCaseIdent: ToOwned { + fn to_camel_case_ident(&self) -> Result; +} + +impl CamelCaseIdent for str { + fn to_camel_case_ident(&self) -> Result { + let mut txt = replace_chars_with_unicode_names(self); + txt = replace_chars_with_underscore(&txt); + txt = if starts_with_number(&txt) { + prefix_with_underscore_if_starts_with_number(&txt) + } else { + txt.to_camel_case() + }; + let idt = syn::parse_str::(&txt).context(ParseIdentError { text: txt.to_owned() })?; + Ok(idt.into_token_stream()) + } +} + +pub fn ident(text: &str) -> Result { + let mut txt = replace_chars_with_underscore(text); + txt = remove_spaces(&txt); + txt = prefix_with_underscore_if_starts_with_number(&txt); + txt = prefix_with_underscore_keywords(&txt); + let idt = syn::parse_str::(&txt).context(ParseIdentError { text: txt.to_owned() })?; + Ok(idt.into_token_stream()) +} + +fn remove_spaces(text: &str) -> String { + text.replace(" ", "") +} + +fn replace_chars_with_underscore(text: &str) -> String { + let mut txt = 
text.replace(".", "_"); + txt = txt.replace(",", "_"); + txt = txt.replace("-", "_"); + txt = txt.replace("/", "_"); + txt +} + +/// Replace some special charaters with their unicode names +fn replace_chars_with_unicode_names(text: &str) -> String { + text.replace("*", "Asterisk") +} + +fn starts_with_number(text: &str) -> bool { + match text.chars().next() { + Some(ch) => match ch { + '1' | '2' | '3' | '4' | '5' | '6' | '7' | '8' | '9' | '0' => true, + _ => false, + }, + None => false, + } +} + +fn prefix_with_underscore_if_starts_with_number(text: &str) -> String { + if starts_with_number(text) { + format!("_{}", text) + } else { + text.to_owned() + } +} + +fn prefix_with_underscore_keywords(text: &str) -> String { + if is_keyword(&text) { + format!("{}_", text) + } else { + text.to_owned() + } +} + +fn is_keyword(word: &str) -> bool { + matches!( + word, + // https://doc.rust-lang.org/grammar.html#keywords + "abstract" + | "alignof" + | "as" + | "become" + | "box" + | "break" + | "const" + | "continue" + | "crate" + | "do" + | "else" + | "enum" + | "extern" + | "false" + | "final" + | "fn" + | "for" + | "if" + | "impl" + | "in" + | "let" + | "loop" + | "macro" + | "match" + | "mod" + | "move" + | "mut" + | "offsetof" + | "override" + | "priv" + | "proc" + | "pub" + | "pure" + | "ref" + | "return" + | "Self" + | "self" + | "sizeof" + | "static" + | "struct" + | "super" + | "trait" + | "true" + | "type" + | "typeof" + | "unsafe" + | "unsized" + | "use" + | "virtual" + | "where" + | "while" + | "yield" + ) +} + +#[cfg(test)] +mod tests { + use super::*; + use heck::SnakeCase; + + #[test] + fn test_odata_next_link() -> Result<()> { + let idt = "odata.nextLink".to_snake_case(); + assert_eq!(idt, "odata.next_link"); + let idt = ident(&idt)?; + assert_eq!(idt.to_string(), "odata_next_link"); + Ok(()) + } + + #[test] + fn test_three_dot_two() -> Result<()> { + let idt = ident("3.2")?; + assert_eq!(idt.to_string(), "_3_2"); + Ok(()) + } + + #[test] + fn test_asterisk() 
-> Result<()> { + assert_eq!("*".to_camel_case(), ""); + assert_eq!("*".to_camel_case_ident()?.to_string(), "Asterisk"); + Ok(()) + } + + #[test] + fn test_system_assigned_user_assigned() -> Result<()> { + assert_eq!( + "SystemAssigned, UserAssigned".to_camel_case_ident()?.to_string(), + "SystemAssignedUserAssigned" + ); + Ok(()) + } + + #[test] + fn test_gcm_aes_128() -> Result<()> { + assert_eq!("gcm-aes-128".to_camel_case_ident()?.to_string(), "GcmAes128"); + Ok(()) + } + + #[test] + fn test_5() -> Result<()> { + assert_eq!("5".to_camel_case_ident()?.to_string(), "_5"); + Ok(()) + } + + #[test] + fn test_app_configuration() -> Result<()> { + assert_eq!( + "Microsoft.AppConfiguration/configurationStores".to_camel_case_ident()?.to_string(), + "MicrosoftAppConfigurationConfigurationStores" + ); + Ok(()) + } + + #[test] + fn test_microsoft_key_vault_vaults() -> Result<()> { + assert_eq!( + "Microsoft.KeyVault/vaults".to_camel_case_ident()?.to_string(), + "MicrosoftKeyVaultVaults" + ); + Ok(()) + } + + #[test] + fn test_azure_virtual_machine_best_practices() -> Result<()> { + assert_eq!( + "Azure virtual machine best practices – Dev/Test".to_camel_case_ident()?.to_string(), + "AzureVirtualMachineBestPracticesDevTest" + ); + Ok(()) + } + + #[test] + fn test_1_0() -> Result<()> { + assert_eq!("1.0".to_camel_case_ident()?.to_string(), "_1_0"); + Ok(()) + } +} diff --git a/codegen/src/lib.rs b/codegen/src/lib.rs index f1bcc5e..782a251 100644 --- a/codegen/src/lib.rs +++ b/codegen/src/lib.rs @@ -1,6 +1,7 @@ pub mod cargo_toml; mod codegen; pub mod config_parser; +pub mod identifier; pub mod lib_rs; pub mod path; mod reference; @@ -22,24 +23,33 @@ use std::{ extern crate lazy_static; pub type Result = std::result::Result; + #[derive(Debug, Snafu)] pub enum Error { #[snafu(display("Could not create output directory {}: {}", directory.display(), source))] - CreateOutputDirectory { + CreateOutputDirectoryError { directory: PathBuf, source: std::io::Error, }, 
#[snafu(display("Could not create file {}: {}", file.display(), source))] - CreateFile { + CreateFileError { file: PathBuf, source: std::io::Error, }, #[snafu(display("Could not write file {}: {}", file.display(), source))] - WriteFile { + WriteFileError { file: PathBuf, source: std::io::Error, }, - CodeGenError { + CodeGenNewError { + source: codegen::Error, + }, + #[snafu(display("CreateModelsError {} {}", config.output_folder.display(), source))] + CreateModelsError { + source: codegen::Error, + config: Config, + }, + CreateOperationsError { source: codegen::Error, }, PathError { @@ -56,16 +66,16 @@ pub struct Config { pub fn run(config: Config) -> Result<()> { let directory = &config.output_folder; - fs::create_dir_all(directory).context(CreateOutputDirectory { directory })?; - let cg = &CodeGen::new(config.clone()).context(CodeGenError)?; + fs::create_dir_all(directory).context(CreateOutputDirectoryError { directory })?; + let cg = &CodeGen::new(config.clone()).context(CodeGenNewError)?; // create models from schemas - let models = cg.create_models().context(CodeGenError)?; + let models = cg.create_models().context(CreateModelsError { config: config.clone() })?; let models_path = path::join(&config.output_folder, "models.rs").context(PathError)?; write_file(&models_path, &models)?; // create api client from operations - let operations = cg.create_operations().context(CodeGenError)?; + let operations = cg.create_operations().context(CreateOperationsError)?; let operations_path = path::join(&config.output_folder, "operations.rs").context(PathError)?; write_file(&operations_path, &operations)?; @@ -81,7 +91,7 @@ pub fn write_file>(file: P, tokens: &TokenStream) -> Result<()> let file: PathBuf = file.into(); println!("writing file {}", &file.display()); let code = tokens.to_string(); - let mut buffer = File::create(&file).context(CreateFile { file: file.clone() })?; - buffer.write_all(&code.as_bytes()).context(WriteFile { file })?; + let mut buffer = 
File::create(&file).context(CreateFileError { file: file.clone() })?; + buffer.write_all(&code.as_bytes()).context(WriteFileError { file })?; Ok(()) } diff --git a/codegen/src/lib_rs.rs b/codegen/src/lib_rs.rs index dc3ad23..3d65112 100644 --- a/codegen/src/lib_rs.rs +++ b/codegen/src/lib_rs.rs @@ -1,22 +1,35 @@ -use crate::{ - codegen::{create_generated_by_header, ident}, - write_file, -}; +use crate::{codegen::create_generated_by_header, identifier::ident, write_file}; use proc_macro2::TokenStream; use quote::quote; +use snafu::{ResultExt, Snafu}; use std::path::Path; -use crate::Result; +pub type Result = std::result::Result; + +#[derive(Debug, Snafu)] +pub enum Error { + IdentModNameError { + source: crate::identifier::Error, + feature_name: String, + mod_name: String, + }, + WriteFileError { + source: crate::Error, + }, +} pub fn create(feature_mod_names: &Vec<(String, String)>, path: &Path) -> Result<()> { - write_file(path, &create_body(feature_mod_names))?; + write_file(path, &create_body(feature_mod_names)?).context(WriteFileError)?; Ok(()) } -fn create_body(feature_mod_names: &Vec<(String, String)>) -> TokenStream { +fn create_body(feature_mod_names: &Vec<(String, String)>) -> Result { let mut cfgs = TokenStream::new(); for (feature_name, mod_name) in feature_mod_names { - let mod_name = ident(mod_name); + let mod_name = ident(mod_name).context(IdentModNameError { + feature_name: feature_name.to_owned(), + mod_name: mod_name.to_owned(), + })?; cfgs.extend(quote! { #[cfg(feature = #feature_name)] mod #mod_name; @@ -25,7 +38,7 @@ fn create_body(feature_mod_names: &Vec<(String, String)>) -> TokenStream { }); } let generated_by = create_generated_by_header(); - quote! { + Ok(quote! 
{ #generated_by #cfgs @@ -57,6 +70,5 @@ fn create_body(feature_mod_names: &Vec<(String, String)>) -> TokenStream { } } } - - } + }) } diff --git a/codegen/src/path.rs b/codegen/src/path.rs index 2f64ac4..3e63ae2 100644 --- a/codegen/src/path.rs +++ b/codegen/src/path.rs @@ -2,7 +2,6 @@ use path_abs::PathMut; use snafu::{ResultExt, Snafu}; use std::path::{Path, PathBuf}; -// pub type StdError = std:error::Error; pub type Result = std::result::Result; #[derive(Debug, Snafu)] pub enum Error { diff --git a/codegen/src/reference.rs b/codegen/src/reference.rs index 60ed202..2b24f34 100644 --- a/codegen/src/reference.rs +++ b/codegen/src/reference.rs @@ -74,4 +74,17 @@ mod tests { ); Ok(()) } + + #[test] + fn can_parse_types() -> Result<()> { + assert_eq!( + Reference::parse("./types.json#/definitions/Resource"), + Reference { + file: Some("./types.json".to_owned()), + path: vec!["definitions".to_owned()], + name: Some("Resource".to_owned()), + } + ); + Ok(()) + } } diff --git a/codegen/src/spec.rs b/codegen/src/spec.rs index 001d141..5907156 100644 --- a/codegen/src/spec.rs +++ b/codegen/src/spec.rs @@ -13,14 +13,25 @@ pub type Result = std::result::Result; #[derive(Debug, Snafu)] pub enum Error { - PathJoin { source: path::Error }, - SchemaNotFound, + PathJoin { + source: path::Error, + }, + #[snafu(display("SchemaNotFound {} {}", ref_key.file.display(), ref_key.name))] + SchemaNotFound { + ref_key: RefKey, + }, NoNameInReference, ParameterNotFound, NotImplemented, - ReadFile { source: std::io::Error }, - DeserializeYaml { source: serde_yaml::Error }, - DeserializeJson { source: serde_json::Error }, + ReadFile { + source: std::io::Error, + }, + DeserializeYaml { + source: serde_yaml::Error, + }, + DeserializeJson { + source: serde_json::Error, + }, } /// An API specification @@ -57,13 +68,12 @@ impl Spec { match schema { ReferenceOr::Reference { .. 
} => {} ReferenceOr::Item(schema) => { - schemas.insert( - RefKey { - file: path.clone(), - name: name.clone(), - }, - schema.clone(), - ); + let ref_key = RefKey { + file: path.clone(), + name: name.clone(), + }; + // println!("{:?}", ref_key); + schemas.insert(ref_key, schema.clone()); } } } @@ -94,14 +104,20 @@ impl Spec { /// Find the schema for a given doc path and reference pub fn resolve_schema_ref>(&self, doc_path: P, reference: Reference) -> Result { let doc_path: PathBuf = doc_path.into(); + // println!("{:?} {:?}", doc_path, reference); let full_path = match reference.file { None => doc_path, Some(file) => path::join(doc_path, &file).context(PathJoin)?, }; + // println!("{:?}", full_path); let name = reference.name.ok_or_else(|| Error::NoNameInReference)?; let ref_key = RefKey { file: full_path, name }; - let schema = self.schemas.get(&ref_key).context(SchemaNotFound)?.clone(); + let schema = self + .schemas + .get(&ref_key) + .context(SchemaNotFound { ref_key: ref_key.clone() })? 
+ .clone(); Ok(ResolvedSchema { ref_key: Some(ref_key), schema, diff --git a/codegen/tests/azure_rest_api_specs.rs b/codegen/tests/azure_rest_api_specs.rs index 6511274..f55d3df 100644 --- a/codegen/tests/azure_rest_api_specs.rs +++ b/codegen/tests/azure_rest_api_specs.rs @@ -6,12 +6,14 @@ use autorust_codegen::*; use spec::RefString; use std::path::PathBuf; -pub type Error = Box; -pub type Result = std::result::Result; +type Result = std::result::Result>; + +const COMMON_TYPES_SPEC: &str = "../../azure-rest-api-specs/specification/security/resource-manager/common/v1/types.json"; +const VMWARE_SPEC: &str = "../../azure-rest-api-specs/specification/vmware/resource-manager/Microsoft.AVS/stable/2020-03-20/vmware.json"; #[test] fn refs_count_security_common() -> Result<()> { - let api = &spec::openapi::parse("../../azure-rest-api-specs/specification/security/resource-manager/common/v1/types.json")?; + let api = &spec::openapi::parse(COMMON_TYPES_SPEC)?; let refs = spec::get_refs(api); assert_eq!(13, refs.len()); Ok(()) @@ -19,9 +21,7 @@ fn refs_count_security_common() -> Result<()> { #[test] fn refs_count_avs() -> Result<()> { - let api = &spec::openapi::parse( - "../../azure-rest-api-specs/specification/vmware/resource-manager/Microsoft.AVS/stable/2020-03-20/vmware.json", - )?; + let api = &spec::openapi::parse(VMWARE_SPEC)?; let refs = spec::get_refs(api); assert_eq!(197, refs.len()); Ok(()) @@ -29,9 +29,7 @@ fn refs_count_avs() -> Result<()> { #[test] fn ref_files() -> Result<()> { - let api = &spec::openapi::parse( - "../../azure-rest-api-specs/specification/vmware/resource-manager/Microsoft.AVS/stable/2020-03-20/vmware.json", - )?; + let api = &spec::openapi::parse(VMWARE_SPEC)?; let files = spec::openapi::get_ref_files(api); assert_eq!(1, files.len()); assert!(files.contains("../../../../../common-types/resource-management/v1/types.json")); @@ -40,9 +38,7 @@ fn ref_files() -> Result<()> { #[test] fn read_spec_avs() -> Result<()> { - let spec = 
&Spec::read_files(&[ -        "../../azure-rest-api-specs/specification/vmware/resource-manager/Microsoft.AVS/stable/2020-03-20/vmware.json", -    ])?; +    let spec = &Spec::read_files(&[VMWARE_SPEC])?; assert_eq!(2, spec.docs.len()); assert!(spec.docs.contains_key(std::path::Path::new( "../../azure-rest-api-specs/specification/common-types/resource-management/v1/types.json" @@ -52,8 +48,7 @@ fn read_spec_avs() -> Result<()> { #[test] fn test_resolve_schema_ref() -> Result<()> { - let file = - PathBuf::from("../../azure-rest-api-specs/specification/vmware/resource-manager/Microsoft.AVS/stable/2020-03-20/vmware.json"); + let file = PathBuf::from(VMWARE_SPEC); let spec = &Spec::read_files(&[&file])?; spec.resolve_schema_ref(&file, Reference::parse("#/definitions/OperationList"))?; spec.resolve_schema_ref( @@ -65,8 +60,7 @@ fn test_resolve_schema_ref() -> Result<()> { #[test] fn test_resolve_parameter_ref() -> Result<()> { - let file = - PathBuf::from("../../azure-rest-api-specs/specification/vmware/resource-manager/Microsoft.AVS/stable/2020-03-20/vmware.json"); + let file = PathBuf::from(VMWARE_SPEC); let spec = &Spec::read_files(&[&file])?; spec.resolve_parameter_ref( &file, @@ -77,8 +71,7 @@ fn test_resolve_parameter_ref() -> Result<()> { #[test] fn test_resolve_all_refs() -> Result<()> { - let doc_file = - PathBuf::from("../../azure-rest-api-specs/specification/vmware/resource-manager/Microsoft.AVS/stable/2020-03-20/vmware.json"); + let doc_file = PathBuf::from(VMWARE_SPEC); let spec = &Spec::read_files(&[&doc_file])?; for (doc_file, doc) in &spec.docs { let refs = spec::get_refs(doc); diff --git a/codegen/tests/redis_spec.rs b/codegen/tests/redis_spec.rs new file mode 100644 index 0000000..c08168f --- /dev/null +++ b/codegen/tests/redis_spec.rs @@ -0,0 +1,75 @@ +// cargo test --test redis_spec +// These tests require cloning azure-rest-api-specs.
+// git clone git@github.com:Azure/azure-rest-api-specs.git ../azure-rest-api-specs + +use autorust_codegen::{ + spec::{self, RefString}, + Reference, Spec, +}; +use std::path::PathBuf; + +type Result = std::result::Result>; + +const REDIS_SPEC: &str = "../../azure-rest-api-specs/specification/redis/resource-manager/Microsoft.Cache/stable/2020-06-01/redis.json"; +const LINKS_SPEC: &str = "../../azure-rest-api-specs/specification/common-types/resource-management/v1/privatelinks.json"; + +#[test] +fn test_redis_ref_files() -> Result<()> { + let api = &spec::openapi::parse(REDIS_SPEC)?; + let files = spec::openapi::get_ref_files(api); + println!("{:#?}", files); + assert_eq!(2, files.len()); + assert!(files.contains("../../../../../common-types/resource-management/v2/types.json")); + Ok(()) +} + +#[test] +fn test_redis_read_spec() -> Result<()> { + let spec = &Spec::read_files(&[REDIS_SPEC])?; + println!("{:#?}", spec.docs.keys()); + assert_eq!(3, spec.docs.len()); + assert!(spec.docs.contains_key(std::path::Path::new( + "../../azure-rest-api-specs/specification/common-types/resource-management/v2/types.json" + ))); + Ok(()) +} + +#[test] +fn test_links_ref_files() -> Result<()> { + let api = &spec::openapi::parse(LINKS_SPEC)?; + let files = spec::openapi::get_ref_files(api); + println!("{:#?}", files); + assert_eq!(1, files.len()); + assert!(files.contains("./types.json")); + Ok(()) +} + +#[test] +fn test_links_refs_count() -> Result<()> { + let api = &spec::openapi::parse(LINKS_SPEC)?; + let refs = spec::get_refs(api); + assert_eq!(10, refs.len()); + Ok(()) +} + +// #[test] +// fn test_redis_resolve_all_refs() -> Result<()> { +// let doc_file = PathBuf::from(REDIS_SPEC); +// let spec = &Spec::read_files(&[&doc_file])?; +// for (doc_file, doc) in &spec.docs { +// let refs = spec::get_refs(doc); +// for rs in refs { +// match rs { +// RefString::PathItem(_) => {} +// RefString::Example(_) => {} +// RefString::Parameter(reference) => { +// 
spec.resolve_parameter_ref(&doc_file, Reference::parse(&reference))?; +// } +// RefString::Schema(reference) => { +// spec.resolve_schema_ref(&doc_file, Reference::parse(&reference))?; +// } +// } +// } +// } +// Ok(()) +// }