fix(schema): no unused types anymore
Due to shared global state being altered, we got wrong results.
This is fixed now, thanks to a deepcopy. Amazing how altering global
state always comes back to bite you, even when you are convinced it's
safe to do in just this one case!
General rule: just don't do it, no matter what!
Byron committed Mar 11, 2015
1 parent ac8c415 commit e3ab233
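The failure mode described in the message is in-place mutation of a dict that other code still reads. Below is a minimal sketch of the buggy pattern next to the deepcopy fix; the names (SCHEMAS, build_schema_map_mutating, build_schema_map_copying) are illustrative only, not the generator's actual API.

    from copy import deepcopy

    SCHEMAS = {'Account': {'properties': {}}}   # stands in for the shared, module-wide state

    def build_schema_map_mutating():
        # Buggy pattern: derived entries are written straight into the shared
        # dict, so every later reader of SCHEMAS silently sees them too.
        for sid in list(SCHEMAS):
            SCHEMAS[sid + 'Nested'] = {'parents': [sid]}
        return SCHEMAS

    def build_schema_map_copying():
        # Fixed pattern: work on a deepcopy and leave the shared input untouched.
        all_schemas = deepcopy(SCHEMAS)
        for sid in SCHEMAS:
            all_schemas[sid + 'Nested'] = {'parents': [sid]}
        return all_schemas

    result = build_schema_map_copying()
    print(sorted(SCHEMAS))   # ['Account'] - the shared state is unchanged
    print(sorted(result))    # ['Account', 'AccountNested']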
Showing 2 changed files with 19 additions and 11 deletions.
src/mako/lib/rbuild.mako (2 changes: 1 addition & 1 deletion)
@@ -1,7 +1,7 @@
 <%!
 from util import (put_and, rust_test_fn_invisible, rust_doc_test_norun, rust_doc_comment,
                   rb_type, singular, hub_type, mangle_ident, mb_type, method_params, property,
-                  to_fqan, indent_all_but_first_by, schema_markers,
+                  to_fqan, indent_all_but_first_by,
                   activity_input_type, TREF, IO_REQUEST, schema_to_required_property,
                   rust_copy_value_s, is_required_property, organize_params, REQUEST_VALUE_PROPERTY_NAME,
                   build_all_params, rb_type_params_s, hub_type_params_s, mb_type_params_s, mb_additional_type_params)
src/mako/lib/util.py (28 changes: 18 additions & 10 deletions)
@@ -269,7 +269,7 @@ def _is_map_prop(p):
 
 def _assure_unique_type_name(schemas, tn):
     if tn in schemas:
-        tn += 'Internal'
+        tn += 'Nested'
     return tn
 
 # map a json type to an rust type
@@ -352,6 +352,7 @@ def is_pod_property(p):
 def schema_markers(s, c):
     res = set()
     ids = s['parents'] + [s.id]
+    print ids
     for sid in ids:
         activities = c.sta_map.get(sid, dict())
         if len(activities) == 0:
@@ -584,17 +585,22 @@ def build_activity_mappings(activities, res = None, fqan = None):
     # end utility
 
     # A dict of {s.id -> schema} , with all schemas having the 'parents' key set with [s.id, ...] of all parents
-    # in order of traversal, [0] is first parent, [-1] is the root of them all
-    # current schemas - the dict will be altered ! Changing global state seems odd, but we own it !
-    def build_schema_map(schemas):
+    # in order of traversal, [-1] is first parent, [0] is the root of them all
+    def build_schema_map():
         # 'type' in t and t.type == 'object' and 'properties' in t or ('items' in t and 'properties' in t.items)
         PKEY = 'parents'
+        UBKEY = 'used_by'
+        all_schemas = deepcopy(schemas)
         def recurse_properties(prefix, properties, parent_ids):
             for pn, p in properties.iteritems():
+                if TREF in p:
+                    # they can be used in multiple spots - just brute-force copy all parents in there
+                    # which should probably be renamed to used_by instead
+                    pass
                 if is_nested_type_property(p):
                     ns = deepcopy(p)
                     ns.id = _assure_unique_type_name(schemas, nested_type_name(prefix, pn))
-                    schemas[ns.id] = ns
+                    all_schemas[ns.id] = ns
                     ns[PKEY] = parent_ids
 
                 # To allow us recursing arrays, we simply put items one level up
@@ -615,27 +621,29 @@ def recurse_properties(prefix, properties, parent_ids):
         # end utility
         for s in schemas.values():
             s[PKEY] = list() # roots never have parents
+            if UBKEY not in s:
+                s[UBKEY] = list()
             if 'properties' not in s:
                 continue
             recurse_properties(s.id, s.properties, [s.id])
         # end for each schema
-        return schemas
+        return all_schemas
     # end utility
 
     if schemas:
-        schemas = build_schema_map(schemas)
+        all_schemas = build_schema_map()
     else:
-        schemas = dict()
+        all_schemas = dict()
     if not resources:
-        return Context(dict(), dict(), dict(), dict(), schemas)
+        return Context(dict(), dict(), dict(), dict(), all_schemas)
     sta_map, fqan_map = build_activity_mappings(resources)
     rta_map = dict()
     rtc_map = dict()
     for an in fqan_map:
         category, resource, activity = activity_split(an)
         rta_map.setdefault(resource, list()).append(activity)
         assert rtc_map.setdefault(resource, category) == category
-    return Context(sta_map, fqan_map, rta_map, rtc_map, schemas)
+    return Context(sta_map, fqan_map, rta_map, rtc_map, all_schemas)
 
 # Expects v to be 'v\d+', throws otherwise
 def to_api_version(v):
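For orientation, the map returned by build_schema_map pairs each schema id with its schema dict, decorated with the bookkeeping keys touched above ('parents', and the newly seeded 'used_by'). A rough sketch of the resulting shape; the schema names and property values are invented, only the keys come from the diff.

    # Illustrative only: 'parents' and 'used_by' come from the diff, the rest is made up.
    all_schemas = {
        'Account': {
            'properties': {'emailAddress': {'type': 'string'}},
            'parents': [],                        # roots never have parents
            'used_by': [],                        # seeded empty via UBKEY
        },
        'AccountSettingsNotificationsNested': {   # nested type, name made unique by _assure_unique_type_name
            'properties': {'enabled': {'type': 'boolean'}},
            'parents': ['Account', 'AccountSettingsNested'],  # [0] is the root, [-1] the immediate parent
        },
    }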
