mirror of https://github.com/matrix-org/matrix-spec
Start encapsulating Units too. Add debug option which controls logging.
This commit is contained in:
parent 96671ce833
commit 8e1d6899c2
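The sketch below is illustrative only: it shows how the two refactored classes are intended to fit together once this commit lands, with the new debug flag switched on. The jinja2 environment setup and the "templates" directory are assumptions, and in the real modules the units value reaching Sections is the AccessKeyStore built by the units module's load() rather than a plain dict; only Units, Sections, get_units() and get_sections() are taken from the diff below.

    from jinja2 import Environment, FileSystemLoader

    # Units and Sections are the classes defined in the diff below; their
    # module paths are not shown in this excerpt.
    env = Environment(loader=FileSystemLoader("templates"))  # assumed template dir

    units = Units(debug=True)      # debug=True makes log() actually print
    unit_dict = units.get_units()  # keys are derived from the load_* method names

    sections = Sections(env, unit_dict, debug=True)
    section_dict = sections.get_sections()
    # e.g. section_dict["room_events"] now holds the rendered events template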
@@ -10,14 +10,33 @@ class Sections(object):
     e.g. "render_room_events" has the section key "room_events"
     """
 
-    def __init__(self, env, units):
+    def __init__(self, env, units, debug=False):
         self.env = env
         self.units = units
+        self.debug = debug
+
+    def log(self, text):
+        if self.debug:
+            print text
+
+    def get_sections(self):
+        render_list = inspect.getmembers(self, predicate=inspect.ismethod)
+        section_dict = {}
+        for (func_name, func) in render_list:
+            if not func_name.startswith("render_"):
+                continue
+            section_key = func_name[len("render_"):]
+            section = func()
+            section_dict[section_key] = section
+            self.log("Generated section '%s' : %s" % (
+                section_key, section[:60].replace("\n","")
+            ))
+        return section_dict
 
     def render_room_events(self):
         template = self.env.get_template("events.tmpl")
-        examples = self.units.get("event-examples")
-        schemas = self.units.get("event-schemas")
+        examples = self.units.get("event_examples")
+        schemas = self.units.get("event_schemas")
         sections = []
         for event_name in sorted(schemas):
             if not event_name.startswith("m.room"):
@@ -30,11 +49,11 @@ class Sections(object):
 
     # pass through git ver so it'll be dropped in the input file
     def render_git_version(self):
-        return self.units.get("git-version")
+        return self.units.get("git_version")
 
     def _render_ce_type(self, type):
        template = self.env.get_template("common-event-fields.tmpl")
-        ce_types = self.units.get("common-event-fields")
+        ce_types = self.units.get("common_event_fields")
        return template.render(common_event=ce_types[type])
 
     def render_common_event_fields(self):
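A note on the renamed lookup keys in the two hunks above ("git-version" becomes "git_version", and so on): the new get_units() in the units hunk further down derives each unit key from the name of its load_* method, and since Python identifiers cannot contain hyphens the old hyphenated keys turn into underscored ones. A minimal illustration of that derivation, using a name from this diff:

    func_name = "load_git_version"
    unit_key = func_name[len("load_"):]
    # unit_key == "git_version"; render_git_version() above then fetches it
    # with self.units.get("git_version")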
@@ -50,14 +69,7 @@ class Sections(object):
 def load(env, units):
     store = AccessKeyStore()
     sections = Sections(env, units)
-    render_list = inspect.getmembers(sections, predicate=inspect.ismethod)
-    for (func_name, func) in render_list:
-        if not func_name.startswith("render_"):
-            continue
-        section_key = func_name[len("render_"):]
-        section = func()
-        print "Generated section '%s' : %s" % (
-            section_key, section[:60].replace("\n","")
-        )
-        store.add(section_key, section)
+    section_dict = sections.get_sections()
+    for section_key in section_dict:
+        store.add(section_key, section_dict[section_key])
     return store
@@ -1,5 +1,6 @@
 """Contains all the units for the spec."""
 from . import AccessKeyStore
+import inspect
 import json
 import os
 import subprocess
@@ -12,228 +13,244 @@ def prop(obj, path):
         val = val.get(key, {})
     return val
 
-def _load_common_event_fields():
-    path = "../event-schemas/schema/v1/core"
-    event_types = {}
-    with open(path, "r") as f:
-        core_json = json.loads(f.read())
-        for event_type in core_json["definitions"]:
-            event_info = core_json["definitions"][event_type]
-            table = {
-                "title": event_info["title"],
-                "desc": event_info["description"],
-                "rows": []
-            }
-            for prop in sorted(event_info["properties"]):
-                row = {
-                    "key": prop,
-                    "type": event_info["properties"][prop]["type"],
-                    "desc": event_info["properties"][prop].get("description","")
+class Units(object):
+
+    def __init__(self, debug=False):
+        self.debug = debug
+
+    def log(self, text):
+        if self.debug:
+            print text
+
+    def get_units(self, debug=False):
+        unit_list = inspect.getmembers(self, predicate=inspect.ismethod)
+        unit_dict = {}
+        for (func_name, func) in unit_list:
+            if not func_name.startswith("load_"):
+                continue
+            unit_key = func_name[len("load_"):]
+            unit_dict[unit_key] = func()
+            self.log("Generated unit '%s' : %s" % (
+                unit_key, json.dumps(unit_dict[unit_key])[:50].replace(
+                    "\n",""
+                )
+            ))
+        return unit_dict
+
+    def load_common_event_fields(self):
+        path = "../event-schemas/schema/v1/core"
+        event_types = {}
+        with open(path, "r") as f:
+            core_json = json.loads(f.read())
+            for event_type in core_json["definitions"]:
+                event_info = core_json["definitions"][event_type]
+                table = {
+                    "title": event_info["title"],
+                    "desc": event_info["description"],
+                    "rows": []
                 }
-                table["rows"].append(row)
-            event_types[event_type] = table
-    return event_types
+                for prop in sorted(event_info["properties"]):
+                    row = {
+                        "key": prop,
+                        "type": event_info["properties"][prop]["type"],
+                        "desc": event_info["properties"][prop].get("description","")
+                    }
+                    table["rows"].append(row)
+                event_types[event_type] = table
+        return event_types
 
-def _load_examples():
-    path = "../event-schemas/examples/v1"
-    examples = {}
-    for filename in os.listdir(path):
-        if not filename.startswith("m."):
-            continue
-        with open(os.path.join(path, filename), "r") as f:
-            examples[filename] = json.loads(f.read())
-            if filename == "m.room.message#m.text":
-                examples["m.room.message"] = examples[filename]
-    return examples
+    def load_event_examples(self):
+        path = "../event-schemas/examples/v1"
+        examples = {}
+        for filename in os.listdir(path):
+            if not filename.startswith("m."):
+                continue
+            with open(os.path.join(path, filename), "r") as f:
+                examples[filename] = json.loads(f.read())
+                if filename == "m.room.message#m.text":
+                    examples["m.room.message"] = examples[filename]
+        return examples
 
-def _load_schemas():
-    path = "../event-schemas/schema/v1"
-    schemata = {}
+    def load_event_schemas(self):
+        path = "../event-schemas/schema/v1"
+        schemata = {}
 
-    def get_content_fields(obj, enforce_title=False):
-        # Algorithm:
-        # f.e. property => add field info (if field is object then recurse)
-        if obj.get("type") != "object":
-            raise Exception(
-                "get_content_fields: Object %s isn't an object." % obj
-            )
-        if enforce_title and not obj.get("title"):
-            raise Exception(
-                "get_content_fields: Nested object %s doesn't have a title." % obj
-            )
-
-        required_keys = obj.get("required")
-        if not required_keys:
-            required_keys = []
-
-        fields = {
-            "title": obj.get("title"),
-            "rows": []
-        }
-        tables = [fields]
-
-        props = obj["properties"]
-        for key_name in sorted(props):
-            value_type = None
-            required = key_name in required_keys
-            desc = props[key_name].get("description", "")
-
-            if props[key_name]["type"] == "object":
-                if props[key_name].get("additionalProperties"):
-                    # not "really" an object, just a KV store
-                    value_type = (
-                        "{string: %s}" %
-                        props[key_name]["additionalProperties"]["type"]
-                    )
-                else:
-                    nested_object = get_content_fields(
-                        props[key_name],
-                        enforce_title=True
-                    )
-                    value_type = "{%s}" % nested_object[0]["title"]
-                    tables += nested_object
-            elif props[key_name]["type"] == "array":
-                # if the items of the array are objects then recurse
-                if props[key_name]["items"]["type"] == "object":
-                    nested_object = get_content_fields(
-                        props[key_name]["items"],
-                        enforce_title=True
-                    )
-                    value_type = "[%s]" % nested_object[0]["title"]
-                    tables += nested_object
-                else:
-                    value_type = "[%s]" % props[key_name]["items"]["type"]
-            else:
-                value_type = props[key_name]["type"]
-                if props[key_name].get("enum"):
-                    value_type = "enum"
-                    desc += " One of: %s" % json.dumps(props[key_name]["enum"])
-
-            fields["rows"].append({
-                "key": key_name,
-                "type": value_type,
-                "required": required,
-                "desc": desc,
-                "req_str": "**Required.** " if required else ""
-            })
-        return tables
-
-    for filename in os.listdir(path):
-        if not filename.startswith("m."):
-            continue
-        print "Reading %s" % os.path.join(path, filename)
-        with open(os.path.join(path, filename), "r") as f:
-            json_schema = json.loads(f.read())
-            schema = {
-                "typeof": None,
-                "typeof_info": "",
-                "type": None,
-                "title": None,
-                "desc": None,
-                "content_fields": [
-                    # {
-                    #   title: "<title> key"
-                    #   rows: [
-                    #     { key: <key_name>, type: <string>,
-                    #       desc: <desc>, required: <bool> }
-                    #   ]
-                    # }
-                ]
-            }
-
-            # add typeof
-            base_defs = {
-                "core#/definitions/room_event": "Message Event",
-                "core#/definitions/state_event": "State Event"
-            }
-            if type(json_schema.get("allOf")) == list:
-                schema["typeof"] = base_defs.get(
-                    json_schema["allOf"][0].get("$ref")
-                )
+        def get_content_fields(obj, enforce_title=False):
+            # Algorithm:
+            # f.e. property => add field info (if field is object then recurse)
+            if obj.get("type") != "object":
+                raise Exception(
+                    "get_content_fields: Object %s isn't an object." % obj
+                )
+            if enforce_title and not obj.get("title"):
+                raise Exception(
+                    "get_content_fields: Nested object %s doesn't have a title." % obj
+                )
 
-            # add type
-            schema["type"] = prop(json_schema, "properties/type/enum")[0]
+            required_keys = obj.get("required")
+            if not required_keys:
+                required_keys = []
 
-            # add summary and desc
-            schema["title"] = json_schema.get("title")
-            schema["desc"] = json_schema.get("description", "")
+            fields = {
+                "title": obj.get("title"),
+                "rows": []
+            }
+            tables = [fields]
 
-            # walk the object for field info
-            schema["content_fields"] = get_content_fields(
-                prop(json_schema, "properties/content")
-            )
+            props = obj["properties"]
+            for key_name in sorted(props):
+                value_type = None
+                required = key_name in required_keys
+                desc = props[key_name].get("description", "")
+
+                if props[key_name]["type"] == "object":
+                    if props[key_name].get("additionalProperties"):
+                        # not "really" an object, just a KV store
+                        value_type = (
+                            "{string: %s}" %
+                            props[key_name]["additionalProperties"]["type"]
+                        )
+                    else:
+                        nested_object = get_content_fields(
+                            props[key_name],
+                            enforce_title=True
+                        )
+                        value_type = "{%s}" % nested_object[0]["title"]
+                        tables += nested_object
+                elif props[key_name]["type"] == "array":
+                    # if the items of the array are objects then recurse
+                    if props[key_name]["items"]["type"] == "object":
+                        nested_object = get_content_fields(
+                            props[key_name]["items"],
+                            enforce_title=True
+                        )
+                        value_type = "[%s]" % nested_object[0]["title"]
+                        tables += nested_object
+                    else:
+                        value_type = "[%s]" % props[key_name]["items"]["type"]
+                else:
+                    value_type = props[key_name]["type"]
+                    if props[key_name].get("enum"):
+                        value_type = "enum"
+                        desc += " One of: %s" % json.dumps(props[key_name]["enum"])
+
+                fields["rows"].append({
+                    "key": key_name,
+                    "type": value_type,
+                    "required": required,
+                    "desc": desc,
+                    "req_str": "**Required.** " if required else ""
+                })
+            return tables
+
+        for filename in os.listdir(path):
+            if not filename.startswith("m."):
+                continue
+            self.log("Reading %s" % os.path.join(path, filename))
+            with open(os.path.join(path, filename), "r") as f:
+                json_schema = json.loads(f.read())
+                schema = {
+                    "typeof": None,
+                    "typeof_info": "",
+                    "type": None,
+                    "title": None,
+                    "desc": None,
+                    "content_fields": [
+                        # {
+                        #   title: "<title> key"
+                        #   rows: [
+                        #     { key: <key_name>, type: <string>,
+                        #       desc: <desc>, required: <bool> }
+                        #   ]
+                        # }
+                    ]
+                }
+
+                # add typeof
+                base_defs = {
+                    "core#/definitions/room_event": "Message Event",
+                    "core#/definitions/state_event": "State Event"
+                }
+                if type(json_schema.get("allOf")) == list:
+                    schema["typeof"] = base_defs.get(
+                        json_schema["allOf"][0].get("$ref")
+                    )
+
+                # add type
+                schema["type"] = prop(json_schema, "properties/type/enum")[0]
+
+                # add summary and desc
+                schema["title"] = json_schema.get("title")
+                schema["desc"] = json_schema.get("description", "")
+
+                # walk the object for field info
+                schema["content_fields"] = get_content_fields(
+                    prop(json_schema, "properties/content")
+                )
+
+                # Assign state key info
+                if schema["typeof"] == "State Event":
+                    skey_desc = prop(json_schema, "properties/state_key/description")
+                    if not skey_desc:
+                        raise Exception("Missing description for state_key")
+                    schema["typeof_info"] = "``state_key``: %s" % skey_desc
+
+                schemata[filename] = schema
+        return schemata
+
+    def load_git_version(self):
+        null = open(os.devnull, 'w')
+        cwd = os.path.dirname(os.path.abspath(__file__))
+        try:
+            git_branch = subprocess.check_output(
+                ['git', 'rev-parse', '--abbrev-ref', 'HEAD'],
+                stderr=null,
+                cwd=cwd,
+            ).strip()
+        except subprocess.CalledProcessError:
+            git_branch = ""
+        try:
+            git_tag = subprocess.check_output(
+                ['git', 'describe', '--exact-match'],
+                stderr=null,
+                cwd=cwd,
+            ).strip()
+            git_tag = "tag=" + git_tag
+        except subprocess.CalledProcessError:
+            git_tag = ""
+        try:
+            git_commit = subprocess.check_output(
+                ['git', 'rev-parse', '--short', 'HEAD'],
+                stderr=null,
+                cwd=cwd,
+            ).strip()
+        except subprocess.CalledProcessError:
+            git_commit = ""
+        try:
+            dirty_string = "-this_is_a_dirty_checkout"
+            is_dirty = subprocess.check_output(
+                ['git', 'describe', '--dirty=' + dirty_string, "--all"],
+                stderr=null,
+                cwd=cwd,
+            ).strip().endswith(dirty_string)
+            git_dirty = "dirty" if is_dirty else ""
+        except subprocess.CalledProcessError:
+            git_dirty = ""
+
+        if git_branch or git_tag or git_commit or git_dirty:
+            git_version = ",".join(
+                s for s in
+                (git_branch, git_tag, git_commit, git_dirty,)
+                if s
+            )
+            return git_version.encode("ascii")
+        return "Unknown rev"
 
-            # Assign state key info
-            if schema["typeof"] == "State Event":
-                skey_desc = prop(json_schema, "properties/state_key/description")
-                if not skey_desc:
-                    raise Exception("Missing description for state_key")
-                schema["typeof_info"] = "``state_key``: %s" % skey_desc
-
-            schemata[filename] = schema
-    return schemata
-
-def _load_git_ver():
-    null = open(os.devnull, 'w')
-    cwd = os.path.dirname(os.path.abspath(__file__))
-    try:
-        git_branch = subprocess.check_output(
-            ['git', 'rev-parse', '--abbrev-ref', 'HEAD'],
-            stderr=null,
-            cwd=cwd,
-        ).strip()
-    except subprocess.CalledProcessError:
-        git_branch = ""
-    try:
-        git_tag = subprocess.check_output(
-            ['git', 'describe', '--exact-match'],
-            stderr=null,
-            cwd=cwd,
-        ).strip()
-        git_tag = "tag=" + git_tag
-    except subprocess.CalledProcessError:
-        git_tag = ""
-    try:
-        git_commit = subprocess.check_output(
-            ['git', 'rev-parse', '--short', 'HEAD'],
-            stderr=null,
-            cwd=cwd,
-        ).strip()
-    except subprocess.CalledProcessError:
-        git_commit = ""
-    try:
-        dirty_string = "-this_is_a_dirty_checkout"
-        is_dirty = subprocess.check_output(
-            ['git', 'describe', '--dirty=' + dirty_string, "--all"],
-            stderr=null,
-            cwd=cwd,
-        ).strip().endswith(dirty_string)
-        git_dirty = "dirty" if is_dirty else ""
-    except subprocess.CalledProcessError:
-        git_dirty = ""
-
-    if git_branch or git_tag or git_commit or git_dirty:
-        git_version = ",".join(
-            s for s in
-            (git_branch, git_tag, git_commit, git_dirty,)
-            if s
-        )
-        return git_version.encode("ascii")
-    return "Unknown rev"
-
-UNIT_DICT = {
-    "event-examples": _load_examples,
-    "event-schemas": _load_schemas,
-    "common-event-fields": _load_common_event_fields,
-    "git-version": _load_git_ver
-}
 
 def load():
     store = AccessKeyStore()
-    for unit_key in UNIT_DICT:
-        unit = UNIT_DICT[unit_key]()
-        print "Generated unit '%s' : %s" % (
-            unit_key, json.dumps(unit)[:50].replace("\n","")
-        )
-        store.add(unit_key, unit)
+    units = Units()
+    unit_dict = units.get_units()
+    for unit_key in unit_dict:
+        store.add(unit_key, unit_dict[unit_key])
     return store
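Because get_units() discovers units by introspection, the UNIT_DICT registry deleted above is no longer needed: any method whose name starts with "load_" becomes a unit automatically. The stand-alone sketch below is a stripped-down stand-in for the Units class, kept only to show that convention; load_changelog and its return value are made up for illustration.

    import inspect

    class Units(object):
        def get_units(self):
            # same discovery rule as in the diff above, minus the logging
            unit_dict = {}
            for (name, func) in inspect.getmembers(self, predicate=inspect.ismethod):
                if name.startswith("load_"):
                    unit_dict[name[len("load_"):]] = func()
            return unit_dict

        def load_changelog(self):
            return "hypothetical unit body"

    # prints {'changelog': 'hypothetical unit body'}
    print Units().get_units()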