-
-
-
-
-
-
-
- import re,json
- from pathlib import Path
-
def extend_dict(d: dict, k: tuple):
    """Ensure the nested-dict path named by *k* exists in *d*.

    Each element of *k* names one level of nesting. Missing levels are
    created as empty dicts; existing levels are left untouched.
    Generalizes the original fixed three-level version to any depth
    (backward compatible: callers pass 3-tuples).
    """
    node = d
    for key in k:
        node = node.setdefault(key, {})
-
# Patterns recognizing "groupable" fragments of option names: axis letters,
# index digits, MIN/MAX, hotend/bed/probe identifiers, and axis-limit words.
# An option whose underscore-separated part matches one of these can be
# folded into a wildcard group together with its siblings.
grouping_patterns = [
    re.compile(r'^([XYZIJKUVW]|[XYZ]2|Z[34]|E[0-7])$'),
    re.compile(r'^AXIS\d$'),
    re.compile(r'^(MIN|MAX)$'),
    re.compile(r'^[0-8]$'),
    re.compile(r'^HOTEND[0-7]$'),
    re.compile(r'^(HOTENDS|BED|PROBE|COOLER)$'),
    re.compile(r'^[XYZIJKUVW]M(IN|AX)$')
]


def find_grouping(gdict, filekey, sectkey, optkey, pindex):
    """Record *optkey* in *gdict* if its *pindex*-th underscore-separated
    part matches one of ``grouping_patterns``.

    The matching part is replaced with ``*`` to form a wildcard name; the
    pair (matched part, original name) is appended to the list stored
    under the composite key ``"filekey|sectkey|WILDCARD"`` so the caller
    can later merge all options sharing that wildcard.
    """
    optparts = optkey.split('_')
    # Need at least two parts, and pindex must be a valid part index.
    if 1 < len(optparts) > pindex:
        for patt in grouping_patterns:
            if patt.match(optparts[pindex]):
                subkey = optparts[pindex]
                modkey = '_'.join(optparts)
                optparts[pindex] = '*'
                wildkey = '_'.join(optparts)
                kkey = f'{filekey}|{sectkey}|{wildkey}'
                # setdefault replaces the manual "if kkey not in" dance.
                gdict.setdefault(kkey, []).append((subkey, modkey))
                # At most one pattern can apply: the part is now '*',
                # which no pattern matches — stop scanning.
                break
-
-
def group_options(schema):
    """Collapse sibling options like X_MIN_POS / X_MAX_POS into a single
    wildcard entry (X_*_POS) holding a sub-dict keyed by the varying part.

    Scans underscore-separated name positions from index 10 down to 0 so
    later (deeper) fragments are grouped before earlier ones. Mutates
    *schema* in place.
    """
    for pindex in range(10, -1, -1):
        # Collect candidate groups for this name position.
        found_groups = {}
        for filekey, f in schema.items():
            for sectkey, s in f.items():
                for optkey in s:
                    find_grouping(found_groups, filekey, sectkey, optkey, pindex)

        # Fold only when two or more options share a wildcard. Iterating
        # items() directly replaces the original's needless key-list copy
        # and the pointless deletion from a throwaway local dict.
        for kkey, items in found_groups.items():
            if len(items) > 1:
                f, s, w = kkey.split('|')
                extend_dict(schema, (f, s, w))
                for subkey, optkey in items:
                    schema[f][s][w][subkey] = schema[f][s][optkey]
                    del schema[f][s][optkey]
-
-
def load_boards(bpath=Path("Marlin/src/core/boards.h")):
    """Return all board names from boards.h as a pseudo-JSON list string
    (e.g. "['BOARD_A','BOARD_B']"), or '' when unavailable.

    Args:
        bpath: Path of the boards header. Defaults to the standard Marlin
               tree location, preserving the original no-argument call.

    The original returned the bogus string "['']" when the file existed
    but contained no boards; an empty result now also yields ''.
    """
    if not bpath.is_file():
        return ''
    with bpath.open() as bfile:
        boards = []
        for line in bfile:
            if line.startswith("#define BOARD_"):
                bname = line.split()[1]
                if bname != "BOARD_UNKNOWN":
                    boards.append(bname)
    return "['" + "','".join(boards) + "']" if boards else ''
-
-
-
-
def extract():
    """Parse Marlin's Configuration.h / Configuration_adv.h into a schema.

    Returns:
        dict of the form { 'basic': {section: {NAME: info}}, 'advanced': {...} }
        where each info dict describes one #define: value, inferred type,
        source line, serial id, enabling #if conditions ('requires'), and
        any comment/options metadata found near the define.

    Fixes over the previous revision:
      * a repeated define's accumulated list is now stored back into the
        schema (it was built and then dropped);
      * the float regex is anchored so values like "4.0*60" no longer
        crash float();
      * names in the `ignore` tuple are actually skipped;
      * text inside an open block comment no longer leaks into the
        preprocessor-conditional parser.
    """

    boards = load_boards()

    # Parser state machine states.
    class Parse:
        NORMAL = 0          # reading code / definitions
        BLOCK_COMMENT = 1   # inside a /* ... */ comment
        EOL_COMMENT = 2     # a trailing // comment continuing on later lines
        GET_SENSORS = 3     # gathering the temperature-sensor list
        ERROR = 9           # (reserved)

    # Source files and the top-level schema key each one feeds.
    filekey = { 'Configuration.h':'basic', 'Configuration_adv.h':'advanced' }

    sch_out = { 'basic':{}, 'advanced':{} }

    # A (possibly commented-out) #define line:
    # (1) '//'?, (2) '#define', (3) name, (4) value, (5) trailing comment.
    defgrep = re.compile(r'^(//)?\s*(#define)\s+([A-Za-z0-9_]+)\s*(.*?)\s*(//.+)?$')

    # Defines that are never included in the schema.
    ignore = ('CONFIGURATION_H_VERSION', 'CONFIGURATION_ADV_H_VERSION', 'CONFIG_EXAMPLES_DIR', 'CONFIG_EXPORT')

    state = Parse.NORMAL

    sid = 0  # serial id, bumped for every schema entry

    for fn, fk in filekey.items():
        with Path("Marlin", fn).open() as fileobj:
            section = 'none'        # current @section
            line_number = 0
            conditions = []         # stack of active #if condition lists
            comment_buff = []       # comment lines being accumulated
            options_json = ''       # pending ':' options metadata
            eol_options = False     # options came from an end-of-line comment
            join_line = False       # previous line ended with '\'
            line = ''
            last_added_ref = None   # most recently added define_info

            for the_line in fileobj.readlines():
                line_number += 1

                the_line = the_line.strip()

                # Join lines ending with a backslash into one logical line.
                if join_line:
                    line += (' ' if line else '') + the_line
                else:
                    line, line_start = the_line, line_number

                join_line = line.endswith("\\")
                if join_line:
                    line = line[:-1].strip()
                    continue
                else:
                    line_end = line_number  # kept for symmetry with line_start

                defmatch = defgrep.match(line)

                # Flush a multi-line end-of-line comment onto the define
                # it belongs to once a non-'//' (or new #define) line appears.
                if state == Parse.EOL_COMMENT:
                    if not defmatch and the_line.startswith('//'):
                        comment_buff.append(the_line[2:].strip())
                    else:
                        last_added_ref['comment'] = ' '.join(comment_buff)
                        comment_buff = []
                        state = Parse.NORMAL

                def use_comment(c, opt, sec, bufref):
                    # Interpret one comment line: ':{...}'/':[...]' options
                    # metadata, '@section' markers, or plain comment text.
                    if c.startswith(':'):
                        d = c[1:].strip()
                        cbr = c.rindex('}') if d.startswith('{') else c.rindex(']') if d.startswith('[') else 0
                        if cbr:
                            opt, cmt = c[1:cbr+1].strip(), c[cbr+1:].strip()
                            if cmt != '': bufref.append(cmt)
                        else:
                            opt = c[1:].strip()
                    elif c.startswith('@section'):
                        sec = c[8:].strip()
                    elif not c.startswith('========'):
                        bufref.append(c)
                    return opt, sec

                # Inside a block comment (or the sensors list within one).
                if state in (Parse.BLOCK_COMMENT, Parse.GET_SENSORS):
                    endpos = line.find('*/')
                    if endpos < 0:
                        cline = line
                        # The whole line is comment text; clear it so it
                        # can't be parsed below as code (previously leaked).
                        line = ''
                    else:
                        cline, line = line[:endpos].strip(), line[endpos+2:].strip()
                        # Closing the sensors comment completes the list.
                        if state == Parse.GET_SENSORS:
                            options_json = f'[ {options_json[:-2]} ]'
                        state = Parse.NORMAL

                    # Strip the decorative leading '*' of block comments.
                    if cline.startswith('*'): cline = cline[1:].strip()

                    if state == Parse.GET_SENSORS:
                        # Collect "number : description" sensor entries.
                        sens = re.match(r'^(-?\d+)\s*:\s*(.+)$', cline)
                        if sens:
                            s2 = sens[2].replace("'","''")
                            options_json += f"{sens[1]}:'{s2}', "

                    elif state == Parse.BLOCK_COMMENT:
                        # The sensors list starts at this marker line.
                        if cline == "Temperature sensors available:":
                            state, cline = Parse.GET_SENSORS, "Temperature Sensors"
                        options_json, section = use_comment(cline, options_json, section, comment_buff)

                elif state == Parse.NORMAL:
                    # Skip the leading '//' of a commented-out #define when
                    # scanning for an end-of-line comment.
                    st = 2 if re.match(r'^//\s*#define', line) else 0
                    cpos1 = line.find('/*')
                    cpos2 = line.find('//', st)

                    # Which comment opener comes first?
                    cpos = -1
                    if cpos1 != -1 and (cpos1 < cpos2 or cpos2 == -1):
                        cpos = cpos1
                        comment_buff = []
                        state = Parse.BLOCK_COMMENT
                        eol_options = False
                    elif cpos2 != -1 and (cpos2 < cpos1 or cpos1 == -1):
                        cpos = cpos2

                    # A comment trailing a #define may continue below it.
                    if state == Parse.NORMAL and defmatch is not None and cpos > 10:
                        state = Parse.EOL_COMMENT
                        comment_buff = []

                    # Split the comment text off the code.
                    if cpos != -1:
                        cline, line = line[cpos+2:].strip(), line[:cpos].strip()

                        if state == Parse.BLOCK_COMMENT:
                            if cline.startswith('*'): cline = cline[1:].strip()
                        elif cline.startswith(':'):
                            # Options given at end-of-line expire after use.
                            eol_options = True

                        if cline != '':
                            options_json, section = use_comment(cline, options_json, section, comment_buff)

                # A line with no code (left) clears pending options.
                if line == '':
                    options_json = ''
                    continue

                def atomize(s):
                    # Parenthesize a condition unless it is already atomic.
                    if s == '' \
                    or re.match(r'^[A-Za-z0-9_]*(\([^)]+\))?$', s) \
                    or re.match(r'^[A-Za-z0-9_]+ == \d+?$', s):
                        return s
                    return f'({s})'

                # Track preprocessor conditionals to build 'requires'.
                cparts = line.split()
                iselif, iselse = cparts[0] == '#elif', cparts[0] == '#else'
                if iselif or iselse or cparts[0] == '#endif':
                    if len(conditions) == 0:
                        raise Exception(f'no #if block at line {line_number}')

                    # Pop the current block's conditions.
                    prev = conditions.pop()

                    # #elif/#else negate the last condition; #elif appends
                    # its own condition and the block stays on the stack.
                    if iselif or iselse:
                        prev[-1] = '!' + prev[-1]
                        if iselif: prev.append(atomize(line[5:].strip()))
                        conditions.append(prev)

                elif cparts[0] == '#if':
                    conditions.append([ atomize(line[3:].strip()) ])
                elif cparts[0] == '#ifdef':
                    conditions.append([ f'defined({line[6:].strip()})' ])
                elif cparts[0] == '#ifndef':
                    conditions.append([ f'!defined({line[7:].strip()})' ])

                # A #define line (possibly commented out).
                elif defmatch is not None:

                    # 'enabled' means the define is not commented out.
                    enabled, define_name, val = defmatch[1] is None, defmatch[3], defmatch[4]

                    # Skip uninteresting defines (previously declared but unused).
                    if define_name in ignore: continue

                    sid += 1

                    define_info = {
                        'section': section,
                        'name': define_name,
                        'enabled': enabled,
                        'line': line_start,
                        'sid': sid
                    }

                    # Infer the value's type and convert scalar values.
                    if val == '':
                        value_type = 'switch'
                    elif re.match(r'^(true|false)$', val):
                        value_type = 'bool'
                        val = val == 'true'
                    elif re.match(r'^[-+]?\s*\d+$', val):
                        value_type = 'int'
                        val = int(val)
                    elif re.match(r'^[-+]?\s*(\d+\.|\d*\.\d+)([eE][-+]?\d+)?[fF]?$', val):
                        # Anchored (^...$): an unanchored match let values
                        # like "4.0*60" through and crashed float().
                        value_type = 'float'
                        val = float(val.replace('f',''))
                    else:
                        value_type = 'string' if val[0] == '"' \
                            else 'char' if val[0] == "'" \
                            else 'state' if re.match(r'^(LOW|HIGH)$', val) \
                            else 'enum' if re.match(r'^[A-Za-z0-9_]{3,}$', val) \
                            else 'int[]' if re.match(r'^{(\s*[-+]?\s*\d+\s*(,\s*)?)+}$', val) \
                            else 'float[]' if re.match(r'^{(\s*[-+]?\s*(\d+\.|\d*\.\d+)([eE][-+]?\d+)?[fF]?\s*(,\s*)?)+}$', val) \
                            else 'array' if val[0] == '{' \
                            else ''

                    if val != '': define_info['value'] = val
                    if value_type != '': define_info['type'] = value_type

                    # Join all enclosing conditions with '&&'.
                    if conditions: define_info['requires'] = ' && '.join(sum(conditions, []))

                    # Attach any accumulated comment.
                    if comment_buff:
                        full_comment = '\n'.join(comment_buff)

                        if state == Parse.EOL_COMMENT:
                            # Still gathering; filled in via last_added_ref.
                            define_info['comment'] = ''
                        else:
                            define_info['comment'] = full_comment
                            comment_buff = []

                        # Units, if present, lead the comment in parentheses.
                        units = re.match(r'^\(([^)]+)\)', full_comment)
                        if units:
                            units = units[1]
                            if units == 's' or units == 'sec': units = 'seconds'
                            define_info['units'] = units

                    # Options: the board list for MOTHERBOARD, otherwise
                    # whatever ':' metadata a nearby comment supplied.
                    if define_name == "MOTHERBOARD" and boards != '':
                        define_info['options'] = boards
                    elif options_json != '':
                        define_info['options'] = options_json
                        if eol_options: options_json = ''

                    # Create the section dict on first use.
                    if section not in sch_out[fk]: sch_out[fk][section] = {}

                    # A repeated define accumulates into a list. BUGFIX:
                    # the list built from a lone dict was never stored back.
                    if define_name in sch_out[fk][section]:
                        info = sch_out[fk][section][define_name]
                        if isinstance(info, dict): info = [ info ]
                        info.append(define_info)
                        sch_out[fk][section][define_name] = info
                    else:
                        sch_out[fk][section][define_name] = define_info

                    # Remember the entry so a continued EOL comment can
                    # be attached to it later.
                    if state == Parse.EOL_COMMENT:
                        last_added_ref = define_info

    return sch_out
-
def dump_json(schema: dict, jpath: Path):
    """Write *schema* as pretty-printed JSON to *jpath*.

    The file is opened as UTF-8 explicitly: with ensure_ascii=False the
    dump may contain non-ASCII characters, which would fail on platforms
    whose default locale encoding is not UTF-8.
    """
    with jpath.open('w', encoding='utf-8') as jfile:
        json.dump(schema, jfile, ensure_ascii=False, indent=2)
-
def dump_yaml(schema: dict, ypath: Path):
    """Serialize *schema* to *ypath* as block-style YAML, 120 columns wide."""
    import yaml

    # Block style, wide lines, two-space indent — same output as before.
    dump_opts = dict(default_flow_style=False, width=120, indent=2)
    with ypath.open('w') as yfile:
        yaml.dump(schema, yfile, **dump_opts)
-
def main():
    """Command-line entry point: extract the schema and emit JSON/YAML.

    Optional argv[1] selects the output: 'some' (default) = schema.json
    plus schema.yml, 'json' = JSON only, 'jsons' = JSON + grouped JSON,
    'group' = grouped JSON only, 'yml'/'yaml' = YAML only.
    """
    try:
        schema = extract()
    except Exception as exc:
        # Top-level boundary: report and fall through with no schema.
        print("Error: " + str(exc))
        schema = None

    if schema:
        import sys

        # Which output formats were requested?
        arg = sys.argv[1] if len(sys.argv) > 1 else 'some'

        # Plain JSON schema
        if arg in ['some', 'json', 'jsons']:
            print("Generating JSON ...")
            dump_json(schema, Path('schema.json'))

        # JSON with wildcard-grouped options
        if arg in ['group', 'jsons']:
            group_options(schema)
            dump_json(schema, Path('schema_grouped.json'))

        # YAML schema, installing pyyaml on demand
        if arg in ['some', 'yml', 'yaml']:
            try:
                import yaml
            except ImportError:
                print("Installing YAML module ...")
                import subprocess
                try:
                    # Use the running interpreter rather than a hard-coded
                    # 'python3' (which doesn't exist on Windows), and fail
                    # loudly if pip itself errors out.
                    subprocess.run([sys.executable, '-m', 'pip', 'install', 'pyyaml'], check=True)
                    import yaml
                except Exception:
                    # Narrowed from a bare 'except:' that also swallowed
                    # KeyboardInterrupt / SystemExit.
                    print("Failed to install YAML module")
                    return

            print("Generating YML ...")
            dump_yaml(schema, Path('schema.yml'))
-
# Allow use as both an importable module and a command-line tool.
if __name__ == '__main__':
    main()