mirror of https://github.com/minetest/irrlicht.git
synced 2025-07-01 15:50:27 +02:00

Commit: Shamelessly copy Josiah
10  source/lib/json/tools/amalgamate/CHANGES.md  Normal file
@@ -0,0 +1,10 @@
The following changes have been made to the code with respect to <https://github.com/edlund/amalgamate/commit/c91f07eea1133aa184f652b8f1398eaf03586208>:

- Resolved inspection results from PyCharm:
    - replaced tabs with spaces
    - added an encoding annotation
    - reindented the file to remove trailing whitespace
    - removed the unused import `sys`
    - simplified a membership check
    - made a module-level function from `_is_within`
    - removed the unused variable `actual_path`
66  source/lib/json/tools/amalgamate/README.md  Normal file
@@ -0,0 +1,66 @@
# amalgamate.py - Amalgamate C source and header files

Origin: https://bitbucket.org/erikedlund/amalgamate

Mirror: https://github.com/edlund/amalgamate

`amalgamate.py` aims to make it easy to use SQLite-style C source and header
amalgamation in projects.

For more information, please refer to: http://sqlite.org/amalgamation.html

## Here be dragons

`amalgamate.py` is quite dumb; it only knows the bare minimum about C code
required to handle trivial include directives. It can produce weird results
for unexpected code.

Things to be aware of:

`amalgamate.py` will not handle complex include directives correctly:

```c
#define HEADER_PATH "path/to/header.h"
#include HEADER_PATH
```

In the above example, `path/to/header.h` will not be included in the
amalgamation, because `HEADER_PATH` is never expanded.

`amalgamate.py` assumes that each non-empty source and header file ends in a
new-line character which is not immediately preceded by a backslash character
(see 5.1.1.2p1.2 of ISO C99).

`amalgamate.py` should be usable with C++ code, but raw string literals from
C++11 will definitely cause problems:

```cpp
R"delimiter(Terrible raw \ data " #include <sneaky.hpp>)delimiter"
R"delimiter(Terrible raw \ data " escaping)delimiter"
```

In the examples above, `amalgamate.py` will stop parsing the raw string
literal when it encounters the first quotation mark, which will produce
unexpected results.

## Installing amalgamate.py

Python v2.7.0 or higher is required.

`amalgamate.py` can be tested and installed using the following commands:

```
./test.sh && sudo -k cp ./amalgamate.py /usr/local/bin/
```

## Using amalgamate.py

```
amalgamate.py [-v] -c path/to/config.json -s path/to/source/dir \
        [-p path/to/prologue.(c|h)]
```

* The `-c, --config` option should specify the path to a JSON config file
  which lists the source files, include paths and where to write the
  resulting amalgamation. Have a look at `test/source.c.json` and
  `test/include.h.json` to see two examples; a minimal sketch follows this
  list.

* The `-s, --source` option should specify the path to the source directory.
  This is useful for supporting separate source and build directories.

* The `-p, --prologue` option is optional and should specify the path to a
  file which will be added to the beginning of the amalgamation.
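For orientation, here is a hypothetical minimal config (the key names mirror the real configs shipped in this commit, such as `config_json.json` below; the project name and paths are made up):

```json
{
    "project": "Example project",
    "target": "dist/example_amalgamated.h",
    "sources": [
        "include/example/example.h"
    ],
    "include_paths": ["include"]
}
```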
299  source/lib/json/tools/amalgamate/amalgamate.py  Executable file
@@ -0,0 +1,299 @@
#!/usr/bin/env python3
# coding=utf-8

# amalgamate.py - Amalgamate C source and header files.
# Copyright (c) 2012, Erik Edlund <erik.edlund@32767.se>
#
# Redistribution and use in source and binary forms, with or without modification,
# are permitted provided that the following conditions are met:
#
#  * Redistributions of source code must retain the above copyright notice,
#  this list of conditions and the following disclaimer.
#
#  * Redistributions in binary form must reproduce the above copyright notice,
#  this list of conditions and the following disclaimer in the documentation
#  and/or other materials provided with the distribution.
#
#  * Neither the name of Erik Edlund, nor the names of its contributors may
#  be used to endorse or promote products derived from this software without
#  specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
# ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR
# ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
# (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON
# ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
# SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals

import argparse
import datetime
import json
import os
import re


class Amalgamation(object):

    # Prepends self.source_path to file_path if needed.
    def actual_path(self, file_path):
        if not os.path.isabs(file_path):
            file_path = os.path.join(self.source_path, file_path)
        return file_path

    # Search included file_path in self.include_paths and
    # in source_dir if specified.
    def find_included_file(self, file_path, source_dir):
        search_dirs = self.include_paths[:]
        if source_dir:
            search_dirs.insert(0, source_dir)

        for search_dir in search_dirs:
            search_path = os.path.join(search_dir, file_path)
            if os.path.isfile(self.actual_path(search_path)):
                return search_path
        return None

    def __init__(self, args):
        with open(args.config, 'r') as f:
            config = json.loads(f.read())
            for key in config:
                setattr(self, key, config[key])

        self.verbose = args.verbose == "yes"
        self.prologue = args.prologue
        self.source_path = args.source_path
        self.included_files = []

    # Generate the amalgamation and write it to the target file.
    def generate(self):
        amalgamation = ""

        if self.prologue:
            with open(self.prologue, 'r') as f:
                amalgamation += datetime.datetime.now().strftime(f.read())

        if self.verbose:
            print("Config:")
            print(" target        = {0}".format(self.target))
            print(" working_dir   = {0}".format(os.getcwd()))
            print(" include_paths = {0}".format(self.include_paths))
        print("Creating amalgamation:")
        for file_path in self.sources:
            # Do not check the include paths while processing the source
            # list, all given source paths must be correct.
            # actual_path = self.actual_path(file_path)
            print(" - processing \"{0}\"".format(file_path))
            t = TranslationUnit(file_path, self, True)
            amalgamation += t.content

        with open(self.target, 'w') as f:
            f.write(amalgamation)

        print("...done!\n")
        if self.verbose:
            print("Files processed: {0}".format(self.sources))
            print("Files included: {0}".format(self.included_files))
            print("")


# Returns True if the match is within the list of other matches.
def _is_within(match, matches):
    for m in matches:
        if match.start() > m.start() and \
                match.end() < m.end():
            return True
    return False


class TranslationUnit(object):
    # // C++ comment.
    cpp_comment_pattern = re.compile(r"//.*?\n")

    # /* C comment. */
    c_comment_pattern = re.compile(r"/\*.*?\*/", re.S)

    # "complex \"stri\\\ng\" value".
    string_pattern = re.compile("[^']" r'".*?(?<=[^\\])"', re.S)

    # Handle simple include directives. Support for advanced
    # directives where macros and defines need to be expanded is
    # not a concern right now.
    include_pattern = re.compile(
        r'#\s*include\s+(<|")(?P<path>.*?)("|>)', re.S)

    # #pragma once
    pragma_once_pattern = re.compile(r'#\s*pragma\s+once', re.S)

    # Search for pattern in self.content, add the match to
    # contexts if found and update the index accordingly.
    def _search_content(self, index, pattern, contexts):
        match = pattern.search(self.content, index)
        if match:
            contexts.append(match)
            return match.end()
        return index + 2

    # Return all the skippable contexts, i.e., comments and strings.
    def _find_skippable_contexts(self):
        # Find contexts in the content in which a found include
        # directive should not be processed.
        skippable_contexts = []

        # Walk through the content char by char, and try to grab
        # skippable contexts using regular expressions when found.
        i = 1
        content_len = len(self.content)
        while i < content_len:
            j = i - 1
            current = self.content[i]
            previous = self.content[j]

            if current == '"':
                # String value.
                i = self._search_content(j, self.string_pattern,
                                         skippable_contexts)
            elif current == '*' and previous == '/':
                # C style comment.
                i = self._search_content(j, self.c_comment_pattern,
                                         skippable_contexts)
            elif current == '/' and previous == '/':
                # C++ style comment.
                i = self._search_content(j, self.cpp_comment_pattern,
                                         skippable_contexts)
            else:
                # Skip to the next char.
                i += 1

        return skippable_contexts

    # Removes pragma once from content.
    def _process_pragma_once(self):
        content_len = len(self.content)
        if content_len < len("#include <x>"):
            return 0

        # Find contexts in the content in which a found include
        # directive should not be processed.
        skippable_contexts = self._find_skippable_contexts()

        pragmas = []
        pragma_once_match = self.pragma_once_pattern.search(self.content)
        while pragma_once_match:
            if not _is_within(pragma_once_match, skippable_contexts):
                pragmas.append(pragma_once_match)

            pragma_once_match = self.pragma_once_pattern.search(self.content,
                                                                pragma_once_match.end())

        # Handle all collected pragma once directives.
        prev_end = 0
        tmp_content = ''
        for pragma_match in pragmas:
            tmp_content += self.content[prev_end:pragma_match.start()]
            prev_end = pragma_match.end()
        tmp_content += self.content[prev_end:]
        self.content = tmp_content

    # Include all trivial #include directives into self.content.
    def _process_includes(self):
        content_len = len(self.content)
        if content_len < len("#include <x>"):
            return 0

        # Find contexts in the content in which a found include
        # directive should not be processed.
        skippable_contexts = self._find_skippable_contexts()

        # Search for include directives in the content, collect those
        # which should be included into the content.
        includes = []
        include_match = self.include_pattern.search(self.content)
        while include_match:
            if not _is_within(include_match, skippable_contexts):
                include_path = include_match.group("path")
                search_same_dir = include_match.group(1) == '"'
                found_included_path = self.amalgamation.find_included_file(
                    include_path, self.file_dir if search_same_dir else None)
                if found_included_path:
                    includes.append((include_match, found_included_path))

            include_match = self.include_pattern.search(self.content,
                                                        include_match.end())

        # Handle all collected include directives.
        prev_end = 0
        tmp_content = ''
        for include in includes:
            include_match, found_included_path = include
            tmp_content += self.content[prev_end:include_match.start()]
            tmp_content += "// {0}\n".format(include_match.group(0))
            if found_included_path not in self.amalgamation.included_files:
                t = TranslationUnit(found_included_path, self.amalgamation, False)
                tmp_content += t.content
            prev_end = include_match.end()
        tmp_content += self.content[prev_end:]
        self.content = tmp_content

        return len(includes)

    # Do all content processing.
    def _process(self):
        if not self.is_root:
            self._process_pragma_once()
        self._process_includes()

    def __init__(self, file_path, amalgamation, is_root):
        self.file_path = file_path
        self.file_dir = os.path.dirname(file_path)
        self.amalgamation = amalgamation
        self.is_root = is_root

        self.amalgamation.included_files.append(self.file_path)

        actual_path = self.amalgamation.actual_path(file_path)
        if not os.path.isfile(actual_path):
            raise IOError("File not found: \"{0}\"".format(file_path))
        with open(actual_path, 'r') as f:
            self.content = f.read()
            self._process()


def main():
    description = "Amalgamate C source and header files."
    usage = " ".join([
        "amalgamate.py",
        "[-v]",
        "-c path/to/config.json",
        "-s path/to/source/dir",
        "[-p path/to/prologue.(c|h)]"
    ])
    argsparser = argparse.ArgumentParser(
        description=description, usage=usage)

    argsparser.add_argument("-v", "--verbose", dest="verbose",
                            choices=["yes", "no"], metavar="", help="be verbose")

    argsparser.add_argument("-c", "--config", dest="config",
                            required=True, metavar="", help="path to a JSON config file")

    argsparser.add_argument("-s", "--source", dest="source_path",
                            required=True, metavar="", help="source code path")

    argsparser.add_argument("-p", "--prologue", dest="prologue",
                            required=False, metavar="", help="path to a C prologue file")

    amalgamation = Amalgamation(argsparser.parse_args())
    amalgamation.generate()


if __name__ == "__main__":
    main()
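As an aside, the interplay between `include_pattern`, the skippable contexts, and `_is_within` can be seen in this small standalone sketch (not part of the commit; the two regexes are copied from `TranslationUnit` above):

```python
import re

# Regexes copied from TranslationUnit above.
include_pattern = re.compile(r'#\s*include\s+(<|")(?P<path>.*?)("|>)', re.S)
c_comment_pattern = re.compile(r"/\*.*?\*/", re.S)

content = '/* #include "ignored.h" */\n#include "used.h"\n'

# Treat every C comment as a skippable context.
skippable = list(c_comment_pattern.finditer(content))

def is_within(match, matches):
    # Same check as _is_within: strictly inside another match.
    return any(match.start() > m.start() and match.end() < m.end()
               for m in matches)

for m in include_pattern.finditer(content):
    if not is_within(m, skippable):
        print(m.group("path"))  # prints only: used.h
```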
8  source/lib/json/tools/amalgamate/config_json.json  Normal file
@@ -0,0 +1,8 @@
{
    "project": "JSON for Modern C++",
    "target": "single_include/nlohmann/json.hpp",
    "sources": [
        "include/nlohmann/json.hpp"
    ],
    "include_paths": ["include"]
}
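Assuming the tool is run from the json library's project root, where `include/` and `single_include/` resolve (the invocation itself is not part of this commit), this config would be consumed along the lines of:

```
python3 tools/amalgamate/amalgamate.py -c tools/amalgamate/config_json.json -s .
```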
8  source/lib/json/tools/amalgamate/config_json_fwd.json  Normal file
@@ -0,0 +1,8 @@
{
    "project": "JSON for Modern C++",
    "target": "single_include/nlohmann/json_fwd.hpp",
    "sources": [
        "include/nlohmann/json_fwd.hpp"
    ],
    "include_paths": ["include"]
}
78  source/lib/json/tools/gdb_pretty_printer/README.md  Normal file
@@ -0,0 +1,78 @@
# GDB Pretty Printer

File [nlohmann-json.py](nlohmann-json.py) contains a pretty printer for GDB for JSON values of this library. It was originally published as [Gist](https://gist.github.com/ssbssa/60da5339c6e6036b2afce17de06050ea#file-nlohmann-json-py) by [Hannes Domani](https://github.com/ssbssa).

## How to use

- Add the line

  ```
  source /path/to/nlohmann-json.py
  ```

  to `~/.gdbinit`. Note you must replace `/path/to` with the path under which you stored the file `nlohmann-json.py`.
- In GDB, debug as usual. When you want to pretty-print a JSON value `var`, type

  ```
  p -pretty on -array on -- var
  ```

  The result should look like

  ```
  $1 = std::map with 5 elements = {
    ["Baptiste"] = std::map with 1 element = {
      ["first"] = "second"
    },
    ["Emmanuel"] = std::vector of length 3, capacity 3 = {
      3,
      "25",
      0.5
    },
    ["Jean"] = 0.7,
    ["Zorg"] = std::map with 8 elements = {
      ["array"] = std::vector of length 3, capacity 3 = {
        1,
        0,
        2
      },
      ["awesome_str"] = "bleh",
      ["bool"] = true,
      ["flex"] = 0.2,
      ["float"] = 5.22,
      ["int"] = 5,
      ["nested"] = std::map with 1 element = {
        ["bar"] = "barz"
      },
      ["trap "] = "you fell"
    },
    ["empty"] = nlohmann::detail::value_t::null
  }
  ```

Requires Python 3.9+. Last tested with GDB 12.1.
See [#1952](https://github.com/nlohmann/json/issues/1952) for more information. Please post questions there.

## Copyright

MIT License

Copyright (C) 2020 [Hannes Domani](https://github.com/ssbssa)

Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:

The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.

THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
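As a side note beyond the original README, GDB can also load the script for a single session via its standard `-ex` option instead of `~/.gdbinit` (`./your_program` is a placeholder):

```
gdb -ex "source /path/to/nlohmann-json.py" ./your_program
```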
32  source/lib/json/tools/gdb_pretty_printer/nlohmann-json.py  Normal file
@@ -0,0 +1,32 @@
import gdb
import re

# Match both the plain nlohmann namespace and the versioned ABI namespaces.
ns_pattern = re.compile(r'nlohmann(::json_abi(?P<tags>\w*)(_v(?P<v_major>\d+)_(?P<v_minor>\d+)_(?P<v_patch>\d+))?)?::(?P<name>.+)')

class JsonValuePrinter:
    "Print a json-value"

    def __init__(self, val):
        self.val = val

    def to_string(self):
        if self.val.type.strip_typedefs().code == gdb.TYPE_CODE_FLT:
            return ("%.6f" % float(self.val)).rstrip("0")
        return self.val

# Dispatch basic_json values to the matching visualizer based on m_type.
def json_lookup_function(val):
    if m := ns_pattern.fullmatch(str(val.type.strip_typedefs().name)):
        name = m.group('name')
        if name and name.startswith('basic_json<') and name.endswith('>'):
            m = ns_pattern.fullmatch(str(val['m_type']))
            t = m.group('name')
            if t and t.startswith('detail::value_t::'):
                try:
                    union_val = val['m_value'][t.removeprefix('detail::value_t::')]
                    if union_val.type.code == gdb.TYPE_CODE_PTR:
                        return gdb.default_visualizer(union_val.dereference())
                    else:
                        return JsonValuePrinter(union_val)
                except Exception:
                    return JsonValuePrinter(val['m_type'])

gdb.pretty_printers.append(json_lookup_function)
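For a quick feel of what `ns_pattern` accepts, here is a small standalone check (not part of the commit; the regex is copied from above, the type names are made up):

```python
import re

ns_pattern = re.compile(r'nlohmann(::json_abi(?P<tags>\w*)(_v(?P<v_major>\d+)_(?P<v_minor>\d+)_(?P<v_patch>\d+))?)?::(?P<name>.+)')

# Both the plain and the versioned ABI namespace forms match,
# and group('name') is what json_lookup_function inspects.
for type_name in ('nlohmann::basic_json<char>',
                  'nlohmann::json_abi_diag_v3_11_2::basic_json<char>'):
    m = ns_pattern.fullmatch(type_name)
    print(m.group('name'))  # basic_json<char> in both cases
```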
10  source/lib/json/tools/generate_natvis/README.md  Normal file
@@ -0,0 +1,10 @@
generate_natvis.py
==================

Generate the Natvis debugger visualization file for all supported namespace combinations.

## Usage

```
./generate_natvis.py --version X.Y.Z output_directory/
```
41  source/lib/json/tools/generate_natvis/generate_natvis.py  Executable file
@@ -0,0 +1,41 @@
#!/usr/bin/env python3

import argparse
import itertools
import jinja2
import os
import re
import sys

def semver(v):
    if not re.fullmatch(r'\d+\.\d+\.\d+', v):
        raise ValueError
    return v

if __name__ == '__main__':
    parser = argparse.ArgumentParser()
    parser.add_argument('--version', required=True, type=semver, help='Library version number')
    parser.add_argument('output', help='Output directory for nlohmann_json.natvis')
    args = parser.parse_args()

    namespaces = ['nlohmann']
    abi_prefix = 'json_abi'
    abi_tags = ['_diag', '_ldvcmp']
    version = '_v' + args.version.replace('.', '_')
    inline_namespaces = []

    # generate all combinations of inline namespace names
    for n in range(0, len(abi_tags) + 1):
        for tags in itertools.combinations(abi_tags, n):
            ns = abi_prefix + ''.join(tags)
            inline_namespaces += [ns, ns + version]

    namespaces += [f'{namespaces[0]}::{ns}' for ns in inline_namespaces]

    env = jinja2.Environment(loader=jinja2.FileSystemLoader(searchpath=sys.path[0]), autoescape=True, trim_blocks=True,
                             lstrip_blocks=True, keep_trailing_newline=True)
    template = env.get_template('nlohmann_json.natvis.j2')
    natvis = template.render(namespaces=namespaces)

    with open(os.path.join(args.output, 'nlohmann_json.natvis'), 'w') as f:
        f.write(natvis)
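To make the combination loop above concrete, here is a standalone sketch of what it yields for a hypothetical `--version 3.11.2`:

```python
import itertools

abi_prefix = 'json_abi'
abi_tags = ['_diag', '_ldvcmp']
version = '_v3_11_2'

inline_namespaces = []
for n in range(0, len(abi_tags) + 1):
    for tags in itertools.combinations(abi_tags, n):
        ns = abi_prefix + ''.join(tags)
        inline_namespaces += [ns, ns + version]

print(inline_namespaces)
# ['json_abi', 'json_abi_v3_11_2',
#  'json_abi_diag', 'json_abi_diag_v3_11_2',
#  'json_abi_ldvcmp', 'json_abi_ldvcmp_v3_11_2',
#  'json_abi_diag_ldvcmp', 'json_abi_diag_ldvcmp_v3_11_2']
```

Each of these, prefixed with `nlohmann::`, becomes a `<Type>` entry in the rendered Natvis file.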
40  source/lib/json/tools/generate_natvis/nlohmann_json.natvis.j2  Normal file
@@ -0,0 +1,40 @@
<?xml version="1.0" encoding="utf-8"?>

<!-- * * * * * * * * AUTO-GENERATED FILE * * * * * * * * -->
<!-- Edit ./tools/generate_natvis/nlohmann_json.natvis.j2 -->
<!-- * * * * * * * * AUTO-GENERATED FILE * * * * * * * * -->

<AutoVisualizer xmlns="http://schemas.microsoft.com/vstudio/debugger/natvis/2010">
{% for ns in namespaces %}
    <!-- Namespace {{ ns }} -->
    <Type Name="{{ ns }}::basic_json&lt;*&gt;">
        <DisplayString Condition="m_type == {{ ns }}::detail::value_t::null">null</DisplayString>
        <DisplayString Condition="m_type == {{ ns }}::detail::value_t::object">{*(m_value.object)}</DisplayString>
        <DisplayString Condition="m_type == {{ ns }}::detail::value_t::array">{*(m_value.array)}</DisplayString>
        <DisplayString Condition="m_type == {{ ns }}::detail::value_t::string">{*(m_value.string)}</DisplayString>
        <DisplayString Condition="m_type == {{ ns }}::detail::value_t::boolean">{m_value.boolean}</DisplayString>
        <DisplayString Condition="m_type == {{ ns }}::detail::value_t::number_integer">{m_value.number_integer}</DisplayString>
        <DisplayString Condition="m_type == {{ ns }}::detail::value_t::number_unsigned">{m_value.number_unsigned}</DisplayString>
        <DisplayString Condition="m_type == {{ ns }}::detail::value_t::number_float">{m_value.number_float}</DisplayString>
        <DisplayString Condition="m_type == {{ ns }}::detail::value_t::discarded">discarded</DisplayString>
        <Expand>
            <ExpandedItem Condition="m_type == {{ ns }}::detail::value_t::object">
                *(m_value.object),view(simple)
            </ExpandedItem>
            <ExpandedItem Condition="m_type == {{ ns }}::detail::value_t::array">
                *(m_value.array),view(simple)
            </ExpandedItem>
        </Expand>
    </Type>

    <!-- Skip the pair first/second members in the treeview while traversing a map.
         Only works in VS 2015 Update 2 and beyond using the new visualization -->
    <Type Name="std::pair&lt;*, {{ ns }}::basic_json&lt;*&gt;&gt;" IncludeView="MapHelper">
        <DisplayString>{second}</DisplayString>
        <Expand>
            <ExpandedItem>second</ExpandedItem>
        </Expand>
    </Type>

{% endfor %}
</AutoVisualizer>
43  source/lib/json/tools/macro_builder/main.cpp  Normal file
@@ -0,0 +1,43 @@
#include <cstdlib>
#include <iostream>
#include <sstream>

using namespace std;

void build_code(int max_args)
{
    stringstream ss;
    ss << "#define NLOHMANN_JSON_EXPAND( x ) x" << endl;
    ss << "#define NLOHMANN_JSON_GET_MACRO(";
    for (int i = 0; i < max_args; i++)
        ss << "_" << i + 1 << ", ";
    ss << "NAME,...) NAME" << endl;

    ss << "#define NLOHMANN_JSON_PASTE(...) NLOHMANN_JSON_EXPAND(NLOHMANN_JSON_GET_MACRO(__VA_ARGS__, \\" << endl;
    for (int i = max_args; i > 1; i--)
        ss << "NLOHMANN_JSON_PASTE" << i << ", \\" << endl;
    ss << "NLOHMANN_JSON_PASTE1)(__VA_ARGS__))" << endl;

    ss << "#define NLOHMANN_JSON_PASTE2(func, v1) func(v1)" << endl;
    for (int i = 3; i <= max_args; i++)
    {
        ss << "#define NLOHMANN_JSON_PASTE" << i << "(func, ";
        for (int j = 1; j < i - 1; j++)
            ss << "v" << j << ", ";
        ss << "v" << i - 1 << ") NLOHMANN_JSON_PASTE2(func, v1) NLOHMANN_JSON_PASTE" << i - 1 << "(func, ";
        for (int j = 2; j < i - 1; j++)
            ss << "v" << j << ", ";
        ss << "v" << i - 1 << ")" << endl;
    }

    cout << ss.str() << endl;
}

int main(int argc, char** argv)
{
    int max_args = 64;
    build_code(max_args);

    return 0;
}
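Tracing the loops for a small argument count makes the generated macros easier to follow; with `build_code(3)` instead of `build_code(64)`, the program would print:

```
#define NLOHMANN_JSON_EXPAND( x ) x
#define NLOHMANN_JSON_GET_MACRO(_1, _2, _3, NAME,...) NAME
#define NLOHMANN_JSON_PASTE(...) NLOHMANN_JSON_EXPAND(NLOHMANN_JSON_GET_MACRO(__VA_ARGS__, \
NLOHMANN_JSON_PASTE3, \
NLOHMANN_JSON_PASTE2, \
NLOHMANN_JSON_PASTE1)(__VA_ARGS__))
#define NLOHMANN_JSON_PASTE2(func, v1) func(v1)
#define NLOHMANN_JSON_PASTE3(func, v1, v2) NLOHMANN_JSON_PASTE2(func, v1) NLOHMANN_JSON_PASTE2(func, v2)
```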
91  source/lib/json/tools/serve_header/README.md  Normal file
@@ -0,0 +1,91 @@
serve_header.py
===============

Serves the `single_include/nlohmann/json.hpp` header file over HTTP(S).

The header file is automatically amalgamated on demand.

![demo](demo.png)

## Prerequisites

1. Make sure these Python packages are installed.
   ```
   PyYAML
   watchdog
   ```
   (see `tools/serve_header/requirements.txt`)

2. To serve the header over HTTPS (which is required by Compiler Explorer at this time), a certificate is needed.
   The recommended method for creating a locally-trusted certificate is to use [`mkcert`](https://github.com/FiloSottile/mkcert).
   - Install the `mkcert` certificate authority into your trust store(s):
     ```
     $ mkcert -install
     ```
   - Create a certificate for `localhost`:
     ```
     $ mkcert localhost
     ```
     The command will create two files, `localhost.pem` and `localhost-key.pem`, in the current working directory. It is recommended to create them in the top level or project root directory.

## Usage

`serve_header.py` has a built-in default configuration that will serve the `single_include/nlohmann/json.hpp` header file relative to the top level or project root directory it is homed in.
The built-in configuration expects the certificate `localhost.pem` and the private key `localhost-key.pem` to be located in the top level or project root directory.

To start serving the `json.hpp` header file at `https://localhost:8443/json.hpp`, run this command from the top level or project root directory:
```
$ make serve_header
```

Open [Compiler Explorer](https://godbolt.org/) and try it out:
```cpp
#include <https://localhost:8443/json.hpp>
using namespace nlohmann;

#include <iostream>

int main() {
    // these macros are dynamically injected into the header file
    std::cout << JSON_BUILD_TIME << " (" << JSON_BUILD_COUNT << ")\n";

    return 0;
}
```

> `serve_header.py` dynamically injects the macros `JSON_BUILD_COUNT` and `JSON_BUILD_TIME` into the served header file. By comparing the build count or build time output of the compiled program with the output of `serve_header.py`, one can be reasonably sure the compiled code uses the expected revision of the header file.

## Configuration

`serve_header.py` will try to read a configuration file `serve_header.yml` in the top level or project root directory, and will fall back on built-in defaults if the file cannot be read.
An annotated example configuration can be found in `tools/serve_header/serve_header.yml.example`.

## Serving `json.hpp` from multiple project directory instances or working trees

`serve_header.py` was designed with the goal of supporting multiple project roots or working trees at the same time.
The recommended directory structure is shown below, but `serve_header.py` can work with other structures as well, including a nested hierarchy.
```
json/           ⮜ the parent or web server root directory
├── develop/    ⮜ the main git checkout
│   └── ...
├── feature1/
│   └── ...       any number of additional
├── feature2/   ⮜ working trees (e.g., created
│   └── ...       with git worktree)
└── feature3/
    └── ...
```

To serve the header of each working tree at `https://localhost:8443/<worktree>/json.hpp`, a configuration file is needed.
1. Create the file `serve_header.yml` in the top level or project root directory of any working tree:
   ```yaml
   root: ..
   ```
   By shifting the web server root directory up one level, the `single_include/nlohmann/json.hpp` header files relative to each sibling directory or working tree will be served.

2. Start `serve_header.py` by running this command from the same top level or project root directory the configuration file is located in:
   ```
   $ make serve_header
   ```

`serve_header.py` will automatically detect the addition or removal of working trees anywhere within the configured web server root directory.
BIN  source/lib/json/tools/serve_header/demo.png  Normal file
Binary file not shown. (Size: 544 KiB)
2  source/lib/json/tools/serve_header/requirements.txt  Normal file
@@ -0,0 +1,2 @@
PyYAML==6.0
watchdog==2.1.7
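If the two packages are not already available, installing the pinned versions is ordinary pip usage (the command itself is not part of this commit):

```
$ pip install -r tools/serve_header/requirements.txt
```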
410  source/lib/json/tools/serve_header/serve_header.py  Executable file
@@ -0,0 +1,410 @@
#!/usr/bin/env python3

import contextlib
import logging
import os
import re
import shutil
import sys
import subprocess

from datetime import datetime, timedelta
from io import BytesIO
from threading import Lock, Timer

from watchdog.events import FileSystemEventHandler
from watchdog.observers import Observer

from http import HTTPStatus
from http.server import ThreadingHTTPServer, SimpleHTTPRequestHandler

CONFIG_FILE = 'serve_header.yml'
MAKEFILE = 'Makefile'
INCLUDE = 'include/nlohmann/'
SINGLE_INCLUDE = 'single_include/nlohmann/'
HEADER = 'json.hpp'

DATETIME_FORMAT = '%Y-%m-%d %H:%M:%S'

JSON_VERSION_RE = re.compile(r'\s*#\s*define\s+NLOHMANN_JSON_VERSION_MAJOR\s+')


class ExitHandler(logging.StreamHandler):
    def __init__(self, level):
        """."""
        super().__init__()
        self.level = level

    def emit(self, record):
        if record.levelno >= self.level:
            sys.exit(1)


def is_project_root(test_dir='.'):
    makefile = os.path.join(test_dir, MAKEFILE)
    include = os.path.join(test_dir, INCLUDE)
    single_include = os.path.join(test_dir, SINGLE_INCLUDE)

    return (os.path.exists(makefile)
            and os.path.isfile(makefile)
            and os.path.exists(include)
            and os.path.exists(single_include))


class DirectoryEventBucket:
    def __init__(self, callback, delay=1.2, threshold=0.8):
        """."""
        self.delay = delay
        self.threshold = timedelta(seconds=threshold)
        self.callback = callback
        self.event_dirs = set([])
        self.timer = None
        self.lock = Lock()

    def start_timer(self):
        if self.timer is None:
            self.timer = Timer(self.delay, self.process_dirs)
            self.timer.start()

    def process_dirs(self):
        result_dirs = []
        event_dirs = set([])
        with self.lock:
            self.timer = None
            while self.event_dirs:
                time, event_dir = self.event_dirs.pop()
                delta = datetime.now() - time
                if delta < self.threshold:
                    event_dirs.add((time, event_dir))
                else:
                    result_dirs.append(event_dir)
            self.event_dirs = event_dirs

            if result_dirs:
                self.callback(os.path.commonpath(result_dirs))
            if self.event_dirs:
                self.start_timer()

    def add_dir(self, path):
        with self.lock:
            # add path to the set of event_dirs if it is not a sibling of
            # a directory already in the set
            if not any(os.path.commonpath([path, event_dir]) == event_dir
                       for (_, event_dir) in self.event_dirs):
                self.event_dirs.add((datetime.now(), path))
                if self.timer is None:
                    self.start_timer()


class WorkTree:
    make_command = 'make'

    def __init__(self, root_dir, tree_dir):
        """."""
        self.root_dir = root_dir
        self.tree_dir = tree_dir
        self.rel_dir = os.path.relpath(tree_dir, root_dir)
        self.name = os.path.basename(tree_dir)
        self.include_dir = os.path.abspath(os.path.join(tree_dir, INCLUDE))
        self.header = os.path.abspath(os.path.join(tree_dir, SINGLE_INCLUDE, HEADER))
        self.rel_header = os.path.relpath(self.header, root_dir)
        self.dirty = True
        self.build_count = 0
        t = os.path.getmtime(self.header)
        t = datetime.fromtimestamp(t)
        self.build_time = t.strftime(DATETIME_FORMAT)

    def __hash__(self):
        """."""
        return hash(self.tree_dir)

    def __eq__(self, other):
        """."""
        if not isinstance(other, type(self)):
            return NotImplemented
        return self.tree_dir == other.tree_dir

    def update_dirty(self, path):
        if self.dirty:
            return

        path = os.path.abspath(path)
        if os.path.commonpath([path, self.include_dir]) == self.include_dir:
            logging.info(f'{self.name}: working tree marked dirty')
            self.dirty = True

    def amalgamate_header(self):
        if not self.dirty:
            return

        mtime = os.path.getmtime(self.header)
        subprocess.run([WorkTree.make_command, 'amalgamate'], cwd=self.tree_dir,
                       stdout=subprocess.DEVNULL, stderr=subprocess.DEVNULL)
        if mtime == os.path.getmtime(self.header):
            logging.info(f'{self.name}: no changes')
        else:
            self.build_count += 1
            self.build_time = datetime.now().strftime(DATETIME_FORMAT)
            logging.info(f'{self.name}: header amalgamated (build count {self.build_count})')

        self.dirty = False


class WorkTrees(FileSystemEventHandler):
    def __init__(self, root_dir):
        """."""
        super().__init__()
        self.root_dir = root_dir
        self.trees = set([])
        self.tree_lock = Lock()
        self.scan(root_dir)
        self.created_bucket = DirectoryEventBucket(self.scan)
        self.observer = Observer()
        self.observer.schedule(self, root_dir, recursive=True)
        self.observer.start()

    def scan(self, base_dir):
        scan_dirs = set([base_dir])
        # recursively scan base_dir for working trees

        while scan_dirs:
            scan_dir = os.path.abspath(scan_dirs.pop())
            self.scan_tree(scan_dir)
            try:
                with os.scandir(scan_dir) as dir_it:
                    for entry in dir_it:
                        if entry.is_dir():
                            scan_dirs.add(entry.path)
            except FileNotFoundError as e:
                logging.debug('path disappeared: %s', e)

    def scan_tree(self, scan_dir):
        if not is_project_root(scan_dir):
            return

        # skip source trees in build directories
        # this check could be enhanced
        if scan_dir.endswith('/_deps/json-src'):
            return

        tree = WorkTree(self.root_dir, scan_dir)
        with self.tree_lock:
            if tree not in self.trees:
                if tree.name == tree.rel_dir:
                    logging.info(f'adding working tree {tree.name}')
                else:
                    logging.info(f'adding working tree {tree.name} at {tree.rel_dir}')
                url = os.path.join('/', tree.rel_dir, HEADER)
                logging.info(f'{tree.name}: serving header at {url}')
                self.trees.add(tree)

    def rescan(self, path=None):
        if path is not None:
            path = os.path.abspath(path)
        trees = set([])
        # check if any working trees have been removed
        with self.tree_lock:
            while self.trees:
                tree = self.trees.pop()
                if ((path is None
                        or os.path.commonpath([path, tree.tree_dir]) == tree.tree_dir)
                        and not is_project_root(tree.tree_dir)):
                    if tree.name == tree.rel_dir:
                        logging.info(f'removing working tree {tree.name}')
                    else:
                        logging.info(f'removing working tree {tree.name} at {tree.rel_dir}')
                else:
                    trees.add(tree)
            self.trees = trees

    def find(self, path):
        # find working tree for a given header file path
        path = os.path.abspath(path)
        with self.tree_lock:
            for tree in self.trees:
                if path == tree.header:
                    return tree
        return None

    def on_any_event(self, event):
        logging.debug('%s (is_dir=%s): %s', event.event_type,
                      event.is_directory, event.src_path)
        path = os.path.abspath(event.src_path)
        if event.is_directory:
            if event.event_type == 'created':
                # check for new working trees
                self.created_bucket.add_dir(path)
            elif event.event_type == 'deleted':
                # check for deleted working trees
                self.rescan(path)
        elif event.event_type == 'closed':
            with self.tree_lock:
                for tree in self.trees:
                    tree.update_dirty(path)

    def stop(self):
        self.observer.stop()
        self.observer.join()


class HeaderRequestHandler(SimpleHTTPRequestHandler):  # lgtm[py/missing-call-to-init]
    def __init__(self, request, client_address, server):
        """."""
        self.worktrees = server.worktrees
        self.worktree = None
        try:
            super().__init__(request, client_address, server,
                             directory=server.worktrees.root_dir)
        except ConnectionResetError:
            logging.debug('connection reset by peer')

    def translate_path(self, path):
        path = os.path.abspath(super().translate_path(path))

        # add single_include/nlohmann into path, if needed
        header = os.path.join('/', HEADER)
        header_path = os.path.join('/', SINGLE_INCLUDE, HEADER)
        if (path.endswith(header)
                and not path.endswith(header_path)):
            path = os.path.join(os.path.dirname(path), SINGLE_INCLUDE, HEADER)

        return path

    def send_head(self):
        # check if the translated path matches a working tree
        # and fulfill the request; otherwise, send 404
        path = self.translate_path(self.path)
        self.worktree = self.worktrees.find(path)
        if self.worktree is not None:
            self.worktree.amalgamate_header()
            logging.info(f'{self.worktree.name}: serving header (build count {self.worktree.build_count})')
            return super().send_head()
        logging.info(f'invalid request path: {self.path}')
        super().send_error(HTTPStatus.NOT_FOUND, 'Not Found')
        return None

    def send_header(self, keyword, value):
        # intercept Content-Length header; sent in copyfile later
        if keyword == 'Content-Length':
            return
        super().send_header(keyword, value)

    def end_headers(self):
        # intercept; called in copyfile() or indirectly
        # by send_head via super().send_error()
        pass

    def copyfile(self, source, outputfile):
        injected = False
        content = BytesIO()
        length = 0
        # inject build count and time into served header
        for line in source:
            line = line.decode('utf-8')
            if not injected and JSON_VERSION_RE.match(line):
                length += content.write(bytes('#define JSON_BUILD_COUNT '
                                              f'{self.worktree.build_count}\n', 'utf-8'))
                length += content.write(bytes('#define JSON_BUILD_TIME '
                                              f'"{self.worktree.build_time}"\n\n', 'utf-8'))
                injected = True
            length += content.write(bytes(line, 'utf-8'))

        # set content length
        super().send_header('Content-Length', length)
        # CORS header
        self.send_header('Access-Control-Allow-Origin', '*')
        # prevent caching
        self.send_header('Cache-Control', 'no-cache, no-store, must-revalidate')
        self.send_header('Pragma', 'no-cache')
        self.send_header('Expires', '0')
        super().end_headers()

        # send the header
        content.seek(0)
        shutil.copyfileobj(content, outputfile)

    def log_message(self, format, *args):
        pass


class DualStackServer(ThreadingHTTPServer):
    def __init__(self, addr, worktrees):
        """."""
        self.worktrees = worktrees
        super().__init__(addr, HeaderRequestHandler)

    def server_bind(self):
        # suppress exception when protocol is IPv4
        with contextlib.suppress(Exception):
            self.socket.setsockopt(
                socket.IPPROTO_IPV6, socket.IPV6_V6ONLY, 0)
        return super().server_bind()


if __name__ == '__main__':
    import argparse
    import ssl
    import socket
    import yaml

    # exit code
    ec = 0

    # set up logging
    logging.basicConfig(format='[%(asctime)s] %(levelname)s: %(message)s',
                        datefmt=DATETIME_FORMAT, level=logging.INFO)
    log = logging.getLogger()
    log.addHandler(ExitHandler(logging.ERROR))

    # parse command line arguments
    parser = argparse.ArgumentParser()
    parser.add_argument('--make', default='make',
                        help='the make command (default: make)')
    args = parser.parse_args()

    # propagate the make command to use for amalgamating headers
    WorkTree.make_command = args.make

    worktrees = None
    try:
        # change working directory to project root
        os.chdir(os.path.realpath(os.path.join(sys.path[0], '../../')))

        if not is_project_root():
            log.error('working directory does not look like project root')

        # load config
        config = {}
        config_file = os.path.abspath(CONFIG_FILE)
        try:
            with open(config_file, 'r') as f:
                config = yaml.safe_load(f)
        except FileNotFoundError:
            log.info(f'cannot find configuration file: {config_file}')
            log.info('using default configuration')

        # find and monitor working trees
        worktrees = WorkTrees(config.get('root', '.'))

        # start web server
        infos = socket.getaddrinfo(config.get('bind', None), config.get('port', 8443),
                                   type=socket.SOCK_STREAM, flags=socket.AI_PASSIVE)
        DualStackServer.address_family = infos[0][0]
        HeaderRequestHandler.protocol_version = 'HTTP/1.0'
        with DualStackServer(infos[0][4], worktrees) as httpd:
            scheme = 'HTTP'
            https = config.get('https', {})
            if https.get('enabled', True):
                cert_file = https.get('cert_file', 'localhost.pem')
                key_file = https.get('key_file', 'localhost-key.pem')
                ssl_ctx = ssl.create_default_context(ssl.Purpose.CLIENT_AUTH)
                ssl_ctx.minimum_version = ssl.TLSVersion.TLSv1_2
                ssl_ctx.maximum_version = ssl.TLSVersion.MAXIMUM_SUPPORTED
                ssl_ctx.load_cert_chain(cert_file, key_file)
                httpd.socket = ssl_ctx.wrap_socket(httpd.socket, server_side=True)
                scheme = 'HTTPS'
            host, port = httpd.socket.getsockname()[:2]
            log.info(f'serving {scheme} on {host} port {port}')
            log.info('press Ctrl+C to exit')
            httpd.serve_forever()

    except KeyboardInterrupt:
        log.info('exiting')
    except Exception:
        ec = 1
        log.exception('an error occurred:')
    finally:
        if worktrees is not None:
            worktrees.stop()
        sys.exit(ec)
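Putting the pieces together, a successful startup would log something along these lines (hypothetical working tree and timestamps; the format string comes from the `logging.basicConfig` call above):

```
[2025-07-01 15:50:27] INFO: adding working tree develop
[2025-07-01 15:50:27] INFO: develop: serving header at /develop/json.hpp
[2025-07-01 15:50:27] INFO: serving HTTPS on :: port 8443
[2025-07-01 15:50:27] INFO: press Ctrl+C to exit
```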
15  source/lib/json/tools/serve_header/serve_header.yml.example  Normal file
@@ -0,0 +1,15 @@
# all paths are relative to the project root

# the root directory for the web server
# root: .

# configure SSL
# https:
#     enabled: true
#     # these filenames are listed in .gitignore
#     cert_file: localhost.pem
#     key_file: localhost-key.pem

# address and port for the server to listen on
# bind: null
# port: 8443