Mirror of https://github.com/open-source-parsers/jsoncpp.git (synced 2025-08-04 10:36:23 -04:00)

Commit 68035d34d5: Merge branch 'master' into fix_cmake_double_find
@@ -1,4 +1,4 @@
BasedOnStyle: LLVM
DerivePointerAlignment: false
PointerAlignment: Left
SpacesBeforeTrailingComments: 1
.github/workflows/clang-format.yml (new file, 20 lines)
@@ -0,0 +1,20 @@
name: clang-format check
on: [check_run, pull_request, push]

jobs:
  formatting-check:
    name: formatting check
    runs-on: ubuntu-latest
    strategy:
      matrix:
        path:
          - 'src'
          - 'examples'
          - 'include'
    steps:
      - uses: actions/checkout@v4
      - name: runs clang-format style check for C/C++/Protobuf programs.
        uses: jidicula/clang-format-action@v4.13.0
        with:
          clang-format-version: '18'
          check-path: ${{ matrix.path }}
.github/workflows/cmake.yml (new file, 18 lines)
@@ -0,0 +1,18 @@
name: cmake
on: [check_run, push, pull_request]
jobs:
  cmake-publish:
    runs-on: ${{ matrix.os }}

    strategy:
      fail-fast: false
      matrix:
        os: [ubuntu-latest, windows-latest, macos-latest]

    steps:
      - name: checkout project
        uses: actions/checkout@v4

      - name: build project
        uses: threeal/cmake-action@v2.0.0
.github/workflows/meson.yml (new file, 65 lines)
@@ -0,0 +1,65 @@
name: meson build and test
run-name: update pushed to ${{ github.ref }}
on: [check_run, push, pull_request]

jobs:
  meson-publish:
    runs-on: ${{ matrix.os }}

    strategy:
      fail-fast: false
      matrix:
        os: [ubuntu-latest, windows-latest, macos-latest]

    steps:
      - name: checkout repository
        uses: actions/checkout@v4

      - name: setup python
        uses: actions/setup-python@v5

      - name: meson build
        uses: BSFishy/meson-build@v1.0.3
        with:
          meson-version: 1.5.1
          ninja-version: 1.11.1.1
          action: build

      - name: meson test
        uses: BSFishy/meson-build@v1.0.3
        with:
          meson-version: 1.5.1
          ninja-version: 1.11.1.1
          action: test

  meson-coverage:
    runs-on: ubuntu-latest

    steps:
      - name: checkout repository
        uses: actions/checkout@v4

      - name: setup python
        uses: actions/setup-python@v5

      - name: meson build
        uses: BSFishy/meson-build@v1.0.3
        with:
          meson-version: 1.5.1
          ninja-version: 1.11.1.1
          setup-options: -Db_coverage=true
          action: build

      - name: meson test
        uses: BSFishy/meson-build@v1.0.3
        with:
          meson-version: 1.5.1
          ninja-version: 1.11.1.1
          setup-options: -Db_coverage=true
          action: test

      - name: generate code coverage report
        uses: threeal/gcovr-action@v1.0.0
        with:
          coveralls-send: true
          github-token: ${{ secrets.GITHUB_TOKEN }}
.travis.yml (deleted, 71 lines)
@ -1,71 +0,0 @@
|
||||
# Build matrix / environment variables are explained on:
|
||||
# http://about.travis-ci.com/docs/user/build-configuration/
|
||||
# This file can be validated on: http://www.yamllint.com/
|
||||
# Or using the Ruby based travel command line tool:
|
||||
# gem install travis --no-rdoc --no-ri
|
||||
# travis lint .travis.yml
|
||||
language: cpp
|
||||
sudo: false
|
||||
addons:
|
||||
homebrew:
|
||||
packages:
|
||||
- clang-format
|
||||
- meson
|
||||
- ninja
|
||||
update: false # do not update homebrew by default
|
||||
apt:
|
||||
sources:
|
||||
- ubuntu-toolchain-r-test
|
||||
- llvm-toolchain-xenial-8
|
||||
packages:
|
||||
- clang-format-8
|
||||
- clang-8
|
||||
- valgrind
|
||||
matrix:
|
||||
include:
|
||||
- name: Mac clang meson static release testing
|
||||
os: osx
|
||||
osx_image: xcode11
|
||||
compiler: clang
|
||||
env:
|
||||
CXX="clang++"
|
||||
CC="clang"
|
||||
LIB_TYPE=static
|
||||
BUILD_TYPE=release
|
||||
script: ./.travis_scripts/meson_builder.sh
|
||||
- name: Linux xenial clang meson static release testing
|
||||
os: linux
|
||||
dist: xenial
|
||||
compiler: clang
|
||||
env:
|
||||
CXX="clang++"
|
||||
CC="clang"
|
||||
LIB_TYPE=static
|
||||
BUILD_TYPE=release
|
||||
PYTHONUSERBASE="$(pwd)/LOCAL"
|
||||
PATH="$PYTHONUSERBASE/bin:$PATH"
|
||||
# before_install and install steps only needed for linux meson builds
|
||||
before_install:
|
||||
- source ./.travis_scripts/travis.before_install.${TRAVIS_OS_NAME}.sh
|
||||
install:
|
||||
- source ./.travis_scripts/travis.install.${TRAVIS_OS_NAME}.sh
|
||||
script: ./.travis_scripts/meson_builder.sh
|
||||
- name: Linux xenial gcc cmake coverage
|
||||
os: linux
|
||||
dist: xenial
|
||||
compiler: gcc
|
||||
env:
|
||||
CXX=g++
|
||||
CC=gcc
|
||||
DO_Coverage=ON
|
||||
BUILD_TOOL="Unix Makefiles"
|
||||
BUILD_TYPE=Debug
|
||||
LIB_TYPE=shared
|
||||
DESTDIR=/tmp/cmake_json_cpp
|
||||
before_install:
|
||||
- pip install --user cpp-coveralls
|
||||
script: ./.travis_scripts/cmake_builder.sh
|
||||
after_success:
|
||||
- coveralls --include src/lib_json --include include
|
||||
notifications:
|
||||
email: false
|
@ -1,130 +0,0 @@
|
||||
#!/usr/bin/env sh
|
||||
# This script can be used on the command line directly to configure several
|
||||
# different build environments.
|
||||
# This is called by `.travis.yml` via Travis CI.
|
||||
# Travis supplies $TRAVIS_OS_NAME.
|
||||
# http://docs.travis-ci.com/user/multi-os/
|
||||
# Our .travis.yml also defines:
|
||||
|
||||
# - BUILD_TYPE=Release/Debug
|
||||
# - LIB_TYPE=static/shared
|
||||
#
|
||||
# Optional environmental variables
|
||||
# - DESTDIR <- used for setting the install prefix
|
||||
# - BUILD_TOOL=["Unix Makefile"|"Ninja"]
|
||||
# - BUILDNAME <- how to identify this build on the dashboard
|
||||
# - DO_MemCheck <- if set, try to use valgrind
|
||||
# - DO_Coverage <- if set, try to do dashboard coverage testing
|
||||
#
|
||||
|
||||
env_set=1
|
||||
if ${BUILD_TYPE+false}; then
|
||||
echo "BUILD_TYPE not set in environment."
|
||||
env_set=0
|
||||
fi
|
||||
if ${LIB_TYPE+false}; then
|
||||
echo "LIB_TYPE not set in environment."
|
||||
env_set=0
|
||||
fi
|
||||
if ${CXX+false}; then
|
||||
echo "CXX not set in environment."
|
||||
env_set=0
|
||||
fi
|
||||
|
||||
|
||||
if [ ${env_set} -eq 0 ]; then
|
||||
echo "USAGE: CXX=$(which clang++) BUILD_TYPE=[Release|Debug] LIB_TYPE=[static|shared] $0"
|
||||
echo ""
|
||||
echo "Examples:"
|
||||
echo " CXX=$(which clang++) BUILD_TYPE=Release LIB_TYPE=shared DESTDIR=/tmp/cmake_json_cpp $0"
|
||||
echo " CXX=$(which clang++) BUILD_TYPE=Debug LIB_TYPE=shared DESTDIR=/tmp/cmake_json_cpp $0"
|
||||
echo " CXX=$(which clang++) BUILD_TYPE=Release LIB_TYPE=static DESTDIR=/tmp/cmake_json_cpp $0"
|
||||
echo " CXX=$(which clang++) BUILD_TYPE=Debug LIB_TYPE=static DESTDIR=/tmp/cmake_json_cpp $0"
|
||||
|
||||
echo " CXX=$(which g++) BUILD_TYPE=Release LIB_TYPE=shared DESTDIR=/tmp/cmake_json_cpp $0"
|
||||
echo " CXX=$(which g++) BUILD_TYPE=Debug LIB_TYPE=shared DESTDIR=/tmp/cmake_json_cpp $0"
|
||||
echo " CXX=$(which g++) BUILD_TYPE=Release LIB_TYPE=static DESTDIR=/tmp/cmake_json_cpp $0"
|
||||
echo " CXX=$(which g++) BUILD_TYPE=Debug LIB_TYPE=static DESTDIR=/tmp/cmake_json_cpp $0"
|
||||
|
||||
exit -1
|
||||
fi
|
||||
|
||||
if ${DESTDIR+false}; then
|
||||
DESTDIR="/usr/local"
|
||||
fi
|
||||
|
||||
# -e: fail on error
|
||||
# -v: show commands
|
||||
# -x: show expanded commands
|
||||
set -vex
|
||||
|
||||
env | sort
|
||||
|
||||
which cmake
|
||||
cmake --version
|
||||
|
||||
echo ${CXX}
|
||||
${CXX} --version
|
||||
_COMPILER_NAME=`basename ${CXX}`
|
||||
if [ "${LIB_TYPE}" = "shared" ]; then
|
||||
_CMAKE_BUILD_SHARED_LIBS=ON
|
||||
else
|
||||
_CMAKE_BUILD_SHARED_LIBS=OFF
|
||||
fi
|
||||
|
||||
CTEST_TESTING_OPTION="-D ExperimentalTest"
|
||||
# - DO_MemCheck <- if set, try to use valgrind
|
||||
if ! ${DO_MemCheck+false}; then
|
||||
valgrind --version
|
||||
CTEST_TESTING_OPTION="-D ExperimentalMemCheck"
|
||||
else
|
||||
# - DO_Coverage <- if set, try to do dashboard coverage testing
|
||||
if ! ${DO_Coverage+false}; then
|
||||
export CXXFLAGS="-fprofile-arcs -ftest-coverage"
|
||||
export LDFLAGS="-fprofile-arcs -ftest-coverage"
|
||||
CTEST_TESTING_OPTION="-D ExperimentalTest -D ExperimentalCoverage"
|
||||
#gcov --version
|
||||
fi
|
||||
fi
|
||||
|
||||
# Ninja = Generates build.ninja files.
|
||||
if ${BUILD_TOOL+false}; then
|
||||
BUILD_TOOL="Ninja"
|
||||
export _BUILD_EXE=ninja
|
||||
which ninja
|
||||
ninja --version
|
||||
else
|
||||
# Unix Makefiles = Generates standard UNIX makefiles.
|
||||
export _BUILD_EXE=make
|
||||
fi
|
||||
|
||||
_BUILD_DIR_NAME="build-cmake_${BUILD_TYPE}_${LIB_TYPE}_${_COMPILER_NAME}_${_BUILD_EXE}"
|
||||
mkdir -p ${_BUILD_DIR_NAME}
|
||||
cd "${_BUILD_DIR_NAME}"
|
||||
if ${BUILDNAME+false}; then
|
||||
_HOSTNAME=`hostname -s`
|
||||
BUILDNAME="${_HOSTNAME}_${BUILD_TYPE}_${LIB_TYPE}_${_COMPILER_NAME}_${_BUILD_EXE}"
|
||||
fi
|
||||
cmake \
|
||||
-G "${BUILD_TOOL}" \
|
||||
-DBUILDNAME:STRING="${BUILDNAME}" \
|
||||
-DCMAKE_CXX_COMPILER:PATH=${CXX} \
|
||||
-DCMAKE_BUILD_TYPE:STRING=${BUILD_TYPE} \
|
||||
-DBUILD_SHARED_LIBS:BOOL=${_CMAKE_BUILD_SHARED_LIBS} \
|
||||
-DCMAKE_INSTALL_PREFIX:PATH=${DESTDIR} \
|
||||
../
|
||||
|
||||
ctest -C ${BUILD_TYPE} -D ExperimentalStart -D ExperimentalConfigure -D ExperimentalBuild ${CTEST_TESTING_OPTION} -D ExperimentalSubmit
|
||||
# Final step is to verify that installation succeeds
|
||||
cmake --build . --config ${BUILD_TYPE} --target install
|
||||
|
||||
if [ "${DESTDIR}" != "/usr/local" ]; then
|
||||
${_BUILD_EXE} install
|
||||
fi
|
||||
cd -
|
||||
|
||||
if ${CLEANUP+false}; then
|
||||
echo "Skipping cleanup: build directory will persist."
|
||||
else
|
||||
rm -r "${_BUILD_DIR_NAME}"
|
||||
fi
|
@ -1,83 +0,0 @@
|
||||
#!/usr/bin/env sh
|
||||
# This script can be used on the command line directly to configure several
|
||||
# different build environments.
|
||||
# This is called by `.travis.yml` via Travis CI.
|
||||
# Travis supplies $TRAVIS_OS_NAME.
|
||||
# http://docs.travis-ci.com/user/multi-os/
|
||||
# Our .travis.yml also defines:
|
||||
|
||||
# - BUILD_TYPE=release/debug
|
||||
# - LIB_TYPE=static/shared
|
||||
|
||||
env_set=1
|
||||
if ${BUILD_TYPE+false}; then
|
||||
echo "BUILD_TYPE not set in environment."
|
||||
env_set=0
|
||||
fi
|
||||
if ${LIB_TYPE+false}; then
|
||||
echo "LIB_TYPE not set in environment."
|
||||
env_set=0
|
||||
fi
|
||||
if ${CXX+false}; then
|
||||
echo "CXX not set in environment."
|
||||
env_set=0
|
||||
fi
|
||||
|
||||
|
||||
if [ ${env_set} -eq 0 ]; then
|
||||
echo "USAGE: CXX=$(which clang++) BUILD_TYPE=[release|debug] LIB_TYPE=[static|shared] $0"
|
||||
echo ""
|
||||
echo "Examples:"
|
||||
echo " CXX=$(which clang++) BUILD_TYPE=release LIB_TYPE=shared DESTDIR=/tmp/meson_json_cpp $0"
|
||||
echo " CXX=$(which clang++) BUILD_TYPE=debug LIB_TYPE=shared DESTDIR=/tmp/meson_json_cpp $0"
|
||||
echo " CXX=$(which clang++) BUILD_TYPE=release LIB_TYPE=static DESTDIR=/tmp/meson_json_cpp $0"
|
||||
echo " CXX=$(which clang++) BUILD_TYPE=debug LIB_TYPE=static DESTDIR=/tmp/meson_json_cpp $0"
|
||||
|
||||
echo " CXX=$(which g++) BUILD_TYPE=release LIB_TYPE=shared DESTDIR=/tmp/meson_json_cpp $0"
|
||||
echo " CXX=$(which g++) BUILD_TYPE=debug LIB_TYPE=shared DESTDIR=/tmp/meson_json_cpp $0"
|
||||
echo " CXX=$(which g++) BUILD_TYPE=release LIB_TYPE=static DESTDIR=/tmp/meson_json_cpp $0"
|
||||
echo " CXX=$(which g++) BUILD_TYPE=debug LIB_TYPE=static DESTDIR=/tmp/meson_json_cpp $0"
|
||||
|
||||
exit -1
|
||||
fi
|
||||
|
||||
if ${DESTDIR+false}; then
|
||||
DESTDIR="/usr/local"
|
||||
fi
|
||||
|
||||
# -e: fail on error
|
||||
# -v: show commands
|
||||
# -x: show expanded commands
|
||||
set -vex
|
||||
|
||||
|
||||
env | sort
|
||||
|
||||
which python3
|
||||
which meson
|
||||
which ninja
|
||||
echo ${CXX}
|
||||
${CXX} --version
|
||||
python3 --version
|
||||
meson --version
|
||||
ninja --version
|
||||
_COMPILER_NAME=`basename ${CXX}`
|
||||
_BUILD_DIR_NAME="build-${BUILD_TYPE}_${LIB_TYPE}_${_COMPILER_NAME}"
|
||||
|
||||
#./.travis_scripts/run-clang-format.sh
|
||||
meson --fatal-meson-warnings --werror --buildtype ${BUILD_TYPE} --default-library ${LIB_TYPE} . "${_BUILD_DIR_NAME}"
|
||||
ninja -v -j 2 -C "${_BUILD_DIR_NAME}"
|
||||
|
||||
cd "${_BUILD_DIR_NAME}"
|
||||
meson test --no-rebuild --print-errorlogs
|
||||
|
||||
if [ "${DESTDIR}" != "/usr/local" ]; then
|
||||
ninja install
|
||||
fi
|
||||
cd -
|
||||
|
||||
if ${CLEANUP+false}; then
|
||||
echo "Skipping cleanup: build directory will persist."
|
||||
else
|
||||
rm -r "${_BUILD_DIR_NAME}"
|
||||
fi
|
@ -1,356 +0,0 @@
|
||||
#!/usr/bin/env python
|
||||
"""A wrapper script around clang-format, suitable for linting multiple files
|
||||
and to use for continuous integration.
|
||||
This is an alternative API for the clang-format command line.
|
||||
It runs over multiple files and directories in parallel.
|
||||
A diff output is produced and a sensible exit code is returned.
|
||||
|
||||
NOTE: pulled from https://github.com/Sarcasm/run-clang-format, which is
|
||||
licensed under the MIT license.
|
||||
"""
|
||||
|
||||
from __future__ import print_function, unicode_literals
|
||||
|
||||
import argparse
|
||||
import codecs
|
||||
import difflib
|
||||
import fnmatch
|
||||
import io
|
||||
import multiprocessing
|
||||
import os
|
||||
import signal
|
||||
import subprocess
|
||||
import sys
|
||||
import traceback
|
||||
|
||||
from functools import partial
|
||||
|
||||
try:
|
||||
from subprocess import DEVNULL # py3k
|
||||
except ImportError:
|
||||
DEVNULL = open(os.devnull, "wb")
|
||||
|
||||
|
||||
DEFAULT_EXTENSIONS = 'c,h,C,H,cpp,hpp,cc,hh,c++,h++,cxx,hxx'
|
||||
|
||||
|
||||
class ExitStatus:
|
||||
SUCCESS = 0
|
||||
DIFF = 1
|
||||
TROUBLE = 2
|
||||
|
||||
|
||||
def list_files(files, recursive=False, extensions=None, exclude=None):
|
||||
if extensions is None:
|
||||
extensions = []
|
||||
if exclude is None:
|
||||
exclude = []
|
||||
|
||||
out = []
|
||||
for file in files:
|
||||
if recursive and os.path.isdir(file):
|
||||
for dirpath, dnames, fnames in os.walk(file):
|
||||
fpaths = [os.path.join(dirpath, fname) for fname in fnames]
|
||||
for pattern in exclude:
|
||||
# os.walk() supports trimming down the dnames list
|
||||
# by modifying it in-place,
|
||||
# to avoid unnecessary directory listings.
|
||||
dnames[:] = [
|
||||
x for x in dnames
|
||||
if
|
||||
not fnmatch.fnmatch(os.path.join(dirpath, x), pattern)
|
||||
]
|
||||
fpaths = [
|
||||
x for x in fpaths if not fnmatch.fnmatch(x, pattern)
|
||||
]
|
||||
for f in fpaths:
|
||||
ext = os.path.splitext(f)[1][1:]
|
||||
if ext in extensions:
|
||||
out.append(f)
|
||||
else:
|
||||
out.append(file)
|
||||
return out
|
||||
|
||||
|
||||
def make_diff(file, original, reformatted):
|
||||
return list(
|
||||
difflib.unified_diff(
|
||||
original,
|
||||
reformatted,
|
||||
fromfile='{}\t(original)'.format(file),
|
||||
tofile='{}\t(reformatted)'.format(file),
|
||||
n=3))
|
||||
|
||||
|
||||
class DiffError(Exception):
|
||||
def __init__(self, message, errs=None):
|
||||
super(DiffError, self).__init__(message)
|
||||
self.errs = errs or []
|
||||
|
||||
|
||||
class UnexpectedError(Exception):
|
||||
def __init__(self, message, exc=None):
|
||||
super(UnexpectedError, self).__init__(message)
|
||||
self.formatted_traceback = traceback.format_exc()
|
||||
self.exc = exc
|
||||
|
||||
|
||||
def run_clang_format_diff_wrapper(args, file):
|
||||
try:
|
||||
ret = run_clang_format_diff(args, file)
|
||||
return ret
|
||||
except DiffError:
|
||||
raise
|
||||
except Exception as e:
|
||||
raise UnexpectedError('{}: {}: {}'.format(file, e.__class__.__name__,
|
||||
e), e)
|
||||
|
||||
|
||||
def run_clang_format_diff(args, file):
|
||||
try:
|
||||
with io.open(file, 'r', encoding='utf-8') as f:
|
||||
original = f.readlines()
|
||||
except IOError as exc:
|
||||
raise DiffError(str(exc))
|
||||
invocation = [args.clang_format_executable, file]
|
||||
|
||||
# Use of utf-8 to decode the process output.
|
||||
#
|
||||
# Hopefully, this is the correct thing to do.
|
||||
#
|
||||
# It's done due to the following assumptions (which may be incorrect):
|
||||
# - clang-format will returns the bytes read from the files as-is,
|
||||
# without conversion, and it is already assumed that the files use utf-8.
|
||||
# - if the diagnostics were internationalized, they would use utf-8:
|
||||
# > Adding Translations to Clang
|
||||
# >
|
||||
# > Not possible yet!
|
||||
# > Diagnostic strings should be written in UTF-8,
|
||||
# > the client can translate to the relevant code page if needed.
|
||||
# > Each translation completely replaces the format string
|
||||
# > for the diagnostic.
|
||||
# > -- http://clang.llvm.org/docs/InternalsManual.html#internals-diag-translation
|
||||
#
|
||||
# It's not pretty, due to Python 2 & 3 compatibility.
|
||||
encoding_py3 = {}
|
||||
if sys.version_info[0] >= 3:
|
||||
encoding_py3['encoding'] = 'utf-8'
|
||||
|
||||
try:
|
||||
proc = subprocess.Popen(
|
||||
invocation,
|
||||
stdout=subprocess.PIPE,
|
||||
stderr=subprocess.PIPE,
|
||||
universal_newlines=True,
|
||||
**encoding_py3)
|
||||
except OSError as exc:
|
||||
raise DiffError(
|
||||
"Command '{}' failed to start: {}".format(
|
||||
subprocess.list2cmdline(invocation), exc
|
||||
)
|
||||
)
|
||||
proc_stdout = proc.stdout
|
||||
proc_stderr = proc.stderr
|
||||
if sys.version_info[0] < 3:
|
||||
# make the pipes compatible with Python 3,
|
||||
# reading lines should output unicode
|
||||
encoding = 'utf-8'
|
||||
proc_stdout = codecs.getreader(encoding)(proc_stdout)
|
||||
proc_stderr = codecs.getreader(encoding)(proc_stderr)
|
||||
# hopefully the stderr pipe won't get full and block the process
|
||||
outs = list(proc_stdout.readlines())
|
||||
errs = list(proc_stderr.readlines())
|
||||
proc.wait()
|
||||
if proc.returncode:
|
||||
raise DiffError(
|
||||
"Command '{}' returned non-zero exit status {}".format(
|
||||
subprocess.list2cmdline(invocation), proc.returncode
|
||||
),
|
||||
errs,
|
||||
)
|
||||
return make_diff(file, original, outs), errs
|
||||
|
||||
|
||||
def bold_red(s):
|
||||
return '\x1b[1m\x1b[31m' + s + '\x1b[0m'
|
||||
|
||||
|
||||
def colorize(diff_lines):
|
||||
def bold(s):
|
||||
return '\x1b[1m' + s + '\x1b[0m'
|
||||
|
||||
def cyan(s):
|
||||
return '\x1b[36m' + s + '\x1b[0m'
|
||||
|
||||
def green(s):
|
||||
return '\x1b[32m' + s + '\x1b[0m'
|
||||
|
||||
def red(s):
|
||||
return '\x1b[31m' + s + '\x1b[0m'
|
||||
|
||||
for line in diff_lines:
|
||||
if line[:4] in ['--- ', '+++ ']:
|
||||
yield bold(line)
|
||||
elif line.startswith('@@ '):
|
||||
yield cyan(line)
|
||||
elif line.startswith('+'):
|
||||
yield green(line)
|
||||
elif line.startswith('-'):
|
||||
yield red(line)
|
||||
else:
|
||||
yield line
|
||||
|
||||
|
||||
def print_diff(diff_lines, use_color):
|
||||
if use_color:
|
||||
diff_lines = colorize(diff_lines)
|
||||
if sys.version_info[0] < 3:
|
||||
sys.stdout.writelines((l.encode('utf-8') for l in diff_lines))
|
||||
else:
|
||||
sys.stdout.writelines(diff_lines)
|
||||
|
||||
|
||||
def print_trouble(prog, message, use_colors):
|
||||
error_text = 'error:'
|
||||
if use_colors:
|
||||
error_text = bold_red(error_text)
|
||||
print("{}: {} {}".format(prog, error_text, message), file=sys.stderr)
|
||||
|
||||
|
||||
def main():
|
||||
parser = argparse.ArgumentParser(description=__doc__)
|
||||
parser.add_argument(
|
||||
'--clang-format-executable',
|
||||
metavar='EXECUTABLE',
|
||||
help='path to the clang-format executable',
|
||||
default='clang-format')
|
||||
parser.add_argument(
|
||||
'--extensions',
|
||||
help='comma separated list of file extensions (default: {})'.format(
|
||||
DEFAULT_EXTENSIONS),
|
||||
default=DEFAULT_EXTENSIONS)
|
||||
parser.add_argument(
|
||||
'-r',
|
||||
'--recursive',
|
||||
action='store_true',
|
||||
help='run recursively over directories')
|
||||
parser.add_argument('files', metavar='file', nargs='+')
|
||||
parser.add_argument(
|
||||
'-q',
|
||||
'--quiet',
|
||||
action='store_true')
|
||||
parser.add_argument(
|
||||
'-j',
|
||||
metavar='N',
|
||||
type=int,
|
||||
default=0,
|
||||
help='run N clang-format jobs in parallel'
|
||||
' (default number of cpus + 1)')
|
||||
parser.add_argument(
|
||||
'--color',
|
||||
default='auto',
|
||||
choices=['auto', 'always', 'never'],
|
||||
help='show colored diff (default: auto)')
|
||||
parser.add_argument(
|
||||
'-e',
|
||||
'--exclude',
|
||||
metavar='PATTERN',
|
||||
action='append',
|
||||
default=[],
|
||||
help='exclude paths matching the given glob-like pattern(s)'
|
||||
' from recursive search')
|
||||
|
||||
args = parser.parse_args()
|
||||
|
||||
# use default signal handling, like diff return SIGINT value on ^C
|
||||
# https://bugs.python.org/issue14229#msg156446
|
||||
signal.signal(signal.SIGINT, signal.SIG_DFL)
|
||||
try:
|
||||
signal.SIGPIPE
|
||||
except AttributeError:
|
||||
# compatibility, SIGPIPE does not exist on Windows
|
||||
pass
|
||||
else:
|
||||
signal.signal(signal.SIGPIPE, signal.SIG_DFL)
|
||||
|
||||
colored_stdout = False
|
||||
colored_stderr = False
|
||||
if args.color == 'always':
|
||||
colored_stdout = True
|
||||
colored_stderr = True
|
||||
elif args.color == 'auto':
|
||||
colored_stdout = sys.stdout.isatty()
|
||||
colored_stderr = sys.stderr.isatty()
|
||||
|
||||
version_invocation = [args.clang_format_executable, str("--version")]
|
||||
try:
|
||||
subprocess.check_call(version_invocation, stdout=DEVNULL)
|
||||
except subprocess.CalledProcessError as e:
|
||||
print_trouble(parser.prog, str(e), use_colors=colored_stderr)
|
||||
return ExitStatus.TROUBLE
|
||||
except OSError as e:
|
||||
print_trouble(
|
||||
parser.prog,
|
||||
"Command '{}' failed to start: {}".format(
|
||||
subprocess.list2cmdline(version_invocation), e
|
||||
),
|
||||
use_colors=colored_stderr,
|
||||
)
|
||||
return ExitStatus.TROUBLE
|
||||
|
||||
retcode = ExitStatus.SUCCESS
|
||||
files = list_files(
|
||||
args.files,
|
||||
recursive=args.recursive,
|
||||
exclude=args.exclude,
|
||||
extensions=args.extensions.split(','))
|
||||
|
||||
if not files:
|
||||
return
|
||||
|
||||
njobs = args.j
|
||||
if njobs == 0:
|
||||
njobs = multiprocessing.cpu_count() + 1
|
||||
njobs = min(len(files), njobs)
|
||||
|
||||
if njobs == 1:
|
||||
# execute directly instead of in a pool,
|
||||
# less overhead, simpler stacktraces
|
||||
it = (run_clang_format_diff_wrapper(args, file) for file in files)
|
||||
pool = None
|
||||
else:
|
||||
pool = multiprocessing.Pool(njobs)
|
||||
it = pool.imap_unordered(
|
||||
partial(run_clang_format_diff_wrapper, args), files)
|
||||
while True:
|
||||
try:
|
||||
outs, errs = next(it)
|
||||
except StopIteration:
|
||||
break
|
||||
except DiffError as e:
|
||||
print_trouble(parser.prog, str(e), use_colors=colored_stderr)
|
||||
retcode = ExitStatus.TROUBLE
|
||||
sys.stderr.writelines(e.errs)
|
||||
except UnexpectedError as e:
|
||||
print_trouble(parser.prog, str(e), use_colors=colored_stderr)
|
||||
sys.stderr.write(e.formatted_traceback)
|
||||
retcode = ExitStatus.TROUBLE
|
||||
# stop at the first unexpected error,
|
||||
# something could be very wrong,
|
||||
# don't process all files unnecessarily
|
||||
if pool:
|
||||
pool.terminate()
|
||||
break
|
||||
else:
|
||||
sys.stderr.writelines(errs)
|
||||
if outs == []:
|
||||
continue
|
||||
if not args.quiet:
|
||||
print_diff(outs, use_color=colored_stdout)
|
||||
if retcode == ExitStatus.SUCCESS:
|
||||
retcode = ExitStatus.DIFF
|
||||
return retcode
|
||||
|
||||
|
||||
if __name__ == '__main__':
|
||||
sys.exit(main())
|
@@ -1,4 +0,0 @@
#!/usr/bin/env bash

DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" >/dev/null 2>&1 && pwd )"
python $DIR/run-clang-format.py -r $DIR/../src/**/ $DIR/../include/**/
@@ -1,8 +0,0 @@
set -vex

# Preinstalled versions of python are dependent on which Ubuntu distribution
# you are running. The below version needs to be updated whenever we roll
# the Ubuntu version used in Travis.
# https://docs.travis-ci.com/user/languages/python/

pyenv global 3.7.1
@@ -1,5 +0,0 @@
set -vex

pip3 install --user meson ninja
which meson
which ninja
@@ -1 +0,0 @@
# NOTHING TO DO HERE
@@ -54,16 +54,6 @@ endif()
set(CMAKE_MODULE_PATH ${CMAKE_MODULE_PATH} "${CMAKE_CURRENT_SOURCE_DIR}/cmake")

# ---------------------------------------------------------------------------
# use ccache if found, has to be done before project()
# ---------------------------------------------------------------------------
find_program(CCACHE_EXECUTABLE "ccache" HINTS /usr/local/bin /opt/local/bin)
if(CCACHE_EXECUTABLE)
  message(STATUS "use ccache")
  set(CMAKE_CXX_COMPILER_LAUNCHER "${CCACHE_EXECUTABLE}" CACHE PATH "ccache" FORCE)
  set(CMAKE_C_COMPILER_LAUNCHER "${CCACHE_EXECUTABLE}" CACHE PATH "ccache" FORCE)
endif()

project(jsoncpp
        # Note: version must be updated in three places when doing a release. This
        # annoying process ensures that amalgamate, CMake, and meson all report the
@@ -72,11 +62,11 @@ project(jsoncpp
        # 2. ./include/json/version.h
        # 3. ./CMakeLists.txt
        # IMPORTANT: also update the PROJECT_SOVERSION!!
        VERSION 1.9.5 # <major>[.<minor>[.<patch>[.<tweak>]]]
        VERSION 1.9.6 # <major>[.<minor>[.<patch>[.<tweak>]]]
        LANGUAGES CXX)

message(STATUS "JsonCpp Version: ${PROJECT_VERSION_MAJOR}.${PROJECT_VERSION_MINOR}.${PROJECT_VERSION_PATCH}")
set(PROJECT_SOVERSION 25)
set(PROJECT_SOVERSION 26)

include(${CMAKE_CURRENT_SOURCE_DIR}/include/PreventInSourceBuilds.cmake)
include(${CMAKE_CURRENT_SOURCE_DIR}/include/PreventInBuildInstalls.cmake)
@@ -103,7 +93,9 @@ if(CMAKE_CURRENT_SOURCE_DIR STREQUAL CMAKE_SOURCE_DIR)
    set(CMAKE_RUNTIME_OUTPUT_DIRECTORY "${CMAKE_BINARY_DIR}/bin" CACHE PATH "Executable/dll output dir.")
endif()

set(JSONCPP_USE_SECURE_MEMORY "0" CACHE STRING "-D...=1 to use memory-wiping allocator for STL")
if(JSONCPP_USE_SECURE_MEMORY)
  add_definitions("-DJSONCPP_USE_SECURE_MEMORY=1")
endif()

configure_file("${PROJECT_SOURCE_DIR}/version.in"
               "${PROJECT_BINARY_DIR}/version"
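With the built-in ccache detection removed from the top-level CMakeLists.txt, ccache can still be enabled from the command line through CMake's standard launcher variables; the invocation below is illustrative only and is not part of this change:

    cmake -DCMAKE_CXX_COMPILER_LAUNCHER=ccache -DCMAKE_C_COMPILER_LAUNCHER=ccache <source-dir>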
SECURITY.md (new file, 17 lines)
@@ -0,0 +1,17 @@
# Security Policy

If you have discovered a security vulnerability in this project, please report it
privately. **Do not disclose it as a public issue.** This gives us time to work with you
to fix the issue before public exposure, reducing the chance that the exploit will be
used before a patch is released.

Please submit the report by filling out
[this form](https://github.com/open-source-parsers/jsoncpp/security/advisories/new).

Please provide the following information in your report:

- A description of the vulnerability and its impact
- How to reproduce the issue

This project is maintained by volunteers on a reasonable-effort basis. As such,
we ask that you give us 90 days to work on a fix before public exposure.
@ -63,7 +63,7 @@ def amalgamate_source(source_top_dir=None,
|
||||
"""
|
||||
print("Amalgamating header...")
|
||||
header = AmalgamationFile(source_top_dir)
|
||||
header.add_text("/// Json-cpp amalgamated header (http://jsoncpp.sourceforge.net/).")
|
||||
header.add_text("/// Json-cpp amalgamated header (https://github.com/open-source-parsers/jsoncpp/).")
|
||||
header.add_text('/// It is intended to be used with #include "%s"' % header_include_path)
|
||||
header.add_file("LICENSE", wrap_in_comment=True)
|
||||
header.add_text("#ifndef JSON_AMALGAMATED_H_INCLUDED")
|
||||
@ -90,7 +90,7 @@ def amalgamate_source(source_top_dir=None,
|
||||
forward_header_include_path = base + "-forwards" + ext
|
||||
print("Amalgamating forward header...")
|
||||
header = AmalgamationFile(source_top_dir)
|
||||
header.add_text("/// Json-cpp amalgamated forward header (http://jsoncpp.sourceforge.net/).")
|
||||
header.add_text("/// Json-cpp amalgamated forward header (https://github.com/open-source-parsers/jsoncpp/).")
|
||||
header.add_text('/// It is intended to be used with #include "%s"' % forward_header_include_path)
|
||||
header.add_text("/// This header provides forward declaration for all JsonCpp types.")
|
||||
header.add_file("LICENSE", wrap_in_comment=True)
|
||||
@ -112,7 +112,7 @@ def amalgamate_source(source_top_dir=None,
|
||||
|
||||
print("Amalgamating source...")
|
||||
source = AmalgamationFile(source_top_dir)
|
||||
source.add_text("/// Json-cpp amalgamated source (http://jsoncpp.sourceforge.net/).")
|
||||
source.add_text("/// Json-cpp amalgamated source (https://github.com/open-source-parsers/jsoncpp/).")
|
||||
source.add_text('/// It is intended to be used with #include "%s"' % header_include_path)
|
||||
source.add_file("LICENSE", wrap_in_comment=True)
|
||||
source.add_text("")
|
||||
|
@@ -25,7 +25,7 @@ int main() {
  const std::unique_ptr<Json::CharReader> reader(builder.newCharReader());
  if (!reader->parse(rawJson.c_str(), rawJson.c_str() + rawJsonLength, &root,
                     &err)) {
    std::cout << "error" << std::endl;
    std::cout << "error: " << err << std::endl;
    return EXIT_FAILURE;
  }
}
@@ -2,8 +2,8 @@
# This function will prevent in-source builds
function(AssureOutOfSourceBuilds)
  # make sure the user doesn't play dirty with symlinks
  get_filename_component(srcdir "${CMAKE_SOURCE_DIR}" REALPATH)
  get_filename_component(bindir "${CMAKE_BINARY_DIR}" REALPATH)
  get_filename_component(srcdir "${CMAKE_CURRENT_SOURCE_DIR}" REALPATH)
  get_filename_component(bindir "${CMAKE_CURRENT_BINARY_DIR}" REALPATH)

  # disallow in-source builds
  if("${srcdir}" STREQUAL "${bindir}")
@@ -69,7 +69,9 @@ public:
  // Boilerplate
  SecureAllocator() {}
  template <typename U> SecureAllocator(const SecureAllocator<U>&) {}
  template <typename U> struct rebind { using other = SecureAllocator<U>; };
  template <typename U> struct rebind {
    using other = SecureAllocator<U>;
  };
};

template <typename T, typename U>
@@ -190,6 +190,7 @@ private:
  using Errors = std::deque<ErrorInfo>;

  bool readToken(Token& token);
  bool readTokenSkippingComments(Token& token);
  void skipSpaces();
  bool match(const Char* pattern, int patternLength);
  bool readComment();
@@ -221,7 +222,6 @@ private:
                                 int& column) const;
  String getLocationLineAndColumn(Location location) const;
  void addComment(Location begin, Location end, CommentPlacement placement);
  void skipCommentTokens(Token& token);

  static bool containsNewLine(Location begin, Location end);
  static String normalizeEOL(Location begin, Location end);
@@ -244,6 +244,12 @@ private:
 */
class JSON_API CharReader {
public:
  struct JSON_API StructuredError {
    ptrdiff_t offset_start;
    ptrdiff_t offset_limit;
    String message;
  };

  virtual ~CharReader() = default;
  /** \brief Read a Value from a <a HREF="http://www.json.org">JSON</a>
   * document. The document must be a UTF-8 encoded string containing the
@@ -262,7 +268,12 @@ public:
   * error occurred.
   */
  virtual bool parse(char const* beginDoc, char const* endDoc, Value* root,
                     String* errs) = 0;
                     String* errs);

  /** \brief Returns a vector of structured errors encountered while parsing.
   * Each parse call resets the stored list of errors.
   */
  std::vector<StructuredError> getStructuredErrors() const;

  class JSON_API Factory {
  public:
@@ -272,6 +283,20 @@ public:
     */
    virtual CharReader* newCharReader() const = 0;
  }; // Factory

protected:
  class Impl {
  public:
    virtual ~Impl() = default;
    virtual bool parse(char const* beginDoc, char const* endDoc, Value* root,
                       String* errs) = 0;
    virtual std::vector<StructuredError> getStructuredErrors() const = 0;
  };

  explicit CharReader(std::unique_ptr<Impl> impl) : _impl(std::move(impl)) {}

private:
  std::unique_ptr<Impl> _impl;
}; // CharReader

/** \brief Build a CharReader implementation.
@@ -360,6 +385,12 @@ public:
   * \snippet src/lib_json/json_reader.cpp CharReaderBuilderStrictMode
   */
  static void strictMode(Json::Value* settings);
  /** ECMA-404 mode.
   * \pre 'settings' != NULL (but Json::null is fine)
   * \remark Defaults:
   * \snippet src/lib_json/json_reader.cpp CharReaderBuilderECMA404Mode
   */
  static void ecma404Mode(Json::Value* settings);
};

/** Consume entire stream and use its begin/end.
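The hunk above moves StructuredError into CharReader and adds getStructuredErrors() to the public interface, with parse() now forwarding to a protected Impl. A minimal caller-side sketch, assuming the amalgamated <json/json.h> header; the malformed input and the printed format are illustrative only:

    #include <json/json.h>
    #include <iostream>
    #include <memory>
    #include <string>

    int main() {
      const std::string doc = "{ 1 : 2 }"; // malformed on purpose
      Json::CharReaderBuilder builder;
      const std::unique_ptr<Json::CharReader> reader(builder.newCharReader());
      Json::Value root;
      Json::String errs;
      const bool ok = reader->parse(doc.data(), doc.data() + doc.size(), &root, &errs);
      if (!ok) {
        // Each StructuredError carries [offset_start, offset_limit) into the
        // input text plus a human-readable message.
        for (const auto& e : reader->getStructuredErrors())
          std::cout << e.offset_start << ".." << e.offset_limit << ": " << e.message << "\n";
      }
      return ok ? 0 : 1;
    }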
@ -3,8 +3,8 @@
|
||||
// recognized in your jurisdiction.
|
||||
// See file LICENSE for detail or copy at http://jsoncpp.sourceforge.net/LICENSE
|
||||
|
||||
#ifndef JSON_H_INCLUDED
|
||||
#define JSON_H_INCLUDED
|
||||
#ifndef JSON_VALUE_H_INCLUDED
|
||||
#define JSON_VALUE_H_INCLUDED
|
||||
|
||||
#if !defined(JSON_IS_AMALGAMATION)
|
||||
#include "forwards.h"
|
||||
@ -586,19 +586,23 @@ public:
|
||||
iterator end();
|
||||
|
||||
/// \brief Returns a reference to the first element in the `Value`.
|
||||
/// Requires that this value holds an array or json object, with at least one element.
|
||||
/// Requires that this value holds an array or json object, with at least one
|
||||
/// element.
|
||||
const Value& front() const;
|
||||
|
||||
/// \brief Returns a reference to the first element in the `Value`.
|
||||
/// Requires that this value holds an array or json object, with at least one element.
|
||||
/// Requires that this value holds an array or json object, with at least one
|
||||
/// element.
|
||||
Value& front();
|
||||
|
||||
/// \brief Returns a reference to the last element in the `Value`.
|
||||
/// Requires that value holds an array or json object, with at least one element.
|
||||
/// Requires that value holds an array or json object, with at least one
|
||||
/// element.
|
||||
const Value& back() const;
|
||||
|
||||
/// \brief Returns a reference to the last element in the `Value`.
|
||||
/// Requires that this value holds an array or json object, with at least one element.
|
||||
/// Requires that this value holds an array or json object, with at least one
|
||||
/// element.
|
||||
Value& back();
|
||||
|
||||
// Accessors for the [start, limit) range of bytes within the JSON text from
|
||||
|
@@ -9,19 +9,18 @@
// 3. /CMakeLists.txt
// IMPORTANT: also update the SOVERSION!!

#define JSONCPP_VERSION_STRING "1.9.5"
#define JSONCPP_VERSION_STRING "1.9.6"
#define JSONCPP_VERSION_MAJOR 1
#define JSONCPP_VERSION_MINOR 9
#define JSONCPP_VERSION_PATCH 5
#define JSONCPP_VERSION_PATCH 6
#define JSONCPP_VERSION_QUALIFIER
#define JSONCPP_VERSION_HEXA \
  ((JSONCPP_VERSION_MAJOR << 24) | (JSONCPP_VERSION_MINOR << 16) | \
   (JSONCPP_VERSION_PATCH << 8))

#ifdef JSONCPP_USING_SECURE_MEMORY
#undef JSONCPP_USING_SECURE_MEMORY
#endif
#if !defined(JSONCPP_USE_SECURE_MEMORY)
#define JSONCPP_USING_SECURE_MEMORY 0
#endif
// If non-zero, the library zeroes any memory that it has allocated before
// it frees its memory.
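The packed JSONCPP_VERSION_HEXA macro makes downstream version checks a one-liner. A small illustrative guard (the constant 0x01090600 encodes 1.9.6 under the scheme above and is not part of the diff):

    #include <json/version.h>
    #if JSONCPP_VERSION_HEXA < 0x01090600
    #error "jsoncpp 1.9.6 or newer is required"
    #endif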
@ -168,8 +168,7 @@ public:
|
||||
#pragma warning(push)
|
||||
#pragma warning(disable : 4996) // Deriving from deprecated class
|
||||
#endif
|
||||
class JSON_API FastWriter
|
||||
: public Writer {
|
||||
class JSON_API FastWriter : public Writer {
|
||||
public:
|
||||
FastWriter();
|
||||
~FastWriter() override = default;
|
||||
@ -228,8 +227,7 @@ private:
|
||||
#pragma warning(push)
|
||||
#pragma warning(disable : 4996) // Deriving from deprecated class
|
||||
#endif
|
||||
class JSON_API
|
||||
StyledWriter : public Writer {
|
||||
class JSON_API StyledWriter : public Writer {
|
||||
public:
|
||||
StyledWriter();
|
||||
~StyledWriter() override = default;
|
||||
@ -297,8 +295,7 @@ private:
|
||||
#pragma warning(push)
|
||||
#pragma warning(disable : 4996) // Deriving from deprecated class
|
||||
#endif
|
||||
class JSON_API
|
||||
StyledStreamWriter {
|
||||
class JSON_API StyledStreamWriter {
|
||||
public:
|
||||
/**
|
||||
* \param indentation Each level will be indented by this amount extra.
|
||||
@ -354,6 +351,7 @@ String JSON_API valueToString(
|
||||
PrecisionType precisionType = PrecisionType::significantDigits);
|
||||
String JSON_API valueToString(bool value);
|
||||
String JSON_API valueToQuotedString(const char* value);
|
||||
String JSON_API valueToQuotedString(const char* value, size_t length);
|
||||
|
||||
/// \brief Output using the StyledStreamWriter.
|
||||
/// \see Json::operator>>()
|
||||
|
@@ -1,5 +1,5 @@
cmake_policy(PUSH)
cmake_policy(VERSION 3.0)
cmake_policy(VERSION 3.0...3.26)

@PACKAGE_INIT@
jsoncppConfig.cmake.meson.in (new file, 8 lines)
@@ -0,0 +1,8 @@
@PACKAGE_INIT@

@MESON_SHARED_TARGET@
@MESON_STATIC_TARGET@

include ( "${CMAKE_CURRENT_LIST_DIR}/jsoncpp-namespaced-targets.cmake" )

check_required_components(JsonCpp)
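This template lets a Meson-built installation be consumed from CMake through find_package(). A consumer sketch under assumed defaults; the project name and source file are placeholders, and the JsonCpp::JsonCpp target is expected to come from the bundled jsoncpp-namespaced-targets.cmake:

    cmake_minimum_required(VERSION 3.15)
    project(consumer CXX)

    find_package(jsoncpp REQUIRED)            # locates the generated jsoncppConfig.cmake
    add_executable(app main.cpp)
    target_link_libraries(app PRIVATE JsonCpp::JsonCpp)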
meson.build (45 changed lines)
@ -9,13 +9,13 @@ project(
|
||||
# 2. /include/json/version.h
|
||||
# 3. /CMakeLists.txt
|
||||
# IMPORTANT: also update the SOVERSION!!
|
||||
version : '1.9.4',
|
||||
version : '1.9.6',
|
||||
default_options : [
|
||||
'buildtype=release',
|
||||
'cpp_std=c++11',
|
||||
'warning_level=1'],
|
||||
license : 'Public Domain',
|
||||
meson_version : '>= 0.49.0')
|
||||
meson_version : '>= 0.54.0')
|
||||
|
||||
|
||||
jsoncpp_headers = files([
|
||||
@ -50,7 +50,7 @@ jsoncpp_lib = library(
|
||||
'src/lib_json/json_value.cpp',
|
||||
'src/lib_json/json_writer.cpp',
|
||||
]),
|
||||
soversion : 25,
|
||||
soversion : 26,
|
||||
install : true,
|
||||
include_directories : jsoncpp_include_directories,
|
||||
cpp_args: dll_export_flag)
|
||||
@ -62,6 +62,43 @@ import('pkgconfig').generate(
|
||||
filebase : 'jsoncpp',
|
||||
description : 'A C++ library for interacting with JSON')
|
||||
|
||||
cmakeconf = configuration_data()
|
||||
cmakeconf.set('MESON_LIB_DIR', get_option('libdir'))
|
||||
cmakeconf.set('MESON_INCLUDE_DIR', get_option('includedir'))
|
||||
|
||||
fs = import('fs')
|
||||
if get_option('default_library') == 'shared'
|
||||
shared_name = fs.name(jsoncpp_lib.full_path())
|
||||
endif
|
||||
if get_option('default_library') == 'static'
|
||||
static_name = fs.name(jsoncpp_lib.full_path())
|
||||
endif
|
||||
if get_option('default_library') == 'both'
|
||||
shared_name = fs.name(jsoncpp_lib.get_shared_lib().full_path())
|
||||
static_name = fs.name(jsoncpp_lib.get_static_lib().full_path())
|
||||
endif
|
||||
|
||||
if get_option('default_library') == 'shared' or get_option('default_library') == 'both'
|
||||
cmakeconf.set('MESON_SHARED_TARGET', '''
|
||||
add_library(jsoncpp_lib IMPORTED SHARED)
|
||||
set_target_properties(jsoncpp_lib PROPERTIES
|
||||
IMPORTED_LOCATION "''' + join_paths('${PACKAGE_PREFIX_DIR}', get_option('libdir'), shared_name) + '''"
|
||||
INTERFACE_INCLUDE_DIRECTORIES "''' + join_paths('${PACKAGE_PREFIX_DIR}', get_option('includedir')) + '")')
|
||||
endif
|
||||
if get_option('default_library') == 'static' or get_option('default_library') == 'both'
|
||||
cmakeconf.set('MESON_STATIC_TARGET', '''
|
||||
add_library(jsoncpp_static IMPORTED STATIC)
|
||||
set_target_properties(jsoncpp_static PROPERTIES
|
||||
IMPORTED_LOCATION "''' + join_paths('${PACKAGE_PREFIX_DIR}', get_option('libdir'), static_name) + '''"
|
||||
INTERFACE_INCLUDE_DIRECTORIES "''' + join_paths('${PACKAGE_PREFIX_DIR}', get_option('includedir')) + '")')
|
||||
endif
|
||||
|
||||
import('cmake').configure_package_config_file(
|
||||
name: 'jsoncpp',
|
||||
input: 'jsoncppConfig.cmake.meson.in',
|
||||
configuration: cmakeconf)
|
||||
install_data('jsoncpp-namespaced-targets.cmake', install_dir : join_paths(get_option('libdir'), 'cmake', jsoncpp_lib.name()))
|
||||
|
||||
# for libraries bundling jsoncpp
|
||||
jsoncpp_dep = declare_dependency(
|
||||
include_directories : jsoncpp_include_directories,
|
||||
@ -73,7 +110,7 @@ if meson.is_subproject() or not get_option('tests')
|
||||
subdir_done()
|
||||
endif
|
||||
|
||||
python = import('python').find_installation()
|
||||
python = find_program('python3')
|
||||
|
||||
jsoncpp_test = executable(
|
||||
'jsoncpp_test', files([
|
||||
|
@ -240,11 +240,14 @@ static int parseCommandLine(int argc, const char* argv[], Options* opts) {
|
||||
return printUsage(argv);
|
||||
}
|
||||
int index = 1;
|
||||
if (Json::String(argv[index]) == "--json-checker") {
|
||||
opts->features = Json::Features::strictMode();
|
||||
if (Json::String(argv[index]) == "--parse-only") {
|
||||
opts->parseOnly = true;
|
||||
++index;
|
||||
}
|
||||
if (Json::String(argv[index]) == "--strict") {
|
||||
opts->features = Json::Features::strictMode();
|
||||
++index;
|
||||
}
|
||||
if (Json::String(argv[index]) == "--json-config") {
|
||||
printConfig();
|
||||
return 3;
|
||||
|
@ -132,7 +132,6 @@ if(BUILD_SHARED_LIBS)
|
||||
target_include_directories(${SHARED_LIB} PUBLIC
|
||||
$<INSTALL_INTERFACE:${CMAKE_INSTALL_INCLUDEDIR}>
|
||||
$<BUILD_INTERFACE:${CMAKE_CURRENT_LIST_DIR}/${JSONCPP_INCLUDE_DIR}>
|
||||
$<BUILD_INTERFACE:${PROJECT_BINARY_DIR}/include/json>
|
||||
)
|
||||
|
||||
list(APPEND CMAKE_TARGETS ${SHARED_LIB})
|
||||
@ -144,7 +143,7 @@ if(BUILD_STATIC_LIBS)
|
||||
|
||||
# avoid name clashes on windows as the shared import lib is also named jsoncpp.lib
|
||||
if(NOT DEFINED STATIC_SUFFIX AND BUILD_SHARED_LIBS)
|
||||
if (MSVC)
|
||||
if (WIN32)
|
||||
set(STATIC_SUFFIX "_static")
|
||||
else()
|
||||
set(STATIC_SUFFIX "")
|
||||
@ -166,7 +165,6 @@ if(BUILD_STATIC_LIBS)
|
||||
target_include_directories(${STATIC_LIB} PUBLIC
|
||||
$<INSTALL_INTERFACE:${CMAKE_INSTALL_INCLUDEDIR}>
|
||||
$<BUILD_INTERFACE:${CMAKE_CURRENT_LIST_DIR}/${JSONCPP_INCLUDE_DIR}>
|
||||
$<BUILD_INTERFACE:${PROJECT_BINARY_DIR}/include/json>
|
||||
)
|
||||
|
||||
list(APPEND CMAKE_TARGETS ${STATIC_LIB})
|
||||
@ -193,7 +191,6 @@ if(BUILD_OBJECT_LIBS)
|
||||
target_include_directories(${OBJECT_LIB} PUBLIC
|
||||
$<INSTALL_INTERFACE:${CMAKE_INSTALL_INCLUDEDIR}>
|
||||
$<BUILD_INTERFACE:${CMAKE_CURRENT_LIST_DIR}/${JSONCPP_INCLUDE_DIR}>
|
||||
$<BUILD_INTERFACE:${PROJECT_BINARY_DIR}/include/json>
|
||||
)
|
||||
|
||||
list(APPEND CMAKE_TARGETS ${OBJECT_LIB})
|
||||
|
@ -129,7 +129,7 @@ bool Reader::parse(const char* beginDoc, const char* endDoc, Value& root,
|
||||
|
||||
bool successful = readValue();
|
||||
Token token;
|
||||
skipCommentTokens(token);
|
||||
readTokenSkippingComments(token);
|
||||
if (collectComments_ && !commentsBefore_.empty())
|
||||
root.setComment(commentsBefore_, commentAfter);
|
||||
if (features_.strictRoot_) {
|
||||
@ -157,7 +157,7 @@ bool Reader::readValue() {
|
||||
throwRuntimeError("Exceeded stackLimit in readValue().");
|
||||
|
||||
Token token;
|
||||
skipCommentTokens(token);
|
||||
readTokenSkippingComments(token);
|
||||
bool successful = true;
|
||||
|
||||
if (collectComments_ && !commentsBefore_.empty()) {
|
||||
@ -225,14 +225,14 @@ bool Reader::readValue() {
|
||||
return successful;
|
||||
}
|
||||
|
||||
void Reader::skipCommentTokens(Token& token) {
|
||||
bool Reader::readTokenSkippingComments(Token& token) {
|
||||
bool success = readToken(token);
|
||||
if (features_.allowComments_) {
|
||||
do {
|
||||
readToken(token);
|
||||
} while (token.type_ == tokenComment);
|
||||
} else {
|
||||
readToken(token);
|
||||
while (success && token.type_ == tokenComment) {
|
||||
success = readToken(token);
|
||||
}
|
||||
}
|
||||
return success;
|
||||
}
|
||||
|
||||
bool Reader::readToken(Token& token) {
|
||||
@ -446,12 +446,7 @@ bool Reader::readObject(Token& token) {
|
||||
Value init(objectValue);
|
||||
currentValue().swapPayload(init);
|
||||
currentValue().setOffsetStart(token.start_ - begin_);
|
||||
while (readToken(tokenName)) {
|
||||
bool initialTokenOk = true;
|
||||
while (tokenName.type_ == tokenComment && initialTokenOk)
|
||||
initialTokenOk = readToken(tokenName);
|
||||
if (!initialTokenOk)
|
||||
break;
|
||||
while (readTokenSkippingComments(tokenName)) {
|
||||
if (tokenName.type_ == tokenObjectEnd && name.empty()) // empty object
|
||||
return true;
|
||||
name.clear();
|
||||
@ -480,15 +475,11 @@ bool Reader::readObject(Token& token) {
|
||||
return recoverFromError(tokenObjectEnd);
|
||||
|
||||
Token comma;
|
||||
if (!readToken(comma) ||
|
||||
(comma.type_ != tokenObjectEnd && comma.type_ != tokenArraySeparator &&
|
||||
comma.type_ != tokenComment)) {
|
||||
if (!readTokenSkippingComments(comma) ||
|
||||
(comma.type_ != tokenObjectEnd && comma.type_ != tokenArraySeparator)) {
|
||||
return addErrorAndRecover("Missing ',' or '}' in object declaration",
|
||||
comma, tokenObjectEnd);
|
||||
}
|
||||
bool finalizeTokenOk = true;
|
||||
while (comma.type_ == tokenComment && finalizeTokenOk)
|
||||
finalizeTokenOk = readToken(comma);
|
||||
if (comma.type_ == tokenObjectEnd)
|
||||
return true;
|
||||
}
|
||||
@ -518,10 +509,7 @@ bool Reader::readArray(Token& token) {
|
||||
|
||||
Token currentToken;
|
||||
// Accept Comment after last item in the array.
|
||||
ok = readToken(currentToken);
|
||||
while (currentToken.type_ == tokenComment && ok) {
|
||||
ok = readToken(currentToken);
|
||||
}
|
||||
ok = readTokenSkippingComments(currentToken);
|
||||
bool badTokenType = (currentToken.type_ != tokenArraySeparator &&
|
||||
currentToken.type_ != tokenArrayEnd);
|
||||
if (!ok || badTokenType) {
|
||||
@ -599,8 +587,7 @@ bool Reader::decodeDouble(Token& token) {
|
||||
|
||||
bool Reader::decodeDouble(Token& token, Value& decoded) {
|
||||
double value = 0;
|
||||
String buffer(token.start_, token.end_);
|
||||
IStringStream is(buffer);
|
||||
IStringStream is(String(token.start_, token.end_));
|
||||
if (!(is >> value)) {
|
||||
if (value == std::numeric_limits<double>::max())
|
||||
value = std::numeric_limits<double>::infinity();
|
||||
@ -773,7 +760,7 @@ void Reader::getLocationLineAndColumn(Location location, int& line,
|
||||
while (current < location && current != end_) {
|
||||
Char c = *current++;
|
||||
if (c == '\r') {
|
||||
if (*current == '\n')
|
||||
if (current != end_ && *current == '\n')
|
||||
++current;
|
||||
lastLineStart = current;
|
||||
++line;
|
||||
@ -890,17 +877,12 @@ class OurReader {
|
||||
public:
|
||||
using Char = char;
|
||||
using Location = const Char*;
|
||||
struct StructuredError {
|
||||
ptrdiff_t offset_start;
|
||||
ptrdiff_t offset_limit;
|
||||
String message;
|
||||
};
|
||||
|
||||
explicit OurReader(OurFeatures const& features);
|
||||
bool parse(const char* beginDoc, const char* endDoc, Value& root,
|
||||
bool collectComments = true);
|
||||
String getFormattedErrorMessages() const;
|
||||
std::vector<StructuredError> getStructuredErrors() const;
|
||||
std::vector<CharReader::StructuredError> getStructuredErrors() const;
|
||||
|
||||
private:
|
||||
OurReader(OurReader const&); // no impl
|
||||
@ -943,6 +925,7 @@ private:
|
||||
using Errors = std::deque<ErrorInfo>;
|
||||
|
||||
bool readToken(Token& token);
|
||||
bool readTokenSkippingComments(Token& token);
|
||||
void skipSpaces();
|
||||
void skipBom(bool skipBom);
|
||||
bool match(const Char* pattern, int patternLength);
|
||||
@ -976,7 +959,6 @@ private:
|
||||
int& column) const;
|
||||
String getLocationLineAndColumn(Location location) const;
|
||||
void addComment(Location begin, Location end, CommentPlacement placement);
|
||||
void skipCommentTokens(Token& token);
|
||||
|
||||
static String normalizeEOL(Location begin, Location end);
|
||||
static bool containsNewLine(Location begin, Location end);
|
||||
@ -1030,7 +1012,7 @@ bool OurReader::parse(const char* beginDoc, const char* endDoc, Value& root,
|
||||
bool successful = readValue();
|
||||
nodes_.pop();
|
||||
Token token;
|
||||
skipCommentTokens(token);
|
||||
readTokenSkippingComments(token);
|
||||
if (features_.failIfExtra_ && (token.type_ != tokenEndOfStream)) {
|
||||
addError("Extra non-whitespace after JSON value.", token);
|
||||
return false;
|
||||
@ -1058,7 +1040,7 @@ bool OurReader::readValue() {
|
||||
if (nodes_.size() > features_.stackLimit_)
|
||||
throwRuntimeError("Exceeded stackLimit in readValue().");
|
||||
Token token;
|
||||
skipCommentTokens(token);
|
||||
readTokenSkippingComments(token);
|
||||
bool successful = true;
|
||||
|
||||
if (collectComments_ && !commentsBefore_.empty()) {
|
||||
@ -1145,14 +1127,14 @@ bool OurReader::readValue() {
|
||||
return successful;
|
||||
}
|
||||
|
||||
void OurReader::skipCommentTokens(Token& token) {
|
||||
bool OurReader::readTokenSkippingComments(Token& token) {
|
||||
bool success = readToken(token);
|
||||
if (features_.allowComments_) {
|
||||
do {
|
||||
readToken(token);
|
||||
} while (token.type_ == tokenComment);
|
||||
} else {
|
||||
readToken(token);
|
||||
while (success && token.type_ == tokenComment) {
|
||||
success = readToken(token);
|
||||
}
|
||||
}
|
||||
return success;
|
||||
}
|
||||
|
||||
bool OurReader::readToken(Token& token) {
|
||||
@ -1449,12 +1431,7 @@ bool OurReader::readObject(Token& token) {
|
||||
Value init(objectValue);
|
||||
currentValue().swapPayload(init);
|
||||
currentValue().setOffsetStart(token.start_ - begin_);
|
||||
while (readToken(tokenName)) {
|
||||
bool initialTokenOk = true;
|
||||
while (tokenName.type_ == tokenComment && initialTokenOk)
|
||||
initialTokenOk = readToken(tokenName);
|
||||
if (!initialTokenOk)
|
||||
break;
|
||||
while (readTokenSkippingComments(tokenName)) {
|
||||
if (tokenName.type_ == tokenObjectEnd &&
|
||||
(name.empty() ||
|
||||
features_.allowTrailingCommas_)) // empty object or trailing comma
|
||||
@ -1491,15 +1468,11 @@ bool OurReader::readObject(Token& token) {
|
||||
return recoverFromError(tokenObjectEnd);
|
||||
|
||||
Token comma;
|
||||
if (!readToken(comma) ||
|
||||
(comma.type_ != tokenObjectEnd && comma.type_ != tokenArraySeparator &&
|
||||
comma.type_ != tokenComment)) {
|
||||
if (!readTokenSkippingComments(comma) ||
|
||||
(comma.type_ != tokenObjectEnd && comma.type_ != tokenArraySeparator)) {
|
||||
return addErrorAndRecover("Missing ',' or '}' in object declaration",
|
||||
comma, tokenObjectEnd);
|
||||
}
|
||||
bool finalizeTokenOk = true;
|
||||
while (comma.type_ == tokenComment && finalizeTokenOk)
|
||||
finalizeTokenOk = readToken(comma);
|
||||
if (comma.type_ == tokenObjectEnd)
|
||||
return true;
|
||||
}
|
||||
@ -1533,10 +1506,7 @@ bool OurReader::readArray(Token& token) {
|
||||
|
||||
Token currentToken;
|
||||
// Accept Comment after last item in the array.
|
||||
ok = readToken(currentToken);
|
||||
while (currentToken.type_ == tokenComment && ok) {
|
||||
ok = readToken(currentToken);
|
||||
}
|
||||
ok = readTokenSkippingComments(currentToken);
|
||||
bool badTokenType = (currentToken.type_ != tokenArraySeparator &&
|
||||
currentToken.type_ != tokenArrayEnd);
|
||||
if (!ok || badTokenType) {
|
||||
@ -1651,8 +1621,7 @@ bool OurReader::decodeDouble(Token& token) {
|
||||
|
||||
bool OurReader::decodeDouble(Token& token, Value& decoded) {
|
||||
double value = 0;
|
||||
const String buffer(token.start_, token.end_);
|
||||
IStringStream is(buffer);
|
||||
IStringStream is(String(token.start_, token.end_));
|
||||
if (!(is >> value)) {
|
||||
if (value == std::numeric_limits<double>::max())
|
||||
value = std::numeric_limits<double>::infinity();
|
||||
@ -1825,7 +1794,7 @@ void OurReader::getLocationLineAndColumn(Location location, int& line,
|
||||
while (current < location && current != end_) {
|
||||
Char c = *current++;
|
||||
if (c == '\r') {
|
||||
if (*current == '\n')
|
||||
if (current != end_ && *current == '\n')
|
||||
++current;
|
||||
lastLineStart = current;
|
||||
++line;
|
||||
@ -1860,10 +1829,11 @@ String OurReader::getFormattedErrorMessages() const {
|
||||
return formattedMessage;
|
||||
}
|
||||
|
||||
std::vector<OurReader::StructuredError> OurReader::getStructuredErrors() const {
|
||||
std::vector<OurReader::StructuredError> allErrors;
|
||||
std::vector<CharReader::StructuredError>
|
||||
OurReader::getStructuredErrors() const {
|
||||
std::vector<CharReader::StructuredError> allErrors;
|
||||
for (const auto& error : errors_) {
|
||||
OurReader::StructuredError structured;
|
||||
CharReader::StructuredError structured;
|
||||
structured.offset_start = error.token_.start_ - begin_;
|
||||
structured.offset_limit = error.token_.end_ - begin_;
|
||||
structured.message = error.message_;
|
||||
@ -1873,12 +1843,18 @@ std::vector<OurReader::StructuredError> OurReader::getStructuredErrors() const {
|
||||
}
|
||||
|
||||
class OurCharReader : public CharReader {
|
||||
bool const collectComments_;
|
||||
OurReader reader_;
|
||||
|
||||
public:
|
||||
OurCharReader(bool collectComments, OurFeatures const& features)
|
||||
: CharReader(
|
||||
std::unique_ptr<OurImpl>(new OurImpl(collectComments, features))) {}
|
||||
|
||||
protected:
|
||||
class OurImpl : public Impl {
|
||||
public:
|
||||
OurImpl(bool collectComments, OurFeatures const& features)
|
||||
: collectComments_(collectComments), reader_(features) {}
|
||||
|
||||
bool parse(char const* beginDoc, char const* endDoc, Value* root,
|
||||
String* errs) override {
|
||||
bool ok = reader_.parse(beginDoc, endDoc, *root, collectComments_);
|
||||
@ -1887,6 +1863,16 @@ public:
|
||||
}
|
||||
return ok;
|
||||
}
|
||||
|
||||
std::vector<CharReader::StructuredError>
|
||||
getStructuredErrors() const override {
|
||||
return reader_.getStructuredErrors();
|
||||
}
|
||||
|
||||
private:
|
||||
bool const collectComments_;
|
||||
OurReader reader_;
|
||||
};
|
||||
};
|
||||
|
||||
CharReaderBuilder::CharReaderBuilder() { setDefaults(&settings_); }
|
||||
@ -1975,6 +1961,32 @@ void CharReaderBuilder::setDefaults(Json::Value* settings) {
|
||||
(*settings)["skipBom"] = true;
|
||||
//! [CharReaderBuilderDefaults]
|
||||
}
|
||||
// static
|
||||
void CharReaderBuilder::ecma404Mode(Json::Value* settings) {
|
||||
//! [CharReaderBuilderECMA404Mode]
|
||||
(*settings)["allowComments"] = false;
|
||||
(*settings)["allowTrailingCommas"] = false;
|
||||
(*settings)["strictRoot"] = false;
|
||||
(*settings)["allowDroppedNullPlaceholders"] = false;
|
||||
(*settings)["allowNumericKeys"] = false;
|
||||
(*settings)["allowSingleQuotes"] = false;
|
||||
(*settings)["stackLimit"] = 1000;
|
||||
(*settings)["failIfExtra"] = true;
|
||||
(*settings)["rejectDupKeys"] = false;
|
||||
(*settings)["allowSpecialFloats"] = false;
|
||||
(*settings)["skipBom"] = false;
|
||||
//! [CharReaderBuilderECMA404Mode]
|
||||
}
|
||||
|
||||
std::vector<CharReader::StructuredError>
|
||||
CharReader::getStructuredErrors() const {
|
||||
return _impl->getStructuredErrors();
|
||||
}
|
||||
|
||||
bool CharReader::parse(char const* beginDoc, char const* endDoc, Value* root,
|
||||
String* errs) {
|
||||
return _impl->parse(beginDoc, endDoc, root, errs);
|
||||
}
|
||||
|
||||
//////////////////////////////////
|
||||
// global functions
|
||||
@ -1983,7 +1995,7 @@ bool parseFromStream(CharReader::Factory const& fact, IStream& sin, Value* root,
|
||||
String* errs) {
|
||||
OStringStream ssin;
|
||||
ssin << sin.rdbuf();
|
||||
String doc = ssin.str();
|
||||
String doc = std::move(ssin).str();
|
||||
char const* begin = doc.data();
|
||||
char const* end = begin + doc.size();
|
||||
// Note that we do not actually need a null-terminator.
|
||||
|
@ -1205,7 +1205,7 @@ bool Value::removeIndex(ArrayIndex index, Value* removed) {
|
||||
return false;
|
||||
}
|
||||
if (removed)
|
||||
*removed = it->second;
|
||||
*removed = std::move(it->second);
|
||||
ArrayIndex oldSize = size();
|
||||
// shift left all items left, into the place of the "removed"
|
||||
for (ArrayIndex i = index; i < (oldSize - 1); ++i) {
|
||||
@ -1410,9 +1410,8 @@ void Value::setComment(String comment, CommentPlacement placement) {
|
||||
// Always discard trailing newline, to aid indentation.
|
||||
comment.pop_back();
|
||||
}
|
||||
JSON_ASSERT(!comment.empty());
|
||||
JSON_ASSERT_MESSAGE(
|
||||
comment[0] == '\0' || comment[0] == '/',
|
||||
comment.empty() || comment[0] == '/',
|
||||
"in Json::Value::setComment(): Comments must start with /");
|
||||
comments_.set(placement, std::move(comment));
|
||||
}
|
||||
|
@ -132,8 +132,9 @@ String valueToString(double value, bool useSpecialFloats,
|
||||
if (!isfinite(value)) {
|
||||
static const char* const reps[2][3] = {{"NaN", "-Infinity", "Infinity"},
|
||||
{"null", "-1e+9999", "1e+9999"}};
|
||||
return reps[useSpecialFloats ? 0 : 1]
|
||||
[isnan(value) ? 0 : (value < 0) ? 1 : 2];
|
||||
return reps[useSpecialFloats ? 0 : 1][isnan(value) ? 0
|
||||
: (value < 0) ? 1
|
||||
: 2];
|
||||
}
|
||||
|
||||
String buffer(size_t(36), '\0');
|
||||
@ -353,6 +354,10 @@ String valueToQuotedString(const char* value) {
|
||||
return valueToQuotedStringN(value, strlen(value));
|
||||
}
|
||||
|
||||
String valueToQuotedString(const char* value, size_t length) {
|
||||
return valueToQuotedStringN(value, length);
|
||||
}
|
||||
|
||||
// Class Writer
|
||||
// //////////////////////////////////////////////////////////////////
|
||||
Writer::~Writer() = default;
|
||||
@ -490,7 +495,7 @@ void StyledWriter::writeValue(const Value& value) {
|
||||
const String& name = *it;
|
||||
const Value& childValue = value[name];
|
||||
writeCommentBeforeValue(childValue);
|
||||
writeWithIndent(valueToQuotedString(name.c_str()));
|
||||
writeWithIndent(valueToQuotedString(name.c_str(), name.size()));
|
||||
document_ += " : ";
|
||||
writeValue(childValue);
|
||||
if (++it == members.end()) {
|
||||
@ -708,7 +713,7 @@ void StyledStreamWriter::writeValue(const Value& value) {
|
||||
const String& name = *it;
|
||||
const Value& childValue = value[name];
|
||||
writeCommentBeforeValue(childValue);
|
||||
writeWithIndent(valueToQuotedString(name.c_str()));
|
||||
writeWithIndent(valueToQuotedString(name.c_str(), name.size()));
|
||||
*document_ << " : ";
|
||||
writeValue(childValue);
|
||||
if (++it == members.end()) {
|
||||
@ -1246,7 +1251,7 @@ String writeString(StreamWriter::Factory const& factory, Value const& root) {
|
||||
OStringStream sout;
|
||||
StreamWriterPtr const writer(factory.newStreamWriter());
|
||||
writer->write(root, &sout);
|
||||
return sout.str();
|
||||
return std::move(sout).str();
|
||||
}
|
||||
|
||||
OStream& operator<<(OStream& sout, Value const& root) {
|
||||
|
@ -3632,12 +3632,12 @@ JSONTEST_FIXTURE_LOCAL(CharReaderAllowSpecialFloatsTest, issue209) {
|
||||
for (const auto& td : test_data) {
|
||||
bool ok = reader->parse(&*td.in.begin(), &*td.in.begin() + td.in.size(),
|
||||
&root, &errs);
|
||||
JSONTEST_ASSERT(td.ok == ok) << "line:" << td.line << "\n"
|
||||
<< " expected: {"
|
||||
<< "ok:" << td.ok << ", in:\'" << td.in << "\'"
|
||||
<< "}\n"
|
||||
<< " actual: {"
|
||||
<< "ok:" << ok << "}\n";
|
||||
// clang-format off
|
||||
JSONTEST_ASSERT(td.ok == ok) <<
|
||||
"line:" << td.line << "\n " <<
|
||||
"expected: {ok:" << td.ok << ", in:\'" << td.in << "\'}\n " <<
|
||||
"actual: {ok:" << ok << "}\n";
|
||||
// clang-format on
|
||||
}
|
||||
|
||||
{
|
||||
@ -3917,6 +3917,36 @@ JSONTEST_FIXTURE_LOCAL(FuzzTest, fuzzDoesntCrash) {
|
||||
example.size()));
|
||||
}
|
||||
|
||||
struct ParseWithStructuredErrorsTest : JsonTest::TestCase {
|
||||
void testErrors(
|
||||
const std::string& doc, bool success,
|
||||
const std::vector<Json::CharReader::StructuredError>& expectedErrors) {
|
||||
Json::CharReaderBuilder b;
|
||||
CharReaderPtr reader(b.newCharReader());
|
||||
Json::Value root;
|
||||
JSONTEST_ASSERT_EQUAL(
|
||||
reader->parse(doc.data(), doc.data() + doc.length(), &root, nullptr),
|
||||
success);
|
||||
auto actualErrors = reader->getStructuredErrors();
|
||||
JSONTEST_ASSERT_EQUAL(expectedErrors.size(), actualErrors.size());
|
||||
for (std::size_t i = 0; i < actualErrors.size(); i++) {
|
||||
const auto& a = actualErrors[i];
|
||||
const auto& e = expectedErrors[i];
|
||||
JSONTEST_ASSERT_EQUAL(a.offset_start, e.offset_start);
|
||||
JSONTEST_ASSERT_EQUAL(a.offset_limit, e.offset_limit);
|
||||
JSONTEST_ASSERT_STRING_EQUAL(a.message, e.message);
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
JSONTEST_FIXTURE_LOCAL(ParseWithStructuredErrorsTest, success) {
|
||||
testErrors("{}", true, {});
|
||||
}
|
||||
|
||||
JSONTEST_FIXTURE_LOCAL(ParseWithStructuredErrorsTest, singleError) {
|
||||
testErrors("{ 1 : 2 }", false, {{2, 3, "Missing '}' or object member name"}});
|
||||
}
|
||||
|
||||
int main(int argc, const char* argv[]) {
|
||||
JsonTest::Runner runner;
|
||||
|
||||
|
test/data/fail_strict_comment_01.json (new file, 4 lines)
@@ -0,0 +1,4 @@
{
  "a": "aaa",
  "b": "bbb" // comments not allowed in strict mode
}
test/data/fail_strict_comment_02.json (new file, 4 lines)
@@ -0,0 +1,4 @@
{
  "a": "aaa", // comments not allowed in strict mode
  "b": "bbb"
}
test/data/fail_strict_comment_03.json (new file, 3 lines)
@@ -0,0 +1,3 @@
{
  "array" : [1, 2, 3 /* comments not allowed in strict mode */]
}
test/data/fail_test_object_02.json (new file, 1 line)
@@ -0,0 +1 @@
{"one": 1 /* } */ { "two" : 2 }
@@ -97,14 +97,17 @@ def runAllTests(jsontest_executable_path, input_dir = None,
    valgrind_path = use_valgrind and VALGRIND_CMD or ''
    for input_path in tests + test_jsonchecker:
        expect_failure = os.path.basename(input_path).startswith('fail')
        is_json_checker_test = (input_path in test_jsonchecker) or expect_failure
        is_json_checker_test = input_path in test_jsonchecker
        is_parse_only = is_json_checker_test or expect_failure
        is_strict_test = ('_strict_' in os.path.basename(input_path)) or is_json_checker_test
        print('TESTING:', input_path, end=' ')
        options = is_json_checker_test and '--json-checker' or ''
        options = is_parse_only and '--parse-only' or ''
        options += is_strict_test and ' --strict' or ''
        options += ' --json-writer %s'%writerClass
        cmd = '%s%s %s "%s"' % ( valgrind_path, jsontest_executable_path, options,
                                 input_path)
        status, process_output = getStatusOutput(cmd)
        if is_json_checker_test:
        if is_parse_only:
            if expect_failure:
                if not status:
                    print('FAILED')