init repo.

commit 7d7845acb5 by tqcq, 2024-12-19 13:14:37 +08:00
1412 changed files with 596214 additions and 0 deletions

15
third_party/fmt/support/Android.mk vendored Normal file

@@ -0,0 +1,15 @@
LOCAL_PATH := $(call my-dir)
include $(CLEAR_VARS)
LOCAL_MODULE := fmt_static
LOCAL_MODULE_FILENAME := libfmt
LOCAL_SRC_FILES := ../src/format.cc
LOCAL_C_INCLUDES := $(LOCAL_PATH)
LOCAL_EXPORT_C_INCLUDES := $(LOCAL_PATH)
LOCAL_CFLAGS += -std=c++11 -fexceptions
include $(BUILD_STATIC_LIBRARY)
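# Consumer sketch (illustrative): an application's Android.mk can link against
# this module with
#   LOCAL_STATIC_LIBRARIES += fmt_static
# after making this directory reachable, e.g. via NDK_MODULE_PATH and
#   $(call import-module,fmt/support)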

1
third_party/fmt/support/AndroidManifest.xml vendored Normal file

@@ -0,0 +1 @@
<manifest package="dev.fmt" />

2061
third_party/fmt/support/C++.sublime-syntax vendored Normal file

File diff suppressed because it is too large.

4
third_party/fmt/support/README vendored Normal file

@@ -0,0 +1,4 @@
This directory contains build support files such as
* CMake modules
* Build scripts

20
third_party/fmt/support/Vagrantfile vendored Normal file

@@ -0,0 +1,20 @@
# -*- mode: ruby -*-
# vi: set ft=ruby :
# A vagrant config for testing against gcc-4.8.
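# Note: config.disksize below requires the vagrant-disksize plugin
# (install it with `vagrant plugin install vagrant-disksize` before `vagrant up`).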
Vagrant.configure("2") do |config|
config.vm.box = "ubuntu/xenial64"
config.disksize.size = '15GB'
config.vm.provider "virtualbox" do |vb|
vb.memory = "4096"
end
config.vm.provision "shell", inline: <<-SHELL
apt-get update
apt-get install -y g++ make wget git
wget -q https://github.com/Kitware/CMake/releases/download/v3.26.0/cmake-3.26.0-Linux-x86_64.tar.gz
tar xzf cmake-3.26.0-Linux-x86_64.tar.gz
ln -s `pwd`/cmake-3.26.0-Linux-x86_64/bin/cmake /usr/local/bin
SHELL
end

1
third_party/fmt/support/bazel/.bazelversion vendored Normal file

@@ -0,0 +1 @@
6.1.2

28
third_party/fmt/support/bazel/BUILD.bazel vendored Normal file

@@ -0,0 +1,28 @@
cc_library(
name = "fmt",
srcs = [
#"src/fmt.cc", # No C++ module support
"src/format.cc",
"src/os.cc",
],
hdrs = [
"include/fmt/args.h",
"include/fmt/chrono.h",
"include/fmt/color.h",
"include/fmt/compile.h",
"include/fmt/core.h",
"include/fmt/format.h",
"include/fmt/format-inl.h",
"include/fmt/os.h",
"include/fmt/ostream.h",
"include/fmt/printf.h",
"include/fmt/ranges.h",
"include/fmt/std.h",
"include/fmt/xchar.h",
],
includes = [
"include",
],
strip_include_prefix = "include",
visibility = ["//visibility:public"],
)

74
third_party/fmt/support/bazel/README.md vendored Normal file

@@ -0,0 +1,74 @@
# Bazel support
To get [Bazel](https://bazel.build/) working with {fmt}, copy the files `BUILD.bazel`, `WORKSPACE.bazel`, and `.bazelversion` from this folder (`support/bazel`) to the root folder of this project. This bazelizes {fmt} so it can be built with Bazel (e.g. by running `bazel build //...` in the {fmt} root).
## Using {fmt} as a dependency
The following minimal example shows how to use {fmt} as a dependency within a Bazel project.
It assumes this file structure:
```
example
├── BUILD.bazel
├── main.cpp
└── WORKSPACE.bazel
```
*main.cpp*:
```c++
#include "fmt/core.h"
int main() {
fmt::print("The answer is {}\n", 42);
}
```
The expected output of this example is `The answer is 42`.
*WORKSPACE.bazel*:
```python
load("@bazel_tools//tools/build_defs/repo:git.bzl", "git_repository")
git_repository(
name = "fmt",
branch = "master",
remote = "https://github.com/fmtlib/fmt",
patch_cmds = [
"mv support/bazel/.bazelversion .bazelversion",
"mv support/bazel/BUILD.bazel BUILD.bazel",
"mv support/bazel/WORKSPACE.bazel WORKSPACE.bazel",
],
# The Windows-related patch commands are only needed if MSYS2 is not installed.
# More details about installing MSYS2 on Windows systems can be found here:
# https://docs.bazel.build/versions/main/install-windows.html#installing-compilers-and-language-runtimes
# Even if MSYS2 is installed, the Windows-related patch commands can still be used.
patch_cmds_win = [
"Move-Item -Path support/bazel/.bazelversion -Destination .bazelversion",
"Move-Item -Path support/bazel/BUILD.bazel -Destination BUILD.bazel",
"Move-Item -Path support/bazel/WORKSPACE.bazel -Destination WORKSPACE.bazel",
],
)
```
In the *WORKSPACE* file, the {fmt} GitHub repository is fetched. The `patch_cmds` attribute moves the files `BUILD.bazel`, `WORKSPACE.bazel`, and `.bazelversion` to the root of the {fmt} repository, so that the repository is recognized as a bazelized workspace.
*BUILD.bazel*:
```python
cc_binary(
name = "Demo",
srcs = ["main.cpp"],
deps = ["@fmt"],
)
```
The *BUILD* file defines a binary named `Demo` with a dependency on {fmt}.
To execute the binary, run `bazel run //:Demo`.
# Using Bzlmod
The [Bazel Central Registry](https://github.com/bazelbuild/bazel-central-registry/tree/main/modules/fmt) also provides support for {fmt}.
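With Bzlmod, the `WORKSPACE`-based `git_repository` setup shown above is not needed; declaring the dependency in `MODULE.bazel` is enough. A minimal sketch (the version number is illustrative; pick the latest one published in the registry):
```python
# MODULE.bazel (illustrative; use the latest {fmt} version from the Bazel Central Registry)
bazel_dep(name = "fmt", version = "10.2.1")
```
Targets can then depend on `@fmt//:fmt` (or the shorthand `@fmt`) exactly as in the `BUILD.bazel` example above.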

1
third_party/fmt/support/bazel/WORKSPACE.bazel vendored Normal file

@@ -0,0 +1 @@
workspace(name = "fmt")

58
third_party/fmt/support/build-docs.py vendored Executable file

@@ -0,0 +1,58 @@
#!/usr/bin/env python
# Build the documentation in CI.
from __future__ import print_function
import errno, os, shutil, subprocess, sys, urllib
from subprocess import call, check_call, Popen, PIPE, STDOUT
def rmtree_if_exists(dir):
try:
shutil.rmtree(dir)
except OSError as e:
if e.errno == errno.ENOENT:
pass
# Build the docs.
fmt_dir = os.path.dirname(os.path.dirname(os.path.realpath(__file__)))
sys.path.insert(0, os.path.join(fmt_dir, 'doc'))
import build
build.create_build_env()
html_dir = build.build_docs()
repo = 'fmtlib.github.io'
branch = os.environ['GITHUB_REF']
is_ci = 'CI' in os.environ
if is_ci and branch != 'refs/heads/master':
print('Branch: ' + branch)
exit(0) # Ignore non-master branches
if is_ci and 'KEY' not in os.environ:
# Don't update the repo if building in CI from an account that doesn't have
# push access.
print('Skipping update of ' + repo)
exit(0)
# Clone the fmtlib.github.io repo.
rmtree_if_exists(repo)
git_url = 'https://github.com/' if is_ci else 'git@github.com:'
check_call(['git', 'clone', git_url + 'fmtlib/{}.git'.format(repo)])
# Copy docs to the repo.
target_dir = os.path.join(repo, 'dev')
rmtree_if_exists(target_dir)
shutil.copytree(html_dir, target_dir, ignore=shutil.ignore_patterns('.*'))
if is_ci:
check_call(['git', 'config', '--global', 'user.name', 'fmtbot'])
check_call(['git', 'config', '--global', 'user.email', 'viz@fmt.dev'])
# Push docs to GitHub pages.
check_call(['git', 'add', '--all'], cwd=repo)
if call(['git', 'diff-index', '--quiet', 'HEAD'], cwd=repo):
check_call(['git', 'commit', '-m', 'Update documentation'], cwd=repo)
cmd = 'git push'
if is_ci:
cmd += ' https://$KEY@github.com/fmtlib/fmtlib.github.io.git master'
p = Popen(cmd, shell=True, stdout=PIPE, stderr=STDOUT, cwd=repo)
# Print the output without the key.
print(p.communicate()[0].decode('utf-8').replace(os.environ['KEY'], '$KEY'))
if p.returncode != 0:
raise subprocess.CalledProcessError(p.returncode, cmd)

132
third_party/fmt/support/build.gradle vendored Normal file

@@ -0,0 +1,132 @@
import java.nio.file.Paths
// General gradle arguments for root project
buildscript {
repositories {
google()
jcenter()
}
dependencies {
//
// https://developer.android.com/studio/releases/gradle-plugin#updating-gradle
//
// Notice that 4.1.1 here is the version of the [Android Gradle Plugin]
// According to the URL above, you will need Gradle 6.1 or higher
//
classpath "com.android.tools.build:gradle:4.1.1"
}
}
repositories {
google()
jcenter()
}
// Project's root where CMakeLists.txt exists: rootDir/support/.cxx -> rootDir
def rootDir = Paths.get(project.buildDir.getParent()).getParent()
println("rootDir: ${rootDir}")
// Output: Shared library (.so) for Android
apply plugin: "com.android.library"
android {
compileSdkVersion 25 // Android 7.0
// Target ABI
// - This option controls the target platforms of the module
// - The supported platforms might be limited by the compiler:
//   some can work with Clang (the default), but some only with GCC,
//   and some might not be supported by either toolchain
splits {
abi {
enable true
// Specify platforms for Application
reset()
include "arm64-v8a", "armeabi-v7a", "x86_64"
}
}
ndkVersion "21.3.6528147" // ANDROID_NDK_HOME is deprecated. Be explicit
defaultConfig {
minSdkVersion 21 // Android 5.0+
targetSdkVersion 25 // Follow Compile SDK
versionCode 34 // Follow release count
versionName "7.1.2" // Follow Official version
externalNativeBuild {
cmake {
arguments "-DANDROID_STL=c++_shared" // Specify Android STL
arguments "-DBUILD_SHARED_LIBS=true" // Build shared object
arguments "-DFMT_TEST=false" // Skip test
arguments "-DFMT_DOC=false" // Skip document
cppFlags "-std=c++17"
targets "fmt"
}
}
println(externalNativeBuild.cmake.cppFlags)
println(externalNativeBuild.cmake.arguments)
}
// External Native build
// - Use the existing CMakeLists.txt
// - Give the path to CMake. This gradle file should be a
//   neighbor of the top-level CMakeLists.txt
externalNativeBuild {
cmake {
version "3.10.0+"
path "${rootDir}/CMakeLists.txt"
// buildStagingDirectory "./build" // Custom path for cmake output
}
}
sourceSets{
// Android Manifest for Gradle
main {
manifest.srcFile "AndroidManifest.xml"
}
}
// https://developer.android.com/studio/build/native-dependencies#build_system_configuration
buildFeatures {
prefab true
prefabPublishing true
}
prefab {
fmt {
headers "${rootDir}/include"
}
}
}
assemble.doLast {
// Instead of `ninja install`, Gradle will deploy the files.
// We are doing this since FMT depends on the ANDROID_STL after the build.
copy {
from "build/intermediates/cmake"
into "${rootDir}/libs"
}
// Copy debug binaries
copy {
from "${rootDir}/libs/debug/obj"
into "${rootDir}/libs/debug"
}
// Copy Release binaries
copy {
from "${rootDir}/libs/release/obj"
into "${rootDir}/libs/release"
}
// Remove empty directory
delete "${rootDir}/libs/debug/obj"
delete "${rootDir}/libs/release/obj"
// Copy AAR files. Notice that the aar is named after the folder of this script.
copy {
from "build/outputs/aar/support-release.aar"
into "${rootDir}/libs"
rename "support-release.aar", "fmt-release.aar"
}
copy {
from "build/outputs/aar/support-debug.aar"
into "${rootDir}/libs"
rename "support-debug.aar", "fmt-debug.aar"
}
}

7
third_party/fmt/support/cmake/FindSetEnv.cmake vendored Normal file

@@ -0,0 +1,7 @@
# A CMake script to find SetEnv.cmd.
find_program(WINSDK_SETENV NAMES SetEnv.cmd
PATHS "[HKEY_LOCAL_MACHINE\\SOFTWARE\\Microsoft\\Microsoft SDKs\\Windows;CurrentInstallFolder]/bin")
if (WINSDK_SETENV AND PRINT_PATH)
execute_process(COMMAND ${CMAKE_COMMAND} -E echo "${WINSDK_SETENV}")
endif ()

26
third_party/fmt/support/cmake/JoinPaths.cmake vendored Normal file

@@ -0,0 +1,26 @@
# This module provides a function for joining paths
# known from most languages
#
# Original license:
# SPDX-License-Identifier: (MIT OR CC0-1.0)
# Explicit permission given to distribute this module under
# the terms of the project as described in /LICENSE.rst.
# Copyright 2020 Jan Tojnar
# https://github.com/jtojnar/cmake-snips
#
# Modelled after Python's os.path.join
# https://docs.python.org/3.7/library/os.path.html#os.path.join
# Windows not supported
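# Example (illustrative):
#   join_paths(out "/usr" "lib" "fmt")   # out = "/usr/lib/fmt"
#   join_paths(out "lib" "/opt/fmt")     # an absolute segment resets: out = "/opt/fmt"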
function(join_paths joined_path first_path_segment)
set(temp_path "${first_path_segment}")
foreach(current_segment IN LISTS ARGN)
if(NOT ("${current_segment}" STREQUAL ""))
if(IS_ABSOLUTE "${current_segment}")
set(temp_path "${current_segment}")
else()
set(temp_path "${temp_path}/${current_segment}")
endif()
endif()
endforeach()
set(${joined_path} "${temp_path}" PARENT_SCOPE)
endfunction()

7
third_party/fmt/support/cmake/fmt-config.cmake.in vendored Normal file

@@ -0,0 +1,7 @@
@PACKAGE_INIT@
if (NOT TARGET fmt::fmt)
include(${CMAKE_CURRENT_LIST_DIR}/@targets_export_name@.cmake)
endif ()
check_required_components(fmt)
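# Typical downstream usage (illustrative sketch):
#   find_package(fmt REQUIRED)
#   target_link_libraries(app PRIVATE fmt::fmt)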

11
third_party/fmt/support/cmake/fmt.pc.in vendored Normal file

@@ -0,0 +1,11 @@
prefix=@CMAKE_INSTALL_PREFIX@
exec_prefix=@CMAKE_INSTALL_PREFIX@
libdir=@libdir_for_pc_file@
includedir=@includedir_for_pc_file@
Name: fmt
Description: A modern formatting library
Version: @FMT_VERSION@
Libs: -L${libdir} -l@FMT_LIB_NAME@
Cflags: -I${includedir}

53
third_party/fmt/support/compute-powers.py vendored Executable file

@@ -0,0 +1,53 @@
#!/usr/bin/env python
# Compute 10 ** exp with exp in the range [min_exponent, max_exponent] and print
# normalized (with most-significant bit equal to 1) significands in hexadecimal.
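# For example (illustrative): exp = 0 gives 10**0 = 1, which is stored as the
# normalized significand 0x8000000000000000 (i.e. 2**63) with binary exponent
# -63, since 2**63 * 2**-63 == 1.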
from __future__ import print_function
min_exponent = -348
max_exponent = 340
step = 8
significand_size = 64
exp_offset = 2000
class fp:
pass
powers = []
for i, exp in enumerate(range(min_exponent, max_exponent + 1, step)):
result = fp()
n = 10 ** exp if exp >= 0 else 2 ** exp_offset / 10 ** -exp
k = significand_size + 1
# Convert to binary and round.
binary = '{:b}'.format(n)
result.f = (int('{:0<{}}'.format(binary[:k], k), 2) + 1) / 2
result.e = len(binary) - (exp_offset if exp < 0 else 0) - significand_size
powers.append(result)
# Sanity check.
exp_offset10 = 400
actual = result.f * 10 ** exp_offset10
if result.e > 0:
actual *= 2 ** result.e
else:
for j in range(-result.e):
actual /= 2
expected = 10 ** (exp_offset10 + exp)
precision = len('{}'.format(expected)) - len('{}'.format(actual - expected))
if precision < 19:
print('low precision:', precision)
exit(1)
print('Significands:', end='')
for i, fp in enumerate(powers):
if i % 3 == 0:
print(end='\n ')
print(' {:0<#16x}'.format(fp.f, ), end=',')
print('\n\nExponents:', end='')
for i, fp in enumerate(powers):
if i % 11 == 0:
print(end='\n ')
print(' {:5}'.format(fp.e), end=',')
print('\n\nMax exponent difference:',
max([x.e - powers[i - 1].e for i, x in enumerate(powers)][1:]))

581
third_party/fmt/support/docopt.py vendored Normal file

@@ -0,0 +1,581 @@
"""Pythonic command-line interface parser that will make you smile.
* http://docopt.org
* Repository and issue-tracker: https://github.com/docopt/docopt
* Licensed under terms of MIT license (see LICENSE-MIT)
* Copyright (c) 2013 Vladimir Keleshev, vladimir@keleshev.com
"""
import sys
import re
__all__ = ['docopt']
__version__ = '0.6.1'
class DocoptLanguageError(Exception):
"""Error in construction of usage-message by developer."""
class DocoptExit(SystemExit):
"""Exit in case user invoked program with incorrect arguments."""
usage = ''
def __init__(self, message=''):
SystemExit.__init__(self, (message + '\n' + self.usage).strip())
class Pattern(object):
def __eq__(self, other):
return repr(self) == repr(other)
def __hash__(self):
return hash(repr(self))
def fix(self):
self.fix_identities()
self.fix_repeating_arguments()
return self
def fix_identities(self, uniq=None):
"""Make pattern-tree tips point to same object if they are equal."""
if not hasattr(self, 'children'):
return self
uniq = list(set(self.flat())) if uniq is None else uniq
for i, child in enumerate(self.children):
if not hasattr(child, 'children'):
assert child in uniq
self.children[i] = uniq[uniq.index(child)]
else:
child.fix_identities(uniq)
def fix_repeating_arguments(self):
"""Fix elements that should accumulate/increment values."""
either = [list(child.children) for child in transform(self).children]
for case in either:
for e in [child for child in case if case.count(child) > 1]:
if type(e) is Argument or type(e) is Option and e.argcount:
if e.value is None:
e.value = []
elif type(e.value) is not list:
e.value = e.value.split()
if type(e) is Command or type(e) is Option and e.argcount == 0:
e.value = 0
return self
def transform(pattern):
"""Expand pattern into an (almost) equivalent one, but with single Either.
Example: ((-a | -b) (-c | -d)) => (-a -c | -a -d | -b -c | -b -d)
Quirks: [-a] => (-a), (-a...) => (-a -a)
"""
result = []
groups = [[pattern]]
while groups:
children = groups.pop(0)
parents = [Required, Optional, OptionsShortcut, Either, OneOrMore]
if any(t in map(type, children) for t in parents):
child = [c for c in children if type(c) in parents][0]
children.remove(child)
if type(child) is Either:
for c in child.children:
groups.append([c] + children)
elif type(child) is OneOrMore:
groups.append(child.children * 2 + children)
else:
groups.append(child.children + children)
else:
result.append(children)
return Either(*[Required(*e) for e in result])
class LeafPattern(Pattern):
"""Leaf/terminal node of a pattern tree."""
def __init__(self, name, value=None):
self.name, self.value = name, value
def __repr__(self):
return '%s(%r, %r)' % (self.__class__.__name__, self.name, self.value)
def flat(self, *types):
return [self] if not types or type(self) in types else []
def match(self, left, collected=None):
collected = [] if collected is None else collected
pos, match = self.single_match(left)
if match is None:
return False, left, collected
left_ = left[:pos] + left[pos + 1:]
same_name = [a for a in collected if a.name == self.name]
if type(self.value) in (int, list):
if type(self.value) is int:
increment = 1
else:
increment = ([match.value] if type(match.value) is str
else match.value)
if not same_name:
match.value = increment
return True, left_, collected + [match]
same_name[0].value += increment
return True, left_, collected
return True, left_, collected + [match]
class BranchPattern(Pattern):
"""Branch/inner node of a pattern tree."""
def __init__(self, *children):
self.children = list(children)
def __repr__(self):
return '%s(%s)' % (self.__class__.__name__,
', '.join(repr(a) for a in self.children))
def flat(self, *types):
if type(self) in types:
return [self]
return sum([child.flat(*types) for child in self.children], [])
class Argument(LeafPattern):
def single_match(self, left):
for n, pattern in enumerate(left):
if type(pattern) is Argument:
return n, Argument(self.name, pattern.value)
return None, None
@classmethod
def parse(class_, source):
name = re.findall('(<\S*?>)', source)[0]
value = re.findall('\[default: (.*)\]', source, flags=re.I)
return class_(name, value[0] if value else None)
class Command(Argument):
def __init__(self, name, value=False):
self.name, self.value = name, value
def single_match(self, left):
for n, pattern in enumerate(left):
if type(pattern) is Argument:
if pattern.value == self.name:
return n, Command(self.name, True)
else:
break
return None, None
class Option(LeafPattern):
def __init__(self, short=None, long=None, argcount=0, value=False):
assert argcount in (0, 1)
self.short, self.long, self.argcount = short, long, argcount
self.value = None if value is False and argcount else value
@classmethod
def parse(class_, option_description):
short, long, argcount, value = None, None, 0, False
options, _, description = option_description.strip().partition(' ')
options = options.replace(',', ' ').replace('=', ' ')
for s in options.split():
if s.startswith('--'):
long = s
elif s.startswith('-'):
short = s
else:
argcount = 1
if argcount:
matched = re.findall('\[default: (.*)\]', description, flags=re.I)
value = matched[0] if matched else None
return class_(short, long, argcount, value)
def single_match(self, left):
for n, pattern in enumerate(left):
if self.name == pattern.name:
return n, pattern
return None, None
@property
def name(self):
return self.long or self.short
def __repr__(self):
return 'Option(%r, %r, %r, %r)' % (self.short, self.long,
self.argcount, self.value)
class Required(BranchPattern):
def match(self, left, collected=None):
collected = [] if collected is None else collected
l = left
c = collected
for pattern in self.children:
matched, l, c = pattern.match(l, c)
if not matched:
return False, left, collected
return True, l, c
class Optional(BranchPattern):
def match(self, left, collected=None):
collected = [] if collected is None else collected
for pattern in self.children:
m, left, collected = pattern.match(left, collected)
return True, left, collected
class OptionsShortcut(Optional):
"""Marker/placeholder for [options] shortcut."""
class OneOrMore(BranchPattern):
def match(self, left, collected=None):
assert len(self.children) == 1
collected = [] if collected is None else collected
l = left
c = collected
l_ = None
matched = True
times = 0
while matched:
# could it be that something didn't match but changed l or c?
matched, l, c = self.children[0].match(l, c)
times += 1 if matched else 0
if l_ == l:
break
l_ = l
if times >= 1:
return True, l, c
return False, left, collected
class Either(BranchPattern):
def match(self, left, collected=None):
collected = [] if collected is None else collected
outcomes = []
for pattern in self.children:
matched, _, _ = outcome = pattern.match(left, collected)
if matched:
outcomes.append(outcome)
if outcomes:
return min(outcomes, key=lambda outcome: len(outcome[1]))
return False, left, collected
class Tokens(list):
def __init__(self, source, error=DocoptExit):
self += source.split() if hasattr(source, 'split') else source
self.error = error
@staticmethod
def from_pattern(source):
source = re.sub(r'([\[\]\(\)\|]|\.\.\.)', r' \1 ', source)
source = [s for s in re.split('\s+|(\S*<.*?>)', source) if s]
return Tokens(source, error=DocoptLanguageError)
def move(self):
return self.pop(0) if len(self) else None
def current(self):
return self[0] if len(self) else None
def parse_long(tokens, options):
"""long ::= '--' chars [ ( ' ' | '=' ) chars ] ;"""
long, eq, value = tokens.move().partition('=')
assert long.startswith('--')
value = None if eq == value == '' else value
similar = [o for o in options if o.long == long]
if tokens.error is DocoptExit and similar == []: # if no exact match
similar = [o for o in options if o.long and o.long.startswith(long)]
if len(similar) > 1: # might be simply specified ambiguously 2+ times?
raise tokens.error('%s is not a unique prefix: %s?' %
(long, ', '.join(o.long for o in similar)))
elif len(similar) < 1:
argcount = 1 if eq == '=' else 0
o = Option(None, long, argcount)
options.append(o)
if tokens.error is DocoptExit:
o = Option(None, long, argcount, value if argcount else True)
else:
o = Option(similar[0].short, similar[0].long,
similar[0].argcount, similar[0].value)
if o.argcount == 0:
if value is not None:
raise tokens.error('%s must not have an argument' % o.long)
else:
if value is None:
if tokens.current() in [None, '--']:
raise tokens.error('%s requires argument' % o.long)
value = tokens.move()
if tokens.error is DocoptExit:
o.value = value if value is not None else True
return [o]
def parse_shorts(tokens, options):
"""shorts ::= '-' ( chars )* [ [ ' ' ] chars ] ;"""
token = tokens.move()
assert token.startswith('-') and not token.startswith('--')
left = token.lstrip('-')
parsed = []
while left != '':
short, left = '-' + left[0], left[1:]
similar = [o for o in options if o.short == short]
if len(similar) > 1:
raise tokens.error('%s is specified ambiguously %d times' %
(short, len(similar)))
elif len(similar) < 1:
o = Option(short, None, 0)
options.append(o)
if tokens.error is DocoptExit:
o = Option(short, None, 0, True)
else: # why copying is necessary here?
o = Option(short, similar[0].long,
similar[0].argcount, similar[0].value)
value = None
if o.argcount != 0:
if left == '':
if tokens.current() in [None, '--']:
raise tokens.error('%s requires argument' % short)
value = tokens.move()
else:
value = left
left = ''
if tokens.error is DocoptExit:
o.value = value if value is not None else True
parsed.append(o)
return parsed
def parse_pattern(source, options):
tokens = Tokens.from_pattern(source)
result = parse_expr(tokens, options)
if tokens.current() is not None:
raise tokens.error('unexpected ending: %r' % ' '.join(tokens))
return Required(*result)
def parse_expr(tokens, options):
"""expr ::= seq ( '|' seq )* ;"""
seq = parse_seq(tokens, options)
if tokens.current() != '|':
return seq
result = [Required(*seq)] if len(seq) > 1 else seq
while tokens.current() == '|':
tokens.move()
seq = parse_seq(tokens, options)
result += [Required(*seq)] if len(seq) > 1 else seq
return [Either(*result)] if len(result) > 1 else result
def parse_seq(tokens, options):
"""seq ::= ( atom [ '...' ] )* ;"""
result = []
while tokens.current() not in [None, ']', ')', '|']:
atom = parse_atom(tokens, options)
if tokens.current() == '...':
atom = [OneOrMore(*atom)]
tokens.move()
result += atom
return result
def parse_atom(tokens, options):
"""atom ::= '(' expr ')' | '[' expr ']' | 'options'
| long | shorts | argument | command ;
"""
token = tokens.current()
result = []
if token in '([':
tokens.move()
matching, pattern = {'(': [')', Required], '[': [']', Optional]}[token]
result = pattern(*parse_expr(tokens, options))
if tokens.move() != matching:
raise tokens.error("unmatched '%s'" % token)
return [result]
elif token == 'options':
tokens.move()
return [OptionsShortcut()]
elif token.startswith('--') and token != '--':
return parse_long(tokens, options)
elif token.startswith('-') and token not in ('-', '--'):
return parse_shorts(tokens, options)
elif token.startswith('<') and token.endswith('>') or token.isupper():
return [Argument(tokens.move())]
else:
return [Command(tokens.move())]
def parse_argv(tokens, options, options_first=False):
"""Parse command-line argument vector.
If options_first:
argv ::= [ long | shorts ]* [ argument ]* [ '--' [ argument ]* ] ;
else:
argv ::= [ long | shorts | argument ]* [ '--' [ argument ]* ] ;
"""
parsed = []
while tokens.current() is not None:
if tokens.current() == '--':
return parsed + [Argument(None, v) for v in tokens]
elif tokens.current().startswith('--'):
parsed += parse_long(tokens, options)
elif tokens.current().startswith('-') and tokens.current() != '-':
parsed += parse_shorts(tokens, options)
elif options_first:
return parsed + [Argument(None, v) for v in tokens]
else:
parsed.append(Argument(None, tokens.move()))
return parsed
def parse_defaults(doc):
defaults = []
for s in parse_section('options:', doc):
# FIXME corner case "bla: options: --foo"
_, _, s = s.partition(':') # get rid of "options:"
split = re.split('\n[ \t]*(-\S+?)', '\n' + s)[1:]
split = [s1 + s2 for s1, s2 in zip(split[::2], split[1::2])]
options = [Option.parse(s) for s in split if s.startswith('-')]
defaults += options
return defaults
def parse_section(name, source):
pattern = re.compile('^([^\n]*' + name + '[^\n]*\n?(?:[ \t].*?(?:\n|$))*)',
re.IGNORECASE | re.MULTILINE)
return [s.strip() for s in pattern.findall(source)]
def formal_usage(section):
_, _, section = section.partition(':') # drop "usage:"
pu = section.split()
return '( ' + ' '.join(') | (' if s == pu[0] else s for s in pu[1:]) + ' )'
def extras(help, version, options, doc):
if help and any((o.name in ('-h', '--help')) and o.value for o in options):
print(doc.strip("\n"))
sys.exit()
if version and any(o.name == '--version' and o.value for o in options):
print(version)
sys.exit()
class Dict(dict):
def __repr__(self):
return '{%s}' % ',\n '.join('%r: %r' % i for i in sorted(self.items()))
def docopt(doc, argv=None, help=True, version=None, options_first=False):
"""Parse `argv` based on command-line interface described in `doc`.
`docopt` creates your command-line interface based on its
description that you pass as `doc`. Such description can contain
--options, <positional-argument>, commands, which could be
[optional], (required), (mutually | exclusive) or repeated...
Parameters
----------
doc : str
Description of your command-line interface.
argv : list of str, optional
Argument vector to be parsed. sys.argv[1:] is used if not
provided.
help : bool (default: True)
Set to False to disable automatic help on -h or --help
options.
version : any object
If passed, the object will be printed if --version is in
`argv`.
options_first : bool (default: False)
Set to True to require options precede positional arguments,
i.e. to forbid options and positional arguments intermix.
Returns
-------
args : dict
A dictionary, where keys are names of command-line elements
such as e.g. "--verbose" and "<path>", and values are the
parsed values of those elements.
Example
-------
>>> from docopt import docopt
>>> doc = '''
... Usage:
... my_program tcp <host> <port> [--timeout=<seconds>]
... my_program serial <port> [--baud=<n>] [--timeout=<seconds>]
... my_program (-h | --help | --version)
...
... Options:
... -h, --help Show this screen and exit.
... --baud=<n> Baudrate [default: 9600]
... '''
>>> argv = ['tcp', '127.0.0.1', '80', '--timeout', '30']
>>> docopt(doc, argv)
{'--baud': '9600',
'--help': False,
'--timeout': '30',
'--version': False,
'<host>': '127.0.0.1',
'<port>': '80',
'serial': False,
'tcp': True}
See also
--------
* For video introduction see http://docopt.org
* Full documentation is available in README.rst as well as online
at https://github.com/docopt/docopt#readme
"""
argv = sys.argv[1:] if argv is None else argv
usage_sections = parse_section('usage:', doc)
if len(usage_sections) == 0:
raise DocoptLanguageError('"usage:" (case-insensitive) not found.')
if len(usage_sections) > 1:
raise DocoptLanguageError('More than one "usage:" (case-insensitive).')
DocoptExit.usage = usage_sections[0]
options = parse_defaults(doc)
pattern = parse_pattern(formal_usage(DocoptExit.usage), options)
# [default] syntax for argument is disabled
#for a in pattern.flat(Argument):
# same_name = [d for d in arguments if d.name == a.name]
# if same_name:
# a.value = same_name[0].value
argv = parse_argv(Tokens(argv), list(options), options_first)
pattern_options = set(pattern.flat(Option))
for options_shortcut in pattern.flat(OptionsShortcut):
doc_options = parse_defaults(doc)
options_shortcut.children = list(set(doc_options) - pattern_options)
#if any_options:
# options_shortcut.children += [Option(o.short, o.long, o.argcount)
# for o in argv if type(o) is Option]
extras(help, version, argv, doc)
matched, left, collected = pattern.fix().match(argv)
if matched and left == []: # better error message if left?
return Dict((a.name, a.value) for a in (pattern.flat() + collected))
raise DocoptExit()

329
third_party/fmt/support/manage.py vendored Executable file

@@ -0,0 +1,329 @@
#!/usr/bin/env python3
"""Manage site and releases.
Usage:
manage.py release [<branch>]
manage.py site
For the release command $FMT_TOKEN should contain a GitHub personal access token
obtained from https://github.com/settings/tokens.
"""
from __future__ import print_function
import datetime, docopt, errno, fileinput, json, os
import re, requests, shutil, sys
from contextlib import contextmanager
from distutils.version import LooseVersion
from subprocess import check_call
class Git:
def __init__(self, dir):
self.dir = dir
def call(self, method, args, **kwargs):
return check_call(['git', method] + list(args), **kwargs)
def add(self, *args):
return self.call('add', args, cwd=self.dir)
def checkout(self, *args):
return self.call('checkout', args, cwd=self.dir)
def clean(self, *args):
return self.call('clean', args, cwd=self.dir)
def clone(self, *args):
return self.call('clone', list(args) + [self.dir])
def commit(self, *args):
return self.call('commit', args, cwd=self.dir)
def pull(self, *args):
return self.call('pull', args, cwd=self.dir)
def push(self, *args):
return self.call('push', args, cwd=self.dir)
def reset(self, *args):
return self.call('reset', args, cwd=self.dir)
def update(self, *args):
clone = not os.path.exists(self.dir)
if clone:
self.clone(*args)
return clone
def clean_checkout(repo, branch):
repo.clean('-f', '-d')
repo.reset('--hard')
repo.checkout(branch)
class Runner:
def __init__(self, cwd):
self.cwd = cwd
def __call__(self, *args, **kwargs):
kwargs['cwd'] = kwargs.get('cwd', self.cwd)
check_call(args, **kwargs)
def create_build_env():
"""Create a build environment."""
class Env:
pass
env = Env()
# Import the documentation build module.
env.fmt_dir = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
sys.path.insert(0, os.path.join(env.fmt_dir, 'doc'))
import build
env.build_dir = 'build'
env.versions = build.versions
# Virtualenv and repos are cached to speed up builds.
build.create_build_env(os.path.join(env.build_dir, 'virtualenv'))
env.fmt_repo = Git(os.path.join(env.build_dir, 'fmt'))
return env
@contextmanager
def rewrite(filename):
class Buffer:
pass
buffer = Buffer()
if not os.path.exists(filename):
buffer.data = ''
yield buffer
return
with open(filename) as f:
buffer.data = f.read()
yield buffer
with open(filename, 'w') as f:
f.write(buffer.data)
fmt_repo_url = 'git@github.com:fmtlib/fmt'
def update_site(env):
env.fmt_repo.update(fmt_repo_url)
doc_repo = Git(os.path.join(env.build_dir, 'fmtlib.github.io'))
doc_repo.update('git@github.com:fmtlib/fmtlib.github.io')
for version in env.versions:
clean_checkout(env.fmt_repo, version)
target_doc_dir = os.path.join(env.fmt_repo.dir, 'doc')
# Remove the old theme.
for entry in os.listdir(target_doc_dir):
path = os.path.join(target_doc_dir, entry)
if os.path.isdir(path):
shutil.rmtree(path)
# Copy the new theme.
for entry in ['_static', '_templates', 'basic-bootstrap', 'bootstrap',
'conf.py', 'fmt.less']:
src = os.path.join(env.fmt_dir, 'doc', entry)
dst = os.path.join(target_doc_dir, entry)
copy = shutil.copytree if os.path.isdir(src) else shutil.copyfile
copy(src, dst)
# Rename index to contents.
contents = os.path.join(target_doc_dir, 'contents.rst')
if not os.path.exists(contents):
os.rename(os.path.join(target_doc_dir, 'index.rst'), contents)
# Fix issues in reference.rst/api.rst.
for filename in ['reference.rst', 'api.rst', 'index.rst']:
pattern = re.compile('doxygenfunction.. (bin|oct|hexu|hex)$', re.M)
with rewrite(os.path.join(target_doc_dir, filename)) as b:
b.data = b.data.replace('std::ostream &', 'std::ostream&')
b.data = re.sub(pattern, r'doxygenfunction:: \1(int)', b.data)
b.data = b.data.replace('std::FILE*', 'std::FILE *')
b.data = b.data.replace('unsigned int', 'unsigned')
#b.data = b.data.replace('operator""_', 'operator"" _')
b.data = b.data.replace(
'format_to_n(OutputIt, size_t, string_view, Args&&',
'format_to_n(OutputIt, size_t, const S&, const Args&')
b.data = b.data.replace(
'format_to_n(OutputIt, std::size_t, string_view, Args&&',
'format_to_n(OutputIt, std::size_t, const S&, const Args&')
if version == ('3.0.2'):
b.data = b.data.replace(
'fprintf(std::ostream&', 'fprintf(std::ostream &')
if version == ('5.3.0'):
b.data = b.data.replace(
'format_to(OutputIt, const S&, const Args&...)',
'format_to(OutputIt, const S &, const Args &...)')
if version.startswith('5.') or version.startswith('6.'):
b.data = b.data.replace(', size_t', ', std::size_t')
if version.startswith('7.'):
b.data = b.data.replace(', std::size_t', ', size_t')
b.data = b.data.replace('join(It, It', 'join(It, Sentinel')
if version.startswith('7.1.'):
b.data = b.data.replace(', std::size_t', ', size_t')
b.data = b.data.replace('join(It, It', 'join(It, Sentinel')
b.data = b.data.replace(
'fmt::format_to(OutputIt, const S&, Args&&...)',
'fmt::format_to(OutputIt, const S&, Args&&...) -> ' +
'typename std::enable_if<enable, OutputIt>::type')
b.data = b.data.replace('aa long', 'a long')
b.data = b.data.replace('serveral', 'several')
if version.startswith('6.2.'):
b.data = b.data.replace(
'vformat(const S&, basic_format_args<' +
'buffer_context<Char>>)',
'vformat(const S&, basic_format_args<' +
'buffer_context<type_identity_t<Char>>>)')
# Fix a broken link in index.rst.
index = os.path.join(target_doc_dir, 'index.rst')
with rewrite(index) as b:
b.data = b.data.replace(
'doc/latest/index.html#format-string-syntax', 'syntax.html')
# Fix issues in syntax.rst.
index = os.path.join(target_doc_dir, 'syntax.rst')
with rewrite(index) as b:
b.data = b.data.replace(
'..productionlist:: sf\n', '.. productionlist:: sf\n ')
b.data = b.data.replace('Examples:\n', 'Examples::\n')
# Build the docs.
html_dir = os.path.join(env.build_dir, 'html')
if os.path.exists(html_dir):
shutil.rmtree(html_dir)
include_dir = env.fmt_repo.dir
if LooseVersion(version) >= LooseVersion('5.0.0'):
include_dir = os.path.join(include_dir, 'include', 'fmt')
elif LooseVersion(version) >= LooseVersion('3.0.0'):
include_dir = os.path.join(include_dir, 'fmt')
import build
build.build_docs(version, doc_dir=target_doc_dir,
include_dir=include_dir, work_dir=env.build_dir)
shutil.rmtree(os.path.join(html_dir, '.doctrees'))
# Create symlinks for older versions.
for link, target in {'index': 'contents', 'api': 'reference'}.items():
link = os.path.join(html_dir, link) + '.html'
target += '.html'
if os.path.exists(os.path.join(html_dir, target)) and \
not os.path.exists(link):
os.symlink(target, link)
# Copy docs to the website.
version_doc_dir = os.path.join(doc_repo.dir, version)
try:
shutil.rmtree(version_doc_dir)
except OSError as e:
if e.errno != errno.ENOENT:
raise
shutil.move(html_dir, version_doc_dir)
def release(args):
env = create_build_env()
fmt_repo = env.fmt_repo
branch = args.get('<branch>')
if branch is None:
branch = 'master'
if not fmt_repo.update('-b', branch, fmt_repo_url):
clean_checkout(fmt_repo, branch)
# Update the date in the changelog and extract the version and the first
# section content.
changelog = 'ChangeLog.md'
changelog_path = os.path.join(fmt_repo.dir, changelog)
is_first_section = True
first_section = []
for i, line in enumerate(fileinput.input(changelog_path, inplace=True)):
if i == 0:
version = re.match(r'# (.*) - TBD', line).group(1)
line = '# {} - {}\n'.format(
version, datetime.date.today().isoformat())
elif not is_first_section:
pass
elif line.startswith('#'):
is_first_section = False
else:
first_section.append(line)
sys.stdout.write(line)
if first_section[0] == '\n':
first_section.pop(0)
changes = ''
code_block = False
stripped = False
for line in first_section:
if re.match(r'^\s*```', line):
code_block = not code_block
changes += line
stripped = False
continue
if code_block:
changes += line
continue
if line == '\n':
changes += line
if stripped:
changes += line
stripped = False
continue
if stripped:
line = ' ' + line.lstrip()
changes += line.rstrip()
stripped = True
cmakelists = 'CMakeLists.txt'
for line in fileinput.input(os.path.join(fmt_repo.dir, cmakelists),
inplace=True):
prefix = 'set(FMT_VERSION '
if line.startswith(prefix):
line = prefix + version + ')\n'
sys.stdout.write(line)
# Add the version to the build script.
script = os.path.join('doc', 'build.py')
script_path = os.path.join(fmt_repo.dir, script)
for line in fileinput.input(script_path, inplace=True):
m = re.match(r'( *versions \+= )\[(.+)\]', line)
if m:
line = '{}[{}, \'{}\']\n'.format(m.group(1), m.group(2), version)
sys.stdout.write(line)
fmt_repo.checkout('-B', 'release')
fmt_repo.add(changelog, cmakelists, script)
fmt_repo.commit('-m', 'Update version')
# Build the docs and package.
run = Runner(fmt_repo.dir)
run('cmake', '.')
run('make', 'doc', 'package_source')
update_site(env)
# Create a release on GitHub.
fmt_repo.push('origin', 'release')
auth_headers = {'Authorization': 'token ' + os.getenv('FMT_TOKEN')}
r = requests.post('https://api.github.com/repos/fmtlib/fmt/releases',
headers=auth_headers,
data=json.dumps({'tag_name': version,
'target_commitish': 'release',
'body': changes, 'draft': True}))
if r.status_code != 201:
raise Exception('Failed to create a release ' + str(r))
id = r.json()['id']
uploads_url = 'https://uploads.github.com/repos/fmtlib/fmt/releases'
package = 'fmt-{}.zip'.format(version)
r = requests.post(
'{}/{}/assets?name={}'.format(uploads_url, id, package),
headers={'Content-Type': 'application/zip'} | auth_headers,
data=open('build/fmt/' + package, 'rb'))
if r.status_code != 201:
raise Exception('Failed to upload an asset ' + str(r))
if __name__ == '__main__':
args = docopt.docopt(__doc__)
if args.get('release'):
release(args)
elif args.get('site'):
update_site(create_build_env())

201
third_party/fmt/support/printable.py vendored Executable file

@@ -0,0 +1,201 @@
#!/usr/bin/env python3
# This script is based on
# https://github.com/rust-lang/rust/blob/master/library/core/src/unicode/printable.py
# distributed under https://github.com/rust-lang/rust/blob/master/LICENSE-MIT.
# This script uses the following Unicode tables:
# - UnicodeData.txt
from collections import namedtuple
import csv
import os
import subprocess
NUM_CODEPOINTS=0x110000
def to_ranges(iter):
current = None
for i in iter:
if current is None or i != current[1] or i in (0x10000, 0x20000):
if current is not None:
yield tuple(current)
current = [i, i + 1]
else:
current[1] += 1
if current is not None:
yield tuple(current)
def get_escaped(codepoints):
for c in codepoints:
if (c.class_ or "Cn") in "Cc Cf Cs Co Cn Zl Zp Zs".split() and c.value != ord(' '):
yield c.value
def get_file(f):
try:
return open(os.path.basename(f))
except FileNotFoundError:
subprocess.run(["curl", "-O", f], check=True)
return open(os.path.basename(f))
Codepoint = namedtuple('Codepoint', 'value class_')
def get_codepoints(f):
r = csv.reader(f, delimiter=";")
prev_codepoint = 0
class_first = None
for row in r:
codepoint = int(row[0], 16)
name = row[1]
class_ = row[2]
if class_first is not None:
if not name.endswith("Last>"):
raise ValueError("Missing Last after First")
for c in range(prev_codepoint + 1, codepoint):
yield Codepoint(c, class_first)
class_first = None
if name.endswith("First>"):
class_first = class_
yield Codepoint(codepoint, class_)
prev_codepoint = codepoint
if class_first is not None:
raise ValueError("Missing Last after First")
for c in range(prev_codepoint + 1, NUM_CODEPOINTS):
yield Codepoint(c, None)
def compress_singletons(singletons):
uppers = [] # (upper, # items in lowers)
lowers = []
for i in singletons:
upper = i >> 8
lower = i & 0xff
if len(uppers) == 0 or uppers[-1][0] != upper:
uppers.append((upper, 1))
else:
upper, count = uppers[-1]
uppers[-1] = upper, count + 1
lowers.append(lower)
return uppers, lowers
def compress_normal(normal):
# lengths 0x00..0x7f are encoded as 00, 01, ..., 7e, 7f
# lengths 0x80..0x7fff are encoded as 80 80, 80 81, ..., ff fe, ff ff
compressed = [] # [truelen, (truelenaux), falselen, (falselenaux)]
prev_start = 0
for start, count in normal:
truelen = start - prev_start
falselen = count
prev_start = start + count
assert truelen < 0x8000 and falselen < 0x8000
entry = []
if truelen > 0x7f:
entry.append(0x80 | (truelen >> 8))
entry.append(truelen & 0xff)
else:
entry.append(truelen & 0x7f)
if falselen > 0x7f:
entry.append(0x80 | (falselen >> 8))
entry.append(falselen & 0xff)
else:
entry.append(falselen & 0x7f)
compressed.append(entry)
return compressed
def print_singletons(uppers, lowers, uppersname, lowersname):
print(" static constexpr singleton {}[] = {{".format(uppersname))
for u, c in uppers:
print(" {{{:#04x}, {}}},".format(u, c))
print(" };")
print(" static constexpr unsigned char {}[] = {{".format(lowersname))
for i in range(0, len(lowers), 8):
print(" {}".format(" ".join("{:#04x},".format(l) for l in lowers[i:i+8])))
print(" };")
def print_normal(normal, normalname):
print(" static constexpr unsigned char {}[] = {{".format(normalname))
for v in normal:
print(" {}".format(" ".join("{:#04x},".format(i) for i in v)))
print(" };")
def main():
file = get_file("https://www.unicode.org/Public/UNIDATA/UnicodeData.txt")
codepoints = get_codepoints(file)
CUTOFF=0x10000
singletons0 = []
singletons1 = []
normal0 = []
normal1 = []
extra = []
for a, b in to_ranges(get_escaped(codepoints)):
if a > 2 * CUTOFF:
extra.append((a, b - a))
elif a == b - 1:
if a & CUTOFF:
singletons1.append(a & ~CUTOFF)
else:
singletons0.append(a)
elif a == b - 2:
if a & CUTOFF:
singletons1.append(a & ~CUTOFF)
singletons1.append((a + 1) & ~CUTOFF)
else:
singletons0.append(a)
singletons0.append(a + 1)
else:
if a >= 2 * CUTOFF:
extra.append((a, b - a))
elif a & CUTOFF:
normal1.append((a & ~CUTOFF, b - a))
else:
normal0.append((a, b - a))
singletons0u, singletons0l = compress_singletons(singletons0)
singletons1u, singletons1l = compress_singletons(singletons1)
normal0 = compress_normal(normal0)
normal1 = compress_normal(normal1)
print("""\
FMT_FUNC auto is_printable(uint32_t cp) -> bool {\
""")
print_singletons(singletons0u, singletons0l, 'singletons0', 'singletons0_lower')
print_singletons(singletons1u, singletons1l, 'singletons1', 'singletons1_lower')
print_normal(normal0, 'normal0')
print_normal(normal1, 'normal1')
print("""\
auto lower = static_cast<uint16_t>(cp);
if (cp < 0x10000) {
return is_printable(lower, singletons0,
sizeof(singletons0) / sizeof(*singletons0),
singletons0_lower, normal0, sizeof(normal0));
}
if (cp < 0x20000) {
return is_printable(lower, singletons1,
sizeof(singletons1) / sizeof(*singletons1),
singletons1_lower, normal1, sizeof(normal1));
}\
""")
for a, b in extra:
print(" if (0x{:x} <= cp && cp < 0x{:x}) return false;".format(a, a + b))
print("""\
return cp < 0x{:x};
}}\
""".format(NUM_CODEPOINTS))
if __name__ == '__main__':
main()

7
third_party/fmt/support/rtd/conf.py vendored Normal file

@@ -0,0 +1,7 @@
# Sphinx configuration for readthedocs.
import os, sys
master_doc = 'index'
html_theme = 'theme'
html_theme_path = ["."]

2
third_party/fmt/support/rtd/index.rst vendored Normal file

@@ -0,0 +1,2 @@
If you are not redirected automatically, follow the
`link to the fmt documentation <https://fmt.dev/latest/>`_.

17
third_party/fmt/support/rtd/theme/layout.html vendored Normal file

@@ -0,0 +1,17 @@
{% extends "basic/layout.html" %}
{% block extrahead %}
<meta charset="UTF-8">
<meta http-equiv="refresh" content="1;url=https://fmt.dev/latest/">
<script type="text/javascript">
window.location.href = "https://fmt.dev/latest/"
</script>
<title>Page Redirection</title>
{% endblock %}
{% block document %}
If you are not redirected automatically, follow the <a href='https://fmt.dev/latest/'>link to the fmt documentation</a>.
{% endblock %}
{% block footer %}
{% endblock %}

2
third_party/fmt/support/rtd/theme/theme.conf vendored Normal file

@@ -0,0 +1,2 @@
[theme]
inherit = basic