mirror of https://github.com/intel/intel-device-plugins-for-kubernetes.git
synced 2025-06-03 03:59:37 +00:00

Merge pull request #20 from rojkov/fpga-admission-controller

FPGA webhook for admission controller

Commit 418eef2694

Gopkg.lock (generated, 295 changed lines)
@ -1,9 +1,20 @@
|
||||
# This file is autogenerated, do not edit; changes may be undone by the next 'dep ensure'.
|
||||
|
||||
|
||||
[[projects]]
|
||||
name = "github.com/ghodss/yaml"
|
||||
packages = ["."]
|
||||
revision = "0ca9ea5df5451ffdf184b4428c902747c2c11cd7"
|
||||
version = "v1.0.0"
|
||||
|
||||
[[projects]]
|
||||
name = "github.com/gogo/protobuf"
|
||||
packages = ["gogoproto","proto","protoc-gen-gogo/descriptor","sortkeys"]
|
||||
packages = [
|
||||
"gogoproto",
|
||||
"proto",
|
||||
"protoc-gen-gogo/descriptor",
|
||||
"sortkeys"
|
||||
]
|
||||
revision = "342cbe0a04158f6dcb03ca0079991a51a4248c02"
|
||||
version = "v0.5"
|
||||
|
||||
@ -16,21 +27,123 @@
|
||||
[[projects]]
|
||||
branch = "master"
|
||||
name = "github.com/golang/protobuf"
|
||||
packages = ["proto","ptypes","ptypes/any","ptypes/duration","ptypes/timestamp"]
|
||||
packages = [
|
||||
"proto",
|
||||
"ptypes",
|
||||
"ptypes/any",
|
||||
"ptypes/duration",
|
||||
"ptypes/timestamp"
|
||||
]
|
||||
revision = "1e59b77b52bf8e4b449a57e6f79f21226d571845"
|
||||
|
||||
[[projects]]
|
||||
branch = "master"
|
||||
name = "github.com/google/gofuzz"
|
||||
packages = ["."]
|
||||
revision = "24818f796faf91cd76ec7bddd72458fbced7a6c1"
|
||||
|
||||
[[projects]]
|
||||
name = "github.com/googleapis/gnostic"
|
||||
packages = [
|
||||
"OpenAPIv2",
|
||||
"compiler",
|
||||
"extensions"
|
||||
]
|
||||
revision = "7c663266750e7d82587642f65e60bc4083f1f84e"
|
||||
version = "v0.2.0"
|
||||
|
||||
[[projects]]
|
||||
branch = "master"
|
||||
name = "github.com/howeyc/gopass"
|
||||
packages = ["."]
|
||||
revision = "bf9dde6d0d2c004a008c27aaee91170c786f6db8"
|
||||
|
||||
[[projects]]
|
||||
name = "github.com/imdario/mergo"
|
||||
packages = ["."]
|
||||
revision = "9d5f1277e9a8ed20c3684bda8fde67c05628518c"
|
||||
version = "v0.3.4"
|
||||
|
||||
[[projects]]
|
||||
name = "github.com/json-iterator/go"
|
||||
packages = ["."]
|
||||
revision = "ca39e5af3ece67bbcda3d0f4f56a8e24d9f2dad4"
|
||||
version = "1.1.3"
|
||||
|
||||
[[projects]]
|
||||
name = "github.com/modern-go/concurrent"
|
||||
packages = ["."]
|
||||
revision = "bacd9c7ef1dd9b15be4a9909b8ac7a4e313eec94"
|
||||
version = "1.0.3"
|
||||
|
||||
[[projects]]
|
||||
name = "github.com/modern-go/reflect2"
|
||||
packages = ["."]
|
||||
revision = "1df9eeb2bb81f327b96228865c5687bc2194af3f"
|
||||
version = "1.0.0"
|
||||
|
||||
[[projects]]
|
||||
name = "github.com/spf13/pflag"
|
||||
packages = ["."]
|
||||
revision = "583c0c0531f06d5278b7d917446061adc344b5cd"
|
||||
version = "v1.0.1"
|
||||
|
||||
[[projects]]
|
||||
branch = "master"
|
||||
name = "golang.org/x/crypto"
|
||||
packages = ["ssh/terminal"]
|
||||
revision = "df8d4716b3472e4a531c33cedbe537dae921a1a9"
|
||||
|
||||
[[projects]]
|
||||
branch = "master"
|
||||
name = "golang.org/x/net"
|
||||
packages = ["context","http2","http2/hpack","idna","internal/timeseries","lex/httplex","trace"]
|
||||
packages = [
|
||||
"context",
|
||||
"http2",
|
||||
"http2/hpack",
|
||||
"idna",
|
||||
"internal/timeseries",
|
||||
"lex/httplex",
|
||||
"trace"
|
||||
]
|
||||
revision = "5ccada7d0a7ba9aeb5d3aca8d3501b4c2a509fec"
|
||||
|
||||
[[projects]]
|
||||
branch = "master"
|
||||
name = "golang.org/x/sys"
|
||||
packages = [
|
||||
"unix",
|
||||
"windows"
|
||||
]
|
||||
revision = "c11f84a56e43e20a78cee75a7c034031ecf57d1f"
|
||||
|
||||
[[projects]]
|
||||
branch = "master"
|
||||
name = "golang.org/x/text"
|
||||
packages = ["collate","collate/build","internal/colltab","internal/gen","internal/tag","internal/triegen","internal/ucd","language","secure/bidirule","transform","unicode/bidi","unicode/cldr","unicode/norm","unicode/rangetable"]
|
||||
packages = [
|
||||
"collate",
|
||||
"collate/build",
|
||||
"internal/colltab",
|
||||
"internal/gen",
|
||||
"internal/tag",
|
||||
"internal/triegen",
|
||||
"internal/ucd",
|
||||
"language",
|
||||
"secure/bidirule",
|
||||
"transform",
|
||||
"unicode/bidi",
|
||||
"unicode/cldr",
|
||||
"unicode/norm",
|
||||
"unicode/rangetable"
|
||||
]
|
||||
revision = "e19ae1496984b1c655b8044a65c0300a3c878dd3"
|
||||
|
||||
[[projects]]
|
||||
branch = "master"
|
||||
name = "golang.org/x/time"
|
||||
packages = ["rate"]
|
||||
revision = "fbb02b2291d28baffd63558aa44b4b56f178d650"
|
||||
|
||||
[[projects]]
|
||||
branch = "master"
|
||||
name = "google.golang.org/genproto"
|
||||
@ -39,10 +152,180 @@
|
||||
|
||||
[[projects]]
|
||||
name = "google.golang.org/grpc"
|
||||
packages = [".","balancer","balancer/base","balancer/roundrobin","codes","connectivity","credentials","encoding","encoding/proto","grpclb/grpc_lb_v1/messages","grpclog","internal","keepalive","metadata","naming","peer","resolver","resolver/dns","resolver/passthrough","stats","status","tap","transport"]
|
||||
packages = [
|
||||
".",
|
||||
"balancer",
|
||||
"balancer/base",
|
||||
"balancer/roundrobin",
|
||||
"codes",
|
||||
"connectivity",
|
||||
"credentials",
|
||||
"encoding",
|
||||
"encoding/proto",
|
||||
"grpclb/grpc_lb_v1/messages",
|
||||
"grpclog",
|
||||
"internal",
|
||||
"keepalive",
|
||||
"metadata",
|
||||
"naming",
|
||||
"peer",
|
||||
"resolver",
|
||||
"resolver/dns",
|
||||
"resolver/passthrough",
|
||||
"stats",
|
||||
"status",
|
||||
"tap",
|
||||
"transport"
|
||||
]
|
||||
revision = "8e4536a86ab602859c20df5ebfd0bd4228d08655"
|
||||
version = "v1.10.0"
|
||||
|
||||
[[projects]]
|
||||
name = "gopkg.in/inf.v0"
|
||||
packages = ["."]
|
||||
revision = "d2d2541c53f18d2a059457998ce2876cc8e67cbf"
|
||||
version = "v0.9.1"
|
||||
|
||||
[[projects]]
|
||||
name = "gopkg.in/yaml.v2"
|
||||
packages = ["."]
|
||||
revision = "5420a8b6744d3b0345ab293f6fcba19c978f1183"
|
||||
version = "v2.2.1"
|
||||
|
||||
[[projects]]
|
||||
branch = "master"
|
||||
name = "k8s.io/api"
|
||||
packages = [
|
||||
"admission/v1beta1",
|
||||
"admissionregistration/v1alpha1",
|
||||
"admissionregistration/v1beta1",
|
||||
"apps/v1",
|
||||
"apps/v1beta1",
|
||||
"apps/v1beta2",
|
||||
"authentication/v1",
|
||||
"authentication/v1beta1",
|
||||
"authorization/v1",
|
||||
"authorization/v1beta1",
|
||||
"autoscaling/v1",
|
||||
"autoscaling/v2beta1",
|
||||
"batch/v1",
|
||||
"batch/v1beta1",
|
||||
"batch/v2alpha1",
|
||||
"certificates/v1beta1",
|
||||
"core/v1",
|
||||
"events/v1beta1",
|
||||
"extensions/v1beta1",
|
||||
"networking/v1",
|
||||
"policy/v1beta1",
|
||||
"rbac/v1",
|
||||
"rbac/v1alpha1",
|
||||
"rbac/v1beta1",
|
||||
"scheduling/v1alpha1",
|
||||
"settings/v1alpha1",
|
||||
"storage/v1",
|
||||
"storage/v1alpha1",
|
||||
"storage/v1beta1"
|
||||
]
|
||||
revision = "cfe4a76edf6d17ea00c8b47fabf55edfa5f6c994"
|
||||
|
||||
[[projects]]
|
||||
branch = "master"
|
||||
name = "k8s.io/apimachinery"
|
||||
packages = [
|
||||
"pkg/api/errors",
|
||||
"pkg/api/meta",
|
||||
"pkg/api/resource",
|
||||
"pkg/apis/meta/v1",
|
||||
"pkg/apis/meta/v1/unstructured",
|
||||
"pkg/apis/meta/v1beta1",
|
||||
"pkg/conversion",
|
||||
"pkg/conversion/queryparams",
|
||||
"pkg/fields",
|
||||
"pkg/labels",
|
||||
"pkg/runtime",
|
||||
"pkg/runtime/schema",
|
||||
"pkg/runtime/serializer",
|
||||
"pkg/runtime/serializer/json",
|
||||
"pkg/runtime/serializer/protobuf",
|
||||
"pkg/runtime/serializer/recognizer",
|
||||
"pkg/runtime/serializer/streaming",
|
||||
"pkg/runtime/serializer/versioning",
|
||||
"pkg/selection",
|
||||
"pkg/types",
|
||||
"pkg/util/clock",
|
||||
"pkg/util/errors",
|
||||
"pkg/util/framer",
|
||||
"pkg/util/intstr",
|
||||
"pkg/util/json",
|
||||
"pkg/util/net",
|
||||
"pkg/util/runtime",
|
||||
"pkg/util/sets",
|
||||
"pkg/util/validation",
|
||||
"pkg/util/validation/field",
|
||||
"pkg/util/wait",
|
||||
"pkg/util/yaml",
|
||||
"pkg/version",
|
||||
"pkg/watch",
|
||||
"third_party/forked/golang/reflect"
|
||||
]
|
||||
revision = "5a0ac3ef2591b5fbf8659ab72e91c3bfac620615"
|
||||
|
||||
[[projects]]
|
||||
name = "k8s.io/client-go"
|
||||
packages = [
|
||||
"discovery",
|
||||
"kubernetes",
|
||||
"kubernetes/scheme",
|
||||
"kubernetes/typed/admissionregistration/v1alpha1",
|
||||
"kubernetes/typed/admissionregistration/v1beta1",
|
||||
"kubernetes/typed/apps/v1",
|
||||
"kubernetes/typed/apps/v1beta1",
|
||||
"kubernetes/typed/apps/v1beta2",
|
||||
"kubernetes/typed/authentication/v1",
|
||||
"kubernetes/typed/authentication/v1beta1",
|
||||
"kubernetes/typed/authorization/v1",
|
||||
"kubernetes/typed/authorization/v1beta1",
|
||||
"kubernetes/typed/autoscaling/v1",
|
||||
"kubernetes/typed/autoscaling/v2beta1",
|
||||
"kubernetes/typed/batch/v1",
|
||||
"kubernetes/typed/batch/v1beta1",
|
||||
"kubernetes/typed/batch/v2alpha1",
|
||||
"kubernetes/typed/certificates/v1beta1",
|
||||
"kubernetes/typed/core/v1",
|
||||
"kubernetes/typed/events/v1beta1",
|
||||
"kubernetes/typed/extensions/v1beta1",
|
||||
"kubernetes/typed/networking/v1",
|
||||
"kubernetes/typed/policy/v1beta1",
|
||||
"kubernetes/typed/rbac/v1",
|
||||
"kubernetes/typed/rbac/v1alpha1",
|
||||
"kubernetes/typed/rbac/v1beta1",
|
||||
"kubernetes/typed/scheduling/v1alpha1",
|
||||
"kubernetes/typed/settings/v1alpha1",
|
||||
"kubernetes/typed/storage/v1",
|
||||
"kubernetes/typed/storage/v1alpha1",
|
||||
"kubernetes/typed/storage/v1beta1",
|
||||
"pkg/apis/clientauthentication",
|
||||
"pkg/apis/clientauthentication/v1alpha1",
|
||||
"pkg/version",
|
||||
"plugin/pkg/client/auth/exec",
|
||||
"rest",
|
||||
"rest/watch",
|
||||
"tools/auth",
|
||||
"tools/clientcmd",
|
||||
"tools/clientcmd/api",
|
||||
"tools/clientcmd/api/latest",
|
||||
"tools/clientcmd/api/v1",
|
||||
"tools/metrics",
|
||||
"tools/reference",
|
||||
"transport",
|
||||
"util/cert",
|
||||
"util/flowcontrol",
|
||||
"util/homedir",
|
||||
"util/integer"
|
||||
]
|
||||
revision = "23781f4d6632d88e869066eaebb743857aa1ef9b"
|
||||
version = "v7.0.0"
|
||||
|
||||
[[projects]]
|
||||
name = "k8s.io/kubernetes"
|
||||
packages = ["pkg/kubelet/apis/deviceplugin/v1beta1"]
|
||||
@ -52,6 +335,6 @@
|
||||
[solve-meta]
|
||||
analyzer-name = "dep"
|
||||
analyzer-version = 1
|
||||
inputs-digest = "3c267fa12282e03452e39f7a65513f90dbf36401fa59832f66a11ad1c0236034"
|
||||
inputs-digest = "481e372f64cc48abd8548d0f3668bda0d9cc72b26c8b2544861fcc1305e9496f"
|
||||
solver-name = "gps-cdcl"
|
||||
solver-version = 1
|
||||
|
README.md

@@ -7,6 +7,7 @@
- [About](#about)
- [GPU device plugin](cmd/gpu_plugin/README.md)
- [FPGA device plugin](cmd/fpga_plugin/README.md)
- [FPGA admission controller webhook](cmd/fpga_admissionwebhook/README.md)

## About
build/docker/intel-fpga-admissionwebhook.Dockerfile (new file, 10 lines)

@@ -0,0 +1,10 @@
FROM golang:1.10-alpine as builder
ARG DIR=/go/src/github.com/intel/intel-device-plugins-for-kubernetes
WORKDIR $DIR
COPY . .
RUN cd cmd/fpga_admissionwebhook; go install
RUN chmod a+x /go/bin/fpga_admissionwebhook

FROM alpine
COPY --from=builder /go/bin/fpga_admissionwebhook /usr/bin/intel_fpga_admissionwebhook
CMD ["/usr/bin/intel_fpga_admissionwebhook"]
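The README below drives this build through `make webhook-container`. As a rough equivalent, the image can also be built with plain `docker build` from the repository root, since the Dockerfile copies the whole tree into the build context; the `devel` tag is only an illustrative name, not something the project's Makefile defines:

    $ docker build \
        -f build/docker/intel-fpga-admissionwebhook.Dockerfile \
        -t intel-fpga-admissionwebhook:devel .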
cmd/fpga_admissionwebhook/README.md (new file, 240 lines)

@@ -0,0 +1,240 @@
## Build and install Intel FPGA webhook for admission controller

### Get source code

    $ mkdir -p $GOPATH/src/github.com/intel/
    $ cd $GOPATH/src/github.com/intel/
    $ git clone https://github.com/intel/intel-device-plugins-for-kubernetes.git

### Build a Docker image with the webhook

    $ cd $GOPATH/src/github.com/intel/intel-device-plugins-for-kubernetes
    $ make webhook-container
    $ docker images
    REPOSITORY                    TAG                                        IMAGE ID       CREATED     SIZE
    intel-fpga-admissionwebhook   878381826cdef0b112234c296d4e13d3266455ae   0fbbf9dfae95   0 sec ago   24.7MB
    ...
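If the cluster nodes do not share the Docker image cache of the build machine, the freshly built image also has to be pushed to a registry the nodes can pull from. A minimal sketch, where `registry.example.com` stands in for your own registry and `<tag>` for the tag printed by `docker images` above:

    $ docker tag intel-fpga-admissionwebhook:<tag> registry.example.com/intel-fpga-admissionwebhook:<tag>
    $ docker push registry.example.com/intel-fpga-admissionwebhook:<tag>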
### Create secret including signed key/cert pair for the webhook

Use the following script taken from [this article](https://medium.com/ibm-cloud/diving-into-kubernetes-mutatingadmissionwebhook-6ef3c5695f74):

```bash
#!/bin/bash

while [[ $# -gt 0 ]]; do
    case ${1} in
        --service)
            service="$2"
            shift
            ;;
        --secret)
            secret="$2"
            shift
            ;;
        --namespace)
            namespace="$2"
            shift
            ;;
        --kubectl)
            kubectl="$2"
            shift
            ;;
    esac
    shift
done

[ -z ${service} ] && service=intel-fpga-webhook-svc
[ -z ${secret} ] && secret=intel-fpga-webhook-certs
[ -z ${namespace} ] && namespace=default
[ -z ${kubectl} ] && kubectl=kubectl

csrName=${service}.${namespace}
tmpdir=$(mktemp -d)
echo "creating certs in tmpdir ${tmpdir} "

cat <<EOF >> ${tmpdir}/csr.conf
[req]
req_extensions = v3_req
distinguished_name = req_distinguished_name
[req_distinguished_name]
[ v3_req ]
basicConstraints = CA:FALSE
keyUsage = nonRepudiation, digitalSignature, keyEncipherment
extendedKeyUsage = serverAuth
subjectAltName = @alt_names
[alt_names]
DNS.1 = ${service}
DNS.2 = ${service}.${namespace}
DNS.3 = ${service}.${namespace}.svc
EOF

openssl genrsa -out ${tmpdir}/server-key.pem 2048
openssl req -new -key ${tmpdir}/server-key.pem -subj "/CN=${service}.${namespace}.svc" -out ${tmpdir}/server.csr -config ${tmpdir}/csr.conf

# clean-up any previously created CSR for our service. Ignore errors if not present.
${kubectl} delete csr ${csrName} 2>/dev/null || true

# create server cert/key CSR and send to k8s API
cat <<EOF | ${kubectl} create -f -
apiVersion: certificates.k8s.io/v1beta1
kind: CertificateSigningRequest
metadata:
  name: ${csrName}
spec:
  groups:
  - system:authenticated
  request: $(cat ${tmpdir}/server.csr | base64 -w 0)
  usages:
  - digital signature
  - key encipherment
  - server auth
EOF

# verify CSR has been created
while true; do
    ${kubectl} get csr ${csrName}
    if [ "$?" -eq 0 ]; then
        break
    fi
done

# approve and fetch the signed certificate
${kubectl} certificate approve ${csrName}

# verify certificate has been signed
for x in $(seq 10); do
    serverCert=$(${kubectl} get csr ${csrName} -o jsonpath='{.status.certificate}')
    if [[ ${serverCert} != '' ]]; then
        break
    fi
    sleep 1
done

if [[ ${serverCert} == '' ]]; then
    echo "ERROR: After approving csr ${csrName}, the signed certificate did not appear on the resource. Giving up after 10 attempts." >&2
    exit 1
fi

echo ${serverCert} | openssl base64 -d -A -out ${tmpdir}/server-cert.pem


# create the secret with CA cert and server cert/key
${kubectl} create secret generic ${secret} \
        --from-file=key.pem=${tmpdir}/server-key.pem \
        --from-file=cert.pem=${tmpdir}/server-cert.pem \
        --dry-run -o yaml |
    ${kubectl} -n ${namespace} apply -f -
```

You should see something like

    $ ./scripts/webhook-create-signed-cert.sh
    creating certs in tmpdir /tmp/tmp.9sgk16v5Y2
    Generating RSA private key, 2048 bit long modulus
    .........................+++
    ....+++
    e is 65537 (0x010001)
    certificatesigningrequest "intel-fpga-webhook-svc.default" created
    NAME                             AGE       REQUESTOR      CONDITION
    intel-fpga-webhook-svc.default   0s        system:admin   Pending
    certificatesigningrequest "intel-fpga-webhook-svc.default" approved
    secret "intel-fpga-webhook-certs" created
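Before deploying the webhook it is worth confirming that the secret really ended up in the intended namespace; this is plain `kubectl`, and the listed AGE will of course differ:

    $ kubectl get secret intel-fpga-webhook-certs
    NAME                       TYPE      DATA      AGE
    intel-fpga-webhook-certs   Opaque    2         1m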
### Deploy webhook service

Use the following deployment and service specs:

```yaml
apiVersion: extensions/v1beta1
kind: Deployment
metadata:
  name: intel-fpga-webhook-deployment
  labels:
    app: intel-fpga-webhook
spec:
  replicas: 1
  template:
    metadata:
      labels:
        app: intel-fpga-webhook
    spec:
      containers:
        - name: fpga-mutator
          image: intel-fpga-admissionwebhook:878381826cdef0b112234c296d4e13d3266455ae
          imagePullPolicy: IfNotPresent
          command:
            - /usr/bin/intel_fpga_admissionwebhook
          args:
            - -tls-cert-file=/etc/webhook/certs/cert.pem
            - -tls-private-key-file=/etc/webhook/certs/key.pem
            - -alsologtostderr
            - -v=2
            - 2>&1
          volumeMounts:
            - name: webhook-certs
              mountPath: /etc/webhook/certs
              readOnly: true
      volumes:
        - name: webhook-certs
          secret:
            secretName: intel-fpga-webhook-certs
---
apiVersion: v1
kind: Service
metadata:
  name: intel-fpga-webhook-svc
  labels:
    app: intel-fpga-webhook
spec:
  ports:
    - port: 443
      targetPort: 443
  selector:
    app: intel-fpga-webhook
```

create the deployment and the `intel-fpga-webhook-svc` service:

    $ kubectl create -f </path/to/deployment.yaml>
    deployment.extensions/intel-fpga-webhook-deployment created
    service/intel-fpga-webhook-svc created
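To check that the webhook is actually serving before registering it with the API server, standard `kubectl` queries are enough; the service should list one ready endpoint backed by the `fpga-mutator` pod:

    $ kubectl get pods,svc -l app=intel-fpga-webhook
    $ kubectl get endpoints intel-fpga-webhook-svc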
### Configure webhook admission controller on the fly

With this spec as a template:

```yaml
apiVersion: admissionregistration.k8s.io/v1beta1
kind: MutatingWebhookConfiguration
metadata:
  name: fpga-mutator-webhook-cfg
  labels:
    app: intel-fpga-webhook
webhooks:
  - name: fpga.mutator.webhooks.intel.com
    rules:
      - apiGroups:
          - ""
        apiVersions:
          - v1
        operations:
          - CREATE
        resources:
          - pods
    clientConfig:
      service:
        path: "/pods"
        namespace: default
        name: intel-fpga-webhook-svc
      caBundle: {CA_BUNDLE}
```

register the webhook:

    $ cat </path/to/fpga-pod-mutating-webhook.yaml> | sed -e "s/{CA_BUNDLE}/$(kubectl get configmap -n kube-system extension-apiserver-authentication -o=jsonpath='{.data.client-ca-file}' | base64 -w 0)/g" | kubectl create -f -

Please note that the placeholder `{CA_BUNDLE}` should be replaced with the certificate used for signing certificate requests in your cluster, i.e. the one passed to `kube-controller-manager` in the `--cluster-signing-cert-file` option. Depending on how your cluster is configured, it may differ from what is stored in the `extension-apiserver-authentication` configmap.
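Once the configuration is registered, a quick way to see the webhook in action is to create a pod that requests one of the translated resource names and inspect what was actually admitted. The pod below is only an illustration (the name and image are arbitrary), and scheduling it further assumes some node actually advertises the corresponding FPGA resource:

```yaml
apiVersion: v1
kind: Pod
metadata:
  name: fpga-example
spec:
  containers:
    - name: fpga-example
      image: busybox
      command: ["sleep", "3600"]
      resources:
        limits:
          intel.com/fpga-arria10: 1
```

After `kubectl create -f` of this spec, `kubectl get pod fpga-example -o yaml` should show the `intel.com/fpga-arria10` entry rewritten to the region resource the webhook maps it to, e.g. `intel.com/fpga-region-ce48969398f05f33946d560708be108a`.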
cmd/fpga_admissionwebhook/fpga_admissionwebhook.go (new file, 257 lines)

@@ -0,0 +1,257 @@
|
||||
// Copyright 2018 Intel Corporation. All Rights Reserved.
|
||||
//
|
||||
// Licensed under the Apache License, Version 2.0 (the "License");
|
||||
// you may not use this file except in compliance with the License.
|
||||
// You may obtain a copy of the License at
|
||||
//
|
||||
// http://www.apache.org/licenses/LICENSE-2.0
|
||||
//
|
||||
// Unless required by applicable law or agreed to in writing, software
|
||||
// distributed under the License is distributed on an "AS IS" BASIS,
|
||||
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
// See the License for the specific language governing permissions and
|
||||
// limitations under the License.
|
||||
|
||||
package main
|
||||
|
||||
import (
|
||||
"crypto/tls"
|
||||
"encoding/json"
|
||||
"errors"
|
||||
"flag"
|
||||
"fmt"
|
||||
"io/ioutil"
|
||||
"net/http"
|
||||
"os"
|
||||
"strings"
|
||||
|
||||
"github.com/golang/glog"
|
||||
|
||||
"k8s.io/api/admission/v1beta1"
|
||||
admissionregistrationv1beta1 "k8s.io/api/admissionregistration/v1beta1"
|
||||
corev1 "k8s.io/api/core/v1"
|
||||
metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
|
||||
"k8s.io/apimachinery/pkg/runtime"
|
||||
"k8s.io/apimachinery/pkg/runtime/serializer"
|
||||
"k8s.io/apimachinery/pkg/types"
|
||||
)
|
||||
|
||||
const (
|
||||
resourceReplaceOp = `{
|
||||
"op": "remove",
|
||||
"path": "/spec/containers/%d/resources/%s/%s"
|
||||
}, {
|
||||
"op": "add",
|
||||
"path": "/spec/containers/%d/resources/%s/%s",
|
||||
"value": %s
|
||||
}`
|
||||
)
|
||||
|
||||
var (
|
||||
scheme = runtime.NewScheme()
|
||||
codecs = serializer.NewCodecFactory(scheme)
|
||||
rfc6901Escaper = strings.NewReplacer("~", "~0", "/", "~1")
|
||||
)
|
||||
|
||||
func init() {
|
||||
addToScheme(scheme)
|
||||
}
|
||||
|
||||
func addToScheme(scheme *runtime.Scheme) {
|
||||
corev1.AddToScheme(scheme)
|
||||
admissionregistrationv1beta1.AddToScheme(scheme)
|
||||
}
|
||||
|
||||
// TODO: get rid of hardcoded translations of FPGA resource names to region interface IDs
|
||||
func translateFpgaResourceName(oldname corev1.ResourceName) string {
|
||||
switch string(oldname) {
|
||||
case "intel.com/fpga-arria10":
|
||||
return rfc6901Escaper.Replace("intel.com/fpga-region-ce48969398f05f33946d560708be108a")
|
||||
case "intel.com/fpga-arria10-nlb0":
|
||||
return rfc6901Escaper.Replace("intel.com/fpga-af-d8424dc4a4a3c413f89e433683f9040b")
|
||||
case "intel.com/fpga-arria10-nlb3":
|
||||
return rfc6901Escaper.Replace("intel.com/fpga-af-f7df405cbd7acf7222f144b0b93acd18")
|
||||
}
|
||||
|
||||
return ""
|
||||
}
|
||||
|
||||
func getTLSConfig(certFile string, keyFile string) *tls.Config {
|
||||
sCert, err := tls.LoadX509KeyPair(certFile, keyFile)
|
||||
if err != nil {
|
||||
glog.Fatal(err)
|
||||
}
|
||||
return &tls.Config{
|
||||
Certificates: []tls.Certificate{sCert},
|
||||
}
|
||||
}
|
||||
|
||||
func mutatePods(ar v1beta1.AdmissionReview) *v1beta1.AdmissionResponse {
|
||||
var ops []string
|
||||
|
||||
glog.V(2).Info("mutating pods")
|
||||
|
||||
podResource := metav1.GroupVersionResource{Group: "", Version: "v1", Resource: "pods"}
|
||||
if ar.Request.Resource != podResource {
|
||||
glog.Errorf("expect resource to be %s", podResource)
|
||||
return nil
|
||||
}
|
||||
|
||||
raw := ar.Request.Object.Raw
|
||||
pod := corev1.Pod{}
|
||||
deserializer := codecs.UniversalDeserializer()
|
||||
if _, _, err := deserializer.Decode(raw, nil, &pod); err != nil {
|
||||
glog.Error(err)
|
||||
return toAdmissionResponse(err)
|
||||
}
|
||||
reviewResponse := v1beta1.AdmissionResponse{}
|
||||
reviewResponse.Allowed = true
|
||||
|
||||
for containerIdx, container := range pod.Spec.Containers {
|
||||
for resourceName, resourceQuantity := range container.Resources.Limits {
|
||||
newName := translateFpgaResourceName(resourceName)
|
||||
if len(newName) > 0 {
|
||||
op := fmt.Sprintf(resourceReplaceOp, containerIdx,
|
||||
"limits", rfc6901Escaper.Replace(string(resourceName)),
|
||||
containerIdx, "limits", newName, resourceQuantity.String())
|
||||
ops = append(ops, op)
|
||||
}
|
||||
}
|
||||
for resourceName, resourceQuantity := range container.Resources.Requests {
|
||||
newName := translateFpgaResourceName(resourceName)
|
||||
if len(newName) > 0 {
|
||||
op := fmt.Sprintf(resourceReplaceOp, containerIdx,
|
||||
"requests", rfc6901Escaper.Replace(string(resourceName)),
|
||||
containerIdx, "requests", newName, resourceQuantity.String())
|
||||
ops = append(ops, op)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if len(ops) > 0 {
|
||||
reviewResponse.Patch = []byte("[ " + strings.Join(ops, ",") + " ]")
|
||||
pt := v1beta1.PatchTypeJSONPatch
|
||||
reviewResponse.PatchType = &pt
|
||||
}
|
||||
|
||||
return &reviewResponse
|
||||
}
|
||||
|
||||
type admitFunc func(v1beta1.AdmissionReview) *v1beta1.AdmissionResponse
|
||||
|
||||
func toAdmissionResponse(err error) *v1beta1.AdmissionResponse {
|
||||
return &v1beta1.AdmissionResponse{
|
||||
Result: &metav1.Status{
|
||||
Message: err.Error(),
|
||||
},
|
||||
}
|
||||
}
|
||||
|
||||
func serve(w http.ResponseWriter, r *http.Request, admit admitFunc) {
|
||||
var body []byte
|
||||
var reviewResponse *v1beta1.AdmissionResponse
|
||||
var reqUID types.UID
|
||||
|
||||
if r.Body != nil {
|
||||
if data, err := ioutil.ReadAll(r.Body); err == nil {
|
||||
body = data
|
||||
}
|
||||
}
|
||||
|
||||
if len(body) == 0 {
|
||||
glog.Error("No body in request")
|
||||
w.WriteHeader(http.StatusBadRequest)
|
||||
return
|
||||
}
|
||||
|
||||
// verify the content type is accurate
|
||||
contentType := r.Header.Get("Content-Type")
|
||||
if contentType != "application/json" {
|
||||
glog.Errorf("contentType=%s, expect application/json", contentType)
|
||||
w.WriteHeader(http.StatusBadRequest)
|
||||
return
|
||||
}
|
||||
|
||||
glog.V(2).Info(fmt.Sprintf("handling request: %v", body))
|
||||
ar := v1beta1.AdmissionReview{}
|
||||
deserializer := codecs.UniversalDeserializer()
|
||||
if _, _, err := deserializer.Decode(body, nil, &ar); err != nil {
|
||||
glog.Error(err)
|
||||
reviewResponse = toAdmissionResponse(err)
|
||||
} else {
|
||||
if ar.Request == nil {
|
||||
err = errors.New("Request is empty")
|
||||
reviewResponse = toAdmissionResponse(err)
|
||||
} else {
|
||||
reqUID = ar.Request.UID
|
||||
reviewResponse = admit(ar)
|
||||
}
|
||||
}
|
||||
glog.V(2).Info(fmt.Sprintf("sending response: %v", reviewResponse))
|
||||
|
||||
response := v1beta1.AdmissionReview{}
|
||||
if reviewResponse != nil {
|
||||
response.Response = reviewResponse
|
||||
response.Response.UID = reqUID
|
||||
}
|
||||
|
||||
// reset the Object and OldObject, they are not needed in a response.
|
||||
if ar.Request != nil {
|
||||
ar.Request.Object = runtime.RawExtension{}
|
||||
ar.Request.OldObject = runtime.RawExtension{}
|
||||
}
|
||||
|
||||
resp, err := json.Marshal(response)
|
||||
if err != nil {
|
||||
glog.Error(err)
|
||||
}
|
||||
if _, err := w.Write(resp); err != nil {
|
||||
glog.Error(err)
|
||||
}
|
||||
}
|
||||
|
||||
func servePods(w http.ResponseWriter, r *http.Request) {
|
||||
serve(w, r, mutatePods)
|
||||
}
|
||||
|
||||
func main() {
|
||||
var certFile string
|
||||
var keyFile string
|
||||
|
||||
flag.StringVar(&certFile, "tls-cert-file", certFile,
|
||||
"File containing the x509 Certificate for HTTPS. (CA cert, if any, concatenated after server cert).")
|
||||
flag.StringVar(&keyFile, "tls-private-key-file", keyFile, "File containing the x509 private key matching --tls-cert-file.")
|
||||
|
||||
flag.Parse()
|
||||
|
||||
if certFile == "" {
|
||||
glog.Error("TLS certificate file is not set")
|
||||
os.Exit(1)
|
||||
}
|
||||
|
||||
if keyFile == "" {
|
||||
glog.Error("TLS private key is not set")
|
||||
os.Exit(1)
|
||||
}
|
||||
|
||||
if _, err := os.Stat(certFile); err != nil {
|
||||
glog.Error("TLS certificate not found")
|
||||
os.Exit(1)
|
||||
}
|
||||
|
||||
if _, err := os.Stat(keyFile); err != nil {
|
||||
glog.Error("TLS private key not found")
|
||||
os.Exit(1)
|
||||
}
|
||||
|
||||
http.HandleFunc("/pods", servePods)
|
||||
|
||||
glog.V(2).Info("Webhook started")
|
||||
|
||||
server := &http.Server{
|
||||
Addr: ":443",
|
||||
TLSConfig: getTLSConfig(certFile, keyFile),
|
||||
}
|
||||
|
||||
glog.Fatal(server.ListenAndServeTLS("", ""))
|
||||
}
|
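To make the patch format concrete: for a pod whose first container asks for `intel.com/fpga-arria10` in both limits and requests, `mutatePods` above assembles a JSON Patch along these lines (whitespace aside; note the RFC 6901 escaping that turns `/` into `~1` inside the resource names):

    [ {
        "op": "remove",
        "path": "/spec/containers/0/resources/limits/intel.com~1fpga-arria10"
      }, {
        "op": "add",
        "path": "/spec/containers/0/resources/limits/intel.com~1fpga-region-ce48969398f05f33946d560708be108a",
        "value": 1
      }, {
        "op": "remove",
        "path": "/spec/containers/0/resources/requests/intel.com~1fpga-arria10"
      }, {
        "op": "add",
        "path": "/spec/containers/0/resources/requests/intel.com~1fpga-region-ce48969398f05f33946d560708be108a",
        "value": 1
      } ]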
cmd/fpga_admissionwebhook/fpga_admissionwebhook_test.go (new file, 211 lines)

@@ -0,0 +1,211 @@
|
||||
// Copyright 2018 Intel Corporation. All Rights Reserved.
|
||||
//
|
||||
// Licensed under the Apache License, Version 2.0 (the "License");
|
||||
// you may not use this file except in compliance with the License.
|
||||
// You may obtain a copy of the License at
|
||||
//
|
||||
// http://www.apache.org/licenses/LICENSE-2.0
|
||||
//
|
||||
// Unless required by applicable law or agreed to in writing, software
|
||||
// distributed under the License is distributed on an "AS IS" BASIS,
|
||||
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
// See the License for the specific language governing permissions and
|
||||
// limitations under the License.
|
||||
|
||||
package main
|
||||
|
||||
import (
|
||||
"bytes"
|
||||
"encoding/json"
|
||||
"io"
|
||||
"net/http"
|
||||
"net/http/httptest"
|
||||
"strings"
|
||||
"testing"
|
||||
|
||||
"k8s.io/api/admission/v1beta1"
|
||||
corev1 "k8s.io/api/core/v1"
|
||||
"k8s.io/apimachinery/pkg/api/resource"
|
||||
metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
|
||||
"k8s.io/apimachinery/pkg/runtime"
|
||||
)
|
||||
|
||||
func fakeMutatePods(ar v1beta1.AdmissionReview) *v1beta1.AdmissionResponse {
|
||||
reviewResponse := v1beta1.AdmissionResponse{}
|
||||
return &reviewResponse
|
||||
}
|
||||
|
||||
func TestServe(t *testing.T) {
|
||||
ar1, err := json.Marshal(&v1beta1.AdmissionReview{})
|
||||
if err != nil {
|
||||
t.Fatal(err)
|
||||
}
|
||||
ar2, err := json.Marshal(&v1beta1.AdmissionReview{
|
||||
Request: &v1beta1.AdmissionRequest{},
|
||||
})
|
||||
if err != nil {
|
||||
t.Fatal(err)
|
||||
}
|
||||
|
||||
tcases := []struct {
|
||||
header http.Header
|
||||
body io.Reader
|
||||
expectedStatus int
|
||||
}{
|
||||
{
|
||||
expectedStatus: http.StatusBadRequest,
|
||||
},
|
||||
{
|
||||
body: strings.NewReader("hello world"),
|
||||
expectedStatus: http.StatusBadRequest,
|
||||
},
|
||||
{
|
||||
header: http.Header{
|
||||
"Content-Type": []string{"application/json"},
|
||||
},
|
||||
expectedStatus: http.StatusBadRequest,
|
||||
},
|
||||
{
|
||||
body: strings.NewReader("hello world"),
|
||||
header: http.Header{
|
||||
"Content-Type": []string{"application/json"},
|
||||
},
|
||||
expectedStatus: http.StatusOK,
|
||||
},
|
||||
{
|
||||
body: bytes.NewReader(ar1),
|
||||
header: http.Header{
|
||||
"Content-Type": []string{"application/json"},
|
||||
},
|
||||
expectedStatus: http.StatusOK,
|
||||
},
|
||||
{
|
||||
body: bytes.NewReader(ar2),
|
||||
header: http.Header{
|
||||
"Content-Type": []string{"application/json"},
|
||||
},
|
||||
expectedStatus: http.StatusOK,
|
||||
},
|
||||
}
|
||||
|
||||
for _, tcase := range tcases {
|
||||
req, err := http.NewRequest("POST", "/pods", tcase.body)
|
||||
if err != nil {
|
||||
t.Fatal(err)
|
||||
}
|
||||
req.Header = tcase.header
|
||||
|
||||
rr := httptest.NewRecorder()
|
||||
handler := http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { serve(w, r, fakeMutatePods) })
|
||||
|
||||
handler.ServeHTTP(rr, req)
|
||||
|
||||
if status := rr.Code; status != tcase.expectedStatus {
|
||||
t.Errorf("handler returned wrong status code: got %v want %v",
|
||||
status, tcase.expectedStatus)
|
||||
}
|
||||
|
||||
if tcase.expectedStatus == http.StatusOK {
|
||||
var ar v1beta1.AdmissionReview
|
||||
err = json.Unmarshal(rr.Body.Bytes(), &ar)
|
||||
if err != nil {
|
||||
t.Error(err)
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func TestMutatePods(t *testing.T) {
|
||||
pod := corev1.Pod{
|
||||
Spec: corev1.PodSpec{
|
||||
Containers: []corev1.Container{
|
||||
{
|
||||
Name: "test-container",
|
||||
Image: "test-image",
|
||||
Resources: corev1.ResourceRequirements{
|
||||
Limits: corev1.ResourceList{
|
||||
"cpu": resource.MustParse("1"),
|
||||
"intel.com/fpga-arria10": resource.MustParse("1"),
|
||||
},
|
||||
Requests: corev1.ResourceList{
|
||||
"cpu": resource.MustParse("1"),
|
||||
"intel.com/fpga-arria10": resource.MustParse("1"),
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
}
|
||||
podRaw, err := json.Marshal(pod)
|
||||
if err != nil {
|
||||
t.Fatal(err)
|
||||
}
|
||||
|
||||
tcases := []struct {
|
||||
name string
|
||||
ar v1beta1.AdmissionReview
|
||||
expectedResponse bool
|
||||
expectedPatchOps int
|
||||
}{
|
||||
{
|
||||
name: "empty admission request",
|
||||
ar: v1beta1.AdmissionReview{
|
||||
Request: &v1beta1.AdmissionRequest{},
|
||||
},
|
||||
},
|
||||
{
|
||||
name: "admission request without object",
|
||||
ar: v1beta1.AdmissionReview{
|
||||
Request: &v1beta1.AdmissionRequest{
|
||||
Resource: metav1.GroupVersionResource{Group: "", Version: "v1", Resource: "pods"},
|
||||
},
|
||||
},
|
||||
expectedResponse: true,
|
||||
},
|
||||
{
|
||||
name: "admission request with corrupted object",
|
||||
ar: v1beta1.AdmissionReview{
|
||||
Request: &v1beta1.AdmissionRequest{
|
||||
Resource: metav1.GroupVersionResource{Group: "", Version: "v1", Resource: "pods"},
|
||||
Object: runtime.RawExtension{
|
||||
Raw: []byte(`{"corrupted json":}`),
|
||||
},
|
||||
},
|
||||
},
|
||||
expectedResponse: true,
|
||||
},
|
||||
{
|
||||
name: "non-empty admission request",
|
||||
ar: v1beta1.AdmissionReview{
|
||||
Request: &v1beta1.AdmissionRequest{
|
||||
Resource: metav1.GroupVersionResource{Group: "", Version: "v1", Resource: "pods"},
|
||||
Object: runtime.RawExtension{
|
||||
Raw: podRaw,
|
||||
},
|
||||
},
|
||||
},
|
||||
expectedResponse: true,
|
||||
expectedPatchOps: 4,
|
||||
},
|
||||
}
|
||||
|
||||
for _, tcase := range tcases {
|
||||
resp := mutatePods(tcase.ar)
|
||||
|
||||
if !tcase.expectedResponse && resp != nil {
|
||||
t.Errorf("Test case '%s': got unexpected response", tcase.name)
|
||||
} else if tcase.expectedResponse && resp == nil {
|
||||
t.Errorf("Test case '%s': got no response", tcase.name)
|
||||
} else if tcase.expectedResponse && tcase.expectedPatchOps > 0 {
|
||||
var ops interface{}
|
||||
|
||||
err := json.Unmarshal(resp.Patch, &ops)
|
||||
if err != nil {
|
||||
t.Errorf("Test case '%s': got unparsable patch '%s'", tcase.name, resp.Patch)
|
||||
} else if len(ops.([]interface{})) != tcase.expectedPatchOps {
|
||||
t.Errorf("Test case '%s': got wrong number of operations in the patch. Expected %d, but got %d",
|
||||
tcase.name, tcase.expectedPatchOps, len(ops.([]interface{})))
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
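The table-driven tests above do not need a running cluster; from the repository checkout they can be run with the standard Go tooling:

    $ go test ./cmd/fpga_admissionwebhook/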
vendor/github.com/ghodss/yaml/.gitignore (generated, vendored, new file, 20 lines)

@@ -0,0 +1,20 @@
|
||||
# OSX leaves these everywhere on SMB shares
|
||||
._*
|
||||
|
||||
# Eclipse files
|
||||
.classpath
|
||||
.project
|
||||
.settings/**
|
||||
|
||||
# Emacs save files
|
||||
*~
|
||||
|
||||
# Vim-related files
|
||||
[._]*.s[a-w][a-z]
|
||||
[._]s[a-w][a-z]
|
||||
*.un~
|
||||
Session.vim
|
||||
.netrwhist
|
||||
|
||||
# Go test binaries
|
||||
*.test
|
vendor/github.com/ghodss/yaml/.travis.yml (generated, vendored, new file, 7 lines)

@@ -0,0 +1,7 @@
|
||||
language: go
|
||||
go:
|
||||
- 1.3
|
||||
- 1.4
|
||||
script:
|
||||
- go test
|
||||
- go build
|
vendor/github.com/ghodss/yaml/LICENSE (generated, vendored, new file, 50 lines)

@@ -0,0 +1,50 @@
|
||||
The MIT License (MIT)
|
||||
|
||||
Copyright (c) 2014 Sam Ghods
|
||||
|
||||
Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||
of this software and associated documentation files (the "Software"), to deal
|
||||
in the Software without restriction, including without limitation the rights
|
||||
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
||||
copies of the Software, and to permit persons to whom the Software is
|
||||
furnished to do so, subject to the following conditions:
|
||||
|
||||
The above copyright notice and this permission notice shall be included in all
|
||||
copies or substantial portions of the Software.
|
||||
|
||||
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
||||
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
|
||||
SOFTWARE.
|
||||
|
||||
|
||||
Copyright (c) 2012 The Go Authors. All rights reserved.
|
||||
|
||||
Redistribution and use in source and binary forms, with or without
|
||||
modification, are permitted provided that the following conditions are
|
||||
met:
|
||||
|
||||
* Redistributions of source code must retain the above copyright
|
||||
notice, this list of conditions and the following disclaimer.
|
||||
* Redistributions in binary form must reproduce the above
|
||||
copyright notice, this list of conditions and the following disclaimer
|
||||
in the documentation and/or other materials provided with the
|
||||
distribution.
|
||||
* Neither the name of Google Inc. nor the names of its
|
||||
contributors may be used to endorse or promote products derived from
|
||||
this software without specific prior written permission.
|
||||
|
||||
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
|
||||
"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
|
||||
LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
|
||||
A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
|
||||
OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
|
||||
SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
|
||||
LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
|
||||
DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
|
||||
THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
|
||||
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
|
||||
OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
vendor/github.com/ghodss/yaml/README.md (generated, vendored, new file, 121 lines)

@@ -0,0 +1,121 @@
|
||||
# YAML marshaling and unmarshaling support for Go
|
||||
|
||||
[](https://travis-ci.org/ghodss/yaml)
|
||||
|
||||
## Introduction
|
||||
|
||||
A wrapper around [go-yaml](https://github.com/go-yaml/yaml) designed to enable a better way of handling YAML when marshaling to and from structs.
|
||||
|
||||
In short, this library first converts YAML to JSON using go-yaml and then uses `json.Marshal` and `json.Unmarshal` to convert to or from the struct. This means that it effectively reuses the JSON struct tags as well as the custom JSON methods `MarshalJSON` and `UnmarshalJSON` unlike go-yaml. For a detailed overview of the rationale behind this method, [see this blog post](http://ghodss.com/2014/the-right-way-to-handle-yaml-in-golang/).
|
||||
|
||||
## Compatibility
|
||||
|
||||
This package uses [go-yaml](https://github.com/go-yaml/yaml) and therefore supports [everything go-yaml supports](https://github.com/go-yaml/yaml#compatibility).
|
||||
|
||||
## Caveats
|
||||
|
||||
**Caveat #1:** When using `yaml.Marshal` and `yaml.Unmarshal`, binary data should NOT be preceded with the `!!binary` YAML tag. If you do, go-yaml will convert the binary data from base64 to native binary data, which is not compatible with JSON. You can still use binary in your YAML files though - just store them without the `!!binary` tag and decode the base64 in your code (e.g. in the custom JSON methods `MarshalJSON` and `UnmarshalJSON`). This also has the benefit that your YAML and your JSON binary data will be decoded exactly the same way. As an example:
|
||||
|
||||
```
|
||||
BAD:
|
||||
exampleKey: !!binary gIGC
|
||||
|
||||
GOOD:
|
||||
exampleKey: gIGC
|
||||
... and decode the base64 data in your code.
|
||||
```
|
||||
|
||||
**Caveat #2:** When using `YAMLToJSON` directly, maps with keys that are maps will result in an error since this is not supported by JSON. This error will occur in `Unmarshal` as well since you can't unmarshal map keys anyways since struct fields can't be keys.
|
||||
|
||||
## Installation and usage
|
||||
|
||||
To install, run:
|
||||
|
||||
```
|
||||
$ go get github.com/ghodss/yaml
|
||||
```
|
||||
|
||||
And import using:
|
||||
|
||||
```
|
||||
import "github.com/ghodss/yaml"
|
||||
```
|
||||
|
||||
Usage is very similar to the JSON library:
|
||||
|
||||
```go
|
||||
package main
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
|
||||
"github.com/ghodss/yaml"
|
||||
)
|
||||
|
||||
type Person struct {
|
||||
Name string `json:"name"` // Affects YAML field names too.
|
||||
Age int `json:"age"`
|
||||
}
|
||||
|
||||
func main() {
|
||||
// Marshal a Person struct to YAML.
|
||||
p := Person{"John", 30}
|
||||
y, err := yaml.Marshal(p)
|
||||
if err != nil {
|
||||
fmt.Printf("err: %v\n", err)
|
||||
return
|
||||
}
|
||||
fmt.Println(string(y))
|
||||
/* Output:
|
||||
age: 30
|
||||
name: John
|
||||
*/
|
||||
|
||||
// Unmarshal the YAML back into a Person struct.
|
||||
var p2 Person
|
||||
err = yaml.Unmarshal(y, &p2)
|
||||
if err != nil {
|
||||
fmt.Printf("err: %v\n", err)
|
||||
return
|
||||
}
|
||||
fmt.Println(p2)
|
||||
/* Output:
|
||||
{John 30}
|
||||
*/
|
||||
}
|
||||
```
|
||||
|
||||
`yaml.YAMLToJSON` and `yaml.JSONToYAML` methods are also available:
|
||||
|
||||
```go
|
||||
package main
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
|
||||
"github.com/ghodss/yaml"
|
||||
)
|
||||
|
||||
func main() {
|
||||
j := []byte(`{"name": "John", "age": 30}`)
|
||||
y, err := yaml.JSONToYAML(j)
|
||||
if err != nil {
|
||||
fmt.Printf("err: %v\n", err)
|
||||
return
|
||||
}
|
||||
fmt.Println(string(y))
|
||||
/* Output:
|
||||
name: John
|
||||
age: 30
|
||||
*/
|
||||
j2, err := yaml.YAMLToJSON(y)
|
||||
if err != nil {
|
||||
fmt.Printf("err: %v\n", err)
|
||||
return
|
||||
}
|
||||
fmt.Println(string(j2))
|
||||
/* Output:
|
||||
{"age":30,"name":"John"}
|
||||
*/
|
||||
}
|
||||
```
|
vendor/github.com/ghodss/yaml/fields.go (generated, vendored, new file, 501 lines)

@@ -0,0 +1,501 @@
|
||||
// Copyright 2013 The Go Authors. All rights reserved.
|
||||
// Use of this source code is governed by a BSD-style
|
||||
// license that can be found in the LICENSE file.
|
||||
package yaml
|
||||
|
||||
import (
|
||||
"bytes"
|
||||
"encoding"
|
||||
"encoding/json"
|
||||
"reflect"
|
||||
"sort"
|
||||
"strings"
|
||||
"sync"
|
||||
"unicode"
|
||||
"unicode/utf8"
|
||||
)
|
||||
|
||||
// indirect walks down v allocating pointers as needed,
|
||||
// until it gets to a non-pointer.
|
||||
// if it encounters an Unmarshaler, indirect stops and returns that.
|
||||
// if decodingNull is true, indirect stops at the last pointer so it can be set to nil.
|
||||
func indirect(v reflect.Value, decodingNull bool) (json.Unmarshaler, encoding.TextUnmarshaler, reflect.Value) {
|
||||
// If v is a named type and is addressable,
|
||||
// start with its address, so that if the type has pointer methods,
|
||||
// we find them.
|
||||
if v.Kind() != reflect.Ptr && v.Type().Name() != "" && v.CanAddr() {
|
||||
v = v.Addr()
|
||||
}
|
||||
for {
|
||||
// Load value from interface, but only if the result will be
|
||||
// usefully addressable.
|
||||
if v.Kind() == reflect.Interface && !v.IsNil() {
|
||||
e := v.Elem()
|
||||
if e.Kind() == reflect.Ptr && !e.IsNil() && (!decodingNull || e.Elem().Kind() == reflect.Ptr) {
|
||||
v = e
|
||||
continue
|
||||
}
|
||||
}
|
||||
|
||||
if v.Kind() != reflect.Ptr {
|
||||
break
|
||||
}
|
||||
|
||||
if v.Elem().Kind() != reflect.Ptr && decodingNull && v.CanSet() {
|
||||
break
|
||||
}
|
||||
if v.IsNil() {
|
||||
if v.CanSet() {
|
||||
v.Set(reflect.New(v.Type().Elem()))
|
||||
} else {
|
||||
v = reflect.New(v.Type().Elem())
|
||||
}
|
||||
}
|
||||
if v.Type().NumMethod() > 0 {
|
||||
if u, ok := v.Interface().(json.Unmarshaler); ok {
|
||||
return u, nil, reflect.Value{}
|
||||
}
|
||||
if u, ok := v.Interface().(encoding.TextUnmarshaler); ok {
|
||||
return nil, u, reflect.Value{}
|
||||
}
|
||||
}
|
||||
v = v.Elem()
|
||||
}
|
||||
return nil, nil, v
|
||||
}
|
||||
|
||||
// A field represents a single field found in a struct.
|
||||
type field struct {
|
||||
name string
|
||||
nameBytes []byte // []byte(name)
|
||||
equalFold func(s, t []byte) bool // bytes.EqualFold or equivalent
|
||||
|
||||
tag bool
|
||||
index []int
|
||||
typ reflect.Type
|
||||
omitEmpty bool
|
||||
quoted bool
|
||||
}
|
||||
|
||||
func fillField(f field) field {
|
||||
f.nameBytes = []byte(f.name)
|
||||
f.equalFold = foldFunc(f.nameBytes)
|
||||
return f
|
||||
}
|
||||
|
||||
// byName sorts field by name, breaking ties with depth,
|
||||
// then breaking ties with "name came from json tag", then
|
||||
// breaking ties with index sequence.
|
||||
type byName []field
|
||||
|
||||
func (x byName) Len() int { return len(x) }
|
||||
|
||||
func (x byName) Swap(i, j int) { x[i], x[j] = x[j], x[i] }
|
||||
|
||||
func (x byName) Less(i, j int) bool {
|
||||
if x[i].name != x[j].name {
|
||||
return x[i].name < x[j].name
|
||||
}
|
||||
if len(x[i].index) != len(x[j].index) {
|
||||
return len(x[i].index) < len(x[j].index)
|
||||
}
|
||||
if x[i].tag != x[j].tag {
|
||||
return x[i].tag
|
||||
}
|
||||
return byIndex(x).Less(i, j)
|
||||
}
|
||||
|
||||
// byIndex sorts field by index sequence.
|
||||
type byIndex []field
|
||||
|
||||
func (x byIndex) Len() int { return len(x) }
|
||||
|
||||
func (x byIndex) Swap(i, j int) { x[i], x[j] = x[j], x[i] }
|
||||
|
||||
func (x byIndex) Less(i, j int) bool {
|
||||
for k, xik := range x[i].index {
|
||||
if k >= len(x[j].index) {
|
||||
return false
|
||||
}
|
||||
if xik != x[j].index[k] {
|
||||
return xik < x[j].index[k]
|
||||
}
|
||||
}
|
||||
return len(x[i].index) < len(x[j].index)
|
||||
}
|
||||
|
||||
// typeFields returns a list of fields that JSON should recognize for the given type.
|
||||
// The algorithm is breadth-first search over the set of structs to include - the top struct
|
||||
// and then any reachable anonymous structs.
|
||||
func typeFields(t reflect.Type) []field {
|
||||
// Anonymous fields to explore at the current level and the next.
|
||||
current := []field{}
|
||||
next := []field{{typ: t}}
|
||||
|
||||
// Count of queued names for current level and the next.
|
||||
count := map[reflect.Type]int{}
|
||||
nextCount := map[reflect.Type]int{}
|
||||
|
||||
// Types already visited at an earlier level.
|
||||
visited := map[reflect.Type]bool{}
|
||||
|
||||
// Fields found.
|
||||
var fields []field
|
||||
|
||||
for len(next) > 0 {
|
||||
current, next = next, current[:0]
|
||||
count, nextCount = nextCount, map[reflect.Type]int{}
|
||||
|
||||
for _, f := range current {
|
||||
if visited[f.typ] {
|
||||
continue
|
||||
}
|
||||
visited[f.typ] = true
|
||||
|
||||
// Scan f.typ for fields to include.
|
||||
for i := 0; i < f.typ.NumField(); i++ {
|
||||
sf := f.typ.Field(i)
|
||||
if sf.PkgPath != "" { // unexported
|
||||
continue
|
||||
}
|
||||
tag := sf.Tag.Get("json")
|
||||
if tag == "-" {
|
||||
continue
|
||||
}
|
||||
name, opts := parseTag(tag)
|
||||
if !isValidTag(name) {
|
||||
name = ""
|
||||
}
|
||||
index := make([]int, len(f.index)+1)
|
||||
copy(index, f.index)
|
||||
index[len(f.index)] = i
|
||||
|
||||
ft := sf.Type
|
||||
if ft.Name() == "" && ft.Kind() == reflect.Ptr {
|
||||
// Follow pointer.
|
||||
ft = ft.Elem()
|
||||
}
|
||||
|
||||
// Record found field and index sequence.
|
||||
if name != "" || !sf.Anonymous || ft.Kind() != reflect.Struct {
|
||||
tagged := name != ""
|
||||
if name == "" {
|
||||
name = sf.Name
|
||||
}
|
||||
fields = append(fields, fillField(field{
|
||||
name: name,
|
||||
tag: tagged,
|
||||
index: index,
|
||||
typ: ft,
|
||||
omitEmpty: opts.Contains("omitempty"),
|
||||
quoted: opts.Contains("string"),
|
||||
}))
|
||||
if count[f.typ] > 1 {
|
||||
// If there were multiple instances, add a second,
|
||||
// so that the annihilation code will see a duplicate.
|
||||
// It only cares about the distinction between 1 or 2,
|
||||
// so don't bother generating any more copies.
|
||||
fields = append(fields, fields[len(fields)-1])
|
||||
}
|
||||
continue
|
||||
}
|
||||
|
||||
// Record new anonymous struct to explore in next round.
|
||||
nextCount[ft]++
|
||||
if nextCount[ft] == 1 {
|
||||
next = append(next, fillField(field{name: ft.Name(), index: index, typ: ft}))
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
sort.Sort(byName(fields))
|
||||
|
||||
// Delete all fields that are hidden by the Go rules for embedded fields,
|
||||
// except that fields with JSON tags are promoted.
|
||||
|
||||
// The fields are sorted in primary order of name, secondary order
|
||||
// of field index length. Loop over names; for each name, delete
|
||||
// hidden fields by choosing the one dominant field that survives.
|
||||
out := fields[:0]
|
||||
for advance, i := 0, 0; i < len(fields); i += advance {
|
||||
// One iteration per name.
|
||||
// Find the sequence of fields with the name of this first field.
|
||||
fi := fields[i]
|
||||
name := fi.name
|
||||
for advance = 1; i+advance < len(fields); advance++ {
|
||||
fj := fields[i+advance]
|
||||
if fj.name != name {
|
||||
break
|
||||
}
|
||||
}
|
||||
if advance == 1 { // Only one field with this name
|
||||
out = append(out, fi)
|
||||
continue
|
||||
}
|
||||
dominant, ok := dominantField(fields[i : i+advance])
|
||||
if ok {
|
||||
out = append(out, dominant)
|
||||
}
|
||||
}
|
||||
|
||||
fields = out
|
||||
sort.Sort(byIndex(fields))
|
||||
|
||||
return fields
|
||||
}
|
||||
|
||||
// dominantField looks through the fields, all of which are known to
|
||||
// have the same name, to find the single field that dominates the
|
||||
// others using Go's embedding rules, modified by the presence of
|
||||
// JSON tags. If there are multiple top-level fields, the boolean
|
||||
// will be false: This condition is an error in Go and we skip all
|
||||
// the fields.
|
||||
func dominantField(fields []field) (field, bool) {
|
||||
// The fields are sorted in increasing index-length order. The winner
|
||||
// must therefore be one with the shortest index length. Drop all
|
||||
// longer entries, which is easy: just truncate the slice.
|
||||
length := len(fields[0].index)
|
||||
tagged := -1 // Index of first tagged field.
|
||||
for i, f := range fields {
|
||||
if len(f.index) > length {
|
||||
fields = fields[:i]
|
||||
break
|
||||
}
|
||||
if f.tag {
|
||||
if tagged >= 0 {
|
||||
// Multiple tagged fields at the same level: conflict.
|
||||
// Return no field.
|
||||
return field{}, false
|
||||
}
|
||||
tagged = i
|
||||
}
|
||||
}
|
||||
if tagged >= 0 {
|
||||
return fields[tagged], true
|
||||
}
|
||||
// All remaining fields have the same length. If there's more than one,
|
||||
// we have a conflict (two fields named "X" at the same level) and we
|
||||
// return no field.
|
||||
if len(fields) > 1 {
|
||||
return field{}, false
|
||||
}
|
||||
return fields[0], true
|
||||
}
|
||||
|
||||
var fieldCache struct {
|
||||
sync.RWMutex
|
||||
m map[reflect.Type][]field
|
||||
}
|
||||
|
||||
// cachedTypeFields is like typeFields but uses a cache to avoid repeated work.
|
||||
func cachedTypeFields(t reflect.Type) []field {
|
||||
fieldCache.RLock()
|
||||
f := fieldCache.m[t]
|
||||
fieldCache.RUnlock()
|
||||
if f != nil {
|
||||
return f
|
||||
}
|
||||
|
||||
// Compute fields without lock.
|
||||
// Might duplicate effort but won't hold other computations back.
|
||||
f = typeFields(t)
|
||||
if f == nil {
|
||||
f = []field{}
|
||||
}
|
||||
|
||||
fieldCache.Lock()
|
||||
if fieldCache.m == nil {
|
||||
fieldCache.m = map[reflect.Type][]field{}
|
||||
}
|
||||
fieldCache.m[t] = f
|
||||
fieldCache.Unlock()
|
||||
return f
|
||||
}
|
||||
|
||||
func isValidTag(s string) bool {
|
||||
if s == "" {
|
||||
return false
|
||||
}
|
||||
for _, c := range s {
|
||||
switch {
|
||||
case strings.ContainsRune("!#$%&()*+-./:<=>?@[]^_{|}~ ", c):
|
||||
// Backslash and quote chars are reserved, but
|
||||
// otherwise any punctuation chars are allowed
|
||||
// in a tag name.
|
||||
default:
|
||||
if !unicode.IsLetter(c) && !unicode.IsDigit(c) {
|
||||
return false
|
||||
}
|
||||
}
|
||||
}
|
||||
return true
|
||||
}
|
||||
|
||||
const (
|
||||
caseMask = ^byte(0x20) // Mask to ignore case in ASCII.
|
||||
kelvin = '\u212a'
|
||||
smallLongEss = '\u017f'
|
||||
)
|
||||
|
||||
// foldFunc returns one of four different case folding equivalence
|
||||
// functions, from most general (and slow) to fastest:
|
||||
//
|
||||
// 1) bytes.EqualFold, if the key s contains any non-ASCII UTF-8
|
||||
// 2) equalFoldRight, if s contains special folding ASCII ('k', 'K', 's', 'S')
|
||||
// 3) asciiEqualFold, no special, but includes non-letters (including _)
|
||||
// 4) simpleLetterEqualFold, no specials, no non-letters.
|
||||
//
|
||||
// The letters S and K are special because they map to 3 runes, not just 2:
|
||||
// * S maps to s and to U+017F 'ſ' Latin small letter long s
|
||||
// * k maps to K and to U+212A 'K' Kelvin sign
|
||||
// See http://play.golang.org/p/tTxjOc0OGo
|
||||
//
|
||||
// The returned function is specialized for matching against s and
|
||||
// should only be given s. It's not curried for performance reasons.
|
||||
func foldFunc(s []byte) func(s, t []byte) bool {
|
||||
nonLetter := false
|
||||
special := false // special letter
|
||||
for _, b := range s {
|
||||
if b >= utf8.RuneSelf {
|
||||
return bytes.EqualFold
|
||||
}
|
||||
upper := b & caseMask
|
||||
if upper < 'A' || upper > 'Z' {
|
||||
nonLetter = true
|
||||
} else if upper == 'K' || upper == 'S' {
|
||||
// See above for why these letters are special.
|
||||
special = true
|
||||
}
|
||||
}
|
||||
if special {
|
||||
return equalFoldRight
|
||||
}
|
||||
if nonLetter {
|
||||
return asciiEqualFold
|
||||
}
|
||||
return simpleLetterEqualFold
|
||||
}
|
||||
|
||||
// equalFoldRight is a specialization of bytes.EqualFold when s is
|
||||
// known to be all ASCII (including punctuation), but contains an 's',
|
||||
// 'S', 'k', or 'K', requiring a Unicode fold on the bytes in t.
|
||||
// See comments on foldFunc.
|
||||
func equalFoldRight(s, t []byte) bool {
|
||||
for _, sb := range s {
|
||||
if len(t) == 0 {
|
||||
return false
|
||||
}
|
||||
tb := t[0]
|
||||
if tb < utf8.RuneSelf {
|
||||
if sb != tb {
|
||||
sbUpper := sb & caseMask
|
||||
if 'A' <= sbUpper && sbUpper <= 'Z' {
|
||||
if sbUpper != tb&caseMask {
|
||||
return false
|
||||
}
|
||||
} else {
|
||||
return false
|
||||
}
|
||||
}
|
||||
t = t[1:]
|
||||
continue
|
||||
}
|
||||
// sb is ASCII and t is not. t must be either kelvin
|
||||
// sign or long s; sb must be s, S, k, or K.
|
||||
tr, size := utf8.DecodeRune(t)
|
||||
switch sb {
|
||||
case 's', 'S':
|
||||
if tr != smallLongEss {
|
||||
return false
|
||||
}
|
||||
case 'k', 'K':
|
||||
if tr != kelvin {
|
||||
return false
|
||||
}
|
||||
default:
|
||||
return false
|
||||
}
|
||||
t = t[size:]
|
||||
|
||||
}
|
||||
if len(t) > 0 {
|
||||
return false
|
||||
}
|
||||
return true
|
||||
}
|
||||
|
||||
// asciiEqualFold is a specialization of bytes.EqualFold for use when
|
||||
// s is all ASCII (but may contain non-letters) and contains no
|
||||
// special-folding letters.
|
||||
// See comments on foldFunc.
|
||||
func asciiEqualFold(s, t []byte) bool {
|
||||
if len(s) != len(t) {
|
||||
return false
|
||||
}
|
||||
for i, sb := range s {
|
||||
tb := t[i]
|
||||
if sb == tb {
|
||||
continue
|
||||
}
|
||||
if ('a' <= sb && sb <= 'z') || ('A' <= sb && sb <= 'Z') {
|
||||
if sb&caseMask != tb&caseMask {
|
||||
return false
|
||||
}
|
||||
} else {
|
||||
return false
|
||||
}
|
||||
}
|
||||
return true
|
||||
}
|
||||
|
||||
// simpleLetterEqualFold is a specialization of bytes.EqualFold for
|
||||
// use when s is all ASCII letters (no underscores, etc) and also
|
||||
// doesn't contain 'k', 'K', 's', or 'S'.
|
||||
// See comments on foldFunc.
|
||||
func simpleLetterEqualFold(s, t []byte) bool {
|
||||
if len(s) != len(t) {
|
||||
return false
|
||||
}
|
||||
for i, b := range s {
|
||||
if b&caseMask != t[i]&caseMask {
|
||||
return false
|
||||
}
|
||||
}
|
||||
return true
|
||||
}
|
||||
|
||||
// tagOptions is the string following a comma in a struct field's "json"
|
||||
// tag, or the empty string. It does not include the leading comma.
|
||||
type tagOptions string
|
||||
|
||||
// parseTag splits a struct field's json tag into its name and
|
||||
// comma-separated options.
|
||||
func parseTag(tag string) (string, tagOptions) {
|
||||
if idx := strings.Index(tag, ","); idx != -1 {
|
||||
return tag[:idx], tagOptions(tag[idx+1:])
|
||||
}
|
||||
return tag, tagOptions("")
|
||||
}
|
||||
|
||||
// Contains reports whether a comma-separated list of options
|
||||
// contains a particular substr flag. substr must be surrounded by a
|
||||
// string boundary or commas.
|
||||
func (o tagOptions) Contains(optionName string) bool {
|
||||
if len(o) == 0 {
|
||||
return false
|
||||
}
|
||||
s := string(o)
|
||||
for s != "" {
|
||||
var next string
|
||||
i := strings.Index(s, ",")
|
||||
if i >= 0 {
|
||||
s, next = s[:i], s[i+1:]
|
||||
}
|
||||
if s == optionName {
|
||||
return true
|
||||
}
|
||||
s = next
|
||||
}
|
||||
return false
|
||||
}
|
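For context, here is a minimal standalone sketch (not part of the vendored file) of why the fold helpers above single out 'k'/'K' and 's'/'S': the Kelvin sign (U+212A) and the long s (U+017F) are the non-ASCII runes that case-fold to ASCII letters, so only keys containing those letters need the full Unicode fold in equalFoldRight, while everything else can use the cheaper ASCII comparisons.

```go
package main

import (
	"bytes"
	"fmt"
)

func main() {
	// The Kelvin sign (U+212A) folds to 'k'/'K' and the long s (U+017F)
	// folds to 's'/'S' under Unicode simple case folding, which is what
	// bytes.EqualFold implements.
	fmt.Println(bytes.EqualFold([]byte("Kelvin"), []byte("\u212Aelvin")))        // true
	fmt.Println(bytes.EqualFold([]byte("serverS"), []byte("\u017Ferver\u017F"))) // true

	// Plain ASCII keys only need the case-mask comparison used by
	// asciiEqualFold / simpleLetterEqualFold.
	fmt.Println(bytes.EqualFold([]byte("app_name"), []byte("APP_NAME"))) // true
}
```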
277
vendor/github.com/ghodss/yaml/yaml.go
generated
vendored
Normal file
@ -0,0 +1,277 @@
|
||||
package yaml
|
||||
|
||||
import (
|
||||
"bytes"
|
||||
"encoding/json"
|
||||
"fmt"
|
||||
"reflect"
|
||||
"strconv"
|
||||
|
||||
"gopkg.in/yaml.v2"
|
||||
)
|
||||
|
||||
// Marshals the object into JSON then converts JSON to YAML and returns the
|
||||
// YAML.
|
||||
func Marshal(o interface{}) ([]byte, error) {
|
||||
j, err := json.Marshal(o)
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("error marshaling into JSON: %v", err)
|
||||
}
|
||||
|
||||
y, err := JSONToYAML(j)
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("error converting JSON to YAML: %v", err)
|
||||
}
|
||||
|
||||
return y, nil
|
||||
}
|
||||
|
||||
// Converts YAML to JSON then uses JSON to unmarshal into an object.
|
||||
func Unmarshal(y []byte, o interface{}) error {
|
||||
vo := reflect.ValueOf(o)
|
||||
j, err := yamlToJSON(y, &vo)
|
||||
if err != nil {
|
||||
return fmt.Errorf("error converting YAML to JSON: %v", err)
|
||||
}
|
||||
|
||||
err = json.Unmarshal(j, o)
|
||||
if err != nil {
|
||||
return fmt.Errorf("error unmarshaling JSON: %v", err)
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
// Convert JSON to YAML.
|
||||
func JSONToYAML(j []byte) ([]byte, error) {
|
||||
// Convert the JSON to an object.
|
||||
var jsonObj interface{}
|
||||
// We are using yaml.Unmarshal here (instead of json.Unmarshal) because the
|
||||
// Go JSON library doesn't try to pick the right number type (int, float,
|
||||
// etc.) when unmarshalling to interface{}, it just picks float64
|
||||
// universally. go-yaml does go through the effort of picking the right
|
||||
// number type, so we can preserve number type throughout this process.
|
||||
err := yaml.Unmarshal(j, &jsonObj)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
// Marshal this object into YAML.
|
||||
return yaml.Marshal(jsonObj)
|
||||
}
|
||||
|
||||
// Convert YAML to JSON. Since JSON is a subset of YAML, passing JSON through
|
||||
// this method should be a no-op.
|
||||
//
|
||||
// Things YAML can do that are not supported by JSON:
|
||||
// * In YAML you can have binary and null keys in your maps. These are invalid
|
||||
// in JSON. (int and float keys are converted to strings.)
|
||||
// * Binary data in YAML with the !!binary tag is not supported. If you want to
|
||||
// use binary data with this library, encode the data as base64 as usual but do
|
||||
// not use the !!binary tag in your YAML. This will ensure the original base64
|
||||
// encoded data makes it all the way through to the JSON.
|
||||
func YAMLToJSON(y []byte) ([]byte, error) {
|
||||
return yamlToJSON(y, nil)
|
||||
}
|
||||
|
||||
func yamlToJSON(y []byte, jsonTarget *reflect.Value) ([]byte, error) {
|
||||
// Convert the YAML to an object.
|
||||
var yamlObj interface{}
|
||||
err := yaml.Unmarshal(y, &yamlObj)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
// YAML objects are not completely compatible with JSON objects (e.g. you
|
||||
// can have non-string keys in YAML). So, convert the YAML-compatible object
|
||||
// to a JSON-compatible object, failing with an error if irrecoverable
|
||||
// incompatibilities happen along the way.
|
||||
jsonObj, err := convertToJSONableObject(yamlObj, jsonTarget)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
// Convert this object to JSON and return the data.
|
||||
return json.Marshal(jsonObj)
|
||||
}
|
||||
|
||||
func convertToJSONableObject(yamlObj interface{}, jsonTarget *reflect.Value) (interface{}, error) {
|
||||
var err error
|
||||
|
||||
// Resolve jsonTarget to a concrete value (i.e. not a pointer or an
|
||||
// interface). We pass decodingNull as false because we're not actually
|
||||
// decoding into the value, we're just checking if the ultimate target is a
|
||||
// string.
|
||||
if jsonTarget != nil {
|
||||
ju, tu, pv := indirect(*jsonTarget, false)
|
||||
// We have a JSON or Text Unmarshaler at this level, so we can't be trying
|
||||
// to decode into a string.
|
||||
if ju != nil || tu != nil {
|
||||
jsonTarget = nil
|
||||
} else {
|
||||
jsonTarget = &pv
|
||||
}
|
||||
}
|
||||
|
||||
// If yamlObj is a number or a boolean, check if jsonTarget is a string -
|
||||
// if so, coerce. Else return normal.
|
||||
// If yamlObj is a map or array, find the field that each key is
|
||||
// unmarshaling to, and when you recurse pass the reflect.Value for that
|
||||
// field back into this function.
|
||||
switch typedYAMLObj := yamlObj.(type) {
|
||||
case map[interface{}]interface{}:
|
||||
// JSON does not support arbitrary keys in a map, so we must convert
|
||||
// these keys to strings.
|
||||
//
|
||||
// From my reading of go-yaml v2 (specifically the resolve function),
|
||||
// keys can only have the types string, int, int64, float64, binary
|
||||
// (unsupported), or null (unsupported).
|
||||
strMap := make(map[string]interface{})
|
||||
for k, v := range typedYAMLObj {
|
||||
// Resolve the key to a string first.
|
||||
var keyString string
|
||||
switch typedKey := k.(type) {
|
||||
case string:
|
||||
keyString = typedKey
|
||||
case int:
|
||||
keyString = strconv.Itoa(typedKey)
|
||||
case int64:
|
||||
// go-yaml will only return an int64 as a key if the system
|
||||
// architecture is 32-bit and the key's value is between 32-bit
|
||||
// and 64-bit. Otherwise the key type will simply be int.
|
||||
keyString = strconv.FormatInt(typedKey, 10)
|
||||
case float64:
|
||||
// Stolen from go-yaml to use the same conversion to string as
|
||||
// the go-yaml library uses to convert float to string when
|
||||
// Marshaling.
|
||||
s := strconv.FormatFloat(typedKey, 'g', -1, 32)
|
||||
switch s {
|
||||
case "+Inf":
|
||||
s = ".inf"
|
||||
case "-Inf":
|
||||
s = "-.inf"
|
||||
case "NaN":
|
||||
s = ".nan"
|
||||
}
|
||||
keyString = s
|
||||
case bool:
|
||||
if typedKey {
|
||||
keyString = "true"
|
||||
} else {
|
||||
keyString = "false"
|
||||
}
|
||||
default:
|
||||
return nil, fmt.Errorf("Unsupported map key of type: %s, key: %+#v, value: %+#v",
|
||||
reflect.TypeOf(k), k, v)
|
||||
}
|
||||
|
||||
// jsonTarget should be a struct or a map. If it's a struct, find
|
||||
// the field it's going to map to and pass its reflect.Value. If
|
||||
// it's a map, find the element type of the map and pass the
|
||||
// reflect.Value created from that type. If it's neither, just pass
|
||||
// nil - JSON conversion will error for us if it's a real issue.
|
||||
if jsonTarget != nil {
|
||||
t := *jsonTarget
|
||||
if t.Kind() == reflect.Struct {
|
||||
keyBytes := []byte(keyString)
|
||||
// Find the field that the JSON library would use.
|
||||
var f *field
|
||||
fields := cachedTypeFields(t.Type())
|
||||
for i := range fields {
|
||||
ff := &fields[i]
|
||||
if bytes.Equal(ff.nameBytes, keyBytes) {
|
||||
f = ff
|
||||
break
|
||||
}
|
||||
// Do case-insensitive comparison.
|
||||
if f == nil && ff.equalFold(ff.nameBytes, keyBytes) {
|
||||
f = ff
|
||||
}
|
||||
}
|
||||
if f != nil {
|
||||
// Find the reflect.Value of the most preferential
|
||||
// struct field.
|
||||
jtf := t.Field(f.index[0])
|
||||
strMap[keyString], err = convertToJSONableObject(v, &jtf)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
continue
|
||||
}
|
||||
} else if t.Kind() == reflect.Map {
|
||||
// Create a zero value of the map's element type to use as
|
||||
// the JSON target.
|
||||
jtv := reflect.Zero(t.Type().Elem())
|
||||
strMap[keyString], err = convertToJSONableObject(v, &jtv)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
continue
|
||||
}
|
||||
}
|
||||
strMap[keyString], err = convertToJSONableObject(v, nil)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
}
|
||||
return strMap, nil
|
||||
case []interface{}:
|
||||
// We need to recurse into arrays in case there are any
|
||||
// map[interface{}]interface{}'s inside and to convert any
|
||||
// numbers to strings.
|
||||
|
||||
// If jsonTarget is a slice (which it really should be), find the
|
||||
// thing it's going to map to. If it's not a slice, just pass nil
|
||||
// - JSON conversion will error for us if it's a real issue.
|
||||
var jsonSliceElemValue *reflect.Value
|
||||
if jsonTarget != nil {
|
||||
t := *jsonTarget
|
||||
if t.Kind() == reflect.Slice {
|
||||
// By default slices point to nil, but we need a reflect.Value
|
||||
// pointing to a value of the slice type, so we create one here.
|
||||
ev := reflect.Indirect(reflect.New(t.Type().Elem()))
|
||||
jsonSliceElemValue = &ev
|
||||
}
|
||||
}
|
||||
|
||||
// Make and use a new array.
|
||||
arr := make([]interface{}, len(typedYAMLObj))
|
||||
for i, v := range typedYAMLObj {
|
||||
arr[i], err = convertToJSONableObject(v, jsonSliceElemValue)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
}
|
||||
return arr, nil
|
||||
default:
|
||||
// If the target type is a string and the YAML type is a number,
|
||||
// convert the YAML type to a string.
|
||||
if jsonTarget != nil && (*jsonTarget).Kind() == reflect.String {
|
||||
// Based on my reading of go-yaml, it may return int, int64,
|
||||
// float64, or uint64.
|
||||
var s string
|
||||
switch typedVal := typedYAMLObj.(type) {
|
||||
case int:
|
||||
s = strconv.FormatInt(int64(typedVal), 10)
|
||||
case int64:
|
||||
s = strconv.FormatInt(typedVal, 10)
|
||||
case float64:
|
||||
s = strconv.FormatFloat(typedVal, 'g', -1, 32)
|
||||
case uint64:
|
||||
s = strconv.FormatUint(typedVal, 10)
|
||||
case bool:
|
||||
if typedVal {
|
||||
s = "true"
|
||||
} else {
|
||||
s = "false"
|
||||
}
|
||||
}
|
||||
if len(s) > 0 {
|
||||
yamlObj = interface{}(s)
|
||||
}
|
||||
}
|
||||
return yamlObj, nil
|
||||
}
|
||||
|
||||
return nil, nil
|
||||
}
|
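To make the conversion flow above concrete, here is a short usage sketch (not part of the vendored file; the Config type is hypothetical): Unmarshal goes YAML to JSON to object, so the struct's json tags and the json field-matching machinery apply, while Marshal goes the other way.

```go
package main

import (
	"fmt"

	"github.com/ghodss/yaml"
)

// Config is a hypothetical type used only to illustrate the API above.
type Config struct {
	Name     string `json:"name"`
	Replicas int    `json:"replicas"`
}

func main() {
	// Unmarshal: YAML -> JSON -> object, so the json tags are honored.
	var c Config
	if err := yaml.Unmarshal([]byte("name: web\nreplicas: 3\n"), &c); err != nil {
		panic(err)
	}
	fmt.Printf("%+v\n", c) // {Name:web Replicas:3}

	// Marshal: object -> JSON -> YAML.
	out, err := yaml.Marshal(c)
	if err != nil {
		panic(err)
	}
	fmt.Print(string(out)) // name: web\nreplicas: 3

	// YAMLToJSON converts a whole document directly.
	j, err := yaml.YAMLToJSON([]byte("t: a\n"))
	if err != nil {
		panic(err)
	}
	fmt.Println(string(j)) // {"t":"a"}
}
```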
287
vendor/github.com/ghodss/yaml/yaml_test.go
generated
vendored
Normal file
@ -0,0 +1,287 @@
|
||||
package yaml
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"math"
|
||||
"reflect"
|
||||
"strconv"
|
||||
"testing"
|
||||
)
|
||||
|
||||
type MarshalTest struct {
|
||||
A string
|
||||
B int64
|
||||
// Would like to test float64, but it's not supported in go-yaml.
|
||||
// (See https://github.com/go-yaml/yaml/issues/83.)
|
||||
C float32
|
||||
}
|
||||
|
||||
func TestMarshal(t *testing.T) {
|
||||
f32String := strconv.FormatFloat(math.MaxFloat32, 'g', -1, 32)
|
||||
s := MarshalTest{"a", math.MaxInt64, math.MaxFloat32}
|
||||
e := []byte(fmt.Sprintf("A: a\nB: %d\nC: %s\n", math.MaxInt64, f32String))
|
||||
|
||||
y, err := Marshal(s)
|
||||
if err != nil {
|
||||
t.Errorf("error marshaling YAML: %v", err)
|
||||
}
|
||||
|
||||
if !reflect.DeepEqual(y, e) {
|
||||
t.Errorf("marshal YAML was unsuccessful, expected: %#v, got: %#v",
|
||||
string(e), string(y))
|
||||
}
|
||||
}
|
||||
|
||||
type UnmarshalString struct {
|
||||
A string
|
||||
True string
|
||||
}
|
||||
|
||||
type UnmarshalStringMap struct {
|
||||
A map[string]string
|
||||
}
|
||||
|
||||
type UnmarshalNestedString struct {
|
||||
A NestedString
|
||||
}
|
||||
|
||||
type NestedString struct {
|
||||
A string
|
||||
}
|
||||
|
||||
type UnmarshalSlice struct {
|
||||
A []NestedSlice
|
||||
}
|
||||
|
||||
type NestedSlice struct {
|
||||
B string
|
||||
C *string
|
||||
}
|
||||
|
||||
func TestUnmarshal(t *testing.T) {
|
||||
y := []byte("a: 1")
|
||||
s1 := UnmarshalString{}
|
||||
e1 := UnmarshalString{A: "1"}
|
||||
unmarshal(t, y, &s1, &e1)
|
||||
|
||||
y = []byte("a: true")
|
||||
s1 = UnmarshalString{}
|
||||
e1 = UnmarshalString{A: "true"}
|
||||
unmarshal(t, y, &s1, &e1)
|
||||
|
||||
y = []byte("true: 1")
|
||||
s1 = UnmarshalString{}
|
||||
e1 = UnmarshalString{True: "1"}
|
||||
unmarshal(t, y, &s1, &e1)
|
||||
|
||||
y = []byte("a:\n a: 1")
|
||||
s2 := UnmarshalNestedString{}
|
||||
e2 := UnmarshalNestedString{NestedString{"1"}}
|
||||
unmarshal(t, y, &s2, &e2)
|
||||
|
||||
y = []byte("a:\n - b: abc\n c: def\n - b: 123\n c: 456\n")
|
||||
s3 := UnmarshalSlice{}
|
||||
e3 := UnmarshalSlice{[]NestedSlice{NestedSlice{"abc", strPtr("def")}, NestedSlice{"123", strPtr("456")}}}
|
||||
unmarshal(t, y, &s3, &e3)
|
||||
|
||||
y = []byte("a:\n b: 1")
|
||||
s4 := UnmarshalStringMap{}
|
||||
e4 := UnmarshalStringMap{map[string]string{"b": "1"}}
|
||||
unmarshal(t, y, &s4, &e4)
|
||||
|
||||
y = []byte(`
|
||||
a:
|
||||
name: TestA
|
||||
b:
|
||||
name: TestB
|
||||
`)
|
||||
type NamedThing struct {
|
||||
Name string `json:"name"`
|
||||
}
|
||||
s5 := map[string]*NamedThing{}
|
||||
e5 := map[string]*NamedThing{
|
||||
"a": &NamedThing{Name: "TestA"},
|
||||
"b": &NamedThing{Name: "TestB"},
|
||||
}
|
||||
unmarshal(t, y, &s5, &e5)
|
||||
}
|
||||
|
||||
func unmarshal(t *testing.T, y []byte, s, e interface{}) {
|
||||
err := Unmarshal(y, s)
|
||||
if err != nil {
|
||||
t.Errorf("error unmarshaling YAML: %v", err)
|
||||
}
|
||||
|
||||
if !reflect.DeepEqual(s, e) {
|
||||
t.Errorf("unmarshal YAML was unsuccessful, expected: %+#v, got: %+#v",
|
||||
e, s)
|
||||
}
|
||||
}
|
||||
|
||||
type Case struct {
|
||||
input string
|
||||
output string
|
||||
// By default we test that reversing the output == input. But if there is a
|
||||
// difference in the reversed output, you can optionally specify it here.
|
||||
reverse *string
|
||||
}
|
||||
|
||||
type RunType int
|
||||
|
||||
const (
|
||||
RunTypeJSONToYAML RunType = iota
|
||||
RunTypeYAMLToJSON
|
||||
)
|
||||
|
||||
func TestJSONToYAML(t *testing.T) {
|
||||
cases := []Case{
|
||||
{
|
||||
`{"t":"a"}`,
|
||||
"t: a\n",
|
||||
nil,
|
||||
}, {
|
||||
`{"t":null}`,
|
||||
"t: null\n",
|
||||
nil,
|
||||
},
|
||||
}
|
||||
|
||||
runCases(t, RunTypeJSONToYAML, cases)
|
||||
}
|
||||
|
||||
func TestYAMLToJSON(t *testing.T) {
|
||||
cases := []Case{
|
||||
{
|
||||
"t: a\n",
|
||||
`{"t":"a"}`,
|
||||
nil,
|
||||
}, {
|
||||
"t: \n",
|
||||
`{"t":null}`,
|
||||
strPtr("t: null\n"),
|
||||
}, {
|
||||
"t: null\n",
|
||||
`{"t":null}`,
|
||||
nil,
|
||||
}, {
|
||||
"1: a\n",
|
||||
`{"1":"a"}`,
|
||||
strPtr("\"1\": a\n"),
|
||||
}, {
|
||||
"1000000000000000000000000000000000000: a\n",
|
||||
`{"1e+36":"a"}`,
|
||||
strPtr("\"1e+36\": a\n"),
|
||||
}, {
|
||||
"1e+36: a\n",
|
||||
`{"1e+36":"a"}`,
|
||||
strPtr("\"1e+36\": a\n"),
|
||||
}, {
|
||||
"\"1e+36\": a\n",
|
||||
`{"1e+36":"a"}`,
|
||||
nil,
|
||||
}, {
|
||||
"\"1.2\": a\n",
|
||||
`{"1.2":"a"}`,
|
||||
nil,
|
||||
}, {
|
||||
"- t: a\n",
|
||||
`[{"t":"a"}]`,
|
||||
nil,
|
||||
}, {
|
||||
"- t: a\n" +
|
||||
"- t:\n" +
|
||||
" b: 1\n" +
|
||||
" c: 2\n",
|
||||
`[{"t":"a"},{"t":{"b":1,"c":2}}]`,
|
||||
nil,
|
||||
}, {
|
||||
`[{t: a}, {t: {b: 1, c: 2}}]`,
|
||||
`[{"t":"a"},{"t":{"b":1,"c":2}}]`,
|
||||
strPtr("- t: a\n" +
|
||||
"- t:\n" +
|
||||
" b: 1\n" +
|
||||
" c: 2\n"),
|
||||
}, {
|
||||
"- t: \n",
|
||||
`[{"t":null}]`,
|
||||
strPtr("- t: null\n"),
|
||||
}, {
|
||||
"- t: null\n",
|
||||
`[{"t":null}]`,
|
||||
nil,
|
||||
},
|
||||
}
|
||||
|
||||
// Cases that should produce errors.
|
||||
_ = []Case{
|
||||
{
|
||||
"~: a",
|
||||
`{"null":"a"}`,
|
||||
nil,
|
||||
}, {
|
||||
"a: !!binary gIGC\n",
|
||||
"{\"a\":\"\x80\x81\x82\"}",
|
||||
nil,
|
||||
},
|
||||
}
|
||||
|
||||
runCases(t, RunTypeYAMLToJSON, cases)
|
||||
}
|
||||
|
||||
func runCases(t *testing.T, runType RunType, cases []Case) {
|
||||
var f func([]byte) ([]byte, error)
|
||||
var invF func([]byte) ([]byte, error)
|
||||
var msg string
|
||||
var invMsg string
|
||||
if runType == RunTypeJSONToYAML {
|
||||
f = JSONToYAML
|
||||
invF = YAMLToJSON
|
||||
msg = "JSON to YAML"
|
||||
invMsg = "YAML back to JSON"
|
||||
} else {
|
||||
f = YAMLToJSON
|
||||
invF = JSONToYAML
|
||||
msg = "YAML to JSON"
|
||||
invMsg = "JSON back to YAML"
|
||||
}
|
||||
|
||||
for _, c := range cases {
|
||||
// Convert the string.
|
||||
t.Logf("converting %s\n", c.input)
|
||||
output, err := f([]byte(c.input))
|
||||
if err != nil {
|
||||
t.Errorf("Failed to convert %s, input: `%s`, err: %v", msg, c.input, err)
|
||||
}
|
||||
|
||||
// Check it against the expected output.
|
||||
if string(output) != c.output {
|
||||
t.Errorf("Failed to convert %s, input: `%s`, expected `%s`, got `%s`",
|
||||
msg, c.input, c.output, string(output))
|
||||
}
|
||||
|
||||
// Set the string that we will compare the reversed output to.
|
||||
reverse := c.input
|
||||
// If a special reverse string was specified, use that instead.
|
||||
if c.reverse != nil {
|
||||
reverse = *c.reverse
|
||||
}
|
||||
|
||||
// Reverse the output.
|
||||
input, err := invF(output)
|
||||
if err != nil {
|
||||
t.Errorf("Failed to convert %s, input: `%s`, err: %v", invMsg, string(output), err)
|
||||
}
|
||||
|
||||
// Check the reverse is equal to the input (or to *c.reverse).
|
||||
if string(input) != reverse {
|
||||
t.Errorf("Failed to convert %s, input: `%s`, expected `%s`, got `%s`",
|
||||
invMsg, string(output), reverse, string(input))
|
||||
}
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
// To be able to easily fill in the *Case.reverse string above.
|
||||
func strPtr(s string) *string {
|
||||
return &s
|
||||
}
|
13
vendor/github.com/google/gofuzz/.travis.yml
generated
vendored
Normal file
@ -0,0 +1,13 @@
|
||||
language: go
|
||||
|
||||
go:
|
||||
- 1.4
|
||||
- 1.3
|
||||
- 1.2
|
||||
- tip
|
||||
|
||||
install:
|
||||
- if ! go get code.google.com/p/go.tools/cmd/cover; then go get golang.org/x/tools/cmd/cover; fi
|
||||
|
||||
script:
|
||||
- go test -cover
|
67
vendor/github.com/google/gofuzz/CONTRIBUTING.md
generated
vendored
Normal file
@ -0,0 +1,67 @@
|
||||
# How to contribute #
|
||||
|
||||
We'd love to accept your patches and contributions to this project. There are
|
||||
just a few small guidelines you need to follow.
|
||||
|
||||
|
||||
## Contributor License Agreement ##
|
||||
|
||||
Contributions to any Google project must be accompanied by a Contributor
|
||||
License Agreement. This is not a copyright **assignment**, it simply gives
|
||||
Google permission to use and redistribute your contributions as part of the
|
||||
project.
|
||||
|
||||
* If you are an individual writing original source code and you're sure you
|
||||
own the intellectual property, then you'll need to sign an [individual
|
||||
CLA][].
|
||||
|
||||
* If you work for a company that wants to allow you to contribute your work,
|
||||
then you'll need to sign a [corporate CLA][].
|
||||
|
||||
You generally only need to submit a CLA once, so if you've already submitted
|
||||
one (even if it was for a different project), you probably don't need to do it
|
||||
again.
|
||||
|
||||
[individual CLA]: https://developers.google.com/open-source/cla/individual
|
||||
[corporate CLA]: https://developers.google.com/open-source/cla/corporate
|
||||
|
||||
|
||||
## Submitting a patch ##
|
||||
|
||||
1. It's generally best to start by opening a new issue describing the bug or
|
||||
feature you're intending to fix. Even if you think it's relatively minor,
|
||||
it's helpful to know what people are working on. Mention in the initial
|
||||
issue that you are planning to work on that bug or feature so that it can
|
||||
be assigned to you.
|
||||
|
||||
1. Follow the normal process of [forking][] the project, and setup a new
|
||||
branch to work in. It's important that each group of changes be done in
|
||||
separate branches in order to ensure that a pull request only includes the
|
||||
commits related to that bug or feature.
|
||||
|
||||
1. Go makes it very simple to ensure properly formatted code, so always run
|
||||
`go fmt` on your code before committing it. You should also run
|
||||
[golint][] over your code. As noted in the [golint readme][], it's not
|
||||
strictly necessary that your code be completely "lint-free", but this will
|
||||
help you find common style issues.
|
||||
|
||||
1. Any significant changes should almost always be accompanied by tests. The
|
||||
project already has good test coverage, so look at some of the existing
|
||||
tests if you're unsure how to go about it. [gocov][] and [gocov-html][]
|
||||
are invaluable tools for seeing which parts of your code aren't being
|
||||
exercised by your tests.
|
||||
|
||||
1. Do your best to have [well-formed commit messages][] for each change.
|
||||
This provides consistency throughout the project, and ensures that commit
|
||||
messages are able to be formatted properly by various git tools.
|
||||
|
||||
1. Finally, push the commits to your fork and submit a [pull request][].
|
||||
|
||||
[forking]: https://help.github.com/articles/fork-a-repo
|
||||
[golint]: https://github.com/golang/lint
|
||||
[golint readme]: https://github.com/golang/lint/blob/master/README
|
||||
[gocov]: https://github.com/axw/gocov
|
||||
[gocov-html]: https://github.com/matm/gocov-html
|
||||
[well-formed commit messages]: http://tbaggery.com/2008/04/19/a-note-about-git-commit-messages.html
|
||||
[squash]: http://git-scm.com/book/en/Git-Tools-Rewriting-History#Squashing-Commits
|
||||
[pull request]: https://help.github.com/articles/creating-a-pull-request
|
202
vendor/github.com/google/gofuzz/LICENSE
generated
vendored
Normal file
@ -0,0 +1,202 @@
|
||||
|
||||
Apache License
|
||||
Version 2.0, January 2004
|
||||
http://www.apache.org/licenses/
|
||||
|
||||
TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
|
||||
|
||||
1. Definitions.
|
||||
|
||||
"License" shall mean the terms and conditions for use, reproduction,
|
||||
and distribution as defined by Sections 1 through 9 of this document.
|
||||
|
||||
"Licensor" shall mean the copyright owner or entity authorized by
|
||||
the copyright owner that is granting the License.
|
||||
|
||||
"Legal Entity" shall mean the union of the acting entity and all
|
||||
other entities that control, are controlled by, or are under common
|
||||
control with that entity. For the purposes of this definition,
|
||||
"control" means (i) the power, direct or indirect, to cause the
|
||||
direction or management of such entity, whether by contract or
|
||||
otherwise, or (ii) ownership of fifty percent (50%) or more of the
|
||||
outstanding shares, or (iii) beneficial ownership of such entity.
|
||||
|
||||
"You" (or "Your") shall mean an individual or Legal Entity
|
||||
exercising permissions granted by this License.
|
||||
|
||||
"Source" form shall mean the preferred form for making modifications,
|
||||
including but not limited to software source code, documentation
|
||||
source, and configuration files.
|
||||
|
||||
"Object" form shall mean any form resulting from mechanical
|
||||
transformation or translation of a Source form, including but
|
||||
not limited to compiled object code, generated documentation,
|
||||
and conversions to other media types.
|
||||
|
||||
"Work" shall mean the work of authorship, whether in Source or
|
||||
Object form, made available under the License, as indicated by a
|
||||
copyright notice that is included in or attached to the work
|
||||
(an example is provided in the Appendix below).
|
||||
|
||||
"Derivative Works" shall mean any work, whether in Source or Object
|
||||
form, that is based on (or derived from) the Work and for which the
|
||||
editorial revisions, annotations, elaborations, or other modifications
|
||||
represent, as a whole, an original work of authorship. For the purposes
|
||||
of this License, Derivative Works shall not include works that remain
|
||||
separable from, or merely link (or bind by name) to the interfaces of,
|
||||
the Work and Derivative Works thereof.
|
||||
|
||||
"Contribution" shall mean any work of authorship, including
|
||||
the original version of the Work and any modifications or additions
|
||||
to that Work or Derivative Works thereof, that is intentionally
|
||||
submitted to Licensor for inclusion in the Work by the copyright owner
|
||||
or by an individual or Legal Entity authorized to submit on behalf of
|
||||
the copyright owner. For the purposes of this definition, "submitted"
|
||||
means any form of electronic, verbal, or written communication sent
|
||||
to the Licensor or its representatives, including but not limited to
|
||||
communication on electronic mailing lists, source code control systems,
|
||||
and issue tracking systems that are managed by, or on behalf of, the
|
||||
Licensor for the purpose of discussing and improving the Work, but
|
||||
excluding communication that is conspicuously marked or otherwise
|
||||
designated in writing by the copyright owner as "Not a Contribution."
|
||||
|
||||
"Contributor" shall mean Licensor and any individual or Legal Entity
|
||||
on behalf of whom a Contribution has been received by Licensor and
|
||||
subsequently incorporated within the Work.
|
||||
|
||||
2. Grant of Copyright License. Subject to the terms and conditions of
|
||||
this License, each Contributor hereby grants to You a perpetual,
|
||||
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
|
||||
copyright license to reproduce, prepare Derivative Works of,
|
||||
publicly display, publicly perform, sublicense, and distribute the
|
||||
Work and such Derivative Works in Source or Object form.
|
||||
|
||||
3. Grant of Patent License. Subject to the terms and conditions of
|
||||
this License, each Contributor hereby grants to You a perpetual,
|
||||
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
|
||||
(except as stated in this section) patent license to make, have made,
|
||||
use, offer to sell, sell, import, and otherwise transfer the Work,
|
||||
where such license applies only to those patent claims licensable
|
||||
by such Contributor that are necessarily infringed by their
|
||||
Contribution(s) alone or by combination of their Contribution(s)
|
||||
with the Work to which such Contribution(s) was submitted. If You
|
||||
institute patent litigation against any entity (including a
|
||||
cross-claim or counterclaim in a lawsuit) alleging that the Work
|
||||
or a Contribution incorporated within the Work constitutes direct
|
||||
or contributory patent infringement, then any patent licenses
|
||||
granted to You under this License for that Work shall terminate
|
||||
as of the date such litigation is filed.
|
||||
|
||||
4. Redistribution. You may reproduce and distribute copies of the
|
||||
Work or Derivative Works thereof in any medium, with or without
|
||||
modifications, and in Source or Object form, provided that You
|
||||
meet the following conditions:
|
||||
|
||||
(a) You must give any other recipients of the Work or
|
||||
Derivative Works a copy of this License; and
|
||||
|
||||
(b) You must cause any modified files to carry prominent notices
|
||||
stating that You changed the files; and
|
||||
|
||||
(c) You must retain, in the Source form of any Derivative Works
|
||||
that You distribute, all copyright, patent, trademark, and
|
||||
attribution notices from the Source form of the Work,
|
||||
excluding those notices that do not pertain to any part of
|
||||
the Derivative Works; and
|
||||
|
||||
(d) If the Work includes a "NOTICE" text file as part of its
|
||||
distribution, then any Derivative Works that You distribute must
|
||||
include a readable copy of the attribution notices contained
|
||||
within such NOTICE file, excluding those notices that do not
|
||||
pertain to any part of the Derivative Works, in at least one
|
||||
of the following places: within a NOTICE text file distributed
|
||||
as part of the Derivative Works; within the Source form or
|
||||
documentation, if provided along with the Derivative Works; or,
|
||||
within a display generated by the Derivative Works, if and
|
||||
wherever such third-party notices normally appear. The contents
|
||||
of the NOTICE file are for informational purposes only and
|
||||
do not modify the License. You may add Your own attribution
|
||||
notices within Derivative Works that You distribute, alongside
|
||||
or as an addendum to the NOTICE text from the Work, provided
|
||||
that such additional attribution notices cannot be construed
|
||||
as modifying the License.
|
||||
|
||||
You may add Your own copyright statement to Your modifications and
|
||||
may provide additional or different license terms and conditions
|
||||
for use, reproduction, or distribution of Your modifications, or
|
||||
for any such Derivative Works as a whole, provided Your use,
|
||||
reproduction, and distribution of the Work otherwise complies with
|
||||
the conditions stated in this License.
|
||||
|
||||
5. Submission of Contributions. Unless You explicitly state otherwise,
|
||||
any Contribution intentionally submitted for inclusion in the Work
|
||||
by You to the Licensor shall be under the terms and conditions of
|
||||
this License, without any additional terms or conditions.
|
||||
Notwithstanding the above, nothing herein shall supersede or modify
|
||||
the terms of any separate license agreement you may have executed
|
||||
with Licensor regarding such Contributions.
|
||||
|
||||
6. Trademarks. This License does not grant permission to use the trade
|
||||
names, trademarks, service marks, or product names of the Licensor,
|
||||
except as required for reasonable and customary use in describing the
|
||||
origin of the Work and reproducing the content of the NOTICE file.
|
||||
|
||||
7. Disclaimer of Warranty. Unless required by applicable law or
|
||||
agreed to in writing, Licensor provides the Work (and each
|
||||
Contributor provides its Contributions) on an "AS IS" BASIS,
|
||||
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
|
||||
implied, including, without limitation, any warranties or conditions
|
||||
of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
|
||||
PARTICULAR PURPOSE. You are solely responsible for determining the
|
||||
appropriateness of using or redistributing the Work and assume any
|
||||
risks associated with Your exercise of permissions under this License.
|
||||
|
||||
8. Limitation of Liability. In no event and under no legal theory,
|
||||
whether in tort (including negligence), contract, or otherwise,
|
||||
unless required by applicable law (such as deliberate and grossly
|
||||
negligent acts) or agreed to in writing, shall any Contributor be
|
||||
liable to You for damages, including any direct, indirect, special,
|
||||
incidental, or consequential damages of any character arising as a
|
||||
result of this License or out of the use or inability to use the
|
||||
Work (including but not limited to damages for loss of goodwill,
|
||||
work stoppage, computer failure or malfunction, or any and all
|
||||
other commercial damages or losses), even if such Contributor
|
||||
has been advised of the possibility of such damages.
|
||||
|
||||
9. Accepting Warranty or Additional Liability. While redistributing
|
||||
the Work or Derivative Works thereof, You may choose to offer,
|
||||
and charge a fee for, acceptance of support, warranty, indemnity,
|
||||
or other liability obligations and/or rights consistent with this
|
||||
License. However, in accepting such obligations, You may act only
|
||||
on Your own behalf and on Your sole responsibility, not on behalf
|
||||
of any other Contributor, and only if You agree to indemnify,
|
||||
defend, and hold each Contributor harmless for any liability
|
||||
incurred by, or claims asserted against, such Contributor by reason
|
||||
of your accepting any such warranty or additional liability.
|
||||
|
||||
END OF TERMS AND CONDITIONS
|
||||
|
||||
APPENDIX: How to apply the Apache License to your work.
|
||||
|
||||
To apply the Apache License to your work, attach the following
|
||||
boilerplate notice, with the fields enclosed by brackets "[]"
|
||||
replaced with your own identifying information. (Don't include
|
||||
the brackets!) The text should be enclosed in the appropriate
|
||||
comment syntax for the file format. We also recommend that a
|
||||
file or class name and description of purpose be included on the
|
||||
same "printed page" as the copyright notice for easier
|
||||
identification within third-party archives.
|
||||
|
||||
Copyright [yyyy] [name of copyright owner]
|
||||
|
||||
Licensed under the Apache License, Version 2.0 (the "License");
|
||||
you may not use this file except in compliance with the License.
|
||||
You may obtain a copy of the License at
|
||||
|
||||
http://www.apache.org/licenses/LICENSE-2.0
|
||||
|
||||
Unless required by applicable law or agreed to in writing, software
|
||||
distributed under the License is distributed on an "AS IS" BASIS,
|
||||
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
See the License for the specific language governing permissions and
|
||||
limitations under the License.
|
71
vendor/github.com/google/gofuzz/README.md
generated
vendored
Normal file
@ -0,0 +1,71 @@
|
||||
gofuzz
|
||||
======
|
||||
|
||||
gofuzz is a library for populating go objects with random values.
|
||||
|
||||
[GoDoc](https://godoc.org/github.com/google/gofuzz)
|
||||
[Build Status](https://travis-ci.org/google/gofuzz)
|
||||
|
||||
This is useful for testing:
|
||||
|
||||
* Do your project's objects really serialize/unserialize correctly in all cases?
|
||||
* Is there an incorrectly formatted object that will cause your project to panic?
|
||||
|
||||
Import with ```import "github.com/google/gofuzz"```
|
||||
|
||||
You can use it on single variables:
|
||||
```go
|
||||
f := fuzz.New()
|
||||
var myInt int
|
||||
f.Fuzz(&myInt) // myInt gets a random value.
|
||||
```
|
||||
|
||||
You can use it on maps:
|
||||
```go
|
||||
f := fuzz.New().NilChance(0).NumElements(1, 1)
|
||||
var myMap map[ComplexKeyType]string
|
||||
f.Fuzz(&myMap) // myMap will have exactly one element.
|
||||
```
|
||||
|
||||
Customize the chance of getting a nil pointer:
|
||||
```go
|
||||
f := fuzz.New().NilChance(.5)
|
||||
var fancyStruct struct {
|
||||
A, B, C, D *string
|
||||
}
|
||||
f.Fuzz(&fancyStruct) // About half the pointers should be set.
|
||||
```
|
||||
|
||||
You can even customize the randomization completely if needed:
|
||||
```go
|
||||
type MyEnum string
|
||||
const (
|
||||
A MyEnum = "A"
|
||||
B MyEnum = "B"
|
||||
)
|
||||
type MyInfo struct {
|
||||
Type MyEnum
|
||||
AInfo *string
|
||||
BInfo *string
|
||||
}
|
||||
|
||||
f := fuzz.New().NilChance(0).Funcs(
|
||||
func(e *MyInfo, c fuzz.Continue) {
|
||||
switch c.Intn(2) {
|
||||
case 0:
|
||||
e.Type = A
|
||||
c.Fuzz(&e.AInfo)
|
||||
case 1:
|
||||
e.Type = B
|
||||
c.Fuzz(&e.BInfo)
|
||||
}
|
||||
},
|
||||
)
|
||||
|
||||
var myObject MyInfo
|
||||
f.Fuzz(&myObject) // Type will correspond to whether A or B info is set.
|
||||
```
|
||||
|
||||
See more examples in ```example_test.go```.
|
||||
|
||||
Happy testing!
|
18
vendor/github.com/google/gofuzz/doc.go
generated
vendored
Normal file
@ -0,0 +1,18 @@
|
||||
/*
|
||||
Copyright 2014 Google Inc. All rights reserved.
|
||||
|
||||
Licensed under the Apache License, Version 2.0 (the "License");
|
||||
you may not use this file except in compliance with the License.
|
||||
You may obtain a copy of the License at
|
||||
|
||||
http://www.apache.org/licenses/LICENSE-2.0
|
||||
|
||||
Unless required by applicable law or agreed to in writing, software
|
||||
distributed under the License is distributed on an "AS IS" BASIS,
|
||||
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
See the License for the specific language governing permissions and
|
||||
limitations under the License.
|
||||
*/
|
||||
|
||||
// Package fuzz is a library for populating go objects with random values.
|
||||
package fuzz
|
225
vendor/github.com/google/gofuzz/example_test.go
generated
vendored
Normal file
@ -0,0 +1,225 @@
|
||||
/*
|
||||
Copyright 2014 Google Inc. All rights reserved.
|
||||
|
||||
Licensed under the Apache License, Version 2.0 (the "License");
|
||||
you may not use this file except in compliance with the License.
|
||||
You may obtain a copy of the License at
|
||||
|
||||
http://www.apache.org/licenses/LICENSE-2.0
|
||||
|
||||
Unless required by applicable law or agreed to in writing, software
|
||||
distributed under the License is distributed on an "AS IS" BASIS,
|
||||
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
See the License for the specific language governing permissions and
|
||||
limitations under the License.
|
||||
*/
|
||||
|
||||
package fuzz_test
|
||||
|
||||
import (
|
||||
"encoding/json"
|
||||
"fmt"
|
||||
"math/rand"
|
||||
|
||||
"github.com/google/gofuzz"
|
||||
)
|
||||
|
||||
func ExampleSimple() {
|
||||
type MyType struct {
|
||||
A string
|
||||
B string
|
||||
C int
|
||||
D struct {
|
||||
E float64
|
||||
}
|
||||
}
|
||||
|
||||
f := fuzz.New()
|
||||
object := MyType{}
|
||||
|
||||
uniqueObjects := map[MyType]int{}
|
||||
|
||||
for i := 0; i < 1000; i++ {
|
||||
f.Fuzz(&object)
|
||||
uniqueObjects[object]++
|
||||
}
|
||||
fmt.Printf("Got %v unique objects.\n", len(uniqueObjects))
|
||||
// Output:
|
||||
// Got 1000 unique objects.
|
||||
}
|
||||
|
||||
func ExampleCustom() {
|
||||
type MyType struct {
|
||||
A int
|
||||
B string
|
||||
}
|
||||
|
||||
counter := 0
|
||||
f := fuzz.New().Funcs(
|
||||
func(i *int, c fuzz.Continue) {
|
||||
*i = counter
|
||||
counter++
|
||||
},
|
||||
)
|
||||
object := MyType{}
|
||||
|
||||
uniqueObjects := map[MyType]int{}
|
||||
|
||||
for i := 0; i < 100; i++ {
|
||||
f.Fuzz(&object)
|
||||
if object.A != i {
|
||||
fmt.Printf("Unexpected value: %#v\n", object)
|
||||
}
|
||||
uniqueObjects[object]++
|
||||
}
|
||||
fmt.Printf("Got %v unique objects.\n", len(uniqueObjects))
|
||||
// Output:
|
||||
// Got 100 unique objects.
|
||||
}
|
||||
|
||||
func ExampleComplex() {
|
||||
type OtherType struct {
|
||||
A string
|
||||
B string
|
||||
}
|
||||
type MyType struct {
|
||||
Pointer *OtherType
|
||||
Map map[string]OtherType
|
||||
PointerMap *map[string]OtherType
|
||||
Slice []OtherType
|
||||
SlicePointer []*OtherType
|
||||
PointerSlicePointer *[]*OtherType
|
||||
}
|
||||
|
||||
f := fuzz.New().RandSource(rand.NewSource(0)).NilChance(0).NumElements(1, 1).Funcs(
|
||||
func(o *OtherType, c fuzz.Continue) {
|
||||
o.A = "Foo"
|
||||
o.B = "Bar"
|
||||
},
|
||||
func(op **OtherType, c fuzz.Continue) {
|
||||
*op = &OtherType{"A", "B"}
|
||||
},
|
||||
func(m map[string]OtherType, c fuzz.Continue) {
|
||||
m["Works Because"] = OtherType{
|
||||
"Fuzzer",
|
||||
"Preallocated",
|
||||
}
|
||||
},
|
||||
)
|
||||
object := MyType{}
|
||||
f.Fuzz(&object)
|
||||
bytes, err := json.MarshalIndent(&object, "", " ")
|
||||
if err != nil {
|
||||
fmt.Printf("error: %v\n", err)
|
||||
}
|
||||
fmt.Printf("%s\n", string(bytes))
|
||||
// Output:
|
||||
// {
|
||||
// "Pointer": {
|
||||
// "A": "A",
|
||||
// "B": "B"
|
||||
// },
|
||||
// "Map": {
|
||||
// "Works Because": {
|
||||
// "A": "Fuzzer",
|
||||
// "B": "Preallocated"
|
||||
// }
|
||||
// },
|
||||
// "PointerMap": {
|
||||
// "Works Because": {
|
||||
// "A": "Fuzzer",
|
||||
// "B": "Preallocated"
|
||||
// }
|
||||
// },
|
||||
// "Slice": [
|
||||
// {
|
||||
// "A": "Foo",
|
||||
// "B": "Bar"
|
||||
// }
|
||||
// ],
|
||||
// "SlicePointer": [
|
||||
// {
|
||||
// "A": "A",
|
||||
// "B": "B"
|
||||
// }
|
||||
// ],
|
||||
// "PointerSlicePointer": [
|
||||
// {
|
||||
// "A": "A",
|
||||
// "B": "B"
|
||||
// }
|
||||
// ]
|
||||
// }
|
||||
}
|
||||
|
||||
func ExampleMap() {
|
||||
f := fuzz.New().NilChance(0).NumElements(1, 1)
|
||||
var myMap map[struct{ A, B, C int }]string
|
||||
f.Fuzz(&myMap)
|
||||
fmt.Printf("myMap has %v element(s).\n", len(myMap))
|
||||
// Output:
|
||||
// myMap has 1 element(s).
|
||||
}
|
||||
|
||||
func ExampleSingle() {
|
||||
f := fuzz.New()
|
||||
var i int
|
||||
f.Fuzz(&i)
|
||||
|
||||
// Technically, we'd expect this to fail one out of 2 billion attempts...
|
||||
fmt.Printf("(i == 0) == %v", i == 0)
|
||||
// Output:
|
||||
// (i == 0) == false
|
||||
}
|
||||
|
||||
func ExampleEnum() {
|
||||
type MyEnum string
|
||||
const (
|
||||
A MyEnum = "A"
|
||||
B MyEnum = "B"
|
||||
)
|
||||
type MyInfo struct {
|
||||
Type MyEnum
|
||||
AInfo *string
|
||||
BInfo *string
|
||||
}
|
||||
|
||||
f := fuzz.New().NilChance(0).Funcs(
|
||||
func(e *MyInfo, c fuzz.Continue) {
|
||||
// Note c's embedded Rand allows for direct use.
|
||||
// We could also use c.RandBool() here.
|
||||
switch c.Intn(2) {
|
||||
case 0:
|
||||
e.Type = A
|
||||
c.Fuzz(&e.AInfo)
|
||||
case 1:
|
||||
e.Type = B
|
||||
c.Fuzz(&e.BInfo)
|
||||
}
|
||||
},
|
||||
)
|
||||
|
||||
for i := 0; i < 100; i++ {
|
||||
var myObject MyInfo
|
||||
f.Fuzz(&myObject)
|
||||
switch myObject.Type {
|
||||
case A:
|
||||
if myObject.AInfo == nil {
|
||||
fmt.Println("AInfo should have been set!")
|
||||
}
|
||||
if myObject.BInfo != nil {
|
||||
fmt.Println("BInfo should NOT have been set!")
|
||||
}
|
||||
case B:
|
||||
if myObject.BInfo == nil {
|
||||
fmt.Println("BInfo should have been set!")
|
||||
}
|
||||
if myObject.AInfo != nil {
|
||||
fmt.Println("AInfo should NOT have been set!")
|
||||
}
|
||||
default:
|
||||
fmt.Println("Invalid enum value!")
|
||||
}
|
||||
}
|
||||
// Output:
|
||||
}
|
487
vendor/github.com/google/gofuzz/fuzz.go
generated
vendored
Normal file
@ -0,0 +1,487 @@
|
||||
/*
|
||||
Copyright 2014 Google Inc. All rights reserved.
|
||||
|
||||
Licensed under the Apache License, Version 2.0 (the "License");
|
||||
you may not use this file except in compliance with the License.
|
||||
You may obtain a copy of the License at
|
||||
|
||||
http://www.apache.org/licenses/LICENSE-2.0
|
||||
|
||||
Unless required by applicable law or agreed to in writing, software
|
||||
distributed under the License is distributed on an "AS IS" BASIS,
|
||||
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
See the License for the specific language governing permissions and
|
||||
limitations under the License.
|
||||
*/
|
||||
|
||||
package fuzz
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"math/rand"
|
||||
"reflect"
|
||||
"time"
|
||||
)
|
||||
|
||||
// fuzzFuncMap is a map from a type to a fuzzFunc that handles that type.
|
||||
type fuzzFuncMap map[reflect.Type]reflect.Value
|
||||
|
||||
// Fuzzer knows how to fill any object with random fields.
|
||||
type Fuzzer struct {
|
||||
fuzzFuncs fuzzFuncMap
|
||||
defaultFuzzFuncs fuzzFuncMap
|
||||
r *rand.Rand
|
||||
nilChance float64
|
||||
minElements int
|
||||
maxElements int
|
||||
maxDepth int
|
||||
}
|
||||
|
||||
// New returns a new Fuzzer. Customize your Fuzzer further by calling Funcs,
|
||||
// RandSource, NilChance, or NumElements in any order.
|
||||
func New() *Fuzzer {
|
||||
return NewWithSeed(time.Now().UnixNano())
|
||||
}
|
||||
|
||||
func NewWithSeed(seed int64) *Fuzzer {
|
||||
f := &Fuzzer{
|
||||
defaultFuzzFuncs: fuzzFuncMap{
|
||||
reflect.TypeOf(&time.Time{}): reflect.ValueOf(fuzzTime),
|
||||
},
|
||||
|
||||
fuzzFuncs: fuzzFuncMap{},
|
||||
r: rand.New(rand.NewSource(seed)),
|
||||
nilChance: .2,
|
||||
minElements: 1,
|
||||
maxElements: 10,
|
||||
maxDepth: 100,
|
||||
}
|
||||
return f
|
||||
}
|
||||
|
||||
// Funcs adds each entry in fuzzFuncs as a custom fuzzing function.
|
||||
//
|
||||
// Each entry in fuzzFuncs must be a function taking two parameters.
|
||||
// The first parameter must be a pointer or map. It is the variable that
|
||||
// function will fill with random data. The second parameter must be a
|
||||
// fuzz.Continue, which will provide a source of randomness and a way
|
||||
// to automatically continue fuzzing smaller pieces of the first parameter.
|
||||
//
|
||||
// These functions are called sensibly, e.g., if you wanted custom string
|
||||
// fuzzing, the function `func(s *string, c fuzz.Continue)` would get
|
||||
// called and passed the address of strings. Maps and pointers will always
|
||||
// be made/new'd for you, ignoring the NilChance option. For slices, it
|
||||
// doesn't make much sense to pre-create them--Fuzzer doesn't know how
|
||||
// long you want your slice--so take a pointer to a slice, and make it
|
||||
// yourself. (If you don't want your map/pointer type pre-made, take a
|
||||
// pointer to it, and make it yourself.) See the examples for a range of
|
||||
// custom functions.
|
||||
func (f *Fuzzer) Funcs(fuzzFuncs ...interface{}) *Fuzzer {
|
||||
for i := range fuzzFuncs {
|
||||
v := reflect.ValueOf(fuzzFuncs[i])
|
||||
if v.Kind() != reflect.Func {
|
||||
panic("Need only funcs!")
|
||||
}
|
||||
t := v.Type()
|
||||
if t.NumIn() != 2 || t.NumOut() != 0 {
|
||||
panic("Need 2 in and 0 out params!")
|
||||
}
|
||||
argT := t.In(0)
|
||||
switch argT.Kind() {
|
||||
case reflect.Ptr, reflect.Map:
|
||||
default:
|
||||
panic("fuzzFunc must take pointer or map type")
|
||||
}
|
||||
if t.In(1) != reflect.TypeOf(Continue{}) {
|
||||
panic("fuzzFunc's second parameter must be type fuzz.Continue")
|
||||
}
|
||||
f.fuzzFuncs[argT] = v
|
||||
}
|
||||
return f
|
||||
}
|
||||
|
||||
// RandSource causes f to get values from the given source of randomness.
|
||||
// Use if you want deterministic fuzzing.
|
||||
func (f *Fuzzer) RandSource(s rand.Source) *Fuzzer {
|
||||
f.r = rand.New(s)
|
||||
return f
|
||||
}
|
||||
|
||||
// NilChance sets the probability of creating a nil pointer, map, or slice to
|
||||
// 'p'. 'p' should be between 0 (no nils) and 1 (all nils), inclusive.
|
||||
func (f *Fuzzer) NilChance(p float64) *Fuzzer {
|
||||
if p < 0 || p > 1 {
|
||||
panic("p should be between 0 and 1, inclusive.")
|
||||
}
|
||||
f.nilChance = p
|
||||
return f
|
||||
}
|
||||
|
||||
// NumElements sets the minimum and maximum number of elements that will be
|
||||
// added to a non-nil map or slice.
|
||||
func (f *Fuzzer) NumElements(atLeast, atMost int) *Fuzzer {
|
||||
if atLeast > atMost {
|
||||
panic("atLeast must be <= atMost")
|
||||
}
|
||||
if atLeast < 0 {
|
||||
panic("atLeast must be >= 0")
|
||||
}
|
||||
f.minElements = atLeast
|
||||
f.maxElements = atMost
|
||||
return f
|
||||
}
|
||||
|
||||
func (f *Fuzzer) genElementCount() int {
|
||||
if f.minElements == f.maxElements {
|
||||
return f.minElements
|
||||
}
|
||||
return f.minElements + f.r.Intn(f.maxElements-f.minElements+1)
|
||||
}
|
||||
|
||||
func (f *Fuzzer) genShouldFill() bool {
|
||||
return f.r.Float64() > f.nilChance
|
||||
}
|
||||
|
||||
// MaxDepth sets the maximum number of recursive fuzz calls that will be made
|
||||
// before stopping. This includes struct members, pointers, and map and slice
|
||||
// elements.
|
||||
func (f *Fuzzer) MaxDepth(d int) *Fuzzer {
|
||||
f.maxDepth = d
|
||||
return f
|
||||
}
|
||||
|
||||
// Fuzz recursively fills all of obj's fields with something random. First
|
||||
// this tries to find a custom fuzz function (see Funcs). If there is no
|
||||
// custom function this tests whether the object implements fuzz.Interface and,
|
||||
// if so, calls Fuzz on it to fuzz itself. If that fails, this will see if
|
||||
// there is a default fuzz function provided by this package. If all of that
|
||||
// fails, this will generate random values for all primitive fields and then
|
||||
// recurse for all non-primitives.
|
||||
//
|
||||
// This is safe for cyclic or tree-like structs, up to a limit. Use the
|
||||
// MaxDepth method to adjust how deep you need it to recurse.
|
||||
//
|
||||
// obj must be a pointer. Only exported (public) fields can be set (thanks,
|
||||
// golang :/ ) Intended for tests, so will panic on bad input or unimplemented
|
||||
// fields.
|
||||
func (f *Fuzzer) Fuzz(obj interface{}) {
|
||||
v := reflect.ValueOf(obj)
|
||||
if v.Kind() != reflect.Ptr {
|
||||
panic("needed ptr!")
|
||||
}
|
||||
v = v.Elem()
|
||||
f.fuzzWithContext(v, 0)
|
||||
}
|
||||
|
||||
// FuzzNoCustom is just like Fuzz, except that any custom fuzz function for
|
||||
// obj's type will not be called and obj will not be tested for fuzz.Interface
|
||||
// conformance. This applies only to obj and not other instances of obj's
|
||||
// type.
|
||||
// Not safe for cyclic or tree-like structs!
|
||||
// obj must be a pointer. Only exported (public) fields can be set (thanks, golang :/ )
|
||||
// Intended for tests, so will panic on bad input or unimplemented fields.
|
||||
func (f *Fuzzer) FuzzNoCustom(obj interface{}) {
|
||||
v := reflect.ValueOf(obj)
|
||||
if v.Kind() != reflect.Ptr {
|
||||
panic("needed ptr!")
|
||||
}
|
||||
v = v.Elem()
|
||||
f.fuzzWithContext(v, flagNoCustomFuzz)
|
||||
}
|
||||
|
||||
const (
|
||||
// Do not try to find a custom fuzz function. Does not apply recursively.
|
||||
flagNoCustomFuzz uint64 = 1 << iota
|
||||
)
|
||||
|
||||
func (f *Fuzzer) fuzzWithContext(v reflect.Value, flags uint64) {
|
||||
fc := &fuzzerContext{fuzzer: f}
|
||||
fc.doFuzz(v, flags)
|
||||
}
|
||||
|
||||
// fuzzerContext carries context about a single fuzzing run, which lets Fuzzer
|
||||
// be thread-safe.
|
||||
type fuzzerContext struct {
|
||||
fuzzer *Fuzzer
|
||||
curDepth int
|
||||
}
|
||||
|
||||
func (fc *fuzzerContext) doFuzz(v reflect.Value, flags uint64) {
|
||||
if fc.curDepth >= fc.fuzzer.maxDepth {
|
||||
return
|
||||
}
|
||||
fc.curDepth++
|
||||
defer func() { fc.curDepth-- }()
|
||||
|
||||
if !v.CanSet() {
|
||||
return
|
||||
}
|
||||
|
||||
if flags&flagNoCustomFuzz == 0 {
|
||||
// Check for both pointer and non-pointer custom functions.
|
||||
if v.CanAddr() && fc.tryCustom(v.Addr()) {
|
||||
return
|
||||
}
|
||||
if fc.tryCustom(v) {
|
||||
return
|
||||
}
|
||||
}
|
||||
|
||||
if fn, ok := fillFuncMap[v.Kind()]; ok {
|
||||
fn(v, fc.fuzzer.r)
|
||||
return
|
||||
}
|
||||
switch v.Kind() {
|
||||
case reflect.Map:
|
||||
if fc.fuzzer.genShouldFill() {
|
||||
v.Set(reflect.MakeMap(v.Type()))
|
||||
n := fc.fuzzer.genElementCount()
|
||||
for i := 0; i < n; i++ {
|
||||
key := reflect.New(v.Type().Key()).Elem()
|
||||
fc.doFuzz(key, 0)
|
||||
val := reflect.New(v.Type().Elem()).Elem()
|
||||
fc.doFuzz(val, 0)
|
||||
v.SetMapIndex(key, val)
|
||||
}
|
||||
return
|
||||
}
|
||||
v.Set(reflect.Zero(v.Type()))
|
||||
case reflect.Ptr:
|
||||
if fc.fuzzer.genShouldFill() {
|
||||
v.Set(reflect.New(v.Type().Elem()))
|
||||
fc.doFuzz(v.Elem(), 0)
|
||||
return
|
||||
}
|
||||
v.Set(reflect.Zero(v.Type()))
|
||||
case reflect.Slice:
|
||||
if fc.fuzzer.genShouldFill() {
|
||||
n := fc.fuzzer.genElementCount()
|
||||
v.Set(reflect.MakeSlice(v.Type(), n, n))
|
||||
for i := 0; i < n; i++ {
|
||||
fc.doFuzz(v.Index(i), 0)
|
||||
}
|
||||
return
|
||||
}
|
||||
v.Set(reflect.Zero(v.Type()))
|
||||
case reflect.Array:
|
||||
if fc.fuzzer.genShouldFill() {
|
||||
n := v.Len()
|
||||
for i := 0; i < n; i++ {
|
||||
fc.doFuzz(v.Index(i), 0)
|
||||
}
|
||||
return
|
||||
}
|
||||
v.Set(reflect.Zero(v.Type()))
|
||||
case reflect.Struct:
|
||||
for i := 0; i < v.NumField(); i++ {
|
||||
fc.doFuzz(v.Field(i), 0)
|
||||
}
|
||||
case reflect.Chan:
|
||||
fallthrough
|
||||
case reflect.Func:
|
||||
fallthrough
|
||||
case reflect.Interface:
|
||||
fallthrough
|
||||
default:
|
||||
panic(fmt.Sprintf("Can't handle %#v", v.Interface()))
|
||||
}
|
||||
}
|
||||
|
||||
// tryCustom searches for custom handlers, and returns true iff it finds a match
|
||||
// and successfully randomizes v.
|
||||
func (fc *fuzzerContext) tryCustom(v reflect.Value) bool {
|
||||
// First: see if we have a fuzz function for it.
|
||||
doCustom, ok := fc.fuzzer.fuzzFuncs[v.Type()]
|
||||
if !ok {
|
||||
// Second: see if it can fuzz itself.
|
||||
if v.CanInterface() {
|
||||
intf := v.Interface()
|
||||
if fuzzable, ok := intf.(Interface); ok {
|
||||
fuzzable.Fuzz(Continue{fc: fc, Rand: fc.fuzzer.r})
|
||||
return true
|
||||
}
|
||||
}
|
||||
// Finally: see if there is a default fuzz function.
|
||||
doCustom, ok = fc.fuzzer.defaultFuzzFuncs[v.Type()]
|
||||
if !ok {
|
||||
return false
|
||||
}
|
||||
}
|
||||
|
||||
switch v.Kind() {
|
||||
case reflect.Ptr:
|
||||
if v.IsNil() {
|
||||
if !v.CanSet() {
|
||||
return false
|
||||
}
|
||||
v.Set(reflect.New(v.Type().Elem()))
|
||||
}
|
||||
case reflect.Map:
|
||||
if v.IsNil() {
|
||||
if !v.CanSet() {
|
||||
return false
|
||||
}
|
||||
v.Set(reflect.MakeMap(v.Type()))
|
||||
}
|
||||
default:
|
||||
return false
|
||||
}
|
||||
|
||||
doCustom.Call([]reflect.Value{v, reflect.ValueOf(Continue{
|
||||
fc: fc,
|
||||
Rand: fc.fuzzer.r,
|
||||
})})
|
||||
return true
|
||||
}
|
||||
|
||||
// Interface represents an object that knows how to fuzz itself. Any time we
|
||||
// find a type that implements this interface we will delegate the act of
|
||||
// fuzzing itself.
|
||||
type Interface interface {
|
||||
Fuzz(c Continue)
|
||||
}
|
||||
|
||||
// Continue can be passed to custom fuzzing functions to allow them to use
|
||||
// the correct source of randomness and to continue fuzzing their members.
|
||||
type Continue struct {
|
||||
fc *fuzzerContext
|
||||
|
||||
// For convenience, Continue implements rand.Rand via embedding.
|
||||
// Use this for generating any randomness if you want your fuzzing
|
||||
// to be repeatable for a given seed.
|
||||
*rand.Rand
|
||||
}
|
||||
|
||||
// Fuzz continues fuzzing obj. obj must be a pointer.
|
||||
func (c Continue) Fuzz(obj interface{}) {
|
||||
v := reflect.ValueOf(obj)
|
||||
if v.Kind() != reflect.Ptr {
|
||||
panic("needed ptr!")
|
||||
}
|
||||
v = v.Elem()
|
||||
c.fc.doFuzz(v, 0)
|
||||
}
|
||||
|
||||
// FuzzNoCustom continues fuzzing obj, except that any custom fuzz function for
|
||||
// obj's type will not be called and obj will not be tested for fuzz.Interface
|
||||
// conformance. This applies only to obj and not other instances of obj's
|
||||
// type.
|
||||
func (c Continue) FuzzNoCustom(obj interface{}) {
|
||||
v := reflect.ValueOf(obj)
|
||||
if v.Kind() != reflect.Ptr {
|
||||
panic("needed ptr!")
|
||||
}
|
||||
v = v.Elem()
|
||||
c.fc.doFuzz(v, flagNoCustomFuzz)
|
||||
}
|
||||
|
||||
// RandString makes a random string up to 20 characters long. The returned string
|
||||
// may include a variety of (valid) UTF-8 encodings.
|
||||
func (c Continue) RandString() string {
|
||||
return randString(c.Rand)
|
||||
}
|
||||
|
||||
// RandUint64 makes random 64 bit numbers.
|
||||
// Weirdly, rand doesn't have a function that gives you 64 random bits.
|
||||
func (c Continue) RandUint64() uint64 {
|
||||
return randUint64(c.Rand)
|
||||
}
|
||||
|
||||
// RandBool returns true or false randomly.
|
||||
func (c Continue) RandBool() bool {
|
||||
return randBool(c.Rand)
|
||||
}
|
||||
|
||||
func fuzzInt(v reflect.Value, r *rand.Rand) {
|
||||
v.SetInt(int64(randUint64(r)))
|
||||
}
|
||||
|
||||
func fuzzUint(v reflect.Value, r *rand.Rand) {
|
||||
v.SetUint(randUint64(r))
|
||||
}
|
||||
|
||||
func fuzzTime(t *time.Time, c Continue) {
|
||||
var sec, nsec int64
|
||||
// Allow for about 1000 years of random time values, which keeps things
|
||||
// like JSON parsing reasonably happy.
|
||||
sec = c.Rand.Int63n(1000 * 365 * 24 * 60 * 60)
|
||||
c.Fuzz(&nsec)
|
||||
*t = time.Unix(sec, nsec)
|
||||
}
|
||||
|
||||
var fillFuncMap = map[reflect.Kind]func(reflect.Value, *rand.Rand){
|
||||
reflect.Bool: func(v reflect.Value, r *rand.Rand) {
|
||||
v.SetBool(randBool(r))
|
||||
},
|
||||
reflect.Int: fuzzInt,
|
||||
reflect.Int8: fuzzInt,
|
||||
reflect.Int16: fuzzInt,
|
||||
reflect.Int32: fuzzInt,
|
||||
reflect.Int64: fuzzInt,
|
||||
reflect.Uint: fuzzUint,
|
||||
reflect.Uint8: fuzzUint,
|
||||
reflect.Uint16: fuzzUint,
|
||||
reflect.Uint32: fuzzUint,
|
||||
reflect.Uint64: fuzzUint,
|
||||
reflect.Uintptr: fuzzUint,
|
||||
reflect.Float32: func(v reflect.Value, r *rand.Rand) {
|
||||
v.SetFloat(float64(r.Float32()))
|
||||
},
|
||||
reflect.Float64: func(v reflect.Value, r *rand.Rand) {
|
||||
v.SetFloat(r.Float64())
|
||||
},
|
||||
reflect.Complex64: func(v reflect.Value, r *rand.Rand) {
|
||||
panic("unimplemented")
|
||||
},
|
||||
reflect.Complex128: func(v reflect.Value, r *rand.Rand) {
|
||||
panic("unimplemented")
|
||||
},
|
||||
reflect.String: func(v reflect.Value, r *rand.Rand) {
|
||||
v.SetString(randString(r))
|
||||
},
|
||||
reflect.UnsafePointer: func(v reflect.Value, r *rand.Rand) {
|
||||
panic("unimplemented")
|
||||
},
|
||||
}
|
||||
|
||||
// randBool returns true or false randomly.
|
||||
func randBool(r *rand.Rand) bool {
|
||||
if r.Int()&1 == 1 {
|
||||
return true
|
||||
}
|
||||
return false
|
||||
}
|
||||
|
||||
type charRange struct {
|
||||
first, last rune
|
||||
}
|
||||
|
||||
// choose returns a random unicode character from the given range, using the
|
||||
// given randomness source.
|
||||
func (r *charRange) choose(rand *rand.Rand) rune {
|
||||
count := int64(r.last - r.first)
|
||||
return r.first + rune(rand.Int63n(count))
|
||||
}
|
||||
|
||||
var unicodeRanges = []charRange{
|
||||
{' ', '~'}, // ASCII characters
|
||||
{'\u00a0', '\u02af'}, // Multi-byte encoded characters
|
||||
{'\u4e00', '\u9fff'}, // Common CJK (even longer encodings)
|
||||
}
|
||||
|
||||
// randString makes a random string up to 20 characters long. The returned string
|
||||
// may include a variety of (valid) UTF-8 encodings.
|
||||
func randString(r *rand.Rand) string {
|
||||
n := r.Intn(20)
|
||||
runes := make([]rune, n)
|
||||
for i := range runes {
|
||||
runes[i] = unicodeRanges[r.Intn(len(unicodeRanges))].choose(r)
|
||||
}
|
||||
return string(runes)
|
||||
}
|
||||
|
||||
// randUint64 makes random 64 bit numbers.
|
||||
// Weirdly, rand doesn't have a function that gives you 64 random bits.
|
||||
func randUint64(r *rand.Rand) uint64 {
|
||||
return uint64(r.Uint32())<<32 | uint64(r.Uint32())
|
||||
}
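
The doc comments above spell out the gofuzz extension points: implement fuzz.Interface to let a type fuzz itself, or register custom functions that receive a Continue so they can keep fuzzing nested members with the same seeded randomness. As a minimal usage sketch (not part of the vendored file; the Endpoint type is hypothetical), a caller would wire those pieces together like this:

    package main

    import (
        "fmt"

        fuzz "github.com/google/gofuzz"
    )

    // Endpoint is a hypothetical struct used only for this illustration.
    type Endpoint struct {
        Host string
        Port int
        Tags map[string]string
    }

    func main() {
        f := fuzz.New().NilChance(0).NumElements(1, 3).Funcs(
            // A custom function takes precedence over the default fill logic
            // for *Endpoint; the Continue argument lets it delegate nested
            // fields back to the fuzzer with the same randomness source.
            func(e *Endpoint, c fuzz.Continue) {
                e.Host = c.RandString()
                e.Port = 1024 + c.Intn(64511) // Intn comes from the embedded *rand.Rand
                c.Fuzz(&e.Tags)
            },
        )

        var e Endpoint
        f.Fuzz(&e)
        fmt.Printf("%+v\n", e)
    }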
472 vendor/github.com/google/gofuzz/fuzz_test.go (generated, vendored, new file)
@ -0,0 +1,472 @@
|
||||
/*
|
||||
Copyright 2014 Google Inc. All rights reserved.
|
||||
|
||||
Licensed under the Apache License, Version 2.0 (the "License");
|
||||
you may not use this file except in compliance with the License.
|
||||
You may obtain a copy of the License at
|
||||
|
||||
http://www.apache.org/licenses/LICENSE-2.0
|
||||
|
||||
Unless required by applicable law or agreed to in writing, software
|
||||
distributed under the License is distributed on an "AS IS" BASIS,
|
||||
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
See the License for the specific language governing permissions and
|
||||
limitations under the License.
|
||||
*/
|
||||
|
||||
package fuzz
|
||||
|
||||
import (
|
||||
"reflect"
|
||||
"testing"
|
||||
"time"
|
||||
)
|
||||
|
||||
func TestFuzz_basic(t *testing.T) {
|
||||
obj := &struct {
|
||||
I int
|
||||
I8 int8
|
||||
I16 int16
|
||||
I32 int32
|
||||
I64 int64
|
||||
U uint
|
||||
U8 uint8
|
||||
U16 uint16
|
||||
U32 uint32
|
||||
U64 uint64
|
||||
Uptr uintptr
|
||||
S string
|
||||
B bool
|
||||
T time.Time
|
||||
}{}
|
||||
|
||||
failed := map[string]int{}
|
||||
for i := 0; i < 10; i++ {
|
||||
New().Fuzz(obj)
|
||||
|
||||
if n, v := "i", obj.I; v == 0 {
|
||||
failed[n] = failed[n] + 1
|
||||
}
|
||||
if n, v := "i8", obj.I8; v == 0 {
|
||||
failed[n] = failed[n] + 1
|
||||
}
|
||||
if n, v := "i16", obj.I16; v == 0 {
|
||||
failed[n] = failed[n] + 1
|
||||
}
|
||||
if n, v := "i32", obj.I32; v == 0 {
|
||||
failed[n] = failed[n] + 1
|
||||
}
|
||||
if n, v := "i64", obj.I64; v == 0 {
|
||||
failed[n] = failed[n] + 1
|
||||
}
|
||||
if n, v := "u", obj.U; v == 0 {
|
||||
failed[n] = failed[n] + 1
|
||||
}
|
||||
if n, v := "u8", obj.U8; v == 0 {
|
||||
failed[n] = failed[n] + 1
|
||||
}
|
||||
if n, v := "u16", obj.U16; v == 0 {
|
||||
failed[n] = failed[n] + 1
|
||||
}
|
||||
if n, v := "u32", obj.U32; v == 0 {
|
||||
failed[n] = failed[n] + 1
|
||||
}
|
||||
if n, v := "u64", obj.U64; v == 0 {
|
||||
failed[n] = failed[n] + 1
|
||||
}
|
||||
if n, v := "uptr", obj.Uptr; v == 0 {
|
||||
failed[n] = failed[n] + 1
|
||||
}
|
||||
if n, v := "s", obj.S; v == "" {
|
||||
failed[n] = failed[n] + 1
|
||||
}
|
||||
if n, v := "b", obj.B; v == false {
|
||||
failed[n] = failed[n] + 1
|
||||
}
|
||||
if n, v := "t", obj.T; v.IsZero() {
|
||||
failed[n] = failed[n] + 1
|
||||
}
|
||||
}
|
||||
checkFailed(t, failed)
|
||||
}
|
||||
|
||||
func checkFailed(t *testing.T, failed map[string]int) {
|
||||
for k, v := range failed {
|
||||
if v > 8 {
|
||||
t.Errorf("%v seems to not be getting set, was zero value %v times", k, v)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func TestFuzz_structptr(t *testing.T) {
|
||||
obj := &struct {
|
||||
A *struct {
|
||||
S string
|
||||
}
|
||||
}{}
|
||||
|
||||
f := New().NilChance(.5)
|
||||
failed := map[string]int{}
|
||||
for i := 0; i < 10; i++ {
|
||||
f.Fuzz(obj)
|
||||
|
||||
if n, v := "a not nil", obj.A; v == nil {
|
||||
failed[n] = failed[n] + 1
|
||||
}
|
||||
if n, v := "a nil", obj.A; v != nil {
|
||||
failed[n] = failed[n] + 1
|
||||
}
|
||||
if n, v := "as", obj.A; v == nil || v.S == "" {
|
||||
failed[n] = failed[n] + 1
|
||||
}
|
||||
}
|
||||
checkFailed(t, failed)
|
||||
}
|
||||
|
||||
// tryFuzz tries fuzzing up to 20 times. Fail if check() never passes, report the highest
|
||||
// stage it ever got to.
|
||||
func tryFuzz(t *testing.T, f *Fuzzer, obj interface{}, check func() (stage int, passed bool)) {
|
||||
maxStage := 0
|
||||
for i := 0; i < 20; i++ {
|
||||
f.Fuzz(obj)
|
||||
stage, passed := check()
|
||||
if stage > maxStage {
|
||||
maxStage = stage
|
||||
}
|
||||
if passed {
|
||||
return
|
||||
}
|
||||
}
|
||||
t.Errorf("Only ever got to stage %v", maxStage)
|
||||
}
|
||||
|
||||
func TestFuzz_structmap(t *testing.T) {
|
||||
obj := &struct {
|
||||
A map[struct {
|
||||
S string
|
||||
}]struct {
|
||||
S2 string
|
||||
}
|
||||
B map[string]string
|
||||
}{}
|
||||
|
||||
tryFuzz(t, New(), obj, func() (int, bool) {
|
||||
if obj.A == nil {
|
||||
return 1, false
|
||||
}
|
||||
if len(obj.A) == 0 {
|
||||
return 2, false
|
||||
}
|
||||
for k, v := range obj.A {
|
||||
if k.S == "" {
|
||||
return 3, false
|
||||
}
|
||||
if v.S2 == "" {
|
||||
return 4, false
|
||||
}
|
||||
}
|
||||
|
||||
if obj.B == nil {
|
||||
return 5, false
|
||||
}
|
||||
if len(obj.B) == 0 {
|
||||
return 6, false
|
||||
}
|
||||
for k, v := range obj.B {
|
||||
if k == "" {
|
||||
return 7, false
|
||||
}
|
||||
if v == "" {
|
||||
return 8, false
|
||||
}
|
||||
}
|
||||
return 9, true
|
||||
})
|
||||
}
|
||||
|
||||
func TestFuzz_structslice(t *testing.T) {
|
||||
obj := &struct {
|
||||
A []struct {
|
||||
S string
|
||||
}
|
||||
B []string
|
||||
}{}
|
||||
|
||||
tryFuzz(t, New(), obj, func() (int, bool) {
|
||||
if obj.A == nil {
|
||||
return 1, false
|
||||
}
|
||||
if len(obj.A) == 0 {
|
||||
return 2, false
|
||||
}
|
||||
for _, v := range obj.A {
|
||||
if v.S == "" {
|
||||
return 3, false
|
||||
}
|
||||
}
|
||||
|
||||
if obj.B == nil {
|
||||
return 4, false
|
||||
}
|
||||
if len(obj.B) == 0 {
|
||||
return 5, false
|
||||
}
|
||||
for _, v := range obj.B {
|
||||
if v == "" {
|
||||
return 6, false
|
||||
}
|
||||
}
|
||||
return 7, true
|
||||
})
|
||||
}
|
||||
|
||||
func TestFuzz_structarray(t *testing.T) {
|
||||
obj := &struct {
|
||||
A [3]struct {
|
||||
S string
|
||||
}
|
||||
B [2]int
|
||||
}{}
|
||||
|
||||
tryFuzz(t, New(), obj, func() (int, bool) {
|
||||
for _, v := range obj.A {
|
||||
if v.S == "" {
|
||||
return 1, false
|
||||
}
|
||||
}
|
||||
|
||||
for _, v := range obj.B {
|
||||
if v == 0 {
|
||||
return 2, false
|
||||
}
|
||||
}
|
||||
return 3, true
|
||||
})
|
||||
}
|
||||
|
||||
func TestFuzz_custom(t *testing.T) {
|
||||
obj := &struct {
|
||||
A string
|
||||
B *string
|
||||
C map[string]string
|
||||
D *map[string]string
|
||||
}{}
|
||||
|
||||
testPhrase := "gotcalled"
|
||||
testMap := map[string]string{"C": "D"}
|
||||
f := New().Funcs(
|
||||
func(s *string, c Continue) {
|
||||
*s = testPhrase
|
||||
},
|
||||
func(m map[string]string, c Continue) {
|
||||
m["C"] = "D"
|
||||
},
|
||||
)
|
||||
|
||||
tryFuzz(t, f, obj, func() (int, bool) {
|
||||
if obj.A != testPhrase {
|
||||
return 1, false
|
||||
}
|
||||
if obj.B == nil {
|
||||
return 2, false
|
||||
}
|
||||
if *obj.B != testPhrase {
|
||||
return 3, false
|
||||
}
|
||||
if e, a := testMap, obj.C; !reflect.DeepEqual(e, a) {
|
||||
return 4, false
|
||||
}
|
||||
if obj.D == nil {
|
||||
return 5, false
|
||||
}
|
||||
if e, a := testMap, *obj.D; !reflect.DeepEqual(e, a) {
|
||||
return 6, false
|
||||
}
|
||||
return 7, true
|
||||
})
|
||||
}
|
||||
|
||||
type SelfFuzzer string
|
||||
|
||||
// Implement fuzz.Interface.
|
||||
func (sf *SelfFuzzer) Fuzz(c Continue) {
|
||||
*sf = selfFuzzerTestPhrase
|
||||
}
|
||||
|
||||
const selfFuzzerTestPhrase = "was fuzzed"
|
||||
|
||||
func TestFuzz_interface(t *testing.T) {
|
||||
f := New()
|
||||
|
||||
var obj1 SelfFuzzer
|
||||
tryFuzz(t, f, &obj1, func() (int, bool) {
|
||||
if obj1 != selfFuzzerTestPhrase {
|
||||
return 1, false
|
||||
}
|
||||
return 1, true
|
||||
})
|
||||
|
||||
var obj2 map[int]SelfFuzzer
|
||||
tryFuzz(t, f, &obj2, func() (int, bool) {
|
||||
for _, v := range obj2 {
|
||||
if v != selfFuzzerTestPhrase {
|
||||
return 1, false
|
||||
}
|
||||
}
|
||||
return 1, true
|
||||
})
|
||||
}
|
||||
|
||||
func TestFuzz_interfaceAndFunc(t *testing.T) {
|
||||
const privateTestPhrase = "private phrase"
|
||||
f := New().Funcs(
|
||||
// This should take precedence over SelfFuzzer.Fuzz().
|
||||
func(s *SelfFuzzer, c Continue) {
|
||||
*s = privateTestPhrase
|
||||
},
|
||||
)
|
||||
|
||||
var obj1 SelfFuzzer
|
||||
tryFuzz(t, f, &obj1, func() (int, bool) {
|
||||
if obj1 != privateTestPhrase {
|
||||
return 1, false
|
||||
}
|
||||
return 1, true
|
||||
})
|
||||
|
||||
var obj2 map[int]SelfFuzzer
|
||||
tryFuzz(t, f, &obj2, func() (int, bool) {
|
||||
for _, v := range obj2 {
|
||||
if v != privateTestPhrase {
|
||||
return 1, false
|
||||
}
|
||||
}
|
||||
return 1, true
|
||||
})
|
||||
}
|
||||
|
||||
func TestFuzz_noCustom(t *testing.T) {
|
||||
type Inner struct {
|
||||
Str string
|
||||
}
|
||||
type Outer struct {
|
||||
Str string
|
||||
In Inner
|
||||
}
|
||||
|
||||
testPhrase := "gotcalled"
|
||||
f := New().Funcs(
|
||||
func(outer *Outer, c Continue) {
|
||||
outer.Str = testPhrase
|
||||
c.Fuzz(&outer.In)
|
||||
},
|
||||
func(inner *Inner, c Continue) {
|
||||
inner.Str = testPhrase
|
||||
},
|
||||
)
|
||||
c := Continue{fc: &fuzzerContext{fuzzer: f}, Rand: f.r}
|
||||
|
||||
// Fuzzer.Fuzz()
|
||||
obj1 := Outer{}
|
||||
f.Fuzz(&obj1)
|
||||
if obj1.Str != testPhrase {
|
||||
t.Errorf("expected Outer custom function to have been called")
|
||||
}
|
||||
if obj1.In.Str != testPhrase {
|
||||
t.Errorf("expected Inner custom function to have been called")
|
||||
}
|
||||
|
||||
// Continue.Fuzz()
|
||||
obj2 := Outer{}
|
||||
c.Fuzz(&obj2)
|
||||
if obj2.Str != testPhrase {
|
||||
t.Errorf("expected Outer custom function to have been called")
|
||||
}
|
||||
if obj2.In.Str != testPhrase {
|
||||
t.Errorf("expected Inner custom function to have been called")
|
||||
}
|
||||
|
||||
// Fuzzer.FuzzNoCustom()
|
||||
obj3 := Outer{}
|
||||
f.FuzzNoCustom(&obj3)
|
||||
if obj3.Str == testPhrase {
|
||||
t.Errorf("expected Outer custom function to not have been called")
|
||||
}
|
||||
if obj3.In.Str != testPhrase {
|
||||
t.Errorf("expected Inner custom function to have been called")
|
||||
}
|
||||
|
||||
// Continue.FuzzNoCustom()
|
||||
obj4 := Outer{}
|
||||
c.FuzzNoCustom(&obj4)
|
||||
if obj4.Str == testPhrase {
|
||||
t.Errorf("expected Outer custom function to not have been called")
|
||||
}
|
||||
if obj4.In.Str != testPhrase {
|
||||
t.Errorf("expected Inner custom function to have been called")
|
||||
}
|
||||
}
|
||||
|
||||
func TestFuzz_NumElements(t *testing.T) {
|
||||
f := New().NilChance(0).NumElements(0, 1)
|
||||
obj := &struct {
|
||||
A []int
|
||||
}{}
|
||||
|
||||
tryFuzz(t, f, obj, func() (int, bool) {
|
||||
if obj.A == nil {
|
||||
return 1, false
|
||||
}
|
||||
return 2, len(obj.A) == 0
|
||||
})
|
||||
tryFuzz(t, f, obj, func() (int, bool) {
|
||||
if obj.A == nil {
|
||||
return 3, false
|
||||
}
|
||||
return 4, len(obj.A) == 1
|
||||
})
|
||||
}
|
||||
|
||||
func TestFuzz_Maxdepth(t *testing.T) {
|
||||
type S struct {
|
||||
S *S
|
||||
}
|
||||
|
||||
f := New().NilChance(0)
|
||||
|
||||
f.MaxDepth(1)
|
||||
for i := 0; i < 100; i++ {
|
||||
obj := S{}
|
||||
f.Fuzz(&obj)
|
||||
|
||||
if obj.S != nil {
|
||||
t.Errorf("Expected nil")
|
||||
}
|
||||
}
|
||||
|
||||
f.MaxDepth(3) // field, ptr
|
||||
for i := 0; i < 100; i++ {
|
||||
obj := S{}
|
||||
f.Fuzz(&obj)
|
||||
|
||||
if obj.S == nil {
|
||||
t.Errorf("Expected obj.S not nil")
|
||||
} else if obj.S.S != nil {
|
||||
t.Errorf("Expected obj.S.S nil")
|
||||
}
|
||||
}
|
||||
|
||||
f.MaxDepth(5) // field, ptr, field, ptr
|
||||
for i := 0; i < 100; i++ {
|
||||
obj := S{}
|
||||
f.Fuzz(&obj)
|
||||
|
||||
if obj.S == nil {
|
||||
t.Errorf("Expected obj.S not nil")
|
||||
} else if obj.S.S == nil {
|
||||
t.Errorf("Expected obj.S.S not nil")
|
||||
} else if obj.S.S.S != nil {
|
||||
t.Errorf("Expected obj.S.S.S nil")
|
||||
}
|
||||
}
|
||||
}
|
21 vendor/github.com/googleapis/gnostic/.gitignore (generated, vendored, new file)
@ -0,0 +1,21 @@
# IntelliJ IDEA
.idea
# Eclipse
.checkstyle
.project
.settings
# Swift
.build
Packages
# Node
node_modules
package-lock.json
bundle.json
# vi
*.swp
# vscode
.vscode
.DS_Store
*~
Package.resolved
extensions/sample/generated
29 vendor/github.com/googleapis/gnostic/.travis-install.sh (generated, vendored, new executable file)
@ -0,0 +1,29 @@
#!/bin/sh

#
# Install dependencies that aren't available as Ubuntu packages.
#
# Everything goes into $HOME/local.
#
# Scripts should add
# - $HOME/local/bin to PATH
# - $HOME/local/lib to LD_LIBRARY_PATH
#

cd
mkdir -p local

# Install swift
SWIFT_URL=https://swift.org/builds/swift-4.0-branch/ubuntu1404/swift-4.0-DEVELOPMENT-SNAPSHOT-2017-09-01-a/swift-4.0-DEVELOPMENT-SNAPSHOT-2017-09-01-a-ubuntu14.04.tar.gz
echo $SWIFT_URL
curl -fSsL $SWIFT_URL -o swift.tar.gz
tar -xzf swift.tar.gz --strip-components=2 --directory=local

# Install protoc
PROTOC_URL=https://github.com/google/protobuf/releases/download/v3.4.0/protoc-3.4.0-linux-x86_64.zip
echo $PROTOC_URL
curl -fSsL $PROTOC_URL -o protoc.zip
unzip protoc.zip -d local

# Verify installation
find local
49 vendor/github.com/googleapis/gnostic/.travis.yml (generated, vendored, new file)
@ -0,0 +1,49 @@
# Travis CI build file for OpenAPI Compiler, including Go and Swift plugins

# Use Ubuntu 14.04
dist: trusty

sudo: false

language: go

addons:
  apt:
    packages:
      - clang-3.8
      - lldb-3.8
      - libicu-dev
      - libtool
      - libcurl4-openssl-dev
      - libbsd-dev
      - build-essential
      - libssl-dev
      - uuid-dev
      - curl
      - unzip

install:
  - ./.travis-install.sh
  - export PATH=.:$HOME/local/bin:$PATH
  - make

script:
  - go test . -v
  - pushd plugins/gnostic-go-generator/examples/v2.0/bookstore
  - make test
  - popd
  - pushd plugins/gnostic-go-generator/examples/v2.0/sample
  - make test
  - popd
  - pushd plugins/gnostic-go-generator/examples/v3.0/bookstore
  - make test
  - popd
  - export PATH=.:$HOME/local/bin:$PATH
  - export LD_LIBRARY_PATH=$HOME/local/lib
  - pushd plugins/gnostic-swift-generator
  - make install
  - cd examples/bookstore
  - make
  - .build/debug/Server &
  - make test
35 vendor/github.com/googleapis/gnostic/COMPILE-PROTOS.sh (generated, vendored, new executable file)
@ -0,0 +1,35 @@
#!/bin/sh
#
# Copyright 2016 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#

go get github.com/golang/protobuf/protoc-gen-go

protoc \
--go_out=Mgoogle/protobuf/any.proto=github.com/golang/protobuf/ptypes/any:. \
OpenAPIv2/OpenAPIv2.proto

protoc \
-I.:$GOPATH/src \
--go_out=:. \
plugins/plugin.proto

protoc \
--go_out=Mgoogle/protobuf/any.proto=github.com/golang/protobuf/ptypes/any:. \
OpenAPIv3/OpenAPIv3.proto

protoc \
--go_out=Mgoogle/protobuf/any.proto=github.com/golang/protobuf/ptypes/any:. \
discovery/discovery.proto
35 vendor/github.com/googleapis/gnostic/CONTRIBUTING.md (generated, vendored, new file)
@ -0,0 +1,35 @@
# How to become a contributor and submit your own code

## Contributor License Agreements

We'd love to accept your sample apps and patches! Before we can take them, we
have to jump a couple of legal hurdles.

Please fill out either the individual or corporate Contributor License Agreement
(CLA).

* If you are an individual writing original source code and you're sure you
own the intellectual property, then you'll need to sign an [individual CLA]
(https://developers.google.com/open-source/cla/individual).
* If you work for a company that wants to allow you to contribute your work,
then you'll need to sign a [corporate CLA]
(https://developers.google.com/open-source/cla/corporate).

Follow either of the two links above to access the appropriate CLA and
instructions for how to sign and return it. Once we receive it, we'll be able to
accept your pull requests.

## Contributing A Patch

1. Submit an issue describing your proposed change to the repo in question.
1. The repo owner will respond to your issue promptly.
1. If your proposed change is accepted, and you haven't already done so, sign a
Contributor License Agreement (see details above).
1. Fork the desired repo, develop and test your code changes.
1. Ensure that your code adheres to the existing style in the sample to which
you are contributing. Refer to the
[Google Cloud Platform Samples Style Guide]
(https://github.com/GoogleCloudPlatform/Template/wiki/style.html) for the
recommended coding standards for this organization.
1. Ensure that your code has an appropriate set of unit tests which all pass.
1. Submit a pull request.
203 vendor/github.com/googleapis/gnostic/LICENSE (generated, vendored, new file)
@ -0,0 +1,203 @@
|
||||
|
||||
Apache License
|
||||
Version 2.0, January 2004
|
||||
http://www.apache.org/licenses/
|
||||
|
||||
TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
|
||||
|
||||
1. Definitions.
|
||||
|
||||
"License" shall mean the terms and conditions for use, reproduction,
|
||||
and distribution as defined by Sections 1 through 9 of this document.
|
||||
|
||||
"Licensor" shall mean the copyright owner or entity authorized by
|
||||
the copyright owner that is granting the License.
|
||||
|
||||
"Legal Entity" shall mean the union of the acting entity and all
|
||||
other entities that control, are controlled by, or are under common
|
||||
control with that entity. For the purposes of this definition,
|
||||
"control" means (i) the power, direct or indirect, to cause the
|
||||
direction or management of such entity, whether by contract or
|
||||
otherwise, or (ii) ownership of fifty percent (50%) or more of the
|
||||
outstanding shares, or (iii) beneficial ownership of such entity.
|
||||
|
||||
"You" (or "Your") shall mean an individual or Legal Entity
|
||||
exercising permissions granted by this License.
|
||||
|
||||
"Source" form shall mean the preferred form for making modifications,
|
||||
including but not limited to software source code, documentation
|
||||
source, and configuration files.
|
||||
|
||||
"Object" form shall mean any form resulting from mechanical
|
||||
transformation or translation of a Source form, including but
|
||||
not limited to compiled object code, generated documentation,
|
||||
and conversions to other media types.
|
||||
|
||||
"Work" shall mean the work of authorship, whether in Source or
|
||||
Object form, made available under the License, as indicated by a
|
||||
copyright notice that is included in or attached to the work
|
||||
(an example is provided in the Appendix below).
|
||||
|
||||
"Derivative Works" shall mean any work, whether in Source or Object
|
||||
form, that is based on (or derived from) the Work and for which the
|
||||
editorial revisions, annotations, elaborations, or other modifications
|
||||
represent, as a whole, an original work of authorship. For the purposes
|
||||
of this License, Derivative Works shall not include works that remain
|
||||
separable from, or merely link (or bind by name) to the interfaces of,
|
||||
the Work and Derivative Works thereof.
|
||||
|
||||
"Contribution" shall mean any work of authorship, including
|
||||
the original version of the Work and any modifications or additions
|
||||
to that Work or Derivative Works thereof, that is intentionally
|
||||
submitted to Licensor for inclusion in the Work by the copyright owner
|
||||
or by an individual or Legal Entity authorized to submit on behalf of
|
||||
the copyright owner. For the purposes of this definition, "submitted"
|
||||
means any form of electronic, verbal, or written communication sent
|
||||
to the Licensor or its representatives, including but not limited to
|
||||
communication on electronic mailing lists, source code control systems,
|
||||
and issue tracking systems that are managed by, or on behalf of, the
|
||||
Licensor for the purpose of discussing and improving the Work, but
|
||||
excluding communication that is conspicuously marked or otherwise
|
||||
designated in writing by the copyright owner as "Not a Contribution."
|
||||
|
||||
"Contributor" shall mean Licensor and any individual or Legal Entity
|
||||
on behalf of whom a Contribution has been received by Licensor and
|
||||
subsequently incorporated within the Work.
|
||||
|
||||
2. Grant of Copyright License. Subject to the terms and conditions of
|
||||
this License, each Contributor hereby grants to You a perpetual,
|
||||
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
|
||||
copyright license to reproduce, prepare Derivative Works of,
|
||||
publicly display, publicly perform, sublicense, and distribute the
|
||||
Work and such Derivative Works in Source or Object form.
|
||||
|
||||
3. Grant of Patent License. Subject to the terms and conditions of
|
||||
this License, each Contributor hereby grants to You a perpetual,
|
||||
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
|
||||
(except as stated in this section) patent license to make, have made,
|
||||
use, offer to sell, sell, import, and otherwise transfer the Work,
|
||||
where such license applies only to those patent claims licensable
|
||||
by such Contributor that are necessarily infringed by their
|
||||
Contribution(s) alone or by combination of their Contribution(s)
|
||||
with the Work to which such Contribution(s) was submitted. If You
|
||||
institute patent litigation against any entity (including a
|
||||
cross-claim or counterclaim in a lawsuit) alleging that the Work
|
||||
or a Contribution incorporated within the Work constitutes direct
|
||||
or contributory patent infringement, then any patent licenses
|
||||
granted to You under this License for that Work shall terminate
|
||||
as of the date such litigation is filed.
|
||||
|
||||
4. Redistribution. You may reproduce and distribute copies of the
|
||||
Work or Derivative Works thereof in any medium, with or without
|
||||
modifications, and in Source or Object form, provided that You
|
||||
meet the following conditions:
|
||||
|
||||
(a) You must give any other recipients of the Work or
|
||||
Derivative Works a copy of this License; and
|
||||
|
||||
(b) You must cause any modified files to carry prominent notices
|
||||
stating that You changed the files; and
|
||||
|
||||
(c) You must retain, in the Source form of any Derivative Works
|
||||
that You distribute, all copyright, patent, trademark, and
|
||||
attribution notices from the Source form of the Work,
|
||||
excluding those notices that do not pertain to any part of
|
||||
the Derivative Works; and
|
||||
|
||||
(d) If the Work includes a "NOTICE" text file as part of its
|
||||
distribution, then any Derivative Works that You distribute must
|
||||
include a readable copy of the attribution notices contained
|
||||
within such NOTICE file, excluding those notices that do not
|
||||
pertain to any part of the Derivative Works, in at least one
|
||||
of the following places: within a NOTICE text file distributed
|
||||
as part of the Derivative Works; within the Source form or
|
||||
documentation, if provided along with the Derivative Works; or,
|
||||
within a display generated by the Derivative Works, if and
|
||||
wherever such third-party notices normally appear. The contents
|
||||
of the NOTICE file are for informational purposes only and
|
||||
do not modify the License. You may add Your own attribution
|
||||
notices within Derivative Works that You distribute, alongside
|
||||
or as an addendum to the NOTICE text from the Work, provided
|
||||
that such additional attribution notices cannot be construed
|
||||
as modifying the License.
|
||||
|
||||
You may add Your own copyright statement to Your modifications and
|
||||
may provide additional or different license terms and conditions
|
||||
for use, reproduction, or distribution of Your modifications, or
|
||||
for any such Derivative Works as a whole, provided Your use,
|
||||
reproduction, and distribution of the Work otherwise complies with
|
||||
the conditions stated in this License.
|
||||
|
||||
5. Submission of Contributions. Unless You explicitly state otherwise,
|
||||
any Contribution intentionally submitted for inclusion in the Work
|
||||
by You to the Licensor shall be under the terms and conditions of
|
||||
this License, without any additional terms or conditions.
|
||||
Notwithstanding the above, nothing herein shall supersede or modify
|
||||
the terms of any separate license agreement you may have executed
|
||||
with Licensor regarding such Contributions.
|
||||
|
||||
6. Trademarks. This License does not grant permission to use the trade
|
||||
names, trademarks, service marks, or product names of the Licensor,
|
||||
except as required for reasonable and customary use in describing the
|
||||
origin of the Work and reproducing the content of the NOTICE file.
|
||||
|
||||
7. Disclaimer of Warranty. Unless required by applicable law or
|
||||
agreed to in writing, Licensor provides the Work (and each
|
||||
Contributor provides its Contributions) on an "AS IS" BASIS,
|
||||
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
|
||||
implied, including, without limitation, any warranties or conditions
|
||||
of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
|
||||
PARTICULAR PURPOSE. You are solely responsible for determining the
|
||||
appropriateness of using or redistributing the Work and assume any
|
||||
risks associated with Your exercise of permissions under this License.
|
||||
|
||||
8. Limitation of Liability. In no event and under no legal theory,
|
||||
whether in tort (including negligence), contract, or otherwise,
|
||||
unless required by applicable law (such as deliberate and grossly
|
||||
negligent acts) or agreed to in writing, shall any Contributor be
|
||||
liable to You for damages, including any direct, indirect, special,
|
||||
incidental, or consequential damages of any character arising as a
|
||||
result of this License or out of the use or inability to use the
|
||||
Work (including but not limited to damages for loss of goodwill,
|
||||
work stoppage, computer failure or malfunction, or any and all
|
||||
other commercial damages or losses), even if such Contributor
|
||||
has been advised of the possibility of such damages.
|
||||
|
||||
9. Accepting Warranty or Additional Liability. While redistributing
|
||||
the Work or Derivative Works thereof, You may choose to offer,
|
||||
and charge a fee for, acceptance of support, warranty, indemnity,
|
||||
or other liability obligations and/or rights consistent with this
|
||||
License. However, in accepting such obligations, You may act only
|
||||
on Your own behalf and on Your sole responsibility, not on behalf
|
||||
of any other Contributor, and only if You agree to indemnify,
|
||||
defend, and hold each Contributor harmless for any liability
|
||||
incurred by, or claims asserted against, such Contributor by reason
|
||||
of your accepting any such warranty or additional liability.
|
||||
|
||||
END OF TERMS AND CONDITIONS
|
||||
|
||||
APPENDIX: How to apply the Apache License to your work.
|
||||
|
||||
To apply the Apache License to your work, attach the following
|
||||
boilerplate notice, with the fields enclosed by brackets "[]"
|
||||
replaced with your own identifying information. (Don't include
|
||||
the brackets!) The text should be enclosed in the appropriate
|
||||
comment syntax for the file format. We also recommend that a
|
||||
file or class name and description of purpose be included on the
|
||||
same "printed page" as the copyright notice for easier
|
||||
identification within third-party archives.
|
||||
|
||||
Copyright [yyyy] [name of copyright owner]
|
||||
|
||||
Licensed under the Apache License, Version 2.0 (the "License");
|
||||
you may not use this file except in compliance with the License.
|
||||
You may obtain a copy of the License at
|
||||
|
||||
http://www.apache.org/licenses/LICENSE-2.0
|
||||
|
||||
Unless required by applicable law or agreed to in writing, software
|
||||
distributed under the License is distributed on an "AS IS" BASIS,
|
||||
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
See the License for the specific language governing permissions and
|
||||
limitations under the License.
|
||||
|
16 vendor/github.com/googleapis/gnostic/Makefile (generated, vendored, new file)
@ -0,0 +1,16 @@

build:
	go get
	go install
	cd generate-gnostic; go get; go install
	cd apps/disco; go get; go install
	cd apps/report; go get; go install
	cd apps/petstore-builder; go get; go install
	cd plugins/gnostic-summary; go get; go install
	cd plugins/gnostic-analyze; go get; go install
	cd plugins/gnostic-go-generator; go get; go install
	rm -f $(GOPATH)/bin/gnostic-go-client $(GOPATH)/bin/gnostic-go-server
	ln -s $(GOPATH)/bin/gnostic-go-generator $(GOPATH)/bin/gnostic-go-client
	ln -s $(GOPATH)/bin/gnostic-go-generator $(GOPATH)/bin/gnostic-go-server
	cd extensions/sample; make
8728 vendor/github.com/googleapis/gnostic/OpenAPIv2/OpenAPIv2.go (generated, vendored, new file; diff suppressed because it is too large)
4455 vendor/github.com/googleapis/gnostic/OpenAPIv2/OpenAPIv2.pb.go (generated, vendored, new file; diff suppressed because it is too large)
663 vendor/github.com/googleapis/gnostic/OpenAPIv2/OpenAPIv2.proto (generated, vendored, new file)
@ -0,0 +1,663 @@
|
||||
// Copyright 2017 Google Inc. All Rights Reserved.
|
||||
//
|
||||
// Licensed under the Apache License, Version 2.0 (the "License");
|
||||
// you may not use this file except in compliance with the License.
|
||||
// You may obtain a copy of the License at
|
||||
//
|
||||
// http://www.apache.org/licenses/LICENSE-2.0
|
||||
//
|
||||
// Unless required by applicable law or agreed to in writing, software
|
||||
// distributed under the License is distributed on an "AS IS" BASIS,
|
||||
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
// See the License for the specific language governing permissions and
|
||||
// limitations under the License.
|
||||
|
||||
// THIS FILE IS AUTOMATICALLY GENERATED.
|
||||
|
||||
syntax = "proto3";
|
||||
|
||||
package openapi.v2;
|
||||
|
||||
import "google/protobuf/any.proto";
|
||||
|
||||
// This option lets the proto compiler generate Java code inside the package
|
||||
// name (see below) instead of inside an outer class. It creates a simpler
|
||||
// developer experience by reducing one-level of name nesting and be
|
||||
// consistent with most programming languages that don't support outer classes.
|
||||
option java_multiple_files = true;
|
||||
|
||||
// The Java outer classname should be the filename in UpperCamelCase. This
|
||||
// class is only used to hold proto descriptor, so developers don't need to
|
||||
// work with it directly.
|
||||
option java_outer_classname = "OpenAPIProto";
|
||||
|
||||
// The Java package name must be proto package name with proper prefix.
|
||||
option java_package = "org.openapi_v2";
|
||||
|
||||
// A reasonable prefix for the Objective-C symbols generated from the package.
|
||||
// It should at a minimum be 3 characters long, all uppercase, and convention
|
||||
// is to use an abbreviation of the package name. Something short, but
|
||||
// hopefully unique enough to not conflict with things that may come along in
|
||||
// the future. 'GPB' is reserved for the protocol buffer implementation itself.
|
||||
option objc_class_prefix = "OAS";
|
||||
|
||||
message AdditionalPropertiesItem {
|
||||
oneof oneof {
|
||||
Schema schema = 1;
|
||||
bool boolean = 2;
|
||||
}
|
||||
}
|
||||
|
||||
message Any {
|
||||
google.protobuf.Any value = 1;
|
||||
string yaml = 2;
|
||||
}
|
||||
|
||||
message ApiKeySecurity {
|
||||
string type = 1;
|
||||
string name = 2;
|
||||
string in = 3;
|
||||
string description = 4;
|
||||
repeated NamedAny vendor_extension = 5;
|
||||
}
|
||||
|
||||
message BasicAuthenticationSecurity {
|
||||
string type = 1;
|
||||
string description = 2;
|
||||
repeated NamedAny vendor_extension = 3;
|
||||
}
|
||||
|
||||
message BodyParameter {
|
||||
// A brief description of the parameter. This could contain examples of use. GitHub Flavored Markdown is allowed.
|
||||
string description = 1;
|
||||
// The name of the parameter.
|
||||
string name = 2;
|
||||
// Determines the location of the parameter.
|
||||
string in = 3;
|
||||
// Determines whether or not this parameter is required or optional.
|
||||
bool required = 4;
|
||||
Schema schema = 5;
|
||||
repeated NamedAny vendor_extension = 6;
|
||||
}
|
||||
|
||||
// Contact information for the owners of the API.
|
||||
message Contact {
|
||||
// The identifying name of the contact person/organization.
|
||||
string name = 1;
|
||||
// The URL pointing to the contact information.
|
||||
string url = 2;
|
||||
// The email address of the contact person/organization.
|
||||
string email = 3;
|
||||
repeated NamedAny vendor_extension = 4;
|
||||
}
|
||||
|
||||
message Default {
|
||||
repeated NamedAny additional_properties = 1;
|
||||
}
|
||||
|
||||
// One or more JSON objects describing the schemas being consumed and produced by the API.
|
||||
message Definitions {
|
||||
repeated NamedSchema additional_properties = 1;
|
||||
}
|
||||
|
||||
message Document {
|
||||
// The Swagger version of this document.
|
||||
string swagger = 1;
|
||||
Info info = 2;
|
||||
// The host (name or ip) of the API. Example: 'swagger.io'
|
||||
string host = 3;
|
||||
// The base path to the API. Example: '/api'.
|
||||
string base_path = 4;
|
||||
// The transfer protocol of the API.
|
||||
repeated string schemes = 5;
|
||||
// A list of MIME types accepted by the API.
|
||||
repeated string consumes = 6;
|
||||
// A list of MIME types the API can produce.
|
||||
repeated string produces = 7;
|
||||
Paths paths = 8;
|
||||
Definitions definitions = 9;
|
||||
ParameterDefinitions parameters = 10;
|
||||
ResponseDefinitions responses = 11;
|
||||
repeated SecurityRequirement security = 12;
|
||||
SecurityDefinitions security_definitions = 13;
|
||||
repeated Tag tags = 14;
|
||||
ExternalDocs external_docs = 15;
|
||||
repeated NamedAny vendor_extension = 16;
|
||||
}
|
||||
|
||||
message Examples {
|
||||
repeated NamedAny additional_properties = 1;
|
||||
}
|
||||
|
||||
// information about external documentation
|
||||
message ExternalDocs {
|
||||
string description = 1;
|
||||
string url = 2;
|
||||
repeated NamedAny vendor_extension = 3;
|
||||
}
|
||||
|
||||
// A deterministic version of a JSON Schema object.
|
||||
message FileSchema {
|
||||
string format = 1;
|
||||
string title = 2;
|
||||
string description = 3;
|
||||
Any default = 4;
|
||||
repeated string required = 5;
|
||||
string type = 6;
|
||||
bool read_only = 7;
|
||||
ExternalDocs external_docs = 8;
|
||||
Any example = 9;
|
||||
repeated NamedAny vendor_extension = 10;
|
||||
}
|
||||
|
||||
message FormDataParameterSubSchema {
|
||||
// Determines whether or not this parameter is required or optional.
|
||||
bool required = 1;
|
||||
// Determines the location of the parameter.
|
||||
string in = 2;
|
||||
// A brief description of the parameter. This could contain examples of use. GitHub Flavored Markdown is allowed.
|
||||
string description = 3;
|
||||
// The name of the parameter.
|
||||
string name = 4;
|
||||
// allows sending a parameter by name only or with an empty value.
|
||||
bool allow_empty_value = 5;
|
||||
string type = 6;
|
||||
string format = 7;
|
||||
PrimitivesItems items = 8;
|
||||
string collection_format = 9;
|
||||
Any default = 10;
|
||||
double maximum = 11;
|
||||
bool exclusive_maximum = 12;
|
||||
double minimum = 13;
|
||||
bool exclusive_minimum = 14;
|
||||
int64 max_length = 15;
|
||||
int64 min_length = 16;
|
||||
string pattern = 17;
|
||||
int64 max_items = 18;
|
||||
int64 min_items = 19;
|
||||
bool unique_items = 20;
|
||||
repeated Any enum = 21;
|
||||
double multiple_of = 22;
|
||||
repeated NamedAny vendor_extension = 23;
|
||||
}
|
||||
|
||||
message Header {
|
||||
string type = 1;
|
||||
string format = 2;
|
||||
PrimitivesItems items = 3;
|
||||
string collection_format = 4;
|
||||
Any default = 5;
|
||||
double maximum = 6;
|
||||
bool exclusive_maximum = 7;
|
||||
double minimum = 8;
|
||||
bool exclusive_minimum = 9;
|
||||
int64 max_length = 10;
|
||||
int64 min_length = 11;
|
||||
string pattern = 12;
|
||||
int64 max_items = 13;
|
||||
int64 min_items = 14;
|
||||
bool unique_items = 15;
|
||||
repeated Any enum = 16;
|
||||
double multiple_of = 17;
|
||||
string description = 18;
|
||||
repeated NamedAny vendor_extension = 19;
|
||||
}
|
||||
|
||||
message HeaderParameterSubSchema {
|
||||
// Determines whether or not this parameter is required or optional.
|
||||
bool required = 1;
|
||||
// Determines the location of the parameter.
|
||||
string in = 2;
|
||||
// A brief description of the parameter. This could contain examples of use. GitHub Flavored Markdown is allowed.
|
||||
string description = 3;
|
||||
// The name of the parameter.
|
||||
string name = 4;
|
||||
string type = 5;
|
||||
string format = 6;
|
||||
PrimitivesItems items = 7;
|
||||
string collection_format = 8;
|
||||
Any default = 9;
|
||||
double maximum = 10;
|
||||
bool exclusive_maximum = 11;
|
||||
double minimum = 12;
|
||||
bool exclusive_minimum = 13;
|
||||
int64 max_length = 14;
|
||||
int64 min_length = 15;
|
||||
string pattern = 16;
|
||||
int64 max_items = 17;
|
||||
int64 min_items = 18;
|
||||
bool unique_items = 19;
|
||||
repeated Any enum = 20;
|
||||
double multiple_of = 21;
|
||||
repeated NamedAny vendor_extension = 22;
|
||||
}
|
||||
|
||||
message Headers {
|
||||
repeated NamedHeader additional_properties = 1;
|
||||
}
|
||||
|
||||
// General information about the API.
|
||||
message Info {
|
||||
// A unique and precise title of the API.
|
||||
string title = 1;
|
||||
// A semantic version number of the API.
|
||||
string version = 2;
|
||||
// A longer description of the API. Should be different from the title. GitHub Flavored Markdown is allowed.
|
||||
string description = 3;
|
||||
// The terms of service for the API.
|
||||
string terms_of_service = 4;
|
||||
Contact contact = 5;
|
||||
License license = 6;
|
||||
repeated NamedAny vendor_extension = 7;
|
||||
}
|
||||
|
||||
message ItemsItem {
|
||||
repeated Schema schema = 1;
|
||||
}
|
||||
|
||||
message JsonReference {
|
||||
string _ref = 1;
|
||||
string description = 2;
|
||||
}
|
||||
|
||||
message License {
|
||||
// The name of the license type. It's encouraged to use an OSI compatible license.
|
||||
string name = 1;
|
||||
// The URL pointing to the license.
|
||||
string url = 2;
|
||||
repeated NamedAny vendor_extension = 3;
|
||||
}
|
||||
|
||||
// Automatically-generated message used to represent maps of Any as ordered (name,value) pairs.
|
||||
message NamedAny {
|
||||
// Map key
|
||||
string name = 1;
|
||||
// Mapped value
|
||||
Any value = 2;
|
||||
}
|
||||
|
||||
// Automatically-generated message used to represent maps of Header as ordered (name,value) pairs.
|
||||
message NamedHeader {
|
||||
// Map key
|
||||
string name = 1;
|
||||
// Mapped value
|
||||
Header value = 2;
|
||||
}
|
||||
|
||||
// Automatically-generated message used to represent maps of Parameter as ordered (name,value) pairs.
|
||||
message NamedParameter {
|
||||
// Map key
|
||||
string name = 1;
|
||||
// Mapped value
|
||||
Parameter value = 2;
|
||||
}
|
||||
|
||||
// Automatically-generated message used to represent maps of PathItem as ordered (name,value) pairs.
|
||||
message NamedPathItem {
|
||||
// Map key
|
||||
string name = 1;
|
||||
// Mapped value
|
||||
PathItem value = 2;
|
||||
}
|
||||
|
||||
// Automatically-generated message used to represent maps of Response as ordered (name,value) pairs.
|
||||
message NamedResponse {
|
||||
// Map key
|
||||
string name = 1;
|
||||
// Mapped value
|
||||
Response value = 2;
|
||||
}
|
||||
|
||||
// Automatically-generated message used to represent maps of ResponseValue as ordered (name,value) pairs.
|
||||
message NamedResponseValue {
|
||||
// Map key
|
||||
string name = 1;
|
||||
// Mapped value
|
||||
ResponseValue value = 2;
|
||||
}
|
||||
|
||||
// Automatically-generated message used to represent maps of Schema as ordered (name,value) pairs.
|
||||
message NamedSchema {
|
||||
// Map key
|
||||
string name = 1;
|
||||
// Mapped value
|
||||
Schema value = 2;
|
||||
}
|
||||
|
||||
// Automatically-generated message used to represent maps of SecurityDefinitionsItem as ordered (name,value) pairs.
|
||||
message NamedSecurityDefinitionsItem {
|
||||
// Map key
|
||||
string name = 1;
|
||||
// Mapped value
|
||||
SecurityDefinitionsItem value = 2;
|
||||
}
|
||||
|
||||
// Automatically-generated message used to represent maps of string as ordered (name,value) pairs.
|
||||
message NamedString {
|
||||
// Map key
|
||||
string name = 1;
|
||||
// Mapped value
|
||||
string value = 2;
|
||||
}
|
||||
|
||||
// Automatically-generated message used to represent maps of StringArray as ordered (name,value) pairs.
|
||||
message NamedStringArray {
|
||||
// Map key
|
||||
string name = 1;
|
||||
// Mapped value
|
||||
StringArray value = 2;
|
||||
}
|
||||
|
||||
message NonBodyParameter {
|
||||
oneof oneof {
|
||||
HeaderParameterSubSchema header_parameter_sub_schema = 1;
|
||||
FormDataParameterSubSchema form_data_parameter_sub_schema = 2;
|
||||
QueryParameterSubSchema query_parameter_sub_schema = 3;
|
||||
PathParameterSubSchema path_parameter_sub_schema = 4;
|
||||
}
|
||||
}
|
||||
|
||||
message Oauth2AccessCodeSecurity {
|
||||
string type = 1;
|
||||
string flow = 2;
|
||||
Oauth2Scopes scopes = 3;
|
||||
string authorization_url = 4;
|
||||
string token_url = 5;
|
||||
string description = 6;
|
||||
repeated NamedAny vendor_extension = 7;
|
||||
}
|
||||
|
||||
message Oauth2ApplicationSecurity {
|
||||
string type = 1;
|
||||
string flow = 2;
|
||||
Oauth2Scopes scopes = 3;
|
||||
string token_url = 4;
|
||||
string description = 5;
|
||||
repeated NamedAny vendor_extension = 6;
|
||||
}
|
||||
|
||||
message Oauth2ImplicitSecurity {
|
||||
string type = 1;
|
||||
string flow = 2;
|
||||
Oauth2Scopes scopes = 3;
|
||||
string authorization_url = 4;
|
||||
string description = 5;
|
||||
repeated NamedAny vendor_extension = 6;
|
||||
}
|
||||
|
||||
message Oauth2PasswordSecurity {
|
||||
string type = 1;
|
||||
string flow = 2;
|
||||
Oauth2Scopes scopes = 3;
|
||||
string token_url = 4;
|
||||
string description = 5;
|
||||
repeated NamedAny vendor_extension = 6;
|
||||
}
|
||||
|
||||
message Oauth2Scopes {
|
||||
repeated NamedString additional_properties = 1;
|
||||
}
|
||||
|
||||
message Operation {
|
||||
repeated string tags = 1;
|
||||
// A brief summary of the operation.
|
||||
string summary = 2;
|
||||
// A longer description of the operation, GitHub Flavored Markdown is allowed.
|
||||
string description = 3;
|
||||
ExternalDocs external_docs = 4;
|
||||
// A unique identifier of the operation.
|
||||
string operation_id = 5;
|
||||
// A list of MIME types the API can produce.
|
||||
repeated string produces = 6;
|
||||
// A list of MIME types the API can consume.
|
||||
repeated string consumes = 7;
|
||||
// The parameters needed to send a valid API call.
|
||||
repeated ParametersItem parameters = 8;
|
||||
Responses responses = 9;
|
||||
// The transfer protocol of the API.
|
||||
repeated string schemes = 10;
|
||||
bool deprecated = 11;
|
||||
repeated SecurityRequirement security = 12;
|
||||
repeated NamedAny vendor_extension = 13;
|
||||
}
|
||||
|
||||
message Parameter {
|
||||
oneof oneof {
|
||||
BodyParameter body_parameter = 1;
|
||||
NonBodyParameter non_body_parameter = 2;
|
||||
}
|
||||
}
|
||||
|
||||
// One or more JSON representations for parameters
|
||||
message ParameterDefinitions {
|
||||
repeated NamedParameter additional_properties = 1;
|
||||
}
|
||||
|
||||
message ParametersItem {
|
||||
oneof oneof {
|
||||
Parameter parameter = 1;
|
||||
JsonReference json_reference = 2;
|
||||
}
|
||||
}
|
||||
|
||||
message PathItem {
|
||||
string _ref = 1;
|
||||
Operation get = 2;
|
||||
Operation put = 3;
|
||||
Operation post = 4;
|
||||
Operation delete = 5;
|
||||
Operation options = 6;
|
||||
Operation head = 7;
|
||||
Operation patch = 8;
|
||||
// The parameters needed to send a valid API call.
|
||||
repeated ParametersItem parameters = 9;
|
||||
repeated NamedAny vendor_extension = 10;
|
||||
}
|
||||
|
||||
message PathParameterSubSchema {
|
||||
// Determines whether or not this parameter is required or optional.
|
||||
bool required = 1;
|
||||
// Determines the location of the parameter.
|
||||
string in = 2;
|
||||
// A brief description of the parameter. This could contain examples of use. GitHub Flavored Markdown is allowed.
|
||||
string description = 3;
|
||||
// The name of the parameter.
|
||||
string name = 4;
|
||||
string type = 5;
|
||||
string format = 6;
|
||||
PrimitivesItems items = 7;
|
||||
string collection_format = 8;
|
||||
Any default = 9;
|
||||
double maximum = 10;
|
||||
bool exclusive_maximum = 11;
|
||||
double minimum = 12;
|
||||
bool exclusive_minimum = 13;
|
||||
int64 max_length = 14;
|
||||
int64 min_length = 15;
|
||||
string pattern = 16;
|
||||
int64 max_items = 17;
|
||||
int64 min_items = 18;
|
||||
bool unique_items = 19;
|
||||
repeated Any enum = 20;
|
||||
double multiple_of = 21;
|
||||
repeated NamedAny vendor_extension = 22;
|
||||
}
|
||||
|
||||
// Relative paths to the individual endpoints. They must be relative to the 'basePath'.
|
||||
message Paths {
|
||||
repeated NamedAny vendor_extension = 1;
|
||||
repeated NamedPathItem path = 2;
|
||||
}
|
||||
|
||||
message PrimitivesItems {
|
||||
string type = 1;
|
||||
string format = 2;
|
||||
PrimitivesItems items = 3;
|
||||
string collection_format = 4;
|
||||
Any default = 5;
|
||||
double maximum = 6;
|
||||
bool exclusive_maximum = 7;
|
||||
double minimum = 8;
|
||||
bool exclusive_minimum = 9;
|
||||
int64 max_length = 10;
|
||||
int64 min_length = 11;
|
||||
string pattern = 12;
|
||||
int64 max_items = 13;
|
||||
int64 min_items = 14;
|
||||
bool unique_items = 15;
|
||||
repeated Any enum = 16;
|
||||
double multiple_of = 17;
|
||||
repeated NamedAny vendor_extension = 18;
|
||||
}
|
||||
|
||||
message Properties {
|
||||
repeated NamedSchema additional_properties = 1;
|
||||
}
|
||||
|
||||
message QueryParameterSubSchema {
|
||||
// Determines whether or not this parameter is required or optional.
|
||||
bool required = 1;
|
||||
// Determines the location of the parameter.
|
||||
string in = 2;
|
||||
// A brief description of the parameter. This could contain examples of use. GitHub Flavored Markdown is allowed.
|
||||
string description = 3;
|
||||
// The name of the parameter.
|
||||
string name = 4;
|
||||
// allows sending a parameter by name only or with an empty value.
|
||||
bool allow_empty_value = 5;
|
||||
string type = 6;
|
||||
string format = 7;
|
||||
PrimitivesItems items = 8;
|
||||
string collection_format = 9;
|
||||
Any default = 10;
|
||||
double maximum = 11;
|
||||
bool exclusive_maximum = 12;
|
||||
double minimum = 13;
|
||||
bool exclusive_minimum = 14;
|
||||
int64 max_length = 15;
|
||||
int64 min_length = 16;
|
||||
string pattern = 17;
|
||||
int64 max_items = 18;
|
||||
int64 min_items = 19;
|
||||
bool unique_items = 20;
|
||||
repeated Any enum = 21;
|
||||
double multiple_of = 22;
|
||||
repeated NamedAny vendor_extension = 23;
|
||||
}
|
||||
|
||||
message Response {
|
||||
string description = 1;
|
||||
SchemaItem schema = 2;
|
||||
Headers headers = 3;
|
||||
Examples examples = 4;
|
||||
repeated NamedAny vendor_extension = 5;
|
||||
}
|
||||
|
||||
// One or more JSON representations for parameters
|
||||
message ResponseDefinitions {
|
||||
repeated NamedResponse additional_properties = 1;
|
||||
}
|
||||
|
||||
message ResponseValue {
|
||||
oneof oneof {
|
||||
Response response = 1;
|
||||
JsonReference json_reference = 2;
|
||||
}
|
||||
}
|
||||
|
||||
// Response objects names can either be any valid HTTP status code or 'default'.
|
||||
message Responses {
|
||||
repeated NamedResponseValue response_code = 1;
|
||||
repeated NamedAny vendor_extension = 2;
|
||||
}
|
||||
|
||||
// A deterministic version of a JSON Schema object.
|
||||
message Schema {
|
||||
string _ref = 1;
|
||||
string format = 2;
|
||||
string title = 3;
|
||||
string description = 4;
|
||||
Any default = 5;
|
||||
double multiple_of = 6;
|
||||
double maximum = 7;
|
||||
bool exclusive_maximum = 8;
|
||||
double minimum = 9;
|
||||
bool exclusive_minimum = 10;
|
||||
int64 max_length = 11;
|
||||
int64 min_length = 12;
|
||||
string pattern = 13;
|
||||
int64 max_items = 14;
|
||||
int64 min_items = 15;
|
||||
bool unique_items = 16;
|
||||
int64 max_properties = 17;
|
||||
int64 min_properties = 18;
|
||||
repeated string required = 19;
|
||||
repeated Any enum = 20;
|
||||
AdditionalPropertiesItem additional_properties = 21;
|
||||
TypeItem type = 22;
|
||||
ItemsItem items = 23;
|
||||
repeated Schema all_of = 24;
|
||||
Properties properties = 25;
|
||||
string discriminator = 26;
|
||||
bool read_only = 27;
|
||||
Xml xml = 28;
|
||||
ExternalDocs external_docs = 29;
|
||||
Any example = 30;
|
||||
repeated NamedAny vendor_extension = 31;
|
||||
}
|
||||
|
||||
message SchemaItem {
|
||||
oneof oneof {
|
||||
Schema schema = 1;
|
||||
FileSchema file_schema = 2;
|
||||
}
|
||||
}
|
||||
|
||||
message SecurityDefinitions {
|
||||
repeated NamedSecurityDefinitionsItem additional_properties = 1;
|
||||
}
|
||||
|
||||
message SecurityDefinitionsItem {
|
||||
oneof oneof {
|
||||
BasicAuthenticationSecurity basic_authentication_security = 1;
|
||||
ApiKeySecurity api_key_security = 2;
|
||||
Oauth2ImplicitSecurity oauth2_implicit_security = 3;
|
||||
Oauth2PasswordSecurity oauth2_password_security = 4;
|
||||
Oauth2ApplicationSecurity oauth2_application_security = 5;
|
||||
Oauth2AccessCodeSecurity oauth2_access_code_security = 6;
|
||||
}
|
||||
}
|
||||
|
||||
message SecurityRequirement {
|
||||
repeated NamedStringArray additional_properties = 1;
|
||||
}
|
||||
|
||||
message StringArray {
|
||||
repeated string value = 1;
|
||||
}
|
||||
|
||||
message Tag {
|
||||
string name = 1;
|
||||
string description = 2;
|
||||
ExternalDocs external_docs = 3;
|
||||
repeated NamedAny vendor_extension = 4;
|
||||
}
|
||||
|
||||
message TypeItem {
|
||||
repeated string value = 1;
|
||||
}
|
||||
|
||||
// Any property starting with x- is valid.
|
||||
message VendorExtension {
|
||||
repeated NamedAny additional_properties = 1;
|
||||
}
|
||||
|
||||
message Xml {
|
||||
string name = 1;
|
||||
string namespace = 2;
|
||||
string prefix = 3;
|
||||
bool attribute = 4;
|
||||
bool wrapped = 5;
|
||||
repeated NamedAny vendor_extension = 6;
|
||||
}
|
||||
|
16 vendor/github.com/googleapis/gnostic/OpenAPIv2/README.md (generated, vendored, new file)
@ -0,0 +1,16 @@
# OpenAPI v2 Protocol Buffer Models

This directory contains a Protocol Buffer-language model
and related code for supporting OpenAPI v2.

Gnostic applications and plugins can use OpenAPIv2.proto
to generate Protocol Buffer support code for their preferred languages.

OpenAPIv2.go is used by Gnostic to read JSON and YAML OpenAPI
descriptions into the Protocol Buffer-based datastructures
generated from OpenAPIv2.proto.

OpenAPIv2.proto and OpenAPIv2.go are generated by the Gnostic
compiler generator, and OpenAPIv2.pb.go is generated by
protoc, the Protocol Buffer compiler, and protoc-gen-go, the
Protocol Buffer Go code generation plugin.
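
To make the relationship between OpenAPIv2.proto and the generated Go code concrete, here is a small sketch (not part of the vendored sources) that fills in a few fields of the Document message shown above and serializes it with the vendored golang/protobuf runtime. The openapi_v2 package alias, its import path, and the Go field names are assumptions based on standard protoc-gen-go naming; check the generated OpenAPIv2.pb.go for the exact identifiers.

    package main

    import (
        "fmt"
        "log"

        "github.com/golang/protobuf/proto"
        openapi_v2 "github.com/googleapis/gnostic/OpenAPIv2" // assumed path of the generated package
    )

    func main() {
        // Populate a few fields of the Document message defined in OpenAPIv2.proto.
        doc := &openapi_v2.Document{
            Swagger:  "2.0",
            Host:     "swagger.io",
            BasePath: "/api",
            Info: &openapi_v2.Info{
                Title:   "Example API",
                Version: "1.0.0",
            },
        }

        // Serialize to the binary form that gnostic emits and plugins consume.
        data, err := proto.Marshal(doc)
        if err != nil {
            log.Fatal(err)
        }
        fmt.Printf("encoded %d bytes\n", len(data))
    }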
1610 vendor/github.com/googleapis/gnostic/OpenAPIv2/openapi-2.0.json (generated, vendored, new file; diff suppressed because it is too large)
109 vendor/github.com/googleapis/gnostic/README.md (generated, vendored, new file)
@ -0,0 +1,109 @@
[](https://travis-ci.org/googleapis/gnostic)

# ⨁ gnostic

This repository contains a Go command line tool which converts
JSON and YAML [OpenAPI](https://github.com/OAI/OpenAPI-Specification)
descriptions to and from equivalent Protocol Buffer representations.

[Protocol Buffers](https://developers.google.com/protocol-buffers/)
provide a language-neutral, platform-neutral, extensible mechanism
for serializing structured data.
**gnostic**'s Protocol Buffer models for the OpenAPI Specification
can be used to generate code that includes data structures with
explicit fields for the elements of an OpenAPI description.
This makes it possible for developers to work with OpenAPI
descriptions in type-safe ways, which is particularly useful
in strongly-typed languages like Go and Swift.

**gnostic** reads OpenAPI descriptions into
these generated data structures, reports errors,
resolves internal dependencies, and writes the results
in a binary form that can be used in any language that is
supported by the Protocol Buffer tools.
A plugin interface simplifies integration with API
tools written in a variety of different languages,
and when necessary, Protocol Buffer OpenAPI descriptions
can be reexported as JSON or YAML.

**gnostic** compilation code and OpenAPI Protocol Buffer
models are automatically generated from an
[OpenAPI JSON Schema](https://github.com/OAI/OpenAPI-Specification/blob/master/schemas/v2.0/schema.json).
Source code for the generator is in the [generate-gnostic](generate-gnostic) directory.

## Disclaimer

This is prerelease software and work in progress. Feedback and
contributions are welcome, but we currently make no guarantees of
function or stability.

## Requirements

**gnostic** can be run in any environment that supports [Go](http://golang.org)
and the [Google Protocol Buffer Compiler](https://github.com/google/protobuf).

## Installation

1. Get this package by downloading it with `go get`.

        go get github.com/googleapis/gnostic

2. [Optional] Build and run the compiler generator.
   This uses the OpenAPI JSON schema to generate a Protocol Buffer language file
   that describes the OpenAPI specification and a Go-language file of code that
   will read a JSON or YAML OpenAPI representation into the generated protocol
   buffers. Pre-generated versions of these files are in the OpenAPIv2 directory.

        cd $GOPATH/src/github.com/googleapis/gnostic/generate-gnostic
        go install
        cd ..
        generate-gnostic --v2

3. [Optional] Generate Protocol Buffer support code.
   A pre-generated version of this file is checked into the OpenAPIv2 directory.
   This step requires a local installation of protoc, the Protocol Buffer Compiler,
   and the Go protoc plugin.
   You can get protoc [here](https://github.com/google/protobuf).
   You can install the plugin with this command:

        go get -u github.com/golang/protobuf/protoc-gen-go

   Then use the following to recompile the Gnostic Protocol Buffer models:

        ./COMPILE-PROTOS.sh

4. [Optional] Rebuild **gnostic**. This is only necessary if you've performed steps
   2 or 3 above.

        go install github.com/googleapis/gnostic

5. Run **gnostic**. This will create a file in the current directory named "petstore.pb" that contains a binary
   Protocol Buffer description of a sample API.

        gnostic --pb-out=. examples/v2.0/json/petstore.json

6. You can also compile files that you specify with a URL. Here's another way to compile the previous
   example. This time we're creating "petstore.text", which contains a textual representation of the
   Protocol Buffer description. This is mainly for use in testing and debugging.

        gnostic --text-out=petstore.text https://raw.githubusercontent.com/googleapis/gnostic/master/examples/petstore.json

7. For a sample application, see apps/report.

        go install github.com/googleapis/gnostic/apps/report
        report petstore.pb

8. **gnostic** supports plugins. This builds and runs a sample plugin
   that reports some basic information about an API. The "-" causes the plugin to
   write its output to stdout.

        go install github.com/googleapis/gnostic/plugins/gnostic-go-sample
        gnostic examples/v2.0/json/petstore.json --go-sample-out=-

## Copyright

Copyright 2017, Google Inc.

## License

Released under the Apache 2.0 license.
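As a companion to step 5, a minimal sketch (not part of this commit) that loads a gnostic-produced binary description back into the typed model. It assumes a petstore.pb produced as above and the vendored OpenAPIv2 and protobuf packages added elsewhere in this diff; the getters follow the usual protoc-gen-go conventions.

// readpb.go: load "petstore.pb" into the generated openapi_v2.Document type.
package main

import (
    "fmt"
    "io/ioutil"
    "log"

    "github.com/golang/protobuf/proto"
    openapi_v2 "github.com/googleapis/gnostic/OpenAPIv2"
)

func main() {
    data, err := ioutil.ReadFile("petstore.pb")
    if err != nil {
        log.Fatal(err)
    }
    document := &openapi_v2.Document{}
    if err := proto.Unmarshal(data, document); err != nil {
        log.Fatal(err)
    }
    // Typed access to the description instead of raw JSON/YAML traversal.
    fmt.Println(document.GetInfo().GetTitle(), document.GetInfo().GetVersion())
}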
3
vendor/github.com/googleapis/gnostic/compiler/README.md
generated
vendored
Normal file
@ -0,0 +1,3 @@
# Compiler support code

This directory contains compiler support code used by Gnostic and Gnostic extensions.
43
vendor/github.com/googleapis/gnostic/compiler/context.go
generated
vendored
Normal file
@ -0,0 +1,43 @@
|
||||
// Copyright 2017 Google Inc. All Rights Reserved.
|
||||
//
|
||||
// Licensed under the Apache License, Version 2.0 (the "License");
|
||||
// you may not use this file except in compliance with the License.
|
||||
// You may obtain a copy of the License at
|
||||
//
|
||||
// http://www.apache.org/licenses/LICENSE-2.0
|
||||
//
|
||||
// Unless required by applicable law or agreed to in writing, software
|
||||
// distributed under the License is distributed on an "AS IS" BASIS,
|
||||
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
// See the License for the specific language governing permissions and
|
||||
// limitations under the License.
|
||||
|
||||
package compiler
|
||||
|
||||
// Context contains state of the compiler as it traverses a document.
|
||||
type Context struct {
|
||||
Parent *Context
|
||||
Name string
|
||||
ExtensionHandlers *[]ExtensionHandler
|
||||
}
|
||||
|
||||
// NewContextWithExtensions returns a new object representing the compiler state
|
||||
func NewContextWithExtensions(name string, parent *Context, extensionHandlers *[]ExtensionHandler) *Context {
|
||||
return &Context{Name: name, Parent: parent, ExtensionHandlers: extensionHandlers}
|
||||
}
|
||||
|
||||
// NewContext returns a new object representing the compiler state
|
||||
func NewContext(name string, parent *Context) *Context {
|
||||
if parent != nil {
|
||||
return &Context{Name: name, Parent: parent, ExtensionHandlers: parent.ExtensionHandlers}
|
||||
}
|
||||
return &Context{Name: name, Parent: parent, ExtensionHandlers: nil}
|
||||
}
|
||||
|
||||
// Description returns a text description of the compiler state
|
||||
func (context *Context) Description() string {
|
||||
if context.Parent != nil {
|
||||
return context.Parent.Description() + "." + context.Name
|
||||
}
|
||||
return context.Name
|
||||
}
|
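For illustration, a short sketch (not part of this commit) of how these contexts chain. Only the "$root" label is taken from gnostic's own usage in gnostic.go; the other names are hypothetical.

// contextchain.go: build a chain of compiler contexts and print its description.
package main

import (
    "fmt"

    "github.com/googleapis/gnostic/compiler"
)

func main() {
    root := compiler.NewContextWithExtensions("$root", nil, nil)
    paths := compiler.NewContext("paths", root)
    pet := compiler.NewContext("/pet", paths)
    // Description walks the Parent chain, so this prints "$root.paths./pet".
    fmt.Println(pet.Description())
}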
61
vendor/github.com/googleapis/gnostic/compiler/error.go
generated
vendored
Normal file
@ -0,0 +1,61 @@
|
||||
// Copyright 2017 Google Inc. All Rights Reserved.
|
||||
//
|
||||
// Licensed under the Apache License, Version 2.0 (the "License");
|
||||
// you may not use this file except in compliance with the License.
|
||||
// You may obtain a copy of the License at
|
||||
//
|
||||
// http://www.apache.org/licenses/LICENSE-2.0
|
||||
//
|
||||
// Unless required by applicable law or agreed to in writing, software
|
||||
// distributed under the License is distributed on an "AS IS" BASIS,
|
||||
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
// See the License for the specific language governing permissions and
|
||||
// limitations under the License.
|
||||
|
||||
package compiler
|
||||
|
||||
// Error represents compiler errors and their location in the document.
|
||||
type Error struct {
|
||||
Context *Context
|
||||
Message string
|
||||
}
|
||||
|
||||
// NewError creates an Error.
|
||||
func NewError(context *Context, message string) *Error {
|
||||
return &Error{Context: context, Message: message}
|
||||
}
|
||||
|
||||
// Error returns the string value of an Error.
|
||||
func (err *Error) Error() string {
|
||||
if err.Context == nil {
|
||||
return "ERROR " + err.Message
|
||||
}
|
||||
return "ERROR " + err.Context.Description() + " " + err.Message
|
||||
}
|
||||
|
||||
// ErrorGroup is a container for groups of Error values.
|
||||
type ErrorGroup struct {
|
||||
Errors []error
|
||||
}
|
||||
|
||||
// NewErrorGroupOrNil returns a new ErrorGroup for a slice of errors or nil if the slice is empty.
|
||||
func NewErrorGroupOrNil(errors []error) error {
|
||||
if len(errors) == 0 {
|
||||
return nil
|
||||
} else if len(errors) == 1 {
|
||||
return errors[0]
|
||||
} else {
|
||||
return &ErrorGroup{Errors: errors}
|
||||
}
|
||||
}
|
||||
|
||||
func (group *ErrorGroup) Error() string {
|
||||
result := ""
|
||||
for i, err := range group.Errors {
|
||||
if i > 0 {
|
||||
result += "\n"
|
||||
}
|
||||
result += err.Error()
|
||||
}
|
||||
return result
|
||||
}
|
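A sketch (not part of this commit) of how these types are combined; the error messages are made up for illustration.

// errorgroup.go: collect compiler errors and collapse them with NewErrorGroupOrNil.
package main

import (
    "fmt"

    "github.com/googleapis/gnostic/compiler"
)

func main() {
    ctx := compiler.NewContext("info", compiler.NewContext("$root", nil))
    errs := []error{
        compiler.NewError(ctx, "missing required property: title"),
        compiler.NewError(ctx, "missing required property: version"),
    }
    // nil for an empty slice, the single error for one entry, an ErrorGroup otherwise.
    if err := compiler.NewErrorGroupOrNil(errs); err != nil {
        fmt.Println(err) // one "ERROR $root.info ..." line per collected error
    }
}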
101
vendor/github.com/googleapis/gnostic/compiler/extension-handler.go
generated
vendored
Normal file
@ -0,0 +1,101 @@
|
||||
// Copyright 2017 Google Inc. All Rights Reserved.
|
||||
//
|
||||
// Licensed under the Apache License, Version 2.0 (the "License");
|
||||
// you may not use this file except in compliance with the License.
|
||||
// You may obtain a copy of the License at
|
||||
//
|
||||
// http://www.apache.org/licenses/LICENSE-2.0
|
||||
//
|
||||
// Unless required by applicable law or agreed to in writing, software
|
||||
// distributed under the License is distributed on an "AS IS" BASIS,
|
||||
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
// See the License for the specific language governing permissions and
|
||||
// limitations under the License.
|
||||
|
||||
package compiler
|
||||
|
||||
import (
|
||||
"bytes"
|
||||
"fmt"
|
||||
"os/exec"
|
||||
|
||||
"strings"
|
||||
|
||||
"errors"
|
||||
|
||||
"github.com/golang/protobuf/proto"
|
||||
"github.com/golang/protobuf/ptypes/any"
|
||||
ext_plugin "github.com/googleapis/gnostic/extensions"
|
||||
yaml "gopkg.in/yaml.v2"
|
||||
)
|
||||
|
||||
// ExtensionHandler describes a binary that is called by the compiler to handle specification extensions.
|
||||
type ExtensionHandler struct {
|
||||
Name string
|
||||
}
|
||||
|
||||
// HandleExtension calls a binary extension handler.
|
||||
func HandleExtension(context *Context, in interface{}, extensionName string) (bool, *any.Any, error) {
|
||||
handled := false
|
||||
var errFromPlugin error
|
||||
var outFromPlugin *any.Any
|
||||
|
||||
if context != nil && context.ExtensionHandlers != nil && len(*(context.ExtensionHandlers)) != 0 {
|
||||
for _, customAnyProtoGenerator := range *(context.ExtensionHandlers) {
|
||||
outFromPlugin, errFromPlugin = customAnyProtoGenerator.handle(in, extensionName)
|
||||
if outFromPlugin == nil {
|
||||
continue
|
||||
} else {
|
||||
handled = true
|
||||
break
|
||||
}
|
||||
}
|
||||
}
|
||||
return handled, outFromPlugin, errFromPlugin
|
||||
}
|
||||
|
||||
func (extensionHandlers *ExtensionHandler) handle(in interface{}, extensionName string) (*any.Any, error) {
|
||||
if extensionHandlers.Name != "" {
|
||||
binary, _ := yaml.Marshal(in)
|
||||
|
||||
request := &ext_plugin.ExtensionHandlerRequest{}
|
||||
|
||||
version := &ext_plugin.Version{}
|
||||
version.Major = 0
|
||||
version.Minor = 1
|
||||
version.Patch = 0
|
||||
request.CompilerVersion = version
|
||||
|
||||
request.Wrapper = &ext_plugin.Wrapper{}
|
||||
|
||||
request.Wrapper.Version = "v2"
|
||||
request.Wrapper.Yaml = string(binary)
|
||||
request.Wrapper.ExtensionName = extensionName
|
||||
|
||||
requestBytes, _ := proto.Marshal(request)
|
||||
cmd := exec.Command(extensionHandlers.Name)
|
||||
cmd.Stdin = bytes.NewReader(requestBytes)
|
||||
output, err := cmd.Output()
|
||||
|
||||
if err != nil {
|
||||
fmt.Printf("Error: %+v\n", err)
|
||||
return nil, err
|
||||
}
|
||||
response := &ext_plugin.ExtensionHandlerResponse{}
|
||||
err = proto.Unmarshal(output, response)
|
||||
if err != nil {
|
||||
fmt.Printf("Error: %+v\n", err)
|
||||
fmt.Printf("%s\n", string(output))
|
||||
return nil, err
|
||||
}
|
||||
if !response.Handled {
|
||||
return nil, nil
|
||||
}
|
||||
if len(response.Error) != 0 {
|
||||
message := fmt.Sprintf("Errors when parsing: %+v for field %s by vendor extension handler %s. Details %+v", in, extensionName, extensionHandlers.Name, strings.Join(response.Error, ","))
|
||||
return nil, errors.New(message)
|
||||
}
|
||||
return response.Value, nil
|
||||
}
|
||||
return nil, nil
|
||||
}
|
197
vendor/github.com/googleapis/gnostic/compiler/helpers.go
generated
vendored
Normal file
@ -0,0 +1,197 @@
|
||||
// Copyright 2017 Google Inc. All Rights Reserved.
|
||||
//
|
||||
// Licensed under the Apache License, Version 2.0 (the "License");
|
||||
// you may not use this file except in compliance with the License.
|
||||
// You may obtain a copy of the License at
|
||||
//
|
||||
// http://www.apache.org/licenses/LICENSE-2.0
|
||||
//
|
||||
// Unless required by applicable law or agreed to in writing, software
|
||||
// distributed under the License is distributed on an "AS IS" BASIS,
|
||||
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
// See the License for the specific language governing permissions and
|
||||
// limitations under the License.
|
||||
|
||||
package compiler
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"gopkg.in/yaml.v2"
|
||||
"regexp"
|
||||
"sort"
|
||||
"strconv"
|
||||
)
|
||||
|
||||
// compiler helper functions, usually called from generated code
|
||||
|
||||
// UnpackMap gets a yaml.MapSlice if possible.
|
||||
func UnpackMap(in interface{}) (yaml.MapSlice, bool) {
|
||||
m, ok := in.(yaml.MapSlice)
|
||||
if ok {
|
||||
return m, true
|
||||
}
|
||||
// do we have an empty array?
|
||||
a, ok := in.([]interface{})
|
||||
if ok && len(a) == 0 {
|
||||
// if so, return an empty map
|
||||
return yaml.MapSlice{}, true
|
||||
}
|
||||
return nil, false
|
||||
}
|
||||
|
||||
// SortedKeysForMap returns the sorted keys of a yaml.MapSlice.
|
||||
func SortedKeysForMap(m yaml.MapSlice) []string {
|
||||
keys := make([]string, 0)
|
||||
for _, item := range m {
|
||||
keys = append(keys, item.Key.(string))
|
||||
}
|
||||
sort.Strings(keys)
|
||||
return keys
|
||||
}
|
||||
|
||||
// MapHasKey returns true if a yaml.MapSlice contains a specified key.
|
||||
func MapHasKey(m yaml.MapSlice, key string) bool {
|
||||
for _, item := range m {
|
||||
itemKey, ok := item.Key.(string)
|
||||
if ok && key == itemKey {
|
||||
return true
|
||||
}
|
||||
}
|
||||
return false
|
||||
}
|
||||
|
||||
// MapValueForKey gets the value of a map value for a specified key.
|
||||
func MapValueForKey(m yaml.MapSlice, key string) interface{} {
|
||||
for _, item := range m {
|
||||
itemKey, ok := item.Key.(string)
|
||||
if ok && key == itemKey {
|
||||
return item.Value
|
||||
}
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
// ConvertInterfaceArrayToStringArray converts an array of interfaces to an array of strings, if possible.
|
||||
func ConvertInterfaceArrayToStringArray(interfaceArray []interface{}) []string {
|
||||
stringArray := make([]string, 0)
|
||||
for _, item := range interfaceArray {
|
||||
v, ok := item.(string)
|
||||
if ok {
|
||||
stringArray = append(stringArray, v)
|
||||
}
|
||||
}
|
||||
return stringArray
|
||||
}
|
||||
|
||||
// MissingKeysInMap identifies which keys from a list of required keys are not in a map.
|
||||
func MissingKeysInMap(m yaml.MapSlice, requiredKeys []string) []string {
|
||||
missingKeys := make([]string, 0)
|
||||
for _, k := range requiredKeys {
|
||||
if !MapHasKey(m, k) {
|
||||
missingKeys = append(missingKeys, k)
|
||||
}
|
||||
}
|
||||
return missingKeys
|
||||
}
|
||||
|
||||
// InvalidKeysInMap returns keys in a map that don't match a list of allowed keys and patterns.
|
||||
func InvalidKeysInMap(m yaml.MapSlice, allowedKeys []string, allowedPatterns []*regexp.Regexp) []string {
|
||||
invalidKeys := make([]string, 0)
|
||||
for _, item := range m {
|
||||
itemKey, ok := item.Key.(string)
|
||||
if ok {
|
||||
key := itemKey
|
||||
found := false
|
||||
// does the key match an allowed key?
|
||||
for _, allowedKey := range allowedKeys {
|
||||
if key == allowedKey {
|
||||
found = true
|
||||
break
|
||||
}
|
||||
}
|
||||
if !found {
|
||||
// does the key match an allowed pattern?
|
||||
for _, allowedPattern := range allowedPatterns {
|
||||
if allowedPattern.MatchString(key) {
|
||||
found = true
|
||||
break
|
||||
}
|
||||
}
|
||||
if !found {
|
||||
invalidKeys = append(invalidKeys, key)
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
return invalidKeys
|
||||
}
|
||||
|
||||
// DescribeMap describes a map (for debugging purposes).
|
||||
func DescribeMap(in interface{}, indent string) string {
|
||||
description := ""
|
||||
m, ok := in.(map[string]interface{})
|
||||
if ok {
|
||||
keys := make([]string, 0)
|
||||
for k := range m {
|
||||
keys = append(keys, k)
|
||||
}
|
||||
sort.Strings(keys)
|
||||
for _, k := range keys {
|
||||
v := m[k]
|
||||
description += fmt.Sprintf("%s%s:\n", indent, k)
|
||||
description += DescribeMap(v, indent+" ")
|
||||
}
|
||||
return description
|
||||
}
|
||||
a, ok := in.([]interface{})
|
||||
if ok {
|
||||
for i, v := range a {
|
||||
description += fmt.Sprintf("%s%d:\n", indent, i)
|
||||
description += DescribeMap(v, indent+" ")
|
||||
}
|
||||
return description
|
||||
}
|
||||
description += fmt.Sprintf("%s%+v\n", indent, in)
|
||||
return description
|
||||
}
|
||||
|
||||
// PluralProperties returns the string "properties" pluralized.
|
||||
func PluralProperties(count int) string {
|
||||
if count == 1 {
|
||||
return "property"
|
||||
}
|
||||
return "properties"
|
||||
}
|
||||
|
||||
// StringArrayContainsValue returns true if a string array contains a specified value.
|
||||
func StringArrayContainsValue(array []string, value string) bool {
|
||||
for _, item := range array {
|
||||
if item == value {
|
||||
return true
|
||||
}
|
||||
}
|
||||
return false
|
||||
}
|
||||
|
||||
// StringArrayContainsValues returns true if a string array contains all of a list of specified values.
|
||||
func StringArrayContainsValues(array []string, values []string) bool {
|
||||
for _, value := range values {
|
||||
if !StringArrayContainsValue(array, value) {
|
||||
return false
|
||||
}
|
||||
}
|
||||
return true
|
||||
}
|
||||
|
||||
// StringValue returns the string value of an item.
|
||||
func StringValue(item interface{}) (value string, ok bool) {
|
||||
value, ok = item.(string)
|
||||
if ok {
|
||||
return value, ok
|
||||
}
|
||||
intValue, ok := item.(int)
|
||||
if ok {
|
||||
return strconv.Itoa(intValue), true
|
||||
}
|
||||
return "", false
|
||||
}
|
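A sketch (not part of this commit) of the map helpers on a yaml.MapSlice, the representation the compiler works with; the document literal is made up.

// helpersdemo.go: inspect a parsed YAML mapping with the compiler helpers.
package main

import (
    "fmt"

    "github.com/googleapis/gnostic/compiler"
    yaml "gopkg.in/yaml.v2"
)

func main() {
    var node yaml.MapSlice
    _ = yaml.Unmarshal([]byte("swagger: \"2.0\"\ninfo:\n  title: demo"), &node)

    if m, ok := compiler.UnpackMap(node); ok {
        fmt.Println(compiler.MapHasKey(m, "swagger"))                           // true
        fmt.Println(compiler.MissingKeysInMap(m, []string{"swagger", "paths"})) // [paths]
        if s, ok := compiler.StringValue(compiler.MapValueForKey(m, "swagger")); ok {
            fmt.Println(s) // 2.0
        }
    }
}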
16
vendor/github.com/googleapis/gnostic/compiler/main.go
generated
vendored
Normal file
@ -0,0 +1,16 @@
|
||||
// Copyright 2017 Google Inc. All Rights Reserved.
|
||||
//
|
||||
// Licensed under the Apache License, Version 2.0 (the "License");
|
||||
// you may not use this file except in compliance with the License.
|
||||
// You may obtain a copy of the License at
|
||||
//
|
||||
// http://www.apache.org/licenses/LICENSE-2.0
|
||||
//
|
||||
// Unless required by applicable law or agreed to in writing, software
|
||||
// distributed under the License is distributed on an "AS IS" BASIS,
|
||||
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
// See the License for the specific language governing permissions and
|
||||
// limitations under the License.
|
||||
|
||||
// Package compiler provides support functions to generated compiler code.
|
||||
package compiler
|
175
vendor/github.com/googleapis/gnostic/compiler/reader.go
generated
vendored
Normal file
@ -0,0 +1,175 @@
|
||||
// Copyright 2017 Google Inc. All Rights Reserved.
|
||||
//
|
||||
// Licensed under the Apache License, Version 2.0 (the "License");
|
||||
// you may not use this file except in compliance with the License.
|
||||
// You may obtain a copy of the License at
|
||||
//
|
||||
// http://www.apache.org/licenses/LICENSE-2.0
|
||||
//
|
||||
// Unless required by applicable law or agreed to in writing, software
|
||||
// distributed under the License is distributed on an "AS IS" BASIS,
|
||||
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
// See the License for the specific language governing permissions and
|
||||
// limitations under the License.
|
||||
|
||||
package compiler
|
||||
|
||||
import (
|
||||
"errors"
|
||||
"fmt"
|
||||
"gopkg.in/yaml.v2"
|
||||
"io/ioutil"
|
||||
"log"
|
||||
"net/http"
|
||||
"net/url"
|
||||
"path/filepath"
|
||||
"strings"
|
||||
)
|
||||
|
||||
var fileCache map[string][]byte
|
||||
var infoCache map[string]interface{}
|
||||
var count int64
|
||||
|
||||
var verboseReader = false
|
||||
|
||||
func initializeFileCache() {
|
||||
if fileCache == nil {
|
||||
fileCache = make(map[string][]byte, 0)
|
||||
}
|
||||
}
|
||||
|
||||
func initializeInfoCache() {
|
||||
if infoCache == nil {
|
||||
infoCache = make(map[string]interface{}, 0)
|
||||
}
|
||||
}
|
||||
|
||||
// FetchFile gets a specified file from the local filesystem or a remote location.
|
||||
func FetchFile(fileurl string) ([]byte, error) {
|
||||
initializeFileCache()
|
||||
bytes, ok := fileCache[fileurl]
|
||||
if ok {
|
||||
if verboseReader {
|
||||
log.Printf("Cache hit %s", fileurl)
|
||||
}
|
||||
return bytes, nil
|
||||
}
|
||||
if verboseReader {
|
||||
log.Printf("Fetching %s", fileurl)
|
||||
}
|
||||
response, err := http.Get(fileurl)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
if response.StatusCode != 200 {
|
||||
return nil, errors.New(fmt.Sprintf("Error downloading %s: %s", fileurl, response.Status))
|
||||
}
|
||||
defer response.Body.Close()
|
||||
bytes, err = ioutil.ReadAll(response.Body)
|
||||
if err == nil {
|
||||
fileCache[fileurl] = bytes
|
||||
}
|
||||
return bytes, err
|
||||
}
|
||||
|
||||
// ReadBytesForFile reads the bytes of a file.
|
||||
func ReadBytesForFile(filename string) ([]byte, error) {
|
||||
// is the filename a url?
|
||||
fileurl, _ := url.Parse(filename)
|
||||
if fileurl.Scheme != "" {
|
||||
// yes, fetch it
|
||||
bytes, err := FetchFile(filename)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
return bytes, nil
|
||||
}
|
||||
// no, it's a local filename
|
||||
bytes, err := ioutil.ReadFile(filename)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
return bytes, nil
|
||||
}
|
||||
|
||||
// ReadInfoFromBytes unmarshals a file as a yaml.MapSlice.
|
||||
func ReadInfoFromBytes(filename string, bytes []byte) (interface{}, error) {
|
||||
initializeInfoCache()
|
||||
cachedInfo, ok := infoCache[filename]
|
||||
if ok {
|
||||
if verboseReader {
|
||||
log.Printf("Cache hit info for file %s", filename)
|
||||
}
|
||||
return cachedInfo, nil
|
||||
}
|
||||
if verboseReader {
|
||||
log.Printf("Reading info for file %s", filename)
|
||||
}
|
||||
var info yaml.MapSlice
|
||||
err := yaml.Unmarshal(bytes, &info)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
if len(filename) > 0 {
|
||||
infoCache[filename] = info
|
||||
}
|
||||
return info, nil
|
||||
}
|
||||
|
||||
// ReadInfoForRef reads a file and return the fragment needed to resolve a $ref.
|
||||
func ReadInfoForRef(basefile string, ref string) (interface{}, error) {
|
||||
initializeInfoCache()
|
||||
{
|
||||
info, ok := infoCache[ref]
|
||||
if ok {
|
||||
if verboseReader {
|
||||
log.Printf("Cache hit for ref %s#%s", basefile, ref)
|
||||
}
|
||||
return info, nil
|
||||
}
|
||||
}
|
||||
if verboseReader {
|
||||
log.Printf("Reading info for ref %s#%s", basefile, ref)
|
||||
}
|
||||
count = count + 1
|
||||
basedir, _ := filepath.Split(basefile)
|
||||
parts := strings.Split(ref, "#")
|
||||
var filename string
|
||||
if parts[0] != "" {
|
||||
filename = basedir + parts[0]
|
||||
} else {
|
||||
filename = basefile
|
||||
}
|
||||
bytes, err := ReadBytesForFile(filename)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
info, err := ReadInfoFromBytes(filename, bytes)
|
||||
if err != nil {
|
||||
log.Printf("File error: %v\n", err)
|
||||
} else {
|
||||
if len(parts) > 1 {
|
||||
path := strings.Split(parts[1], "/")
|
||||
for i, key := range path {
|
||||
if i > 0 {
|
||||
m, ok := info.(yaml.MapSlice)
|
||||
if ok {
|
||||
found := false
|
||||
for _, section := range m {
|
||||
if section.Key == key {
|
||||
info = section.Value
|
||||
found = true
|
||||
}
|
||||
}
|
||||
if !found {
|
||||
infoCache[ref] = nil
|
||||
return nil, NewError(nil, fmt.Sprintf("could not resolve %s", ref))
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
infoCache[ref] = info
|
||||
return info, nil
|
||||
}
|
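A sketch (not part of this commit) of the reader entry points above. The file path is the petstore example from the gnostic repository, not part of this vendored tree, and the "#/definitions/Pet" fragment is only an illustration.

// readerdemo.go: read a description (local path or URL) and resolve a $ref.
package main

import (
    "fmt"
    "log"

    "github.com/googleapis/gnostic/compiler"
)

func main() {
    filename := "examples/v2.0/json/petstore.json"
    data, err := compiler.ReadBytesForFile(filename) // local file or fetched URL, cached
    if err != nil {
        log.Fatal(err)
    }
    info, err := compiler.ReadInfoFromBytes(filename, data) // yaml.MapSlice, cached per file
    if err != nil {
        log.Fatal(err)
    }
    fmt.Printf("parsed %T\n", info)

    // Resolve a fragment relative to the same file (cached per ref).
    pet, err := compiler.ReadInfoForRef(filename, "#/definitions/Pet")
    if err != nil {
        log.Fatal(err)
    }
    fmt.Printf("resolved %T\n", pet)
}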
5
vendor/github.com/googleapis/gnostic/extensions/COMPILE-EXTENSION.sh
generated
vendored
Executable file
@ -0,0 +1,5 @@
go get github.com/golang/protobuf/protoc-gen-go

protoc \
  --go_out=Mgoogle/protobuf/any.proto=github.com/golang/protobuf/ptypes/any:. *.proto
5
vendor/github.com/googleapis/gnostic/extensions/README.md
generated
vendored
Normal file
@ -0,0 +1,5 @@
# Extensions

This directory contains support code for building Gnostic extensions and associated examples.

Extensions are used to compile vendor or specification extensions into protocol buffer structures.
218
vendor/github.com/googleapis/gnostic/extensions/extension.pb.go
generated
vendored
Normal file
@ -0,0 +1,218 @@
|
||||
// Code generated by protoc-gen-go. DO NOT EDIT.
|
||||
// source: extension.proto
|
||||
|
||||
/*
|
||||
Package openapiextension_v1 is a generated protocol buffer package.
|
||||
|
||||
It is generated from these files:
|
||||
extension.proto
|
||||
|
||||
It has these top-level messages:
|
||||
Version
|
||||
ExtensionHandlerRequest
|
||||
ExtensionHandlerResponse
|
||||
Wrapper
|
||||
*/
|
||||
package openapiextension_v1
|
||||
|
||||
import proto "github.com/golang/protobuf/proto"
|
||||
import fmt "fmt"
|
||||
import math "math"
|
||||
import google_protobuf "github.com/golang/protobuf/ptypes/any"
|
||||
|
||||
// Reference imports to suppress errors if they are not otherwise used.
|
||||
var _ = proto.Marshal
|
||||
var _ = fmt.Errorf
|
||||
var _ = math.Inf
|
||||
|
||||
// This is a compile-time assertion to ensure that this generated file
|
||||
// is compatible with the proto package it is being compiled against.
|
||||
// A compilation error at this line likely means your copy of the
|
||||
// proto package needs to be updated.
|
||||
const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package
|
||||
|
||||
// The version number of OpenAPI compiler.
|
||||
type Version struct {
|
||||
Major int32 `protobuf:"varint,1,opt,name=major" json:"major,omitempty"`
|
||||
Minor int32 `protobuf:"varint,2,opt,name=minor" json:"minor,omitempty"`
|
||||
Patch int32 `protobuf:"varint,3,opt,name=patch" json:"patch,omitempty"`
|
||||
// A suffix for alpha, beta or rc release, e.g., "alpha-1", "rc2". It should
|
||||
// be empty for mainline stable releases.
|
||||
Suffix string `protobuf:"bytes,4,opt,name=suffix" json:"suffix,omitempty"`
|
||||
}
|
||||
|
||||
func (m *Version) Reset() { *m = Version{} }
|
||||
func (m *Version) String() string { return proto.CompactTextString(m) }
|
||||
func (*Version) ProtoMessage() {}
|
||||
func (*Version) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{0} }
|
||||
|
||||
func (m *Version) GetMajor() int32 {
|
||||
if m != nil {
|
||||
return m.Major
|
||||
}
|
||||
return 0
|
||||
}
|
||||
|
||||
func (m *Version) GetMinor() int32 {
|
||||
if m != nil {
|
||||
return m.Minor
|
||||
}
|
||||
return 0
|
||||
}
|
||||
|
||||
func (m *Version) GetPatch() int32 {
|
||||
if m != nil {
|
||||
return m.Patch
|
||||
}
|
||||
return 0
|
||||
}
|
||||
|
||||
func (m *Version) GetSuffix() string {
|
||||
if m != nil {
|
||||
return m.Suffix
|
||||
}
|
||||
return ""
|
||||
}
|
||||
|
||||
// An encoded Request is written to the ExtensionHandler's stdin.
|
||||
type ExtensionHandlerRequest struct {
|
||||
// The OpenAPI descriptions that were explicitly listed on the command line.
|
||||
// The specifications will appear in the order they are specified to gnostic.
|
||||
Wrapper *Wrapper `protobuf:"bytes,1,opt,name=wrapper" json:"wrapper,omitempty"`
|
||||
// The version number of openapi compiler.
|
||||
CompilerVersion *Version `protobuf:"bytes,3,opt,name=compiler_version,json=compilerVersion" json:"compiler_version,omitempty"`
|
||||
}
|
||||
|
||||
func (m *ExtensionHandlerRequest) Reset() { *m = ExtensionHandlerRequest{} }
|
||||
func (m *ExtensionHandlerRequest) String() string { return proto.CompactTextString(m) }
|
||||
func (*ExtensionHandlerRequest) ProtoMessage() {}
|
||||
func (*ExtensionHandlerRequest) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{1} }
|
||||
|
||||
func (m *ExtensionHandlerRequest) GetWrapper() *Wrapper {
|
||||
if m != nil {
|
||||
return m.Wrapper
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
func (m *ExtensionHandlerRequest) GetCompilerVersion() *Version {
|
||||
if m != nil {
|
||||
return m.CompilerVersion
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
// The extensions writes an encoded ExtensionHandlerResponse to stdout.
|
||||
type ExtensionHandlerResponse struct {
|
||||
// true if the extension is handled by the extension handler; false otherwise
|
||||
Handled bool `protobuf:"varint,1,opt,name=handled" json:"handled,omitempty"`
|
||||
// Error message. If non-empty, the extension handling failed.
|
||||
// The extension handler process should exit with status code zero
|
||||
// even if it reports an error in this way.
|
||||
//
|
||||
// This should be used to indicate errors which prevent the extension from
|
||||
// operating as intended. Errors which indicate a problem in gnostic
|
||||
// itself -- such as the input Document being unparseable -- should be
|
||||
// reported by writing a message to stderr and exiting with a non-zero
|
||||
// status code.
|
||||
Error []string `protobuf:"bytes,2,rep,name=error" json:"error,omitempty"`
|
||||
// text output
|
||||
Value *google_protobuf.Any `protobuf:"bytes,3,opt,name=value" json:"value,omitempty"`
|
||||
}
|
||||
|
||||
func (m *ExtensionHandlerResponse) Reset() { *m = ExtensionHandlerResponse{} }
|
||||
func (m *ExtensionHandlerResponse) String() string { return proto.CompactTextString(m) }
|
||||
func (*ExtensionHandlerResponse) ProtoMessage() {}
|
||||
func (*ExtensionHandlerResponse) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{2} }
|
||||
|
||||
func (m *ExtensionHandlerResponse) GetHandled() bool {
|
||||
if m != nil {
|
||||
return m.Handled
|
||||
}
|
||||
return false
|
||||
}
|
||||
|
||||
func (m *ExtensionHandlerResponse) GetError() []string {
|
||||
if m != nil {
|
||||
return m.Error
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
func (m *ExtensionHandlerResponse) GetValue() *google_protobuf.Any {
|
||||
if m != nil {
|
||||
return m.Value
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
type Wrapper struct {
|
||||
// version of the OpenAPI specification in which this extension was written.
|
||||
Version string `protobuf:"bytes,1,opt,name=version" json:"version,omitempty"`
|
||||
// Name of the extension
|
||||
ExtensionName string `protobuf:"bytes,2,opt,name=extension_name,json=extensionName" json:"extension_name,omitempty"`
|
||||
// Must be a valid yaml for the proto
|
||||
Yaml string `protobuf:"bytes,3,opt,name=yaml" json:"yaml,omitempty"`
|
||||
}
|
||||
|
||||
func (m *Wrapper) Reset() { *m = Wrapper{} }
|
||||
func (m *Wrapper) String() string { return proto.CompactTextString(m) }
|
||||
func (*Wrapper) ProtoMessage() {}
|
||||
func (*Wrapper) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{3} }
|
||||
|
||||
func (m *Wrapper) GetVersion() string {
|
||||
if m != nil {
|
||||
return m.Version
|
||||
}
|
||||
return ""
|
||||
}
|
||||
|
||||
func (m *Wrapper) GetExtensionName() string {
|
||||
if m != nil {
|
||||
return m.ExtensionName
|
||||
}
|
||||
return ""
|
||||
}
|
||||
|
||||
func (m *Wrapper) GetYaml() string {
|
||||
if m != nil {
|
||||
return m.Yaml
|
||||
}
|
||||
return ""
|
||||
}
|
||||
|
||||
func init() {
|
||||
proto.RegisterType((*Version)(nil), "openapiextension.v1.Version")
|
||||
proto.RegisterType((*ExtensionHandlerRequest)(nil), "openapiextension.v1.ExtensionHandlerRequest")
|
||||
proto.RegisterType((*ExtensionHandlerResponse)(nil), "openapiextension.v1.ExtensionHandlerResponse")
|
||||
proto.RegisterType((*Wrapper)(nil), "openapiextension.v1.Wrapper")
|
||||
}
|
||||
|
||||
func init() { proto.RegisterFile("extension.proto", fileDescriptor0) }
|
||||
|
||||
var fileDescriptor0 = []byte{
|
||||
// 357 bytes of a gzipped FileDescriptorProto
|
||||
0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x74, 0x91, 0x4d, 0x4b, 0xc3, 0x40,
|
||||
0x18, 0x84, 0x49, 0xbf, 0x62, 0x56, 0x6c, 0x65, 0x2d, 0x1a, 0xc5, 0x43, 0x09, 0x08, 0x45, 0x64,
|
||||
0x4b, 0x15, 0xbc, 0xb7, 0x50, 0xd4, 0x8b, 0x2d, 0x7b, 0xa8, 0x37, 0xcb, 0x36, 0x7d, 0x9b, 0x46,
|
||||
0x92, 0xdd, 0x75, 0xf3, 0x61, 0xfb, 0x57, 0x3c, 0xfa, 0x4b, 0x25, 0xbb, 0x49, 0x3d, 0xa8, 0xb7,
|
||||
0xcc, 0xc3, 0x24, 0xef, 0xcc, 0x04, 0x75, 0x60, 0x9b, 0x02, 0x4f, 0x42, 0xc1, 0x89, 0x54, 0x22,
|
||||
0x15, 0xf8, 0x44, 0x48, 0xe0, 0x4c, 0x86, 0x3f, 0x3c, 0x1f, 0x5e, 0x9c, 0x07, 0x42, 0x04, 0x11,
|
||||
0x0c, 0xb4, 0x65, 0x99, 0xad, 0x07, 0x8c, 0xef, 0x8c, 0xdf, 0xf3, 0x91, 0x3d, 0x07, 0x55, 0x18,
|
||||
0x71, 0x17, 0x35, 0x63, 0xf6, 0x26, 0x94, 0x6b, 0xf5, 0xac, 0x7e, 0x93, 0x1a, 0xa1, 0x69, 0xc8,
|
||||
0x85, 0x72, 0x6b, 0x25, 0x2d, 0x44, 0x41, 0x25, 0x4b, 0xfd, 0x8d, 0x5b, 0x37, 0x54, 0x0b, 0x7c,
|
||||
0x8a, 0x5a, 0x49, 0xb6, 0x5e, 0x87, 0x5b, 0xb7, 0xd1, 0xb3, 0xfa, 0x0e, 0x2d, 0x95, 0xf7, 0x69,
|
||||
0xa1, 0xb3, 0x49, 0x15, 0xe8, 0x91, 0xf1, 0x55, 0x04, 0x8a, 0xc2, 0x7b, 0x06, 0x49, 0x8a, 0xef,
|
||||
0x91, 0xfd, 0xa1, 0x98, 0x94, 0x60, 0xee, 0x1e, 0xde, 0x5e, 0x92, 0x3f, 0x2a, 0x90, 0x17, 0xe3,
|
||||
0xa1, 0x95, 0x19, 0x3f, 0xa0, 0x63, 0x5f, 0xc4, 0x32, 0x8c, 0x40, 0x2d, 0x72, 0xd3, 0x40, 0x87,
|
||||
0xf9, 0xef, 0x03, 0x65, 0x4b, 0xda, 0xa9, 0xde, 0x2a, 0x81, 0x97, 0x23, 0xf7, 0x77, 0xb6, 0x44,
|
||||
0x0a, 0x9e, 0x00, 0x76, 0x91, 0xbd, 0xd1, 0x68, 0xa5, 0xc3, 0x1d, 0xd0, 0x4a, 0x16, 0x03, 0x80,
|
||||
0x52, 0x7a, 0x96, 0x7a, 0xdf, 0xa1, 0x46, 0xe0, 0x6b, 0xd4, 0xcc, 0x59, 0x94, 0x41, 0x99, 0xa4,
|
||||
0x4b, 0xcc, 0xf0, 0xa4, 0x1a, 0x9e, 0x8c, 0xf8, 0x8e, 0x1a, 0x8b, 0xf7, 0x8a, 0xec, 0xb2, 0x54,
|
||||
0x71, 0xa6, 0xaa, 0x60, 0xe9, 0xe1, 0x2a, 0x89, 0xaf, 0x50, 0x7b, 0xdf, 0x62, 0xc1, 0x59, 0x0c,
|
||||
0xfa, 0x37, 0x38, 0xf4, 0x68, 0x4f, 0x9f, 0x59, 0x0c, 0x18, 0xa3, 0xc6, 0x8e, 0xc5, 0x91, 0x3e,
|
||||
0xeb, 0x50, 0xfd, 0x3c, 0xbe, 0x41, 0x6d, 0xa1, 0x02, 0x12, 0x70, 0x91, 0xa4, 0xa1, 0x4f, 0xf2,
|
||||
0xe1, 0x18, 0x4f, 0x25, 0xf0, 0xd1, 0xec, 0x69, 0x5f, 0x77, 0x3e, 0x9c, 0x59, 0x5f, 0xb5, 0xfa,
|
||||
0x74, 0x34, 0x59, 0xb6, 0x74, 0xc4, 0xbb, 0xef, 0x00, 0x00, 0x00, 0xff, 0xff, 0x84, 0x5c, 0x6b,
|
||||
0x80, 0x51, 0x02, 0x00, 0x00,
|
||||
}
|
93
vendor/github.com/googleapis/gnostic/extensions/extension.proto
generated
vendored
Normal file
@ -0,0 +1,93 @@
|
||||
// Copyright 2017 Google Inc. All Rights Reserved.
|
||||
//
|
||||
// Licensed under the Apache License, Version 2.0 (the "License");
|
||||
// you may not use this file except in compliance with the License.
|
||||
// You may obtain a copy of the License at
|
||||
//
|
||||
// http://www.apache.org/licenses/LICENSE-2.0
|
||||
//
|
||||
// Unless required by applicable law or agreed to in writing, software
|
||||
// distributed under the License is distributed on an "AS IS" BASIS,
|
||||
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
// See the License for the specific language governing permissions and
|
||||
// limitations under the License.
|
||||
|
||||
syntax = "proto3";
|
||||
|
||||
import "google/protobuf/any.proto";
|
||||
package openapiextension.v1;
|
||||
|
||||
// This option lets the proto compiler generate Java code inside the package
|
||||
// name (see below) instead of inside an outer class. It creates a simpler
|
||||
// developer experience by reducing one-level of name nesting and be
|
||||
// consistent with most programming languages that don't support outer classes.
|
||||
option java_multiple_files = true;
|
||||
|
||||
// The Java outer classname should be the filename in UpperCamelCase. This
|
||||
// class is only used to hold proto descriptor, so developers don't need to
|
||||
// work with it directly.
|
||||
option java_outer_classname = "OpenAPIExtensionV1";
|
||||
|
||||
// The Java package name must be proto package name with proper prefix.
|
||||
option java_package = "org.gnostic.v1";
|
||||
|
||||
// A reasonable prefix for the Objective-C symbols generated from the package.
|
||||
// It should at a minimum be 3 characters long, all uppercase, and convention
|
||||
// is to use an abbreviation of the package name. Something short, but
|
||||
// hopefully unique enough to not conflict with things that may come along in
|
||||
// the future. 'GPB' is reserved for the protocol buffer implementation itself.
|
||||
//
|
||||
option objc_class_prefix = "OAE"; // "OpenAPI Extension"
|
||||
|
||||
// The version number of OpenAPI compiler.
|
||||
message Version {
|
||||
int32 major = 1;
|
||||
int32 minor = 2;
|
||||
int32 patch = 3;
|
||||
// A suffix for alpha, beta or rc release, e.g., "alpha-1", "rc2". It should
|
||||
// be empty for mainline stable releases.
|
||||
string suffix = 4;
|
||||
}
|
||||
|
||||
// An encoded Request is written to the ExtensionHandler's stdin.
|
||||
message ExtensionHandlerRequest {
|
||||
|
||||
// The OpenAPI descriptions that were explicitly listed on the command line.
|
||||
// The specifications will appear in the order they are specified to gnostic.
|
||||
Wrapper wrapper = 1;
|
||||
|
||||
// The version number of openapi compiler.
|
||||
Version compiler_version = 3;
|
||||
}
|
||||
|
||||
// The extensions writes an encoded ExtensionHandlerResponse to stdout.
|
||||
message ExtensionHandlerResponse {
|
||||
|
||||
// true if the extension is handled by the extension handler; false otherwise
|
||||
bool handled = 1;
|
||||
|
||||
// Error message. If non-empty, the extension handling failed.
|
||||
// The extension handler process should exit with status code zero
|
||||
// even if it reports an error in this way.
|
||||
//
|
||||
// This should be used to indicate errors which prevent the extension from
|
||||
// operating as intended. Errors which indicate a problem in gnostic
|
||||
// itself -- such as the input Document being unparseable -- should be
|
||||
// reported by writing a message to stderr and exiting with a non-zero
|
||||
// status code.
|
||||
repeated string error = 2;
|
||||
|
||||
// text output
|
||||
google.protobuf.Any value = 3;
|
||||
}
|
||||
|
||||
message Wrapper {
|
||||
// version of the OpenAPI specification in which this extension was written.
|
||||
string version = 1;
|
||||
|
||||
// Name of the extension
|
||||
string extension_name = 2;
|
||||
|
||||
// Must be a valid yaml for the proto
|
||||
string yaml = 3;
|
||||
}
|
82
vendor/github.com/googleapis/gnostic/extensions/extensions.go
generated
vendored
Normal file
@ -0,0 +1,82 @@
|
||||
// Copyright 2017 Google Inc. All Rights Reserved.
|
||||
//
|
||||
// Licensed under the Apache License, Version 2.0 (the "License");
|
||||
// you may not use this file except in compliance with the License.
|
||||
// You may obtain a copy of the License at
|
||||
//
|
||||
// http://www.apache.org/licenses/LICENSE-2.0
|
||||
//
|
||||
// Unless required by applicable law or agreed to in writing, software
|
||||
// distributed under the License is distributed on an "AS IS" BASIS,
|
||||
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
// See the License for the specific language governing permissions and
|
||||
// limitations under the License.
|
||||
|
||||
package openapiextension_v1
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"io/ioutil"
|
||||
"os"
|
||||
|
||||
"github.com/golang/protobuf/proto"
|
||||
"github.com/golang/protobuf/ptypes"
|
||||
)
|
||||
|
||||
type documentHandler func(version string, extensionName string, document string)
|
||||
type extensionHandler func(name string, yamlInput string) (bool, proto.Message, error)
|
||||
|
||||
func forInputYamlFromOpenapic(handler documentHandler) {
|
||||
data, err := ioutil.ReadAll(os.Stdin)
|
||||
if err != nil {
|
||||
fmt.Println("File error:", err.Error())
|
||||
os.Exit(1)
|
||||
}
|
||||
if len(data) == 0 {
|
||||
fmt.Println("No input data.")
|
||||
os.Exit(1)
|
||||
}
|
||||
request := &ExtensionHandlerRequest{}
|
||||
err = proto.Unmarshal(data, request)
|
||||
if err != nil {
|
||||
fmt.Println("Input error:", err.Error())
|
||||
os.Exit(1)
|
||||
}
|
||||
handler(request.Wrapper.Version, request.Wrapper.ExtensionName, request.Wrapper.Yaml)
|
||||
}
|
||||
|
||||
// ProcessExtension calls the handler for a specified extension.
|
||||
func ProcessExtension(handleExtension extensionHandler) {
|
||||
response := &ExtensionHandlerResponse{}
|
||||
forInputYamlFromOpenapic(
|
||||
func(version string, extensionName string, yamlInput string) {
|
||||
var newObject proto.Message
|
||||
var err error
|
||||
|
||||
handled, newObject, err := handleExtension(extensionName, yamlInput)
|
||||
if !handled {
|
||||
responseBytes, _ := proto.Marshal(response)
|
||||
os.Stdout.Write(responseBytes)
|
||||
os.Exit(0)
|
||||
}
|
||||
|
||||
// If we reach here, then the extension is handled
|
||||
response.Handled = true
|
||||
if err != nil {
|
||||
response.Error = append(response.Error, err.Error())
|
||||
responseBytes, _ := proto.Marshal(response)
|
||||
os.Stdout.Write(responseBytes)
|
||||
os.Exit(0)
|
||||
}
|
||||
response.Value, err = ptypes.MarshalAny(newObject)
|
||||
if err != nil {
|
||||
response.Error = append(response.Error, err.Error())
|
||||
responseBytes, _ := proto.Marshal(response)
|
||||
os.Stdout.Write(responseBytes)
|
||||
os.Exit(0)
|
||||
}
|
||||
})
|
||||
|
||||
responseBytes, _ := proto.Marshal(response)
|
||||
os.Stdout.Write(responseBytes)
|
||||
}
|
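For context, a sketch (not part of this commit) of an extension-handler binary built on ProcessExtension. The extension name "x-sample" and the use of wrappers.StringValue are hypothetical; gnostic would invoke such a binary as gnostic-x-sample and exchange the ExtensionHandlerRequest/Response messages defined in extension.proto over stdin and stdout.

// gnostic-x-sample: a minimal extension handler sketch.
package main

import (
    "github.com/golang/protobuf/proto"
    "github.com/golang/protobuf/ptypes/wrappers"
    openapiextension_v1 "github.com/googleapis/gnostic/extensions"
    yaml "gopkg.in/yaml.v2"
)

func main() {
    openapiextension_v1.ProcessExtension(func(name string, yamlInput string) (bool, proto.Message, error) {
        if name != "x-sample" {
            // Not our extension; gnostic records it as unhandled.
            return false, nil, nil
        }
        var value string
        if err := yaml.Unmarshal([]byte(yamlInput), &value); err != nil {
            return true, nil, err
        }
        // Wrap the parsed value in a proto message so it can be packed into an Any.
        return true, &wrappers.StringValue{Value: value}, nil
    })
}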
610
vendor/github.com/googleapis/gnostic/gnostic.go
generated
vendored
Normal file
@ -0,0 +1,610 @@
|
||||
// Copyright 2017 Google Inc. All Rights Reserved.
|
||||
//
|
||||
// Licensed under the Apache License, Version 2.0 (the "License");
|
||||
// you may not use this file except in compliance with the License.
|
||||
// You may obtain a copy of the License at
|
||||
//
|
||||
// http://www.apache.org/licenses/LICENSE-2.0
|
||||
//
|
||||
// Unless required by applicable law or agreed to in writing, software
|
||||
// distributed under the License is distributed on an "AS IS" BASIS,
|
||||
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
// See the License for the specific language governing permissions and
|
||||
// limitations under the License.
|
||||
|
||||
//go:generate ./COMPILE-PROTOS.sh
|
||||
|
||||
// Gnostic is a tool for building better REST APIs through knowledge.
|
||||
//
|
||||
// Gnostic reads declarative descriptions of REST APIs that conform
|
||||
// to the OpenAPI Specification, reports errors, resolves internal
|
||||
// dependencies, and puts the results in a binary form that can
|
||||
// be used in any language that is supported by the Protocol Buffer
|
||||
// tools.
|
||||
//
|
||||
// Gnostic models are validated and typed. This allows API tool
|
||||
// developers to focus on their product and not worry about input
|
||||
// validation and type checking.
|
||||
//
|
||||
// Gnostic calls plugins that implement a variety of API implementation
|
||||
// and support features including generation of client and server
|
||||
// support code.
|
||||
package main
|
||||
|
||||
import (
|
||||
"bytes"
|
||||
"errors"
|
||||
"fmt"
|
||||
"io"
|
||||
"os"
|
||||
"os/exec"
|
||||
"path/filepath"
|
||||
"regexp"
|
||||
"strings"
|
||||
"time"
|
||||
|
||||
"github.com/golang/protobuf/proto"
|
||||
"github.com/googleapis/gnostic/OpenAPIv2"
|
||||
"github.com/googleapis/gnostic/OpenAPIv3"
|
||||
"github.com/googleapis/gnostic/compiler"
|
||||
"github.com/googleapis/gnostic/discovery"
|
||||
"github.com/googleapis/gnostic/jsonwriter"
|
||||
plugins "github.com/googleapis/gnostic/plugins"
|
||||
surface "github.com/googleapis/gnostic/surface"
|
||||
"gopkg.in/yaml.v2"
|
||||
)
|
||||
|
||||
const ( // Source Format
|
||||
SourceFormatUnknown = 0
|
||||
SourceFormatOpenAPI2 = 2
|
||||
SourceFormatOpenAPI3 = 3
|
||||
SourceFormatDiscovery = 4
|
||||
)
|
||||
|
||||
// Determine the version of an OpenAPI description read from JSON or YAML.
|
||||
func getOpenAPIVersionFromInfo(info interface{}) int {
|
||||
m, ok := compiler.UnpackMap(info)
|
||||
if !ok {
|
||||
return SourceFormatUnknown
|
||||
}
|
||||
swagger, ok := compiler.MapValueForKey(m, "swagger").(string)
|
||||
if ok && strings.HasPrefix(swagger, "2.0") {
|
||||
return SourceFormatOpenAPI2
|
||||
}
|
||||
openapi, ok := compiler.MapValueForKey(m, "openapi").(string)
|
||||
if ok && strings.HasPrefix(openapi, "3.0") {
|
||||
return SourceFormatOpenAPI3
|
||||
}
|
||||
kind, ok := compiler.MapValueForKey(m, "kind").(string)
|
||||
if ok && kind == "discovery#restDescription" {
|
||||
return SourceFormatDiscovery
|
||||
}
|
||||
return SourceFormatUnknown
|
||||
}
|
||||
|
||||
const (
|
||||
pluginPrefix = "gnostic-"
|
||||
extensionPrefix = "gnostic-x-"
|
||||
)
|
||||
|
||||
type pluginCall struct {
|
||||
Name string
|
||||
Invocation string
|
||||
}
|
||||
|
||||
// Invokes a plugin.
|
||||
func (p *pluginCall) perform(document proto.Message, sourceFormat int, sourceName string, timePlugins bool) ([]*plugins.Message, error) {
|
||||
if p.Name != "" {
|
||||
request := &plugins.Request{}
|
||||
|
||||
// Infer the name of the executable by adding the prefix.
|
||||
executableName := pluginPrefix + p.Name
|
||||
|
||||
// Validate invocation string with regular expression.
|
||||
invocation := p.Invocation
|
||||
|
||||
//
|
||||
// Plugin invocations must consist of
|
||||
// zero or more comma-separated key=value pairs followed by a path.
|
||||
// If pairs are present, a colon separates them from the path.
|
||||
// Keys and values must be alphanumeric strings and may contain
|
||||
// dashes, underscores, periods, or forward slashes.
|
||||
// A path can contain any characters other than the separators ',', ':', and '='.
|
||||
//
|
||||
invocationRegex := regexp.MustCompile(`^([\w-_\/\.]+=[\w-_\/\.]+(,[\w-_\/\.]+=[\w-_\/\.]+)*:)?[^,:=]+$`)
|
||||
if !invocationRegex.Match([]byte(p.Invocation)) {
|
||||
return nil, fmt.Errorf("Invalid invocation of %s: %s", executableName, invocation)
|
||||
}
|
||||
|
||||
invocationParts := strings.Split(p.Invocation, ":")
|
||||
var outputLocation string
|
||||
switch len(invocationParts) {
|
||||
case 1:
|
||||
outputLocation = invocationParts[0]
|
||||
case 2:
|
||||
parameters := strings.Split(invocationParts[0], ",")
|
||||
for _, keyvalue := range parameters {
|
||||
pair := strings.Split(keyvalue, "=")
|
||||
if len(pair) == 2 {
|
||||
request.Parameters = append(request.Parameters, &plugins.Parameter{Name: pair[0], Value: pair[1]})
|
||||
}
|
||||
}
|
||||
outputLocation = invocationParts[1]
|
||||
default:
|
||||
// badly-formed request
|
||||
outputLocation = invocationParts[len(invocationParts)-1]
|
||||
}
|
||||
|
||||
version := &plugins.Version{}
|
||||
version.Major = 0
|
||||
version.Minor = 1
|
||||
version.Patch = 0
|
||||
request.CompilerVersion = version
|
||||
|
||||
request.OutputPath = outputLocation
|
||||
|
||||
request.SourceName = sourceName
|
||||
switch sourceFormat {
|
||||
case SourceFormatOpenAPI2:
|
||||
request.AddModel("openapi.v2.Document", document)
|
||||
// include experimental API surface model
|
||||
surfaceModel, err := surface.NewModelFromOpenAPI2(document.(*openapi_v2.Document))
|
||||
if err == nil {
|
||||
request.AddModel("surface.v1.Model", surfaceModel)
|
||||
}
|
||||
case SourceFormatOpenAPI3:
|
||||
request.AddModel("openapi.v3.Document", document)
|
||||
// include experimental API surface model
|
||||
surfaceModel, err := surface.NewModelFromOpenAPI3(document.(*openapi_v3.Document))
|
||||
if err == nil {
|
||||
request.AddModel("surface.v1.Model", surfaceModel)
|
||||
}
|
||||
case SourceFormatDiscovery:
|
||||
request.AddModel("discovery.v1.Document", document)
|
||||
default:
|
||||
}
|
||||
|
||||
requestBytes, _ := proto.Marshal(request)
|
||||
|
||||
cmd := exec.Command(executableName, "-plugin")
|
||||
cmd.Stdin = bytes.NewReader(requestBytes)
|
||||
cmd.Stderr = os.Stderr
|
||||
pluginStartTime := time.Now()
|
||||
output, err := cmd.Output()
|
||||
pluginElapsedTime := time.Since(pluginStartTime)
|
||||
if timePlugins {
|
||||
fmt.Printf("> %s (%s)\n", executableName, pluginElapsedTime)
|
||||
}
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
response := &plugins.Response{}
|
||||
err = proto.Unmarshal(output, response)
|
||||
if err != nil {
|
||||
// Gnostic expects plugins to only write the
|
||||
// response message to stdout. Be sure that
|
||||
// any logging messages are written to stderr only.
|
||||
return nil, errors.New("Invalid plugin response (plugins must write log messages to stderr, not stdout).")
|
||||
}
|
||||
plugins.HandleResponse(response, outputLocation)
|
||||
return response.Messages, nil
|
||||
}
|
||||
return nil, nil
|
||||
}
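To make the invocation grammar described in the comments above concrete, a standalone sketch (not part of this commit) that splits one invocation string the same way perform does; the "format=json,verbose=true:gen/" value is only an illustration.

// invocation.go: parse "key=value,key=value:PATH" into parameters and an output location.
package main

import (
    "fmt"
    "strings"
)

func main() {
    invocation := "format=json,verbose=true:gen/"
    parts := strings.Split(invocation, ":")
    outputLocation := parts[len(parts)-1]
    if len(parts) == 2 {
        // The part before the colon holds comma-separated key=value pairs.
        for _, keyvalue := range strings.Split(parts[0], ",") {
            if pair := strings.Split(keyvalue, "="); len(pair) == 2 {
                fmt.Printf("parameter %s=%s\n", pair[0], pair[1])
            }
        }
    }
    fmt.Println("output location:", outputLocation) // gen/
}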
|
||||
|
||||
func isFile(path string) bool {
|
||||
fileInfo, err := os.Stat(path)
|
||||
if err != nil {
|
||||
return false
|
||||
}
|
||||
return !fileInfo.IsDir()
|
||||
}
|
||||
|
||||
func isDirectory(path string) bool {
|
||||
fileInfo, err := os.Stat(path)
|
||||
if err != nil {
|
||||
return false
|
||||
}
|
||||
return fileInfo.IsDir()
|
||||
}
|
||||
|
||||
// Write bytes to a named file.
|
||||
// Certain names have special meaning:
|
||||
// ! writes nothing
|
||||
// - writes to stdout
|
||||
// = writes to stderr
|
||||
// If a directory name is given, the file is written there with
|
||||
// a name derived from the source and extension arguments.
|
||||
func writeFile(name string, bytes []byte, source string, extension string) {
|
||||
var writer io.Writer
|
||||
if name == "!" {
|
||||
return
|
||||
} else if name == "-" {
|
||||
writer = os.Stdout
|
||||
} else if name == "=" {
|
||||
writer = os.Stderr
|
||||
} else if isDirectory(name) {
|
||||
base := filepath.Base(source)
|
||||
// Remove the original source extension.
|
||||
base = base[0 : len(base)-len(filepath.Ext(base))]
|
||||
// Build the path that puts the result in the passed-in directory.
|
||||
filename := name + "/" + base + "." + extension
|
||||
file, _ := os.Create(filename)
|
||||
defer file.Close()
|
||||
writer = file
|
||||
} else {
|
||||
file, _ := os.Create(name)
|
||||
defer file.Close()
|
||||
writer = file
|
||||
}
|
||||
writer.Write(bytes)
|
||||
if name == "-" || name == "=" {
|
||||
writer.Write([]byte("\n"))
|
||||
}
|
||||
}
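A fragment illustrating the special output names listed above, assuming it runs inside this package with some []byte named data; the file names are arbitrary.

writeFile("!", data, "petstore.json", "text") // writes nothing
writeFile("-", data, "petstore.json", "text") // stdout, newline appended
writeFile("=", data, "petstore.json", "text") // stderr, newline appended
writeFile(".", data, "petstore.json", "text") // "." is a directory: writes ./petstore.text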
|
||||
|
||||
// The Gnostic structure holds global state information for gnostic.
|
||||
type Gnostic struct {
|
||||
usage string
|
||||
sourceName string
|
||||
binaryOutputPath string
|
||||
textOutputPath string
|
||||
yamlOutputPath string
|
||||
jsonOutputPath string
|
||||
errorOutputPath string
|
||||
messageOutputPath string
|
||||
resolveReferences bool
|
||||
pluginCalls []*pluginCall
|
||||
extensionHandlers []compiler.ExtensionHandler
|
||||
sourceFormat int
|
||||
timePlugins bool
|
||||
}
|
||||
|
||||
// Initialize a structure to store global application state.
|
||||
func newGnostic() *Gnostic {
|
||||
g := &Gnostic{}
|
||||
// Option fields initialize to their default values.
|
||||
g.usage = `
|
||||
Usage: gnostic SOURCE [OPTIONS]
|
||||
SOURCE is the filename or URL of an API description.
|
||||
Options:
|
||||
--pb-out=PATH Write a binary proto to the specified location.
|
||||
--text-out=PATH Write a text proto to the specified location.
|
||||
--json-out=PATH Write a json API description to the specified location.
|
||||
--yaml-out=PATH Write a yaml API description to the specified location.
|
||||
--errors-out=PATH Write compilation errors to the specified location.
|
||||
--messages-out=PATH Write messages generated by plugins to the specified
|
||||
location. Messages from all plugin invocations are
|
||||
written to a single common file.
|
||||
--PLUGIN-out=PATH Run the plugin named gnostic-PLUGIN and write results
|
||||
to the specified location.
|
||||
--PLUGIN Run the plugin named gnostic-PLUGIN but don't write any
|
||||
results. Used for plugins that return messages only.
|
||||
PLUGIN must not match any other gnostic option.
|
||||
--x-EXTENSION Use the extension named gnostic-x-EXTENSION
|
||||
to process OpenAPI specification extensions.
|
||||
--resolve-refs Explicitly resolve $ref references.
|
||||
This could have problems with recursive definitions.
|
||||
--time-plugins Report plugin runtimes.
|
||||
`
|
||||
// Initialize internal structures.
|
||||
g.pluginCalls = make([]*pluginCall, 0)
|
||||
g.extensionHandlers = make([]compiler.ExtensionHandler, 0)
|
||||
return g
|
||||
}
|
||||
|
||||
// Parse command-line options.
|
||||
func (g *Gnostic) readOptions() {
|
||||
// plugin processing matches patterns of the form "--PLUGIN-out=PATH" and "--PLUGIN_out=PATH"
|
||||
pluginRegex := regexp.MustCompile("--(.+)[-_]out=(.+)")
|
||||
|
||||
// extension processing matches patterns of the form "--x-EXTENSION"
|
||||
extensionRegex := regexp.MustCompile("--x-(.+)")
|
||||
|
||||
for i, arg := range os.Args {
|
||||
if i == 0 {
|
||||
continue // skip the tool name
|
||||
}
|
||||
var m [][]byte
|
||||
if m = pluginRegex.FindSubmatch([]byte(arg)); m != nil {
|
||||
pluginName := string(m[1])
|
||||
invocation := string(m[2])
|
||||
switch pluginName {
|
||||
case "pb":
|
||||
g.binaryOutputPath = invocation
|
||||
case "text":
|
||||
g.textOutputPath = invocation
|
||||
case "json":
|
||||
g.jsonOutputPath = invocation
|
||||
case "yaml":
|
||||
g.yamlOutputPath = invocation
|
||||
case "errors":
|
||||
g.errorOutputPath = invocation
|
||||
case "messages":
|
||||
g.messageOutputPath = invocation
|
||||
default:
|
||||
p := &pluginCall{Name: pluginName, Invocation: invocation}
|
||||
g.pluginCalls = append(g.pluginCalls, p)
|
||||
}
|
||||
} else if m = extensionRegex.FindSubmatch([]byte(arg)); m != nil {
|
||||
extensionName := string(m[1])
|
||||
extensionHandler := compiler.ExtensionHandler{Name: extensionPrefix + extensionName}
|
||||
g.extensionHandlers = append(g.extensionHandlers, extensionHandler)
|
||||
} else if arg == "--resolve-refs" {
|
||||
g.resolveReferences = true
|
||||
} else if arg == "--time-plugins" {
|
||||
g.timePlugins = true
|
||||
} else if arg[0] == '-' && arg[1] == '-' {
|
||||
// try letting the option specify a plugin with no output files (or unwanted output files)
|
||||
// this is useful for calling plugins like linters that only return messages
|
||||
p := &pluginCall{Name: arg[2:len(arg)], Invocation: "!"}
|
||||
g.pluginCalls = append(g.pluginCalls, p)
|
||||
} else if arg[0] == '-' {
|
||||
fmt.Fprintf(os.Stderr, "Unknown option: %s.\n%s\n", arg, g.usage)
|
||||
os.Exit(-1)
|
||||
} else {
|
||||
g.sourceName = arg
|
||||
}
|
||||
}
|
||||
}
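As a quick illustration of the argument parsing above — a self-contained sketch in which "--go-out=./generated" is a hypothetical argument chosen only for the example — the two regular expressions split an option into a plugin or extension name plus an invocation string:

```go
package main

import (
	"fmt"
	"regexp"
)

func main() {
	pluginRegex := regexp.MustCompile("--(.+)[-_]out=(.+)")
	extensionRegex := regexp.MustCompile("--x-(.+)")

	// A hypothetical "--go-out=./generated" would dispatch to a plugin named
	// "go" (i.e. a gnostic-go binary) with invocation "./generated".
	if m := pluginRegex.FindSubmatch([]byte("--go-out=./generated")); m != nil {
		fmt.Printf("plugin=%s invocation=%s\n", m[1], m[2]) // plugin=go invocation=./generated
	}

	// "--x-sampleone" (used in gnostic_test.go) selects the gnostic-x-sampleone handler.
	if m := extensionRegex.FindSubmatch([]byte("--x-sampleone")); m != nil {
		fmt.Printf("extension=%s\n", m[1]) // extension=sampleone
	}
}
```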
|
||||
|
||||
// Validate command-line options.
|
||||
func (g *Gnostic) validateOptions() {
|
||||
if g.binaryOutputPath == "" &&
|
||||
g.textOutputPath == "" &&
|
||||
g.yamlOutputPath == "" &&
|
||||
g.jsonOutputPath == "" &&
|
||||
g.errorOutputPath == "" &&
|
||||
len(g.pluginCalls) == 0 {
|
||||
fmt.Fprintf(os.Stderr, "Missing output directives.\n%s\n", g.usage)
|
||||
os.Exit(-1)
|
||||
}
|
||||
if g.sourceName == "" {
|
||||
fmt.Fprintf(os.Stderr, "No input specified.\n%s\n", g.usage)
|
||||
os.Exit(-1)
|
||||
}
|
||||
// If we get here and the error output is unspecified, write errors to stderr.
|
||||
if g.errorOutputPath == "" {
|
||||
g.errorOutputPath = "="
|
||||
}
|
||||
}
|
||||
|
||||
// Generate an error message to be written to stderr or a file.
|
||||
func (g *Gnostic) errorBytes(err error) []byte {
|
||||
return []byte("Errors reading " + g.sourceName + "\n" + err.Error())
|
||||
}
|
||||
|
||||
// Read an OpenAPI description from YAML or JSON.
|
||||
func (g *Gnostic) readOpenAPIText(bytes []byte) (message proto.Message, err error) {
|
||||
info, err := compiler.ReadInfoFromBytes(g.sourceName, bytes)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
// Determine the OpenAPI version.
|
||||
g.sourceFormat = getOpenAPIVersionFromInfo(info)
|
||||
if g.sourceFormat == SourceFormatUnknown {
|
||||
return nil, errors.New("unable to identify OpenAPI version")
|
||||
}
|
||||
// Compile to the proto model.
|
||||
if g.sourceFormat == SourceFormatOpenAPI2 {
|
||||
document, err := openapi_v2.NewDocument(info, compiler.NewContextWithExtensions("$root", nil, &g.extensionHandlers))
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
message = document
|
||||
} else if g.sourceFormat == SourceFormatOpenAPI3 {
|
||||
document, err := openapi_v3.NewDocument(info, compiler.NewContextWithExtensions("$root", nil, &g.extensionHandlers))
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
message = document
|
||||
} else {
|
||||
document, err := discovery_v1.NewDocument(info, compiler.NewContextWithExtensions("$root", nil, &g.extensionHandlers))
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
message = document
|
||||
}
|
||||
return message, err
|
||||
}
|
||||
|
||||
// Read an OpenAPI binary file.
|
||||
func (g *Gnostic) readOpenAPIBinary(data []byte) (message proto.Message, err error) {
|
||||
// try to read an OpenAPI v3 document
|
||||
documentV3 := &openapi_v3.Document{}
|
||||
err = proto.Unmarshal(data, documentV3)
|
||||
if err == nil && strings.HasPrefix(documentV3.Openapi, "3.0") {
|
||||
g.sourceFormat = SourceFormatOpenAPI3
|
||||
return documentV3, nil
|
||||
}
|
||||
// if that failed, try to read an OpenAPI v2 document
|
||||
documentV2 := &openapi_v2.Document{}
|
||||
err = proto.Unmarshal(data, documentV2)
|
||||
if err == nil && strings.HasPrefix(documentV2.Swagger, "2.0") {
|
||||
g.sourceFormat = SourceFormatOpenAPI2
|
||||
return documentV2, nil
|
||||
}
|
||||
// if that failed, try to read a Discovery Format document
|
||||
discoveryDocument := &discovery_v1.Document{}
|
||||
err = proto.Unmarshal(data, discoveryDocument)
|
||||
if err == nil { // Discovery documents are not checked for a version prefix.
|
||||
g.sourceFormat = SourceFormatDiscovery
|
||||
return discoveryDocument, nil
|
||||
}
|
||||
return nil, err
|
||||
}
|
||||
|
||||
// Write a binary pb representation.
|
||||
func (g *Gnostic) writeBinaryOutput(message proto.Message) {
|
||||
protoBytes, err := proto.Marshal(message)
|
||||
if err != nil {
|
||||
writeFile(g.errorOutputPath, g.errorBytes(err), g.sourceName, "errors")
|
||||
defer os.Exit(-1)
|
||||
} else {
|
||||
writeFile(g.binaryOutputPath, protoBytes, g.sourceName, "pb")
|
||||
}
|
||||
}
|
||||
|
||||
// Write a text pb representation.
|
||||
func (g *Gnostic) writeTextOutput(message proto.Message) {
|
||||
bytes := []byte(proto.MarshalTextString(message))
|
||||
writeFile(g.textOutputPath, bytes, g.sourceName, "text")
|
||||
}
|
||||
|
||||
// Write JSON/YAML OpenAPI representations.
|
||||
func (g *Gnostic) writeJSONYAMLOutput(message proto.Message) {
|
||||
// Convert the OpenAPI document into an exportable MapSlice.
|
||||
var rawInfo yaml.MapSlice
|
||||
var ok bool
|
||||
var err error
|
||||
if g.sourceFormat == SourceFormatOpenAPI2 {
|
||||
document := message.(*openapi_v2.Document)
|
||||
rawInfo, ok = document.ToRawInfo().(yaml.MapSlice)
|
||||
if !ok {
|
||||
rawInfo = nil
|
||||
}
|
||||
} else if g.sourceFormat == SourceFormatOpenAPI3 {
|
||||
document := message.(*openapi_v3.Document)
|
||||
rawInfo, ok = document.ToRawInfo().(yaml.MapSlice)
|
||||
if !ok {
|
||||
rawInfo = nil
|
||||
}
|
||||
} else if g.sourceFormat == SourceFormatDiscovery {
|
||||
document := message.(*discovery_v1.Document)
|
||||
rawInfo, ok = document.ToRawInfo().(yaml.MapSlice)
|
||||
if !ok {
|
||||
rawInfo = nil
|
||||
}
|
||||
}
|
||||
// Optionally write description in yaml format.
|
||||
if g.yamlOutputPath != "" {
|
||||
var bytes []byte
|
||||
if rawInfo != nil {
|
||||
bytes, err = yaml.Marshal(rawInfo)
|
||||
if err != nil {
|
||||
fmt.Fprintf(os.Stderr, "Error generating yaml output %s\n", err.Error())
|
||||
}
|
||||
writeFile(g.yamlOutputPath, bytes, g.sourceName, "yaml")
|
||||
} else {
|
||||
fmt.Fprintf(os.Stderr, "No yaml output available.\n")
|
||||
}
|
||||
}
|
||||
// Optionally write description in json format.
|
||||
if g.jsonOutputPath != "" {
|
||||
var bytes []byte
|
||||
if rawInfo != nil {
|
||||
bytes, err = jsonwriter.Marshal(rawInfo)
|
||||
if err != nil {
|
||||
fmt.Fprintf(os.Stderr, "Error generating json output %s\n", err.Error())
|
||||
}
|
||||
writeFile(g.jsonOutputPath, bytes, g.sourceName, "json")
|
||||
} else {
|
||||
fmt.Fprintf(os.Stderr, "No json output available.\n")
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Write messages.
|
||||
func (g *Gnostic) writeMessagesOutput(message proto.Message) {
|
||||
protoBytes, err := proto.Marshal(message)
|
||||
if err != nil {
|
||||
writeFile(g.messageOutputPath, g.errorBytes(err), g.sourceName, "errors")
|
||||
defer os.Exit(-1)
|
||||
} else {
|
||||
writeFile(g.messageOutputPath, protoBytes, g.sourceName, "messages.pb")
|
||||
}
|
||||
}
|
||||
|
||||
// Perform all actions specified in the command-line options.
|
||||
func (g *Gnostic) performActions(message proto.Message) (err error) {
|
||||
// Optionally resolve internal references.
|
||||
if g.resolveReferences {
|
||||
if g.sourceFormat == SourceFormatOpenAPI2 {
|
||||
document := message.(*openapi_v2.Document)
|
||||
_, err = document.ResolveReferences(g.sourceName)
|
||||
} else if g.sourceFormat == SourceFormatOpenAPI3 {
|
||||
document := message.(*openapi_v3.Document)
|
||||
_, err = document.ResolveReferences(g.sourceName)
|
||||
}
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
}
|
||||
// Optionally write proto in binary format.
|
||||
if g.binaryOutputPath != "" {
|
||||
g.writeBinaryOutput(message)
|
||||
}
|
||||
// Optionally write proto in text format.
|
||||
if g.textOutputPath != "" {
|
||||
g.writeTextOutput(message)
|
||||
}
|
||||
// Optionally write document in yaml and/or json formats.
|
||||
if g.yamlOutputPath != "" || g.jsonOutputPath != "" {
|
||||
g.writeJSONYAMLOutput(message)
|
||||
}
|
||||
// Call all specified plugins.
|
||||
messages := make([]*plugins.Message, 0)
|
||||
for _, p := range g.pluginCalls {
|
||||
pluginMessages, err := p.perform(message, g.sourceFormat, g.sourceName, g.timePlugins)
|
||||
if err != nil {
|
||||
writeFile(g.errorOutputPath, g.errorBytes(err), g.sourceName, "errors")
|
||||
defer os.Exit(-1) // run all plugins, even when some have errors
|
||||
}
|
||||
messages = append(messages, pluginMessages...)
|
||||
}
|
||||
if g.messageOutputPath != "" {
|
||||
g.writeMessagesOutput(&plugins.Messages{Messages: messages})
|
||||
} else {
|
||||
// Print any messages from the plugins
|
||||
if len(messages) > 0 {
|
||||
for _, message := range messages {
|
||||
fmt.Printf("%+v\n", message)
|
||||
}
|
||||
}
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
func (g *Gnostic) main() {
|
||||
var err error
|
||||
g.readOptions()
|
||||
g.validateOptions()
|
||||
// Read the OpenAPI source.
|
||||
bytes, err := compiler.ReadBytesForFile(g.sourceName)
|
||||
if err != nil {
|
||||
writeFile(g.errorOutputPath, g.errorBytes(err), g.sourceName, "errors")
|
||||
os.Exit(-1)
|
||||
}
|
||||
extension := strings.ToLower(filepath.Ext(g.sourceName))
|
||||
var message proto.Message
|
||||
if extension == ".json" || extension == ".yaml" {
|
||||
// Try to read the source as JSON/YAML.
|
||||
message, err = g.readOpenAPIText(bytes)
|
||||
if err != nil {
|
||||
writeFile(g.errorOutputPath, g.errorBytes(err), g.sourceName, "errors")
|
||||
os.Exit(-1)
|
||||
}
|
||||
} else if extension == ".pb" {
|
||||
// Try to read the source as a binary protocol buffer.
|
||||
message, err = g.readOpenAPIBinary(bytes)
|
||||
if err != nil {
|
||||
writeFile(g.errorOutputPath, g.errorBytes(err), g.sourceName, "errors")
|
||||
os.Exit(-1)
|
||||
}
|
||||
} else {
|
||||
err = errors.New("unknown file extension. 'json', 'yaml', and 'pb' are accepted")
|
||||
writeFile(g.errorOutputPath, g.errorBytes(err), g.sourceName, "errors")
|
||||
os.Exit(-1)
|
||||
}
|
||||
// Perform actions specified by command options.
|
||||
err = g.performActions(message)
|
||||
if err != nil {
|
||||
writeFile(g.errorOutputPath, g.errorBytes(err), g.sourceName, "errors")
|
||||
os.Exit(-1)
|
||||
}
|
||||
}
|
||||
|
||||
func main() {
|
||||
g := newGnostic()
|
||||
g.main()
|
||||
}
|
453 vendor/github.com/googleapis/gnostic/gnostic_test.go generated vendored Normal file
@ -0,0 +1,453 @@
|
||||
package main
|
||||
|
||||
import (
|
||||
"io/ioutil"
|
||||
"os"
|
||||
"os/exec"
|
||||
"path/filepath"
|
||||
"strings"
|
||||
"testing"
|
||||
)
|
||||
|
||||
func testCompiler(t *testing.T, inputFile string, referenceFile string, expectErrors bool) {
|
||||
textFile := strings.Replace(filepath.Base(inputFile), filepath.Ext(inputFile), ".text", 1)
|
||||
errorsFile := strings.Replace(filepath.Base(inputFile), filepath.Ext(inputFile), ".errors", 1)
|
||||
// remove any preexisting output files
|
||||
os.Remove(textFile)
|
||||
os.Remove(errorsFile)
|
||||
// run the compiler
|
||||
var err error
|
||||
var cmd = exec.Command(
|
||||
"gnostic",
|
||||
inputFile,
|
||||
"--text-out=.",
|
||||
"--errors-out=.",
|
||||
"--resolve-refs")
|
||||
//t.Log(cmd.Args)
|
||||
err = cmd.Run()
|
||||
if err != nil && !expectErrors {
|
||||
t.Logf("Compile failed: %+v", err)
|
||||
t.FailNow()
|
||||
}
|
||||
// verify the output against a reference
|
||||
var outputFile string
|
||||
if expectErrors {
|
||||
outputFile = errorsFile
|
||||
} else {
|
||||
outputFile = textFile
|
||||
}
|
||||
err = exec.Command("diff", outputFile, referenceFile).Run()
|
||||
if err != nil {
|
||||
t.Logf("Diff failed: %+v", err)
|
||||
t.FailNow()
|
||||
} else {
|
||||
// if the test succeeded, clean up
|
||||
os.Remove(textFile)
|
||||
os.Remove(errorsFile)
|
||||
}
|
||||
}
|
||||
|
||||
func testNormal(t *testing.T, inputFile string, referenceFile string) {
|
||||
testCompiler(t, inputFile, referenceFile, false)
|
||||
}
|
||||
|
||||
func testErrors(t *testing.T, inputFile string, referenceFile string) {
|
||||
testCompiler(t, inputFile, referenceFile, true)
|
||||
}
|
||||
|
||||
func TestPetstoreJSON(t *testing.T) {
|
||||
testNormal(t,
|
||||
"examples/v2.0/json/petstore.json",
|
||||
"test/v2.0/petstore.text")
|
||||
}
|
||||
|
||||
func TestPetstoreYAML(t *testing.T) {
|
||||
testNormal(t,
|
||||
"examples/v2.0/yaml/petstore.yaml",
|
||||
"test/v2.0/petstore.text")
|
||||
}
|
||||
|
||||
func TestSeparateYAML(t *testing.T) {
|
||||
testNormal(t,
|
||||
"examples/v2.0/yaml/petstore-separate/spec/swagger.yaml",
|
||||
"test/v2.0/yaml/petstore-separate/spec/swagger.text")
|
||||
}
|
||||
|
||||
func TestSeparateJSON(t *testing.T) {
|
||||
testNormal(t,
|
||||
"examples/v2.0/json/petstore-separate/spec/swagger.json",
|
||||
"test/v2.0/yaml/petstore-separate/spec/swagger.text") // yaml and json results should be identical
|
||||
}
|
||||
|
||||
func TestRemotePetstoreJSON(t *testing.T) {
|
||||
testNormal(t,
|
||||
"https://raw.githubusercontent.com/googleapis/openapi-compiler/master/examples/v2.0/json/petstore.json",
|
||||
"test/v2.0/petstore.text")
|
||||
}
|
||||
|
||||
func TestRemotePetstoreYAML(t *testing.T) {
|
||||
testNormal(t,
|
||||
"https://raw.githubusercontent.com/googleapis/openapi-compiler/master/examples/v2.0/yaml/petstore.yaml",
|
||||
"test/v2.0/petstore.text")
|
||||
}
|
||||
|
||||
func TestRemoteSeparateYAML(t *testing.T) {
|
||||
testNormal(t,
|
||||
"https://raw.githubusercontent.com/googleapis/openapi-compiler/master/examples/v2.0/yaml/petstore-separate/spec/swagger.yaml",
|
||||
"test/v2.0/yaml/petstore-separate/spec/swagger.text")
|
||||
}
|
||||
|
||||
func TestRemoteSeparateJSON(t *testing.T) {
|
||||
testNormal(t,
|
||||
"https://raw.githubusercontent.com/googleapis/openapi-compiler/master/examples/v2.0/json/petstore-separate/spec/swagger.json",
|
||||
"test/v2.0/yaml/petstore-separate/spec/swagger.text")
|
||||
}
|
||||
|
||||
func TestErrorBadProperties(t *testing.T) {
|
||||
testErrors(t,
|
||||
"examples/errors/petstore-badproperties.yaml",
|
||||
"test/errors/petstore-badproperties.errors")
|
||||
}
|
||||
|
||||
func TestErrorUnresolvedRefs(t *testing.T) {
|
||||
testErrors(t,
|
||||
"examples/errors/petstore-unresolvedrefs.yaml",
|
||||
"test/errors/petstore-unresolvedrefs.errors")
|
||||
}
|
||||
|
||||
func TestErrorMissingVersion(t *testing.T) {
|
||||
testErrors(t,
|
||||
"examples/errors/petstore-missingversion.yaml",
|
||||
"test/errors/petstore-missingversion.errors")
|
||||
}
|
||||
|
||||
func testPlugin(t *testing.T, plugin string, inputFile string, outputFile string, referenceFile string) {
|
||||
// remove any preexisting output files
|
||||
os.Remove(outputFile)
|
||||
// run the compiler
|
||||
var err error
|
||||
output, err := exec.Command(
|
||||
"gnostic",
|
||||
"--"+plugin+"-out=-",
|
||||
inputFile).Output()
|
||||
if err != nil {
|
||||
t.Logf("Compile failed: %+v", err)
|
||||
t.FailNow()
|
||||
}
|
||||
_ = ioutil.WriteFile(outputFile, output, 0644)
|
||||
err = exec.Command("diff", outputFile, referenceFile).Run()
|
||||
if err != nil {
|
||||
t.Logf("Diff failed: %+v", err)
|
||||
t.FailNow()
|
||||
} else {
|
||||
// if the test succeeded, clean up
|
||||
os.Remove(outputFile)
|
||||
}
|
||||
}
|
||||
|
||||
func TestSamplePluginWithPetstore(t *testing.T) {
|
||||
testPlugin(t,
|
||||
"summary",
|
||||
"examples/v2.0/yaml/petstore.yaml",
|
||||
"sample-petstore.out",
|
||||
"test/v2.0/yaml/sample-petstore.out")
|
||||
}
|
||||
|
||||
func TestErrorInvalidPluginInvocations(t *testing.T) {
|
||||
var err error
|
||||
output, err := exec.Command(
|
||||
"gnostic",
|
||||
"examples/v2.0/yaml/petstore.yaml",
|
||||
"--errors-out=-",
|
||||
"--plugin-out=foo=bar,:abc",
|
||||
"--plugin-out=,foo=bar:abc",
|
||||
"--plugin-out=foo=:abc",
|
||||
"--plugin-out==bar:abc",
|
||||
"--plugin-out=,,:abc",
|
||||
"--plugin-out=foo=bar=baz:abc",
|
||||
).Output()
|
||||
if err == nil {
|
||||
t.Logf("Invalid invocations were accepted")
|
||||
t.FailNow()
|
||||
}
|
||||
outputFile := "invalid-plugin-invocation.errors"
|
||||
_ = ioutil.WriteFile(outputFile, output, 0644)
|
||||
err = exec.Command("diff", outputFile, "test/errors/invalid-plugin-invocation.errors").Run()
|
||||
if err != nil {
|
||||
t.Logf("Diff failed: %+v", err)
|
||||
t.FailNow()
|
||||
} else {
|
||||
// if the test succeeded, clean up
|
||||
os.Remove(outputFile)
|
||||
}
|
||||
}
|
||||
|
||||
func TestValidPluginInvocations(t *testing.T) {
|
||||
var err error
|
||||
output, err := exec.Command(
|
||||
"gnostic",
|
||||
"examples/v2.0/yaml/petstore.yaml",
|
||||
"--errors-out=-",
|
||||
// verify an invocation with no parameters
|
||||
"--summary-out=!", // "!" indicates that no output should be generated
|
||||
// verify single pair of parameters
|
||||
"--summary-out=a=b:!",
|
||||
// verify multiple parameters
|
||||
"--summary-out=a=b,c=123,xyz=alphabetagammadelta:!",
|
||||
// verify that special characters / . - _ can be included in parameter keys and values
|
||||
"--summary-out=a/b/c=x/y/z:!",
|
||||
"--summary-out=a.b.c=x.y.z:!",
|
||||
"--summary-out=a-b-c=x-y-z:!",
|
||||
"--summary-out=a_b_c=x_y_z:!",
|
||||
).Output()
|
||||
if len(output) != 0 {
|
||||
t.Logf("Valid invocations generated invalid errors\n%s", string(output))
|
||||
t.FailNow()
|
||||
}
|
||||
if err != nil {
|
||||
t.Logf("Valid invocations were not accepted")
|
||||
t.FailNow()
|
||||
}
|
||||
}
|
||||
|
||||
func TestExtensionHandlerWithLibraryExample(t *testing.T) {
|
||||
outputFile := "library-example-with-ext.text.out"
|
||||
inputFile := "test/library-example-with-ext.json"
|
||||
referenceFile := "test/library-example-with-ext.text.out"
|
||||
|
||||
os.Remove(outputFile)
|
||||
// run the compiler
|
||||
var err error
|
||||
|
||||
command := exec.Command(
|
||||
"gnostic",
|
||||
"--x-sampleone",
|
||||
"--x-sampletwo",
|
||||
"--text-out="+outputFile,
|
||||
"--resolve-refs",
|
||||
inputFile)
|
||||
|
||||
_, err = command.Output()
|
||||
if err != nil {
|
||||
t.Logf("Compile failed for command %v: %+v", command, err)
|
||||
t.FailNow()
|
||||
}
|
||||
//_ = ioutil.WriteFile(outputFile, output, 0644)
|
||||
err = exec.Command("diff", outputFile, referenceFile).Run()
|
||||
if err != nil {
|
||||
t.Logf("Diff failed: %+v", err)
|
||||
t.FailNow()
|
||||
} else {
|
||||
// if the test succeeded, clean up
|
||||
os.Remove(outputFile)
|
||||
}
|
||||
}
|
||||
|
||||
func TestJSONOutput(t *testing.T) {
|
||||
inputFile := "test/library-example-with-ext.json"
|
||||
|
||||
textFile := "sample.text"
|
||||
jsonFile := "sample.json"
|
||||
textFile2 := "sample2.text"
|
||||
jsonFile2 := "sample2.json"
|
||||
|
||||
os.Remove(textFile)
|
||||
os.Remove(jsonFile)
|
||||
os.Remove(textFile2)
|
||||
os.Remove(jsonFile2)
|
||||
|
||||
var err error
|
||||
|
||||
// Run the compiler once.
|
||||
command := exec.Command(
|
||||
"gnostic",
|
||||
"--text-out="+textFile,
|
||||
"--json-out="+jsonFile,
|
||||
inputFile)
|
||||
_, err = command.Output()
|
||||
if err != nil {
|
||||
t.Logf("Compile failed for command %v: %+v", command, err)
|
||||
t.FailNow()
|
||||
}
|
||||
|
||||
// Run the compiler again, this time on the generated output.
|
||||
command = exec.Command(
|
||||
"gnostic",
|
||||
"--text-out="+textFile2,
|
||||
"--json-out="+jsonFile2,
|
||||
jsonFile)
|
||||
_, err = command.Output()
|
||||
if err != nil {
|
||||
t.Logf("Compile failed for command %v: %+v", command, err)
|
||||
t.FailNow()
|
||||
}
|
||||
|
||||
// Verify that both models have the same internal representation.
|
||||
err = exec.Command("diff", textFile, textFile2).Run()
|
||||
if err != nil {
|
||||
t.Logf("Diff failed: %+v", err)
|
||||
t.FailNow()
|
||||
} else {
|
||||
// if the test succeeded, clean up
|
||||
os.Remove(textFile)
|
||||
os.Remove(jsonFile)
|
||||
os.Remove(textFile2)
|
||||
os.Remove(jsonFile2)
|
||||
}
|
||||
}
|
||||
|
||||
func TestYAMLOutput(t *testing.T) {
|
||||
inputFile := "test/library-example-with-ext.json"
|
||||
|
||||
textFile := "sample.text"
|
||||
yamlFile := "sample.yaml"
|
||||
textFile2 := "sample2.text"
|
||||
yamlFile2 := "sample2.yaml"
|
||||
|
||||
os.Remove(textFile)
|
||||
os.Remove(yamlFile)
|
||||
os.Remove(textFile2)
|
||||
os.Remove(yamlFile2)
|
||||
|
||||
var err error
|
||||
|
||||
// Run the compiler once.
|
||||
command := exec.Command(
|
||||
"gnostic",
|
||||
"--text-out="+textFile,
|
||||
"--yaml-out="+yamlFile,
|
||||
inputFile)
|
||||
_, err = command.Output()
|
||||
if err != nil {
|
||||
t.Logf("Compile failed for command %v: %+v", command, err)
|
||||
t.FailNow()
|
||||
}
|
||||
|
||||
// Run the compiler again, this time on the generated output.
|
||||
command = exec.Command(
|
||||
"gnostic",
|
||||
"--text-out="+textFile2,
|
||||
"--yaml-out="+yamlFile2,
|
||||
yamlFile)
|
||||
_, err = command.Output()
|
||||
if err != nil {
|
||||
t.Logf("Compile failed for command %v: %+v", command, err)
|
||||
t.FailNow()
|
||||
}
|
||||
|
||||
// Verify that both models have the same internal representation.
|
||||
err = exec.Command("diff", textFile, textFile2).Run()
|
||||
if err != nil {
|
||||
t.Logf("Diff failed: %+v", err)
|
||||
t.FailNow()
|
||||
} else {
|
||||
// if the test succeeded, clean up
|
||||
os.Remove(textFile)
|
||||
os.Remove(yamlFile)
|
||||
os.Remove(textFile2)
|
||||
os.Remove(yamlFile2)
|
||||
}
|
||||
}
|
||||
|
||||
func testBuilder(version string, t *testing.T) {
|
||||
var err error
|
||||
|
||||
pbFile := "petstore-" + version + ".pb"
|
||||
yamlFile := "petstore.yaml"
|
||||
jsonFile := "petstore.json"
|
||||
textFile := "petstore.text"
|
||||
textReference := "test/" + version + ".0/petstore.text"
|
||||
|
||||
os.Remove(pbFile)
|
||||
os.Remove(textFile)
|
||||
os.Remove(yamlFile)
|
||||
os.Remove(jsonFile)
|
||||
|
||||
// Generate petstore.pb.
|
||||
command := exec.Command(
|
||||
"petstore-builder",
|
||||
"--"+version)
|
||||
_, err = command.Output()
|
||||
if err != nil {
|
||||
t.Logf("Command %v failed: %+v", command, err)
|
||||
t.FailNow()
|
||||
}
|
||||
|
||||
// Convert petstore.pb to yaml and json.
|
||||
command = exec.Command(
|
||||
"gnostic",
|
||||
pbFile,
|
||||
"--json-out="+jsonFile,
|
||||
"--yaml-out="+yamlFile)
|
||||
_, err = command.Output()
|
||||
if err != nil {
|
||||
t.Logf("Command %v failed: %+v", command, err)
|
||||
t.FailNow()
|
||||
}
|
||||
|
||||
// Read petstore.yaml, resolve references, and export text.
|
||||
command = exec.Command(
|
||||
"gnostic",
|
||||
yamlFile,
|
||||
"--resolve-refs",
|
||||
"--text-out="+textFile)
|
||||
_, err = command.Output()
|
||||
if err != nil {
|
||||
t.Logf("Command %v failed: %+v", command, err)
|
||||
t.FailNow()
|
||||
}
|
||||
|
||||
// Verify that the generated text matches our reference.
|
||||
err = exec.Command("diff", textFile, textReference).Run()
|
||||
if err != nil {
|
||||
t.Logf("Diff failed: %+v", err)
|
||||
t.FailNow()
|
||||
}
|
||||
|
||||
// Read petstore.json, resolve references, and export text.
|
||||
command = exec.Command(
|
||||
"gnostic",
|
||||
jsonFile,
|
||||
"--resolve-refs",
|
||||
"--text-out="+textFile)
|
||||
_, err = command.Output()
|
||||
if err != nil {
|
||||
t.Logf("Command %v failed: %+v", command, err)
|
||||
t.FailNow()
|
||||
}
|
||||
|
||||
// Verify that the generated text matches our reference.
|
||||
err = exec.Command("diff", textFile, textReference).Run()
|
||||
if err != nil {
|
||||
t.Logf("Diff failed: %+v", err)
|
||||
t.FailNow()
|
||||
}
|
||||
|
||||
// if the test succeeded, clean up
|
||||
os.Remove(pbFile)
|
||||
os.Remove(textFile)
|
||||
os.Remove(yamlFile)
|
||||
os.Remove(jsonFile)
|
||||
}
|
||||
|
||||
func TestBuilderV2(t *testing.T) {
|
||||
testBuilder("v2", t)
|
||||
}
|
||||
|
||||
func TestBuilderV3(t *testing.T) {
|
||||
testBuilder("v3", t)
|
||||
}
|
||||
|
||||
// OpenAPI 3.0 tests
|
||||
|
||||
func TestPetstoreYAML_30(t *testing.T) {
|
||||
testNormal(t,
|
||||
"examples/v3.0/yaml/petstore.yaml",
|
||||
"test/v3.0/petstore.text")
|
||||
}
|
||||
|
||||
func TestPetstoreJSON_30(t *testing.T) {
|
||||
testNormal(t,
|
||||
"examples/v3.0/json/petstore.json",
|
||||
"test/v3.0/petstore.text")
|
||||
}
|
11 vendor/github.com/howeyc/gopass/.travis.yml generated vendored Normal file
@ -0,0 +1,11 @@
|
||||
language: go
|
||||
|
||||
os:
|
||||
- linux
|
||||
- osx
|
||||
|
||||
go:
|
||||
- 1.3
|
||||
- 1.4
|
||||
- 1.5
|
||||
- tip
|
15 vendor/github.com/howeyc/gopass/LICENSE.txt generated vendored Normal file
@ -0,0 +1,15 @@
|
||||
ISC License
|
||||
|
||||
Copyright (c) 2012 Chris Howey
|
||||
|
||||
Permission to use, copy, modify, and distribute this software for any
|
||||
purpose with or without fee is hereby granted, provided that the above
|
||||
copyright notice and this permission notice appear in all copies.
|
||||
|
||||
THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
|
||||
WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
|
||||
MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
|
||||
ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
|
||||
WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
|
||||
ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
|
||||
OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
|
384 vendor/github.com/howeyc/gopass/OPENSOLARIS.LICENSE generated vendored Normal file
@ -0,0 +1,384 @@
|
||||
Unless otherwise noted, all files in this distribution are released
|
||||
under the Common Development and Distribution License (CDDL).
|
||||
Exceptions are noted within the associated source files.
|
||||
|
||||
--------------------------------------------------------------------
|
||||
|
||||
|
||||
COMMON DEVELOPMENT AND DISTRIBUTION LICENSE Version 1.0
|
||||
|
||||
1. Definitions.
|
||||
|
||||
1.1. "Contributor" means each individual or entity that creates
|
||||
or contributes to the creation of Modifications.
|
||||
|
||||
1.2. "Contributor Version" means the combination of the Original
|
||||
Software, prior Modifications used by a Contributor (if any),
|
||||
and the Modifications made by that particular Contributor.
|
||||
|
||||
1.3. "Covered Software" means (a) the Original Software, or (b)
|
||||
Modifications, or (c) the combination of files containing
|
||||
Original Software with files containing Modifications, in
|
||||
each case including portions thereof.
|
||||
|
||||
1.4. "Executable" means the Covered Software in any form other
|
||||
than Source Code.
|
||||
|
||||
1.5. "Initial Developer" means the individual or entity that first
|
||||
makes Original Software available under this License.
|
||||
|
||||
1.6. "Larger Work" means a work which combines Covered Software or
|
||||
portions thereof with code not governed by the terms of this
|
||||
License.
|
||||
|
||||
1.7. "License" means this document.
|
||||
|
||||
1.8. "Licensable" means having the right to grant, to the maximum
|
||||
extent possible, whether at the time of the initial grant or
|
||||
subsequently acquired, any and all of the rights conveyed
|
||||
herein.
|
||||
|
||||
1.9. "Modifications" means the Source Code and Executable form of
|
||||
any of the following:
|
||||
|
||||
A. Any file that results from an addition to, deletion from or
|
||||
modification of the contents of a file containing Original
|
||||
Software or previous Modifications;
|
||||
|
||||
B. Any new file that contains any part of the Original
|
||||
Software or previous Modifications; or
|
||||
|
||||
C. Any new file that is contributed or otherwise made
|
||||
available under the terms of this License.
|
||||
|
||||
1.10. "Original Software" means the Source Code and Executable
|
||||
form of computer software code that is originally released
|
||||
under this License.
|
||||
|
||||
1.11. "Patent Claims" means any patent claim(s), now owned or
|
||||
hereafter acquired, including without limitation, method,
|
||||
process, and apparatus claims, in any patent Licensable by
|
||||
grantor.
|
||||
|
||||
1.12. "Source Code" means (a) the common form of computer software
|
||||
code in which modifications are made and (b) associated
|
||||
documentation included in or with such code.
|
||||
|
||||
1.13. "You" (or "Your") means an individual or a legal entity
|
||||
exercising rights under, and complying with all of the terms
|
||||
of, this License. For legal entities, "You" includes any
|
||||
entity which controls, is controlled by, or is under common
|
||||
control with You. For purposes of this definition,
|
||||
"control" means (a) the power, direct or indirect, to cause
|
||||
the direction or management of such entity, whether by
|
||||
contract or otherwise, or (b) ownership of more than fifty
|
||||
percent (50%) of the outstanding shares or beneficial
|
||||
ownership of such entity.
|
||||
|
||||
2. License Grants.
|
||||
|
||||
2.1. The Initial Developer Grant.
|
||||
|
||||
Conditioned upon Your compliance with Section 3.1 below and
|
||||
subject to third party intellectual property claims, the Initial
|
||||
Developer hereby grants You a world-wide, royalty-free,
|
||||
non-exclusive license:
|
||||
|
||||
(a) under intellectual property rights (other than patent or
|
||||
trademark) Licensable by Initial Developer, to use,
|
||||
reproduce, modify, display, perform, sublicense and
|
||||
distribute the Original Software (or portions thereof),
|
||||
with or without Modifications, and/or as part of a Larger
|
||||
Work; and
|
||||
|
||||
(b) under Patent Claims infringed by the making, using or
|
||||
selling of Original Software, to make, have made, use,
|
||||
practice, sell, and offer for sale, and/or otherwise
|
||||
dispose of the Original Software (or portions thereof).
|
||||
|
||||
(c) The licenses granted in Sections 2.1(a) and (b) are
|
||||
effective on the date Initial Developer first distributes
|
||||
or otherwise makes the Original Software available to a
|
||||
third party under the terms of this License.
|
||||
|
||||
(d) Notwithstanding Section 2.1(b) above, no patent license is
|
||||
granted: (1) for code that You delete from the Original
|
||||
Software, or (2) for infringements caused by: (i) the
|
||||
modification of the Original Software, or (ii) the
|
||||
combination of the Original Software with other software
|
||||
or devices.
|
||||
|
||||
2.2. Contributor Grant.
|
||||
|
||||
Conditioned upon Your compliance with Section 3.1 below and
|
||||
subject to third party intellectual property claims, each
|
||||
Contributor hereby grants You a world-wide, royalty-free,
|
||||
non-exclusive license:
|
||||
|
||||
(a) under intellectual property rights (other than patent or
|
||||
trademark) Licensable by Contributor to use, reproduce,
|
||||
modify, display, perform, sublicense and distribute the
|
||||
Modifications created by such Contributor (or portions
|
||||
thereof), either on an unmodified basis, with other
|
||||
Modifications, as Covered Software and/or as part of a
|
||||
Larger Work; and
|
||||
|
||||
(b) under Patent Claims infringed by the making, using, or
|
||||
selling of Modifications made by that Contributor either
|
||||
alone and/or in combination with its Contributor Version
|
||||
(or portions of such combination), to make, use, sell,
|
||||
offer for sale, have made, and/or otherwise dispose of:
|
||||
(1) Modifications made by that Contributor (or portions
|
||||
thereof); and (2) the combination of Modifications made by
|
||||
that Contributor with its Contributor Version (or portions
|
||||
of such combination).
|
||||
|
||||
(c) The licenses granted in Sections 2.2(a) and 2.2(b) are
|
||||
effective on the date Contributor first distributes or
|
||||
otherwise makes the Modifications available to a third
|
||||
party.
|
||||
|
||||
(d) Notwithstanding Section 2.2(b) above, no patent license is
|
||||
granted: (1) for any code that Contributor has deleted
|
||||
from the Contributor Version; (2) for infringements caused
|
||||
by: (i) third party modifications of Contributor Version,
|
||||
or (ii) the combination of Modifications made by that
|
||||
Contributor with other software (except as part of the
|
||||
Contributor Version) or other devices; or (3) under Patent
|
||||
Claims infringed by Covered Software in the absence of
|
||||
Modifications made by that Contributor.
|
||||
|
||||
3. Distribution Obligations.
|
||||
|
||||
3.1. Availability of Source Code.
|
||||
|
||||
Any Covered Software that You distribute or otherwise make
|
||||
available in Executable form must also be made available in Source
|
||||
Code form and that Source Code form must be distributed only under
|
||||
the terms of this License. You must include a copy of this
|
||||
License with every copy of the Source Code form of the Covered
|
||||
Software You distribute or otherwise make available. You must
|
||||
inform recipients of any such Covered Software in Executable form
|
||||
as to how they can obtain such Covered Software in Source Code
|
||||
form in a reasonable manner on or through a medium customarily
|
||||
used for software exchange.
|
||||
|
||||
3.2. Modifications.
|
||||
|
||||
The Modifications that You create or to which You contribute are
|
||||
governed by the terms of this License. You represent that You
|
||||
believe Your Modifications are Your original creation(s) and/or
|
||||
You have sufficient rights to grant the rights conveyed by this
|
||||
License.
|
||||
|
||||
3.3. Required Notices.
|
||||
|
||||
You must include a notice in each of Your Modifications that
|
||||
identifies You as the Contributor of the Modification. You may
|
||||
not remove or alter any copyright, patent or trademark notices
|
||||
contained within the Covered Software, or any notices of licensing
|
||||
or any descriptive text giving attribution to any Contributor or
|
||||
the Initial Developer.
|
||||
|
||||
3.4. Application of Additional Terms.
|
||||
|
||||
You may not offer or impose any terms on any Covered Software in
|
||||
Source Code form that alters or restricts the applicable version
|
||||
of this License or the recipients' rights hereunder. You may
|
||||
choose to offer, and to charge a fee for, warranty, support,
|
||||
indemnity or liability obligations to one or more recipients of
|
||||
Covered Software. However, you may do so only on Your own behalf,
|
||||
and not on behalf of the Initial Developer or any Contributor.
|
||||
You must make it absolutely clear that any such warranty, support,
|
||||
indemnity or liability obligation is offered by You alone, and You
|
||||
hereby agree to indemnify the Initial Developer and every
|
||||
Contributor for any liability incurred by the Initial Developer or
|
||||
such Contributor as a result of warranty, support, indemnity or
|
||||
liability terms You offer.
|
||||
|
||||
3.5. Distribution of Executable Versions.
|
||||
|
||||
You may distribute the Executable form of the Covered Software
|
||||
under the terms of this License or under the terms of a license of
|
||||
Your choice, which may contain terms different from this License,
|
||||
provided that You are in compliance with the terms of this License
|
||||
and that the license for the Executable form does not attempt to
|
||||
limit or alter the recipient's rights in the Source Code form from
|
||||
the rights set forth in this License. If You distribute the
|
||||
Covered Software in Executable form under a different license, You
|
||||
must make it absolutely clear that any terms which differ from
|
||||
this License are offered by You alone, not by the Initial
|
||||
Developer or Contributor. You hereby agree to indemnify the
|
||||
Initial Developer and every Contributor for any liability incurred
|
||||
by the Initial Developer or such Contributor as a result of any
|
||||
such terms You offer.
|
||||
|
||||
3.6. Larger Works.
|
||||
|
||||
You may create a Larger Work by combining Covered Software with
|
||||
other code not governed by the terms of this License and
|
||||
distribute the Larger Work as a single product. In such a case,
|
||||
You must make sure the requirements of this License are fulfilled
|
||||
for the Covered Software.
|
||||
|
||||
4. Versions of the License.
|
||||
|
||||
4.1. New Versions.
|
||||
|
||||
Sun Microsystems, Inc. is the initial license steward and may
|
||||
publish revised and/or new versions of this License from time to
|
||||
time. Each version will be given a distinguishing version number.
|
||||
Except as provided in Section 4.3, no one other than the license
|
||||
steward has the right to modify this License.
|
||||
|
||||
4.2. Effect of New Versions.
|
||||
|
||||
You may always continue to use, distribute or otherwise make the
|
||||
Covered Software available under the terms of the version of the
|
||||
License under which You originally received the Covered Software.
|
||||
If the Initial Developer includes a notice in the Original
|
||||
Software prohibiting it from being distributed or otherwise made
|
||||
available under any subsequent version of the License, You must
|
||||
distribute and make the Covered Software available under the terms
|
||||
of the version of the License under which You originally received
|
||||
the Covered Software. Otherwise, You may also choose to use,
|
||||
distribute or otherwise make the Covered Software available under
|
||||
the terms of any subsequent version of the License published by
|
||||
the license steward.
|
||||
|
||||
4.3. Modified Versions.
|
||||
|
||||
When You are an Initial Developer and You want to create a new
|
||||
license for Your Original Software, You may create and use a
|
||||
modified version of this License if You: (a) rename the license
|
||||
and remove any references to the name of the license steward
|
||||
(except to note that the license differs from this License); and
|
||||
(b) otherwise make it clear that the license contains terms which
|
||||
differ from this License.
|
||||
|
||||
5. DISCLAIMER OF WARRANTY.
|
||||
|
||||
COVERED SOFTWARE IS PROVIDED UNDER THIS LICENSE ON AN "AS IS"
|
||||
BASIS, WITHOUT WARRANTY OF ANY KIND, EITHER EXPRESSED OR IMPLIED,
|
||||
INCLUDING, WITHOUT LIMITATION, WARRANTIES THAT THE COVERED
|
||||
SOFTWARE IS FREE OF DEFECTS, MERCHANTABLE, FIT FOR A PARTICULAR
|
||||
PURPOSE OR NON-INFRINGING. THE ENTIRE RISK AS TO THE QUALITY AND
|
||||
PERFORMANCE OF THE COVERED SOFTWARE IS WITH YOU. SHOULD ANY
|
||||
COVERED SOFTWARE PROVE DEFECTIVE IN ANY RESPECT, YOU (NOT THE
|
||||
INITIAL DEVELOPER OR ANY OTHER CONTRIBUTOR) ASSUME THE COST OF ANY
|
||||
NECESSARY SERVICING, REPAIR OR CORRECTION. THIS DISCLAIMER OF
|
||||
WARRANTY CONSTITUTES AN ESSENTIAL PART OF THIS LICENSE. NO USE OF
|
||||
ANY COVERED SOFTWARE IS AUTHORIZED HEREUNDER EXCEPT UNDER THIS
|
||||
DISCLAIMER.
|
||||
|
||||
6. TERMINATION.
|
||||
|
||||
6.1. This License and the rights granted hereunder will terminate
|
||||
automatically if You fail to comply with terms herein and fail to
|
||||
cure such breach within 30 days of becoming aware of the breach.
|
||||
Provisions which, by their nature, must remain in effect beyond
|
||||
the termination of this License shall survive.
|
||||
|
||||
6.2. If You assert a patent infringement claim (excluding
|
||||
declaratory judgment actions) against Initial Developer or a
|
||||
Contributor (the Initial Developer or Contributor against whom You
|
||||
assert such claim is referred to as "Participant") alleging that
|
||||
the Participant Software (meaning the Contributor Version where
|
||||
the Participant is a Contributor or the Original Software where
|
||||
the Participant is the Initial Developer) directly or indirectly
|
||||
infringes any patent, then any and all rights granted directly or
|
||||
indirectly to You by such Participant, the Initial Developer (if
|
||||
the Initial Developer is not the Participant) and all Contributors
|
||||
under Sections 2.1 and/or 2.2 of this License shall, upon 60 days
|
||||
notice from Participant terminate prospectively and automatically
|
||||
at the expiration of such 60 day notice period, unless if within
|
||||
such 60 day period You withdraw Your claim with respect to the
|
||||
Participant Software against such Participant either unilaterally
|
||||
or pursuant to a written agreement with Participant.
|
||||
|
||||
6.3. In the event of termination under Sections 6.1 or 6.2 above,
|
||||
all end user licenses that have been validly granted by You or any
|
||||
distributor hereunder prior to termination (excluding licenses
|
||||
granted to You by any distributor) shall survive termination.
|
||||
|
||||
7. LIMITATION OF LIABILITY.
|
||||
|
||||
UNDER NO CIRCUMSTANCES AND UNDER NO LEGAL THEORY, WHETHER TORT
|
||||
(INCLUDING NEGLIGENCE), CONTRACT, OR OTHERWISE, SHALL YOU, THE
|
||||
INITIAL DEVELOPER, ANY OTHER CONTRIBUTOR, OR ANY DISTRIBUTOR OF
|
||||
COVERED SOFTWARE, OR ANY SUPPLIER OF ANY OF SUCH PARTIES, BE
|
||||
LIABLE TO ANY PERSON FOR ANY INDIRECT, SPECIAL, INCIDENTAL, OR
|
||||
CONSEQUENTIAL DAMAGES OF ANY CHARACTER INCLUDING, WITHOUT
|
||||
LIMITATION, DAMAGES FOR LOST PROFITS, LOSS OF GOODWILL, WORK
|
||||
STOPPAGE, COMPUTER FAILURE OR MALFUNCTION, OR ANY AND ALL OTHER
|
||||
COMMERCIAL DAMAGES OR LOSSES, EVEN IF SUCH PARTY SHALL HAVE BEEN
|
||||
INFORMED OF THE POSSIBILITY OF SUCH DAMAGES. THIS LIMITATION OF
|
||||
LIABILITY SHALL NOT APPLY TO LIABILITY FOR DEATH OR PERSONAL
|
||||
INJURY RESULTING FROM SUCH PARTY'S NEGLIGENCE TO THE EXTENT
|
||||
APPLICABLE LAW PROHIBITS SUCH LIMITATION. SOME JURISDICTIONS DO
|
||||
NOT ALLOW THE EXCLUSION OR LIMITATION OF INCIDENTAL OR
|
||||
CONSEQUENTIAL DAMAGES, SO THIS EXCLUSION AND LIMITATION MAY NOT
|
||||
APPLY TO YOU.
|
||||
|
||||
8. U.S. GOVERNMENT END USERS.
|
||||
|
||||
The Covered Software is a "commercial item," as that term is
|
||||
defined in 48 C.F.R. 2.101 (Oct. 1995), consisting of "commercial
|
||||
computer software" (as that term is defined at 48
|
||||
C.F.R. 252.227-7014(a)(1)) and "commercial computer software
|
||||
documentation" as such terms are used in 48 C.F.R. 12.212
|
||||
(Sept. 1995). Consistent with 48 C.F.R. 12.212 and 48
|
||||
C.F.R. 227.7202-1 through 227.7202-4 (June 1995), all
|
||||
U.S. Government End Users acquire Covered Software with only those
|
||||
rights set forth herein. This U.S. Government Rights clause is in
|
||||
lieu of, and supersedes, any other FAR, DFAR, or other clause or
|
||||
provision that addresses Government rights in computer software
|
||||
under this License.
|
||||
|
||||
9. MISCELLANEOUS.
|
||||
|
||||
This License represents the complete agreement concerning subject
|
||||
matter hereof. If any provision of this License is held to be
|
||||
unenforceable, such provision shall be reformed only to the extent
|
||||
necessary to make it enforceable. This License shall be governed
|
||||
by the law of the jurisdiction specified in a notice contained
|
||||
within the Original Software (except to the extent applicable law,
|
||||
if any, provides otherwise), excluding such jurisdiction's
|
||||
conflict-of-law provisions. Any litigation relating to this
|
||||
License shall be subject to the jurisdiction of the courts located
|
||||
in the jurisdiction and venue specified in a notice contained
|
||||
within the Original Software, with the losing party responsible
|
||||
for costs, including, without limitation, court costs and
|
||||
reasonable attorneys' fees and expenses. The application of the
|
||||
United Nations Convention on Contracts for the International Sale
|
||||
of Goods is expressly excluded. Any law or regulation which
|
||||
provides that the language of a contract shall be construed
|
||||
against the drafter shall not apply to this License. You agree
|
||||
that You alone are responsible for compliance with the United
|
||||
States export administration regulations (and the export control
|
||||
laws and regulation of any other countries) when You use,
|
||||
distribute or otherwise make available any Covered Software.
|
||||
|
||||
10. RESPONSIBILITY FOR CLAIMS.
|
||||
|
||||
As between Initial Developer and the Contributors, each party is
|
||||
responsible for claims and damages arising, directly or
|
||||
indirectly, out of its utilization of rights under this License
|
||||
and You agree to work with Initial Developer and Contributors to
|
||||
distribute such responsibility on an equitable basis. Nothing
|
||||
herein is intended or shall be deemed to constitute any admission
|
||||
of liability.
|
||||
|
||||
--------------------------------------------------------------------
|
||||
|
||||
NOTICE PURSUANT TO SECTION 9 OF THE COMMON DEVELOPMENT AND
|
||||
DISTRIBUTION LICENSE (CDDL)
|
||||
|
||||
For Covered Software in this distribution, this License shall
|
||||
be governed by the laws of the State of California (excluding
|
||||
conflict-of-law provisions).
|
||||
|
||||
Any litigation relating to this License shall be subject to the
|
||||
jurisdiction of the Federal Courts of the Northern District of
|
||||
California and the state courts of the State of California, with
|
||||
venue lying in Santa Clara County, California.
|
27 vendor/github.com/howeyc/gopass/README.md generated vendored Normal file
@ -0,0 +1,27 @@
|
||||
# getpasswd in Go [GoDoc](https://godoc.org/github.com/howeyc/gopass) [Build Status](http://travis-ci.org/howeyc/gopass)
|
||||
|
||||
Retrieve password from user terminal or piped input without echo.
|
||||
|
||||
Verified on BSD, Linux, and Windows.
|
||||
|
||||
Example:
|
||||
```go
|
||||
package main
|
||||
|
||||
import "fmt"
|
||||
import "github.com/howeyc/gopass"
|
||||
|
||||
func main() {
|
||||
fmt.Printf("Password: ")
|
||||
|
||||
// Silent. For printing *'s use gopass.GetPasswdMasked()
|
||||
pass, err := gopass.GetPasswd()
|
||||
if err != nil {
|
||||
// Handle gopass.ErrInterrupted or getch() read error
|
||||
}
|
||||
|
||||
// Do something with pass
|
||||
}
|
||||
```
|
||||
|
||||
Caution: Multi-byte characters not supported!
|
110 vendor/github.com/howeyc/gopass/pass.go generated vendored Normal file
@ -0,0 +1,110 @@
|
||||
package gopass
|
||||
|
||||
import (
|
||||
"errors"
|
||||
"fmt"
|
||||
"io"
|
||||
"os"
|
||||
)
|
||||
|
||||
type FdReader interface {
|
||||
io.Reader
|
||||
Fd() uintptr
|
||||
}
|
||||
|
||||
var defaultGetCh = func(r io.Reader) (byte, error) {
|
||||
buf := make([]byte, 1)
|
||||
if n, err := r.Read(buf); n == 0 || err != nil {
|
||||
if err != nil {
|
||||
return 0, err
|
||||
}
|
||||
return 0, io.EOF
|
||||
}
|
||||
return buf[0], nil
|
||||
}
|
||||
|
||||
var (
|
||||
maxLength = 512
|
||||
ErrInterrupted = errors.New("interrupted")
|
||||
ErrMaxLengthExceeded = fmt.Errorf("maximum byte limit (%v) exceeded", maxLength)
|
||||
|
||||
// Provide variable so that tests can provide a mock implementation.
|
||||
getch = defaultGetCh
|
||||
)
|
||||
|
||||
// getPasswd returns the input read from terminal.
|
||||
// If prompt is not empty, it will be output as a prompt to the user
|
||||
// If masked is true, typing will be matched by asterisks on the screen.
|
||||
// Otherwise, typing will echo nothing.
|
||||
func getPasswd(prompt string, masked bool, r FdReader, w io.Writer) ([]byte, error) {
|
||||
var err error
|
||||
var pass, bs, mask []byte
|
||||
if masked {
|
||||
bs = []byte("\b \b")
|
||||
mask = []byte("*")
|
||||
}
|
||||
|
||||
if isTerminal(r.Fd()) {
|
||||
if oldState, err := makeRaw(r.Fd()); err != nil {
|
||||
return pass, err
|
||||
} else {
|
||||
defer func() {
|
||||
restore(r.Fd(), oldState)
|
||||
fmt.Fprintln(w)
|
||||
}()
|
||||
}
|
||||
}
|
||||
|
||||
if prompt != "" {
|
||||
fmt.Fprint(w, prompt)
|
||||
}
|
||||
|
||||
// Track total bytes read, not just bytes in the password. This ensures any
|
||||
// errors that might flood the console with nil or -1 bytes infinitely are
|
||||
// capped.
|
||||
var counter int
|
||||
for counter = 0; counter <= maxLength; counter++ {
|
||||
if v, e := getch(r); e != nil {
|
||||
err = e
|
||||
break
|
||||
} else if v == 127 || v == 8 {
|
||||
if l := len(pass); l > 0 {
|
||||
pass = pass[:l-1]
|
||||
fmt.Fprint(w, string(bs))
|
||||
}
|
||||
} else if v == 13 || v == 10 {
|
||||
break
|
||||
} else if v == 3 {
|
||||
err = ErrInterrupted
|
||||
break
|
||||
} else if v != 0 {
|
||||
pass = append(pass, v)
|
||||
fmt.Fprint(w, string(mask))
|
||||
}
|
||||
}
|
||||
|
||||
if counter > maxLength {
|
||||
err = ErrMaxLengthExceeded
|
||||
}
|
||||
|
||||
return pass, err
|
||||
}
|
||||
|
||||
// GetPasswd returns the password read from the terminal without echoing input.
|
||||
// The returned byte array does not include end-of-line characters.
|
||||
func GetPasswd() ([]byte, error) {
|
||||
return getPasswd("", false, os.Stdin, os.Stdout)
|
||||
}
|
||||
|
||||
// GetPasswdMasked returns the password read from the terminal, echoing asterisks.
|
||||
// The returned byte array does not include end-of-line characters.
|
||||
func GetPasswdMasked() ([]byte, error) {
|
||||
return getPasswd("", true, os.Stdin, os.Stdout)
|
||||
}
|
||||
|
||||
// GetPasswdPrompt prompts the user and returns the password read from the terminal.
|
||||
// If mask is true, then asterisks are echoed.
|
||||
// The returned byte array does not include end-of-line characters.
|
||||
func GetPasswdPrompt(prompt string, mask bool, r FdReader, w io.Writer) ([]byte, error) {
|
||||
return getPasswd(prompt, mask, r, w)
|
||||
}
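The README above already demonstrates GetPasswd; as an additional hedged sketch (not part of the vendored file), GetPasswdPrompt takes an explicit prompt, mask flag, and reader/writer pair, so a masked terminal prompt looks like this:

```go
package main

import (
	"fmt"
	"log"
	"os"

	"github.com/howeyc/gopass"
)

func main() {
	// Prompt on stdout and read from the terminal, echoing '*' per byte.
	// The prompt text is arbitrary; *os.File satisfies the FdReader interface.
	pass, err := gopass.GetPasswdPrompt("Enter password: ", true, os.Stdin, os.Stdout)
	if err != nil {
		log.Fatal(err)
	}
	fmt.Printf("read %d bytes\n", len(pass))
}
```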
|
225 vendor/github.com/howeyc/gopass/pass_test.go generated vendored Normal file
@ -0,0 +1,225 @@
|
||||
package gopass
|
||||
|
||||
import (
|
||||
"bufio"
|
||||
"bytes"
|
||||
"fmt"
|
||||
"io"
|
||||
"io/ioutil"
|
||||
"os"
|
||||
"testing"
|
||||
"time"
|
||||
)
|
||||
|
||||
// TestGetPasswd tests the password creation and output based on a byte buffer
|
||||
// as input to mock the underlying getch() methods.
|
||||
func TestGetPasswd(t *testing.T) {
|
||||
type testData struct {
|
||||
input []byte
|
||||
|
||||
// Due to how backspaces are written, it is easier to manually write
|
||||
// each expected output for the masked cases.
|
||||
masked string
|
||||
password string
|
||||
byesLeft int
|
||||
reason string
|
||||
}
|
||||
|
||||
ds := []testData{
|
||||
testData{[]byte("abc\n"), "***", "abc", 0, "Password parsing should stop at \\n"},
|
||||
testData{[]byte("abc\r"), "***", "abc", 0, "Password parsing should stop at \\r"},
|
||||
testData{[]byte("a\nbc\n"), "*", "a", 3, "Password parsing should stop at \\n"},
|
||||
testData{[]byte("*!]|\n"), "****", "*!]|", 0, "Special characters shouldn't affect the password."},
|
||||
|
||||
testData{[]byte("abc\r\n"), "***", "abc", 1,
|
||||
"Password parsing should stop at \\r; Windows LINE_MODE should be unset so \\r is not converted to \\r\\n."},
|
||||
|
||||
testData{[]byte{'a', 'b', 'c', 8, '\n'}, "***\b \b", "ab", 0, "Backspace byte should remove the last read byte."},
|
||||
testData{[]byte{'a', 'b', 127, 'c', '\n'}, "**\b \b*", "ac", 0, "Delete byte should remove the last read byte."},
|
||||
testData{[]byte{'a', 'b', 127, 'c', 8, 127, '\n'}, "**\b \b*\b \b\b \b", "", 0, "Successive deletes continue to delete."},
|
||||
testData{[]byte{8, 8, 8, '\n'}, "", "", 0, "Deletes before characters are noops."},
|
||||
testData{[]byte{8, 8, 8, 'a', 'b', 'c', '\n'}, "***", "abc", 0, "Deletes before characters are noops."},
|
||||
|
||||
testData{[]byte{'a', 'b', 0, 'c', '\n'}, "***", "abc", 0,
|
||||
"Nil byte should be ignored due; may get unintended nil bytes from syscalls on Windows."},
|
||||
}
|
||||
|
||||
// Redirecting output for tests as they print to os.Stdout but we want to
|
||||
// capture and test the output.
|
||||
for _, masked := range []bool{true, false} {
|
||||
for _, d := range ds {
|
||||
pipeBytesToStdin(d.input)
|
||||
|
||||
r, w, err := os.Pipe()
|
||||
if err != nil {
|
||||
t.Fatal(err.Error())
|
||||
}
|
||||
|
||||
result, err := getPasswd("", masked, os.Stdin, w)
|
||||
if err != nil {
|
||||
t.Errorf("Error getting password: %s", err.Error())
|
||||
}
|
||||
leftOnBuffer := flushStdin()
|
||||
|
||||
// Test output (masked and unmasked). Delete/backspace actually
|
||||
// deletes, overwrites and deletes again. As a result, we need to
|
||||
// remove those from the pipe afterwards to mimic the console's
|
||||
// interpretation of those bytes.
|
||||
w.Close()
|
||||
output, err := ioutil.ReadAll(r)
|
||||
if err != nil {
|
||||
t.Fatal(err.Error())
|
||||
}
|
||||
var expectedOutput []byte
|
||||
if masked {
|
||||
expectedOutput = []byte(d.masked)
|
||||
} else {
|
||||
expectedOutput = []byte("")
|
||||
}
|
||||
if bytes.Compare(expectedOutput, output) != 0 {
|
||||
t.Errorf("Expected output to equal %v (%q) but got %v (%q) instead when masked=%v. %s", expectedOutput, string(expectedOutput), output, string(output), masked, d.reason)
|
||||
}
|
||||
|
||||
if string(result) != d.password {
|
||||
t.Errorf("Expected %q but got %q instead when masked=%v. %s", d.password, result, masked, d.reason)
|
||||
}
|
||||
|
||||
if leftOnBuffer != d.byesLeft {
|
||||
t.Errorf("Expected %v bytes left on buffer but instead got %v when masked=%v. %s", d.byesLeft, leftOnBuffer, masked, d.reason)
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// TestPipe ensures we get our expected pipe behavior.
|
||||
func TestPipe(t *testing.T) {
|
||||
type testData struct {
|
||||
input string
|
||||
password string
|
||||
expError error
|
||||
}
|
||||
ds := []testData{
|
||||
testData{"abc", "abc", io.EOF},
|
||||
testData{"abc\n", "abc", nil},
|
||||
testData{"abc\r", "abc", nil},
|
||||
testData{"abc\r\n", "abc", nil},
|
||||
}
|
||||
|
||||
for _, d := range ds {
|
||||
_, err := pipeToStdin(d.input)
|
||||
if err != nil {
|
||||
t.Log("Error writing input to stdin:", err)
|
||||
t.FailNow()
|
||||
}
|
||||
pass, err := GetPasswd()
|
||||
if string(pass) != d.password {
|
||||
t.Errorf("Expected %q but got %q instead.", d.password, string(pass))
|
||||
}
|
||||
if err != d.expError {
|
||||
t.Errorf("Expected %v but got %q instead.", d.expError, err)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// flushStdin reads from stdin for .5 seconds to ensure no bytes are left on
|
||||
// the buffer. Returns the number of bytes read.
|
||||
func flushStdin() int {
|
||||
ch := make(chan byte)
|
||||
go func(ch chan byte) {
|
||||
reader := bufio.NewReader(os.Stdin)
|
||||
for {
|
||||
b, err := reader.ReadByte()
|
||||
if err != nil { // Maybe log non io.EOF errors, if you want
|
||||
close(ch)
|
||||
return
|
||||
}
|
||||
ch <- b
|
||||
}
|
||||
close(ch)
|
||||
}(ch)
|
||||
|
||||
numBytes := 0
|
||||
for {
|
||||
select {
|
||||
case _, ok := <-ch:
|
||||
if !ok {
|
||||
return numBytes
|
||||
}
|
||||
numBytes++
|
||||
case <-time.After(500 * time.Millisecond):
|
||||
return numBytes
|
||||
}
|
||||
}
|
||||
return numBytes
|
||||
}
|
||||
|
||||
// pipeToStdin pipes the given string onto os.Stdin by replacing it with an
|
||||
// os.Pipe. The write end of the pipe is closed so that EOF is read after the
|
||||
// final byte.
|
||||
func pipeToStdin(s string) (int, error) {
|
||||
pipeReader, pipeWriter, err := os.Pipe()
|
||||
if err != nil {
|
||||
fmt.Println("Error getting os pipes:", err)
|
||||
os.Exit(1)
|
||||
}
|
||||
os.Stdin = pipeReader
|
||||
w, err := pipeWriter.WriteString(s)
|
||||
pipeWriter.Close()
|
||||
return w, err
|
||||
}
|
||||
|
||||
func pipeBytesToStdin(b []byte) (int, error) {
|
||||
return pipeToStdin(string(b))
|
||||
}
|
||||
|
||||
// TestGetPasswd_Err tests errors are properly handled from getch()
|
||||
func TestGetPasswd_Err(t *testing.T) {
|
||||
var inBuffer *bytes.Buffer
|
||||
getch = func(io.Reader) (byte, error) {
|
||||
b, err := inBuffer.ReadByte()
|
||||
if err != nil {
|
||||
return 13, err
|
||||
}
|
||||
if b == 'z' {
|
||||
return 'z', fmt.Errorf("Forced error; byte returned should not be considered accurate.")
|
||||
}
|
||||
return b, nil
|
||||
}
|
||||
defer func() { getch = defaultGetCh }()
|
||||
|
||||
for input, expectedPassword := range map[string]string{"abc": "abc", "abzc": "ab"} {
|
||||
inBuffer = bytes.NewBufferString(input)
|
||||
p, err := GetPasswdMasked()
|
||||
if string(p) != expectedPassword {
|
||||
t.Errorf("Expected %q but got %q instead.", expectedPassword, p)
|
||||
}
|
||||
if err == nil {
|
||||
t.Errorf("Expected error to be returned.")
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func TestMaxPasswordLength(t *testing.T) {
|
||||
type testData struct {
|
||||
input []byte
|
||||
expectedErr error
|
||||
|
||||
// Helper field to output in case of failure; rather than hundreds of
|
||||
// bytes.
|
||||
inputDesc string
|
||||
}
|
||||
|
||||
ds := []testData{
|
||||
testData{append(bytes.Repeat([]byte{'a'}, maxLength), '\n'), nil, fmt.Sprintf("%v 'a' bytes followed by a newline", maxLength)},
|
||||
testData{append(bytes.Repeat([]byte{'a'}, maxLength+1), '\n'), ErrMaxLengthExceeded, fmt.Sprintf("%v 'a' bytes followed by a newline", maxLength+1)},
|
||||
testData{append(bytes.Repeat([]byte{0x00}, maxLength+1), '\n'), ErrMaxLengthExceeded, fmt.Sprintf("%v 0x00 bytes followed by a newline", maxLength+1)},
|
||||
}
|
||||
|
||||
for _, d := range ds {
|
||||
pipeBytesToStdin(d.input)
|
||||
_, err := GetPasswd()
|
||||
if err != d.expectedErr {
|
||||
t.Errorf("Expected error to be %v; isntead got %v from %v", d.expectedErr, err, d.inputDesc)
|
||||
}
|
||||
}
|
||||
}
|
25
vendor/github.com/howeyc/gopass/terminal.go
generated
vendored
Normal file
@ -0,0 +1,25 @@
|
||||
// +build !solaris
|
||||
|
||||
package gopass
|
||||
|
||||
import "golang.org/x/crypto/ssh/terminal"
|
||||
|
||||
type terminalState struct {
|
||||
state *terminal.State
|
||||
}
|
||||
|
||||
func isTerminal(fd uintptr) bool {
|
||||
return terminal.IsTerminal(int(fd))
|
||||
}
|
||||
|
||||
func makeRaw(fd uintptr) (*terminalState, error) {
|
||||
state, err := terminal.MakeRaw(int(fd))
|
||||
|
||||
return &terminalState{
|
||||
state: state,
|
||||
}, err
|
||||
}
|
||||
|
||||
func restore(fd uintptr, oldState *terminalState) error {
|
||||
return terminal.Restore(int(fd), oldState.state)
|
||||
}
|
69
vendor/github.com/howeyc/gopass/terminal_solaris.go
generated
vendored
Normal file
@ -0,0 +1,69 @@
|
||||
/*
|
||||
* CDDL HEADER START
|
||||
*
|
||||
* The contents of this file are subject to the terms of the
|
||||
* Common Development and Distribution License, Version 1.0 only
|
||||
* (the "License"). You may not use this file except in compliance
|
||||
* with the License.
|
||||
*
|
||||
* You can obtain a copy of the license at usr/src/OPENSOLARIS.LICENSE
|
||||
* or http://www.opensolaris.org/os/licensing.
|
||||
* See the License for the specific language governing permissions
|
||||
* and limitations under the License.
|
||||
*
|
||||
* When distributing Covered Code, include this CDDL HEADER in each
|
||||
* file and include the License file at usr/src/OPENSOLARIS.LICENSE.
|
||||
* If applicable, add the following below this CDDL HEADER, with the
|
||||
* fields enclosed by brackets "[]" replaced with your own identifying
|
||||
* information: Portions Copyright [yyyy] [name of copyright owner]
|
||||
*
|
||||
* CDDL HEADER END
|
||||
*/
|
||||
// Below is derived from Solaris source, so CDDL license is included.
|
||||
|
||||
package gopass
|
||||
|
||||
import (
|
||||
"syscall"
|
||||
|
||||
"golang.org/x/sys/unix"
|
||||
)
|
||||
|
||||
type terminalState struct {
|
||||
state *unix.Termios
|
||||
}
|
||||
|
||||
// isTerminal returns true if there is a terminal attached to the given
|
||||
// file descriptor.
|
||||
// Source: http://src.illumos.org/source/xref/illumos-gate/usr/src/lib/libbc/libc/gen/common/isatty.c
|
||||
func isTerminal(fd uintptr) bool {
|
||||
var termio unix.Termio
|
||||
err := unix.IoctlSetTermio(int(fd), unix.TCGETA, &termio)
|
||||
return err == nil
|
||||
}
|
||||
|
||||
// makeRaw puts the terminal connected to the given file descriptor into raw
|
||||
// mode and returns the previous state of the terminal so that it can be
|
||||
// restored.
|
||||
// Source: http://src.illumos.org/source/xref/illumos-gate/usr/src/lib/libast/common/uwin/getpass.c
|
||||
func makeRaw(fd uintptr) (*terminalState, error) {
|
||||
oldTermiosPtr, err := unix.IoctlGetTermios(int(fd), unix.TCGETS)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
oldTermios := *oldTermiosPtr
|
||||
|
||||
newTermios := oldTermios
|
||||
newTermios.Lflag &^= syscall.ECHO | syscall.ECHOE | syscall.ECHOK | syscall.ECHONL
|
||||
if err := unix.IoctlSetTermios(int(fd), unix.TCSETS, &newTermios); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
return &terminalState{
|
||||
state: oldTermiosPtr,
|
||||
}, nil
|
||||
}
|
||||
|
||||
func restore(fd uintptr, oldState *terminalState) error {
|
||||
return unix.IoctlSetTermios(int(fd), unix.TCSETS, oldState.state)
|
||||
}
|
33
vendor/github.com/imdario/mergo/.gitignore
generated
vendored
Normal file
@ -0,0 +1,33 @@
|
||||
#### joe made this: http://goel.io/joe
|
||||
|
||||
#### go ####
|
||||
# Binaries for programs and plugins
|
||||
*.exe
|
||||
*.dll
|
||||
*.so
|
||||
*.dylib
|
||||
|
||||
# Test binary, build with `go test -c`
|
||||
*.test
|
||||
|
||||
# Output of the go coverage tool, specifically when used with LiteIDE
|
||||
*.out
|
||||
|
||||
# Project-local glide cache, RE: https://github.com/Masterminds/glide/issues/736
|
||||
.glide/
|
||||
|
||||
#### vim ####
|
||||
# Swap
|
||||
[._]*.s[a-v][a-z]
|
||||
[._]*.sw[a-p]
|
||||
[._]s[a-v][a-z]
|
||||
[._]sw[a-p]
|
||||
|
||||
# Session
|
||||
Session.vim
|
||||
|
||||
# Temporary
|
||||
.netrwhist
|
||||
*~
|
||||
# Auto-generated tag files
|
||||
tags
|
7
vendor/github.com/imdario/mergo/.travis.yml
generated
vendored
Normal file
@ -0,0 +1,7 @@
|
||||
language: go
|
||||
install:
|
||||
- go get -t
|
||||
- go get golang.org/x/tools/cmd/cover
|
||||
- go get github.com/mattn/goveralls
|
||||
script:
|
||||
- $HOME/gopath/bin/goveralls -service=travis-ci -repotoken $COVERALLS_TOKEN
|
46
vendor/github.com/imdario/mergo/CODE_OF_CONDUCT.md
generated
vendored
Normal file
@ -0,0 +1,46 @@
|
||||
# Contributor Covenant Code of Conduct
|
||||
|
||||
## Our Pledge
|
||||
|
||||
In the interest of fostering an open and welcoming environment, we as contributors and maintainers pledge to making participation in our project and our community a harassment-free experience for everyone, regardless of age, body size, disability, ethnicity, gender identity and expression, level of experience, nationality, personal appearance, race, religion, or sexual identity and orientation.
|
||||
|
||||
## Our Standards
|
||||
|
||||
Examples of behavior that contributes to creating a positive environment include:
|
||||
|
||||
* Using welcoming and inclusive language
|
||||
* Being respectful of differing viewpoints and experiences
|
||||
* Gracefully accepting constructive criticism
|
||||
* Focusing on what is best for the community
|
||||
* Showing empathy towards other community members
|
||||
|
||||
Examples of unacceptable behavior by participants include:
|
||||
|
||||
* The use of sexualized language or imagery and unwelcome sexual attention or advances
|
||||
* Trolling, insulting/derogatory comments, and personal or political attacks
|
||||
* Public or private harassment
|
||||
* Publishing others' private information, such as a physical or electronic address, without explicit permission
|
||||
* Other conduct which could reasonably be considered inappropriate in a professional setting
|
||||
|
||||
## Our Responsibilities
|
||||
|
||||
Project maintainers are responsible for clarifying the standards of acceptable behavior and are expected to take appropriate and fair corrective action in response to any instances of unacceptable behavior.
|
||||
|
||||
Project maintainers have the right and responsibility to remove, edit, or reject comments, commits, code, wiki edits, issues, and other contributions that are not aligned to this Code of Conduct, or to ban temporarily or permanently any contributor for other behaviors that they deem inappropriate, threatening, offensive, or harmful.
|
||||
|
||||
## Scope
|
||||
|
||||
This Code of Conduct applies both within project spaces and in public spaces when an individual is representing the project or its community. Examples of representing a project or community include using an official project e-mail address, posting via an official social media account, or acting as an appointed representative at an online or offline event. Representation of a project may be further defined and clarified by project maintainers.
|
||||
|
||||
## Enforcement
|
||||
|
||||
Instances of abusive, harassing, or otherwise unacceptable behavior may be reported by contacting the project team at i@dario.im. The project team will review and investigate all complaints, and will respond in a way that it deems appropriate to the circumstances. The project team is obligated to maintain confidentiality with regard to the reporter of an incident. Further details of specific enforcement policies may be posted separately.
|
||||
|
||||
Project maintainers who do not follow or enforce the Code of Conduct in good faith may face temporary or permanent repercussions as determined by other members of the project's leadership.
|
||||
|
||||
## Attribution
|
||||
|
||||
This Code of Conduct is adapted from the [Contributor Covenant][homepage], version 1.4, available at [http://contributor-covenant.org/version/1/4][version]
|
||||
|
||||
[homepage]: http://contributor-covenant.org
|
||||
[version]: http://contributor-covenant.org/version/1/4/
|
28
vendor/github.com/imdario/mergo/LICENSE
generated
vendored
Normal file
@ -0,0 +1,28 @@
|
||||
Copyright (c) 2013 Dario Castañé. All rights reserved.
|
||||
Copyright (c) 2012 The Go Authors. All rights reserved.
|
||||
|
||||
Redistribution and use in source and binary forms, with or without
|
||||
modification, are permitted provided that the following conditions are
|
||||
met:
|
||||
|
||||
* Redistributions of source code must retain the above copyright
|
||||
notice, this list of conditions and the following disclaimer.
|
||||
* Redistributions in binary form must reproduce the above
|
||||
copyright notice, this list of conditions and the following disclaimer
|
||||
in the documentation and/or other materials provided with the
|
||||
distribution.
|
||||
* Neither the name of Google Inc. nor the names of its
|
||||
contributors may be used to endorse or promote products derived from
|
||||
this software without specific prior written permission.
|
||||
|
||||
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
|
||||
"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
|
||||
LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
|
||||
A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
|
||||
OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
|
||||
SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
|
||||
LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
|
||||
DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
|
||||
THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
|
||||
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
|
||||
OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
222
vendor/github.com/imdario/mergo/README.md
generated
vendored
Normal file
@ -0,0 +1,222 @@
|
||||
# Mergo
|
||||
|
||||
A helper to merge structs and maps in Golang. Useful for configuration default values, avoiding messy if-statements.
|
||||
|
||||
Also a lovely [comune](http://en.wikipedia.org/wiki/Mergo) (municipality) in the Province of Ancona in the Italian region of Marche.
|
||||
|
||||
## Status
|
||||
|
||||
It is ready for production use. [It is used in several projects by Docker, Google, The Linux Foundation, VMWare, Shopify, etc](https://github.com/imdario/mergo#mergo-in-the-wild).
|
||||
|
||||
[![GoDoc][3]][4]
|
||||
[![GoCard][5]][6]
|
||||
[![Build Status][1]][2]
|
||||
[![Coverage Status][7]][8]
|
||||
[![Sourcegraph][9]][10]
|
||||
|
||||
[1]: https://travis-ci.org/imdario/mergo.png
|
||||
[2]: https://travis-ci.org/imdario/mergo
|
||||
[3]: https://godoc.org/github.com/imdario/mergo?status.svg
|
||||
[4]: https://godoc.org/github.com/imdario/mergo
|
||||
[5]: https://goreportcard.com/badge/imdario/mergo
|
||||
[6]: https://goreportcard.com/report/github.com/imdario/mergo
|
||||
[7]: https://coveralls.io/repos/github/imdario/mergo/badge.svg?branch=master
|
||||
[8]: https://coveralls.io/github/imdario/mergo?branch=master
|
||||
[9]: https://sourcegraph.com/github.com/imdario/mergo/-/badge.svg
|
||||
[10]: https://sourcegraph.com/github.com/imdario/mergo?badge
|
||||
|
||||
### Latest release
|
||||
|
||||
[Release v0.3.4](https://github.com/imdario/mergo/releases/tag/v0.3.4).
|
||||
|
||||
### Important note
|
||||
|
||||
Please keep in mind that in [0.3.2](//github.com/imdario/mergo/releases/tag/0.3.2) Mergo changed the `Merge()` and `Map()` signatures to support [transformers](#transformers). An optional/variadic argument has been added, so it won't break existing code.
|
||||
|
||||
If you were using Mergo **before** April 6th 2015, please check your project works as intended after updating your local copy with ```go get -u github.com/imdario/mergo```. I apologize for any issue caused by its previous behavior and any future bug that Mergo could cause (I hope it won't!) in existing projects after the change (release 0.2.0).
|
||||
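To make the variadic change concrete, here is a minimal sketch; the `conf` type and its values are invented for this illustration, while `mergo.Merge` and `mergo.WithOverride` are the library functions documented below:

```go
package main

import (
	"fmt"

	"github.com/imdario/mergo"
)

// conf is a hypothetical configuration struct used only for illustration.
type conf struct {
	Host string
	Port int
}

func main() {
	dst := conf{Host: "localhost"}
	src := conf{Host: "example.com", Port: 8080}

	// A pre-0.3.2 style call still compiles because the options parameter is variadic.
	if err := mergo.Merge(&dst, src); err != nil {
		fmt.Println(err)
		return
	}
	fmt.Println(dst) // {localhost 8080}: only the zero-value Port was filled in

	// The same function also accepts optional functional options.
	if err := mergo.Merge(&dst, src, mergo.WithOverride); err != nil {
		fmt.Println(err)
		return
	}
	fmt.Println(dst) // {example.com 8080}: non-empty fields are overridden
}
```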
|
||||
### Donations
|
||||
|
||||
If Mergo is useful to you, consider buying me a coffee, a beer or making a monthly donation so I can keep building great free software. :heart_eyes:
|
||||
|
||||
<a href='https://ko-fi.com/B0B58839' target='_blank'><img height='36' style='border:0px;height:36px;' src='https://az743702.vo.msecnd.net/cdn/kofi1.png?v=0' border='0' alt='Buy Me a Coffee at ko-fi.com' /></a>
|
||||
[](https://beerpay.io/imdario/mergo)
|
||||
[](https://beerpay.io/imdario/mergo)
|
||||
<a href="https://liberapay.com/dario/donate"><img alt="Donate using Liberapay" src="https://liberapay.com/assets/widgets/donate.svg"></a>
|
||||
|
||||
### Mergo in the wild
|
||||
|
||||
- [moby/moby](https://github.com/moby/moby)
|
||||
- [kubernetes/kubernetes](https://github.com/kubernetes/kubernetes)
|
||||
- [vmware/dispatch](https://github.com/vmware/dispatch)
|
||||
- [Shopify/themekit](https://github.com/Shopify/themekit)
|
||||
- [imdario/zas](https://github.com/imdario/zas)
|
||||
- [matcornic/hermes](https://github.com/matcornic/hermes)
|
||||
- [OpenBazaar/openbazaar-go](https://github.com/OpenBazaar/openbazaar-go)
|
||||
- [kataras/iris](https://github.com/kataras/iris)
|
||||
- [michaelsauter/crane](https://github.com/michaelsauter/crane)
|
||||
- [go-task/task](https://github.com/go-task/task)
|
||||
- [sensu/uchiwa](https://github.com/sensu/uchiwa)
|
||||
- [ory/hydra](https://github.com/ory/hydra)
|
||||
- [sisatech/vcli](https://github.com/sisatech/vcli)
|
||||
- [dairycart/dairycart](https://github.com/dairycart/dairycart)
|
||||
- [projectcalico/felix](https://github.com/projectcalico/felix)
|
||||
- [resin-os/balena](https://github.com/resin-os/balena)
|
||||
- [go-kivik/kivik](https://github.com/go-kivik/kivik)
|
||||
- [Telefonica/govice](https://github.com/Telefonica/govice)
|
||||
- [supergiant/supergiant](https://github.com/supergiant/supergiant)
|
||||
- [SergeyTsalkov/brooce](https://github.com/SergeyTsalkov/brooce)
|
||||
- [soniah/dnsmadeeasy](https://github.com/soniah/dnsmadeeasy)
|
||||
- [ohsu-comp-bio/funnel](https://github.com/ohsu-comp-bio/funnel)
|
||||
- [EagerIO/Stout](https://github.com/EagerIO/Stout)
|
||||
- [lynndylanhurley/defsynth-api](https://github.com/lynndylanhurley/defsynth-api)
|
||||
- [russross/canvasassignments](https://github.com/russross/canvasassignments)
|
||||
- [rdegges/cryptly-api](https://github.com/rdegges/cryptly-api)
|
||||
- [casualjim/exeggutor](https://github.com/casualjim/exeggutor)
|
||||
- [divshot/gitling](https://github.com/divshot/gitling)
|
||||
- [RWJMurphy/gorl](https://github.com/RWJMurphy/gorl)
|
||||
- [andrerocker/deploy42](https://github.com/andrerocker/deploy42)
|
||||
- [elwinar/rambler](https://github.com/elwinar/rambler)
|
||||
- [tmaiaroto/gopartman](https://github.com/tmaiaroto/gopartman)
|
||||
- [jfbus/impressionist](https://github.com/jfbus/impressionist)
|
||||
- [Jmeyering/zealot](https://github.com/Jmeyering/zealot)
|
||||
- [godep-migrator/rigger-host](https://github.com/godep-migrator/rigger-host)
|
||||
- [Dronevery/MultiwaySwitch-Go](https://github.com/Dronevery/MultiwaySwitch-Go)
|
||||
- [thoas/picfit](https://github.com/thoas/picfit)
|
||||
- [mantasmatelis/whooplist-server](https://github.com/mantasmatelis/whooplist-server)
|
||||
- [jnuthong/item_search](https://github.com/jnuthong/item_search)
|
||||
- [bukalapak/snowboard](https://github.com/bukalapak/snowboard)
|
||||
|
||||
## Installation
|
||||
|
||||
go get github.com/imdario/mergo
|
||||
|
||||
// use in your .go code
|
||||
import (
|
||||
"github.com/imdario/mergo"
|
||||
)
|
||||
|
||||
## Usage
|
||||
|
||||
You can only merge structs of the same type whose exported fields are initialized to the zero value of their type, and maps of the same type. Mergo won't merge unexported (private) fields, but it merges exported fields recursively. It won't merge empty struct values either, as [they are not considered zero values](https://golang.org/ref/spec#The_zero_value). Maps are also merged recursively, except for structs inside maps (because they are not addressable using Go reflection).
|
||||
|
||||
```go
|
||||
if err := mergo.Merge(&dst, src); err != nil {
|
||||
// ...
|
||||
}
|
||||
```
|
||||
|
||||
Also, you can merge overwriting values using the transformer `WithOverride`.
|
||||
|
||||
```go
|
||||
if err := mergo.Merge(&dst, src, mergo.WithOverride); err != nil {
|
||||
// ...
|
||||
}
|
||||
```
|
||||
|
||||
Additionally, you can map a `map[string]interface{}` to a struct (and otherwise, from struct to map), following the same restrictions as in `Merge()`. Keys are capitalized to find each corresponding exported field.
|
||||
|
||||
```go
|
||||
if err := mergo.Map(&dst, srcMap); err != nil {
|
||||
// ...
|
||||
}
|
||||
```
|
||||
|
||||
Warning: if you map a struct to a map, it won't do so recursively. Don't expect Mergo to map struct members of your struct as `map[string]interface{}`; they will simply be assigned as values.
|
||||
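As a minimal sketch of that behavior (the `Outer` and `Inner` types are invented for this illustration; only `mergo.Map` comes from the library), a nested struct stays a plain struct value inside the resulting map:

```go
package main

import (
	"fmt"

	"github.com/imdario/mergo"
)

// Inner and Outer are hypothetical types used only to illustrate Map's behavior.
type Inner struct {
	Value string
}

type Outer struct {
	Name  string
	Inner Inner
}

func main() {
	src := Outer{Name: "example", Inner: Inner{Value: "nested"}}
	dst := map[string]interface{}{}

	// Exported fields are copied into the map under lower-camel-cased keys.
	if err := mergo.Map(&dst, src); err != nil {
		fmt.Println(err)
		return
	}

	// dst["inner"] holds an Inner value, not a nested map[string]interface{}.
	fmt.Printf("%#v\n", dst["inner"]) // main.Inner{Value:"nested"}
}
```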
|
||||
More information and examples in [godoc documentation](http://godoc.org/github.com/imdario/mergo).
|
||||
|
||||
### Nice example
|
||||
|
||||
```go
|
||||
package main
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"github.com/imdario/mergo"
|
||||
)
|
||||
|
||||
type Foo struct {
|
||||
A string
|
||||
B int64
|
||||
}
|
||||
|
||||
func main() {
|
||||
src := Foo{
|
||||
A: "one",
|
||||
B: 2,
|
||||
}
|
||||
dest := Foo{
|
||||
A: "two",
|
||||
}
|
||||
mergo.Merge(&dest, src)
|
||||
fmt.Println(dest)
|
||||
// Will print
|
||||
// {two 2}
|
||||
}
|
||||
```
|
||||
|
||||
Note: if tests are failing due to a missing package, please execute:
|
||||
|
||||
go get gopkg.in/yaml.v2
|
||||
|
||||
### Transformers
|
||||
|
||||
Transformers allow specific types to be merged differently than the default behavior. In other words, you can customize how some types are merged. For example, `time.Time` is a struct; it is never treated as an empty value, but its `IsZero` method can return true because all of its fields hold zero values. How can we merge a non-zero `time.Time`?
|
||||
|
||||
```go
|
||||
package main
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"github.com/imdario/mergo"
|
||||
"reflect"
|
||||
"time"
|
||||
)
|
||||
|
||||
type timeTransfomer struct {
|
||||
}
|
||||
|
||||
func (t timeTransfomer) Transformer(typ reflect.Type) func(dst, src reflect.Value) error {
|
||||
if typ == reflect.TypeOf(time.Time{}) {
|
||||
return func(dst, src reflect.Value) error {
|
||||
if dst.CanSet() {
|
||||
isZero := dst.MethodByName("IsZero")
|
||||
result := isZero.Call([]reflect.Value{})
|
||||
if result[0].Bool() {
|
||||
dst.Set(src)
|
||||
}
|
||||
}
|
||||
return nil
|
||||
}
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
type Snapshot struct {
|
||||
Time time.Time
|
||||
// ...
|
||||
}
|
||||
|
||||
func main() {
|
||||
src := Snapshot{time.Now()}
|
||||
dest := Snapshot{}
|
||||
mergo.Merge(&dest, src, mergo.WithTransformers(timeTransfomer{}))
|
||||
fmt.Println(dest)
|
||||
// Will print
|
||||
// { 2018-01-12 01:15:00 +0000 UTC m=+0.000000001 }
|
||||
}
|
||||
```
|
||||
|
||||
|
||||
## Contact me
|
||||
|
||||
If I can help you, if you have an idea, or if you are using Mergo in your projects, don't hesitate to drop me a line (or a pull request): [@im_dario](https://twitter.com/im_dario)
|
||||
|
||||
## About
|
||||
|
||||
Written by [Dario Castañé](http://dario.im).
|
||||
|
||||
## License
|
||||
|
||||
[BSD 3-Clause](http://opensource.org/licenses/BSD-3-Clause) license, as [Go language](http://golang.org/LICENSE).
|
44
vendor/github.com/imdario/mergo/doc.go
generated
vendored
Normal file
@ -0,0 +1,44 @@
|
||||
// Copyright 2013 Dario Castañé. All rights reserved.
|
||||
// Copyright 2009 The Go Authors. All rights reserved.
|
||||
// Use of this source code is governed by a BSD-style
|
||||
// license that can be found in the LICENSE file.
|
||||
|
||||
/*
|
||||
Package mergo merges same-type structs and maps by setting default values in zero-value fields.
|
||||
|
||||
Mergo won't merge unexported (private) fields but will do recursively any exported one. It also won't merge structs inside maps (because they are not addressable using Go reflection).
|
||||
|
||||
Usage
|
||||
|
||||
From my own work-in-progress project:
|
||||
|
||||
type networkConfig struct {
|
||||
Protocol string
|
||||
Address string
|
||||
ServerType string `json:"server_type"`
|
||||
Port uint16
|
||||
}
|
||||
|
||||
type FssnConfig struct {
|
||||
Network networkConfig
|
||||
}
|
||||
|
||||
var fssnDefault = FssnConfig {
|
||||
networkConfig {
|
||||
"tcp",
|
||||
"127.0.0.1",
|
||||
"http",
|
||||
31560,
|
||||
},
|
||||
}
|
||||
|
||||
// Inside a function [...]
|
||||
|
||||
if err := mergo.Merge(&config, fssnDefault); err != nil {
|
||||
log.Fatal(err)
|
||||
}
|
||||
|
||||
// More code [...]
|
||||
|
||||
*/
|
||||
package mergo
|
25
vendor/github.com/imdario/mergo/issue17_test.go
generated
vendored
Normal file
@ -0,0 +1,25 @@
|
||||
package mergo
|
||||
|
||||
import (
|
||||
"encoding/json"
|
||||
"testing"
|
||||
)
|
||||
|
||||
var (
|
||||
request = `{"timestamp":null, "name": "foo"}`
|
||||
maprequest = map[string]interface{}{
|
||||
"timestamp": nil,
|
||||
"name": "foo",
|
||||
"newStuff": "foo",
|
||||
}
|
||||
)
|
||||
|
||||
func TestIssue17MergeWithOverwrite(t *testing.T) {
|
||||
var something map[string]interface{}
|
||||
if err := json.Unmarshal([]byte(request), &something); err != nil {
|
||||
t.Errorf("Error while Unmarshalling maprequest: %s", err)
|
||||
}
|
||||
if err := MergeWithOverwrite(&something, maprequest); err != nil {
|
||||
t.Errorf("Error while merging: %s", err)
|
||||
}
|
||||
}
|
27
vendor/github.com/imdario/mergo/issue23_test.go
generated
vendored
Normal file
@ -0,0 +1,27 @@
|
||||
package mergo
|
||||
|
||||
import (
|
||||
"testing"
|
||||
"time"
|
||||
)
|
||||
|
||||
type document struct {
|
||||
Created *time.Time
|
||||
}
|
||||
|
||||
func TestIssue23MergeWithOverwrite(t *testing.T) {
|
||||
now := time.Now()
|
||||
dst := document{
|
||||
&now,
|
||||
}
|
||||
expected := time.Date(2009, time.November, 10, 23, 0, 0, 0, time.UTC)
|
||||
src := document{
|
||||
&expected,
|
||||
}
|
||||
if err := MergeWithOverwrite(&dst, src); err != nil {
|
||||
t.Errorf("Error while merging %s", err)
|
||||
}
|
||||
if !dst.Created.Equal(*src.Created) { //--> https://golang.org/pkg/time/#pkg-overview
|
||||
t.Fatalf("Created not merged in properly: dst.Created(%v) != src.Created(%v)", dst.Created, src.Created)
|
||||
}
|
||||
}
|
33
vendor/github.com/imdario/mergo/issue33_test.go
generated
vendored
Normal file
@ -0,0 +1,33 @@
|
||||
package mergo
|
||||
|
||||
import (
|
||||
"testing"
|
||||
)
|
||||
|
||||
type Foo struct {
|
||||
Str string
|
||||
Bslice []byte
|
||||
}
|
||||
|
||||
func TestIssue33Merge(t *testing.T) {
|
||||
dest := Foo{Str: "a"}
|
||||
toMerge := Foo{
|
||||
Str: "b",
|
||||
Bslice: []byte{1, 2},
|
||||
}
|
||||
if err := Merge(&dest, toMerge); err != nil {
|
||||
t.Errorf("Error while merging: %s", err)
|
||||
}
|
||||
// Merge doesn't overwrite an attribute if it doesn't have a zero value in the destination.
|
||||
// In this case, Str isn't a zero value string.
|
||||
if dest.Str != "a" {
|
||||
t.Errorf("dest.Str should have not been override as it has a non-zero value: dest.Str(%v) != 'a'", dest.Str)
|
||||
}
|
||||
// If we want to override, we must use MergeWithOverwrite or Merge using WithOverride.
|
||||
if err := Merge(&dest, toMerge, WithOverride); err != nil {
|
||||
t.Errorf("Error while merging: %s", err)
|
||||
}
|
||||
if dest.Str != toMerge.Str {
|
||||
t.Errorf("dest.Str should have been override: dest.Str(%v) != toMerge.Str(%v)", dest.Str, toMerge.Str)
|
||||
}
|
||||
}
|
59
vendor/github.com/imdario/mergo/issue38_test.go
generated
vendored
Normal file
@ -0,0 +1,59 @@
|
||||
package mergo
|
||||
|
||||
import (
|
||||
"testing"
|
||||
"time"
|
||||
)
|
||||
|
||||
type structWithoutTimePointer struct {
|
||||
Created time.Time
|
||||
}
|
||||
|
||||
func TestIssue38Merge(t *testing.T) {
|
||||
dst := structWithoutTimePointer{
|
||||
time.Now(),
|
||||
}
|
||||
|
||||
expected := time.Date(2009, time.November, 10, 23, 0, 0, 0, time.UTC)
|
||||
src := structWithoutTimePointer{
|
||||
expected,
|
||||
}
|
||||
if err := Merge(&dst, src); err != nil {
|
||||
t.Errorf("Error while merging %s", err)
|
||||
}
|
||||
if dst.Created == src.Created {
|
||||
t.Fatalf("Created merged unexpectedly: dst.Created(%v) == src.Created(%v)", dst.Created, src.Created)
|
||||
}
|
||||
}
|
||||
|
||||
func TestIssue38MergeEmptyStruct(t *testing.T) {
|
||||
dst := structWithoutTimePointer{}
|
||||
|
||||
expected := time.Date(2009, time.November, 10, 23, 0, 0, 0, time.UTC)
|
||||
src := structWithoutTimePointer{
|
||||
expected,
|
||||
}
|
||||
if err := Merge(&dst, src); err != nil {
|
||||
t.Errorf("Error while merging %s", err)
|
||||
}
|
||||
if dst.Created == src.Created {
|
||||
t.Fatalf("Created merged unexpectedly: dst.Created(%v) == src.Created(%v)", dst.Created, src.Created)
|
||||
}
|
||||
}
|
||||
|
||||
func TestIssue38MergeWithOverwrite(t *testing.T) {
|
||||
dst := structWithoutTimePointer{
|
||||
time.Now(),
|
||||
}
|
||||
|
||||
expected := time.Date(2009, time.November, 10, 23, 0, 0, 0, time.UTC)
|
||||
src := structWithoutTimePointer{
|
||||
expected,
|
||||
}
|
||||
if err := MergeWithOverwrite(&dst, src); err != nil {
|
||||
t.Errorf("Error while merging %s", err)
|
||||
}
|
||||
if dst.Created != src.Created {
|
||||
t.Fatalf("Created not merged in properly: dst.Created(%v) != src.Created(%v)", dst.Created, src.Created)
|
||||
}
|
||||
}
|
18
vendor/github.com/imdario/mergo/issue50_test.go
generated
vendored
Normal file
@ -0,0 +1,18 @@
|
||||
package mergo
|
||||
|
||||
import (
|
||||
"testing"
|
||||
"time"
|
||||
)
|
||||
|
||||
type testStruct struct {
|
||||
time.Duration
|
||||
}
|
||||
|
||||
func TestIssue50Merge(t *testing.T) {
|
||||
to := testStruct{}
|
||||
from := testStruct{}
|
||||
if err := Merge(&to, from); err != nil {
|
||||
t.Fail()
|
||||
}
|
||||
}
|
99
vendor/github.com/imdario/mergo/issue52_test.go
generated
vendored
Normal file
@ -0,0 +1,99 @@
|
||||
package mergo
|
||||
|
||||
import (
|
||||
"reflect"
|
||||
"testing"
|
||||
"time"
|
||||
)
|
||||
|
||||
type structWithTime struct {
|
||||
Birth time.Time
|
||||
}
|
||||
|
||||
type timeTransfomer struct {
|
||||
overwrite bool
|
||||
}
|
||||
|
||||
func (t timeTransfomer) Transformer(typ reflect.Type) func(dst, src reflect.Value) error {
|
||||
if typ == reflect.TypeOf(time.Time{}) {
|
||||
return func(dst, src reflect.Value) error {
|
||||
if dst.CanSet() {
|
||||
if t.overwrite {
|
||||
isZero := src.MethodByName("IsZero")
|
||||
result := isZero.Call([]reflect.Value{})
|
||||
if !result[0].Bool() {
|
||||
dst.Set(src)
|
||||
}
|
||||
} else {
|
||||
isZero := dst.MethodByName("IsZero")
|
||||
result := isZero.Call([]reflect.Value{})
|
||||
if result[0].Bool() {
|
||||
dst.Set(src)
|
||||
}
|
||||
}
|
||||
}
|
||||
return nil
|
||||
}
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
func TestOverwriteZeroSrcTime(t *testing.T) {
|
||||
now := time.Now()
|
||||
dst := structWithTime{now}
|
||||
src := structWithTime{}
|
||||
if err := MergeWithOverwrite(&dst, src); err != nil {
|
||||
t.FailNow()
|
||||
}
|
||||
if !dst.Birth.IsZero() {
|
||||
t.Fatalf("dst should have been overwritten: dst.Birth(%v) != now(%v)", dst.Birth, now)
|
||||
}
|
||||
}
|
||||
|
||||
func TestOverwriteZeroSrcTimeWithTransformer(t *testing.T) {
|
||||
now := time.Now()
|
||||
dst := structWithTime{now}
|
||||
src := structWithTime{}
|
||||
if err := MergeWithOverwrite(&dst, src, WithTransformers(timeTransfomer{true})); err != nil {
|
||||
t.FailNow()
|
||||
}
|
||||
if dst.Birth.IsZero() {
|
||||
t.Fatalf("dst should not have been overwritten: dst.Birth(%v) != now(%v)", dst.Birth, now)
|
||||
}
|
||||
}
|
||||
|
||||
func TestOverwriteZeroDstTime(t *testing.T) {
|
||||
now := time.Now()
|
||||
dst := structWithTime{}
|
||||
src := structWithTime{now}
|
||||
if err := MergeWithOverwrite(&dst, src); err != nil {
|
||||
t.FailNow()
|
||||
}
|
||||
if dst.Birth.IsZero() {
|
||||
t.Fatalf("dst should have been overwritten: dst.Birth(%v) != zero(%v)", dst.Birth, time.Time{})
|
||||
}
|
||||
}
|
||||
|
||||
func TestZeroDstTime(t *testing.T) {
|
||||
now := time.Now()
|
||||
dst := structWithTime{}
|
||||
src := structWithTime{now}
|
||||
if err := Merge(&dst, src); err != nil {
|
||||
t.FailNow()
|
||||
}
|
||||
if !dst.Birth.IsZero() {
|
||||
t.Fatalf("dst should not have been overwritten: dst.Birth(%v) != zero(%v)", dst.Birth, time.Time{})
|
||||
}
|
||||
}
|
||||
|
||||
func TestZeroDstTimeWithTransformer(t *testing.T) {
|
||||
now := time.Now()
|
||||
dst := structWithTime{}
|
||||
src := structWithTime{now}
|
||||
if err := Merge(&dst, src, WithTransformers(timeTransfomer{})); err != nil {
|
||||
t.FailNow()
|
||||
}
|
||||
if dst.Birth.IsZero() {
|
||||
t.Fatalf("dst should have been overwritten: dst.Birth(%v) != now(%v)", dst.Birth, now)
|
||||
}
|
||||
}
|
20
vendor/github.com/imdario/mergo/issue61_test.go
generated
vendored
Normal file
@ -0,0 +1,20 @@
|
||||
package mergo
|
||||
|
||||
import (
|
||||
"reflect"
|
||||
"testing"
|
||||
)
|
||||
|
||||
func TestIssue61MergeNilMap(t *testing.T) {
|
||||
type T struct {
|
||||
I map[string][]string
|
||||
}
|
||||
t1 := T{}
|
||||
t2 := T{I: map[string][]string{"hi": {"there"}}}
|
||||
if err := Merge(&t1, t2); err != nil {
|
||||
t.Fail()
|
||||
}
|
||||
if !reflect.DeepEqual(t2, T{I: map[string][]string{"hi": {"there"}}}) {
|
||||
t.FailNow()
|
||||
}
|
||||
}
|
38
vendor/github.com/imdario/mergo/issue64_test.go
generated
vendored
Normal file
@ -0,0 +1,38 @@
|
||||
package mergo
|
||||
|
||||
import (
|
||||
"testing"
|
||||
)
|
||||
|
||||
type Student struct {
|
||||
Name string
|
||||
Books []string
|
||||
}
|
||||
|
||||
var testData = []struct {
|
||||
S1 Student
|
||||
S2 Student
|
||||
ExpectedSlice []string
|
||||
}{
|
||||
{Student{"Jack", []string{"a", "B"}}, Student{"Tom", []string{"1"}}, []string{"a", "B"}},
|
||||
{Student{"Jack", []string{"a", "B"}}, Student{"Tom", []string{}}, []string{"a", "B"}},
|
||||
{Student{"Jack", []string{}}, Student{"Tom", []string{"1"}}, []string{"1"}},
|
||||
{Student{"Jack", []string{}}, Student{"Tom", []string{}}, []string{}},
|
||||
}
|
||||
|
||||
func TestIssue64MergeSliceWithOverride(t *testing.T) {
|
||||
for _, data := range testData {
|
||||
err := Merge(&data.S2, data.S1, WithOverride)
|
||||
if err != nil {
|
||||
t.Errorf("Error while merging %s", err)
|
||||
}
|
||||
if len(data.S2.Books) != len(data.ExpectedSlice) {
|
||||
t.Fatalf("Got %d elements in slice, but expected %d", len(data.S2.Books), len(data.ExpectedSlice))
|
||||
}
|
||||
for i, val := range data.S2.Books {
|
||||
if val != data.ExpectedSlice[i] {
|
||||
t.Fatalf("Expected %s, but got %s while merging slice with override", data.ExpectedSlice[i], val)
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
29
vendor/github.com/imdario/mergo/issue66_test.go
generated
vendored
Normal file
@ -0,0 +1,29 @@
|
||||
package mergo
|
||||
|
||||
import (
|
||||
"testing"
|
||||
)
|
||||
|
||||
type PrivateSliceTest66 struct {
|
||||
PublicStrings []string
|
||||
privateStrings []string
|
||||
}
|
||||
|
||||
func TestPrivateSlice(t *testing.T) {
|
||||
p1 := PrivateSliceTest66{
|
||||
PublicStrings: []string{"one", "two", "three"},
|
||||
privateStrings: []string{"four", "five"},
|
||||
}
|
||||
p2 := PrivateSliceTest66{
|
||||
PublicStrings: []string{"six", "seven"},
|
||||
}
|
||||
if err := Merge(&p1, p2); err != nil {
|
||||
t.Fatalf("Error during the merge: %v", err)
|
||||
}
|
||||
if len(p1.PublicStrings) != 5 {
|
||||
t.Error("5 elements should be in 'PublicStrings' field")
|
||||
}
|
||||
if len(p1.privateStrings) != 2 {
|
||||
t.Error("2 elements should be in 'privateStrings' field")
|
||||
}
|
||||
}
|
174
vendor/github.com/imdario/mergo/map.go
generated
vendored
Normal file
@ -0,0 +1,174 @@
|
||||
// Copyright 2014 Dario Castañé. All rights reserved.
|
||||
// Copyright 2009 The Go Authors. All rights reserved.
|
||||
// Use of this source code is governed by a BSD-style
|
||||
// license that can be found in the LICENSE file.
|
||||
|
||||
// Based on src/pkg/reflect/deepequal.go from official
|
||||
// golang's stdlib.
|
||||
|
||||
package mergo
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"reflect"
|
||||
"unicode"
|
||||
"unicode/utf8"
|
||||
)
|
||||
|
||||
func changeInitialCase(s string, mapper func(rune) rune) string {
|
||||
if s == "" {
|
||||
return s
|
||||
}
|
||||
r, n := utf8.DecodeRuneInString(s)
|
||||
return string(mapper(r)) + s[n:]
|
||||
}
|
||||
|
||||
func isExported(field reflect.StructField) bool {
|
||||
r, _ := utf8.DecodeRuneInString(field.Name)
|
||||
return r >= 'A' && r <= 'Z'
|
||||
}
|
||||
|
||||
// Traverses recursively both values, assigning src's fields values to dst.
|
||||
// The map argument tracks comparisons that have already been seen, which allows
|
||||
// short circuiting on recursive types.
|
||||
func deepMap(dst, src reflect.Value, visited map[uintptr]*visit, depth int, config *Config) (err error) {
|
||||
overwrite := config.Overwrite
|
||||
if dst.CanAddr() {
|
||||
addr := dst.UnsafeAddr()
|
||||
h := 17 * addr
|
||||
seen := visited[h]
|
||||
typ := dst.Type()
|
||||
for p := seen; p != nil; p = p.next {
|
||||
if p.ptr == addr && p.typ == typ {
|
||||
return nil
|
||||
}
|
||||
}
|
||||
// Remember, remember...
|
||||
visited[h] = &visit{addr, typ, seen}
|
||||
}
|
||||
zeroValue := reflect.Value{}
|
||||
switch dst.Kind() {
|
||||
case reflect.Map:
|
||||
dstMap := dst.Interface().(map[string]interface{})
|
||||
for i, n := 0, src.NumField(); i < n; i++ {
|
||||
srcType := src.Type()
|
||||
field := srcType.Field(i)
|
||||
if !isExported(field) {
|
||||
continue
|
||||
}
|
||||
fieldName := field.Name
|
||||
fieldName = changeInitialCase(fieldName, unicode.ToLower)
|
||||
if v, ok := dstMap[fieldName]; !ok || (isEmptyValue(reflect.ValueOf(v)) || overwrite) {
|
||||
dstMap[fieldName] = src.Field(i).Interface()
|
||||
}
|
||||
}
|
||||
case reflect.Ptr:
|
||||
if dst.IsNil() {
|
||||
v := reflect.New(dst.Type().Elem())
|
||||
dst.Set(v)
|
||||
}
|
||||
dst = dst.Elem()
|
||||
fallthrough
|
||||
case reflect.Struct:
|
||||
srcMap := src.Interface().(map[string]interface{})
|
||||
for key := range srcMap {
|
||||
srcValue := srcMap[key]
|
||||
fieldName := changeInitialCase(key, unicode.ToUpper)
|
||||
dstElement := dst.FieldByName(fieldName)
|
||||
if dstElement == zeroValue {
|
||||
// We discard it because the field doesn't exist.
|
||||
continue
|
||||
}
|
||||
srcElement := reflect.ValueOf(srcValue)
|
||||
dstKind := dstElement.Kind()
|
||||
srcKind := srcElement.Kind()
|
||||
if srcKind == reflect.Ptr && dstKind != reflect.Ptr {
|
||||
srcElement = srcElement.Elem()
|
||||
srcKind = reflect.TypeOf(srcElement.Interface()).Kind()
|
||||
} else if dstKind == reflect.Ptr {
|
||||
// Can this work? I guess it can't.
|
||||
if srcKind != reflect.Ptr && srcElement.CanAddr() {
|
||||
srcPtr := srcElement.Addr()
|
||||
srcElement = reflect.ValueOf(srcPtr)
|
||||
srcKind = reflect.Ptr
|
||||
}
|
||||
}
|
||||
|
||||
if !srcElement.IsValid() {
|
||||
continue
|
||||
}
|
||||
if srcKind == dstKind {
|
||||
if err = deepMerge(dstElement, srcElement, visited, depth+1, config); err != nil {
|
||||
return
|
||||
}
|
||||
} else if dstKind == reflect.Interface && dstElement.Kind() == reflect.Interface {
|
||||
if err = deepMerge(dstElement, srcElement, visited, depth+1, config); err != nil {
|
||||
return
|
||||
}
|
||||
} else if srcKind == reflect.Map {
|
||||
if err = deepMap(dstElement, srcElement, visited, depth+1, config); err != nil {
|
||||
return
|
||||
}
|
||||
} else {
|
||||
return fmt.Errorf("type mismatch on %s field: found %v, expected %v", fieldName, srcKind, dstKind)
|
||||
}
|
||||
}
|
||||
}
|
||||
return
|
||||
}
|
||||
|
||||
// Map sets fields' values in dst from src.
|
||||
// src can be a map with string keys or a struct. dst must be the opposite:
|
||||
// if src is a map, dst must be a valid pointer to struct. If src is a struct,
|
||||
// dst must be map[string]interface{}.
|
||||
// It won't merge unexported (private) fields and will do recursively
|
||||
// any exported field.
|
||||
// If dst is a map, keys will be src fields' names in lower camel case.
|
||||
// Missing key in src that doesn't match a field in dst will be skipped. This
|
||||
// doesn't apply if dst is a map.
|
||||
// This is a separate method from Merge because it is cleaner and it keeps sane
|
||||
// semantics: merging equal types, mapping different (restricted) types.
|
||||
func Map(dst, src interface{}, opts ...func(*Config)) error {
|
||||
return _map(dst, src, opts...)
|
||||
}
|
||||
|
||||
// MapWithOverwrite will do the same as Map except that non-empty dst attributes will be overridden by
|
||||
// non-empty src attribute values.
|
||||
// Deprecated: Use Map(…) with WithOverride
|
||||
func MapWithOverwrite(dst, src interface{}, opts ...func(*Config)) error {
|
||||
return _map(dst, src, append(opts, WithOverride)...)
|
||||
}
|
||||
|
||||
func _map(dst, src interface{}, opts ...func(*Config)) error {
|
||||
var (
|
||||
vDst, vSrc reflect.Value
|
||||
err error
|
||||
)
|
||||
config := &Config{}
|
||||
|
||||
for _, opt := range opts {
|
||||
opt(config)
|
||||
}
|
||||
|
||||
if vDst, vSrc, err = resolveValues(dst, src); err != nil {
|
||||
return err
|
||||
}
|
||||
// To be friction-less, we redirect equal-type arguments
|
||||
// to deepMerge. Only because arguments can be anything.
|
||||
if vSrc.Kind() == vDst.Kind() {
|
||||
return deepMerge(vDst, vSrc, make(map[uintptr]*visit), 0, config)
|
||||
}
|
||||
switch vSrc.Kind() {
|
||||
case reflect.Struct:
|
||||
if vDst.Kind() != reflect.Map {
|
||||
return ErrExpectedMapAsDestination
|
||||
}
|
||||
case reflect.Map:
|
||||
if vDst.Kind() != reflect.Struct {
|
||||
return ErrExpectedStructAsDestination
|
||||
}
|
||||
default:
|
||||
return ErrNotSupported
|
||||
}
|
||||
return deepMap(vDst, vSrc, make(map[uintptr]*visit), 0, config)
|
||||
}
|
233
vendor/github.com/imdario/mergo/merge.go
generated
vendored
Normal file
@ -0,0 +1,233 @@
|
||||
// Copyright 2013 Dario Castañé. All rights reserved.
|
||||
// Copyright 2009 The Go Authors. All rights reserved.
|
||||
// Use of this source code is governed by a BSD-style
|
||||
// license that can be found in the LICENSE file.
|
||||
|
||||
// Based on src/pkg/reflect/deepequal.go from official
|
||||
// golang's stdlib.
|
||||
|
||||
package mergo
|
||||
|
||||
import (
|
||||
"reflect"
|
||||
)
|
||||
|
||||
func hasExportedField(dst reflect.Value) (exported bool) {
|
||||
for i, n := 0, dst.NumField(); i < n; i++ {
|
||||
field := dst.Type().Field(i)
|
||||
if field.Anonymous && dst.Field(i).Kind() == reflect.Struct {
|
||||
exported = exported || hasExportedField(dst.Field(i))
|
||||
} else {
|
||||
exported = exported || len(field.PkgPath) == 0
|
||||
}
|
||||
}
|
||||
return
|
||||
}
|
||||
|
||||
type Config struct {
|
||||
Overwrite bool
|
||||
AppendSlice bool
|
||||
Transformers Transformers
|
||||
}
|
||||
|
||||
type Transformers interface {
|
||||
Transformer(reflect.Type) func(dst, src reflect.Value) error
|
||||
}
|
||||
|
||||
// Traverses recursively both values, assigning src's fields values to dst.
|
||||
// The map argument tracks comparisons that have already been seen, which allows
|
||||
// short circuiting on recursive types.
|
||||
func deepMerge(dst, src reflect.Value, visited map[uintptr]*visit, depth int, config *Config) (err error) {
|
||||
overwrite := config.Overwrite
|
||||
|
||||
if !src.IsValid() {
|
||||
return
|
||||
}
|
||||
if dst.CanAddr() {
|
||||
addr := dst.UnsafeAddr()
|
||||
h := 17 * addr
|
||||
seen := visited[h]
|
||||
typ := dst.Type()
|
||||
for p := seen; p != nil; p = p.next {
|
||||
if p.ptr == addr && p.typ == typ {
|
||||
return nil
|
||||
}
|
||||
}
|
||||
// Remember, remember...
|
||||
visited[h] = &visit{addr, typ, seen}
|
||||
}
|
||||
|
||||
if config.Transformers != nil && !isEmptyValue(dst) {
|
||||
if fn := config.Transformers.Transformer(dst.Type()); fn != nil {
|
||||
err = fn(dst, src)
|
||||
return
|
||||
}
|
||||
}
|
||||
|
||||
switch dst.Kind() {
|
||||
case reflect.Struct:
|
||||
if hasExportedField(dst) {
|
||||
for i, n := 0, dst.NumField(); i < n; i++ {
|
||||
if err = deepMerge(dst.Field(i), src.Field(i), visited, depth+1, config); err != nil {
|
||||
return
|
||||
}
|
||||
}
|
||||
} else {
|
||||
if dst.CanSet() && !isEmptyValue(src) && (overwrite || isEmptyValue(dst)) {
|
||||
dst.Set(src)
|
||||
}
|
||||
}
|
||||
case reflect.Map:
|
||||
if dst.IsNil() && !src.IsNil() {
|
||||
dst.Set(reflect.MakeMap(dst.Type()))
|
||||
}
|
||||
for _, key := range src.MapKeys() {
|
||||
srcElement := src.MapIndex(key)
|
||||
if !srcElement.IsValid() {
|
||||
continue
|
||||
}
|
||||
dstElement := dst.MapIndex(key)
|
||||
switch srcElement.Kind() {
|
||||
case reflect.Chan, reflect.Func, reflect.Map, reflect.Interface, reflect.Slice:
|
||||
if srcElement.IsNil() {
|
||||
continue
|
||||
}
|
||||
fallthrough
|
||||
default:
|
||||
if !srcElement.CanInterface() {
|
||||
continue
|
||||
}
|
||||
switch reflect.TypeOf(srcElement.Interface()).Kind() {
|
||||
case reflect.Struct:
|
||||
fallthrough
|
||||
case reflect.Ptr:
|
||||
fallthrough
|
||||
case reflect.Map:
|
||||
if err = deepMerge(dstElement, srcElement, visited, depth+1, config); err != nil {
|
||||
return
|
||||
}
|
||||
case reflect.Slice:
|
||||
srcSlice := reflect.ValueOf(srcElement.Interface())
|
||||
|
||||
var dstSlice reflect.Value
|
||||
if !dstElement.IsValid() || dstElement.IsNil() {
|
||||
dstSlice = reflect.MakeSlice(srcSlice.Type(), 0, srcSlice.Len())
|
||||
} else {
|
||||
dstSlice = reflect.ValueOf(dstElement.Interface())
|
||||
}
|
||||
|
||||
dstSlice = reflect.AppendSlice(dstSlice, srcSlice)
|
||||
dst.SetMapIndex(key, dstSlice)
|
||||
}
|
||||
}
|
||||
if dstElement.IsValid() && reflect.TypeOf(srcElement.Interface()).Kind() == reflect.Map {
|
||||
continue
|
||||
}
|
||||
|
||||
if srcElement.IsValid() && (overwrite || (!dstElement.IsValid() || isEmptyValue(dst))) {
|
||||
if dst.IsNil() {
|
||||
dst.Set(reflect.MakeMap(dst.Type()))
|
||||
}
|
||||
dst.SetMapIndex(key, srcElement)
|
||||
}
|
||||
}
|
||||
case reflect.Slice:
|
||||
if !dst.CanSet() {
|
||||
break
|
||||
}
|
||||
if !isEmptyValue(src) && (overwrite || isEmptyValue(dst)) && !config.AppendSlice {
|
||||
dst.Set(src)
|
||||
} else {
|
||||
dst.Set(reflect.AppendSlice(dst, src))
|
||||
}
|
||||
case reflect.Ptr:
|
||||
fallthrough
|
||||
case reflect.Interface:
|
||||
if src.IsNil() {
|
||||
break
|
||||
}
|
||||
if src.Kind() != reflect.Interface {
|
||||
if dst.IsNil() || overwrite {
|
||||
if dst.CanSet() && (overwrite || isEmptyValue(dst)) {
|
||||
dst.Set(src)
|
||||
}
|
||||
} else if src.Kind() == reflect.Ptr {
|
||||
if err = deepMerge(dst.Elem(), src.Elem(), visited, depth+1, config); err != nil {
|
||||
return
|
||||
}
|
||||
} else if dst.Elem().Type() == src.Type() {
|
||||
if err = deepMerge(dst.Elem(), src, visited, depth+1, config); err != nil {
|
||||
return
|
||||
}
|
||||
} else {
|
||||
return ErrDifferentArgumentsTypes
|
||||
}
|
||||
break
|
||||
}
|
||||
if dst.IsNil() || overwrite {
|
||||
if dst.CanSet() && (overwrite || isEmptyValue(dst)) {
|
||||
dst.Set(src)
|
||||
}
|
||||
} else if err = deepMerge(dst.Elem(), src.Elem(), visited, depth+1, config); err != nil {
|
||||
return
|
||||
}
|
||||
default:
|
||||
if dst.CanSet() && !isEmptyValue(src) && (overwrite || isEmptyValue(dst)) {
|
||||
dst.Set(src)
|
||||
}
|
||||
}
|
||||
return
|
||||
}
|
||||
|
||||
// Merge will fill any empty value type attributes on the dst struct using corresponding
|
||||
// src attributes if they themselves are not empty. dst and src must be valid same-type structs
|
||||
// and dst must be a pointer to struct.
|
||||
// It won't merge unexported (private) fields and will do recursively any exported field.
|
||||
func Merge(dst, src interface{}, opts ...func(*Config)) error {
|
||||
return merge(dst, src, opts...)
|
||||
}
|
||||
|
||||
// MergeWithOverwrite will do the same as Merge except that non-empty dst attributes will be overridden by
|
||||
// non-empty src attribute values.
|
||||
// Deprecated: use Merge(…) with WithOverride
|
||||
func MergeWithOverwrite(dst, src interface{}, opts ...func(*Config)) error {
|
||||
return merge(dst, src, append(opts, WithOverride)...)
|
||||
}
|
||||
|
||||
// WithTransformers adds transformers to merge, allowing you to customize the merging of some types.
|
||||
func WithTransformers(transformers Transformers) func(*Config) {
|
||||
return func(config *Config) {
|
||||
config.Transformers = transformers
|
||||
}
|
||||
}
|
||||
|
||||
// WithOverride will make merge override non-empty dst attributes with non-empty src attribute values.
|
||||
func WithOverride(config *Config) {
|
||||
config.Overwrite = true
|
||||
}
|
||||
|
||||
// WithAppendSlice will make merge append slices instead of overwriting them.
|
||||
func WithAppendSlice(config *Config) {
|
||||
config.AppendSlice = true
|
||||
}
|
||||
|
||||
func merge(dst, src interface{}, opts ...func(*Config)) error {
|
||||
var (
|
||||
vDst, vSrc reflect.Value
|
||||
err error
|
||||
)
|
||||
|
||||
config := &Config{}
|
||||
|
||||
for _, opt := range opts {
|
||||
opt(config)
|
||||
}
|
||||
|
||||
if vDst, vSrc, err = resolveValues(dst, src); err != nil {
|
||||
return err
|
||||
}
|
||||
if vDst.Type() != vSrc.Type() {
|
||||
return ErrDifferentArgumentsTypes
|
||||
}
|
||||
return deepMerge(vDst, vSrc, make(map[uintptr]*visit), 0, config)
|
||||
}
|
33
vendor/github.com/imdario/mergo/merge_appendslice_test.go
generated
vendored
Normal file
@ -0,0 +1,33 @@
|
||||
package mergo
|
||||
|
||||
import (
|
||||
"testing"
|
||||
)
|
||||
|
||||
var testDataS = []struct {
|
||||
S1 Student
|
||||
S2 Student
|
||||
ExpectedSlice []string
|
||||
}{
|
||||
{Student{"Jack", []string{"a", "B"}}, Student{"Tom", []string{"1"}}, []string{"1", "a", "B"}},
|
||||
{Student{"Jack", []string{"a", "B"}}, Student{"Tom", []string{}}, []string{"a", "B"}},
|
||||
{Student{"Jack", []string{}}, Student{"Tom", []string{"1"}}, []string{"1"}},
|
||||
{Student{"Jack", []string{}}, Student{"Tom", []string{}}, []string{}},
|
||||
}
|
||||
|
||||
func TestMergeSliceWithOverrideWithAppendSlice(t *testing.T) {
|
||||
for _, data := range testDataS {
|
||||
err := Merge(&data.S2, data.S1, WithOverride, WithAppendSlice)
|
||||
if err != nil {
|
||||
t.Errorf("Error while merging %s", err)
|
||||
}
|
||||
if len(data.S2.Books) != len(data.ExpectedSlice) {
|
||||
t.Fatalf("Got %d elements in slice, but expected %d", len(data.S2.Books), len(data.ExpectedSlice))
|
||||
}
|
||||
for i, val := range data.S2.Books {
|
||||
if val != data.ExpectedSlice[i] {
|
||||
t.Fatalf("Expected %s, but got %s while merging slice with override", data.ExpectedSlice[i], val)
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
50
vendor/github.com/imdario/mergo/merge_test.go
generated
vendored
Normal file
@ -0,0 +1,50 @@
|
||||
package mergo
|
||||
|
||||
import (
|
||||
"reflect"
|
||||
"testing"
|
||||
)
|
||||
|
||||
type transformer struct {
|
||||
m map[reflect.Type]func(dst, src reflect.Value) error
|
||||
}
|
||||
|
||||
func (s *transformer) Transformer(t reflect.Type) func(dst, src reflect.Value) error {
|
||||
if fn, ok := s.m[t]; ok {
|
||||
return fn
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
type foo struct {
|
||||
s string
|
||||
Bar *bar
|
||||
}
|
||||
|
||||
type bar struct {
|
||||
i int
|
||||
s map[string]string
|
||||
}
|
||||
|
||||
func TestMergeWithTransformerNilStruct(t *testing.T) {
|
||||
a := foo{s: "foo"}
|
||||
b := foo{Bar: &bar{i: 2, s: map[string]string{"foo": "bar"}}}
|
||||
if err := Merge(&a, &b, WithOverride, WithTransformers(&transformer{
|
||||
m: map[reflect.Type]func(dst, src reflect.Value) error{
|
||||
reflect.TypeOf(&bar{}): func(dst, src reflect.Value) error {
|
||||
// Do sthg with Elem
|
||||
t.Log(dst.Elem().FieldByName("i"))
|
||||
t.Log(src.Elem())
|
||||
return nil
|
||||
},
|
||||
},
|
||||
})); err != nil {
|
||||
t.Fatal(err)
|
||||
}
|
||||
if a.s != "foo" {
|
||||
t.Fatalf("b not merged in properly: a.s.Value(%s) != expected(%s)", a.s, "foo")
|
||||
}
|
||||
if a.Bar == nil {
|
||||
t.Fatalf("b not merged in properly: a.Bar shouldn't be nil")
|
||||
}
|
||||
}
|
92
vendor/github.com/imdario/mergo/mergo.go
generated
vendored
Normal file
@ -0,0 +1,92 @@
|
||||
// Copyright 2013 Dario Castañé. All rights reserved.
|
||||
// Copyright 2009 The Go Authors. All rights reserved.
|
||||
// Use of this source code is governed by a BSD-style
|
||||
// license that can be found in the LICENSE file.
|
||||
|
||||
// Based on src/pkg/reflect/deepequal.go from official
|
||||
// golang's stdlib.
|
||||
|
||||
package mergo
|
||||
|
||||
import (
|
||||
"errors"
|
||||
"reflect"
|
||||
)
|
||||
|
||||
// Errors reported by Mergo when it finds invalid arguments.
|
||||
var (
|
||||
ErrNilArguments = errors.New("src and dst must not be nil")
|
||||
ErrDifferentArgumentsTypes = errors.New("src and dst must be of same type")
|
||||
ErrNotSupported = errors.New("only structs and maps are supported")
|
||||
ErrExpectedMapAsDestination = errors.New("dst was expected to be a map")
|
||||
ErrExpectedStructAsDestination = errors.New("dst was expected to be a struct")
|
||||
)
|
||||
|
||||
// During deepMerge, must keep track of checks that are
|
||||
// in progress. The comparison algorithm assumes that all
|
||||
// checks in progress are true when it reencounters them.
|
||||
// Visited are stored in a map indexed by 17 * a1 + a2;
|
||||
type visit struct {
|
||||
ptr uintptr
|
||||
typ reflect.Type
|
||||
next *visit
|
||||
}
|
||||
|
||||
// From src/pkg/encoding/json/encode.go.
|
||||
func isEmptyValue(v reflect.Value) bool {
|
||||
switch v.Kind() {
|
||||
case reflect.Array, reflect.Map, reflect.Slice, reflect.String:
|
||||
return v.Len() == 0
|
||||
case reflect.Bool:
|
||||
return !v.Bool()
|
||||
case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64:
|
||||
return v.Int() == 0
|
||||
case reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64, reflect.Uintptr:
|
||||
return v.Uint() == 0
|
||||
case reflect.Float32, reflect.Float64:
|
||||
return v.Float() == 0
|
||||
case reflect.Interface, reflect.Ptr, reflect.Func:
|
||||
return v.IsNil()
|
||||
case reflect.Invalid:
|
||||
return true
|
||||
}
|
||||
return false
|
||||
}
|
||||
|
||||
func resolveValues(dst, src interface{}) (vDst, vSrc reflect.Value, err error) {
|
||||
if dst == nil || src == nil {
|
||||
err = ErrNilArguments
|
||||
return
|
||||
}
|
||||
vDst = reflect.ValueOf(dst).Elem()
|
||||
if vDst.Kind() != reflect.Struct && vDst.Kind() != reflect.Map {
|
||||
err = ErrNotSupported
|
||||
return
|
||||
}
|
||||
vSrc = reflect.ValueOf(src)
|
||||
// We check if vSrc is a pointer to dereference it.
|
||||
if vSrc.Kind() == reflect.Ptr {
|
||||
vSrc = vSrc.Elem()
|
||||
}
|
||||
return
|
||||
}
|
||||
|
||||
// Traverses recursively both values, assigning src's fields values to dst.
|
||||
// The map argument tracks comparisons that have already been seen, which allows
|
||||
// short circuiting on recursive types.
|
||||
func deeper(dst, src reflect.Value, visited map[uintptr]*visit, depth int) (err error) {
|
||||
if dst.CanAddr() {
|
||||
addr := dst.UnsafeAddr()
|
||||
h := 17 * addr
|
||||
seen := visited[h]
|
||||
typ := dst.Type()
|
||||
for p := seen; p != nil; p = p.next {
|
||||
if p.ptr == addr && p.typ == typ {
|
||||
return nil
|
||||
}
|
||||
}
|
||||
// Remember, remember...
|
||||
visited[h] = &visit{addr, typ, seen}
|
||||
}
|
||||
return // TODO refactor
|
||||
}
|
727
vendor/github.com/imdario/mergo/mergo_test.go
generated
vendored
Normal file
@ -0,0 +1,727 @@
|
||||
// Copyright 2013 Dario Castañé. All rights reserved.
|
||||
// Copyright 2009 The Go Authors. All rights reserved.
|
||||
// Use of this source code is governed by a BSD-style
|
||||
// license that can be found in the LICENSE file.
|
||||
|
||||
package mergo
|
||||
|
||||
import (
|
||||
"gopkg.in/yaml.v2"
|
||||
"io/ioutil"
|
||||
"reflect"
|
||||
"testing"
|
||||
"time"
|
||||
)
|
||||
|
||||
type simpleTest struct {
|
||||
Value int
|
||||
}
|
||||
|
||||
type complexTest struct {
|
||||
St simpleTest
|
||||
sz int
|
||||
ID string
|
||||
}
|
||||
|
||||
type mapTest struct {
|
||||
M map[int]int
|
||||
}
|
||||
|
||||
type ifcTest struct {
|
||||
I interface{}
|
||||
}
|
||||
|
||||
type moreComplextText struct {
|
||||
Ct complexTest
|
||||
St simpleTest
|
||||
Nt simpleTest
|
||||
}
|
||||
|
||||
type pointerTest struct {
|
||||
C *simpleTest
|
||||
}
|
||||
|
||||
type sliceTest struct {
|
||||
S []int
|
||||
}
|
||||
|
||||
func TestKb(t *testing.T) {
|
||||
type testStruct struct {
|
||||
Name string
|
||||
KeyValue map[string]interface{}
|
||||
}
|
||||
|
||||
akv := make(map[string]interface{})
|
||||
akv["Key1"] = "not value 1"
|
||||
akv["Key2"] = "value2"
|
||||
a := testStruct{}
|
||||
a.Name = "A"
|
||||
a.KeyValue = akv
|
||||
|
||||
bkv := make(map[string]interface{})
|
||||
bkv["Key1"] = "value1"
|
||||
bkv["Key3"] = "value3"
|
||||
b := testStruct{}
|
||||
b.Name = "B"
|
||||
b.KeyValue = bkv
|
||||
|
||||
ekv := make(map[string]interface{})
|
||||
ekv["Key1"] = "value1"
|
||||
ekv["Key2"] = "value2"
|
||||
ekv["Key3"] = "value3"
|
||||
expected := testStruct{}
|
||||
expected.Name = "B"
|
||||
expected.KeyValue = ekv
|
||||
|
||||
Merge(&b, a)
|
||||
|
||||
if !reflect.DeepEqual(b, expected) {
|
||||
t.Errorf("Actual: %#v did not match \nExpected: %#v", b, expected)
|
||||
}
|
||||
}
|
||||
|
||||
func TestNil(t *testing.T) {
|
||||
if err := Merge(nil, nil); err != ErrNilArguments {
|
||||
t.Fail()
|
||||
}
|
||||
}
|
||||
|
||||
func TestDifferentTypes(t *testing.T) {
|
||||
a := simpleTest{42}
|
||||
b := 42
|
||||
if err := Merge(&a, b); err != ErrDifferentArgumentsTypes {
|
||||
t.Fail()
|
||||
}
|
||||
}
|
||||
|
||||
func TestSimpleStruct(t *testing.T) {
|
||||
a := simpleTest{}
|
||||
b := simpleTest{42}
|
||||
if err := Merge(&a, b); err != nil {
|
||||
t.FailNow()
|
||||
}
|
||||
if a.Value != 42 {
|
||||
t.Fatalf("b not merged in properly: a.Value(%d) != b.Value(%d)", a.Value, b.Value)
|
||||
}
|
||||
if !reflect.DeepEqual(a, b) {
|
||||
t.FailNow()
|
||||
}
|
||||
}
|
||||
|
||||
func TestComplexStruct(t *testing.T) {
|
||||
a := complexTest{}
|
||||
a.ID = "athing"
|
||||
b := complexTest{simpleTest{42}, 1, "bthing"}
|
||||
if err := Merge(&a, b); err != nil {
|
||||
t.FailNow()
|
||||
}
|
||||
if a.St.Value != 42 {
|
||||
t.Fatalf("b not merged in properly: a.St.Value(%d) != b.St.Value(%d)", a.St.Value, b.St.Value)
|
||||
}
|
||||
if a.sz == 1 {
|
||||
t.Fatalf("a's private field sz not preserved from merge: a.sz(%d) == b.sz(%d)", a.sz, b.sz)
|
||||
}
|
||||
if a.ID == b.ID {
|
||||
t.Fatalf("a's field ID merged unexpectedly: a.ID(%s) == b.ID(%s)", a.ID, b.ID)
|
||||
}
|
||||
}
|
||||
|
||||
func TestComplexStructWithOverwrite(t *testing.T) {
|
||||
a := complexTest{simpleTest{1}, 1, "do-not-overwrite-with-empty-value"}
|
||||
b := complexTest{simpleTest{42}, 2, ""}
|
||||
|
||||
expect := complexTest{simpleTest{42}, 1, "do-not-overwrite-with-empty-value"}
|
||||
if err := MergeWithOverwrite(&a, b); err != nil {
|
||||
t.FailNow()
|
||||
}
|
||||
|
||||
if !reflect.DeepEqual(a, expect) {
|
||||
t.Fatalf("Test failed:\ngot :\n%#v\n\nwant :\n%#v\n\n", a, expect)
|
||||
}
|
||||
}
|
||||
|
||||
func TestPointerStruct(t *testing.T) {
|
||||
s1 := simpleTest{}
|
||||
s2 := simpleTest{19}
|
||||
a := pointerTest{&s1}
|
||||
b := pointerTest{&s2}
|
||||
if err := Merge(&a, b); err != nil {
|
||||
t.FailNow()
|
||||
}
|
||||
if a.C.Value != b.C.Value {
|
||||
t.Fatalf("b not merged in properly: a.C.Value(%d) != b.C.Value(%d)", a.C.Value, b.C.Value)
|
||||
}
|
||||
}
|
||||
|
||||
type embeddingStruct struct {
|
||||
embeddedStruct
|
||||
}
|
||||
|
||||
type embeddedStruct struct {
|
||||
A string
|
||||
}
|
||||
|
||||
func TestEmbeddedStruct(t *testing.T) {
|
||||
tests := []struct {
|
||||
src embeddingStruct
|
||||
dst embeddingStruct
|
||||
expected embeddingStruct
|
||||
}{
|
||||
{
|
||||
src: embeddingStruct{
|
||||
embeddedStruct{"foo"},
|
||||
},
|
||||
dst: embeddingStruct{
|
||||
embeddedStruct{""},
|
||||
},
|
||||
expected: embeddingStruct{
|
||||
embeddedStruct{"foo"},
|
||||
},
|
||||
},
|
||||
{
|
||||
src: embeddingStruct{
|
||||
embeddedStruct{""},
|
||||
},
|
||||
dst: embeddingStruct{
|
||||
embeddedStruct{"bar"},
|
||||
},
|
||||
expected: embeddingStruct{
|
||||
embeddedStruct{"bar"},
|
||||
},
|
||||
},
|
||||
{
|
||||
src: embeddingStruct{
|
||||
embeddedStruct{"foo"},
|
||||
},
|
||||
dst: embeddingStruct{
|
||||
embeddedStruct{"bar"},
|
||||
},
|
||||
expected: embeddingStruct{
|
||||
embeddedStruct{"bar"},
|
||||
},
|
||||
},
|
||||
}
|
||||
|
||||
for _, test := range tests {
|
||||
err := Merge(&test.dst, test.src)
|
||||
if err != nil {
|
||||
t.Errorf("unexpected error: %v", err)
|
||||
continue
|
||||
}
|
||||
if !reflect.DeepEqual(test.dst, test.expected) {
|
||||
t.Errorf("unexpected output\nexpected:\n%+v\nsaw:\n%+v\n", test.expected, test.dst)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func TestPointerStructNil(t *testing.T) {
|
||||
a := pointerTest{nil}
|
||||
b := pointerTest{&simpleTest{19}}
|
||||
if err := Merge(&a, b); err != nil {
|
||||
t.FailNow()
|
||||
}
|
||||
if a.C.Value != b.C.Value {
|
||||
t.Fatalf("b not merged in a properly: a.C.Value(%d) != b.C.Value(%d)", a.C.Value, b.C.Value)
|
||||
}
|
||||
}
|
||||
|
||||
func testSlice(t *testing.T, a []int, b []int) {
|
||||
bc := b
|
||||
e := append(a, b...)
|
||||
|
||||
sa := sliceTest{a}
|
||||
sb := sliceTest{b}
|
||||
if err := Merge(&sa, sb); err != nil {
|
||||
t.FailNow()
|
||||
}
|
||||
if !reflect.DeepEqual(sb.S, bc) {
|
||||
t.Fatalf("Source slice was modified %d != %d", sb.S, bc)
|
||||
}
|
||||
if !reflect.DeepEqual(sa.S, e) {
|
||||
t.Fatalf("b not merged in a proper way %d != %d", sa.S, e)
|
||||
}
|
||||
|
||||
ma := map[string][]int{"S": a}
|
||||
mb := map[string][]int{"S": b}
|
||||
if err := Merge(&ma, mb); err != nil {
|
||||
t.FailNow()
|
||||
}
|
||||
if !reflect.DeepEqual(mb["S"], bc) {
|
||||
t.Fatalf("Source slice was modified %d != %d", mb["S"], bc)
|
||||
}
|
||||
if !reflect.DeepEqual(ma["S"], e) {
|
||||
t.Fatalf("b not merged in a proper way %d != %d", ma["S"], e)
|
||||
}
|
||||
|
||||
if a == nil {
|
||||
// test case with missing dst key
|
||||
ma := map[string][]int{}
|
||||
mb := map[string][]int{"S": b}
|
||||
if err := Merge(&ma, mb); err != nil {
|
||||
t.FailNow()
|
||||
}
|
||||
if !reflect.DeepEqual(mb["S"], bc) {
|
||||
t.Fatalf("Source slice was modified %d != %d", mb["S"], bc)
|
||||
}
|
||||
if !reflect.DeepEqual(ma["S"], e) {
|
||||
t.Fatalf("b not merged in a proper way %d != %d", ma["S"], e)
|
||||
}
|
||||
}
|
||||
|
||||
if b == nil {
|
||||
// test case with missing src key
|
||||
ma := map[string][]int{"S": a}
|
||||
mb := map[string][]int{}
|
||||
if err := Merge(&ma, mb); err != nil {
|
||||
t.FailNow()
|
||||
}
|
||||
if !reflect.DeepEqual(mb["S"], bc) {
|
||||
t.Fatalf("Source slice was modified %d != %d", mb["S"], bc)
|
||||
}
|
||||
if !reflect.DeepEqual(ma["S"], e) {
|
||||
t.Fatalf("b not merged in a proper way %d != %d", ma["S"], e)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func TestSlice(t *testing.T) {
|
||||
testSlice(t, nil, []int{1, 2, 3})
|
||||
testSlice(t, []int{}, []int{1, 2, 3})
|
||||
testSlice(t, []int{1}, []int{2, 3})
|
||||
testSlice(t, []int{1}, []int{})
|
||||
testSlice(t, []int{1}, nil)
|
||||
}
|
||||
|
||||
func TestEmptyMaps(t *testing.T) {
|
||||
a := mapTest{}
|
||||
b := mapTest{
|
||||
map[int]int{},
|
||||
}
|
||||
if err := Merge(&a, b); err != nil {
|
||||
t.Fail()
|
||||
}
|
||||
if !reflect.DeepEqual(a, b) {
|
||||
t.FailNow()
|
||||
}
|
||||
}
|
||||
|
||||
func TestEmptyToEmptyMaps(t *testing.T) {
|
||||
a := mapTest{}
|
||||
b := mapTest{}
|
||||
if err := Merge(&a, b); err != nil {
|
||||
t.Fail()
|
||||
}
|
||||
if !reflect.DeepEqual(a, b) {
|
||||
t.FailNow()
|
||||
}
|
||||
}
|
||||
|
||||
func TestEmptyToNotEmptyMaps(t *testing.T) {
|
||||
a := mapTest{map[int]int{
|
||||
1: 2,
|
||||
3: 4,
|
||||
}}
|
||||
aa := mapTest{map[int]int{
|
||||
1: 2,
|
||||
3: 4,
|
||||
}}
|
||||
b := mapTest{
|
||||
map[int]int{},
|
||||
}
|
||||
if err := Merge(&a, b); err != nil {
|
||||
t.Fail()
|
||||
}
|
||||
if !reflect.DeepEqual(a, aa) {
|
||||
t.FailNow()
|
||||
}
|
||||
}
|
||||
|
||||
func TestMapsWithOverwrite(t *testing.T) {
|
||||
m := map[string]simpleTest{
|
||||
"a": {}, // overwritten by 16
|
||||
"b": {42}, // not overwritten by empty value
|
||||
"c": {13}, // overwritten by 12
|
||||
"d": {61},
|
||||
}
|
||||
n := map[string]simpleTest{
|
||||
"a": {16},
|
||||
"b": {},
|
||||
"c": {12},
|
||||
"e": {14},
|
||||
}
|
||||
expect := map[string]simpleTest{
|
||||
"a": {16},
|
||||
"b": {},
|
||||
"c": {12},
|
||||
"d": {61},
|
||||
"e": {14},
|
||||
}
|
||||
|
||||
if err := MergeWithOverwrite(&m, n); err != nil {
|
||||
t.Fatalf(err.Error())
|
||||
}
|
||||
|
||||
if !reflect.DeepEqual(m, expect) {
|
||||
t.Fatalf("Test failed:\ngot :\n%#v\n\nwant :\n%#v\n\n", m, expect)
|
||||
}
|
||||
}
|
||||
|
||||
func TestMaps(t *testing.T) {
|
||||
m := map[string]simpleTest{
|
||||
"a": {},
|
||||
"b": {42},
|
||||
"c": {13},
|
||||
"d": {61},
|
||||
}
|
||||
n := map[string]simpleTest{
|
||||
"a": {16},
|
||||
"b": {},
|
||||
"c": {12},
|
||||
"e": {14},
|
||||
}
|
||||
expect := map[string]simpleTest{
|
||||
"a": {0},
|
||||
"b": {42},
|
||||
"c": {13},
|
||||
"d": {61},
|
||||
"e": {14},
|
||||
}
|
||||
|
||||
if err := Merge(&m, n); err != nil {
|
||||
t.Fatalf(err.Error())
|
||||
}
|
||||
|
||||
if !reflect.DeepEqual(m, expect) {
|
||||
t.Fatalf("Test failed:\ngot :\n%#v\n\nwant :\n%#v\n\n", m, expect)
|
||||
}
|
||||
if m["a"].Value != 0 {
|
||||
t.Fatalf(`n merged in m because I solved non-addressable map values TODO: m["a"].Value(%d) != n["a"].Value(%d)`, m["a"].Value, n["a"].Value)
|
||||
}
|
||||
if m["b"].Value != 42 {
|
||||
t.Fatalf(`n wrongly merged in m: m["b"].Value(%d) != n["b"].Value(%d)`, m["b"].Value, n["b"].Value)
|
||||
}
|
||||
if m["c"].Value != 13 {
|
||||
t.Fatalf(`n overwritten in m: m["c"].Value(%d) != n["c"].Value(%d)`, m["c"].Value, n["c"].Value)
|
||||
}
|
||||
}
|
||||
|
||||
func TestMapsWithNilPointer(t *testing.T) {
|
||||
m := map[string]*simpleTest{
|
||||
"a": nil,
|
||||
"b": nil,
|
||||
}
|
||||
n := map[string]*simpleTest{
|
||||
"b": nil,
|
||||
"c": nil,
|
||||
}
|
||||
expect := map[string]*simpleTest{
|
||||
"a": nil,
|
||||
"b": nil,
|
||||
"c": nil,
|
||||
}
|
||||
|
||||
if err := Merge(&m, n, WithOverride); err != nil {
|
||||
t.Fatalf(err.Error())
|
||||
}
|
||||
|
||||
if !reflect.DeepEqual(m, expect) {
|
||||
t.Fatalf("Test failed:\ngot :\n%#v\n\nwant :\n%#v\n\n", m, expect)
|
||||
}
|
||||
}
|
||||
|
||||
func TestYAMLMaps(t *testing.T) {
|
||||
thing := loadYAML("testdata/thing.yml")
|
||||
license := loadYAML("testdata/license.yml")
|
||||
ft := thing["fields"].(map[interface{}]interface{})
|
||||
fl := license["fields"].(map[interface{}]interface{})
|
||||
// license has one extra field (site) and another already existing in thing (author) that Mergo won't override.
|
||||
expectedLength := len(ft) + len(fl) - 1
|
||||
if err := Merge(&license, thing); err != nil {
|
||||
t.Fatal(err.Error())
|
||||
}
|
||||
currentLength := len(license["fields"].(map[interface{}]interface{}))
|
||||
if currentLength != expectedLength {
|
||||
t.Fatalf(`thing not merged in license properly, license must have %d elements instead of %d`, expectedLength, currentLength)
|
||||
}
|
||||
fields := license["fields"].(map[interface{}]interface{})
|
||||
if _, ok := fields["id"]; !ok {
|
||||
t.Fatalf(`thing not merged in license properly, license must have a new id field from thing`)
|
||||
}
|
||||
}
|
||||
|
||||
func TestTwoPointerValues(t *testing.T) {
|
||||
a := &simpleTest{}
|
||||
b := &simpleTest{42}
|
||||
if err := Merge(a, b); err != nil {
|
||||
t.Fatalf(`Boom. You crossed the streams: %s`, err)
|
||||
}
|
||||
}
|
||||
|
||||
func TestMap(t *testing.T) {
|
||||
a := complexTest{}
|
||||
a.ID = "athing"
|
||||
c := moreComplextText{a, simpleTest{}, simpleTest{}}
|
||||
b := map[string]interface{}{
|
||||
"ct": map[string]interface{}{
|
||||
"st": map[string]interface{}{
|
||||
"value": 42,
|
||||
},
|
||||
"sz": 1,
|
||||
"id": "bthing",
|
||||
},
|
||||
"st": &simpleTest{144}, // Mapping a reference
|
||||
"zt": simpleTest{299}, // Mapping a missing field (zt doesn't exist)
|
||||
"nt": simpleTest{3},
|
||||
}
|
||||
if err := Map(&c, b); err != nil {
|
||||
t.FailNow()
|
||||
}
|
||||
m := b["ct"].(map[string]interface{})
|
||||
n := m["st"].(map[string]interface{})
|
||||
o := b["st"].(*simpleTest)
|
||||
p := b["nt"].(simpleTest)
|
||||
if c.Ct.St.Value != 42 {
|
||||
t.Fatalf("b not merged in properly: c.Ct.St.Value(%d) != b.Ct.St.Value(%d)", c.Ct.St.Value, n["value"])
|
||||
}
|
||||
if c.St.Value != 144 {
|
||||
t.Fatalf("b not merged in properly: c.St.Value(%d) != b.St.Value(%d)", c.St.Value, o.Value)
|
||||
}
|
||||
if c.Nt.Value != 3 {
|
||||
t.Fatalf("b not merged in properly: c.Nt.Value(%d) != b.Nt.Value(%d)", c.St.Value, p.Value)
|
||||
}
|
||||
if c.Ct.sz == 1 {
|
||||
t.Fatalf("a's private field sz not preserved from merge: c.Ct.sz(%d) == b.Ct.sz(%d)", c.Ct.sz, m["sz"])
|
||||
}
|
||||
if c.Ct.ID == m["id"] {
|
||||
t.Fatalf("a's field ID merged unexpectedly: c.Ct.ID(%s) == b.Ct.ID(%s)", c.Ct.ID, m["id"])
|
||||
}
|
||||
}
|
||||
|
||||
func TestSimpleMap(t *testing.T) {
|
||||
a := simpleTest{}
|
||||
b := map[string]interface{}{
|
||||
"value": 42,
|
||||
}
|
||||
if err := Map(&a, b); err != nil {
|
||||
t.FailNow()
|
||||
}
|
||||
if a.Value != 42 {
|
||||
t.Fatalf("b not merged in properly: a.Value(%d) != b.Value(%v)", a.Value, b["value"])
|
||||
}
|
||||
}
|
||||
|
||||
func TestIfcMap(t *testing.T) {
|
||||
a := ifcTest{}
|
||||
b := ifcTest{42}
|
||||
if err := Map(&a, b); err != nil {
|
||||
t.FailNow()
|
||||
}
|
||||
if a.I != 42 {
|
||||
t.Fatalf("b not merged in properly: a.I(%d) != b.I(%d)", a.I, b.I)
|
||||
}
|
||||
if !reflect.DeepEqual(a, b) {
|
||||
t.FailNow()
|
||||
}
|
||||
}
|
||||
|
||||
func TestIfcMapNoOverwrite(t *testing.T) {
|
||||
a := ifcTest{13}
|
||||
b := ifcTest{42}
|
||||
if err := Map(&a, b); err != nil {
|
||||
t.FailNow()
|
||||
}
|
||||
if a.I != 13 {
|
||||
t.Fatalf("a not left alone: a.I(%d) == b.I(%d)", a.I, b.I)
|
||||
}
|
||||
}
|
||||
|
||||
func TestIfcMapWithOverwrite(t *testing.T) {
|
||||
a := ifcTest{13}
|
||||
b := ifcTest{42}
|
||||
if err := MapWithOverwrite(&a, b); err != nil {
|
||||
t.FailNow()
|
||||
}
|
||||
if a.I != 42 {
|
||||
t.Fatalf("b not merged in properly: a.I(%d) != b.I(%d)", a.I, b.I)
|
||||
}
|
||||
if !reflect.DeepEqual(a, b) {
|
||||
t.FailNow()
|
||||
}
|
||||
}
|
||||
|
||||
type pointerMapTest struct {
|
||||
A int
|
||||
hidden int
|
||||
B *simpleTest
|
||||
}
|
||||
|
||||
func TestBackAndForth(t *testing.T) {
|
||||
pt := pointerMapTest{42, 1, &simpleTest{66}}
|
||||
m := make(map[string]interface{})
|
||||
if err := Map(&m, pt); err != nil {
|
||||
t.FailNow()
|
||||
}
|
||||
var (
|
||||
v interface{}
|
||||
ok bool
|
||||
)
|
||||
if v, ok = m["a"]; v.(int) != pt.A || !ok {
|
||||
t.Fatalf("pt not merged in properly: m[`a`](%d) != pt.A(%d)", v, pt.A)
|
||||
}
|
||||
if v, ok = m["b"]; !ok {
|
||||
t.Fatalf("pt not merged in properly: B is missing in m")
|
||||
}
|
||||
var st *simpleTest
|
||||
if st = v.(*simpleTest); st.Value != 66 {
|
||||
t.Fatalf("something went wrong while mapping pt on m, B wasn't copied")
|
||||
}
|
||||
bpt := pointerMapTest{}
|
||||
if err := Map(&bpt, m); err != nil {
|
||||
t.Fatal(err)
|
||||
}
|
||||
if bpt.A != pt.A {
|
||||
t.Fatalf("pt not merged in properly: bpt.A(%d) != pt.A(%d)", bpt.A, pt.A)
|
||||
}
|
||||
if bpt.hidden == pt.hidden {
|
||||
t.Fatalf("pt unexpectedly merged: bpt.hidden(%d) == pt.hidden(%d)", bpt.hidden, pt.hidden)
|
||||
}
|
||||
if bpt.B.Value != pt.B.Value {
|
||||
t.Fatalf("pt not merged in properly: bpt.B.Value(%d) != pt.B.Value(%d)", bpt.B.Value, pt.B.Value)
|
||||
}
|
||||
}
|
||||
|
||||
func TestEmbeddedPointerUnpacking(t *testing.T) {
|
||||
tests := []struct{ input pointerMapTest }{
|
||||
{pointerMapTest{42, 1, nil}},
|
||||
{pointerMapTest{42, 1, &simpleTest{66}}},
|
||||
}
|
||||
newValue := 77
|
||||
m := map[string]interface{}{
|
||||
"b": map[string]interface{}{
|
||||
"value": newValue,
|
||||
},
|
||||
}
|
||||
for _, test := range tests {
|
||||
pt := test.input
|
||||
if err := MapWithOverwrite(&pt, m); err != nil {
|
||||
t.FailNow()
|
||||
}
|
||||
if pt.B.Value != newValue {
|
||||
t.Fatalf("pt not mapped properly: pt.A.Value(%d) != m[`b`][`value`](%d)", pt.B.Value, newValue)
|
||||
}
|
||||
|
||||
}
|
||||
}
|
||||
|
||||
type structWithTimePointer struct {
|
||||
Birth *time.Time
|
||||
}
|
||||
|
||||
func TestTime(t *testing.T) {
|
||||
now := time.Now()
|
||||
dataStruct := structWithTimePointer{
|
||||
Birth: &now,
|
||||
}
|
||||
dataMap := map[string]interface{}{
|
||||
"Birth": &now,
|
||||
}
|
||||
b := structWithTimePointer{}
|
||||
if err := Merge(&b, dataStruct); err != nil {
|
||||
t.FailNow()
|
||||
}
|
||||
if b.Birth.IsZero() {
|
||||
t.Fatalf("time.Time not merged in properly: b.Birth(%v) != dataStruct['Birth'](%v)", b.Birth, dataStruct.Birth)
|
||||
}
|
||||
if b.Birth != dataStruct.Birth {
|
||||
t.Fatalf("time.Time not merged in properly: b.Birth(%v) != dataStruct['Birth'](%v)", b.Birth, dataStruct.Birth)
|
||||
}
|
||||
b = structWithTimePointer{}
|
||||
if err := Map(&b, dataMap); err != nil {
|
||||
t.FailNow()
|
||||
}
|
||||
if b.Birth.IsZero() {
|
||||
t.Fatalf("time.Time not merged in properly: b.Birth(%v) != dataMap['Birth'](%v)", b.Birth, dataMap["Birth"])
|
||||
}
|
||||
}
|
||||
|
||||
type simpleNested struct {
|
||||
A int
|
||||
}
|
||||
|
||||
type structWithNestedPtrValueMap struct {
|
||||
NestedPtrValue map[string]*simpleNested
|
||||
}
|
||||
|
||||
func TestNestedPtrValueInMap(t *testing.T) {
|
||||
src := &structWithNestedPtrValueMap{
|
||||
NestedPtrValue: map[string]*simpleNested{
|
||||
"x": {
|
||||
A: 1,
|
||||
},
|
||||
},
|
||||
}
|
||||
dst := &structWithNestedPtrValueMap{
|
||||
NestedPtrValue: map[string]*simpleNested{
|
||||
"x": {},
|
||||
},
|
||||
}
|
||||
if err := Map(dst, src); err != nil {
|
||||
t.FailNow()
|
||||
}
|
||||
if dst.NestedPtrValue["x"].A == 0 {
|
||||
t.Fatalf("Nested Ptr value not merged in properly: dst.NestedPtrValue[\"x\"].A(%v) != src.NestedPtrValue[\"x\"].A(%v)", dst.NestedPtrValue["x"].A, src.NestedPtrValue["x"].A)
|
||||
}
|
||||
}
|
||||
|
||||
func loadYAML(path string) (m map[string]interface{}) {
|
||||
m = make(map[string]interface{})
|
||||
raw, _ := ioutil.ReadFile(path)
|
||||
_ = yaml.Unmarshal(raw, &m)
|
||||
return
|
||||
}
|
||||
|
||||
type structWithMap struct {
|
||||
m map[string]structWithUnexportedProperty
|
||||
}
|
||||
|
||||
type structWithUnexportedProperty struct {
|
||||
s string
|
||||
}
|
||||
|
||||
func TestUnexportedProperty(t *testing.T) {
|
||||
a := structWithMap{map[string]structWithUnexportedProperty{
|
||||
"key": {"hello"},
|
||||
}}
|
||||
b := structWithMap{map[string]structWithUnexportedProperty{
|
||||
"key": {"hi"},
|
||||
}}
|
||||
defer func() {
|
||||
if r := recover(); r != nil {
|
||||
t.Errorf("Should not have panicked")
|
||||
}
|
||||
}()
|
||||
Merge(&a, b)
|
||||
}
|
||||
|
||||
type structWithBoolPointer struct {
|
||||
C *bool
|
||||
}
|
||||
|
||||
func TestBooleanPointer(t *testing.T) {
|
||||
bt, bf := true, false
|
||||
src := structWithBoolPointer{
|
||||
&bt,
|
||||
}
|
||||
dst := structWithBoolPointer{
|
||||
&bf,
|
||||
}
|
||||
if err := Merge(&dst, src); err != nil {
|
||||
t.FailNow()
|
||||
}
|
||||
if dst.C == src.C {
|
||||
t.Fatalf("dst.C should be a different pointer than src.C")
|
||||
}
|
||||
if *dst.C != *src.C {
|
||||
t.Fatalf("dst.C should be true")
|
||||
}
|
||||
}
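The test file above doubles as usage documentation for the three mergo entry points it exercises: `Merge` only fills empty destination fields, `MergeWithOverwrite` lets non-empty source values replace existing ones, and `Map` moves data between structs and `map[string]interface{}` using lower-cased field names. A condensed usage sketch based on that observed behaviour follows; the `config` struct and its values are made up for illustration.

```go
package main

import (
	"fmt"

	"github.com/imdario/mergo"
)

type config struct {
	Host string
	Port int
}

func main() {
	defaults := config{Host: "localhost", Port: 8080}
	userCfg := config{Port: 9090}

	// Merge keeps userCfg.Port (non-empty fields are never overwritten)
	// and fills in the missing Host from defaults.
	if err := mergo.Merge(&userCfg, defaults); err != nil {
		panic(err)
	}
	fmt.Printf("%+v\n", userCfg) // {Host:localhost Port:9090}

	// MergeWithOverwrite lets non-empty source values win; empty source
	// fields still leave the destination alone, as TestComplexStructWithOverwrite shows.
	override := config{Host: "example.com"}
	if err := mergo.MergeWithOverwrite(&userCfg, override); err != nil {
		panic(err)
	}
	fmt.Printf("%+v\n", userCfg) // {Host:example.com Port:9090}

	// Map transfers values between a struct and a map, keyed by the
	// lower-cased field names seen in TestBackAndForth above.
	asMap := map[string]interface{}{}
	if err := mergo.Map(&asMap, userCfg); err != nil {
		panic(err)
	}
	fmt.Println(asMap["host"], asMap["port"])
}
```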
|
3
vendor/github.com/json-iterator/go/.codecov.yml
generated
vendored
Normal file
@ -0,0 +1,3 @@
|
||||
ignore:
|
||||
- "output_tests/.*"
|
||||
|
4
vendor/github.com/json-iterator/go/.gitignore
generated
vendored
Normal file
@ -0,0 +1,4 @@
|
||||
/vendor
|
||||
/bug_test.go
|
||||
/coverage.txt
|
||||
/.idea
|
14
vendor/github.com/json-iterator/go/.travis.yml
generated
vendored
Normal file
@ -0,0 +1,14 @@
|
||||
language: go
|
||||
|
||||
go:
|
||||
- 1.8.x
|
||||
- 1.x
|
||||
|
||||
before_install:
|
||||
- go get -t -v ./...
|
||||
|
||||
script:
|
||||
- ./test.sh
|
||||
|
||||
after_success:
|
||||
- bash <(curl -s https://codecov.io/bash)
|
21
vendor/github.com/json-iterator/go/Gopkg.lock
generated
vendored
Normal file
@ -0,0 +1,21 @@
|
||||
# This file is autogenerated, do not edit; changes may be undone by the next 'dep ensure'.
|
||||
|
||||
|
||||
[[projects]]
|
||||
name = "github.com/modern-go/concurrent"
|
||||
packages = ["."]
|
||||
revision = "e0a39a4cb4216ea8db28e22a69f4ec25610d513a"
|
||||
version = "1.0.0"
|
||||
|
||||
[[projects]]
|
||||
name = "github.com/modern-go/reflect2"
|
||||
packages = ["."]
|
||||
revision = "1df9eeb2bb81f327b96228865c5687bc2194af3f"
|
||||
version = "1.0.0"
|
||||
|
||||
[solve-meta]
|
||||
analyzer-name = "dep"
|
||||
analyzer-version = 1
|
||||
inputs-digest = "ac7003b5a981716353a43055ab7d4c5357403cb30a60de2dbdeb446c1544beaa"
|
||||
solver-name = "gps-cdcl"
|
||||
solver-version = 1
|
26
vendor/github.com/json-iterator/go/Gopkg.toml
generated
vendored
Normal file
@ -0,0 +1,26 @@
|
||||
# Gopkg.toml example
|
||||
#
|
||||
# Refer to https://github.com/golang/dep/blob/master/docs/Gopkg.toml.md
|
||||
# for detailed Gopkg.toml documentation.
|
||||
#
|
||||
# required = ["github.com/user/thing/cmd/thing"]
|
||||
# ignored = ["github.com/user/project/pkgX", "bitbucket.org/user/project/pkgA/pkgY"]
|
||||
#
|
||||
# [[constraint]]
|
||||
# name = "github.com/user/project"
|
||||
# version = "1.0.0"
|
||||
#
|
||||
# [[constraint]]
|
||||
# name = "github.com/user/project2"
|
||||
# branch = "dev"
|
||||
# source = "github.com/myfork/project2"
|
||||
#
|
||||
# [[override]]
|
||||
# name = "github.com/x/y"
|
||||
# version = "2.4.0"
|
||||
|
||||
ignored = ["github.com/davecgh/go-spew*","github.com/google/gofuzz*","github.com/stretchr/testify*"]
|
||||
|
||||
[[constraint]]
|
||||
name = "github.com/modern-go/reflect2"
|
||||
version = "1.0.0"
|
21
vendor/github.com/json-iterator/go/LICENSE
generated
vendored
Normal file
@ -0,0 +1,21 @@
|
||||
MIT License
|
||||
|
||||
Copyright (c) 2016 json-iterator
|
||||
|
||||
Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||
of this software and associated documentation files (the "Software"), to deal
|
||||
in the Software without restriction, including without limitation the rights
|
||||
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
||||
copies of the Software, and to permit persons to whom the Software is
|
||||
furnished to do so, subject to the following conditions:
|
||||
|
||||
The above copyright notice and this permission notice shall be included in all
|
||||
copies or substantial portions of the Software.
|
||||
|
||||
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
||||
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
|
||||
SOFTWARE.
|
88
vendor/github.com/json-iterator/go/README.md
generated
vendored
Normal file
@ -0,0 +1,88 @@
|
||||
[Sourcegraph](https://sourcegraph.com/github.com/json-iterator/go?badge)
[GoDoc](http://godoc.org/github.com/json-iterator/go)
[Build Status](https://travis-ci.org/json-iterator/go)
[codecov](https://codecov.io/gh/json-iterator/go)
[Go Report Card](https://goreportcard.com/report/github.com/json-iterator/go)
[License](https://raw.githubusercontent.com/json-iterator/go/master/LICENSE)
[Gitter](https://gitter.im/json-iterator/Lobby)
|
||||
|
||||
A high-performance 100% compatible drop-in replacement of "encoding/json"
|
||||
|
||||
You can also use thrift like JSON using [thrift-iterator](https://github.com/thrift-iterator/go)
|
||||
|
||||
```
|
||||
Go developers are welcome to join us: DiDi Chuxing Platform Technology Department, taowen@didichuxing.com
|
||||
```
|
||||
|
||||
# Benchmark
|
||||
|
||||

|
||||
|
||||
Source code: https://github.com/json-iterator/go-benchmark/blob/master/src/github.com/json-iterator/go-benchmark/benchmark_medium_payload_test.go
|
||||
|
||||
Raw Result (easyjson requires static code generation)
|
||||
|
||||
| | ns/op | allocation bytes | allocation times |
|
||||
| --- | --- | --- | --- |
|
||||
| std decode | 35510 ns/op | 1960 B/op | 99 allocs/op |
|
||||
| easyjson decode | 8499 ns/op | 160 B/op | 4 allocs/op |
|
||||
| jsoniter decode | 5623 ns/op | 160 B/op | 3 allocs/op |
|
||||
| std encode | 2213 ns/op | 712 B/op | 5 allocs/op |
|
||||
| easyjson encode | 883 ns/op | 576 B/op | 3 allocs/op |
|
||||
| jsoniter encode | 837 ns/op | 384 B/op | 4 allocs/op |
|
||||
|
||||
# Usage
|
||||
|
||||
100% compatibility with standard lib
|
||||
|
||||
Replace
|
||||
|
||||
```go
|
||||
import "encoding/json"
|
||||
json.Marshal(&data)
|
||||
```
|
||||
|
||||
with
|
||||
|
||||
```go
|
||||
import "github.com/json-iterator/go"
|
||||
|
||||
var json = jsoniter.ConfigCompatibleWithStandardLibrary
|
||||
json.Marshal(&data)
|
||||
```
|
||||
|
||||
Replace
|
||||
|
||||
```go
|
||||
import "encoding/json"
|
||||
json.Unmarshal(input, &data)
|
||||
```
|
||||
|
||||
with
|
||||
|
||||
```go
|
||||
import "github.com/json-iterator/go"
|
||||
|
||||
var json = jsoniter.ConfigCompatibleWithStandardLibrary
|
||||
json.Unmarshal(input, &data)
|
||||
```
|
||||
|
||||
[More documentation](http://jsoniter.com/migrate-from-go-std.html)
|
||||
|
||||
# How to get
|
||||
|
||||
```
|
||||
go get github.com/json-iterator/go
|
||||
```
|
||||
|
||||
# Contribution Welcomed !
|
||||
|
||||
Contributors
|
||||
|
||||
* [thockin](https://github.com/thockin)
|
||||
* [mattn](https://github.com/mattn)
|
||||
* [cch123](https://github.com/cch123)
|
||||
* [Oleg Shaldybin](https://github.com/olegshaldybin)
|
||||
* [Jason Toffaletti](https://github.com/toffaletti)
|
||||
|
||||
Report issue or pull request, or email taowen@gmail.com, or [Gitter](https://gitter.im/json-iterator/Lobby)
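The README shows the drop-in replacement only in fragments; stitched together, a complete round trip with the compatible configuration could look like the sketch below. The `device` struct and its values are invented purely for the example.

```go
package main

import (
	"fmt"

	jsoniter "github.com/json-iterator/go"
)

// json mirrors the drop-in pattern from the README: one package-level
// variable, and the rest of the file reads like encoding/json.
var json = jsoniter.ConfigCompatibleWithStandardLibrary

type device struct {
	Name  string `json:"name"`
	Count int    `json:"count"`
}

func main() {
	// Marshal exactly as with encoding/json.
	out, err := json.Marshal(device{Name: "fpga", Count: 2})
	if err != nil {
		panic(err)
	}
	fmt.Println(string(out)) // {"name":"fpga","count":2}

	// Unmarshal back into a struct.
	var d device
	if err := json.Unmarshal(out, &d); err != nil {
		panic(err)
	}
	fmt.Printf("%+v\n", d) // {Name:fpga Count:2}
}
```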
|
141
vendor/github.com/json-iterator/go/adapter.go
generated
vendored
Normal file
@ -0,0 +1,141 @@
|
||||
package jsoniter
|
||||
|
||||
import (
|
||||
"bytes"
|
||||
"io"
|
||||
)
|
||||
|
||||
// RawMessage to make replace json with jsoniter
|
||||
type RawMessage []byte
|
||||
|
||||
// Unmarshal adapts to json/encoding Unmarshal API
|
||||
//
|
||||
// Unmarshal parses the JSON-encoded data and stores the result in the value pointed to by v.
|
||||
// Refer to https://godoc.org/encoding/json#Unmarshal for more information
|
||||
func Unmarshal(data []byte, v interface{}) error {
|
||||
return ConfigDefault.Unmarshal(data, v)
|
||||
}
|
||||
|
||||
// UnmarshalFromString convenient method to read from string instead of []byte
|
||||
func UnmarshalFromString(str string, v interface{}) error {
|
||||
return ConfigDefault.UnmarshalFromString(str, v)
|
||||
}
|
||||
|
||||
// Get quick method to get value from deeply nested JSON structure
|
||||
func Get(data []byte, path ...interface{}) Any {
|
||||
return ConfigDefault.Get(data, path...)
|
||||
}
|
||||
|
||||
// Marshal adapts to json/encoding Marshal API
|
||||
//
|
||||
// Marshal returns the JSON encoding of v, adapts to json/encoding Marshal API
|
||||
// Refer to https://godoc.org/encoding/json#Marshal for more information
|
||||
func Marshal(v interface{}) ([]byte, error) {
|
||||
return ConfigDefault.Marshal(v)
|
||||
}
|
||||
|
||||
// MarshalIndent same as json.MarshalIndent. Prefix is not supported.
|
||||
func MarshalIndent(v interface{}, prefix, indent string) ([]byte, error) {
|
||||
return ConfigDefault.MarshalIndent(v, prefix, indent)
|
||||
}
|
||||
|
||||
// MarshalToString convenient method to write as string instead of []byte
|
||||
func MarshalToString(v interface{}) (string, error) {
|
||||
return ConfigDefault.MarshalToString(v)
|
||||
}
|
||||
|
||||
// NewDecoder adapts to json/stream NewDecoder API.
|
||||
//
|
||||
// NewDecoder returns a new decoder that reads from r.
|
||||
//
|
||||
// Instead of a json/encoding Decoder, an Decoder is returned
|
||||
// Refer to https://godoc.org/encoding/json#NewDecoder for more information
|
||||
func NewDecoder(reader io.Reader) *Decoder {
|
||||
return ConfigDefault.NewDecoder(reader)
|
||||
}
|
||||
|
||||
// Decoder reads and decodes JSON values from an input stream.
|
||||
// Decoder provides identical APIs with json/stream Decoder (Token() and UseNumber() are in progress)
|
||||
type Decoder struct {
|
||||
iter *Iterator
|
||||
}
|
||||
|
||||
// Decode decode JSON into interface{}
|
||||
func (adapter *Decoder) Decode(obj interface{}) error {
|
||||
if adapter.iter.head == adapter.iter.tail && adapter.iter.reader != nil {
|
||||
if !adapter.iter.loadMore() {
|
||||
return io.EOF
|
||||
}
|
||||
}
|
||||
adapter.iter.ReadVal(obj)
|
||||
err := adapter.iter.Error
|
||||
if err == io.EOF {
|
||||
return nil
|
||||
}
|
||||
return adapter.iter.Error
|
||||
}
|
||||
|
||||
// More is there more?
|
||||
func (adapter *Decoder) More() bool {
|
||||
return adapter.iter.head != adapter.iter.tail
|
||||
}
|
||||
|
||||
// Buffered remaining buffer
|
||||
func (adapter *Decoder) Buffered() io.Reader {
|
||||
remaining := adapter.iter.buf[adapter.iter.head:adapter.iter.tail]
|
||||
return bytes.NewReader(remaining)
|
||||
}
|
||||
|
||||
// UseNumber causes the Decoder to unmarshal a number into an interface{} as a
|
||||
// Number instead of as a float64.
|
||||
func (adapter *Decoder) UseNumber() {
|
||||
cfg := adapter.iter.cfg.configBeforeFrozen
|
||||
cfg.UseNumber = true
|
||||
adapter.iter.cfg = cfg.frozeWithCacheReuse()
|
||||
}
|
||||
|
||||
// DisallowUnknownFields causes the Decoder to return an error when the destination
|
||||
// is a struct and the input contains object keys which do not match any
|
||||
// non-ignored, exported fields in the destination.
|
||||
func (adapter *Decoder) DisallowUnknownFields() {
|
||||
cfg := adapter.iter.cfg.configBeforeFrozen
|
||||
cfg.DisallowUnknownFields = true
|
||||
adapter.iter.cfg = cfg.frozeWithCacheReuse()
|
||||
}
|
||||
|
||||
// NewEncoder same as json.NewEncoder
|
||||
func NewEncoder(writer io.Writer) *Encoder {
|
||||
return ConfigDefault.NewEncoder(writer)
|
||||
}
|
||||
|
||||
// Encoder same as json.Encoder
|
||||
type Encoder struct {
|
||||
stream *Stream
|
||||
}
|
||||
|
||||
// Encode encode interface{} as JSON to io.Writer
|
||||
func (adapter *Encoder) Encode(val interface{}) error {
|
||||
adapter.stream.WriteVal(val)
|
||||
adapter.stream.WriteRaw("\n")
|
||||
adapter.stream.Flush()
|
||||
return adapter.stream.Error
|
||||
}
|
||||
|
||||
// SetIndent set the indention. Prefix is not supported
|
||||
func (adapter *Encoder) SetIndent(prefix, indent string) {
|
||||
config := adapter.stream.cfg.configBeforeFrozen
|
||||
config.IndentionStep = len(indent)
|
||||
adapter.stream.cfg = config.frozeWithCacheReuse()
|
||||
}
|
||||
|
||||
// SetEscapeHTML escape html by default, set to false to disable
|
||||
func (adapter *Encoder) SetEscapeHTML(escapeHTML bool) {
|
||||
config := adapter.stream.cfg.configBeforeFrozen
|
||||
config.EscapeHTML = escapeHTML
|
||||
adapter.stream.cfg = config.frozeWithCacheReuse()
|
||||
}
|
||||
|
||||
// Valid reports whether data is a valid JSON encoding.
|
||||
func Valid(data []byte) bool {
|
||||
return ConfigDefault.Valid(data)
|
||||
}
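adapter.go above maps the `encoding/json` streaming API onto jsoniter's Iterator and Stream types. A hedged sketch of that streaming path, decoding two concatenated JSON objects and enabling `UseNumber`, based only on the methods shown in the file; the input payload is invented for the example.

```go
package main

import (
	"fmt"
	"io"
	"strings"

	jsoniter "github.com/json-iterator/go"
)

func main() {
	// Two JSON documents back to back, as a stream decoder would see them.
	input := strings.NewReader(`{"n": 1}{"n": 2}`)

	dec := jsoniter.NewDecoder(input)
	dec.UseNumber() // numbers come back as json.Number instead of float64

	for {
		var doc map[string]interface{}
		if err := dec.Decode(&doc); err == io.EOF {
			break // Decode returns io.EOF once the reader is exhausted
		} else if err != nil {
			panic(err)
		}
		fmt.Println(doc["n"])
	}
}
```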
|
321
vendor/github.com/json-iterator/go/any.go
generated
vendored
Normal file
@ -0,0 +1,321 @@
|
||||
package jsoniter
|
||||
|
||||
import (
|
||||
"errors"
|
||||
"fmt"
|
||||
"github.com/modern-go/reflect2"
|
||||
"io"
|
||||
"reflect"
|
||||
"strconv"
|
||||
"unsafe"
|
||||
)
|
||||
|
||||
// Any generic object representation.
|
||||
// The lazy json implementation holds []byte and parse lazily.
|
||||
type Any interface {
|
||||
LastError() error
|
||||
ValueType() ValueType
|
||||
MustBeValid() Any
|
||||
ToBool() bool
|
||||
ToInt() int
|
||||
ToInt32() int32
|
||||
ToInt64() int64
|
||||
ToUint() uint
|
||||
ToUint32() uint32
|
||||
ToUint64() uint64
|
||||
ToFloat32() float32
|
||||
ToFloat64() float64
|
||||
ToString() string
|
||||
ToVal(val interface{})
|
||||
Get(path ...interface{}) Any
|
||||
Size() int
|
||||
Keys() []string
|
||||
GetInterface() interface{}
|
||||
WriteTo(stream *Stream)
|
||||
}
|
||||
|
||||
type baseAny struct{}
|
||||
|
||||
func (any *baseAny) Get(path ...interface{}) Any {
|
||||
return &invalidAny{baseAny{}, fmt.Errorf("GetIndex %v from simple value", path)}
|
||||
}
|
||||
|
||||
func (any *baseAny) Size() int {
|
||||
return 0
|
||||
}
|
||||
|
||||
func (any *baseAny) Keys() []string {
|
||||
return []string{}
|
||||
}
|
||||
|
||||
func (any *baseAny) ToVal(obj interface{}) {
|
||||
panic("not implemented")
|
||||
}
|
||||
|
||||
// WrapInt32 turn int32 into Any interface
|
||||
func WrapInt32(val int32) Any {
|
||||
return &int32Any{baseAny{}, val}
|
||||
}
|
||||
|
||||
// WrapInt64 turn int64 into Any interface
|
||||
func WrapInt64(val int64) Any {
|
||||
return &int64Any{baseAny{}, val}
|
||||
}
|
||||
|
||||
// WrapUint32 turn uint32 into Any interface
|
||||
func WrapUint32(val uint32) Any {
|
||||
return &uint32Any{baseAny{}, val}
|
||||
}
|
||||
|
||||
// WrapUint64 turn uint64 into Any interface
|
||||
func WrapUint64(val uint64) Any {
|
||||
return &uint64Any{baseAny{}, val}
|
||||
}
|
||||
|
||||
// WrapFloat64 turn float64 into Any interface
|
||||
func WrapFloat64(val float64) Any {
|
||||
return &floatAny{baseAny{}, val}
|
||||
}
|
||||
|
||||
// WrapString turn string into Any interface
|
||||
func WrapString(val string) Any {
|
||||
return &stringAny{baseAny{}, val}
|
||||
}
|
||||
|
||||
// Wrap turn a go object into Any interface
|
||||
func Wrap(val interface{}) Any {
|
||||
if val == nil {
|
||||
return &nilAny{}
|
||||
}
|
||||
asAny, isAny := val.(Any)
|
||||
if isAny {
|
||||
return asAny
|
||||
}
|
||||
typ := reflect2.TypeOf(val)
|
||||
switch typ.Kind() {
|
||||
case reflect.Slice:
|
||||
return wrapArray(val)
|
||||
case reflect.Struct:
|
||||
return wrapStruct(val)
|
||||
case reflect.Map:
|
||||
return wrapMap(val)
|
||||
case reflect.String:
|
||||
return WrapString(val.(string))
|
||||
case reflect.Int:
|
||||
if strconv.IntSize == 32 {
|
||||
return WrapInt32(int32(val.(int)))
|
||||
}
|
||||
return WrapInt64(int64(val.(int)))
|
||||
case reflect.Int8:
|
||||
return WrapInt32(int32(val.(int8)))
|
||||
case reflect.Int16:
|
||||
return WrapInt32(int32(val.(int16)))
|
||||
case reflect.Int32:
|
||||
return WrapInt32(val.(int32))
|
||||
case reflect.Int64:
|
||||
return WrapInt64(val.(int64))
|
||||
case reflect.Uint:
|
||||
if strconv.IntSize == 32 {
|
||||
return WrapUint32(uint32(val.(uint)))
|
||||
}
|
||||
return WrapUint64(uint64(val.(uint)))
|
||||
case reflect.Uintptr:
|
||||
if ptrSize == 32 {
|
||||
return WrapUint32(uint32(val.(uintptr)))
|
||||
}
|
||||
return WrapUint64(uint64(val.(uintptr)))
|
||||
case reflect.Uint8:
|
||||
return WrapUint32(uint32(val.(uint8)))
|
||||
case reflect.Uint16:
|
||||
return WrapUint32(uint32(val.(uint16)))
|
||||
case reflect.Uint32:
|
||||
return WrapUint32(uint32(val.(uint32)))
|
||||
case reflect.Uint64:
|
||||
return WrapUint64(val.(uint64))
|
||||
case reflect.Float32:
|
||||
return WrapFloat64(float64(val.(float32)))
|
||||
case reflect.Float64:
|
||||
return WrapFloat64(val.(float64))
|
||||
case reflect.Bool:
|
||||
if val.(bool) == true {
|
||||
return &trueAny{}
|
||||
}
|
||||
return &falseAny{}
|
||||
}
|
||||
return &invalidAny{baseAny{}, fmt.Errorf("unsupported type: %v", typ)}
|
||||
}
|
||||
|
||||
// ReadAny read next JSON element as an Any object. It is a better json.RawMessage.
|
||||
func (iter *Iterator) ReadAny() Any {
|
||||
return iter.readAny()
|
||||
}
|
||||
|
||||
func (iter *Iterator) readAny() Any {
|
||||
c := iter.nextToken()
|
||||
switch c {
|
||||
case '"':
|
||||
iter.unreadByte()
|
||||
return &stringAny{baseAny{}, iter.ReadString()}
|
||||
case 'n':
|
||||
iter.skipThreeBytes('u', 'l', 'l') // null
|
||||
return &nilAny{}
|
||||
case 't':
|
||||
iter.skipThreeBytes('r', 'u', 'e') // true
|
||||
return &trueAny{}
|
||||
case 'f':
|
||||
iter.skipFourBytes('a', 'l', 's', 'e') // false
|
||||
return &falseAny{}
|
||||
case '{':
|
||||
return iter.readObjectAny()
|
||||
case '[':
|
||||
return iter.readArrayAny()
|
||||
case '-':
|
||||
return iter.readNumberAny(false)
|
||||
case 0:
|
||||
return &invalidAny{baseAny{}, errors.New("input is empty")}
|
||||
default:
|
||||
return iter.readNumberAny(true)
|
||||
}
|
||||
}
|
||||
|
||||
func (iter *Iterator) readNumberAny(positive bool) Any {
|
||||
iter.startCapture(iter.head - 1)
|
||||
iter.skipNumber()
|
||||
lazyBuf := iter.stopCapture()
|
||||
return &numberLazyAny{baseAny{}, iter.cfg, lazyBuf, nil}
|
||||
}
|
||||
|
||||
func (iter *Iterator) readObjectAny() Any {
|
||||
iter.startCapture(iter.head - 1)
|
||||
iter.skipObject()
|
||||
lazyBuf := iter.stopCapture()
|
||||
return &objectLazyAny{baseAny{}, iter.cfg, lazyBuf, nil}
|
||||
}
|
||||
|
||||
func (iter *Iterator) readArrayAny() Any {
|
||||
iter.startCapture(iter.head - 1)
|
||||
iter.skipArray()
|
||||
lazyBuf := iter.stopCapture()
|
||||
return &arrayLazyAny{baseAny{}, iter.cfg, lazyBuf, nil}
|
||||
}
|
||||
|
||||
func locateObjectField(iter *Iterator, target string) []byte {
|
||||
var found []byte
|
||||
iter.ReadObjectCB(func(iter *Iterator, field string) bool {
|
||||
if field == target {
|
||||
found = iter.SkipAndReturnBytes()
|
||||
return false
|
||||
}
|
||||
iter.Skip()
|
||||
return true
|
||||
})
|
||||
return found
|
||||
}
|
||||
|
||||
func locateArrayElement(iter *Iterator, target int) []byte {
|
||||
var found []byte
|
||||
n := 0
|
||||
iter.ReadArrayCB(func(iter *Iterator) bool {
|
||||
if n == target {
|
||||
found = iter.SkipAndReturnBytes()
|
||||
return false
|
||||
}
|
||||
iter.Skip()
|
||||
n++
|
||||
return true
|
||||
})
|
||||
return found
|
||||
}
|
||||
|
||||
func locatePath(iter *Iterator, path []interface{}) Any {
|
||||
for i, pathKeyObj := range path {
|
||||
switch pathKey := pathKeyObj.(type) {
|
||||
case string:
|
||||
valueBytes := locateObjectField(iter, pathKey)
|
||||
if valueBytes == nil {
|
||||
return newInvalidAny(path[i:])
|
||||
}
|
||||
iter.ResetBytes(valueBytes)
|
||||
case int:
|
||||
valueBytes := locateArrayElement(iter, pathKey)
|
||||
if valueBytes == nil {
|
||||
return newInvalidAny(path[i:])
|
||||
}
|
||||
iter.ResetBytes(valueBytes)
|
||||
case int32:
|
||||
if '*' == pathKey {
|
||||
return iter.readAny().Get(path[i:]...)
|
||||
}
|
||||
return newInvalidAny(path[i:])
|
||||
default:
|
||||
return newInvalidAny(path[i:])
|
||||
}
|
||||
}
|
||||
if iter.Error != nil && iter.Error != io.EOF {
|
||||
return &invalidAny{baseAny{}, iter.Error}
|
||||
}
|
||||
return iter.readAny()
|
||||
}
|
||||
|
||||
var anyType = reflect2.TypeOfPtr((*Any)(nil)).Elem()
|
||||
|
||||
func createDecoderOfAny(ctx *ctx, typ reflect2.Type) ValDecoder {
|
||||
if typ == anyType {
|
||||
return &directAnyCodec{}
|
||||
}
|
||||
if typ.Implements(anyType) {
|
||||
return &anyCodec{
|
||||
valType: typ,
|
||||
}
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
func createEncoderOfAny(ctx *ctx, typ reflect2.Type) ValEncoder {
|
||||
if typ == anyType {
|
||||
return &directAnyCodec{}
|
||||
}
|
||||
if typ.Implements(anyType) {
|
||||
return &anyCodec{
|
||||
valType: typ,
|
||||
}
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
type anyCodec struct {
|
||||
valType reflect2.Type
|
||||
}
|
||||
|
||||
func (codec *anyCodec) Decode(ptr unsafe.Pointer, iter *Iterator) {
|
||||
panic("not implemented")
|
||||
}
|
||||
|
||||
func (codec *anyCodec) Encode(ptr unsafe.Pointer, stream *Stream) {
|
||||
obj := codec.valType.UnsafeIndirect(ptr)
|
||||
any := obj.(Any)
|
||||
any.WriteTo(stream)
|
||||
}
|
||||
|
||||
func (codec *anyCodec) IsEmpty(ptr unsafe.Pointer) bool {
|
||||
obj := codec.valType.UnsafeIndirect(ptr)
|
||||
any := obj.(Any)
|
||||
return any.Size() == 0
|
||||
}
|
||||
|
||||
type directAnyCodec struct {
|
||||
}
|
||||
|
||||
func (codec *directAnyCodec) Decode(ptr unsafe.Pointer, iter *Iterator) {
|
||||
*(*Any)(ptr) = iter.readAny()
|
||||
}
|
||||
|
||||
func (codec *directAnyCodec) Encode(ptr unsafe.Pointer, stream *Stream) {
|
||||
any := *(*Any)(ptr)
|
||||
any.WriteTo(stream)
|
||||
}
|
||||
|
||||
func (codec *directAnyCodec) IsEmpty(ptr unsafe.Pointer) bool {
|
||||
any := *(*Any)(ptr)
|
||||
return any.Size() == 0
|
||||
}
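any.go backs the path-based `Get` helper declared in adapter.go: string keys walk object fields, int keys index arrays, and the rune `'*'` (handled in `locatePath` and later in `arrayLazyAny.Get`) fans out over every element. A short illustrative sketch with an invented payload, assuming only the `Get`, `ToInt`, and `ToString` methods listed in the `Any` interface above:

```go
package main

import (
	"fmt"

	jsoniter "github.com/json-iterator/go"
)

func main() {
	data := []byte(`{"users": [{"name": "ann", "age": 30}, {"name": "bob", "age": 41}]}`)

	// Drill into a nested value without declaring a struct.
	age := jsoniter.Get(data, "users", 1, "age").ToInt()
	fmt.Println(age) // 41

	// The rune '*' acts as a wildcard over array elements; the result is
	// itself an Any wrapping the collected matches.
	names := jsoniter.Get(data, "users", '*', "name")
	fmt.Println(names.ToString()) // e.g. ["ann","bob"]
}
```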
|
278
vendor/github.com/json-iterator/go/any_array.go
generated
vendored
Normal file
@ -0,0 +1,278 @@
|
||||
package jsoniter
|
||||
|
||||
import (
|
||||
"reflect"
|
||||
"unsafe"
|
||||
)
|
||||
|
||||
type arrayLazyAny struct {
|
||||
baseAny
|
||||
cfg *frozenConfig
|
||||
buf []byte
|
||||
err error
|
||||
}
|
||||
|
||||
func (any *arrayLazyAny) ValueType() ValueType {
|
||||
return ArrayValue
|
||||
}
|
||||
|
||||
func (any *arrayLazyAny) MustBeValid() Any {
|
||||
return any
|
||||
}
|
||||
|
||||
func (any *arrayLazyAny) LastError() error {
|
||||
return any.err
|
||||
}
|
||||
|
||||
func (any *arrayLazyAny) ToBool() bool {
|
||||
iter := any.cfg.BorrowIterator(any.buf)
|
||||
defer any.cfg.ReturnIterator(iter)
|
||||
return iter.ReadArray()
|
||||
}
|
||||
|
||||
func (any *arrayLazyAny) ToInt() int {
|
||||
if any.ToBool() {
|
||||
return 1
|
||||
}
|
||||
return 0
|
||||
}
|
||||
|
||||
func (any *arrayLazyAny) ToInt32() int32 {
|
||||
if any.ToBool() {
|
||||
return 1
|
||||
}
|
||||
return 0
|
||||
}
|
||||
|
||||
func (any *arrayLazyAny) ToInt64() int64 {
|
||||
if any.ToBool() {
|
||||
return 1
|
||||
}
|
||||
return 0
|
||||
}
|
||||
|
||||
func (any *arrayLazyAny) ToUint() uint {
|
||||
if any.ToBool() {
|
||||
return 1
|
||||
}
|
||||
return 0
|
||||
}
|
||||
|
||||
func (any *arrayLazyAny) ToUint32() uint32 {
|
||||
if any.ToBool() {
|
||||
return 1
|
||||
}
|
||||
return 0
|
||||
}
|
||||
|
||||
func (any *arrayLazyAny) ToUint64() uint64 {
|
||||
if any.ToBool() {
|
||||
return 1
|
||||
}
|
||||
return 0
|
||||
}
|
||||
|
||||
func (any *arrayLazyAny) ToFloat32() float32 {
|
||||
if any.ToBool() {
|
||||
return 1
|
||||
}
|
||||
return 0
|
||||
}
|
||||
|
||||
func (any *arrayLazyAny) ToFloat64() float64 {
|
||||
if any.ToBool() {
|
||||
return 1
|
||||
}
|
||||
return 0
|
||||
}
|
||||
|
||||
func (any *arrayLazyAny) ToString() string {
|
||||
return *(*string)(unsafe.Pointer(&any.buf))
|
||||
}
|
||||
|
||||
func (any *arrayLazyAny) ToVal(val interface{}) {
|
||||
iter := any.cfg.BorrowIterator(any.buf)
|
||||
defer any.cfg.ReturnIterator(iter)
|
||||
iter.ReadVal(val)
|
||||
}
|
||||
|
||||
func (any *arrayLazyAny) Get(path ...interface{}) Any {
|
||||
if len(path) == 0 {
|
||||
return any
|
||||
}
|
||||
switch firstPath := path[0].(type) {
|
||||
case int:
|
||||
iter := any.cfg.BorrowIterator(any.buf)
|
||||
defer any.cfg.ReturnIterator(iter)
|
||||
valueBytes := locateArrayElement(iter, firstPath)
|
||||
if valueBytes == nil {
|
||||
return newInvalidAny(path)
|
||||
}
|
||||
iter.ResetBytes(valueBytes)
|
||||
return locatePath(iter, path[1:])
|
||||
case int32:
|
||||
if '*' == firstPath {
|
||||
iter := any.cfg.BorrowIterator(any.buf)
|
||||
defer any.cfg.ReturnIterator(iter)
|
||||
arr := make([]Any, 0)
|
||||
iter.ReadArrayCB(func(iter *Iterator) bool {
|
||||
found := iter.readAny().Get(path[1:]...)
|
||||
if found.ValueType() != InvalidValue {
|
||||
arr = append(arr, found)
|
||||
}
|
||||
return true
|
||||
})
|
||||
return wrapArray(arr)
|
||||
}
|
||||
return newInvalidAny(path)
|
||||
default:
|
||||
return newInvalidAny(path)
|
||||
}
|
||||
}
|
||||
|
||||
func (any *arrayLazyAny) Size() int {
|
||||
size := 0
|
||||
iter := any.cfg.BorrowIterator(any.buf)
|
||||
defer any.cfg.ReturnIterator(iter)
|
||||
iter.ReadArrayCB(func(iter *Iterator) bool {
|
||||
size++
|
||||
iter.Skip()
|
||||
return true
|
||||
})
|
||||
return size
|
||||
}
|
||||
|
||||
func (any *arrayLazyAny) WriteTo(stream *Stream) {
|
||||
stream.Write(any.buf)
|
||||
}
|
||||
|
||||
func (any *arrayLazyAny) GetInterface() interface{} {
|
||||
iter := any.cfg.BorrowIterator(any.buf)
|
||||
defer any.cfg.ReturnIterator(iter)
|
||||
return iter.Read()
|
||||
}
|
||||
|
||||
type arrayAny struct {
|
||||
baseAny
|
||||
val reflect.Value
|
||||
}
|
||||
|
||||
func wrapArray(val interface{}) *arrayAny {
|
||||
return &arrayAny{baseAny{}, reflect.ValueOf(val)}
|
||||
}
|
||||
|
||||
func (any *arrayAny) ValueType() ValueType {
|
||||
return ArrayValue
|
||||
}
|
||||
|
||||
func (any *arrayAny) MustBeValid() Any {
|
||||
return any
|
||||
}
|
||||
|
||||
func (any *arrayAny) LastError() error {
|
||||
return nil
|
||||
}
|
||||
|
||||
func (any *arrayAny) ToBool() bool {
|
||||
return any.val.Len() != 0
|
||||
}
|
||||
|
||||
func (any *arrayAny) ToInt() int {
|
||||
if any.val.Len() == 0 {
|
||||
return 0
|
||||
}
|
||||
return 1
|
||||
}
|
||||
|
||||
func (any *arrayAny) ToInt32() int32 {
|
||||
if any.val.Len() == 0 {
|
||||
return 0
|
||||
}
|
||||
return 1
|
||||
}
|
||||
|
||||
func (any *arrayAny) ToInt64() int64 {
|
||||
if any.val.Len() == 0 {
|
||||
return 0
|
||||
}
|
||||
return 1
|
||||
}
|
||||
|
||||
func (any *arrayAny) ToUint() uint {
|
||||
if any.val.Len() == 0 {
|
||||
return 0
|
||||
}
|
||||
return 1
|
||||
}
|
||||
|
||||
func (any *arrayAny) ToUint32() uint32 {
|
||||
if any.val.Len() == 0 {
|
||||
return 0
|
||||
}
|
||||
return 1
|
||||
}
|
||||
|
||||
func (any *arrayAny) ToUint64() uint64 {
|
||||
if any.val.Len() == 0 {
|
||||
return 0
|
||||
}
|
||||
return 1
|
||||
}
|
||||
|
||||
func (any *arrayAny) ToFloat32() float32 {
|
||||
if any.val.Len() == 0 {
|
||||
return 0
|
||||
}
|
||||
return 1
|
||||
}
|
||||
|
||||
func (any *arrayAny) ToFloat64() float64 {
|
||||
if any.val.Len() == 0 {
|
||||
return 0
|
||||
}
|
||||
return 1
|
||||
}
|
||||
|
||||
func (any *arrayAny) ToString() string {
|
||||
str, _ := MarshalToString(any.val.Interface())
|
||||
return str
|
||||
}
|
||||
|
||||
func (any *arrayAny) Get(path ...interface{}) Any {
|
||||
if len(path) == 0 {
|
||||
return any
|
||||
}
|
||||
switch firstPath := path[0].(type) {
|
||||
case int:
|
||||
if firstPath < 0 || firstPath >= any.val.Len() {
|
||||
return newInvalidAny(path)
|
||||
}
|
||||
return Wrap(any.val.Index(firstPath).Interface())
|
||||
case int32:
|
||||
if '*' == firstPath {
|
||||
mappedAll := make([]Any, 0)
|
||||
for i := 0; i < any.val.Len(); i++ {
|
||||
mapped := Wrap(any.val.Index(i).Interface()).Get(path[1:]...)
|
||||
if mapped.ValueType() != InvalidValue {
|
||||
mappedAll = append(mappedAll, mapped)
|
||||
}
|
||||
}
|
||||
return wrapArray(mappedAll)
|
||||
}
|
||||
return newInvalidAny(path)
|
||||
default:
|
||||
return newInvalidAny(path)
|
||||
}
|
||||
}
|
||||
|
||||
func (any *arrayAny) Size() int {
|
||||
return any.val.Len()
|
||||
}
|
||||
|
||||
func (any *arrayAny) WriteTo(stream *Stream) {
|
||||
stream.WriteVal(any.val)
|
||||
}
|
||||
|
||||
func (any *arrayAny) GetInterface() interface{} {
|
||||
return any.val.Interface()
|
||||
}
|
137
vendor/github.com/json-iterator/go/any_bool.go
generated
vendored
Normal file
@ -0,0 +1,137 @@
|
||||
package jsoniter
|
||||
|
||||
type trueAny struct {
|
||||
baseAny
|
||||
}
|
||||
|
||||
func (any *trueAny) LastError() error {
|
||||
return nil
|
||||
}
|
||||
|
||||
func (any *trueAny) ToBool() bool {
|
||||
return true
|
||||
}
|
||||
|
||||
func (any *trueAny) ToInt() int {
|
||||
return 1
|
||||
}
|
||||
|
||||
func (any *trueAny) ToInt32() int32 {
|
||||
return 1
|
||||
}
|
||||
|
||||
func (any *trueAny) ToInt64() int64 {
|
||||
return 1
|
||||
}
|
||||
|
||||
func (any *trueAny) ToUint() uint {
|
||||
return 1
|
||||
}
|
||||
|
||||
func (any *trueAny) ToUint32() uint32 {
|
||||
return 1
|
||||
}
|
||||
|
||||
func (any *trueAny) ToUint64() uint64 {
|
||||
return 1
|
||||
}
|
||||
|
||||
func (any *trueAny) ToFloat32() float32 {
|
||||
return 1
|
||||
}
|
||||
|
||||
func (any *trueAny) ToFloat64() float64 {
|
||||
return 1
|
||||
}
|
||||
|
||||
func (any *trueAny) ToString() string {
|
||||
return "true"
|
||||
}
|
||||
|
||||
func (any *trueAny) WriteTo(stream *Stream) {
|
||||
stream.WriteTrue()
|
||||
}
|
||||
|
||||
func (any *trueAny) Parse() *Iterator {
|
||||
return nil
|
||||
}
|
||||
|
||||
func (any *trueAny) GetInterface() interface{} {
|
||||
return true
|
||||
}
|
||||
|
||||
func (any *trueAny) ValueType() ValueType {
|
||||
return BoolValue
|
||||
}
|
||||
|
||||
func (any *trueAny) MustBeValid() Any {
|
||||
return any
|
||||
}
|
||||
|
||||
type falseAny struct {
|
||||
baseAny
|
||||
}
|
||||
|
||||
func (any *falseAny) LastError() error {
|
||||
return nil
|
||||
}
|
||||
|
||||
func (any *falseAny) ToBool() bool {
|
||||
return false
|
||||
}
|
||||
|
||||
func (any *falseAny) ToInt() int {
|
||||
return 0
|
||||
}
|
||||
|
||||
func (any *falseAny) ToInt32() int32 {
|
||||
return 0
|
||||
}
|
||||
|
||||
func (any *falseAny) ToInt64() int64 {
|
||||
return 0
|
||||
}
|
||||
|
||||
func (any *falseAny) ToUint() uint {
|
||||
return 0
|
||||
}
|
||||
|
||||
func (any *falseAny) ToUint32() uint32 {
|
||||
return 0
|
||||
}
|
||||
|
||||
func (any *falseAny) ToUint64() uint64 {
|
||||
return 0
|
||||
}
|
||||
|
||||
func (any *falseAny) ToFloat32() float32 {
|
||||
return 0
|
||||
}
|
||||
|
||||
func (any *falseAny) ToFloat64() float64 {
|
||||
return 0
|
||||
}
|
||||
|
||||
func (any *falseAny) ToString() string {
|
||||
return "false"
|
||||
}
|
||||
|
||||
func (any *falseAny) WriteTo(stream *Stream) {
|
||||
stream.WriteFalse()
|
||||
}
|
||||
|
||||
func (any *falseAny) Parse() *Iterator {
|
||||
return nil
|
||||
}
|
||||
|
||||
func (any *falseAny) GetInterface() interface{} {
|
||||
return false
|
||||
}
|
||||
|
||||
func (any *falseAny) ValueType() ValueType {
|
||||
return BoolValue
|
||||
}
|
||||
|
||||
func (any *falseAny) MustBeValid() Any {
|
||||
return any
|
||||
}
|
83
vendor/github.com/json-iterator/go/any_float.go
generated
vendored
Normal file
@ -0,0 +1,83 @@
|
||||
package jsoniter
|
||||
|
||||
import (
|
||||
"strconv"
|
||||
)
|
||||
|
||||
type floatAny struct {
|
||||
baseAny
|
||||
val float64
|
||||
}
|
||||
|
||||
func (any *floatAny) Parse() *Iterator {
|
||||
return nil
|
||||
}
|
||||
|
||||
func (any *floatAny) ValueType() ValueType {
|
||||
return NumberValue
|
||||
}
|
||||
|
||||
func (any *floatAny) MustBeValid() Any {
|
||||
return any
|
||||
}
|
||||
|
||||
func (any *floatAny) LastError() error {
|
||||
return nil
|
||||
}
|
||||
|
||||
func (any *floatAny) ToBool() bool {
|
||||
return any.ToFloat64() != 0
|
||||
}
|
||||
|
||||
func (any *floatAny) ToInt() int {
|
||||
return int(any.val)
|
||||
}
|
||||
|
||||
func (any *floatAny) ToInt32() int32 {
|
||||
return int32(any.val)
|
||||
}
|
||||
|
||||
func (any *floatAny) ToInt64() int64 {
|
||||
return int64(any.val)
|
||||
}
|
||||
|
||||
func (any *floatAny) ToUint() uint {
|
||||
if any.val > 0 {
|
||||
return uint(any.val)
|
||||
}
|
||||
return 0
|
||||
}
|
||||
|
||||
func (any *floatAny) ToUint32() uint32 {
|
||||
if any.val > 0 {
|
||||
return uint32(any.val)
|
||||
}
|
||||
return 0
|
||||
}
|
||||
|
||||
func (any *floatAny) ToUint64() uint64 {
|
||||
if any.val > 0 {
|
||||
return uint64(any.val)
|
||||
}
|
||||
return 0
|
||||
}
|
||||
|
||||
func (any *floatAny) ToFloat32() float32 {
|
||||
return float32(any.val)
|
||||
}
|
||||
|
||||
func (any *floatAny) ToFloat64() float64 {
|
||||
return any.val
|
||||
}
|
||||
|
||||
func (any *floatAny) ToString() string {
|
||||
return strconv.FormatFloat(any.val, 'E', -1, 64)
|
||||
}
|
||||
|
||||
func (any *floatAny) WriteTo(stream *Stream) {
|
||||
stream.WriteFloat64(any.val)
|
||||
}
|
||||
|
||||
func (any *floatAny) GetInterface() interface{} {
|
||||
return any.val
|
||||
}
|
74
vendor/github.com/json-iterator/go/any_int32.go
generated
vendored
Normal file
@ -0,0 +1,74 @@
|
||||
package jsoniter
|
||||
|
||||
import (
|
||||
"strconv"
|
||||
)
|
||||
|
||||
type int32Any struct {
|
||||
baseAny
|
||||
val int32
|
||||
}
|
||||
|
||||
func (any *int32Any) LastError() error {
|
||||
return nil
|
||||
}
|
||||
|
||||
func (any *int32Any) ValueType() ValueType {
|
||||
return NumberValue
|
||||
}
|
||||
|
||||
func (any *int32Any) MustBeValid() Any {
|
||||
return any
|
||||
}
|
||||
|
||||
func (any *int32Any) ToBool() bool {
|
||||
return any.val != 0
|
||||
}
|
||||
|
||||
func (any *int32Any) ToInt() int {
|
||||
return int(any.val)
|
||||
}
|
||||
|
||||
func (any *int32Any) ToInt32() int32 {
|
||||
return any.val
|
||||
}
|
||||
|
||||
func (any *int32Any) ToInt64() int64 {
|
||||
return int64(any.val)
|
||||
}
|
||||
|
||||
func (any *int32Any) ToUint() uint {
|
||||
return uint(any.val)
|
||||
}
|
||||
|
||||
func (any *int32Any) ToUint32() uint32 {
|
||||
return uint32(any.val)
|
||||
}
|
||||
|
||||
func (any *int32Any) ToUint64() uint64 {
|
||||
return uint64(any.val)
|
||||
}
|
||||
|
||||
func (any *int32Any) ToFloat32() float32 {
|
||||
return float32(any.val)
|
||||
}
|
||||
|
||||
func (any *int32Any) ToFloat64() float64 {
|
||||
return float64(any.val)
|
||||
}
|
||||
|
||||
func (any *int32Any) ToString() string {
|
||||
return strconv.FormatInt(int64(any.val), 10)
|
||||
}
|
||||
|
||||
func (any *int32Any) WriteTo(stream *Stream) {
|
||||
stream.WriteInt32(any.val)
|
||||
}
|
||||
|
||||
func (any *int32Any) Parse() *Iterator {
|
||||
return nil
|
||||
}
|
||||
|
||||
func (any *int32Any) GetInterface() interface{} {
|
||||
return any.val
|
||||
}
|
74
vendor/github.com/json-iterator/go/any_int64.go
generated
vendored
Normal file
@ -0,0 +1,74 @@
|
||||
package jsoniter
|
||||
|
||||
import (
|
||||
"strconv"
|
||||
)
|
||||
|
||||
type int64Any struct {
|
||||
baseAny
|
||||
val int64
|
||||
}
|
||||
|
||||
func (any *int64Any) LastError() error {
|
||||
return nil
|
||||
}
|
||||
|
||||
func (any *int64Any) ValueType() ValueType {
|
||||
return NumberValue
|
||||
}
|
||||
|
||||
func (any *int64Any) MustBeValid() Any {
|
||||
return any
|
||||
}
|
||||
|
||||
func (any *int64Any) ToBool() bool {
|
||||
return any.val != 0
|
||||
}
|
||||
|
||||
func (any *int64Any) ToInt() int {
|
||||
return int(any.val)
|
||||
}
|
||||
|
||||
func (any *int64Any) ToInt32() int32 {
|
||||
return int32(any.val)
|
||||
}
|
||||
|
||||
func (any *int64Any) ToInt64() int64 {
|
||||
return any.val
|
||||
}
|
||||
|
||||
func (any *int64Any) ToUint() uint {
|
||||
return uint(any.val)
|
||||
}
|
||||
|
||||
func (any *int64Any) ToUint32() uint32 {
|
||||
return uint32(any.val)
|
||||
}
|
||||
|
||||
func (any *int64Any) ToUint64() uint64 {
|
||||
return uint64(any.val)
|
||||
}
|
||||
|
||||
func (any *int64Any) ToFloat32() float32 {
|
||||
return float32(any.val)
|
||||
}
|
||||
|
||||
func (any *int64Any) ToFloat64() float64 {
|
||||
return float64(any.val)
|
||||
}
|
||||
|
||||
func (any *int64Any) ToString() string {
|
||||
return strconv.FormatInt(any.val, 10)
|
||||
}
|
||||
|
||||
func (any *int64Any) WriteTo(stream *Stream) {
|
||||
stream.WriteInt64(any.val)
|
||||
}
|
||||
|
||||
func (any *int64Any) Parse() *Iterator {
|
||||
return nil
|
||||
}
|
||||
|
||||
func (any *int64Any) GetInterface() interface{} {
|
||||
return any.val
|
||||
}
|
82
vendor/github.com/json-iterator/go/any_invalid.go
generated
vendored
Normal file
@ -0,0 +1,82 @@
|
||||
package jsoniter
|
||||
|
||||
import "fmt"
|
||||
|
||||
type invalidAny struct {
|
||||
baseAny
|
||||
err error
|
||||
}
|
||||
|
||||
func newInvalidAny(path []interface{}) *invalidAny {
|
||||
return &invalidAny{baseAny{}, fmt.Errorf("%v not found", path)}
|
||||
}
|
||||
|
||||
func (any *invalidAny) LastError() error {
|
||||
return any.err
|
||||
}
|
||||
|
||||
func (any *invalidAny) ValueType() ValueType {
|
||||
return InvalidValue
|
||||
}
|
||||
|
||||
func (any *invalidAny) MustBeValid() Any {
|
||||
panic(any.err)
|
||||
}
|
||||
|
||||
func (any *invalidAny) ToBool() bool {
|
||||
return false
|
||||
}
|
||||
|
||||
func (any *invalidAny) ToInt() int {
|
||||
return 0
|
||||
}
|
||||
|
||||
func (any *invalidAny) ToInt32() int32 {
|
||||
return 0
|
||||
}
|
||||
|
||||
func (any *invalidAny) ToInt64() int64 {
|
||||
return 0
|
||||
}
|
||||
|
||||
func (any *invalidAny) ToUint() uint {
|
||||
return 0
|
||||
}
|
||||
|
||||
func (any *invalidAny) ToUint32() uint32 {
|
||||
return 0
|
||||
}
|
||||
|
||||
func (any *invalidAny) ToUint64() uint64 {
|
||||
return 0
|
||||
}
|
||||
|
||||
func (any *invalidAny) ToFloat32() float32 {
|
||||
return 0
|
||||
}
|
||||
|
||||
func (any *invalidAny) ToFloat64() float64 {
|
||||
return 0
|
||||
}
|
||||
|
||||
func (any *invalidAny) ToString() string {
|
||||
return ""
|
||||
}
|
||||
|
||||
func (any *invalidAny) WriteTo(stream *Stream) {
|
||||
}
|
||||
|
||||
func (any *invalidAny) Get(path ...interface{}) Any {
|
||||
if any.err == nil {
|
||||
return &invalidAny{baseAny{}, fmt.Errorf("get %v from invalid", path)}
|
||||
}
|
||||
return &invalidAny{baseAny{}, fmt.Errorf("%v, get %v from invalid", any.err, path)}
|
||||
}
|
||||
|
||||
func (any *invalidAny) Parse() *Iterator {
|
||||
return nil
|
||||
}
|
||||
|
||||
func (any *invalidAny) GetInterface() interface{} {
|
||||
return nil
|
||||
}
|
69
vendor/github.com/json-iterator/go/any_nil.go
generated
vendored
Normal file
@@ -0,0 +1,69 @@
package jsoniter

type nilAny struct {
	baseAny
}

func (any *nilAny) LastError() error {
	return nil
}

func (any *nilAny) ValueType() ValueType {
	return NilValue
}

func (any *nilAny) MustBeValid() Any {
	return any
}

func (any *nilAny) ToBool() bool {
	return false
}

func (any *nilAny) ToInt() int {
	return 0
}

func (any *nilAny) ToInt32() int32 {
	return 0
}

func (any *nilAny) ToInt64() int64 {
	return 0
}

func (any *nilAny) ToUint() uint {
	return 0
}

func (any *nilAny) ToUint32() uint32 {
	return 0
}

func (any *nilAny) ToUint64() uint64 {
	return 0
}

func (any *nilAny) ToFloat32() float32 {
	return 0
}

func (any *nilAny) ToFloat64() float64 {
	return 0
}

func (any *nilAny) ToString() string {
	return ""
}

func (any *nilAny) WriteTo(stream *Stream) {
	stream.WriteNil()
}

func (any *nilAny) Parse() *Iterator {
	return nil
}

func (any *nilAny) GetInterface() interface{} {
	return nil
}
123
vendor/github.com/json-iterator/go/any_number.go
generated
vendored
Normal file
@@ -0,0 +1,123 @@
package jsoniter

import (
	"io"
	"unsafe"
)

type numberLazyAny struct {
	baseAny
	cfg *frozenConfig
	buf []byte
	err error
}

func (any *numberLazyAny) ValueType() ValueType {
	return NumberValue
}

func (any *numberLazyAny) MustBeValid() Any {
	return any
}

func (any *numberLazyAny) LastError() error {
	return any.err
}

func (any *numberLazyAny) ToBool() bool {
	return any.ToFloat64() != 0
}

func (any *numberLazyAny) ToInt() int {
	iter := any.cfg.BorrowIterator(any.buf)
	defer any.cfg.ReturnIterator(iter)
	val := iter.ReadInt()
	if iter.Error != nil && iter.Error != io.EOF {
		any.err = iter.Error
	}
	return val
}

func (any *numberLazyAny) ToInt32() int32 {
	iter := any.cfg.BorrowIterator(any.buf)
	defer any.cfg.ReturnIterator(iter)
	val := iter.ReadInt32()
	if iter.Error != nil && iter.Error != io.EOF {
		any.err = iter.Error
	}
	return val
}

func (any *numberLazyAny) ToInt64() int64 {
	iter := any.cfg.BorrowIterator(any.buf)
	defer any.cfg.ReturnIterator(iter)
	val := iter.ReadInt64()
	if iter.Error != nil && iter.Error != io.EOF {
		any.err = iter.Error
	}
	return val
}

func (any *numberLazyAny) ToUint() uint {
	iter := any.cfg.BorrowIterator(any.buf)
	defer any.cfg.ReturnIterator(iter)
	val := iter.ReadUint()
	if iter.Error != nil && iter.Error != io.EOF {
		any.err = iter.Error
	}
	return val
}

func (any *numberLazyAny) ToUint32() uint32 {
	iter := any.cfg.BorrowIterator(any.buf)
	defer any.cfg.ReturnIterator(iter)
	val := iter.ReadUint32()
	if iter.Error != nil && iter.Error != io.EOF {
		any.err = iter.Error
	}
	return val
}

func (any *numberLazyAny) ToUint64() uint64 {
	iter := any.cfg.BorrowIterator(any.buf)
	defer any.cfg.ReturnIterator(iter)
	val := iter.ReadUint64()
	if iter.Error != nil && iter.Error != io.EOF {
		any.err = iter.Error
	}
	return val
}

func (any *numberLazyAny) ToFloat32() float32 {
	iter := any.cfg.BorrowIterator(any.buf)
	defer any.cfg.ReturnIterator(iter)
	val := iter.ReadFloat32()
	if iter.Error != nil && iter.Error != io.EOF {
		any.err = iter.Error
	}
	return val
}

func (any *numberLazyAny) ToFloat64() float64 {
	iter := any.cfg.BorrowIterator(any.buf)
	defer any.cfg.ReturnIterator(iter)
	val := iter.ReadFloat64()
	if iter.Error != nil && iter.Error != io.EOF {
		any.err = iter.Error
	}
	return val
}

func (any *numberLazyAny) ToString() string {
	return *(*string)(unsafe.Pointer(&any.buf))
}

func (any *numberLazyAny) WriteTo(stream *Stream) {
	stream.Write(any.buf)
}

func (any *numberLazyAny) GetInterface() interface{} {
	iter := any.cfg.BorrowIterator(any.buf)
	defer any.cfg.ReturnIterator(iter)
	return iter.Read()
}
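For context (illustration only, not part of this commit): numberLazyAny above keeps the raw number bytes and only decodes them when a conversion is requested, borrowing a pooled Iterator from its frozenConfig and returning it afterwards. The same borrow/return pattern is reachable through the public API; a minimal sketch, assuming the vendored import path github.com/json-iterator/go:

package main

import (
	"fmt"

	jsoniter "github.com/json-iterator/go"
)

func main() {
	// Borrow a pooled Iterator, read one value, return the Iterator --
	// the same pattern numberLazyAny.ToInt64 uses internally.
	iter := jsoniter.ConfigDefault.BorrowIterator([]byte("42"))
	defer jsoniter.ConfigDefault.ReturnIterator(iter)
	fmt.Println(iter.ReadInt64()) // prints 42
}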
374
vendor/github.com/json-iterator/go/any_object.go
generated
vendored
Normal file
@@ -0,0 +1,374 @@
package jsoniter

import (
	"reflect"
	"unsafe"
)

type objectLazyAny struct {
	baseAny
	cfg *frozenConfig
	buf []byte
	err error
}

func (any *objectLazyAny) ValueType() ValueType {
	return ObjectValue
}

func (any *objectLazyAny) MustBeValid() Any {
	return any
}

func (any *objectLazyAny) LastError() error {
	return any.err
}

func (any *objectLazyAny) ToBool() bool {
	return true
}

func (any *objectLazyAny) ToInt() int {
	return 0
}

func (any *objectLazyAny) ToInt32() int32 {
	return 0
}

func (any *objectLazyAny) ToInt64() int64 {
	return 0
}

func (any *objectLazyAny) ToUint() uint {
	return 0
}

func (any *objectLazyAny) ToUint32() uint32 {
	return 0
}

func (any *objectLazyAny) ToUint64() uint64 {
	return 0
}

func (any *objectLazyAny) ToFloat32() float32 {
	return 0
}

func (any *objectLazyAny) ToFloat64() float64 {
	return 0
}

func (any *objectLazyAny) ToString() string {
	return *(*string)(unsafe.Pointer(&any.buf))
}

func (any *objectLazyAny) ToVal(obj interface{}) {
	iter := any.cfg.BorrowIterator(any.buf)
	defer any.cfg.ReturnIterator(iter)
	iter.ReadVal(obj)
}

func (any *objectLazyAny) Get(path ...interface{}) Any {
	if len(path) == 0 {
		return any
	}
	switch firstPath := path[0].(type) {
	case string:
		iter := any.cfg.BorrowIterator(any.buf)
		defer any.cfg.ReturnIterator(iter)
		valueBytes := locateObjectField(iter, firstPath)
		if valueBytes == nil {
			return newInvalidAny(path)
		}
		iter.ResetBytes(valueBytes)
		return locatePath(iter, path[1:])
	case int32:
		if '*' == firstPath {
			mappedAll := map[string]Any{}
			iter := any.cfg.BorrowIterator(any.buf)
			defer any.cfg.ReturnIterator(iter)
			iter.ReadMapCB(func(iter *Iterator, field string) bool {
				mapped := locatePath(iter, path[1:])
				if mapped.ValueType() != InvalidValue {
					mappedAll[field] = mapped
				}
				return true
			})
			return wrapMap(mappedAll)
		}
		return newInvalidAny(path)
	default:
		return newInvalidAny(path)
	}
}

func (any *objectLazyAny) Keys() []string {
	keys := []string{}
	iter := any.cfg.BorrowIterator(any.buf)
	defer any.cfg.ReturnIterator(iter)
	iter.ReadMapCB(func(iter *Iterator, field string) bool {
		iter.Skip()
		keys = append(keys, field)
		return true
	})
	return keys
}

func (any *objectLazyAny) Size() int {
	size := 0
	iter := any.cfg.BorrowIterator(any.buf)
	defer any.cfg.ReturnIterator(iter)
	iter.ReadObjectCB(func(iter *Iterator, field string) bool {
		iter.Skip()
		size++
		return true
	})
	return size
}

func (any *objectLazyAny) WriteTo(stream *Stream) {
	stream.Write(any.buf)
}

func (any *objectLazyAny) GetInterface() interface{} {
	iter := any.cfg.BorrowIterator(any.buf)
	defer any.cfg.ReturnIterator(iter)
	return iter.Read()
}

type objectAny struct {
	baseAny
	err error
	val reflect.Value
}

func wrapStruct(val interface{}) *objectAny {
	return &objectAny{baseAny{}, nil, reflect.ValueOf(val)}
}

func (any *objectAny) ValueType() ValueType {
	return ObjectValue
}

func (any *objectAny) MustBeValid() Any {
	return any
}

func (any *objectAny) Parse() *Iterator {
	return nil
}

func (any *objectAny) LastError() error {
	return any.err
}

func (any *objectAny) ToBool() bool {
	return any.val.NumField() != 0
}

func (any *objectAny) ToInt() int {
	return 0
}

func (any *objectAny) ToInt32() int32 {
	return 0
}

func (any *objectAny) ToInt64() int64 {
	return 0
}

func (any *objectAny) ToUint() uint {
	return 0
}

func (any *objectAny) ToUint32() uint32 {
	return 0
}

func (any *objectAny) ToUint64() uint64 {
	return 0
}

func (any *objectAny) ToFloat32() float32 {
	return 0
}

func (any *objectAny) ToFloat64() float64 {
	return 0
}

func (any *objectAny) ToString() string {
	str, err := MarshalToString(any.val.Interface())
	any.err = err
	return str
}

func (any *objectAny) Get(path ...interface{}) Any {
	if len(path) == 0 {
		return any
	}
	switch firstPath := path[0].(type) {
	case string:
		field := any.val.FieldByName(firstPath)
		if !field.IsValid() {
			return newInvalidAny(path)
		}
		return Wrap(field.Interface())
	case int32:
		if '*' == firstPath {
			mappedAll := map[string]Any{}
			for i := 0; i < any.val.NumField(); i++ {
				field := any.val.Field(i)
				if field.CanInterface() {
					mapped := Wrap(field.Interface()).Get(path[1:]...)
					if mapped.ValueType() != InvalidValue {
						mappedAll[any.val.Type().Field(i).Name] = mapped
					}
				}
			}
			return wrapMap(mappedAll)
		}
		return newInvalidAny(path)
	default:
		return newInvalidAny(path)
	}
}

func (any *objectAny) Keys() []string {
	keys := make([]string, 0, any.val.NumField())
	for i := 0; i < any.val.NumField(); i++ {
		keys = append(keys, any.val.Type().Field(i).Name)
	}
	return keys
}

func (any *objectAny) Size() int {
	return any.val.NumField()
}

func (any *objectAny) WriteTo(stream *Stream) {
	stream.WriteVal(any.val)
}

func (any *objectAny) GetInterface() interface{} {
	return any.val.Interface()
}

type mapAny struct {
	baseAny
	err error
	val reflect.Value
}

func wrapMap(val interface{}) *mapAny {
	return &mapAny{baseAny{}, nil, reflect.ValueOf(val)}
}

func (any *mapAny) ValueType() ValueType {
	return ObjectValue
}

func (any *mapAny) MustBeValid() Any {
	return any
}

func (any *mapAny) Parse() *Iterator {
	return nil
}

func (any *mapAny) LastError() error {
	return any.err
}

func (any *mapAny) ToBool() bool {
	return true
}

func (any *mapAny) ToInt() int {
	return 0
}

func (any *mapAny) ToInt32() int32 {
	return 0
}

func (any *mapAny) ToInt64() int64 {
	return 0
}

func (any *mapAny) ToUint() uint {
	return 0
}

func (any *mapAny) ToUint32() uint32 {
	return 0
}

func (any *mapAny) ToUint64() uint64 {
	return 0
}

func (any *mapAny) ToFloat32() float32 {
	return 0
}

func (any *mapAny) ToFloat64() float64 {
	return 0
}

func (any *mapAny) ToString() string {
	str, err := MarshalToString(any.val.Interface())
	any.err = err
	return str
}

func (any *mapAny) Get(path ...interface{}) Any {
	if len(path) == 0 {
		return any
	}
	switch firstPath := path[0].(type) {
	case int32:
		if '*' == firstPath {
			mappedAll := map[string]Any{}
			for _, key := range any.val.MapKeys() {
				keyAsStr := key.String()
				element := Wrap(any.val.MapIndex(key).Interface())
				mapped := element.Get(path[1:]...)
				if mapped.ValueType() != InvalidValue {
					mappedAll[keyAsStr] = mapped
				}
			}
			return wrapMap(mappedAll)
		}
		return newInvalidAny(path)
	default:
		value := any.val.MapIndex(reflect.ValueOf(firstPath))
		if !value.IsValid() {
			return newInvalidAny(path)
		}
		return Wrap(value.Interface())
	}
}

func (any *mapAny) Keys() []string {
	keys := make([]string, 0, any.val.Len())
	for _, key := range any.val.MapKeys() {
		keys = append(keys, key.String())
	}
	return keys
}

func (any *mapAny) Size() int {
	return any.val.Len()
}

func (any *mapAny) WriteTo(stream *Stream) {
	stream.WriteVal(any.val)
}

func (any *mapAny) GetInterface() interface{} {
	return any.val.Interface()
}
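For context (illustration only, not part of this commit): objectLazyAny.Get above is what serves path lookups made through the package-level Get helper (the corresponding Get method appears in the API interface in config.go further down this diff). A string path element selects a single field lazily, while a '*' rune (which arrives as int32) maps the remaining path over every field. A minimal sketch, assuming the vendored import path github.com/json-iterator/go:

package main

import (
	"fmt"

	jsoniter "github.com/json-iterator/go"
)

func main() {
	data := []byte(`{"alice":{"age":31},"bob":{"age":27}}`)

	// A string path element selects one field without decoding the rest.
	fmt.Println(jsoniter.Get(data, "alice", "age").ToInt()) // 31

	// '*' fans the remaining path out over every field (the `case int32`
	// branch above) and wraps the result as a map-backed Any.
	fmt.Println(jsoniter.Get(data, '*', "age").ToString()) // e.g. {"alice":31,"bob":27} (key order may vary)
}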
166
vendor/github.com/json-iterator/go/any_str.go
generated
vendored
Normal file
@@ -0,0 +1,166 @@
package jsoniter

import (
	"fmt"
	"strconv"
)

type stringAny struct {
	baseAny
	val string
}

func (any *stringAny) Get(path ...interface{}) Any {
	if len(path) == 0 {
		return any
	}
	return &invalidAny{baseAny{}, fmt.Errorf("GetIndex %v from simple value", path)}
}

func (any *stringAny) Parse() *Iterator {
	return nil
}

func (any *stringAny) ValueType() ValueType {
	return StringValue
}

func (any *stringAny) MustBeValid() Any {
	return any
}

func (any *stringAny) LastError() error {
	return nil
}

func (any *stringAny) ToBool() bool {
	str := any.ToString()
	if str == "0" {
		return false
	}
	for _, c := range str {
		switch c {
		case ' ', '\n', '\r', '\t':
		default:
			return true
		}
	}
	return false
}

func (any *stringAny) ToInt() int {
	return int(any.ToInt64())

}

func (any *stringAny) ToInt32() int32 {
	return int32(any.ToInt64())
}

func (any *stringAny) ToInt64() int64 {
	if any.val == "" {
		return 0
	}

	flag := 1
	startPos := 0
	endPos := 0
	if any.val[0] == '+' || any.val[0] == '-' {
		startPos = 1
	}

	if any.val[0] == '-' {
		flag = -1
	}

	for i := startPos; i < len(any.val); i++ {
		if any.val[i] >= '0' && any.val[i] <= '9' {
			endPos = i + 1
		} else {
			break
		}
	}
	parsed, _ := strconv.ParseInt(any.val[startPos:endPos], 10, 64)
	return int64(flag) * parsed
}

func (any *stringAny) ToUint() uint {
	return uint(any.ToUint64())
}

func (any *stringAny) ToUint32() uint32 {
	return uint32(any.ToUint64())
}

func (any *stringAny) ToUint64() uint64 {
	if any.val == "" {
		return 0
	}

	startPos := 0
	endPos := 0

	if any.val[0] == '-' {
		return 0
	}
	if any.val[0] == '+' {
		startPos = 1
	}

	for i := startPos; i < len(any.val); i++ {
		if any.val[i] >= '0' && any.val[i] <= '9' {
			endPos = i + 1
		} else {
			break
		}
	}
	parsed, _ := strconv.ParseUint(any.val[startPos:endPos], 10, 64)
	return parsed
}

func (any *stringAny) ToFloat32() float32 {
	return float32(any.ToFloat64())
}

func (any *stringAny) ToFloat64() float64 {
	if len(any.val) == 0 {
		return 0
	}

	// first char invalid
	if any.val[0] != '+' && any.val[0] != '-' && (any.val[0] > '9' || any.val[0] < '0') {
		return 0
	}

	// extract valid num expression from string
	// eg 123true => 123, -12.12xxa => -12.12
	endPos := 1
	for i := 1; i < len(any.val); i++ {
		if any.val[i] == '.' || any.val[i] == 'e' || any.val[i] == 'E' || any.val[i] == '+' || any.val[i] == '-' {
			endPos = i + 1
			continue
		}

		// end position is the first char which is not digit
		if any.val[i] >= '0' && any.val[i] <= '9' {
			endPos = i + 1
		} else {
			endPos = i
			break
		}
	}
	parsed, _ := strconv.ParseFloat(any.val[:endPos], 64)
	return parsed
}

func (any *stringAny) ToString() string {
	return any.val
}

func (any *stringAny) WriteTo(stream *Stream) {
	stream.WriteString(any.val)
}

func (any *stringAny) GetInterface() interface{} {
	return any.val
}
74
vendor/github.com/json-iterator/go/any_uint32.go
generated
vendored
Normal file
@@ -0,0 +1,74 @@
package jsoniter

import (
	"strconv"
)

type uint32Any struct {
	baseAny
	val uint32
}

func (any *uint32Any) LastError() error {
	return nil
}

func (any *uint32Any) ValueType() ValueType {
	return NumberValue
}

func (any *uint32Any) MustBeValid() Any {
	return any
}

func (any *uint32Any) ToBool() bool {
	return any.val != 0
}

func (any *uint32Any) ToInt() int {
	return int(any.val)
}

func (any *uint32Any) ToInt32() int32 {
	return int32(any.val)
}

func (any *uint32Any) ToInt64() int64 {
	return int64(any.val)
}

func (any *uint32Any) ToUint() uint {
	return uint(any.val)
}

func (any *uint32Any) ToUint32() uint32 {
	return any.val
}

func (any *uint32Any) ToUint64() uint64 {
	return uint64(any.val)
}

func (any *uint32Any) ToFloat32() float32 {
	return float32(any.val)
}

func (any *uint32Any) ToFloat64() float64 {
	return float64(any.val)
}

func (any *uint32Any) ToString() string {
	return strconv.FormatInt(int64(any.val), 10)
}

func (any *uint32Any) WriteTo(stream *Stream) {
	stream.WriteUint32(any.val)
}

func (any *uint32Any) Parse() *Iterator {
	return nil
}

func (any *uint32Any) GetInterface() interface{} {
	return any.val
}
74
vendor/github.com/json-iterator/go/any_uint64.go
generated
vendored
Normal file
@@ -0,0 +1,74 @@
package jsoniter

import (
	"strconv"
)

type uint64Any struct {
	baseAny
	val uint64
}

func (any *uint64Any) LastError() error {
	return nil
}

func (any *uint64Any) ValueType() ValueType {
	return NumberValue
}

func (any *uint64Any) MustBeValid() Any {
	return any
}

func (any *uint64Any) ToBool() bool {
	return any.val != 0
}

func (any *uint64Any) ToInt() int {
	return int(any.val)
}

func (any *uint64Any) ToInt32() int32 {
	return int32(any.val)
}

func (any *uint64Any) ToInt64() int64 {
	return int64(any.val)
}

func (any *uint64Any) ToUint() uint {
	return uint(any.val)
}

func (any *uint64Any) ToUint32() uint32 {
	return uint32(any.val)
}

func (any *uint64Any) ToUint64() uint64 {
	return any.val
}

func (any *uint64Any) ToFloat32() float32 {
	return float32(any.val)
}

func (any *uint64Any) ToFloat64() float64 {
	return float64(any.val)
}

func (any *uint64Any) ToString() string {
	return strconv.FormatUint(any.val, 10)
}

func (any *uint64Any) WriteTo(stream *Stream) {
	stream.WriteUint64(any.val)
}

func (any *uint64Any) Parse() *Iterator {
	return nil
}

func (any *uint64Any) GetInterface() interface{} {
	return any.val
}
12
vendor/github.com/json-iterator/go/build.sh
generated
vendored
Executable file
@@ -0,0 +1,12 @@
#!/bin/bash
set -e
set -x

if [ ! -d /tmp/build-golang/src/github.com/json-iterator ]; then
	mkdir -p /tmp/build-golang/src/github.com/json-iterator
	ln -s $PWD /tmp/build-golang/src/github.com/json-iterator/go
fi
export GOPATH=/tmp/build-golang
go get -u github.com/golang/dep/cmd/dep
cd /tmp/build-golang/src/github.com/json-iterator/go
exec $GOPATH/bin/dep ensure -update
368
vendor/github.com/json-iterator/go/config.go
generated
vendored
Normal file
@@ -0,0 +1,368 @@
package jsoniter

import (
	"encoding/json"
	"github.com/modern-go/concurrent"
	"github.com/modern-go/reflect2"
	"io"
	"reflect"
	"sync"
	"unsafe"
)

// Config customize how the API should behave.
// The API is created from Config by Froze.
type Config struct {
	IndentionStep                 int
	MarshalFloatWith6Digits       bool
	EscapeHTML                    bool
	SortMapKeys                   bool
	UseNumber                     bool
	DisallowUnknownFields         bool
	TagKey                        string
	OnlyTaggedField               bool
	ValidateJsonRawMessage        bool
	ObjectFieldMustBeSimpleString bool
}

// API the public interface of this package.
// Primary Marshal and Unmarshal.
type API interface {
	IteratorPool
	StreamPool
	MarshalToString(v interface{}) (string, error)
	Marshal(v interface{}) ([]byte, error)
	MarshalIndent(v interface{}, prefix, indent string) ([]byte, error)
	UnmarshalFromString(str string, v interface{}) error
	Unmarshal(data []byte, v interface{}) error
	Get(data []byte, path ...interface{}) Any
	NewEncoder(writer io.Writer) *Encoder
	NewDecoder(reader io.Reader) *Decoder
	Valid(data []byte) bool
	RegisterExtension(extension Extension)
	DecoderOf(typ reflect2.Type) ValDecoder
	EncoderOf(typ reflect2.Type) ValEncoder
}

// ConfigDefault the default API
var ConfigDefault = Config{
	EscapeHTML: true,
}.Froze()

// ConfigCompatibleWithStandardLibrary tries to be 100% compatible with standard library behavior
var ConfigCompatibleWithStandardLibrary = Config{
	EscapeHTML:             true,
	SortMapKeys:            true,
	ValidateJsonRawMessage: true,
}.Froze()

// ConfigFastest marshals float with only 6 digits precision
var ConfigFastest = Config{
	EscapeHTML:                    false,
	MarshalFloatWith6Digits:       true, // will lose precession
	ObjectFieldMustBeSimpleString: true, // do not unescape object field
}.Froze()

type frozenConfig struct {
	configBeforeFrozen            Config
	sortMapKeys                   bool
	indentionStep                 int
	objectFieldMustBeSimpleString bool
	onlyTaggedField               bool
	disallowUnknownFields         bool
	decoderCache                  *concurrent.Map
	encoderCache                  *concurrent.Map
	extensions                    []Extension
	streamPool                    *sync.Pool
	iteratorPool                  *sync.Pool
}

func (cfg *frozenConfig) initCache() {
	cfg.decoderCache = concurrent.NewMap()
	cfg.encoderCache = concurrent.NewMap()
}

func (cfg *frozenConfig) addDecoderToCache(cacheKey uintptr, decoder ValDecoder) {
	cfg.decoderCache.Store(cacheKey, decoder)
}

func (cfg *frozenConfig) addEncoderToCache(cacheKey uintptr, encoder ValEncoder) {
	cfg.encoderCache.Store(cacheKey, encoder)
}

func (cfg *frozenConfig) getDecoderFromCache(cacheKey uintptr) ValDecoder {
	decoder, found := cfg.decoderCache.Load(cacheKey)
	if found {
		return decoder.(ValDecoder)
	}
	return nil
}

func (cfg *frozenConfig) getEncoderFromCache(cacheKey uintptr) ValEncoder {
	encoder, found := cfg.encoderCache.Load(cacheKey)
	if found {
		return encoder.(ValEncoder)
	}
	return nil
}

var cfgCache = concurrent.NewMap()

func getFrozenConfigFromCache(cfg Config) *frozenConfig {
	obj, found := cfgCache.Load(cfg)
	if found {
		return obj.(*frozenConfig)
	}
	return nil
}

func addFrozenConfigToCache(cfg Config, frozenConfig *frozenConfig) {
	cfgCache.Store(cfg, frozenConfig)
}

// Froze forge API from config
func (cfg Config) Froze() API {
	api := &frozenConfig{
		sortMapKeys:                   cfg.SortMapKeys,
		indentionStep:                 cfg.IndentionStep,
		objectFieldMustBeSimpleString: cfg.ObjectFieldMustBeSimpleString,
		onlyTaggedField:               cfg.OnlyTaggedField,
		disallowUnknownFields:         cfg.DisallowUnknownFields,
	}
	api.streamPool = &sync.Pool{
		New: func() interface{} {
			return NewStream(api, nil, 512)
		},
	}
	api.iteratorPool = &sync.Pool{
		New: func() interface{} {
			return NewIterator(api)
		},
	}
	api.initCache()
	encoderExtension := EncoderExtension{}
	decoderExtension := DecoderExtension{}
	if cfg.MarshalFloatWith6Digits {
		api.marshalFloatWith6Digits(encoderExtension)
	}
	if cfg.EscapeHTML {
		api.escapeHTML(encoderExtension)
	}
	if cfg.UseNumber {
		api.useNumber(decoderExtension)
	}
	if cfg.ValidateJsonRawMessage {
		api.validateJsonRawMessage(encoderExtension)
	}
	if len(encoderExtension) > 0 {
		api.extensions = append(api.extensions, encoderExtension)
	}
	if len(decoderExtension) > 0 {
		api.extensions = append(api.extensions, decoderExtension)
	}
	api.configBeforeFrozen = cfg
	return api
}

func (cfg Config) frozeWithCacheReuse() *frozenConfig {
	api := getFrozenConfigFromCache(cfg)
	if api != nil {
		return api
	}
	api = cfg.Froze().(*frozenConfig)
	addFrozenConfigToCache(cfg, api)
	return api
}

func (cfg *frozenConfig) validateJsonRawMessage(extension EncoderExtension) {
	encoder := &funcEncoder{func(ptr unsafe.Pointer, stream *Stream) {
		rawMessage := *(*json.RawMessage)(ptr)
		iter := cfg.BorrowIterator([]byte(rawMessage))
		iter.Read()
		if iter.Error != nil {
			stream.WriteRaw("null")
		} else {
			cfg.ReturnIterator(iter)
			stream.WriteRaw(string(rawMessage))
		}
	}, func(ptr unsafe.Pointer) bool {
		return false
	}}
	extension[reflect2.TypeOfPtr((*json.RawMessage)(nil)).Elem()] = encoder
	extension[reflect2.TypeOfPtr((*RawMessage)(nil)).Elem()] = encoder
}

func (cfg *frozenConfig) useNumber(extension DecoderExtension) {
	extension[reflect2.TypeOfPtr((*interface{})(nil)).Elem()] = &funcDecoder{func(ptr unsafe.Pointer, iter *Iterator) {
		exitingValue := *((*interface{})(ptr))
		if exitingValue != nil && reflect.TypeOf(exitingValue).Kind() == reflect.Ptr {
			iter.ReadVal(exitingValue)
			return
		}
		if iter.WhatIsNext() == NumberValue {
			*((*interface{})(ptr)) = json.Number(iter.readNumberAsString())
		} else {
			*((*interface{})(ptr)) = iter.Read()
		}
	}}
}
func (cfg *frozenConfig) getTagKey() string {
	tagKey := cfg.configBeforeFrozen.TagKey
	if tagKey == "" {
		return "json"
	}
	return tagKey
}

func (cfg *frozenConfig) RegisterExtension(extension Extension) {
	cfg.extensions = append(cfg.extensions, extension)
}

type lossyFloat32Encoder struct {
}

func (encoder *lossyFloat32Encoder) Encode(ptr unsafe.Pointer, stream *Stream) {
	stream.WriteFloat32Lossy(*((*float32)(ptr)))
}

func (encoder *lossyFloat32Encoder) IsEmpty(ptr unsafe.Pointer) bool {
	return *((*float32)(ptr)) == 0
}

type lossyFloat64Encoder struct {
}

func (encoder *lossyFloat64Encoder) Encode(ptr unsafe.Pointer, stream *Stream) {
	stream.WriteFloat64Lossy(*((*float64)(ptr)))
}

func (encoder *lossyFloat64Encoder) IsEmpty(ptr unsafe.Pointer) bool {
	return *((*float64)(ptr)) == 0
}

// EnableLossyFloatMarshalling keeps 10**(-6) precision
// for float variables for better performance.
func (cfg *frozenConfig) marshalFloatWith6Digits(extension EncoderExtension) {
	// for better performance
	extension[reflect2.TypeOfPtr((*float32)(nil)).Elem()] = &lossyFloat32Encoder{}
	extension[reflect2.TypeOfPtr((*float64)(nil)).Elem()] = &lossyFloat64Encoder{}
}

type htmlEscapedStringEncoder struct {
}

func (encoder *htmlEscapedStringEncoder) Encode(ptr unsafe.Pointer, stream *Stream) {
	str := *((*string)(ptr))
	stream.WriteStringWithHTMLEscaped(str)
}

func (encoder *htmlEscapedStringEncoder) IsEmpty(ptr unsafe.Pointer) bool {
	return *((*string)(ptr)) == ""
}

func (cfg *frozenConfig) escapeHTML(encoderExtension EncoderExtension) {
	encoderExtension[reflect2.TypeOfPtr((*string)(nil)).Elem()] = &htmlEscapedStringEncoder{}
}

func (cfg *frozenConfig) cleanDecoders() {
	typeDecoders = map[string]ValDecoder{}
	fieldDecoders = map[string]ValDecoder{}
	*cfg = *(cfg.configBeforeFrozen.Froze().(*frozenConfig))
}

func (cfg *frozenConfig) cleanEncoders() {
	typeEncoders = map[string]ValEncoder{}
	fieldEncoders = map[string]ValEncoder{}
	*cfg = *(cfg.configBeforeFrozen.Froze().(*frozenConfig))
}

func (cfg *frozenConfig) MarshalToString(v interface{}) (string, error) {
	stream := cfg.BorrowStream(nil)
	defer cfg.ReturnStream(stream)
	stream.WriteVal(v)
	if stream.Error != nil {
		return "", stream.Error
	}
	return string(stream.Buffer()), nil
}

func (cfg *frozenConfig) Marshal(v interface{}) ([]byte, error) {
	stream := cfg.BorrowStream(nil)
	defer cfg.ReturnStream(stream)
	stream.WriteVal(v)
	if stream.Error != nil {
		return nil, stream.Error
	}
	result := stream.Buffer()
	copied := make([]byte, len(result))
	copy(copied, result)
	return copied, nil
}

func (cfg *frozenConfig) MarshalIndent(v interface{}, prefix, indent string) ([]byte, error) {
	if prefix != "" {
		panic("prefix is not supported")
	}
	for _, r := range indent {
		if r != ' ' {
			panic("indent can only be space")
		}
	}
	newCfg := cfg.configBeforeFrozen
	newCfg.IndentionStep = len(indent)
	return newCfg.frozeWithCacheReuse().Marshal(v)
}

func (cfg *frozenConfig) UnmarshalFromString(str string, v interface{}) error {
	data := []byte(str)
	iter := cfg.BorrowIterator(data)
	defer cfg.ReturnIterator(iter)
	iter.ReadVal(v)
	c := iter.nextToken()
	if c == 0 {
		if iter.Error == io.EOF {
			return nil
		}
		return iter.Error
	}
	iter.ReportError("Unmarshal", "there are bytes left after unmarshal")
	return iter.Error
}

func (cfg *frozenConfig) Get(data []byte, path ...interface{}) Any {
	iter := cfg.BorrowIterator(data)
	defer cfg.ReturnIterator(iter)
	return locatePath(iter, path)
}

func (cfg *frozenConfig) Unmarshal(data []byte, v interface{}) error {
	iter := cfg.BorrowIterator(data)
	defer cfg.ReturnIterator(iter)
	iter.ReadVal(v)
	c := iter.nextToken()
	if c == 0 {
		if iter.Error == io.EOF {
			return nil
		}
		return iter.Error
	}
	iter.ReportError("Unmarshal", "there are bytes left after unmarshal")
	return iter.Error
}

func (cfg *frozenConfig) NewEncoder(writer io.Writer) *Encoder {
	stream := NewStream(cfg, writer, 512)
	return &Encoder{stream}
}

func (cfg *frozenConfig) NewDecoder(reader io.Reader) *Decoder {
	iter := Parse(cfg, reader, 512)
	return &Decoder{iter}
}

func (cfg *frozenConfig) Valid(data []byte) bool {
	iter := cfg.BorrowIterator(data)
	defer cfg.ReturnIterator(iter)
	iter.Skip()
	return iter.Error == nil
}
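As a usage note on the Config/Froze API above (illustration only, not part of this commit): a Config is frozen into an immutable API value whose Marshal/Unmarshal follow the chosen flags, and the three-flag combination below is the same one ConfigCompatibleWithStandardLibrary is built from. A minimal sketch, assuming the vendored import path github.com/json-iterator/go; the pod type and the "fpga-webhook" string are made up for the example:

package main

import (
	"fmt"

	jsoniter "github.com/json-iterator/go"
)

type pod struct {
	Name string `json:"name"`
}

func main() {
	// Same flags as ConfigCompatibleWithStandardLibrary defined above.
	json := jsoniter.Config{
		EscapeHTML:             true,
		SortMapKeys:            true,
		ValidateJsonRawMessage: true,
	}.Froze()

	out, err := json.MarshalToString(pod{Name: "fpga-webhook"})
	if err != nil {
		panic(err)
	}
	fmt.Println(out) // {"name":"fpga-webhook"}

	var p pod
	if err := json.Unmarshal([]byte(out), &p); err != nil {
		panic(err)
	}
	fmt.Println(p.Name) // fpga-webhook
}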
Some files were not shown because too many files have changed in this diff.