4
vendor/github.com/OneOfOne/xxhash/.gitignore
generated
vendored
Normal file
4
vendor/github.com/OneOfOne/xxhash/.gitignore
generated
vendored
Normal file
@@ -0,0 +1,4 @@
|
||||
*.txt
|
||||
*.pprof
|
||||
cmap2/
|
||||
cache/
|
||||
12
vendor/github.com/OneOfOne/xxhash/.travis.yml
generated
vendored
Normal file
12
vendor/github.com/OneOfOne/xxhash/.travis.yml
generated
vendored
Normal file
@@ -0,0 +1,12 @@
|
||||
language: go
|
||||
sudo: false
|
||||
|
||||
go:
|
||||
- 1.8
|
||||
- 1.9
|
||||
- "1.10"
|
||||
- master
|
||||
|
||||
script:
|
||||
- go test -tags safe ./...
|
||||
- go test ./...
|
||||
187
vendor/github.com/OneOfOne/xxhash/LICENSE
generated
vendored
Normal file
187
vendor/github.com/OneOfOne/xxhash/LICENSE
generated
vendored
Normal file
@@ -0,0 +1,187 @@
|
||||
Apache License
|
||||
Version 2.0, January 2004
|
||||
http://www.apache.org/licenses/
|
||||
|
||||
TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
|
||||
|
||||
1. Definitions.
|
||||
|
||||
"License" shall mean the terms and conditions for use, reproduction,
|
||||
and distribution as defined by Sections 1 through 9 of this document.
|
||||
|
||||
"Licensor" shall mean the copyright owner or entity authorized by
|
||||
the copyright owner that is granting the License.
|
||||
|
||||
"Legal Entity" shall mean the union of the acting entity and all
|
||||
other entities that control, are controlled by, or are under common
|
||||
control with that entity. For the purposes of this definition,
|
||||
"control" means (i) the power, direct or indirect, to cause the
|
||||
direction or management of such entity, whether by contract or
|
||||
otherwise, or (ii) ownership of fifty percent (50%) or more of the
|
||||
outstanding shares, or (iii) beneficial ownership of such entity.
|
||||
|
||||
"You" (or "Your") shall mean an individual or Legal Entity
|
||||
exercising permissions granted by this License.
|
||||
|
||||
"Source" form shall mean the preferred form for making modifications,
|
||||
including but not limited to software source code, documentation
|
||||
source, and configuration files.
|
||||
|
||||
"Object" form shall mean any form resulting from mechanical
|
||||
transformation or translation of a Source form, including but
|
||||
not limited to compiled object code, generated documentation,
|
||||
and conversions to other media types.
|
||||
|
||||
"Work" shall mean the work of authorship, whether in Source or
|
||||
Object form, made available under the License, as indicated by a
|
||||
copyright notice that is included in or attached to the work
|
||||
(an example is provided in the Appendix below).
|
||||
|
||||
"Derivative Works" shall mean any work, whether in Source or Object
|
||||
form, that is based on (or derived from) the Work and for which the
|
||||
editorial revisions, annotations, elaborations, or other modifications
|
||||
represent, as a whole, an original work of authorship. For the purposes
|
||||
of this License, Derivative Works shall not include works that remain
|
||||
separable from, or merely link (or bind by name) to the interfaces of,
|
||||
the Work and Derivative Works thereof.
|
||||
|
||||
"Contribution" shall mean any work of authorship, including
|
||||
the original version of the Work and any modifications or additions
|
||||
to that Work or Derivative Works thereof, that is intentionally
|
||||
submitted to Licensor for inclusion in the Work by the copyright owner
|
||||
or by an individual or Legal Entity authorized to submit on behalf of
|
||||
the copyright owner. For the purposes of this definition, "submitted"
|
||||
means any form of electronic, verbal, or written communication sent
|
||||
to the Licensor or its representatives, including but not limited to
|
||||
communication on electronic mailing lists, source code control systems,
|
||||
and issue tracking systems that are managed by, or on behalf of, the
|
||||
Licensor for the purpose of discussing and improving the Work, but
|
||||
excluding communication that is conspicuously marked or otherwise
|
||||
designated in writing by the copyright owner as "Not a Contribution."
|
||||
|
||||
"Contributor" shall mean Licensor and any individual or Legal Entity
|
||||
on behalf of whom a Contribution has been received by Licensor and
|
||||
subsequently incorporated within the Work.
|
||||
|
||||
2. Grant of Copyright License. Subject to the terms and conditions of
|
||||
this License, each Contributor hereby grants to You a perpetual,
|
||||
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
|
||||
copyright license to reproduce, prepare Derivative Works of,
|
||||
publicly display, publicly perform, sublicense, and distribute the
|
||||
Work and such Derivative Works in Source or Object form.
|
||||
|
||||
3. Grant of Patent License. Subject to the terms and conditions of
|
||||
this License, each Contributor hereby grants to You a perpetual,
|
||||
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
|
||||
(except as stated in this section) patent license to make, have made,
|
||||
use, offer to sell, sell, import, and otherwise transfer the Work,
|
||||
where such license applies only to those patent claims licensable
|
||||
by such Contributor that are necessarily infringed by their
|
||||
Contribution(s) alone or by combination of their Contribution(s)
|
||||
with the Work to which such Contribution(s) was submitted. If You
|
||||
institute patent litigation against any entity (including a
|
||||
cross-claim or counterclaim in a lawsuit) alleging that the Work
|
||||
or a Contribution incorporated within the Work constitutes direct
|
||||
or contributory patent infringement, then any patent licenses
|
||||
granted to You under this License for that Work shall terminate
|
||||
as of the date such litigation is filed.
|
||||
|
||||
4. Redistribution. You may reproduce and distribute copies of the
|
||||
Work or Derivative Works thereof in any medium, with or without
|
||||
modifications, and in Source or Object form, provided that You
|
||||
meet the following conditions:
|
||||
|
||||
(a) You must give any other recipients of the Work or
|
||||
Derivative Works a copy of this License; and
|
||||
|
||||
(b) You must cause any modified files to carry prominent notices
|
||||
stating that You changed the files; and
|
||||
|
||||
(c) You must retain, in the Source form of any Derivative Works
|
||||
that You distribute, all copyright, patent, trademark, and
|
||||
attribution notices from the Source form of the Work,
|
||||
excluding those notices that do not pertain to any part of
|
||||
the Derivative Works; and
|
||||
|
||||
(d) If the Work includes a "NOTICE" text file as part of its
|
||||
distribution, then any Derivative Works that You distribute must
|
||||
include a readable copy of the attribution notices contained
|
||||
within such NOTICE file, excluding those notices that do not
|
||||
pertain to any part of the Derivative Works, in at least one
|
||||
of the following places: within a NOTICE text file distributed
|
||||
as part of the Derivative Works; within the Source form or
|
||||
documentation, if provided along with the Derivative Works; or,
|
||||
within a display generated by the Derivative Works, if and
|
||||
wherever such third-party notices normally appear. The contents
|
||||
of the NOTICE file are for informational purposes only and
|
||||
do not modify the License. You may add Your own attribution
|
||||
notices within Derivative Works that You distribute, alongside
|
||||
or as an addendum to the NOTICE text from the Work, provided
|
||||
that such additional attribution notices cannot be construed
|
||||
as modifying the License.
|
||||
|
||||
You may add Your own copyright statement to Your modifications and
|
||||
may provide additional or different license terms and conditions
|
||||
for use, reproduction, or distribution of Your modifications, or
|
||||
for any such Derivative Works as a whole, provided Your use,
|
||||
reproduction, and distribution of the Work otherwise complies with
|
||||
the conditions stated in this License.
|
||||
|
||||
5. Submission of Contributions. Unless You explicitly state otherwise,
|
||||
any Contribution intentionally submitted for inclusion in the Work
|
||||
by You to the Licensor shall be under the terms and conditions of
|
||||
this License, without any additional terms or conditions.
|
||||
Notwithstanding the above, nothing herein shall supersede or modify
|
||||
the terms of any separate license agreement you may have executed
|
||||
with Licensor regarding such Contributions.
|
||||
|
||||
6. Trademarks. This License does not grant permission to use the trade
|
||||
names, trademarks, service marks, or product names of the Licensor,
|
||||
except as required for reasonable and customary use in describing the
|
||||
origin of the Work and reproducing the content of the NOTICE file.
|
||||
|
||||
7. Disclaimer of Warranty. Unless required by applicable law or
|
||||
agreed to in writing, Licensor provides the Work (and each
|
||||
Contributor provides its Contributions) on an "AS IS" BASIS,
|
||||
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
|
||||
implied, including, without limitation, any warranties or conditions
|
||||
of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
|
||||
PARTICULAR PURPOSE. You are solely responsible for determining the
|
||||
appropriateness of using or redistributing the Work and assume any
|
||||
risks associated with Your exercise of permissions under this License.
|
||||
|
||||
8. Limitation of Liability. In no event and under no legal theory,
|
||||
whether in tort (including negligence), contract, or otherwise,
|
||||
unless required by applicable law (such as deliberate and grossly
|
||||
negligent acts) or agreed to in writing, shall any Contributor be
|
||||
liable to You for damages, including any direct, indirect, special,
|
||||
incidental, or consequential damages of any character arising as a
|
||||
result of this License or out of the use or inability to use the
|
||||
Work (including but not limited to damages for loss of goodwill,
|
||||
work stoppage, computer failure or malfunction, or any and all
|
||||
other commercial damages or losses), even if such Contributor
|
||||
has been advised of the possibility of such damages.
|
||||
|
||||
9. Accepting Warranty or Additional Liability. While redistributing
|
||||
the Work or Derivative Works thereof, You may choose to offer,
|
||||
and charge a fee for, acceptance of support, warranty, indemnity,
|
||||
or other liability obligations and/or rights consistent with this
|
||||
License. However, in accepting such obligations, You may act only
|
||||
on Your own behalf and on Your sole responsibility, not on behalf
|
||||
of any other Contributor, and only if You agree to indemnify,
|
||||
defend, and hold each Contributor harmless for any liability
|
||||
incurred by, or claims asserted against, such Contributor by reason
|
||||
of your accepting any such warranty or additional liability.
|
||||
|
||||
END OF TERMS AND CONDITIONS
|
||||
|
||||
APPENDIX: How to apply the Apache License to your work.
|
||||
|
||||
To apply the Apache License to your work, attach the following
|
||||
boilerplate notice, with the fields enclosed by brackets "{}"
|
||||
replaced with your own identifying information. (Don't include
|
||||
the brackets!) The text should be enclosed in the appropriate
|
||||
comment syntax for the file format. We also recommend that a
|
||||
file or class name and description of purpose be included on the
|
||||
same "printed page" as the copyright notice for easier
|
||||
identification within third-party archives.
|
||||
75
vendor/github.com/OneOfOne/xxhash/README.md
generated
vendored
Normal file
75
vendor/github.com/OneOfOne/xxhash/README.md
generated
vendored
Normal file
@@ -0,0 +1,75 @@
|
||||
# xxhash [](https://godoc.org/github.com/OneOfOne/xxhash) [](https://travis-ci.org/OneOfOne/xxhash) [](https://gocover.io/github.com/OneOfOne/xxhash)
|
||||
|
||||
This is a native Go implementation of the excellent [xxhash](https://github.com/Cyan4973/xxHash)* algorithm, an extremely fast non-cryptographic Hash algorithm, working at speeds close to RAM limits.
|
||||
|
||||
* The C implementation is ([Copyright](https://github.com/Cyan4973/xxHash/blob/master/LICENSE) (c) 2012-2014, Yann Collet)
|
||||
|
||||
## Install
|
||||
|
||||
go get github.com/OneOfOne/xxhash
|
||||
|
||||
## Features
|
||||
|
||||
* On Go 1.7+ the pure go version is faster than CGO for all inputs.
|
||||
* Supports ChecksumString{32,64} xxhash{32,64}.WriteString, which uses no copies when it can, falls back to copy on appengine.
|
||||
* The native version falls back to a less optimized version on appengine due to the lack of unsafe.
|
||||
* Almost as fast as the mostly pure assembly version written by the brilliant [cespare](https://github.com/cespare/xxhash), while also supporting seeds.
|
||||
* To manually toggle the appengine version build with `-tags safe`.
|
||||
|
||||
## Benchmark
|
||||
|
||||
### Core i7-4790 @ 3.60GHz, Linux 4.12.6-1-ARCH (64bit), Go tip (+ff90f4af66 2017-08-19)
|
||||
|
||||
```bash
|
||||
➤ go test -bench '64' -count 5 -tags cespare | benchstat /dev/stdin
|
||||
name time/op
|
||||
|
||||
# https://github.com/cespare/xxhash
|
||||
XXSum64Cespare/Func-8 160ns ± 2%
|
||||
XXSum64Cespare/Struct-8 173ns ± 1%
|
||||
XXSum64ShortCespare/Func-8 6.78ns ± 1%
|
||||
XXSum64ShortCespare/Struct-8 19.6ns ± 2%
|
||||
|
||||
# this package (default mode, using unsafe)
|
||||
XXSum64/Func-8 170ns ± 1%
|
||||
XXSum64/Struct-8 182ns ± 1%
|
||||
XXSum64Short/Func-8 13.5ns ± 3%
|
||||
XXSum64Short/Struct-8 20.4ns ± 0%
|
||||
|
||||
# this package (appengine, *not* using unsafe)
|
||||
XXSum64/Func-8 241ns ± 5%
|
||||
XXSum64/Struct-8 243ns ± 6%
|
||||
XXSum64Short/Func-8 15.2ns ± 2%
|
||||
XXSum64Short/Struct-8 23.7ns ± 5%
|
||||
|
||||
CRC64ISO-8 1.23µs ± 1%
|
||||
CRC64ISOString-8 2.71µs ± 4%
|
||||
CRC64ISOShort-8 22.2ns ± 3%
|
||||
|
||||
Fnv64-8 2.34µs ± 1%
|
||||
Fnv64Short-8 74.7ns ± 8%
|
||||
#
|
||||
```
|
||||
|
||||
## Usage
|
||||
|
||||
```go
|
||||
h := xxhash.New64()
|
||||
// r, err := os.Open("......")
|
||||
// defer f.Close()
|
||||
r := strings.NewReader(F)
|
||||
io.Copy(h, r)
|
||||
fmt.Println("xxhash.Backend:", xxhash.Backend)
|
||||
fmt.Println("File checksum:", h.Sum64())
|
||||
```
|
||||
|
||||
[<kbd>playground</kbd>](http://play.golang.org/p/rhRN3RdQyd)
|
||||
|
||||
## TODO
|
||||
|
||||
* Rewrite the 32bit version to be more optimized.
|
||||
* General cleanup as the Go inliner gets smarter.
|
||||
|
||||
## License
|
||||
|
||||
This project is released under the Apache v2. licence. See [LICENCE](LICENCE) for more details.
|
||||
1
vendor/github.com/OneOfOne/xxhash/go.mod
generated
vendored
Normal file
1
vendor/github.com/OneOfOne/xxhash/go.mod
generated
vendored
Normal file
@@ -0,0 +1 @@
|
||||
module github.com/OneOfOne/xxhash
|
||||
189
vendor/github.com/OneOfOne/xxhash/xxhash.go
generated
vendored
Normal file
189
vendor/github.com/OneOfOne/xxhash/xxhash.go
generated
vendored
Normal file
@@ -0,0 +1,189 @@
|
||||
package xxhash
|
||||
|
||||
const (
|
||||
prime32x1 uint32 = 2654435761
|
||||
prime32x2 uint32 = 2246822519
|
||||
prime32x3 uint32 = 3266489917
|
||||
prime32x4 uint32 = 668265263
|
||||
prime32x5 uint32 = 374761393
|
||||
|
||||
prime64x1 uint64 = 11400714785074694791
|
||||
prime64x2 uint64 = 14029467366897019727
|
||||
prime64x3 uint64 = 1609587929392839161
|
||||
prime64x4 uint64 = 9650029242287828579
|
||||
prime64x5 uint64 = 2870177450012600261
|
||||
|
||||
maxInt32 int32 = (1<<31 - 1)
|
||||
|
||||
// precomputed zero Vs for seed 0
|
||||
zero64x1 = 0x60ea27eeadc0b5d6
|
||||
zero64x2 = 0xc2b2ae3d27d4eb4f
|
||||
zero64x3 = 0x0
|
||||
zero64x4 = 0x61c8864e7a143579
|
||||
)
|
||||
|
||||
// Checksum32 returns the checksum of the input data with the seed set to 0.
|
||||
func Checksum32(in []byte) uint32 {
|
||||
return Checksum32S(in, 0)
|
||||
}
|
||||
|
||||
// ChecksumString32 returns the checksum of the input data, without creating a copy, with the seed set to 0.
|
||||
func ChecksumString32(s string) uint32 {
|
||||
return ChecksumString32S(s, 0)
|
||||
}
|
||||
|
||||
type XXHash32 struct {
|
||||
mem [16]byte
|
||||
ln, memIdx int32
|
||||
v1, v2, v3, v4 uint32
|
||||
seed uint32
|
||||
}
|
||||
|
||||
// Size returns the number of bytes Sum will return.
|
||||
func (xx *XXHash32) Size() int {
|
||||
return 4
|
||||
}
|
||||
|
||||
// BlockSize returns the hash's underlying block size.
|
||||
// The Write method must be able to accept any amount
|
||||
// of data, but it may operate more efficiently if all writes
|
||||
// are a multiple of the block size.
|
||||
func (xx *XXHash32) BlockSize() int {
|
||||
return 16
|
||||
}
|
||||
|
||||
// NewS32 creates a new hash.Hash32 computing the 32bit xxHash checksum starting with the specific seed.
|
||||
func NewS32(seed uint32) (xx *XXHash32) {
|
||||
xx = &XXHash32{
|
||||
seed: seed,
|
||||
}
|
||||
xx.Reset()
|
||||
return
|
||||
}
|
||||
|
||||
// New32 creates a new hash.Hash32 computing the 32bit xxHash checksum starting with the seed set to 0.
|
||||
func New32() *XXHash32 {
|
||||
return NewS32(0)
|
||||
}
|
||||
|
||||
func (xx *XXHash32) Reset() {
|
||||
xx.v1 = xx.seed + prime32x1 + prime32x2
|
||||
xx.v2 = xx.seed + prime32x2
|
||||
xx.v3 = xx.seed
|
||||
xx.v4 = xx.seed - prime32x1
|
||||
xx.ln, xx.memIdx = 0, 0
|
||||
}
|
||||
|
||||
// Sum appends the current hash to b and returns the resulting slice.
|
||||
// It does not change the underlying hash state.
|
||||
func (xx *XXHash32) Sum(in []byte) []byte {
|
||||
s := xx.Sum32()
|
||||
return append(in, byte(s>>24), byte(s>>16), byte(s>>8), byte(s))
|
||||
}
|
||||
|
||||
// Checksum64 an alias for Checksum64S(in, 0)
|
||||
func Checksum64(in []byte) uint64 {
|
||||
return Checksum64S(in, 0)
|
||||
}
|
||||
|
||||
// ChecksumString64 returns the checksum of the input data, without creating a copy, with the seed set to 0.
|
||||
func ChecksumString64(s string) uint64 {
|
||||
return ChecksumString64S(s, 0)
|
||||
}
|
||||
|
||||
type XXHash64 struct {
|
||||
v1, v2, v3, v4 uint64
|
||||
seed uint64
|
||||
ln uint64
|
||||
mem [32]byte
|
||||
memIdx int8
|
||||
}
|
||||
|
||||
// Size returns the number of bytes Sum will return.
|
||||
func (xx *XXHash64) Size() int {
|
||||
return 8
|
||||
}
|
||||
|
||||
// BlockSize returns the hash's underlying block size.
|
||||
// The Write method must be able to accept any amount
|
||||
// of data, but it may operate more efficiently if all writes
|
||||
// are a multiple of the block size.
|
||||
func (xx *XXHash64) BlockSize() int {
|
||||
return 32
|
||||
}
|
||||
|
||||
// NewS64 creates a new hash.Hash64 computing the 64bit xxHash checksum starting with the specific seed.
|
||||
func NewS64(seed uint64) (xx *XXHash64) {
|
||||
xx = &XXHash64{
|
||||
seed: seed,
|
||||
}
|
||||
xx.Reset()
|
||||
return
|
||||
}
|
||||
|
||||
// New64 creates a new hash.Hash64 computing the 64bit xxHash checksum starting with the seed set to 0x0.
|
||||
func New64() *XXHash64 {
|
||||
return NewS64(0)
|
||||
}
|
||||
|
||||
func (xx *XXHash64) Reset() {
|
||||
xx.ln, xx.memIdx = 0, 0
|
||||
xx.v1, xx.v2, xx.v3, xx.v4 = resetVs64(xx.seed)
|
||||
}
|
||||
|
||||
// Sum appends the current hash to b and returns the resulting slice.
|
||||
// It does not change the underlying hash state.
|
||||
func (xx *XXHash64) Sum(in []byte) []byte {
|
||||
s := xx.Sum64()
|
||||
return append(in, byte(s>>56), byte(s>>48), byte(s>>40), byte(s>>32), byte(s>>24), byte(s>>16), byte(s>>8), byte(s))
|
||||
}
|
||||
|
||||
// force the compiler to use ROTL instructions
|
||||
|
||||
func rotl32_1(x uint32) uint32 { return (x << 1) | (x >> (32 - 1)) }
|
||||
func rotl32_7(x uint32) uint32 { return (x << 7) | (x >> (32 - 7)) }
|
||||
func rotl32_11(x uint32) uint32 { return (x << 11) | (x >> (32 - 11)) }
|
||||
func rotl32_12(x uint32) uint32 { return (x << 12) | (x >> (32 - 12)) }
|
||||
func rotl32_13(x uint32) uint32 { return (x << 13) | (x >> (32 - 13)) }
|
||||
func rotl32_17(x uint32) uint32 { return (x << 17) | (x >> (32 - 17)) }
|
||||
func rotl32_18(x uint32) uint32 { return (x << 18) | (x >> (32 - 18)) }
|
||||
|
||||
func rotl64_1(x uint64) uint64 { return (x << 1) | (x >> (64 - 1)) }
|
||||
func rotl64_7(x uint64) uint64 { return (x << 7) | (x >> (64 - 7)) }
|
||||
func rotl64_11(x uint64) uint64 { return (x << 11) | (x >> (64 - 11)) }
|
||||
func rotl64_12(x uint64) uint64 { return (x << 12) | (x >> (64 - 12)) }
|
||||
func rotl64_18(x uint64) uint64 { return (x << 18) | (x >> (64 - 18)) }
|
||||
func rotl64_23(x uint64) uint64 { return (x << 23) | (x >> (64 - 23)) }
|
||||
func rotl64_27(x uint64) uint64 { return (x << 27) | (x >> (64 - 27)) }
|
||||
func rotl64_31(x uint64) uint64 { return (x << 31) | (x >> (64 - 31)) }
|
||||
|
||||
func mix64(h uint64) uint64 {
|
||||
h ^= h >> 33
|
||||
h *= prime64x2
|
||||
h ^= h >> 29
|
||||
h *= prime64x3
|
||||
h ^= h >> 32
|
||||
return h
|
||||
}
|
||||
|
||||
func resetVs64(seed uint64) (v1, v2, v3, v4 uint64) {
|
||||
if seed == 0 {
|
||||
return zero64x1, zero64x2, zero64x3, zero64x4
|
||||
}
|
||||
return (seed + prime64x1 + prime64x2), (seed + prime64x2), (seed), (seed - prime64x1)
|
||||
}
|
||||
|
||||
// borrowed from cespare
|
||||
func round64(h, v uint64) uint64 {
|
||||
h += v * prime64x2
|
||||
h = rotl64_31(h)
|
||||
h *= prime64x1
|
||||
return h
|
||||
}
|
||||
|
||||
func mergeRound64(h, v uint64) uint64 {
|
||||
v = round64(0, v)
|
||||
h ^= v
|
||||
h = h*prime64x1 + prime64x4
|
||||
return h
|
||||
}
|
||||
161
vendor/github.com/OneOfOne/xxhash/xxhash_go17.go
generated
vendored
Normal file
161
vendor/github.com/OneOfOne/xxhash/xxhash_go17.go
generated
vendored
Normal file
@@ -0,0 +1,161 @@
|
||||
package xxhash
|
||||
|
||||
func u32(in []byte) uint32 {
|
||||
return uint32(in[0]) | uint32(in[1])<<8 | uint32(in[2])<<16 | uint32(in[3])<<24
|
||||
}
|
||||
|
||||
func u64(in []byte) uint64 {
|
||||
return uint64(in[0]) | uint64(in[1])<<8 | uint64(in[2])<<16 | uint64(in[3])<<24 | uint64(in[4])<<32 | uint64(in[5])<<40 | uint64(in[6])<<48 | uint64(in[7])<<56
|
||||
}
|
||||
|
||||
// Checksum32S returns the checksum of the input bytes with the specific seed.
|
||||
func Checksum32S(in []byte, seed uint32) (h uint32) {
|
||||
var i int
|
||||
|
||||
if len(in) > 15 {
|
||||
var (
|
||||
v1 = seed + prime32x1 + prime32x2
|
||||
v2 = seed + prime32x2
|
||||
v3 = seed + 0
|
||||
v4 = seed - prime32x1
|
||||
)
|
||||
for ; i < len(in)-15; i += 16 {
|
||||
in := in[i : i+16 : len(in)]
|
||||
v1 += u32(in[0:4:len(in)]) * prime32x2
|
||||
v1 = rotl32_13(v1) * prime32x1
|
||||
|
||||
v2 += u32(in[4:8:len(in)]) * prime32x2
|
||||
v2 = rotl32_13(v2) * prime32x1
|
||||
|
||||
v3 += u32(in[8:12:len(in)]) * prime32x2
|
||||
v3 = rotl32_13(v3) * prime32x1
|
||||
|
||||
v4 += u32(in[12:16:len(in)]) * prime32x2
|
||||
v4 = rotl32_13(v4) * prime32x1
|
||||
}
|
||||
|
||||
h = rotl32_1(v1) + rotl32_7(v2) + rotl32_12(v3) + rotl32_18(v4)
|
||||
|
||||
} else {
|
||||
h = seed + prime32x5
|
||||
}
|
||||
|
||||
h += uint32(len(in))
|
||||
for ; i <= len(in)-4; i += 4 {
|
||||
in := in[i : i+4 : len(in)]
|
||||
h += u32(in[0:4:len(in)]) * prime32x3
|
||||
h = rotl32_17(h) * prime32x4
|
||||
}
|
||||
|
||||
for ; i < len(in); i++ {
|
||||
h += uint32(in[i]) * prime32x5
|
||||
h = rotl32_11(h) * prime32x1
|
||||
}
|
||||
|
||||
h ^= h >> 15
|
||||
h *= prime32x2
|
||||
h ^= h >> 13
|
||||
h *= prime32x3
|
||||
h ^= h >> 16
|
||||
|
||||
return
|
||||
}
|
||||
|
||||
func (xx *XXHash32) Write(in []byte) (n int, err error) {
|
||||
i, ml := 0, int(xx.memIdx)
|
||||
n = len(in)
|
||||
xx.ln += int32(n)
|
||||
|
||||
if d := 16 - ml; ml > 0 && ml+len(in) > 16 {
|
||||
xx.memIdx += int32(copy(xx.mem[xx.memIdx:], in[:d]))
|
||||
ml, in = 16, in[d:len(in):len(in)]
|
||||
} else if ml+len(in) < 16 {
|
||||
xx.memIdx += int32(copy(xx.mem[xx.memIdx:], in))
|
||||
return
|
||||
}
|
||||
|
||||
if ml > 0 {
|
||||
i += 16 - ml
|
||||
xx.memIdx += int32(copy(xx.mem[xx.memIdx:len(xx.mem):len(xx.mem)], in))
|
||||
in := xx.mem[:16:len(xx.mem)]
|
||||
|
||||
xx.v1 += u32(in[0:4:len(in)]) * prime32x2
|
||||
xx.v1 = rotl32_13(xx.v1) * prime32x1
|
||||
|
||||
xx.v2 += u32(in[4:8:len(in)]) * prime32x2
|
||||
xx.v2 = rotl32_13(xx.v2) * prime32x1
|
||||
|
||||
xx.v3 += u32(in[8:12:len(in)]) * prime32x2
|
||||
xx.v3 = rotl32_13(xx.v3) * prime32x1
|
||||
|
||||
xx.v4 += u32(in[12:16:len(in)]) * prime32x2
|
||||
xx.v4 = rotl32_13(xx.v4) * prime32x1
|
||||
|
||||
xx.memIdx = 0
|
||||
}
|
||||
|
||||
for ; i <= len(in)-16; i += 16 {
|
||||
in := in[i : i+16 : len(in)]
|
||||
xx.v1 += u32(in[0:4:len(in)]) * prime32x2
|
||||
xx.v1 = rotl32_13(xx.v1) * prime32x1
|
||||
|
||||
xx.v2 += u32(in[4:8:len(in)]) * prime32x2
|
||||
xx.v2 = rotl32_13(xx.v2) * prime32x1
|
||||
|
||||
xx.v3 += u32(in[8:12:len(in)]) * prime32x2
|
||||
xx.v3 = rotl32_13(xx.v3) * prime32x1
|
||||
|
||||
xx.v4 += u32(in[12:16:len(in)]) * prime32x2
|
||||
xx.v4 = rotl32_13(xx.v4) * prime32x1
|
||||
}
|
||||
|
||||
if len(in)-i != 0 {
|
||||
xx.memIdx += int32(copy(xx.mem[xx.memIdx:], in[i:len(in):len(in)]))
|
||||
}
|
||||
|
||||
return
|
||||
}
|
||||
|
||||
func (xx *XXHash32) Sum32() (h uint32) {
|
||||
var i int32
|
||||
if xx.ln > 15 {
|
||||
h = rotl32_1(xx.v1) + rotl32_7(xx.v2) + rotl32_12(xx.v3) + rotl32_18(xx.v4)
|
||||
} else {
|
||||
h = xx.seed + prime32x5
|
||||
}
|
||||
|
||||
h += uint32(xx.ln)
|
||||
|
||||
if xx.memIdx > 0 {
|
||||
for ; i < xx.memIdx-3; i += 4 {
|
||||
in := xx.mem[i : i+4 : len(xx.mem)]
|
||||
h += u32(in[0:4:len(in)]) * prime32x3
|
||||
h = rotl32_17(h) * prime32x4
|
||||
}
|
||||
|
||||
for ; i < xx.memIdx; i++ {
|
||||
h += uint32(xx.mem[i]) * prime32x5
|
||||
h = rotl32_11(h) * prime32x1
|
||||
}
|
||||
}
|
||||
h ^= h >> 15
|
||||
h *= prime32x2
|
||||
h ^= h >> 13
|
||||
h *= prime32x3
|
||||
h ^= h >> 16
|
||||
|
||||
return
|
||||
}
|
||||
|
||||
// Checksum64S returns the 64bit xxhash checksum for a single input
|
||||
func Checksum64S(in []byte, seed uint64) uint64 {
|
||||
if len(in) == 0 && seed == 0 {
|
||||
return 0xef46db3751d8e999
|
||||
}
|
||||
|
||||
if len(in) > 31 {
|
||||
return checksum64(in, seed)
|
||||
}
|
||||
|
||||
return checksum64Short(in, seed)
|
||||
}
|
||||
183
vendor/github.com/OneOfOne/xxhash/xxhash_safe.go
generated
vendored
Normal file
183
vendor/github.com/OneOfOne/xxhash/xxhash_safe.go
generated
vendored
Normal file
@@ -0,0 +1,183 @@
|
||||
// +build appengine safe ppc64le ppc64be mipsle mipsbe
|
||||
|
||||
package xxhash
|
||||
|
||||
// Backend returns the current version of xxhash being used.
|
||||
const Backend = "GoSafe"
|
||||
|
||||
func ChecksumString32S(s string, seed uint32) uint32 {
|
||||
return Checksum32S([]byte(s), seed)
|
||||
}
|
||||
|
||||
func (xx *XXHash32) WriteString(s string) (int, error) {
|
||||
if len(s) == 0 {
|
||||
return 0, nil
|
||||
}
|
||||
return xx.Write([]byte(s))
|
||||
}
|
||||
|
||||
func ChecksumString64S(s string, seed uint64) uint64 {
|
||||
return Checksum64S([]byte(s), seed)
|
||||
}
|
||||
|
||||
func (xx *XXHash64) WriteString(s string) (int, error) {
|
||||
if len(s) == 0 {
|
||||
return 0, nil
|
||||
}
|
||||
return xx.Write([]byte(s))
|
||||
}
|
||||
|
||||
func checksum64(in []byte, seed uint64) (h uint64) {
|
||||
var (
|
||||
v1, v2, v3, v4 = resetVs64(seed)
|
||||
|
||||
i int
|
||||
)
|
||||
|
||||
for ; i < len(in)-31; i += 32 {
|
||||
in := in[i : i+32 : len(in)]
|
||||
v1 = round64(v1, u64(in[0:8:len(in)]))
|
||||
v2 = round64(v2, u64(in[8:16:len(in)]))
|
||||
v3 = round64(v3, u64(in[16:24:len(in)]))
|
||||
v4 = round64(v4, u64(in[24:32:len(in)]))
|
||||
}
|
||||
|
||||
h = rotl64_1(v1) + rotl64_7(v2) + rotl64_12(v3) + rotl64_18(v4)
|
||||
|
||||
h = mergeRound64(h, v1)
|
||||
h = mergeRound64(h, v2)
|
||||
h = mergeRound64(h, v3)
|
||||
h = mergeRound64(h, v4)
|
||||
|
||||
h += uint64(len(in))
|
||||
|
||||
for ; i < len(in)-7; i += 8 {
|
||||
h ^= round64(0, u64(in[i:len(in):len(in)]))
|
||||
h = rotl64_27(h)*prime64x1 + prime64x4
|
||||
}
|
||||
|
||||
for ; i < len(in)-3; i += 4 {
|
||||
h ^= uint64(u32(in[i:len(in):len(in)])) * prime64x1
|
||||
h = rotl64_23(h)*prime64x2 + prime64x3
|
||||
}
|
||||
|
||||
for ; i < len(in); i++ {
|
||||
h ^= uint64(in[i]) * prime64x5
|
||||
h = rotl64_11(h) * prime64x1
|
||||
}
|
||||
|
||||
return mix64(h)
|
||||
}
|
||||
|
||||
func checksum64Short(in []byte, seed uint64) uint64 {
|
||||
var (
|
||||
h = seed + prime64x5 + uint64(len(in))
|
||||
i int
|
||||
)
|
||||
|
||||
for ; i < len(in)-7; i += 8 {
|
||||
k := u64(in[i : i+8 : len(in)])
|
||||
h ^= round64(0, k)
|
||||
h = rotl64_27(h)*prime64x1 + prime64x4
|
||||
}
|
||||
|
||||
for ; i < len(in)-3; i += 4 {
|
||||
h ^= uint64(u32(in[i:i+4:len(in)])) * prime64x1
|
||||
h = rotl64_23(h)*prime64x2 + prime64x3
|
||||
}
|
||||
|
||||
for ; i < len(in); i++ {
|
||||
h ^= uint64(in[i]) * prime64x5
|
||||
h = rotl64_11(h) * prime64x1
|
||||
}
|
||||
|
||||
return mix64(h)
|
||||
}
|
||||
|
||||
func (xx *XXHash64) Write(in []byte) (n int, err error) {
|
||||
var (
|
||||
ml = int(xx.memIdx)
|
||||
d = 32 - ml
|
||||
)
|
||||
|
||||
n = len(in)
|
||||
xx.ln += uint64(n)
|
||||
|
||||
if ml+len(in) < 32 {
|
||||
xx.memIdx += int8(copy(xx.mem[xx.memIdx:len(xx.mem):len(xx.mem)], in))
|
||||
return
|
||||
}
|
||||
|
||||
i, v1, v2, v3, v4 := 0, xx.v1, xx.v2, xx.v3, xx.v4
|
||||
if ml > 0 && ml+len(in) > 32 {
|
||||
xx.memIdx += int8(copy(xx.mem[xx.memIdx:len(xx.mem):len(xx.mem)], in[:d:len(in)]))
|
||||
in = in[d:len(in):len(in)]
|
||||
|
||||
in := xx.mem[0:32:len(xx.mem)]
|
||||
|
||||
v1 = round64(v1, u64(in[0:8:len(in)]))
|
||||
v2 = round64(v2, u64(in[8:16:len(in)]))
|
||||
v3 = round64(v3, u64(in[16:24:len(in)]))
|
||||
v4 = round64(v4, u64(in[24:32:len(in)]))
|
||||
|
||||
xx.memIdx = 0
|
||||
}
|
||||
|
||||
for ; i < len(in)-31; i += 32 {
|
||||
in := in[i : i+32 : len(in)]
|
||||
v1 = round64(v1, u64(in[0:8:len(in)]))
|
||||
v2 = round64(v2, u64(in[8:16:len(in)]))
|
||||
v3 = round64(v3, u64(in[16:24:len(in)]))
|
||||
v4 = round64(v4, u64(in[24:32:len(in)]))
|
||||
}
|
||||
|
||||
if len(in)-i != 0 {
|
||||
xx.memIdx += int8(copy(xx.mem[xx.memIdx:], in[i:len(in):len(in)]))
|
||||
}
|
||||
|
||||
xx.v1, xx.v2, xx.v3, xx.v4 = v1, v2, v3, v4
|
||||
|
||||
return
|
||||
}
|
||||
|
||||
func (xx *XXHash64) Sum64() (h uint64) {
|
||||
var i int
|
||||
if xx.ln > 31 {
|
||||
v1, v2, v3, v4 := xx.v1, xx.v2, xx.v3, xx.v4
|
||||
h = rotl64_1(v1) + rotl64_7(v2) + rotl64_12(v3) + rotl64_18(v4)
|
||||
|
||||
h = mergeRound64(h, v1)
|
||||
h = mergeRound64(h, v2)
|
||||
h = mergeRound64(h, v3)
|
||||
h = mergeRound64(h, v4)
|
||||
} else {
|
||||
h = xx.seed + prime64x5
|
||||
}
|
||||
|
||||
h += uint64(xx.ln)
|
||||
if xx.memIdx > 0 {
|
||||
in := xx.mem[:xx.memIdx]
|
||||
for ; i < int(xx.memIdx)-7; i += 8 {
|
||||
in := in[i : i+8 : len(in)]
|
||||
k := u64(in[0:8:len(in)])
|
||||
k *= prime64x2
|
||||
k = rotl64_31(k)
|
||||
k *= prime64x1
|
||||
h ^= k
|
||||
h = rotl64_27(h)*prime64x1 + prime64x4
|
||||
}
|
||||
|
||||
for ; i < int(xx.memIdx)-3; i += 4 {
|
||||
in := in[i : i+4 : len(in)]
|
||||
h ^= uint64(u32(in[0:4:len(in)])) * prime64x1
|
||||
h = rotl64_23(h)*prime64x2 + prime64x3
|
||||
}
|
||||
|
||||
for ; i < int(xx.memIdx); i++ {
|
||||
h ^= uint64(in[i]) * prime64x5
|
||||
h = rotl64_11(h) * prime64x1
|
||||
}
|
||||
}
|
||||
|
||||
return mix64(h)
|
||||
}
|
||||
238
vendor/github.com/OneOfOne/xxhash/xxhash_unsafe.go
generated
vendored
Normal file
238
vendor/github.com/OneOfOne/xxhash/xxhash_unsafe.go
generated
vendored
Normal file
@@ -0,0 +1,238 @@
|
||||
// +build !safe
|
||||
// +build !appengine
|
||||
// +build !ppc64le
|
||||
// +build !mipsle
|
||||
// +build !ppc64be
|
||||
// +build !mipsbe
|
||||
|
||||
package xxhash
|
||||
|
||||
import (
|
||||
"reflect"
|
||||
"unsafe"
|
||||
)
|
||||
|
||||
// Backend returns the current version of xxhash being used.
|
||||
const Backend = "GoUnsafe"
|
||||
|
||||
// ChecksumString32S returns the checksum of the input data, without creating a copy, with the specific seed.
|
||||
func ChecksumString32S(s string, seed uint32) uint32 {
|
||||
if len(s) == 0 {
|
||||
return Checksum32S(nil, seed)
|
||||
}
|
||||
ss := (*reflect.StringHeader)(unsafe.Pointer(&s))
|
||||
return Checksum32S((*[maxInt32]byte)(unsafe.Pointer(ss.Data))[:len(s):len(s)], seed)
|
||||
}
|
||||
|
||||
func (xx *XXHash32) WriteString(s string) (int, error) {
|
||||
if len(s) == 0 {
|
||||
return 0, nil
|
||||
}
|
||||
|
||||
ss := (*reflect.StringHeader)(unsafe.Pointer(&s))
|
||||
return xx.Write((*[maxInt32]byte)(unsafe.Pointer(ss.Data))[:len(s):len(s)])
|
||||
}
|
||||
|
||||
// ChecksumString64S returns the checksum of the input data, without creating a copy, with the specific seed.
|
||||
func ChecksumString64S(s string, seed uint64) uint64 {
|
||||
if len(s) == 0 {
|
||||
return Checksum64S(nil, seed)
|
||||
}
|
||||
|
||||
ss := (*reflect.StringHeader)(unsafe.Pointer(&s))
|
||||
return Checksum64S((*[maxInt32]byte)(unsafe.Pointer(ss.Data))[:len(s):len(s)], seed)
|
||||
}
|
||||
|
||||
func (xx *XXHash64) WriteString(s string) (int, error) {
|
||||
if len(s) == 0 {
|
||||
return 0, nil
|
||||
}
|
||||
ss := (*reflect.StringHeader)(unsafe.Pointer(&s))
|
||||
return xx.Write((*[maxInt32]byte)(unsafe.Pointer(ss.Data))[:len(s)])
|
||||
}
|
||||
|
||||
// checksum64 computes the xxHash64 of in (len(in) > 31) with the given
// seed, reading the input as aligned-size uint64 words via unsafe to
// avoid per-word byte decoding.
func checksum64(in []byte, seed uint64) uint64 {
	var (
		// Reinterpret the input as a []uint64 over the same memory.
		wordsLen = len(in) >> 3
		words    = ((*[maxInt32 / 8]uint64)(unsafe.Pointer(&in[0])))[:wordsLen:wordsLen]

		h uint64 = prime64x5

		v1, v2, v3, v4 = resetVs64(seed)

		i int
	)

	// Main loop: consume 32 bytes (4 words) per iteration across the
	// four accumulator lanes.
	for ; i < len(words)-3; i += 4 {
		words := (*[4]uint64)(unsafe.Pointer(&words[i]))

		v1 = round64(v1, words[0])
		v2 = round64(v2, words[1])
		v3 = round64(v3, words[2])
		v4 = round64(v4, words[3])
	}

	// Merge the lanes into the running hash.
	h = rotl64_1(v1) + rotl64_7(v2) + rotl64_12(v3) + rotl64_18(v4)

	h = mergeRound64(h, v1)
	h = mergeRound64(h, v2)
	h = mergeRound64(h, v3)
	h = mergeRound64(h, v4)

	h += uint64(len(in))

	// Leftover full 8-byte words that didn't fill a 32-byte block.
	for _, k := range words[i:] {
		h ^= round64(0, k)
		h = rotl64_27(h)*prime64x1 + prime64x4
	}

	// Optional trailing 4-byte word.
	if in = in[wordsLen<<3 : len(in) : len(in)]; len(in) > 3 {
		words := (*[1]uint32)(unsafe.Pointer(&in[0]))
		h ^= uint64(words[0]) * prime64x1
		h = rotl64_23(h)*prime64x2 + prime64x3

		in = in[4:len(in):len(in)]
	}

	// Remaining 0-3 bytes.
	for _, b := range in {
		h ^= uint64(b) * prime64x5
		h = rotl64_11(h) * prime64x1
	}

	return mix64(h)
}
|
||||
|
||||
// checksum64Short computes xxHash64 for inputs of at most 31 bytes,
// which skip the four-lane accumulator phase entirely.
func checksum64Short(in []byte, seed uint64) uint64 {
	var (
		h = seed + prime64x5 + uint64(len(in))
		i int
	)

	if len(in) > 7 {
		// Consume full 8-byte words via an unsafe []uint64 view.
		var (
			wordsLen = len(in) >> 3
			words    = ((*[maxInt32 / 8]uint64)(unsafe.Pointer(&in[0])))[:wordsLen:wordsLen]
		)

		for i := range words {
			h ^= round64(0, words[i])
			h = rotl64_27(h)*prime64x1 + prime64x4
		}

		// Bytes consumed so far (outer i, unlike the shadowed loop var).
		i = wordsLen << 3
	}

	// Optional trailing 4-byte word.
	if in = in[i:len(in):len(in)]; len(in) > 3 {
		words := (*[1]uint32)(unsafe.Pointer(&in[0]))
		h ^= uint64(words[0]) * prime64x1
		h = rotl64_23(h)*prime64x2 + prime64x3

		in = in[4:len(in):len(in)]
	}

	// Remaining 0-3 bytes.
	for _, b := range in {
		h ^= uint64(b) * prime64x5
		h = rotl64_11(h) * prime64x1
	}

	return mix64(h)
}
|
||||
|
||||
// Write adds in to the running hash state, buffering any tail shorter
// than the 32-byte block size in xx.mem. It always returns len(in), nil.
func (xx *XXHash64) Write(in []byte) (n int, err error) {
	mem, idx := xx.mem[:], int(xx.memIdx)

	xx.ln, n = xx.ln+uint64(len(in)), len(in)

	// Not enough data for a full block yet: just buffer it.
	if idx+len(in) < 32 {
		xx.memIdx += int8(copy(mem[idx:len(mem):len(mem)], in))
		return
	}

	var (
		v1, v2, v3, v4 = xx.v1, xx.v2, xx.v3, xx.v4

		i int
	)

	// If there is buffered data, top the buffer up to 32 bytes and
	// consume it as one block. copy stops at the buffer's end, so only
	// d bytes of in are taken here.
	if d := 32 - int(idx); d > 0 && int(idx)+len(in) > 31 {
		copy(mem[idx:len(mem):len(mem)], in[:len(in):len(in)])

		words := (*[4]uint64)(unsafe.Pointer(&mem[0]))

		v1 = round64(v1, words[0])
		v2 = round64(v2, words[1])
		v3 = round64(v3, words[2])
		v4 = round64(v4, words[3])

		if in, xx.memIdx = in[d:len(in):len(in)], 0; len(in) == 0 {
			goto RET
		}
	}

	// Bulk path: process 32 bytes per iteration directly from in.
	for ; i < len(in)-31; i += 32 {
		words := (*[4]uint64)(unsafe.Pointer(&in[i]))

		v1 = round64(v1, words[0])
		v2 = round64(v2, words[1])
		v3 = round64(v3, words[2])
		v4 = round64(v4, words[3])
	}

	// Stash the remaining <32-byte tail for the next Write/Sum64.
	if len(in)-i != 0 {
		xx.memIdx += int8(copy(mem[xx.memIdx:len(mem):len(mem)], in[i:len(in):len(in)]))
	}

RET:
	xx.v1, xx.v2, xx.v3, xx.v4 = v1, v2, v3, v4

	return
}
|
||||
|
||||
// Sum64 finalizes the streaming state and returns the 64-bit digest.
// The hasher state is left intact.
func (xx *XXHash64) Sum64() (h uint64) {
	if seed := xx.seed; xx.ln > 31 {
		// A full block was seen: fold the four lane accumulators.
		v1, v2, v3, v4 := xx.v1, xx.v2, xx.v3, xx.v4
		h = rotl64_1(v1) + rotl64_7(v2) + rotl64_12(v3) + rotl64_18(v4)

		h = mergeRound64(h, v1)
		h = mergeRound64(h, v2)
		h = mergeRound64(h, v3)
		h = mergeRound64(h, v4)
	} else if seed == 0 {
		h = prime64x5
	} else {
		h = seed + prime64x5
	}

	h += uint64(xx.ln)

	// Nothing buffered: straight to the avalanche.
	if xx.memIdx == 0 {
		return mix64(h)
	}

	var (
		// View the buffered tail as 8-byte words via unsafe.
		in       = xx.mem[:xx.memIdx:xx.memIdx]
		wordsLen = len(in) >> 3
		words    = ((*[maxInt32 / 8]uint64)(unsafe.Pointer(&in[0])))[:wordsLen:wordsLen]
	)

	for _, k := range words {
		h ^= round64(0, k)
		h = rotl64_27(h)*prime64x1 + prime64x4
	}

	// Optional trailing 4-byte word.
	if in = in[wordsLen<<3 : len(in) : len(in)]; len(in) > 3 {
		words := (*[1]uint32)(unsafe.Pointer(&in[0]))

		h ^= uint64(words[0]) * prime64x1
		h = rotl64_23(h)*prime64x2 + prime64x3

		in = in[4:len(in):len(in)]
	}

	// Remaining 0-3 bytes.
	for _, b := range in {
		h ^= uint64(b) * prime64x5
		h = rotl64_11(h) * prime64x1
	}

	return mix64(h)
}
|
||||
294
vendor/github.com/globalsign/mgo/bson/bson_corpus_spec_test_generator.go
generated
vendored
Normal file
294
vendor/github.com/globalsign/mgo/bson/bson_corpus_spec_test_generator.go
generated
vendored
Normal file
@@ -0,0 +1,294 @@
|
||||
// +build ignore
|
||||
|
||||
package main
|
||||
|
||||
import (
|
||||
"bytes"
|
||||
"fmt"
|
||||
"go/format"
|
||||
"html/template"
|
||||
"io/ioutil"
|
||||
"log"
|
||||
"path/filepath"
|
||||
"strings"
|
||||
|
||||
"github.com/globalsign/mgo/internal/json"
|
||||
)
|
||||
|
||||
func main() {
|
||||
log.SetFlags(0)
|
||||
log.SetPrefix(name + ": ")
|
||||
|
||||
var g Generator
|
||||
|
||||
fmt.Fprintf(&g, "// Code generated by \"%s.go\"; DO NOT EDIT\n\n", name)
|
||||
|
||||
src := g.generate()
|
||||
|
||||
err := ioutil.WriteFile(fmt.Sprintf("%s.go", strings.TrimSuffix(name, "_generator")), src, 0644)
|
||||
if err != nil {
|
||||
log.Fatalf("writing output: %s", err)
|
||||
}
|
||||
}
|
||||
|
||||
// Generator holds the state of the analysis. Primarily used to buffer
|
||||
// the output for format.Source.
|
||||
type Generator struct {
|
||||
bytes.Buffer // Accumulated output.
|
||||
}
|
||||
|
||||
// format returns the gofmt-ed contents of the Generator's buffer.
|
||||
func (g *Generator) format() []byte {
|
||||
src, err := format.Source(g.Bytes())
|
||||
if err != nil {
|
||||
// Should never happen, but can arise when developing this code.
|
||||
// The user can compile the output to see the error.
|
||||
log.Printf("warning: internal error: invalid Go generated: %s", err)
|
||||
log.Printf("warning: compile the package to analyze the error")
|
||||
return g.Bytes()
|
||||
}
|
||||
return src
|
||||
}
|
||||
|
||||
// EVERYTHING ABOVE IS CONSTANT BETWEEN THE GENERATORS
|
||||
|
||||
const name = "bson_corpus_spec_test_generator"
|
||||
|
||||
func (g *Generator) generate() []byte {
|
||||
|
||||
testFiles, err := filepath.Glob("./specdata/specifications/source/bson-corpus/tests/*.json")
|
||||
if err != nil {
|
||||
log.Fatalf("error reading bson-corpus files: %s", err)
|
||||
}
|
||||
|
||||
tests, err := g.loadTests(testFiles)
|
||||
if err != nil {
|
||||
log.Fatalf("error loading tests: %s", err)
|
||||
}
|
||||
|
||||
tmpl, err := g.getTemplate()
|
||||
if err != nil {
|
||||
log.Fatalf("error loading template: %s", err)
|
||||
}
|
||||
|
||||
tmpl.Execute(&g.Buffer, tests)
|
||||
|
||||
return g.format()
|
||||
}
|
||||
|
||||
func (g *Generator) loadTests(filenames []string) ([]*testDef, error) {
|
||||
var tests []*testDef
|
||||
for _, filename := range filenames {
|
||||
test, err := g.loadTest(filename)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
tests = append(tests, test)
|
||||
}
|
||||
|
||||
return tests, nil
|
||||
}
|
||||
|
||||
func (g *Generator) loadTest(filename string) (*testDef, error) {
|
||||
content, err := ioutil.ReadFile(filename)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
var testDef testDef
|
||||
err = json.Unmarshal(content, &testDef)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
names := make(map[string]struct{})
|
||||
|
||||
for i := len(testDef.Valid) - 1; i >= 0; i-- {
|
||||
if testDef.BsonType == "0x05" && testDef.Valid[i].Description == "subtype 0x02" {
|
||||
testDef.Valid = append(testDef.Valid[:i], testDef.Valid[i+1:]...)
|
||||
continue
|
||||
}
|
||||
|
||||
name := cleanupFuncName(testDef.Description + "_" + testDef.Valid[i].Description)
|
||||
nameIdx := name
|
||||
j := 1
|
||||
for {
|
||||
if _, ok := names[nameIdx]; !ok {
|
||||
break
|
||||
}
|
||||
|
||||
nameIdx = fmt.Sprintf("%s_%d", name, j)
|
||||
}
|
||||
|
||||
names[nameIdx] = struct{}{}
|
||||
|
||||
testDef.Valid[i].TestDef = &testDef
|
||||
testDef.Valid[i].Name = nameIdx
|
||||
testDef.Valid[i].StructTest = testDef.TestKey != "" &&
|
||||
(testDef.BsonType != "0x05" || strings.Contains(testDef.Valid[i].Description, "0x00")) &&
|
||||
!testDef.Deprecated
|
||||
}
|
||||
|
||||
for i := len(testDef.DecodeErrors) - 1; i >= 0; i-- {
|
||||
if strings.Contains(testDef.DecodeErrors[i].Description, "UTF-8") {
|
||||
testDef.DecodeErrors = append(testDef.DecodeErrors[:i], testDef.DecodeErrors[i+1:]...)
|
||||
continue
|
||||
}
|
||||
|
||||
name := cleanupFuncName(testDef.Description + "_" + testDef.DecodeErrors[i].Description)
|
||||
nameIdx := name
|
||||
j := 1
|
||||
for {
|
||||
if _, ok := names[nameIdx]; !ok {
|
||||
break
|
||||
}
|
||||
|
||||
nameIdx = fmt.Sprintf("%s_%d", name, j)
|
||||
}
|
||||
names[nameIdx] = struct{}{}
|
||||
|
||||
testDef.DecodeErrors[i].Name = nameIdx
|
||||
}
|
||||
|
||||
return &testDef, nil
|
||||
}
|
||||
|
||||
// getTemplate parses and returns the text/template used to render the
// generated test file: helper functions plus one test per Valid case
// and one per DecodeErrors case.
func (g *Generator) getTemplate() (*template.Template, error) {
	content := `package bson_test

import (
	"encoding/hex"
	"time"

	. "gopkg.in/check.v1"
	"github.com/globalsign/mgo/bson"
)

func testValid(c *C, in []byte, expected []byte, result interface{}) {
	err := bson.Unmarshal(in, result)
	c.Assert(err, IsNil)

	out, err := bson.Marshal(result)
	c.Assert(err, IsNil)

	c.Assert(string(expected), Equals, string(out), Commentf("roundtrip failed for %T, expected '%x' but got '%x'", result, expected, out))
}

func testDecodeSkip(c *C, in []byte) {
	err := bson.Unmarshal(in, &struct{}{})
	c.Assert(err, IsNil)
}

func testDecodeError(c *C, in []byte, result interface{}) {
	err := bson.Unmarshal(in, result)
	c.Assert(err, Not(IsNil))
}

{{range .}}
{{range .Valid}}
func (s *S) Test{{.Name}}(c *C) {
	b, err := hex.DecodeString("{{.Bson}}")
	c.Assert(err, IsNil)

	{{if .CanonicalBson}}
	cb, err := hex.DecodeString("{{.CanonicalBson}}")
	c.Assert(err, IsNil)
	{{else}}
	cb := b
	{{end}}

	var resultD bson.D
	testValid(c, b, cb, &resultD)
	{{if .StructTest}}var resultS struct {
		Element {{.TestDef.GoType}} ` + "`bson:\"{{.TestDef.TestKey}}\"`" + `
	}
	testValid(c, b, cb, &resultS){{end}}

	testDecodeSkip(c, b)
}
{{end}}

{{range .DecodeErrors}}
func (s *S) Test{{.Name}}(c *C) {
	b, err := hex.DecodeString("{{.Bson}}")
	c.Assert(err, IsNil)

	var resultD bson.D
	testDecodeError(c, b, &resultD)
}
{{end}}
{{end}}
`
	tmpl, err := template.New("").Parse(content)
	if err != nil {
		return nil, err
	}
	return tmpl, nil
}
|
||||
|
||||
// cleanupFuncName makes name safe for use in a Go test identifier by
// replacing every character outside [0-9A-Za-z] with an underscore.
func cleanupFuncName(name string) string {
	sanitize := func(r rune) rune {
		switch {
		case r >= '0' && r <= '9',
			r >= 'A' && r <= 'Z',
			r >= 'a' && r <= 'z':
			return r
		default:
			return '_'
		}
	}
	return strings.Map(sanitize, name)
}
|
||||
|
||||
// testDef mirrors one bson-corpus spec JSON document: a BSON type under
// test plus its valid round-trip cases and decode-error cases.
type testDef struct {
	Description  string         `json:"description"`
	BsonType     string         `json:"bson_type"`
	TestKey      string         `json:"test_key"`
	Valid        []*valid       `json:"valid"`
	DecodeErrors []*decodeError `json:"decodeErrors"`
	Deprecated   bool           `json:"deprecated"`
}
|
||||
|
||||
func (t *testDef) GoType() string {
|
||||
switch t.BsonType {
|
||||
case "0x01":
|
||||
return "float64"
|
||||
case "0x02":
|
||||
return "string"
|
||||
case "0x03":
|
||||
return "bson.D"
|
||||
case "0x04":
|
||||
return "[]interface{}"
|
||||
case "0x05":
|
||||
return "[]byte"
|
||||
case "0x07":
|
||||
return "bson.ObjectId"
|
||||
case "0x08":
|
||||
return "bool"
|
||||
case "0x09":
|
||||
return "time.Time"
|
||||
case "0x0E":
|
||||
return "string"
|
||||
case "0x10":
|
||||
return "int32"
|
||||
case "0x12":
|
||||
return "int64"
|
||||
case "0x13":
|
||||
return "bson.Decimal"
|
||||
default:
|
||||
return "interface{}"
|
||||
}
|
||||
}
|
||||
|
||||
// valid is one round-trippable corpus case: hex-encoded BSON input and,
// optionally, its canonical form.
type valid struct {
	Description   string `json:"description"`
	Bson          string `json:"bson"`
	CanonicalBson string `json:"canonical_bson"`

	// The fields below are populated by the generator, not the JSON.
	Name       string
	StructTest bool
	TestDef    *testDef
}
|
||||
|
||||
// decodeError is one corpus case whose hex-encoded BSON must fail to
// unmarshal.
type decodeError struct {
	Description string `json:"description"`
	Bson        string `json:"bson"`

	// Name is assigned by the generator, not the JSON.
	Name string
}
|
||||
8
vendor/github.com/gobwas/glob/.gitignore
generated
vendored
Normal file
8
vendor/github.com/gobwas/glob/.gitignore
generated
vendored
Normal file
@@ -0,0 +1,8 @@
|
||||
glob.iml
|
||||
.idea
|
||||
*.cpu
|
||||
*.mem
|
||||
*.test
|
||||
*.dot
|
||||
*.png
|
||||
*.svg
|
||||
9
vendor/github.com/gobwas/glob/.travis.yml
generated
vendored
Normal file
9
vendor/github.com/gobwas/glob/.travis.yml
generated
vendored
Normal file
@@ -0,0 +1,9 @@
|
||||
sudo: false
|
||||
|
||||
language: go
|
||||
|
||||
go:
|
||||
- 1.5.3
|
||||
|
||||
script:
|
||||
- go test -v ./...
|
||||
21
vendor/github.com/gobwas/glob/LICENSE
generated
vendored
Normal file
21
vendor/github.com/gobwas/glob/LICENSE
generated
vendored
Normal file
@@ -0,0 +1,21 @@
|
||||
The MIT License (MIT)
|
||||
|
||||
Copyright (c) 2016 Sergey Kamardin
|
||||
|
||||
Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||
of this software and associated documentation files (the "Software"), to deal
|
||||
in the Software without restriction, including without limitation the rights
|
||||
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
||||
copies of the Software, and to permit persons to whom the Software is
|
||||
furnished to do so, subject to the following conditions:
|
||||
|
||||
The above copyright notice and this permission notice shall be included in all
|
||||
copies or substantial portions of the Software.
|
||||
|
||||
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
||||
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
|
||||
SOFTWARE.
|
||||
26
vendor/github.com/gobwas/glob/bench.sh
generated
vendored
Normal file
26
vendor/github.com/gobwas/glob/bench.sh
generated
vendored
Normal file
@@ -0,0 +1,26 @@
|
||||
#! /bin/bash

# bench <git-ref> <bench-pattern>
# Runs the benchmarks matching $2 at git ref $1, caching results in
# /tmp/<ref>-<pattern>.bench; cached refs are not re-run.
bench() {
    filename="/tmp/$1-$2.bench"
    # Fix: these interpolations were garbled to "$(unknown)" (a command
    # substitution of a nonexistent command); they must reference the
    # filename variable defined above.
    if test -e "${filename}";
    then
        echo "Already exists ${filename}"
    else
        backup=`git rev-parse --abbrev-ref HEAD`
        git checkout $1
        echo -n "Creating ${filename}... "
        go test ./... -run=NONE -bench=$2 > "${filename}" -benchmem
        echo "OK"
        git checkout ${backup}
        sleep 5
    fi
}


to=$1
current=`git rev-parse --abbrev-ref HEAD`

# Benchmark the target ref and the current branch, then compare.
bench ${to} $2
bench ${current} $2

benchcmp $3 "/tmp/${to}-$2.bench" "/tmp/${current}-$2.bench"
|
||||
525
vendor/github.com/gobwas/glob/compiler/compiler.go
generated
vendored
Normal file
525
vendor/github.com/gobwas/glob/compiler/compiler.go
generated
vendored
Normal file
@@ -0,0 +1,525 @@
|
||||
package compiler
|
||||
|
||||
// TODO use constructor with all matchers, and to their structs private
|
||||
// TODO glue multiple Text nodes (like after QuoteMeta)
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"reflect"
|
||||
|
||||
"github.com/gobwas/glob/match"
|
||||
"github.com/gobwas/glob/syntax/ast"
|
||||
"github.com/gobwas/glob/util/runes"
|
||||
)
|
||||
|
||||
// optimizeMatcher rewrites a matcher into a cheaper specialized form
// where one exists; matchers with no cheaper form are returned as-is.
func optimizeMatcher(matcher match.Matcher) match.Matcher {
	switch m := matcher.(type) {

	case match.Any:
		// `*` with no separators matches any sequence at all.
		if len(m.Separators) == 0 {
			return match.NewSuper()
		}

	case match.AnyOf:
		// A single-alternative AnyOf is just that alternative.
		if len(m.Matchers) == 1 {
			return m.Matchers[0]
		}

		return m

	case match.List:
		// A non-negated single-rune list is a plain text match.
		if m.Not == false && len(m.List) == 1 {
			return match.NewText(string(m.List))
		}

		return m

	case match.BTree:
		// Optimize children first, then try to collapse the whole tree
		// when its value is literal text.
		m.Left = optimizeMatcher(m.Left)
		m.Right = optimizeMatcher(m.Right)

		r, ok := m.Value.(match.Text)
		if !ok {
			return m
		}

		var (
			leftNil  = m.Left == nil
			rightNil = m.Right == nil
		)
		if leftNil && rightNil {
			return match.NewText(r.Str)
		}

		_, leftSuper := m.Left.(match.Super)
		lp, leftPrefix := m.Left.(match.Prefix)
		la, leftAny := m.Left.(match.Any)

		_, rightSuper := m.Right.(match.Super)
		rs, rightSuffix := m.Right.(match.Suffix)
		ra, rightAny := m.Right.(match.Any)

		// Collapse "<left> text <right>" into a single dedicated
		// matcher when the wings are super/any/absent.
		switch {
		case leftSuper && rightSuper:
			return match.NewContains(r.Str, false)

		case leftSuper && rightNil:
			return match.NewSuffix(r.Str)

		case rightSuper && leftNil:
			return match.NewPrefix(r.Str)

		case leftNil && rightSuffix:
			return match.NewPrefixSuffix(r.Str, rs.Suffix)

		case rightNil && leftPrefix:
			return match.NewPrefixSuffix(lp.Prefix, r.Str)

		case rightNil && leftAny:
			return match.NewSuffixAny(r.Str, la.Separators)

		case leftNil && rightAny:
			return match.NewPrefixAny(r.Str, ra.Separators)
		}

		return m
	}

	return matcher
}
|
||||
|
||||
// compileMatchers combines a sequence of matchers into one. It first
// tries to glue the whole sequence; otherwise it anchors a BTree on the
// longest fixed-length matcher and recursively compiles the two sides.
func compileMatchers(matchers []match.Matcher) (match.Matcher, error) {
	if len(matchers) == 0 {
		return nil, fmt.Errorf("compile error: need at least one matcher")
	}
	if len(matchers) == 1 {
		return matchers[0], nil
	}
	if m := glueMatchers(matchers); m != nil {
		return m, nil
	}

	// Pick the rightmost matcher with the greatest known (static) length
	// as the BTree pivot; Len() == -1 means variable length.
	idx := -1
	maxLen := -1
	var val match.Matcher
	for i, matcher := range matchers {
		if l := matcher.Len(); l != -1 && l >= maxLen {
			maxLen = l
			idx = i
			val = matcher
		}
	}

	if val == nil { // not found matcher with static length
		r, err := compileMatchers(matchers[1:])
		if err != nil {
			return nil, err
		}
		return match.NewBTree(matchers[0], nil, r), nil
	}

	left := matchers[:idx]
	var right []match.Matcher
	if len(matchers) > idx+1 {
		right = matchers[idx+1:]
	}

	var l, r match.Matcher
	var err error
	if len(left) > 0 {
		l, err = compileMatchers(left)
		if err != nil {
			return nil, err
		}
	}

	if len(right) > 0 {
		r, err = compileMatchers(right)
		if err != nil {
			return nil, err
		}
	}

	return match.NewBTree(val, l, r), nil
}
|
||||
|
||||
func glueMatchers(matchers []match.Matcher) match.Matcher {
|
||||
if m := glueMatchersAsEvery(matchers); m != nil {
|
||||
return m
|
||||
}
|
||||
if m := glueMatchersAsRow(matchers); m != nil {
|
||||
return m
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
func glueMatchersAsRow(matchers []match.Matcher) match.Matcher {
|
||||
if len(matchers) <= 1 {
|
||||
return nil
|
||||
}
|
||||
|
||||
var (
|
||||
c []match.Matcher
|
||||
l int
|
||||
)
|
||||
for _, matcher := range matchers {
|
||||
if ml := matcher.Len(); ml == -1 {
|
||||
return nil
|
||||
} else {
|
||||
c = append(c, matcher)
|
||||
l += ml
|
||||
}
|
||||
}
|
||||
return match.NewRow(l, c...)
|
||||
}
|
||||
|
||||
// glueMatchersAsEvery fuses a run of wildcard-like matchers
// (Super/Any/Single/negated List) that all share one separator set into
// a single equivalent matcher (Super, Any, Min, or an EveryOf
// combination). It returns nil when the run is not fusable.
func glueMatchersAsEvery(matchers []match.Matcher) match.Matcher {
	if len(matchers) <= 1 {
		return nil
	}

	var (
		hasAny    bool
		hasSuper  bool
		hasSingle bool
		min       int    // minimum number of characters the run must consume
		separator []rune // separator set every element must share
	)

	for i, matcher := range matchers {
		var sep []rune

		switch m := matcher.(type) {
		case match.Super:
			sep = []rune{}
			hasSuper = true

		case match.Any:
			sep = m.Separators
			hasAny = true

		case match.Single:
			sep = m.Separators
			hasSingle = true
			min++

		case match.List:
			// Only negated lists behave like "one char not in set".
			if !m.Not {
				return nil
			}
			sep = m.List
			hasSingle = true
			min++

		default:
			return nil
		}

		// initialize
		if i == 0 {
			separator = sep
		}

		if runes.Equal(sep, separator) {
			continue
		}

		// Mixed separator sets cannot be fused.
		return nil
	}

	if hasSuper && !hasAny && !hasSingle {
		return match.NewSuper()
	}

	if hasAny && !hasSuper && !hasSingle {
		return match.NewAny(separator)
	}

	if (hasAny || hasSuper) && min > 0 && len(separator) == 0 {
		return match.NewMin(min)
	}

	// General case: combine min/max length bounds and separator
	// exclusion into an EveryOf conjunction.
	every := match.NewEveryOf()

	if min > 0 {
		every.Add(match.NewMin(min))

		if !hasAny && !hasSuper {
			// Only Singles/Lists: the run consumes exactly min chars.
			every.Add(match.NewMax(min))
		}
	}

	if len(separator) > 0 {
		every.Add(match.NewContains(string(separator), true))
	}

	return every
}
|
||||
|
||||
// minimizeMatchers repeatedly replaces the best glueable sub-range of
// matchers with its fused form until no further fusion is possible.
// "Best" prefers longer fused length, then wider ranges.
func minimizeMatchers(matchers []match.Matcher) []match.Matcher {
	var done match.Matcher
	var left, right, count int

	// Try every contiguous sub-range [l, r).
	for l := 0; l < len(matchers); l++ {
		for r := len(matchers); r > l; r-- {
			if glued := glueMatchers(matchers[l:r]); glued != nil {
				var swap bool

				if done == nil {
					swap = true
				} else {
					// Prefer a longer static length, or a wider range.
					cl, gl := done.Len(), glued.Len()
					swap = cl > -1 && gl > -1 && gl > cl
					swap = swap || count < r-l
				}

				if swap {
					done = glued
					left = l
					right = r
					count = r - l
				}
			}
		}
	}

	if done == nil {
		return matchers
	}

	// Splice the fused matcher in place of [left, right).
	next := append(append([]match.Matcher{}, matchers[:left]...), done)
	if right < len(matchers) {
		next = append(next, matchers[right:]...)
	}

	if len(next) == len(matchers) {
		return next
	}

	// The list shrank; try to fuse again.
	return minimizeMatchers(next)
}
|
||||
|
||||
// minimizeAnyOf tries to apply some heuristics to minimize number of nodes in given tree
|
||||
func minimizeTree(tree *ast.Node) *ast.Node {
|
||||
switch tree.Kind {
|
||||
case ast.KindAnyOf:
|
||||
return minimizeTreeAnyOf(tree)
|
||||
default:
|
||||
return nil
|
||||
}
|
||||
}
|
||||
|
||||
// minimizeTreeAnyOf tries to find common children of the given AnyOf
// node's pattern alternatives. It searches for common children from the
// left and from the right; if any are found it returns a new optimized
// ast tree (common prefix + reduced AnyOf + common suffix), else nil.
func minimizeTreeAnyOf(tree *ast.Node) *ast.Node {
	if !areOfSameKind(tree.Children, ast.KindPattern) {
		return nil
	}

	commonLeft, commonRight := commonChildren(tree.Children)
	commonLeftCount, commonRightCount := len(commonLeft), len(commonRight)
	if commonLeftCount == 0 && commonRightCount == 0 { // there are no common parts
		return nil
	}

	var result []*ast.Node
	if commonLeftCount > 0 {
		result = append(result, ast.NewNode(ast.KindPattern, nil, commonLeft...))
	}

	// Rebuild each alternative minus the shared prefix/suffix,
	// deduplicating identical remainders.
	var anyOf []*ast.Node
	for _, child := range tree.Children {
		reuse := child.Children[commonLeftCount : len(child.Children)-commonRightCount]
		var node *ast.Node
		if len(reuse) == 0 {
			// this pattern is completely reduced by commonLeft and commonRight patterns
			// so it become nothing
			node = ast.NewNode(ast.KindNothing, nil)
		} else {
			node = ast.NewNode(ast.KindPattern, nil, reuse...)
		}
		anyOf = appendIfUnique(anyOf, node)
	}
	switch {
	case len(anyOf) == 1 && anyOf[0].Kind != ast.KindNothing:
		// Exactly one distinct remainder: no AnyOf needed.
		result = append(result, anyOf[0])
	case len(anyOf) > 1:
		result = append(result, ast.NewNode(ast.KindAnyOf, nil, anyOf...))
	}

	if commonRightCount > 0 {
		result = append(result, ast.NewNode(ast.KindPattern, nil, commonRight...))
	}

	return ast.NewNode(ast.KindPattern, nil, result...)
}
|
||||
|
||||
// commonChildren returns the longest runs of child nodes shared by ALL
// of the given nodes at their left edge (commonLeft) and right edge
// (commonRight). Both results are empty when fewer than two nodes are
// given or no edge children match across every node.
func commonChildren(nodes []*ast.Node) (commonLeft, commonRight []*ast.Node) {
	if len(nodes) <= 1 {
		return
	}

	// find node that has least number of children
	idx := leastChildren(nodes)
	if idx == -1 {
		return
	}
	tree := nodes[idx]
	treeLength := len(tree.Children)

	// allocate max able size for rightCommon slice
	// to get ability insert elements in reverse order (from end to start)
	// without sorting
	commonRight = make([]*ast.Node, treeLength)
	lastRight := treeLength // will use this to get results as commonRight[lastRight:]

	var (
		breakLeft   bool
		breakRight  bool
		commonTotal int
	)
	// Walk inward from both ends of the shortest node simultaneously,
	// stopping each direction at its first mismatch.
	for i, j := 0, treeLength-1; commonTotal < treeLength && j >= 0 && !(breakLeft && breakRight); i, j = i+1, j-1 {
		treeLeft := tree.Children[i]
		treeRight := tree.Children[j]

		for k := 0; k < len(nodes) && !(breakLeft && breakRight); k++ {
			// skip least children node
			if k == idx {
				continue
			}

			restLeft := nodes[k].Children[i]
			// Right-edge positions are aligned from each node's own end.
			restRight := nodes[k].Children[j+len(nodes[k].Children)-treeLength]

			breakLeft = breakLeft || !treeLeft.Equal(restLeft)

			// disable searching for right common parts, if left part is already overlapping
			breakRight = breakRight || (!breakLeft && j <= i)
			breakRight = breakRight || !treeRight.Equal(restRight)
		}

		if !breakLeft {
			commonTotal++
			commonLeft = append(commonLeft, treeLeft)
		}
		if !breakRight {
			commonTotal++
			lastRight = j
			commonRight[j] = treeRight
		}
	}

	commonRight = commonRight[lastRight:]

	return
}
|
||||
|
||||
func appendIfUnique(target []*ast.Node, val *ast.Node) []*ast.Node {
|
||||
for _, n := range target {
|
||||
if reflect.DeepEqual(n, val) {
|
||||
return target
|
||||
}
|
||||
}
|
||||
return append(target, val)
|
||||
}
|
||||
|
||||
func areOfSameKind(nodes []*ast.Node, kind ast.Kind) bool {
|
||||
for _, n := range nodes {
|
||||
if n.Kind != kind {
|
||||
return false
|
||||
}
|
||||
}
|
||||
return true
|
||||
}
|
||||
|
||||
func leastChildren(nodes []*ast.Node) int {
|
||||
min := -1
|
||||
idx := -1
|
||||
for i, n := range nodes {
|
||||
if idx == -1 || (len(n.Children) < min) {
|
||||
min = len(n.Children)
|
||||
idx = i
|
||||
}
|
||||
}
|
||||
return idx
|
||||
}
|
||||
|
||||
func compileTreeChildren(tree *ast.Node, sep []rune) ([]match.Matcher, error) {
|
||||
var matchers []match.Matcher
|
||||
for _, desc := range tree.Children {
|
||||
m, err := compile(desc, sep)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
matchers = append(matchers, optimizeMatcher(m))
|
||||
}
|
||||
return matchers, nil
|
||||
}
|
||||
|
||||
// compile translates one AST node into its matcher, recursively
// compiling children, and returns the optimized result.
func compile(tree *ast.Node, sep []rune) (m match.Matcher, err error) {
	switch tree.Kind {
	case ast.KindAnyOf:
		// todo this could be faster on pattern_alternatives_combine_lite (see glob_test.go)
		// Prefer the factored form of the alternation when one exists.
		if n := minimizeTree(tree); n != nil {
			return compile(n, sep)
		}
		matchers, err := compileTreeChildren(tree, sep)
		if err != nil {
			return nil, err
		}
		return match.NewAnyOf(matchers...), nil

	case ast.KindPattern:
		if len(tree.Children) == 0 {
			return match.NewNothing(), nil
		}
		matchers, err := compileTreeChildren(tree, sep)
		if err != nil {
			return nil, err
		}
		// Fuse adjacent matchers before combining them into a tree.
		m, err = compileMatchers(minimizeMatchers(matchers))
		if err != nil {
			return nil, err
		}

	case ast.KindAny:
		m = match.NewAny(sep)

	case ast.KindSuper:
		m = match.NewSuper()

	case ast.KindSingle:
		m = match.NewSingle(sep)

	case ast.KindNothing:
		m = match.NewNothing()

	case ast.KindList:
		l := tree.Value.(ast.List)
		m = match.NewList([]rune(l.Chars), l.Not)

	case ast.KindRange:
		r := tree.Value.(ast.Range)
		m = match.NewRange(r.Lo, r.Hi, r.Not)

	case ast.KindText:
		t := tree.Value.(ast.Text)
		m = match.NewText(t.Text)

	default:
		return nil, fmt.Errorf("could not compile tree: unknown node type")
	}

	return optimizeMatcher(m), nil
}
|
||||
|
||||
func Compile(tree *ast.Node, sep []rune) (match.Matcher, error) {
|
||||
m, err := compile(tree, sep)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
return m, nil
|
||||
}
|
||||
80
vendor/github.com/gobwas/glob/glob.go
generated
vendored
Normal file
80
vendor/github.com/gobwas/glob/glob.go
generated
vendored
Normal file
@@ -0,0 +1,80 @@
|
||||
package glob
|
||||
|
||||
import (
|
||||
"github.com/gobwas/glob/compiler"
|
||||
"github.com/gobwas/glob/syntax"
|
||||
)
|
||||
|
||||
// Glob represents compiled glob pattern.
type Glob interface {
	// Match reports whether the given string matches the pattern.
	Match(string) bool
}
|
||||
|
||||
// Compile creates Glob for given pattern and strings (if any present after pattern) as separators.
|
||||
// The pattern syntax is:
|
||||
//
|
||||
// pattern:
|
||||
// { term }
|
||||
//
|
||||
// term:
|
||||
// `*` matches any sequence of non-separator characters
|
||||
// `**` matches any sequence of characters
|
||||
// `?` matches any single non-separator character
|
||||
// `[` [ `!` ] { character-range } `]`
|
||||
// character class (must be non-empty)
|
||||
// `{` pattern-list `}`
|
||||
// pattern alternatives
|
||||
// c matches character c (c != `*`, `**`, `?`, `\`, `[`, `{`, `}`)
|
||||
// `\` c matches character c
|
||||
//
|
||||
// character-range:
|
||||
// c matches character c (c != `\\`, `-`, `]`)
|
||||
// `\` c matches character c
|
||||
// lo `-` hi matches character c for lo <= c <= hi
|
||||
//
|
||||
// pattern-list:
|
||||
// pattern { `,` pattern }
|
||||
// comma-separated (without spaces) patterns
|
||||
//
|
||||
func Compile(pattern string, separators ...rune) (Glob, error) {
|
||||
ast, err := syntax.Parse(pattern)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
matcher, err := compiler.Compile(ast, separators)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
return matcher, nil
|
||||
}
|
||||
|
||||
// MustCompile is the same as Compile, except that if Compile returns error, this will panic
|
||||
func MustCompile(pattern string, separators ...rune) Glob {
|
||||
g, err := Compile(pattern, separators...)
|
||||
if err != nil {
|
||||
panic(err)
|
||||
}
|
||||
|
||||
return g
|
||||
}
|
||||
|
||||
// QuoteMeta returns a string that quotes all glob pattern meta characters
// inside the argument text; For example, QuoteMeta(`{foo*}`) returns `\{foo\*\}`.
func QuoteMeta(s string) string {
	// worst case every byte needs escaping, doubling the length
	b := make([]byte, 2*len(s))

	// a byte loop is correct because all meta characters are ASCII
	j := 0
	for i := 0; i < len(s); i++ {
		if syntax.Special(s[i]) {
			b[j] = '\\'
			j++
		}
		b[j] = s[i]
		j++
	}

	return string(b[0:j])
}
|
||||
45
vendor/github.com/gobwas/glob/match/any.go
generated
vendored
Normal file
45
vendor/github.com/gobwas/glob/match/any.go
generated
vendored
Normal file
@@ -0,0 +1,45 @@
|
||||
package match

import (
	"fmt"
	"github.com/gobwas/glob/util/strings"
)

// Any matches any (possibly empty) run of characters that contains none
// of the configured separator runes (the `*` wildcard).
type Any struct {
	Separators []rune
}

// NewAny returns an Any matcher with the given separator set.
func NewAny(s []rune) Any {
	return Any{s}
}

// Match reports whether s contains none of the separators.
func (self Any) Match(s string) bool {
	return strings.IndexAnyRunes(s, self.Separators) == -1
}

// Index reports that a match can start at position 0 and lists every
// rune-aligned byte length of a possible match at that position.
func (self Any) Index(s string) (int, []int) {
	found := strings.IndexAnyRunes(s, self.Separators)
	switch found {
	case -1:
		// no separator anywhere: any prefix of s (up to all of it) matches
	case 0:
		// separator at the very start: only the empty match is possible
		return 0, segments0
	default:
		// restrict candidates to the separator-free prefix
		s = s[:found]
	}

	segments := acquireSegments(len(s))
	for i := range s {
		segments = append(segments, i)
	}
	segments = append(segments, len(s))

	return 0, segments
}

// Len reports -1 because Any matches a variable number of runes.
func (self Any) Len() int {
	return lenNo
}

// String returns a debug representation of the matcher.
func (self Any) String() string {
	return fmt.Sprintf("<any:![%s]>", string(self.Separators))
}
|
||||
82
vendor/github.com/gobwas/glob/match/any_of.go
generated
vendored
Normal file
82
vendor/github.com/gobwas/glob/match/any_of.go
generated
vendored
Normal file
@@ -0,0 +1,82 @@
|
||||
package match

import "fmt"

// AnyOf matches a string if at least one of its sub-matchers does
// (logical OR, e.g. `{a,b}` alternatives).
type AnyOf struct {
	Matchers Matchers
}

// NewAnyOf builds an AnyOf from the given sub-matchers.
func NewAnyOf(m ...Matcher) AnyOf {
	return AnyOf{Matchers(m)}
}

// Add appends another sub-matcher; it never fails.
func (self *AnyOf) Add(m Matcher) error {
	self.Matchers = append(self.Matchers, m)
	return nil
}

// Match reports whether any sub-matcher matches s.
func (self AnyOf) Match(s string) bool {
	for _, m := range self.Matchers {
		if m.Match(s) {
			return true
		}
	}

	return false
}

// Index returns the leftmost index at which any sub-matcher can match,
// together with the merged segment set of all sub-matchers that match
// at that leftmost index.
func (self AnyOf) Index(s string) (int, []int) {
	index := -1

	segments := acquireSegments(len(s))
	for _, m := range self.Matchers {
		idx, seg := m.Index(s)
		if idx == -1 {
			continue
		}

		if index == -1 || idx < index {
			// new leftmost match: discard previously collected segments
			index = idx
			segments = append(segments[:0], seg...)
			continue
		}

		if idx > index {
			// match starts later than the current best; ignore it
			continue
		}

		// here idx == index
		segments = appendMerge(segments, seg)
	}

	if index == -1 {
		releaseSegments(segments)
		return -1, nil
	}

	return index, segments
}

// Len returns the common fixed rune length of all sub-matchers, or -1
// when they disagree or any of them is variable length.
func (self AnyOf) Len() (l int) {
	l = -1
	for _, m := range self.Matchers {
		ml := m.Len()
		switch {
		case l == -1:
			// first matcher seeds the candidate length
			l = ml
			continue

		case ml == -1:
			return -1

		case l != ml:
			return -1
		}
	}

	return
}

// String returns a debug representation of the matcher.
func (self AnyOf) String() string {
	return fmt.Sprintf("<any_of:[%s]>", self.Matchers)
}
|
||||
146
vendor/github.com/gobwas/glob/match/btree.go
generated
vendored
Normal file
146
vendor/github.com/gobwas/glob/match/btree.go
generated
vendored
Normal file
@@ -0,0 +1,146 @@
|
||||
package match

import (
	"fmt"
	"unicode/utf8"
)

// BTree is a three-way matcher: Left must match the text before the
// Value match, Value matches in the middle, and Right must match the
// remainder. Cached rune lengths (-1 = unknown) let Match prune the
// search window.
type BTree struct {
	Value            Matcher
	Left             Matcher
	Right            Matcher
	ValueLengthRunes int
	LeftLengthRunes  int
	RightLengthRunes int
	LengthRunes      int
}

// NewBTree builds a BTree and precomputes the rune lengths of each
// part; Left and Right may be nil (meaning "must be empty").
func NewBTree(Value, Left, Right Matcher) (tree BTree) {
	tree.Value = Value
	tree.Left = Left
	tree.Right = Right

	lenOk := true
	if tree.ValueLengthRunes = Value.Len(); tree.ValueLengthRunes == -1 {
		lenOk = false
	}

	if Left != nil {
		if tree.LeftLengthRunes = Left.Len(); tree.LeftLengthRunes == -1 {
			lenOk = false
		}
	}

	if Right != nil {
		if tree.RightLengthRunes = Right.Len(); tree.RightLengthRunes == -1 {
			lenOk = false
		}
	}

	// total length is only known when all three parts are fixed-length
	if lenOk {
		tree.LengthRunes = tree.LeftLengthRunes + tree.ValueLengthRunes + tree.RightLengthRunes
	} else {
		tree.LengthRunes = -1
	}

	return tree
}

// Len returns the precomputed total rune length, or -1 if unknown.
func (self BTree) Len() int {
	return self.LengthRunes
}

// todo?
// Index is not implemented for BTree; it always reports no match.
func (self BTree) Index(s string) (int, []int) {
	return -1, nil
}

// Match scans s for a position where Value matches such that Left
// matches the text before it and Right matches the text after it.
func (self BTree) Match(s string) bool {
	inputLen := len(s)

	// self.Length, self.RLen and self.LLen are values meaning the length of runes for each part
	// here we manipulating byte length for better optimizations
	// but these checks still works, cause minLen of 1-rune string is 1 byte.
	if self.LengthRunes != -1 && self.LengthRunes > inputLen {
		return false
	}

	// try to cut unnecessary parts
	// by knowledge of length of right and left part
	var offset, limit int
	if self.LeftLengthRunes >= 0 {
		offset = self.LeftLengthRunes
	}
	if self.RightLengthRunes >= 0 {
		limit = inputLen - self.RightLengthRunes
	} else {
		limit = inputLen
	}

	for offset < limit {
		// search for matching part in substring
		index, segments := self.Value.Index(s[offset:limit])
		if index == -1 {
			releaseSegments(segments)
			return false
		}

		// text preceding the Value match must satisfy Left
		l := s[:offset+index]
		var left bool
		if self.Left != nil {
			left = self.Left.Match(l)
		} else {
			left = l == ""
		}

		if left {
			// try the longest Value match first
			for i := len(segments) - 1; i >= 0; i-- {
				length := segments[i]

				var right bool
				var r string
				// if there is no string for the right branch
				if inputLen <= offset+index+length {
					r = ""
				} else {
					r = s[offset+index+length:]
				}

				if self.Right != nil {
					right = self.Right.Match(r)
				} else {
					right = r == ""
				}

				if right {
					releaseSegments(segments)
					return true
				}
			}
		}

		// advance past the current rune and retry
		_, step := utf8.DecodeRuneInString(s[offset+index:])
		offset += index + step

		releaseSegments(segments)
	}

	return false
}

// String returns a debug representation of the matcher.
func (self BTree) String() string {
	const n string = "<nil>"
	var l, r string
	if self.Left == nil {
		l = n
	} else {
		l = self.Left.String()
	}
	if self.Right == nil {
		r = n
	} else {
		r = self.Right.String()
	}

	return fmt.Sprintf("<btree:[%s<-%s->%s]>", l, self.Value, r)
}
|
||||
58
vendor/github.com/gobwas/glob/match/contains.go
generated
vendored
Normal file
58
vendor/github.com/gobwas/glob/match/contains.go
generated
vendored
Normal file
@@ -0,0 +1,58 @@
|
||||
package match

import (
	"fmt"
	"strings"
)

// Contains matches strings that contain Needle (or, with Not set,
// strings that do NOT contain it).
type Contains struct {
	Needle string
	Not    bool
}

// NewContains builds a Contains matcher.
func NewContains(needle string, not bool) Contains {
	return Contains{needle, not}
}

// Match reports whether s contains the needle, inverted when Not is set.
func (self Contains) Match(s string) bool {
	return strings.Contains(s, self.Needle) != self.Not
}

// Index reports a match starting at 0 and lists the candidate match end
// offsets (for the positive case, offsets past the first needle; for the
// negated case, offsets within the needle-free prefix).
func (self Contains) Index(s string) (int, []int) {
	var offset int

	idx := strings.Index(s, self.Needle)

	if !self.Not {
		if idx == -1 {
			return -1, nil
		}

		// every match must extend at least past the first needle occurrence
		offset = idx + len(self.Needle)
		if len(s) <= offset {
			return 0, []int{offset}
		}
		s = s[offset:]
	} else if idx != -1 {
		// negated: candidates must stop before the needle begins
		s = s[:idx]
	}

	segments := acquireSegments(len(s) + 1)
	for i := range s {
		segments = append(segments, offset+i)
	}

	return 0, append(segments, offset+len(s))
}

// Len reports -1 because the match length is variable.
func (self Contains) Len() int {
	return lenNo
}

// String returns a debug representation of the matcher.
func (self Contains) String() string {
	var not string
	if self.Not {
		not = "!"
	}
	return fmt.Sprintf("<contains:%s[%s]>", not, self.Needle)
}
|
||||
99
vendor/github.com/gobwas/glob/match/every_of.go
generated
vendored
Normal file
99
vendor/github.com/gobwas/glob/match/every_of.go
generated
vendored
Normal file
@@ -0,0 +1,99 @@
|
||||
package match

import (
	"fmt"
)

// EveryOf matches a string only if every sub-matcher matches it
// (logical AND).
type EveryOf struct {
	Matchers Matchers
}

// NewEveryOf builds an EveryOf from the given sub-matchers.
func NewEveryOf(m ...Matcher) EveryOf {
	return EveryOf{Matchers(m)}
}

// Add appends another sub-matcher; it never fails.
func (self *EveryOf) Add(m Matcher) error {
	self.Matchers = append(self.Matchers, m)
	return nil
}

// Len accumulates sub-matcher lengths, returning -1 for unknown.
//
// NOTE(review): the condition tests `l > 0`, but l starts at 0, so this
// returns -1 on the first iteration whenever there is at least one
// matcher. It looks like `ml` was meant instead of `l` — confirm against
// upstream intent before changing; -1 is at least a safe (conservative)
// answer for callers that treat -1 as "unknown length".
func (self EveryOf) Len() (l int) {
	for _, m := range self.Matchers {
		if ml := m.Len(); l > 0 {
			l += ml
		} else {
			return -1
		}
	}

	return
}

// Index finds a position where all sub-matchers agree, intersecting
// their candidate segment sets as it narrows the search window.
func (self EveryOf) Index(s string) (int, []int) {
	var index int
	var offset int

	// make `in` with cap as len(s),
	// cause it is the maximum size of output segments values
	next := acquireSegments(len(s))
	current := acquireSegments(len(s))

	sub := s
	for i, m := range self.Matchers {
		idx, seg := m.Index(sub)
		if idx == -1 {
			releaseSegments(next)
			releaseSegments(current)
			return -1, nil
		}

		if i == 0 {
			// we use copy here instead of `current = seg`
			// cause seg is a slice from reusable buffer `in`
			// and it could be overwritten in next iteration
			current = append(current, seg...)
		} else {
			// clear the next
			next = next[:0]

			// re-base previous segments onto the new match position and
			// keep only those offsets that this matcher also produced
			delta := index - (idx + offset)
			for _, ex := range current {
				for _, n := range seg {
					if ex+delta == n {
						next = append(next, n)
					}
				}
			}

			if len(next) == 0 {
				releaseSegments(next)
				releaseSegments(current)
				return -1, nil
			}

			current = append(current[:0], next...)
		}

		index = idx + offset
		sub = s[index:]
		offset += idx
	}

	releaseSegments(next)

	return index, current
}

// Match reports whether every sub-matcher matches s.
func (self EveryOf) Match(s string) bool {
	for _, m := range self.Matchers {
		if !m.Match(s) {
			return false
		}
	}

	return true
}

// String returns a debug representation of the matcher.
func (self EveryOf) String() string {
	return fmt.Sprintf("<every_of:[%s]>", self.Matchers)
}
|
||||
49
vendor/github.com/gobwas/glob/match/list.go
generated
vendored
Normal file
49
vendor/github.com/gobwas/glob/match/list.go
generated
vendored
Normal file
@@ -0,0 +1,49 @@
|
||||
package match

import (
	"fmt"
	"github.com/gobwas/glob/util/runes"
	"unicode/utf8"
)

// List matches exactly one rune drawn from (or, with Not, excluded
// from) a fixed set — the `[abc]` / `[!abc]` character class.
type List struct {
	List []rune
	Not  bool
}

// NewList builds a List matcher.
func NewList(list []rune, not bool) List {
	return List{list, not}
}

// Match reports whether s is exactly one rune that satisfies the class.
func (self List) Match(s string) bool {
	r, w := utf8.DecodeRuneInString(s)
	// reject anything longer than a single rune
	// (for empty s, w is 0 and r is utf8.RuneError — presumably not in
	// the list, so empty input fails the positive case; verify if the
	// negated-class behavior for "" matters to callers)
	if len(s) > w {
		return false
	}

	inList := runes.IndexRune(self.List, r) != -1
	return inList == !self.Not
}

// Len reports that List always consumes exactly one rune.
func (self List) Len() int {
	return lenOne
}

// Index finds the first rune satisfying the class and returns its
// position with the single possible segment (that rune's byte length).
func (self List) Index(s string) (int, []int) {
	for i, r := range s {
		if self.Not == (runes.IndexRune(self.List, r) == -1) {
			return i, segmentsByRuneLength[utf8.RuneLen(r)]
		}
	}

	return -1, nil
}

// String returns a debug representation of the matcher.
func (self List) String() string {
	var not string
	if self.Not {
		not = "!"
	}

	return fmt.Sprintf("<list:%s[%s]>", not, string(self.List))
}
|
||||
81
vendor/github.com/gobwas/glob/match/match.go
generated
vendored
Normal file
81
vendor/github.com/gobwas/glob/match/match.go
generated
vendored
Normal file
@@ -0,0 +1,81 @@
|
||||
package match

// todo common table of rune's length

import (
	"fmt"
	"strings"
)

// Sentinel values returned by Matcher.Len:
const lenOne = 1  // matcher always consumes exactly one rune
const lenZero = 0 // matcher consumes nothing
const lenNo = -1  // match length is variable or unknown

// Matcher is the common interface of all compiled glob fragments.
type Matcher interface {
	// Match reports whether the whole string satisfies the matcher.
	Match(string) bool
	// Index returns the first position a match can start at and the
	// byte lengths of every possible match there, or (-1, nil).
	Index(string) (int, []int)
	// Len returns the fixed rune length of a match, or -1 if variable.
	Len() int
	String() string
}
|
||||
|
||||
type Matchers []Matcher
|
||||
|
||||
func (m Matchers) String() string {
|
||||
var s []string
|
||||
for _, matcher := range m {
|
||||
s = append(s, fmt.Sprint(matcher))
|
||||
}
|
||||
|
||||
return fmt.Sprintf("%s", strings.Join(s, ","))
|
||||
}
|
||||
|
||||
// appendMerge merges two already SORTED and UNIQUE segment slices into
// target, keeping the result sorted and unique. The merged content is
// written back into target's storage (growing it if needed).
func appendMerge(target, sub []int) []int {
	lt, ls := len(target), len(sub)
	out := make([]int, 0, lt+ls)

	x, y := 0, 0
	for x < lt && y < ls {
		tv, sv := target[x], sub[y]
		switch {
		case tv < sv:
			out = append(out, tv)
			x++
		case sv < tv:
			out = append(out, sv)
			y++
		default:
			// equal values: keep a single copy
			out = append(out, tv)
			x++
			y++
		}
	}
	// at most one of these appends anything
	out = append(out, target[x:]...)
	out = append(out, sub[y:]...)

	target = append(target[:0], out...)

	return target
}
|
||||
|
||||
// reverseSegments reverses input in place.
func reverseSegments(input []int) {
	for i, j := 0, len(input)-1; i < j; i, j = i+1, j-1 {
		input[i], input[j] = input[j], input[i]
	}
}
|
||||
49
vendor/github.com/gobwas/glob/match/max.go
generated
vendored
Normal file
49
vendor/github.com/gobwas/glob/match/max.go
generated
vendored
Normal file
@@ -0,0 +1,49 @@
|
||||
package match

import (
	"fmt"
	"unicode/utf8"
)

// Max matches any string of at most Limit runes.
type Max struct {
	Limit int
}

// NewMax builds a Max matcher with the given rune limit.
func NewMax(l int) Max {
	return Max{Limit: l}
}

// Match reports whether s contains no more than Limit runes.
func (self Max) Match(s string) bool {
	seen := 0
	for range s {
		seen++
		if seen > self.Limit {
			return false
		}
	}

	return true
}

// Index matches at position 0; possible segments are the byte offsets
// after 0..Limit runes of s.
func (self Max) Index(s string) (int, []int) {
	segments := acquireSegments(self.Limit + 1)
	segments = append(segments, 0)

	seen := 0
	for i, r := range s {
		seen++
		if seen > self.Limit {
			break
		}
		segments = append(segments, i+utf8.RuneLen(r))
	}

	return 0, segments
}

// Len reports -1: the match length varies from 0 to Limit runes.
func (self Max) Len() int {
	return lenNo
}

// String returns a debug representation of the matcher.
func (self Max) String() string {
	return fmt.Sprintf("<max:%d>", self.Limit)
}
|
||||
57
vendor/github.com/gobwas/glob/match/min.go
generated
vendored
Normal file
57
vendor/github.com/gobwas/glob/match/min.go
generated
vendored
Normal file
@@ -0,0 +1,57 @@
|
||||
package match

import (
	"fmt"
	"unicode/utf8"
)

// Min matches any string of at least Limit runes.
type Min struct {
	Limit int
}

// NewMin builds a Min matcher with the given rune minimum.
func NewMin(l int) Min {
	return Min{l}
}

// Match reports whether s contains at least Limit runes.
func (self Min) Match(s string) bool {
	var l int
	for range s {
		l += 1
		if l >= self.Limit {
			return true
		}
	}

	return false
}

// Index matches at position 0; possible segments are the byte offsets
// after each prefix of at least Limit runes.
func (self Min) Index(s string) (int, []int) {
	var count int

	// capacity hint in bytes; if even the byte length is below Limit
	// the rune count cannot reach it either, so fail fast
	c := len(s) - self.Limit + 1
	if c <= 0 {
		return -1, nil
	}

	segments := acquireSegments(c)
	for i, r := range s {
		count++
		if count >= self.Limit {
			segments = append(segments, i+utf8.RuneLen(r))
		}
	}

	if len(segments) == 0 {
		return -1, nil
	}

	return 0, segments
}

// Len reports -1: the match length is unbounded above.
func (self Min) Len() int {
	return lenNo
}

// String returns a debug representation of the matcher.
func (self Min) String() string {
	return fmt.Sprintf("<min:%d>", self.Limit)
}
|
||||
27
vendor/github.com/gobwas/glob/match/nothing.go
generated
vendored
Normal file
27
vendor/github.com/gobwas/glob/match/nothing.go
generated
vendored
Normal file
@@ -0,0 +1,27 @@
|
||||
package match
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
)
|
||||
|
||||
type Nothing struct{}
|
||||
|
||||
func NewNothing() Nothing {
|
||||
return Nothing{}
|
||||
}
|
||||
|
||||
func (self Nothing) Match(s string) bool {
|
||||
return len(s) == 0
|
||||
}
|
||||
|
||||
func (self Nothing) Index(s string) (int, []int) {
|
||||
return 0, segments0
|
||||
}
|
||||
|
||||
func (self Nothing) Len() int {
|
||||
return lenZero
|
||||
}
|
||||
|
||||
func (self Nothing) String() string {
|
||||
return fmt.Sprintf("<nothing>")
|
||||
}
|
||||
50
vendor/github.com/gobwas/glob/match/prefix.go
generated
vendored
Normal file
50
vendor/github.com/gobwas/glob/match/prefix.go
generated
vendored
Normal file
@@ -0,0 +1,50 @@
|
||||
package match

import (
	"fmt"
	"strings"
	"unicode/utf8"
)

// Prefix matches any string that starts with the fixed Prefix.
type Prefix struct {
	Prefix string
}

// NewPrefix builds a Prefix matcher for p.
func NewPrefix(p string) Prefix {
	return Prefix{p}
}

// Index locates the first occurrence of the prefix text; segments are
// the prefix length plus every rune-aligned extension to the end of s.
func (self Prefix) Index(s string) (int, []int) {
	idx := strings.Index(s, self.Prefix)
	if idx == -1 {
		return -1, nil
	}

	length := len(self.Prefix)
	var sub string
	if len(s) > idx+length {
		sub = s[idx+length:]
	} else {
		sub = ""
	}

	segments := acquireSegments(len(sub) + 1)
	segments = append(segments, length)
	for i, r := range sub {
		segments = append(segments, length+i+utf8.RuneLen(r))
	}

	return idx, segments
}

// Len reports -1: the match may extend arbitrarily past the prefix.
func (self Prefix) Len() int {
	return lenNo
}

// Match reports whether s starts with the prefix.
func (self Prefix) Match(s string) bool {
	return strings.HasPrefix(s, self.Prefix)
}

// String returns a debug representation of the matcher.
func (self Prefix) String() string {
	return fmt.Sprintf("<prefix:%s>", self.Prefix)
}
|
||||
55
vendor/github.com/gobwas/glob/match/prefix_any.go
generated
vendored
Normal file
55
vendor/github.com/gobwas/glob/match/prefix_any.go
generated
vendored
Normal file
@@ -0,0 +1,55 @@
|
||||
package match

import (
	"fmt"
	"strings"
	"unicode/utf8"

	sutil "github.com/gobwas/glob/util/strings"
)

// PrefixAny matches the fixed Prefix followed by any run of
// non-separator characters (e.g. `abc*`).
type PrefixAny struct {
	Prefix     string
	Separators []rune
}

// NewPrefixAny builds a PrefixAny matcher.
func NewPrefixAny(s string, sep []rune) PrefixAny {
	return PrefixAny{s, sep}
}

// Index locates the first occurrence of the prefix; segments extend
// rune by rune up to the next separator (or end of string).
func (self PrefixAny) Index(s string) (int, []int) {
	idx := strings.Index(s, self.Prefix)
	if idx == -1 {
		return -1, nil
	}

	n := len(self.Prefix)
	sub := s[idx+n:]
	// the wildcard part may not cross a separator
	i := sutil.IndexAnyRunes(sub, self.Separators)
	if i > -1 {
		sub = sub[:i]
	}

	seg := acquireSegments(len(sub) + 1)
	seg = append(seg, n)
	for i, r := range sub {
		seg = append(seg, n+i+utf8.RuneLen(r))
	}

	return idx, seg
}

// Len reports -1: the wildcard part has variable length.
func (self PrefixAny) Len() int {
	return lenNo
}

// Match reports whether s starts with the prefix and contains no
// separator after it.
func (self PrefixAny) Match(s string) bool {
	if !strings.HasPrefix(s, self.Prefix) {
		return false
	}
	return sutil.IndexAnyRunes(s[len(self.Prefix):], self.Separators) == -1
}

// String returns a debug representation of the matcher.
func (self PrefixAny) String() string {
	return fmt.Sprintf("<prefix_any:%s![%s]>", self.Prefix, string(self.Separators))
}
|
||||
62
vendor/github.com/gobwas/glob/match/prefix_suffix.go
generated
vendored
Normal file
62
vendor/github.com/gobwas/glob/match/prefix_suffix.go
generated
vendored
Normal file
@@ -0,0 +1,62 @@
|
||||
package match

import (
	"fmt"
	"strings"
)

// PrefixSuffix matches any string that starts with Prefix and ends with
// Suffix (the two may overlap in Match, per HasPrefix/HasSuffix).
type PrefixSuffix struct {
	Prefix, Suffix string
}

// NewPrefixSuffix builds a PrefixSuffix matcher.
func NewPrefixSuffix(p, s string) PrefixSuffix {
	return PrefixSuffix{p, s}
}

// Index locates the first prefix occurrence and lists, in ascending
// order, the lengths of every candidate match ending at a suffix
// occurrence after it.
func (self PrefixSuffix) Index(s string) (int, []int) {
	prefixIdx := strings.Index(s, self.Prefix)
	if prefixIdx == -1 {
		return -1, nil
	}

	suffixLen := len(self.Suffix)
	if suffixLen <= 0 {
		// empty suffix: the only candidate runs to the end of s
		return prefixIdx, []int{len(s) - prefixIdx}
	}

	if (len(s) - prefixIdx) <= 0 {
		return -1, nil
	}

	segments := acquireSegments(len(s) - prefixIdx)
	// collect suffix occurrences from the right, then reverse so the
	// returned segment lengths are ascending
	for sub := s[prefixIdx:]; ; {
		suffixIdx := strings.LastIndex(sub, self.Suffix)
		if suffixIdx == -1 {
			break
		}

		segments = append(segments, suffixIdx+suffixLen)
		sub = sub[:suffixIdx]
	}

	if len(segments) == 0 {
		releaseSegments(segments)
		return -1, nil
	}

	reverseSegments(segments)

	return prefixIdx, segments
}

// Len reports -1 because the middle part has variable length.
func (self PrefixSuffix) Len() int {
	return lenNo
}

// Match reports whether s has both the prefix and the suffix.
func (self PrefixSuffix) Match(s string) bool {
	return strings.HasPrefix(s, self.Prefix) && strings.HasSuffix(s, self.Suffix)
}

// String returns a debug representation of the matcher.
func (self PrefixSuffix) String() string {
	return fmt.Sprintf("<prefix_suffix:[%s,%s]>", self.Prefix, self.Suffix)
}
|
||||
48
vendor/github.com/gobwas/glob/match/range.go
generated
vendored
Normal file
48
vendor/github.com/gobwas/glob/match/range.go
generated
vendored
Normal file
@@ -0,0 +1,48 @@
|
||||
package match

import (
	"fmt"
	"unicode/utf8"
)

// Range matches exactly one rune inside (or, with Not, outside) the
// inclusive range [Lo, Hi] — the `[a-z]` / `[!a-z]` class.
type Range struct {
	Lo, Hi rune
	Not    bool
}

// NewRange builds a Range matcher.
func NewRange(lo, hi rune, not bool) Range {
	return Range{lo, hi, not}
}

// Len reports that Range always consumes exactly one rune.
func (self Range) Len() int {
	return lenOne
}

// Match reports whether s is exactly one rune satisfying the range.
func (self Range) Match(s string) bool {
	r, w := utf8.DecodeRuneInString(s)
	// reject anything longer than a single rune
	if len(s) > w {
		return false
	}

	inRange := r >= self.Lo && r <= self.Hi

	return inRange == !self.Not
}

// Index finds the first rune satisfying the range and returns its
// position with the single possible segment (that rune's byte length).
func (self Range) Index(s string) (int, []int) {
	for i, r := range s {
		if self.Not != (r >= self.Lo && r <= self.Hi) {
			return i, segmentsByRuneLength[utf8.RuneLen(r)]
		}
	}

	return -1, nil
}

// String returns a debug representation of the matcher.
func (self Range) String() string {
	var not string
	if self.Not {
		not = "!"
	}
	return fmt.Sprintf("<range:%s[%s,%s]>", not, string(self.Lo), string(self.Hi))
}
|
||||
77
vendor/github.com/gobwas/glob/match/row.go
generated
vendored
Normal file
77
vendor/github.com/gobwas/glob/match/row.go
generated
vendored
Normal file
@@ -0,0 +1,77 @@
|
||||
package match

import (
	"fmt"
)

// Row is a sequence of fixed-length matchers applied back to back; the
// whole row consumes exactly RunesLength runes.
type Row struct {
	Matchers    Matchers
	RunesLength int
	Segments    []int
}

// NewRow builds a Row of total rune length len from fixed-length
// matchers m.
func NewRow(len int, m ...Matcher) Row {
	return Row{
		Matchers:    Matchers(m),
		RunesLength: len,
		Segments:    []int{len},
	}
}

// matchAll feeds consecutive rune chunks of s to each matcher in turn,
// each chunk sized by that matcher's fixed rune length.
func (self Row) matchAll(s string) bool {
	var idx int
	for _, m := range self.Matchers {
		length := m.Len()

		// advance `next` to the byte index of the length-th rune start
		var next, i int
		for next = range s[idx:] {
			i++
			if i == length {
				break
			}
		}

		if i < length || !m.Match(s[idx:idx+next+1]) {
			return false
		}

		idx += next + 1
	}

	return true
}

// lenOk reports whether s contains exactly RunesLength runes, bailing
// out early once the count is exceeded.
func (self Row) lenOk(s string) bool {
	var i int
	for range s {
		i++
		if i > self.RunesLength {
			return false
		}
	}
	return self.RunesLength == i
}

// Match reports whether s has the exact rune length and every chunk
// satisfies its matcher.
func (self Row) Match(s string) bool {
	return self.lenOk(s) && self.matchAll(s)
}

// Len returns the fixed total rune length of the row.
func (self Row) Len() (l int) {
	return self.RunesLength
}

// Index scans for the first position where the whole row matches;
// the only possible segment is the precomputed fixed length.
func (self Row) Index(s string) (int, []int) {
	for i := range s {
		// byte length is always >= rune length, so too-few bytes means
		// too-few runes and the scan can stop
		if len(s[i:]) < self.RunesLength {
			break
		}
		if self.matchAll(s[i:]) {
			return i, self.Segments
		}
	}
	return -1, nil
}

// String returns a debug representation of the matcher.
func (self Row) String() string {
	return fmt.Sprintf("<row_%d:[%s]>", self.RunesLength, self.Matchers)
}
|
||||
91
vendor/github.com/gobwas/glob/match/segments.go
generated
vendored
Normal file
91
vendor/github.com/gobwas/glob/match/segments.go
generated
vendored
Normal file
@@ -0,0 +1,91 @@
|
||||
package match

import (
	"sync"
)

// SomePool abstracts a reusable []int pool (satisfied by the
// sync.Pool-backed segment pools below).
type SomePool interface {
	Get() []int
	Put([]int)
}

// segmentsPools holds one pool per power-of-two capacity, indexed by
// capacity-1 (only a handful of slots between cacheFrom and
// cacheToAndHigher are actually initialized; see init below).
var segmentsPools [1024]sync.Pool
||||
|
||||
// toPowerOfTwo rounds v up to the next power of two; values that are
// already powers of two are returned unchanged (0 maps to 0).
func toPowerOfTwo(v int) int {
	// classic bit-smearing: propagate the highest set bit of v-1
	// rightwards, then add one to land on a power of two
	v--
	for shift := uint(1); shift <= 16; shift <<= 1 {
		v |= v >> shift
	}
	v++

	return v
}
||||
|
||||
// Pool sizing bounds: capacities below cacheFrom are cheaper to
// allocate fresh; capacities at or above cacheToAndHigher all share the
// largest pool.
const (
	cacheFrom             = 16
	cacheToAndHigher      = 1024
	cacheFromIndex        = 15
	cacheToAndHigherIndex = 1023
)

// Preallocated single-element segment slices shared across matchers.
var (
	segments0 = []int{0}
	segments1 = []int{1}
	segments2 = []int{2}
	segments3 = []int{3}
	segments4 = []int{4}
)

// segmentsByRuneLength maps a rune's byte length (0..4) to the shared
// single-segment slice of that value.
var segmentsByRuneLength [5][]int = [5][]int{
	0: segments0,
	1: segments1,
	2: segments2,
	3: segments3,
	4: segments4,
}

// init seeds one pool per power-of-two capacity from cacheToAndHigher
// down to cacheFrom, stored at index capacity-1. The closure captures i
// by value so each pool allocates its own capacity.
func init() {
	for i := cacheToAndHigher; i >= cacheFrom; i >>= 1 {
		func(i int) {
			segmentsPools[i-1] = sync.Pool{New: func() interface{} {
				return make([]int, 0, i)
			}}
		}(i)
	}
}
|
||||
|
||||
// getTableIndex maps a requested capacity to the index of the pool that
// serves it, clamping to the smallest and largest initialized pools.
func getTableIndex(c int) int {
	p := toPowerOfTwo(c)
	switch {
	case p >= cacheToAndHigher:
		return cacheToAndHigherIndex
	case p <= cacheFrom:
		return cacheFromIndex
	default:
		return p - 1
	}
}

// acquireSegments returns an empty []int with capacity at least
// suitable for c, drawn from a pool for larger sizes.
func acquireSegments(c int) []int {
	// make []int with less capacity than cacheFrom
	// is faster than acquiring it from pool
	if c < cacheFrom {
		return make([]int, 0, c)
	}

	return segmentsPools[getTableIndex(c)].Get().([]int)[:0]
}

// releaseSegments returns a slice obtained from acquireSegments to its
// pool; small slices are simply dropped for the GC.
func releaseSegments(s []int) {
	c := cap(s)

	// make []int with less capacity than cacheFrom
	// is faster than acquiring it from pool
	if c < cacheFrom {
		return
	}

	segmentsPools[getTableIndex(c)].Put(s)
}
|
||||
43
vendor/github.com/gobwas/glob/match/single.go
generated
vendored
Normal file
43
vendor/github.com/gobwas/glob/match/single.go
generated
vendored
Normal file
@@ -0,0 +1,43 @@
|
||||
package match

import (
	"fmt"
	"github.com/gobwas/glob/util/runes"
	"unicode/utf8"
)

// single represents ?
// Single matches exactly one rune that is not a separator.
type Single struct {
	Separators []rune
}

// NewSingle builds a Single matcher with the given separator set.
func NewSingle(s []rune) Single {
	return Single{s}
}

// Match reports whether s is exactly one non-separator rune.
func (self Single) Match(s string) bool {
	r, w := utf8.DecodeRuneInString(s)
	// reject anything longer than a single rune
	if len(s) > w {
		return false
	}

	return runes.IndexRune(self.Separators, r) == -1
}

// Len reports that Single always consumes exactly one rune.
func (self Single) Len() int {
	return lenOne
}

// Index finds the first non-separator rune and returns its position
// with the single possible segment (that rune's byte length).
func (self Single) Index(s string) (int, []int) {
	for i, r := range s {
		if runes.IndexRune(self.Separators, r) == -1 {
			return i, segmentsByRuneLength[utf8.RuneLen(r)]
		}
	}

	return -1, nil
}

// String returns a debug representation of the matcher.
func (self Single) String() string {
	return fmt.Sprintf("<single:![%s]>", string(self.Separators))
}
|
||||
35
vendor/github.com/gobwas/glob/match/suffix.go
generated
vendored
Normal file
35
vendor/github.com/gobwas/glob/match/suffix.go
generated
vendored
Normal file
@@ -0,0 +1,35 @@
|
||||
package match

import (
	"fmt"
	"strings"
)

// Suffix matches any string that ends with the fixed Suffix.
type Suffix struct {
	Suffix string
}

// NewSuffix builds a Suffix matcher for s.
func NewSuffix(s string) Suffix {
	return Suffix{Suffix: s}
}

// Len reports -1: the full match may be arbitrarily long.
func (self Suffix) Len() int {
	return lenNo
}

// Match reports whether s ends with the suffix.
func (self Suffix) Match(s string) bool {
	return strings.HasSuffix(s, self.Suffix)
}

// Index locates the first occurrence of the suffix text and reports a
// single segment covering everything up to and including it.
func (self Suffix) Index(s string) (int, []int) {
	pos := strings.Index(s, self.Suffix)
	if pos < 0 {
		return -1, nil
	}

	return 0, []int{pos + len(self.Suffix)}
}

// String returns a debug representation of the matcher.
func (self Suffix) String() string {
	return fmt.Sprintf("<suffix:%s>", self.Suffix)
}
|
||||
43
vendor/github.com/gobwas/glob/match/suffix_any.go
generated
vendored
Normal file
43
vendor/github.com/gobwas/glob/match/suffix_any.go
generated
vendored
Normal file
@@ -0,0 +1,43 @@
|
||||
package match

import (
	"fmt"
	"strings"

	sutil "github.com/gobwas/glob/util/strings"
)

// SuffixAny matches any run of non-separator characters followed by
// the fixed Suffix (e.g. `*abc`).
type SuffixAny struct {
	Suffix     string
	Separators []rune
}

// NewSuffixAny builds a SuffixAny matcher.
func NewSuffixAny(s string, sep []rune) SuffixAny {
	return SuffixAny{s, sep}
}

// Index locates the first suffix occurrence and starts the match just
// after the last separator before it; the single segment runs from
// there through the suffix.
func (self SuffixAny) Index(s string) (int, []int) {
	idx := strings.Index(s, self.Suffix)
	if idx == -1 {
		return -1, nil
	}

	// +1 turns "index of last separator" into "start after it"
	// (and -1 + 1 == 0 when there is no separator at all)
	i := sutil.LastIndexAnyRunes(s[:idx], self.Separators) + 1

	return i, []int{idx + len(self.Suffix) - i}
}

// Len reports -1: the wildcard part has variable length.
func (self SuffixAny) Len() int {
	return lenNo
}

// Match reports whether s ends with the suffix and contains no
// separator before it.
func (self SuffixAny) Match(s string) bool {
	if !strings.HasSuffix(s, self.Suffix) {
		return false
	}
	return sutil.IndexAnyRunes(s[:len(s)-len(self.Suffix)], self.Separators) == -1
}

// String returns a debug representation of the matcher.
func (self SuffixAny) String() string {
	return fmt.Sprintf("<suffix_any:![%s]%s>", string(self.Separators), self.Suffix)
}
|
||||
33
vendor/github.com/gobwas/glob/match/super.go
generated
vendored
Normal file
33
vendor/github.com/gobwas/glob/match/super.go
generated
vendored
Normal file
@@ -0,0 +1,33 @@
|
||||
package match
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
)
|
||||
|
||||
type Super struct{}
|
||||
|
||||
func NewSuper() Super {
|
||||
return Super{}
|
||||
}
|
||||
|
||||
func (self Super) Match(s string) bool {
|
||||
return true
|
||||
}
|
||||
|
||||
func (self Super) Len() int {
|
||||
return lenNo
|
||||
}
|
||||
|
||||
func (self Super) Index(s string) (int, []int) {
|
||||
segments := acquireSegments(len(s) + 1)
|
||||
for i := range s {
|
||||
segments = append(segments, i)
|
||||
}
|
||||
segments = append(segments, len(s))
|
||||
|
||||
return 0, segments
|
||||
}
|
||||
|
||||
func (self Super) String() string {
|
||||
return fmt.Sprintf("<super>")
|
||||
}
|
||||
45
vendor/github.com/gobwas/glob/match/text.go
generated
vendored
Normal file
45
vendor/github.com/gobwas/glob/match/text.go
generated
vendored
Normal file
@@ -0,0 +1,45 @@
|
||||
package match
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"strings"
|
||||
"unicode/utf8"
|
||||
)
|
||||
|
||||
// raw represents raw string to match
|
||||
// Text matches a single fixed literal string.
type Text struct {
	Str         string // the literal to match
	RunesLength int    // number of runes in Str
	BytesLength int    // number of bytes in Str
	Segments    []int  // fixed single-element segment list: {len(Str)}
}

// NewText builds a Text matcher for the literal s.
func NewText(s string) Text {
	return Text{
		Str:         s,
		RunesLength: utf8.RuneCountInString(s),
		BytesLength: len(s),
		Segments:    []int{len(s)},
	}
}

// Match reports whether s equals the literal exactly.
func (self Text) Match(s string) bool {
	return s == self.Str
}

// Len returns the literal's length in runes.
func (self Text) Len() int {
	return self.RunesLength
}

// Index returns the offset of the first occurrence of the literal in s
// together with its fixed segment list, or (-1, nil) when absent.
func (self Text) Index(s string) (int, []int) {
	pos := strings.Index(s, self.Str)
	if pos < 0 {
		return -1, nil
	}
	return pos, self.Segments
}

// String implements fmt.Stringer.
func (self Text) String() string {
	return fmt.Sprintf("<text:`%v`>", self.Str)
}
|
||||
148
vendor/github.com/gobwas/glob/readme.md
generated
vendored
Normal file
148
vendor/github.com/gobwas/glob/readme.md
generated
vendored
Normal file
@@ -0,0 +1,148 @@
|
||||
# glob.[go](https://golang.org)
|
||||
|
||||
[![GoDoc][godoc-image]][godoc-url] [![Build Status][travis-image]][travis-url]
|
||||
|
||||
> Go Globbing Library.
|
||||
|
||||
## Install
|
||||
|
||||
```shell
|
||||
go get github.com/gobwas/glob
|
||||
```
|
||||
|
||||
## Example
|
||||
|
||||
```go
|
||||
|
||||
package main
|
||||
|
||||
import "github.com/gobwas/glob"
|
||||
|
||||
func main() {
|
||||
var g glob.Glob
|
||||
|
||||
// create simple glob
|
||||
g = glob.MustCompile("*.github.com")
|
||||
g.Match("api.github.com") // true
|
||||
|
||||
// quote meta characters and then create simple glob
|
||||
g = glob.MustCompile(glob.QuoteMeta("*.github.com"))
|
||||
g.Match("*.github.com") // true
|
||||
|
||||
// create new glob with set of delimiters as ["."]
|
||||
g = glob.MustCompile("api.*.com", '.')
|
||||
g.Match("api.github.com") // true
|
||||
g.Match("api.gi.hub.com") // false
|
||||
|
||||
// create new glob with set of delimiters as ["."]
|
||||
// but now with super wildcard
|
||||
g = glob.MustCompile("api.**.com", '.')
|
||||
g.Match("api.github.com") // true
|
||||
g.Match("api.gi.hub.com") // true
|
||||
|
||||
// create glob with single symbol wildcard
|
||||
g = glob.MustCompile("?at")
|
||||
g.Match("cat") // true
|
||||
g.Match("fat") // true
|
||||
g.Match("at") // false
|
||||
|
||||
// create glob with single symbol wildcard and delimiters ['f']
|
||||
g = glob.MustCompile("?at", 'f')
|
||||
g.Match("cat") // true
|
||||
g.Match("fat") // false
|
||||
g.Match("at") // false
|
||||
|
||||
// create glob with character-list matchers
|
||||
g = glob.MustCompile("[abc]at")
|
||||
g.Match("cat") // true
|
||||
g.Match("bat") // true
|
||||
g.Match("fat") // false
|
||||
g.Match("at") // false
|
||||
|
||||
// create glob with character-list matchers
|
||||
g = glob.MustCompile("[!abc]at")
|
||||
g.Match("cat") // false
|
||||
g.Match("bat") // false
|
||||
g.Match("fat") // true
|
||||
g.Match("at") // false
|
||||
|
||||
// create glob with character-range matchers
|
||||
g = glob.MustCompile("[a-c]at")
|
||||
g.Match("cat") // true
|
||||
g.Match("bat") // true
|
||||
g.Match("fat") // false
|
||||
g.Match("at") // false
|
||||
|
||||
// create glob with character-range matchers
|
||||
g = glob.MustCompile("[!a-c]at")
|
||||
g.Match("cat") // false
|
||||
g.Match("bat") // false
|
||||
g.Match("fat") // true
|
||||
g.Match("at") // false
|
||||
|
||||
// create glob with pattern-alternatives list
|
||||
g = glob.MustCompile("{cat,bat,[fr]at}")
|
||||
g.Match("cat") // true
|
||||
g.Match("bat") // true
|
||||
g.Match("fat") // true
|
||||
g.Match("rat") // true
|
||||
g.Match("at") // false
|
||||
g.Match("zat") // false
|
||||
}
|
||||
|
||||
```
|
||||
|
||||
## Performance
|
||||
|
||||
This library is designed for compile-once patterns: compilation may take some time, but
matching a string against a compiled pattern is much faster than re-parsing the pattern on every call.
|
||||
|
||||
If you do not reuse the compiled `glob.Glob` object and instead run `g := glob.MustCompile(pattern); g.Match(...)` every time, your code will be much slower.
|
||||
|
||||
Run `go test -bench=.` from source root to see the benchmarks:
|
||||
|
||||
Pattern | Fixture | Match | Speed (ns/op)
|
||||
--------|---------|-------|--------------
|
||||
`[a-z][!a-x]*cat*[h][!b]*eyes*` | `my cat has very bright eyes` | `true` | 432
|
||||
`[a-z][!a-x]*cat*[h][!b]*eyes*` | `my dog has very bright eyes` | `false` | 199
|
||||
`https://*.google.*` | `https://account.google.com` | `true` | 96
|
||||
`https://*.google.*` | `https://google.com` | `false` | 66
|
||||
`{https://*.google.*,*yandex.*,*yahoo.*,*mail.ru}` | `http://yahoo.com` | `true` | 163
|
||||
`{https://*.google.*,*yandex.*,*yahoo.*,*mail.ru}` | `http://google.com` | `false` | 197
|
||||
`{https://*gobwas.com,http://exclude.gobwas.com}` | `https://safe.gobwas.com` | `true` | 22
|
||||
`{https://*gobwas.com,http://exclude.gobwas.com}` | `http://safe.gobwas.com` | `false` | 24
|
||||
`abc*` | `abcdef` | `true` | 8.15
|
||||
`abc*` | `af` | `false` | 5.68
|
||||
`*def` | `abcdef` | `true` | 8.84
|
||||
`*def` | `af` | `false` | 5.74
|
||||
`ab*ef` | `abcdef` | `true` | 15.2
|
||||
`ab*ef` | `af` | `false` | 10.4
|
||||
|
||||
The same things with `regexp` package:
|
||||
|
||||
Pattern | Fixture | Match | Speed (ns/op)
|
||||
--------|---------|-------|--------------
|
||||
`^[a-z][^a-x].*cat.*[h][^b].*eyes.*$` | `my cat has very bright eyes` | `true` | 2553
|
||||
`^[a-z][^a-x].*cat.*[h][^b].*eyes.*$` | `my dog has very bright eyes` | `false` | 1383
|
||||
`^https:\/\/.*\.google\..*$` | `https://account.google.com` | `true` | 1205
|
||||
`^https:\/\/.*\.google\..*$` | `https://google.com` | `false` | 767
|
||||
`^(https:\/\/.*\.google\..*|.*yandex\..*|.*yahoo\..*|.*mail\.ru)$` | `http://yahoo.com` | `true` | 1435
|
||||
`^(https:\/\/.*\.google\..*|.*yandex\..*|.*yahoo\..*|.*mail\.ru)$` | `http://google.com` | `false` | 1674
|
||||
`^(https:\/\/.*gobwas\.com|http://exclude.gobwas.com)$` | `https://safe.gobwas.com` | `true` | 1039
|
||||
`^(https:\/\/.*gobwas\.com|http://exclude.gobwas.com)$` | `http://safe.gobwas.com` | `false` | 272
|
||||
`^abc.*$` | `abcdef` | `true` | 237
|
||||
`^abc.*$` | `af` | `false` | 100
|
||||
`^.*def$` | `abcdef` | `true` | 464
|
||||
`^.*def$` | `af` | `false` | 265
|
||||
`^ab.*ef$` | `abcdef` | `true` | 375
|
||||
`^ab.*ef$` | `af` | `false` | 145
|
||||
|
||||
[godoc-image]: https://godoc.org/github.com/gobwas/glob?status.svg
|
||||
[godoc-url]: https://godoc.org/github.com/gobwas/glob
|
||||
[travis-image]: https://travis-ci.org/gobwas/glob.svg?branch=master
|
||||
[travis-url]: https://travis-ci.org/gobwas/glob
|
||||
|
||||
## Syntax
|
||||
|
||||
Syntax is inspired by [standard wildcards](http://tldp.org/LDP/GNU-Linux-Tools-Summary/html/x11655.htm),
except that `**` (the super-asterisk) is not sensitive to separators.
|
||||
122
vendor/github.com/gobwas/glob/syntax/ast/ast.go
generated
vendored
Normal file
122
vendor/github.com/gobwas/glob/syntax/ast/ast.go
generated
vendored
Normal file
@@ -0,0 +1,122 @@
|
||||
package ast
|
||||
|
||||
import (
|
||||
"bytes"
|
||||
"fmt"
|
||||
)
|
||||
|
||||
// Node is a single vertex of the parsed glob AST.
type Node struct {
	Parent   *Node
	Children []*Node
	Value    interface{}
	Kind     Kind
}

// NewNode creates a node of kind k holding value v and adopts ch as
// its children.
func NewNode(k Kind, v interface{}, ch ...*Node) *Node {
	node := &Node{Kind: k, Value: v}
	for _, child := range ch {
		Insert(node, child)
	}
	return node
}

// Equal reports whether the two subtrees have the same kinds, values
// and shape.
func (a *Node) Equal(b *Node) bool {
	if a.Kind != b.Kind || a.Value != b.Value || len(a.Children) != len(b.Children) {
		return false
	}
	for i := range a.Children {
		if !a.Children[i].Equal(b.Children[i]) {
			return false
		}
	}
	return true
}

// String renders the subtree as "Kind =value [child, ...]".
func (a *Node) String() string {
	var out bytes.Buffer
	out.WriteString(a.Kind.String())
	if a.Value != nil {
		fmt.Fprintf(&out, " =%v", a.Value)
	}
	if len(a.Children) > 0 {
		out.WriteString(" [")
		for i, child := range a.Children {
			if i > 0 {
				out.WriteString(", ")
			}
			out.WriteString(child.String())
		}
		out.WriteString("]")
	}
	return out.String()
}

// Insert appends children to parent and fixes up their Parent links.
func Insert(parent *Node, children ...*Node) {
	parent.Children = append(parent.Children, children...)
	for _, child := range children {
		child.Parent = parent
	}
}

// List is the value of a KindList node: a character class.
type List struct {
	Not   bool
	Chars string
}

// Range is the value of a KindRange node: a lo-hi rune range.
type Range struct {
	Not    bool
	Lo, Hi rune
}

// Text is the value of a KindText node: a literal chunk of a pattern.
type Text struct {
	Text string
}

// Kind discriminates the node types of the AST.
type Kind int

const (
	KindNothing Kind = iota
	KindPattern
	KindList
	KindRange
	KindText
	KindAny
	KindSuper
	KindSingle
	KindAnyOf
)

// String returns the human-readable name of the kind, or "" for an
// unknown value.
func (k Kind) String() string {
	names := [...]string{
		KindNothing: "Nothing",
		KindPattern: "Pattern",
		KindList:    "List",
		KindRange:   "Range",
		KindText:    "Text",
		KindAny:     "Any",
		KindSuper:   "Super",
		KindSingle:  "Single",
		KindAnyOf:   "AnyOf",
	}
	if k < 0 || int(k) >= len(names) {
		return ""
	}
	return names[k]
}
|
||||
157
vendor/github.com/gobwas/glob/syntax/ast/parser.go
generated
vendored
Normal file
157
vendor/github.com/gobwas/glob/syntax/ast/parser.go
generated
vendored
Normal file
@@ -0,0 +1,157 @@
|
||||
package ast
|
||||
|
||||
import (
|
||||
"errors"
|
||||
"fmt"
|
||||
"github.com/gobwas/glob/syntax/lexer"
|
||||
"unicode/utf8"
|
||||
)
|
||||
|
||||
type Lexer interface {
|
||||
Next() lexer.Token
|
||||
}
|
||||
|
||||
type parseFn func(*Node, Lexer) (parseFn, *Node, error)
|
||||
|
||||
func Parse(lexer Lexer) (*Node, error) {
|
||||
var parser parseFn
|
||||
|
||||
root := NewNode(KindPattern, nil)
|
||||
|
||||
var (
|
||||
tree *Node
|
||||
err error
|
||||
)
|
||||
for parser, tree = parserMain, root; parser != nil; {
|
||||
parser, tree, err = parser(tree, lexer)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
}
|
||||
|
||||
return root, nil
|
||||
}
|
||||
|
||||
func parserMain(tree *Node, lex Lexer) (parseFn, *Node, error) {
|
||||
for {
|
||||
token := lex.Next()
|
||||
switch token.Type {
|
||||
case lexer.EOF:
|
||||
return nil, tree, nil
|
||||
|
||||
case lexer.Error:
|
||||
return nil, tree, errors.New(token.Raw)
|
||||
|
||||
case lexer.Text:
|
||||
Insert(tree, NewNode(KindText, Text{token.Raw}))
|
||||
return parserMain, tree, nil
|
||||
|
||||
case lexer.Any:
|
||||
Insert(tree, NewNode(KindAny, nil))
|
||||
return parserMain, tree, nil
|
||||
|
||||
case lexer.Super:
|
||||
Insert(tree, NewNode(KindSuper, nil))
|
||||
return parserMain, tree, nil
|
||||
|
||||
case lexer.Single:
|
||||
Insert(tree, NewNode(KindSingle, nil))
|
||||
return parserMain, tree, nil
|
||||
|
||||
case lexer.RangeOpen:
|
||||
return parserRange, tree, nil
|
||||
|
||||
case lexer.TermsOpen:
|
||||
a := NewNode(KindAnyOf, nil)
|
||||
Insert(tree, a)
|
||||
|
||||
p := NewNode(KindPattern, nil)
|
||||
Insert(a, p)
|
||||
|
||||
return parserMain, p, nil
|
||||
|
||||
case lexer.Separator:
|
||||
p := NewNode(KindPattern, nil)
|
||||
Insert(tree.Parent, p)
|
||||
|
||||
return parserMain, p, nil
|
||||
|
||||
case lexer.TermsClose:
|
||||
return parserMain, tree.Parent.Parent, nil
|
||||
|
||||
default:
|
||||
return nil, tree, fmt.Errorf("unexpected token: %s", token)
|
||||
}
|
||||
}
|
||||
return nil, tree, fmt.Errorf("unknown error")
|
||||
}
|
||||
|
||||
func parserRange(tree *Node, lex Lexer) (parseFn, *Node, error) {
|
||||
var (
|
||||
not bool
|
||||
lo rune
|
||||
hi rune
|
||||
chars string
|
||||
)
|
||||
for {
|
||||
token := lex.Next()
|
||||
switch token.Type {
|
||||
case lexer.EOF:
|
||||
return nil, tree, errors.New("unexpected end")
|
||||
|
||||
case lexer.Error:
|
||||
return nil, tree, errors.New(token.Raw)
|
||||
|
||||
case lexer.Not:
|
||||
not = true
|
||||
|
||||
case lexer.RangeLo:
|
||||
r, w := utf8.DecodeRuneInString(token.Raw)
|
||||
if len(token.Raw) > w {
|
||||
return nil, tree, fmt.Errorf("unexpected length of lo character")
|
||||
}
|
||||
lo = r
|
||||
|
||||
case lexer.RangeBetween:
|
||||
//
|
||||
|
||||
case lexer.RangeHi:
|
||||
r, w := utf8.DecodeRuneInString(token.Raw)
|
||||
if len(token.Raw) > w {
|
||||
return nil, tree, fmt.Errorf("unexpected length of lo character")
|
||||
}
|
||||
|
||||
hi = r
|
||||
|
||||
if hi < lo {
|
||||
return nil, tree, fmt.Errorf("hi character '%s' should be greater than lo '%s'", string(hi), string(lo))
|
||||
}
|
||||
|
||||
case lexer.Text:
|
||||
chars = token.Raw
|
||||
|
||||
case lexer.RangeClose:
|
||||
isRange := lo != 0 && hi != 0
|
||||
isChars := chars != ""
|
||||
|
||||
if isChars == isRange {
|
||||
return nil, tree, fmt.Errorf("could not parse range")
|
||||
}
|
||||
|
||||
if isRange {
|
||||
Insert(tree, NewNode(KindRange, Range{
|
||||
Lo: lo,
|
||||
Hi: hi,
|
||||
Not: not,
|
||||
}))
|
||||
} else {
|
||||
Insert(tree, NewNode(KindList, List{
|
||||
Chars: chars,
|
||||
Not: not,
|
||||
}))
|
||||
}
|
||||
|
||||
return parserMain, tree, nil
|
||||
}
|
||||
}
|
||||
}
|
||||
273
vendor/github.com/gobwas/glob/syntax/lexer/lexer.go
generated
vendored
Normal file
273
vendor/github.com/gobwas/glob/syntax/lexer/lexer.go
generated
vendored
Normal file
@@ -0,0 +1,273 @@
|
||||
package lexer
|
||||
|
||||
import (
|
||||
"bytes"
|
||||
"fmt"
|
||||
"github.com/gobwas/glob/util/runes"
|
||||
"unicode/utf8"
|
||||
)
|
||||
|
||||
// Metacharacters of the glob syntax.
const (
	char_any           = '*'
	char_comma         = ','
	char_single        = '?'
	char_escape        = '\\'
	char_range_open    = '['
	char_range_close   = ']'
	char_terms_open    = '{'
	char_terms_close   = '}'
	char_range_not     = '!'
	char_range_between = '-'
)

// specials lists the characters that must be escaped in a pattern.
var specials = []byte{
	char_any,
	char_single,
	char_escape,
	char_range_open,
	char_range_close,
	char_terms_open,
	char_terms_close,
}

// Special reports whether c is a glob metacharacter.
func Special(c byte) bool {
	return bytes.IndexByte(specials, c) >= 0
}
|
||||
|
||||
type tokens []Token
|
||||
|
||||
func (i *tokens) shift() (ret Token) {
|
||||
ret = (*i)[0]
|
||||
copy(*i, (*i)[1:])
|
||||
*i = (*i)[:len(*i)-1]
|
||||
return
|
||||
}
|
||||
|
||||
func (i *tokens) push(v Token) {
|
||||
*i = append(*i, v)
|
||||
}
|
||||
|
||||
func (i *tokens) empty() bool {
|
||||
return len(*i) == 0
|
||||
}
|
||||
|
||||
var eof rune = 0
|
||||
|
||||
type lexer struct {
|
||||
data string
|
||||
pos int
|
||||
err error
|
||||
|
||||
tokens tokens
|
||||
termsLevel int
|
||||
|
||||
lastRune rune
|
||||
lastRuneSize int
|
||||
hasRune bool
|
||||
}
|
||||
|
||||
func NewLexer(source string) *lexer {
|
||||
l := &lexer{
|
||||
data: source,
|
||||
tokens: tokens(make([]Token, 0, 4)),
|
||||
}
|
||||
return l
|
||||
}
|
||||
|
||||
func (l *lexer) Next() Token {
|
||||
if l.err != nil {
|
||||
return Token{Error, l.err.Error()}
|
||||
}
|
||||
if !l.tokens.empty() {
|
||||
return l.tokens.shift()
|
||||
}
|
||||
|
||||
l.fetchItem()
|
||||
return l.Next()
|
||||
}
|
||||
|
||||
func (l *lexer) peek() (r rune, w int) {
|
||||
if l.pos == len(l.data) {
|
||||
return eof, 0
|
||||
}
|
||||
|
||||
r, w = utf8.DecodeRuneInString(l.data[l.pos:])
|
||||
if r == utf8.RuneError {
|
||||
l.errorf("could not read rune")
|
||||
r = eof
|
||||
w = 0
|
||||
}
|
||||
|
||||
return
|
||||
}
|
||||
|
||||
func (l *lexer) read() rune {
|
||||
if l.hasRune {
|
||||
l.hasRune = false
|
||||
l.seek(l.lastRuneSize)
|
||||
return l.lastRune
|
||||
}
|
||||
|
||||
r, s := l.peek()
|
||||
l.seek(s)
|
||||
|
||||
l.lastRune = r
|
||||
l.lastRuneSize = s
|
||||
|
||||
return r
|
||||
}
|
||||
|
||||
func (l *lexer) seek(w int) {
|
||||
l.pos += w
|
||||
}
|
||||
|
||||
func (l *lexer) unread() {
|
||||
if l.hasRune {
|
||||
l.errorf("could not unread rune")
|
||||
return
|
||||
}
|
||||
l.seek(-l.lastRuneSize)
|
||||
l.hasRune = true
|
||||
}
|
||||
|
||||
func (l *lexer) errorf(f string, v ...interface{}) {
|
||||
l.err = fmt.Errorf(f, v...)
|
||||
}
|
||||
|
||||
func (l *lexer) inTerms() bool {
|
||||
return l.termsLevel > 0
|
||||
}
|
||||
|
||||
func (l *lexer) termsEnter() {
|
||||
l.termsLevel++
|
||||
}
|
||||
|
||||
func (l *lexer) termsLeave() {
|
||||
l.termsLevel--
|
||||
}
|
||||
|
||||
var inTextBreakers = []rune{char_single, char_any, char_range_open, char_terms_open}
|
||||
var inTermsBreakers = append(inTextBreakers, char_terms_close, char_comma)
|
||||
|
||||
func (l *lexer) fetchItem() {
|
||||
r := l.read()
|
||||
switch {
|
||||
case r == eof:
|
||||
l.tokens.push(Token{EOF, ""})
|
||||
|
||||
case r == char_terms_open:
|
||||
l.termsEnter()
|
||||
l.tokens.push(Token{TermsOpen, string(r)})
|
||||
|
||||
case r == char_comma && l.inTerms():
|
||||
l.tokens.push(Token{Separator, string(r)})
|
||||
|
||||
case r == char_terms_close && l.inTerms():
|
||||
l.tokens.push(Token{TermsClose, string(r)})
|
||||
l.termsLeave()
|
||||
|
||||
case r == char_range_open:
|
||||
l.tokens.push(Token{RangeOpen, string(r)})
|
||||
l.fetchRange()
|
||||
|
||||
case r == char_single:
|
||||
l.tokens.push(Token{Single, string(r)})
|
||||
|
||||
case r == char_any:
|
||||
if l.read() == char_any {
|
||||
l.tokens.push(Token{Super, string(r) + string(r)})
|
||||
} else {
|
||||
l.unread()
|
||||
l.tokens.push(Token{Any, string(r)})
|
||||
}
|
||||
|
||||
default:
|
||||
l.unread()
|
||||
|
||||
var breakers []rune
|
||||
if l.inTerms() {
|
||||
breakers = inTermsBreakers
|
||||
} else {
|
||||
breakers = inTextBreakers
|
||||
}
|
||||
l.fetchText(breakers)
|
||||
}
|
||||
}
|
||||
|
||||
func (l *lexer) fetchRange() {
|
||||
var wantHi bool
|
||||
var wantClose bool
|
||||
var seenNot bool
|
||||
for {
|
||||
r := l.read()
|
||||
if r == eof {
|
||||
l.errorf("unexpected end of input")
|
||||
return
|
||||
}
|
||||
|
||||
if wantClose {
|
||||
if r != char_range_close {
|
||||
l.errorf("expected close range character")
|
||||
} else {
|
||||
l.tokens.push(Token{RangeClose, string(r)})
|
||||
}
|
||||
return
|
||||
}
|
||||
|
||||
if wantHi {
|
||||
l.tokens.push(Token{RangeHi, string(r)})
|
||||
wantClose = true
|
||||
continue
|
||||
}
|
||||
|
||||
if !seenNot && r == char_range_not {
|
||||
l.tokens.push(Token{Not, string(r)})
|
||||
seenNot = true
|
||||
continue
|
||||
}
|
||||
|
||||
if n, w := l.peek(); n == char_range_between {
|
||||
l.seek(w)
|
||||
l.tokens.push(Token{RangeLo, string(r)})
|
||||
l.tokens.push(Token{RangeBetween, string(n)})
|
||||
wantHi = true
|
||||
continue
|
||||
}
|
||||
|
||||
l.unread() // unread first peek and fetch as text
|
||||
l.fetchText([]rune{char_range_close})
|
||||
wantClose = true
|
||||
}
|
||||
}
|
||||
|
||||
func (l *lexer) fetchText(breakers []rune) {
|
||||
var data []rune
|
||||
var escaped bool
|
||||
|
||||
reading:
|
||||
for {
|
||||
r := l.read()
|
||||
if r == eof {
|
||||
break
|
||||
}
|
||||
|
||||
if !escaped {
|
||||
if r == char_escape {
|
||||
escaped = true
|
||||
continue
|
||||
}
|
||||
|
||||
if runes.IndexRune(breakers, r) != -1 {
|
||||
l.unread()
|
||||
break reading
|
||||
}
|
||||
}
|
||||
|
||||
escaped = false
|
||||
data = append(data, r)
|
||||
}
|
||||
|
||||
if len(data) > 0 {
|
||||
l.tokens.push(Token{Text, string(data)})
|
||||
}
|
||||
}
|
||||
88
vendor/github.com/gobwas/glob/syntax/lexer/token.go
generated
vendored
Normal file
88
vendor/github.com/gobwas/glob/syntax/lexer/token.go
generated
vendored
Normal file
@@ -0,0 +1,88 @@
|
||||
package lexer
|
||||
|
||||
import "fmt"
|
||||
|
||||
// TokenType enumerates the kinds of lexical tokens.
type TokenType int

const (
	EOF TokenType = iota
	Error
	Text
	Char
	Any
	Super
	Single
	Not
	Separator
	RangeOpen
	RangeClose
	RangeLo
	RangeHi
	RangeBetween
	TermsOpen
	TermsClose
)

// tokenTypeNames maps each TokenType to its display name.
var tokenTypeNames = map[TokenType]string{
	EOF:          "eof",
	Error:        "error",
	Text:         "text",
	Char:         "char",
	Any:          "any",
	Super:        "super",
	Single:       "single",
	Not:          "not",
	Separator:    "separator",
	RangeOpen:    "range_open",
	RangeClose:   "range_close",
	RangeLo:      "range_lo",
	RangeHi:      "range_hi",
	RangeBetween: "range_between",
	TermsOpen:    "terms_open",
	TermsClose:   "terms_close",
}

// String returns the lowercase name of the token type, or "undef" for
// an unknown value.
func (tt TokenType) String() string {
	if name, ok := tokenTypeNames[tt]; ok {
		return name
	}
	return "undef"
}

// Token is a single lexical token: its type plus the raw text it was
// produced from.
type Token struct {
	Type TokenType
	Raw  string
}

// String implements fmt.Stringer, e.g. `text<"abc">`.
func (t Token) String() string {
	return fmt.Sprintf("%v<%q>", t.Type, t.Raw)
}
|
||||
14
vendor/github.com/gobwas/glob/syntax/syntax.go
generated
vendored
Normal file
14
vendor/github.com/gobwas/glob/syntax/syntax.go
generated
vendored
Normal file
@@ -0,0 +1,14 @@
|
||||
package syntax
|
||||
|
||||
import (
|
||||
"github.com/gobwas/glob/syntax/ast"
|
||||
"github.com/gobwas/glob/syntax/lexer"
|
||||
)
|
||||
|
||||
func Parse(s string) (*ast.Node, error) {
|
||||
return ast.Parse(lexer.NewLexer(s))
|
||||
}
|
||||
|
||||
func Special(b byte) bool {
|
||||
return lexer.Special(b)
|
||||
}
|
||||
154
vendor/github.com/gobwas/glob/util/runes/runes.go
generated
vendored
Normal file
154
vendor/github.com/gobwas/glob/util/runes/runes.go
generated
vendored
Normal file
@@ -0,0 +1,154 @@
|
||||
package runes
|
||||
|
||||
// Index returns the index of the first occurrence of needle in s, or
// -1 if needle is not present. An empty needle matches at 0.
func Index(s, needle []rune) int {
	ls, ln := len(s), len(needle)

	switch {
	case ln == 0:
		return 0
	case ln == 1:
		return IndexRune(s, needle[0])
	case ln == ls:
		if Equal(s, needle) {
			return 0
		}
		return -1
	case ln > ls:
		return -1
	}

head:
	for i := 0; i <= ls-ln; i++ {
		for y := 0; y < ln; y++ {
			if s[i+y] != needle[y] {
				continue head
			}
		}
		return i
	}
	return -1
}

// LastIndex returns the index of the last occurrence of needle in s,
// or -1 if needle is not present. An empty needle matches at len(s).
//
// Fix: the scan previously required i >= ln (i being the index of the
// candidate match's LAST rune), which skipped a match starting at
// index 0; the correct lower bound is ln-1.
func LastIndex(s, needle []rune) int {
	ls, ln := len(s), len(needle)

	switch {
	case ln == 0:
		if ls == 0 {
			return 0
		}
		return ls
	case ln == 1:
		return IndexLastRune(s, needle[0])
	case ln == ls:
		if Equal(s, needle) {
			return 0
		}
		return -1
	case ln > ls:
		return -1
	}

head:
	for i := ls - 1; i >= ln-1; i-- {
		for y := ln - 1; y >= 0; y-- {
			if s[i-(ln-y-1)] != needle[y] {
				continue head
			}
		}
		return i - ln + 1
	}
	return -1
}

// IndexAny returns the index of the first instance of any rune from
// chars in s, or -1 if no rune from chars is present in s.
func IndexAny(s, chars []rune) int {
	if len(chars) > 0 {
		for i, c := range s {
			for _, m := range chars {
				if c == m {
					return i
				}
			}
		}
	}
	return -1
}

// Contains reports whether needle occurs within s.
func Contains(s, needle []rune) bool {
	return Index(s, needle) >= 0
}

// Max returns the largest rune in s, or 0 for an empty slice.
func Max(s []rune) (max rune) {
	for _, r := range s {
		if r > max {
			max = r
		}
	}
	return
}

// Min returns the smallest rune in s, or -1 for an empty slice.
func Min(s []rune) rune {
	min := rune(-1)
	for _, r := range s {
		if min == -1 {
			min = r
			continue
		}
		if r < min {
			min = r
		}
	}
	return min
}

// IndexRune returns the index of the first occurrence of r in s, or -1.
func IndexRune(s []rune, r rune) int {
	for i, c := range s {
		if c == r {
			return i
		}
	}
	return -1
}

// IndexLastRune returns the index of the last occurrence of r in s, or -1.
func IndexLastRune(s []rune, r rune) int {
	for i := len(s) - 1; i >= 0; i-- {
		if s[i] == r {
			return i
		}
	}
	return -1
}

// Equal reports whether a and b contain the same runes.
func Equal(a, b []rune) bool {
	if len(a) != len(b) {
		return false
	}
	for i := 0; i < len(a); i++ {
		if a[i] != b[i] {
			return false
		}
	}
	return true
}

// HasPrefix tests whether the rune slice s begins with prefix.
func HasPrefix(s, prefix []rune) bool {
	return len(s) >= len(prefix) && Equal(s[0:len(prefix)], prefix)
}

// HasSuffix tests whether the rune slice s ends with suffix.
func HasSuffix(s, suffix []rune) bool {
	return len(s) >= len(suffix) && Equal(s[len(s)-len(suffix):], suffix)
}
|
||||
39
vendor/github.com/gobwas/glob/util/strings/strings.go
generated
vendored
Normal file
39
vendor/github.com/gobwas/glob/util/strings/strings.go
generated
vendored
Normal file
@@ -0,0 +1,39 @@
|
||||
package strings
|
||||
|
||||
import (
|
||||
"strings"
|
||||
"unicode/utf8"
|
||||
)
|
||||
|
||||
// IndexAnyRunes returns the index in s of the first rune from rs that
// occurs in s, trying the runes of rs in order; -1 when none occurs.
// (Note: rs order wins, so this is not necessarily the leftmost
// occurrence over all of rs — preserved from the original contract.)
func IndexAnyRunes(s string, rs []rune) int {
	for _, r := range rs {
		if i := strings.IndexRune(s, r); i != -1 {
			return i
		}
	}

	return -1
}

// LastIndexAnyRunes returns the index in s of the LAST occurrence of
// the first rune from rs that occurs in s, trying the runes of rs in
// order; -1 when none occurs.
//
// Fix: the previous multi-byte path called strings.IndexRune on the
// full string instead of the shrinking tail, so it returned the FIRST
// occurrence of a non-ASCII rune rather than the last. Rewritten as a
// backward rune-by-rune scan.
func LastIndexAnyRunes(s string, rs []rune) int {
	for _, r := range rs {
		i := -1
		if 0 <= r && r < utf8.RuneSelf {
			// fast path: single-byte rune
			i = strings.LastIndexByte(s, byte(r))
		} else {
			// scan backwards for the last occurrence of r
			for j := len(s); j > 0; {
				r2, w := utf8.DecodeLastRuneInString(s[:j])
				j -= w
				if r2 == r {
					i = j
					break
				}
			}
		}
		if i != -1 {
			return i
		}
	}
	return -1
}
|
||||
476
vendor/github.com/klauspost/cpuid/private-gen.go
generated
vendored
Normal file
476
vendor/github.com/klauspost/cpuid/private-gen.go
generated
vendored
Normal file
@@ -0,0 +1,476 @@
|
||||
// +build ignore
|
||||
|
||||
package main
|
||||
|
||||
import (
|
||||
"bytes"
|
||||
"fmt"
|
||||
"go/ast"
|
||||
"go/parser"
|
||||
"go/printer"
|
||||
"go/token"
|
||||
"io"
|
||||
"io/ioutil"
|
||||
"log"
|
||||
"os"
|
||||
"reflect"
|
||||
"strings"
|
||||
"unicode"
|
||||
"unicode/utf8"
|
||||
)
|
||||
|
||||
var inFiles = []string{"cpuid.go", "cpuid_test.go"}
|
||||
var copyFiles = []string{"cpuid_amd64.s", "cpuid_386.s", "detect_ref.go", "detect_intel.go"}
|
||||
var fileSet = token.NewFileSet()
|
||||
var reWrites = []rewrite{
|
||||
initRewrite("CPUInfo -> cpuInfo"),
|
||||
initRewrite("Vendor -> vendor"),
|
||||
initRewrite("Flags -> flags"),
|
||||
initRewrite("Detect -> detect"),
|
||||
initRewrite("CPU -> cpu"),
|
||||
}
|
||||
var excludeNames = map[string]bool{"string": true, "join": true, "trim": true,
|
||||
// cpuid_test.go
|
||||
"t": true, "println": true, "logf": true, "log": true, "fatalf": true, "fatal": true,
|
||||
}
|
||||
|
||||
var excludePrefixes = []string{"test", "benchmark"}
|
||||
|
||||
func main() {
|
||||
Package := "private"
|
||||
parserMode := parser.ParseComments
|
||||
exported := make(map[string]rewrite)
|
||||
for _, file := range inFiles {
|
||||
in, err := os.Open(file)
|
||||
if err != nil {
|
||||
log.Fatalf("opening input", err)
|
||||
}
|
||||
|
||||
src, err := ioutil.ReadAll(in)
|
||||
if err != nil {
|
||||
log.Fatalf("reading input", err)
|
||||
}
|
||||
|
||||
astfile, err := parser.ParseFile(fileSet, file, src, parserMode)
|
||||
if err != nil {
|
||||
log.Fatalf("parsing input", err)
|
||||
}
|
||||
|
||||
for _, rw := range reWrites {
|
||||
astfile = rw(astfile)
|
||||
}
|
||||
|
||||
// Inspect the AST and print all identifiers and literals.
|
||||
var startDecl token.Pos
|
||||
var endDecl token.Pos
|
||||
ast.Inspect(astfile, func(n ast.Node) bool {
|
||||
var s string
|
||||
switch x := n.(type) {
|
||||
case *ast.Ident:
|
||||
if x.IsExported() {
|
||||
t := strings.ToLower(x.Name)
|
||||
for _, pre := range excludePrefixes {
|
||||
if strings.HasPrefix(t, pre) {
|
||||
return true
|
||||
}
|
||||
}
|
||||
if excludeNames[t] != true {
|
||||
//if x.Pos() > startDecl && x.Pos() < endDecl {
|
||||
exported[x.Name] = initRewrite(x.Name + " -> " + t)
|
||||
}
|
||||
}
|
||||
|
||||
case *ast.GenDecl:
|
||||
if x.Tok == token.CONST && x.Lparen > 0 {
|
||||
startDecl = x.Lparen
|
||||
endDecl = x.Rparen
|
||||
// fmt.Printf("Decl:%s -> %s\n", fileSet.Position(startDecl), fileSet.Position(endDecl))
|
||||
}
|
||||
}
|
||||
if s != "" {
|
||||
fmt.Printf("%s:\t%s\n", fileSet.Position(n.Pos()), s)
|
||||
}
|
||||
return true
|
||||
})
|
||||
|
||||
for _, rw := range exported {
|
||||
astfile = rw(astfile)
|
||||
}
|
||||
|
||||
var buf bytes.Buffer
|
||||
|
||||
printer.Fprint(&buf, fileSet, astfile)
|
||||
|
||||
// Remove package documentation and insert information
|
||||
s := buf.String()
|
||||
ind := strings.Index(buf.String(), "\npackage cpuid")
|
||||
s = s[ind:]
|
||||
s = "// Generated, DO NOT EDIT,\n" +
|
||||
"// but copy it to your own project and rename the package.\n" +
|
||||
"// See more at http://github.com/klauspost/cpuid\n" +
|
||||
s
|
||||
|
||||
outputName := Package + string(os.PathSeparator) + file
|
||||
|
||||
err = ioutil.WriteFile(outputName, []byte(s), 0644)
|
||||
if err != nil {
|
||||
log.Fatalf("writing output: %s", err)
|
||||
}
|
||||
log.Println("Generated", outputName)
|
||||
}
|
||||
|
||||
for _, file := range copyFiles {
|
||||
dst := ""
|
||||
if strings.HasPrefix(file, "cpuid") {
|
||||
dst = Package + string(os.PathSeparator) + file
|
||||
} else {
|
||||
dst = Package + string(os.PathSeparator) + "cpuid_" + file
|
||||
}
|
||||
err := copyFile(file, dst)
|
||||
if err != nil {
|
||||
log.Fatalf("copying file: %s", err)
|
||||
}
|
||||
log.Println("Copied", dst)
|
||||
}
|
||||
}
|
||||
|
||||
// CopyFile copies a file from src to dst. If src and dst files exist, and are
|
||||
// the same, then return success. Copy the file contents from src to dst.
|
||||
// copyFile copies a file from src to dst. If src and dst exist and are
// the same file, it returns success without copying. Non-regular files
// (directories, symlinks, devices, ...) are rejected.
func copyFile(src, dst string) (err error) {
	sfi, err := os.Stat(src)
	if err != nil {
		return
	}
	if !sfi.Mode().IsRegular() {
		// cannot copy non-regular files (e.g., directories,
		// symlinks, devices, etc.)
		return fmt.Errorf("CopyFile: non-regular source file %s (%q)", sfi.Name(), sfi.Mode().String())
	}

	dfi, err := os.Stat(dst)
	switch {
	case err != nil && !os.IsNotExist(err):
		// stat failed for a reason other than "does not exist"
		return
	case err == nil:
		if !dfi.Mode().IsRegular() {
			return fmt.Errorf("CopyFile: non-regular destination file %s (%q)", dfi.Name(), dfi.Mode().String())
		}
		if os.SameFile(sfi, dfi) {
			return
		}
	}

	return copyFileContents(src, dst)
}

// copyFileContents copies the contents of the file named src to the
// file named dst, creating or truncating dst. The destination is
// synced to disk; a Close failure on success paths is surfaced.
func copyFileContents(src, dst string) (err error) {
	in, err := os.Open(src)
	if err != nil {
		return
	}
	defer in.Close()

	out, err := os.Create(dst)
	if err != nil {
		return
	}
	defer func() {
		// keep the first error; report Close failures otherwise
		if cerr := out.Close(); err == nil {
			err = cerr
		}
	}()

	if _, err = io.Copy(out, in); err != nil {
		return
	}
	return out.Sync()
}
|
||||
|
||||
// rewrite is a transformation applied to a parsed file; it returns the
// (possibly replaced) *ast.File.
type rewrite func(*ast.File) *ast.File
|
||||
|
||||
// Mostly copied from gofmt
|
||||
func initRewrite(rewriteRule string) rewrite {
|
||||
f := strings.Split(rewriteRule, "->")
|
||||
if len(f) != 2 {
|
||||
fmt.Fprintf(os.Stderr, "rewrite rule must be of the form 'pattern -> replacement'\n")
|
||||
os.Exit(2)
|
||||
}
|
||||
pattern := parseExpr(f[0], "pattern")
|
||||
replace := parseExpr(f[1], "replacement")
|
||||
return func(p *ast.File) *ast.File { return rewriteFile(pattern, replace, p) }
|
||||
}
|
||||
|
||||
// parseExpr parses s as an expression.
// It might make sense to expand this to allow statement patterns,
// but there are problems with preserving formatting and also
// with what a wildcard for a statement looks like.
// On a parse error the program exits with code 2.
func parseExpr(s, what string) ast.Expr {
	expr, err := parser.ParseExpr(s)
	if err == nil {
		return expr
	}
	fmt.Fprintf(os.Stderr, "parsing %s %s at %s\n", what, s, err)
	os.Exit(2)
	return nil // unreachable; keeps the compiler satisfied
}
|
||||
|
||||
// Keep this function for debugging.
|
||||
/*
|
||||
func dump(msg string, val reflect.Value) {
|
||||
fmt.Printf("%s:\n", msg)
|
||||
ast.Print(fileSet, val.Interface())
|
||||
fmt.Println()
|
||||
}
|
||||
*/
|
||||
|
||||
// rewriteFile applies the rewrite rule 'pattern -> replace' to an entire file.
func rewriteFile(pattern, replace ast.Expr, p *ast.File) *ast.File {
	// Remember comment associations so they can be re-attached to the
	// rewritten tree at the end.
	cmap := ast.NewCommentMap(fileSet, p, p.Comments)
	// m records wildcard bindings for the current match attempt.
	m := make(map[string]reflect.Value)
	pat := reflect.ValueOf(pattern)
	repl := reflect.ValueOf(replace)

	var rewriteVal func(val reflect.Value) reflect.Value
	rewriteVal = func(val reflect.Value) reflect.Value {
		// don't bother if val is invalid to start with
		if !val.IsValid() {
			return reflect.Value{}
		}
		// Reset wildcard bindings from any previous match attempt.
		for k := range m {
			delete(m, k)
		}
		// Rewrite children first (bottom-up), then try the pattern on
		// the resulting node.
		val = apply(rewriteVal, val)
		if match(m, pat, val) {
			// Substitute the replacement, re-using the matched node's
			// position so line numbers stay plausible.
			val = subst(m, repl, reflect.ValueOf(val.Interface().(ast.Node).Pos()))
		}
		return val
	}

	r := apply(rewriteVal, reflect.ValueOf(p)).Interface().(*ast.File)
	r.Comments = cmap.Filter(r).Comments() // recreate comments list
	return r
}
|
||||
|
||||
// set is a wrapper for x.Set(y); it protects the caller from panics if x cannot be changed to y.
|
||||
func set(x, y reflect.Value) {
|
||||
// don't bother if x cannot be set or y is invalid
|
||||
if !x.CanSet() || !y.IsValid() {
|
||||
return
|
||||
}
|
||||
defer func() {
|
||||
if x := recover(); x != nil {
|
||||
if s, ok := x.(string); ok &&
|
||||
(strings.Contains(s, "type mismatch") || strings.Contains(s, "not assignable")) {
|
||||
// x cannot be set to y - ignore this rewrite
|
||||
return
|
||||
}
|
||||
panic(x)
|
||||
}
|
||||
}()
|
||||
x.Set(y)
|
||||
}
|
||||
|
||||
// Values/types for special cases.
var (
	// Replacement values used by apply to sever stale *ast.Object and
	// *ast.Scope references after a rewrite.
	objectPtrNil = reflect.ValueOf((*ast.Object)(nil))
	scopePtrNil  = reflect.ValueOf((*ast.Scope)(nil))

	// Cached reflect.Types consulted by apply/match/subst.
	identType     = reflect.TypeOf((*ast.Ident)(nil))
	objectPtrType = reflect.TypeOf((*ast.Object)(nil))
	positionType  = reflect.TypeOf(token.NoPos)
	callExprType  = reflect.TypeOf((*ast.CallExpr)(nil))
	scopePtrType  = reflect.TypeOf((*ast.Scope)(nil))
)
|
||||
|
||||
// apply replaces each AST field x in val with f(x), returning val.
// To avoid extra conversions, f operates on the reflect.Value form.
func apply(f func(reflect.Value) reflect.Value, val reflect.Value) reflect.Value {
	if !val.IsValid() {
		return reflect.Value{}
	}

	// *ast.Objects introduce cycles and are likely incorrect after
	// rewrite; don't follow them but replace with nil instead
	if val.Type() == objectPtrType {
		return objectPtrNil
	}

	// similarly for scopes: they are likely incorrect after a rewrite;
	// replace them with nil
	if val.Type() == scopePtrType {
		return scopePtrNil
	}

	// Recurse into container kinds; set() guards against values that
	// cannot be assigned back.
	switch v := reflect.Indirect(val); v.Kind() {
	case reflect.Slice:
		for i := 0; i < v.Len(); i++ {
			e := v.Index(i)
			set(e, f(e))
		}
	case reflect.Struct:
		for i := 0; i < v.NumField(); i++ {
			e := v.Field(i)
			set(e, f(e))
		}
	case reflect.Interface:
		e := v.Elem()
		set(v, f(e))
	}
	return val
}
|
||||
|
||||
// isWildcard reports whether s consists of exactly one lower-case rune,
// which is how rewrite patterns denote wildcards.
func isWildcard(s string) bool {
	r, n := utf8.DecodeRuneInString(s)
	if n != len(s) {
		return false // empty handled below; multi-rune strings never qualify
	}
	return unicode.IsLower(r)
}
|
||||
|
||||
// match returns true if pattern matches val,
// recording wildcard submatches in m.
// If m == nil, match checks whether pattern == val.
func match(m map[string]reflect.Value, pattern, val reflect.Value) bool {
	// Wildcard matches any expression. If it appears multiple
	// times in the pattern, it must match the same expression
	// each time.
	if m != nil && pattern.IsValid() && pattern.Type() == identType {
		name := pattern.Interface().(*ast.Ident).Name
		if isWildcard(name) && val.IsValid() {
			// wildcards only match valid (non-nil) expressions.
			if _, ok := val.Interface().(ast.Expr); ok && !val.IsNil() {
				if old, ok := m[name]; ok {
					// Repeated wildcard: must equal the first binding.
					return match(nil, old, val)
				}
				m[name] = val
				return true
			}
		}
	}

	// Otherwise, pattern and val must match recursively.
	if !pattern.IsValid() || !val.IsValid() {
		return !pattern.IsValid() && !val.IsValid()
	}
	if pattern.Type() != val.Type() {
		return false
	}

	// Special cases.
	switch pattern.Type() {
	case identType:
		// For identifiers, only the names need to match
		// (and none of the other *ast.Object information).
		// This is a common case, handle it all here instead
		// of recursing down any further via reflection.
		p := pattern.Interface().(*ast.Ident)
		v := val.Interface().(*ast.Ident)
		return p == nil && v == nil || p != nil && v != nil && p.Name == v.Name
	case objectPtrType, positionType:
		// object pointers and token positions always match
		return true
	case callExprType:
		// For calls, the Ellipsis fields (token.Position) must
		// match since that is how f(x) and f(x...) are different.
		// Check them here but fall through for the remaining fields.
		p := pattern.Interface().(*ast.CallExpr)
		v := val.Interface().(*ast.CallExpr)
		if p.Ellipsis.IsValid() != v.Ellipsis.IsValid() {
			return false
		}
	}

	// Compare the concrete (dereferenced) values element by element.
	p := reflect.Indirect(pattern)
	v := reflect.Indirect(val)
	if !p.IsValid() || !v.IsValid() {
		return !p.IsValid() && !v.IsValid()
	}

	switch p.Kind() {
	case reflect.Slice:
		if p.Len() != v.Len() {
			return false
		}
		for i := 0; i < p.Len(); i++ {
			if !match(m, p.Index(i), v.Index(i)) {
				return false
			}
		}
		return true

	case reflect.Struct:
		for i := 0; i < p.NumField(); i++ {
			if !match(m, p.Field(i), v.Field(i)) {
				return false
			}
		}
		return true

	case reflect.Interface:
		return match(m, p.Elem(), v.Elem())
	}

	// Handle token integers, etc.
	return p.Interface() == v.Interface()
}
|
||||
|
||||
// subst returns a copy of pattern with values from m substituted in place
// of wildcards and pos used as the position of tokens from the pattern.
// if m == nil, subst returns a copy of pattern and doesn't change the line
// number information.
func subst(m map[string]reflect.Value, pattern reflect.Value, pos reflect.Value) reflect.Value {
	if !pattern.IsValid() {
		return reflect.Value{}
	}

	// Wildcard gets replaced with map value.
	if m != nil && pattern.Type() == identType {
		name := pattern.Interface().(*ast.Ident).Name
		if isWildcard(name) {
			if old, ok := m[name]; ok {
				// Copy the bound node without rewriting positions
				// (m == nil, pos invalid).
				return subst(nil, old, reflect.Value{})
			}
		}
	}

	if pos.IsValid() && pattern.Type() == positionType {
		// use new position only if old position was valid in the first place
		if old := pattern.Interface().(token.Pos); !old.IsValid() {
			return pattern
		}
		return pos
	}

	// Otherwise copy.
	switch p := pattern; p.Kind() {
	case reflect.Slice:
		v := reflect.MakeSlice(p.Type(), p.Len(), p.Len())
		for i := 0; i < p.Len(); i++ {
			v.Index(i).Set(subst(m, p.Index(i), pos))
		}
		return v

	case reflect.Struct:
		v := reflect.New(p.Type()).Elem()
		for i := 0; i < p.NumField(); i++ {
			v.Field(i).Set(subst(m, p.Field(i), pos))
		}
		return v

	case reflect.Ptr:
		v := reflect.New(p.Type()).Elem()
		if elem := p.Elem(); elem.IsValid() {
			v.Set(subst(m, elem, pos).Addr())
		}
		return v

	case reflect.Interface:
		v := reflect.New(p.Type()).Elem()
		if elem := p.Elem(); elem.IsValid() {
			v.Set(subst(m, elem, pos))
		}
		return v
	}

	// Scalars (ints, strings, bools, token kinds) are immutable; return as-is.
	return pattern
}
|
||||
169
vendor/github.com/marten-seemann/qtls/generate_cert.go
generated
vendored
Normal file
169
vendor/github.com/marten-seemann/qtls/generate_cert.go
generated
vendored
Normal file
@@ -0,0 +1,169 @@
|
||||
// Copyright 2009 The Go Authors. All rights reserved.
|
||||
// Use of this source code is governed by a BSD-style
|
||||
// license that can be found in the LICENSE file.
|
||||
|
||||
// +build ignore
|
||||
|
||||
// Generate a self-signed X.509 certificate for a TLS server. Outputs to
|
||||
// 'cert.pem' and 'key.pem' and will overwrite existing files.
|
||||
|
||||
package main
|
||||
|
||||
import (
|
||||
"crypto/ecdsa"
|
||||
"crypto/elliptic"
|
||||
"crypto/rand"
|
||||
"crypto/rsa"
|
||||
"crypto/x509"
|
||||
"crypto/x509/pkix"
|
||||
"encoding/pem"
|
||||
"flag"
|
||||
"fmt"
|
||||
"log"
|
||||
"math/big"
|
||||
"net"
|
||||
"os"
|
||||
"strings"
|
||||
"time"
|
||||
)
|
||||
|
||||
// Command-line flags controlling the generated certificate.
var (
	host       = flag.String("host", "", "Comma-separated hostnames and IPs to generate a certificate for")
	validFrom  = flag.String("start-date", "", "Creation date formatted as Jan 1 15:04:05 2011")
	validFor   = flag.Duration("duration", 365*24*time.Hour, "Duration that certificate is valid for")
	isCA       = flag.Bool("ca", false, "whether this cert should be its own Certificate Authority")
	rsaBits    = flag.Int("rsa-bits", 2048, "Size of RSA key to generate. Ignored if --ecdsa-curve is set")
	ecdsaCurve = flag.String("ecdsa-curve", "", "ECDSA curve to use to generate a key. Valid values are P224, P256 (recommended), P384, P521")
)
|
||||
|
||||
func publicKey(priv interface{}) interface{} {
|
||||
switch k := priv.(type) {
|
||||
case *rsa.PrivateKey:
|
||||
return &k.PublicKey
|
||||
case *ecdsa.PrivateKey:
|
||||
return &k.PublicKey
|
||||
default:
|
||||
return nil
|
||||
}
|
||||
}
|
||||
|
||||
func pemBlockForKey(priv interface{}) *pem.Block {
|
||||
switch k := priv.(type) {
|
||||
case *rsa.PrivateKey:
|
||||
return &pem.Block{Type: "RSA PRIVATE KEY", Bytes: x509.MarshalPKCS1PrivateKey(k)}
|
||||
case *ecdsa.PrivateKey:
|
||||
b, err := x509.MarshalECPrivateKey(k)
|
||||
if err != nil {
|
||||
fmt.Fprintf(os.Stderr, "Unable to marshal ECDSA private key: %v", err)
|
||||
os.Exit(2)
|
||||
}
|
||||
return &pem.Block{Type: "EC PRIVATE KEY", Bytes: b}
|
||||
default:
|
||||
return nil
|
||||
}
|
||||
}
|
||||
|
||||
// main generates a self-signed certificate: it parses the flags, creates an
// RSA or ECDSA key, fills in an x509 template, and writes cert.pem/key.pem
// into the current directory (overwriting existing files).
func main() {
	flag.Parse()

	if len(*host) == 0 {
		log.Fatalf("Missing required --host parameter")
	}

	// Key generation: RSA by default, ECDSA when --ecdsa-curve is given.
	var priv interface{}
	var err error
	switch *ecdsaCurve {
	case "":
		priv, err = rsa.GenerateKey(rand.Reader, *rsaBits)
	case "P224":
		priv, err = ecdsa.GenerateKey(elliptic.P224(), rand.Reader)
	case "P256":
		priv, err = ecdsa.GenerateKey(elliptic.P256(), rand.Reader)
	case "P384":
		priv, err = ecdsa.GenerateKey(elliptic.P384(), rand.Reader)
	case "P521":
		priv, err = ecdsa.GenerateKey(elliptic.P521(), rand.Reader)
	default:
		fmt.Fprintf(os.Stderr, "Unrecognized elliptic curve: %q", *ecdsaCurve)
		os.Exit(1)
	}
	if err != nil {
		log.Fatalf("failed to generate private key: %s", err)
	}

	// Validity window: now (or --start-date) plus --duration.
	var notBefore time.Time
	if len(*validFrom) == 0 {
		notBefore = time.Now()
	} else {
		notBefore, err = time.Parse("Jan 2 15:04:05 2006", *validFrom)
		if err != nil {
			fmt.Fprintf(os.Stderr, "Failed to parse creation date: %s\n", err)
			os.Exit(1)
		}
	}

	notAfter := notBefore.Add(*validFor)

	// Random 128-bit serial number.
	serialNumberLimit := new(big.Int).Lsh(big.NewInt(1), 128)
	serialNumber, err := rand.Int(rand.Reader, serialNumberLimit)
	if err != nil {
		log.Fatalf("failed to generate serial number: %s", err)
	}

	template := x509.Certificate{
		SerialNumber: serialNumber,
		Subject: pkix.Name{
			Organization: []string{"Acme Co"},
		},
		NotBefore: notBefore,
		NotAfter:  notAfter,

		KeyUsage:              x509.KeyUsageKeyEncipherment | x509.KeyUsageDigitalSignature,
		ExtKeyUsage:           []x509.ExtKeyUsage{x509.ExtKeyUsageServerAuth},
		BasicConstraintsValid: true,
	}

	// Each --host entry becomes an IP SAN if it parses as an IP,
	// otherwise a DNS SAN.
	hosts := strings.Split(*host, ",")
	for _, h := range hosts {
		if ip := net.ParseIP(h); ip != nil {
			template.IPAddresses = append(template.IPAddresses, ip)
		} else {
			template.DNSNames = append(template.DNSNames, h)
		}
	}

	if *isCA {
		template.IsCA = true
		template.KeyUsage |= x509.KeyUsageCertSign
	}

	// Self-signed: template is both subject and issuer.
	derBytes, err := x509.CreateCertificate(rand.Reader, &template, &template, publicKey(priv), priv)
	if err != nil {
		log.Fatalf("Failed to create certificate: %s", err)
	}

	certOut, err := os.Create("cert.pem")
	if err != nil {
		log.Fatalf("failed to open cert.pem for writing: %s", err)
	}
	if err := pem.Encode(certOut, &pem.Block{Type: "CERTIFICATE", Bytes: derBytes}); err != nil {
		log.Fatalf("failed to write data to cert.pem: %s", err)
	}
	if err := certOut.Close(); err != nil {
		log.Fatalf("error closing cert.pem: %s", err)
	}
	log.Print("wrote cert.pem\n")

	// Key file is created with 0600 since it holds the private key.
	keyOut, err := os.OpenFile("key.pem", os.O_WRONLY|os.O_CREATE|os.O_TRUNC, 0600)
	if err != nil {
		log.Print("failed to open key.pem for writing:", err)
		return
	}
	if err := pem.Encode(keyOut, pemBlockForKey(priv)); err != nil {
		log.Fatalf("failed to write data to key.pem: %s", err)
	}
	if err := keyOut.Close(); err != nil {
		log.Fatalf("error closing key.pem: %s", err)
	}
	log.Print("wrote key.pem\n")
}
|
||||
144
vendor/github.com/miekg/dns/duplicate_generate.go
generated
vendored
Normal file
144
vendor/github.com/miekg/dns/duplicate_generate.go
generated
vendored
Normal file
@@ -0,0 +1,144 @@
|
||||
//+build ignore
|
||||
|
||||
// types_generate.go is meant to run with go generate. It will use
|
||||
// go/{importer,types} to track down all the RR struct types. Then for each type
|
||||
// it will generate conversion tables (TypeToRR and TypeToString) and banal
|
||||
// methods (len, Header, copy) based on the struct tags. The generated source is
|
||||
// written to ztypes.go, and is meant to be checked into git.
|
||||
package main
|
||||
|
||||
import (
|
||||
"bytes"
|
||||
"fmt"
|
||||
"go/format"
|
||||
"go/importer"
|
||||
"go/types"
|
||||
"log"
|
||||
"os"
|
||||
)
|
||||
|
||||
// packageHdr is emitted verbatim at the top of the generated zduplicate.go.
var packageHdr = `
// Code generated by "go run duplicate_generate.go"; DO NOT EDIT.

package dns

`
|
||||
|
||||
// getTypeStruct returns the (innermost) struct underlying t when t looks
// like an RR type: its first field is either the RR_Header declared in the
// package scope, or an embedded struct that itself starts with one. The
// bool result reports whether an embedded struct had to be unwrapped.
func getTypeStruct(t types.Type, scope *types.Scope) (*types.Struct, bool) {
	st, ok := t.Underlying().(*types.Struct)
	if !ok {
		return nil, false
	}
	switch first := st.Field(0); {
	case first.Type() == scope.Lookup("RR_Header").Type():
		return st, false
	case first.Anonymous():
		inner, _ := getTypeStruct(first.Type(), scope)
		return inner, true
	}
	return nil, false
}
|
||||
|
||||
// main type-checks the github.com/miekg/dns package, collects its RR struct
// types, and writes a generated isDuplicate method for each into
// zduplicate.go.
func main() {
	// Import and type-check the package
	pkg, err := importer.Default().Import("github.com/miekg/dns")
	fatalIfErr(err)
	scope := pkg.Scope()

	// Collect actual types (*X)
	var namedTypes []string
	for _, name := range scope.Names() {
		o := scope.Lookup(name)
		if o == nil || !o.Exported() {
			continue
		}

		if st, _ := getTypeStruct(o.Type(), scope); st == nil {
			continue
		}

		// PrivateRR and OPT are excluded from generation here.
		if name == "PrivateRR" || name == "OPT" {
			continue
		}

		namedTypes = append(namedTypes, o.Name())
	}

	b := &bytes.Buffer{}
	b.WriteString(packageHdr)

	// Generate the duplicate check for each type.
	fmt.Fprint(b, "// isDuplicate() functions\n\n")
	for _, name := range namedTypes {

		o := scope.Lookup(name)
		st, isEmbedded := getTypeStruct(o.Type(), scope)
		if isEmbedded {
			continue
		}
		fmt.Fprintf(b, "func (r1 *%s) isDuplicate(_r2 RR) bool {\n", name)
		fmt.Fprintf(b, "r2, ok := _r2.(*%s)\n", name)
		fmt.Fprint(b, "if !ok { return false }\n")
		fmt.Fprint(b, "_ = r2\n")
		// Field 0 is the RR header; compare every remaining field.
		for i := 1; i < st.NumFields(); i++ {
			field := st.Field(i).Name()
			// o2/o3 format the template with the field name 2 or 3 times.
			o2 := func(s string) { fmt.Fprintf(b, s+"\n", field, field) }
			o3 := func(s string) { fmt.Fprintf(b, s+"\n", field, field, field) }

			// For some reason, a and aaaa don't pop up as *types.Slice here
			// (most likely because they are *indirectly* defined as a slice
			// in the net package).
			if _, ok := st.Field(i).Type().(*types.Slice); ok {
				o2("if len(r1.%s) != len(r2.%s) {\nreturn false\n}")

				if st.Tag(i) == `dns:"cdomain-name"` || st.Tag(i) == `dns:"domain-name"` {
					o3(`for i := 0; i < len(r1.%s); i++ {
if !isDuplicateName(r1.%s[i], r2.%s[i]) {
return false
}
}`)

					continue
				}

				o3(`for i := 0; i < len(r1.%s); i++ {
if r1.%s[i] != r2.%s[i] {
return false
}
}`)

				continue
			}

			switch st.Tag(i) {
			case `dns:"-"`:
				// ignored
			case `dns:"a"`, `dns:"aaaa"`:
				o2("if !r1.%s.Equal(r2.%s) {\nreturn false\n}")
			case `dns:"cdomain-name"`, `dns:"domain-name"`:
				o2("if !isDuplicateName(r1.%s, r2.%s) {\nreturn false\n}")
			default:
				o2("if r1.%s != r2.%s {\nreturn false\n}")
			}
		}
		fmt.Fprintf(b, "return true\n}\n\n")
	}

	// gofmt
	res, err := format.Source(b.Bytes())
	if err != nil {
		// Dump the unformatted source to stderr to aid debugging.
		b.WriteTo(os.Stderr)
		log.Fatal(err)
	}

	// write result
	f, err := os.Create("zduplicate.go")
	fatalIfErr(err)
	defer f.Close()
	f.Write(res)
}
|
||||
|
||||
// fatalIfErr terminates the program via log.Fatal when err is non-nil.
func fatalIfErr(err error) {
	if err == nil {
		return
	}
	log.Fatal(err)
}
|
||||
328
vendor/github.com/miekg/dns/msg_generate.go
generated
vendored
Normal file
328
vendor/github.com/miekg/dns/msg_generate.go
generated
vendored
Normal file
@@ -0,0 +1,328 @@
|
||||
//+build ignore
|
||||
|
||||
// msg_generate.go is meant to run with go generate. It will use
|
||||
// go/{importer,types} to track down all the RR struct types. Then for each type
|
||||
// it will generate pack/unpack methods based on the struct tags. The generated source is
|
||||
// written to zmsg.go, and is meant to be checked into git.
|
||||
package main
|
||||
|
||||
import (
|
||||
"bytes"
|
||||
"fmt"
|
||||
"go/format"
|
||||
"go/importer"
|
||||
"go/types"
|
||||
"log"
|
||||
"os"
|
||||
"strings"
|
||||
)
|
||||
|
||||
// packageHdr is emitted verbatim at the top of the generated zmsg.go.
var packageHdr = `
// Code generated by "go run msg_generate.go"; DO NOT EDIT.

package dns

`
|
||||
|
||||
// getTypeStruct will take a type and the package scope, and return the
// (innermost) struct if the type is considered a RR type (currently defined as
// those structs beginning with a RR_Header, could be redefined as implementing
// the RR interface). The bool return value indicates if embedded structs were
// resolved.
func getTypeStruct(t types.Type, scope *types.Scope) (*types.Struct, bool) {
	st, ok := t.Underlying().(*types.Struct)
	if !ok {
		return nil, false
	}
	// Direct RR type: first field is the package's RR_Header.
	if st.Field(0).Type() == scope.Lookup("RR_Header").Type() {
		return st, false
	}
	// Embedded struct: unwrap one level and report the embedding.
	if st.Field(0).Anonymous() {
		st, _ := getTypeStruct(st.Field(0).Type(), scope)
		return st, true
	}
	return nil, false
}
|
||||
|
||||
// main type-checks the github.com/miekg/dns package, collects its RR struct
// types, and writes generated pack/unpack methods for each into zmsg.go.
// The wire format for every field is selected from its `dns:"..."` struct tag.
func main() {
	// Import and type-check the package
	pkg, err := importer.Default().Import("github.com/miekg/dns")
	fatalIfErr(err)
	scope := pkg.Scope()

	// Collect actual types (*X)
	var namedTypes []string
	for _, name := range scope.Names() {
		o := scope.Lookup(name)
		if o == nil || !o.Exported() {
			continue
		}
		if st, _ := getTypeStruct(o.Type(), scope); st == nil {
			continue
		}
		if name == "PrivateRR" {
			continue
		}

		// Check if corresponding TypeX exists
		if scope.Lookup("Type"+o.Name()) == nil && o.Name() != "RFC3597" {
			log.Fatalf("Constant Type%s does not exist.", o.Name())
		}

		namedTypes = append(namedTypes, o.Name())
	}

	b := &bytes.Buffer{}
	b.WriteString(packageHdr)

	fmt.Fprint(b, "// pack*() functions\n\n")
	for _, name := range namedTypes {
		o := scope.Lookup(name)
		st, _ := getTypeStruct(o.Type(), scope)

		fmt.Fprintf(b, "func (rr *%s) pack(msg []byte, off int, compression compressionMap, compress bool) (off1 int, err error) {\n", name)
		// Field 0 is the RR header; generate a pack call per remaining field.
		for i := 1; i < st.NumFields(); i++ {
			// o emits the formatted pack call followed by the standard
			// error check.
			o := func(s string) {
				fmt.Fprintf(b, s, st.Field(i).Name())
				fmt.Fprint(b, `if err != nil {
return off, err
}
`)
			}

			if _, ok := st.Field(i).Type().(*types.Slice); ok {
				switch st.Tag(i) {
				case `dns:"-"`: // ignored
				case `dns:"txt"`:
					o("off, err = packStringTxt(rr.%s, msg, off)\n")
				case `dns:"opt"`:
					o("off, err = packDataOpt(rr.%s, msg, off)\n")
				case `dns:"nsec"`:
					o("off, err = packDataNsec(rr.%s, msg, off)\n")
				case `dns:"domain-name"`:
					o("off, err = packDataDomainNames(rr.%s, msg, off, compression, false)\n")
				default:
					log.Fatalln(name, st.Field(i).Name(), st.Tag(i))
				}
				continue
			}

			switch {
			case st.Tag(i) == `dns:"-"`: // ignored
			case st.Tag(i) == `dns:"cdomain-name"`:
				o("off, err = packDomainName(rr.%s, msg, off, compression, compress)\n")
			case st.Tag(i) == `dns:"domain-name"`:
				o("off, err = packDomainName(rr.%s, msg, off, compression, false)\n")
			case st.Tag(i) == `dns:"a"`:
				o("off, err = packDataA(rr.%s, msg, off)\n")
			case st.Tag(i) == `dns:"aaaa"`:
				o("off, err = packDataAAAA(rr.%s, msg, off)\n")
			case st.Tag(i) == `dns:"uint48"`:
				o("off, err = packUint48(rr.%s, msg, off)\n")
			case st.Tag(i) == `dns:"txt"`:
				o("off, err = packString(rr.%s, msg, off)\n")

			case strings.HasPrefix(st.Tag(i), `dns:"size-base32`): // size-base32 can be packed just like base32
				fallthrough
			case st.Tag(i) == `dns:"base32"`:
				o("off, err = packStringBase32(rr.%s, msg, off)\n")

			case strings.HasPrefix(st.Tag(i), `dns:"size-base64`): // size-base64 can be packed just like base64
				fallthrough
			case st.Tag(i) == `dns:"base64"`:
				o("off, err = packStringBase64(rr.%s, msg, off)\n")

			case strings.HasPrefix(st.Tag(i), `dns:"size-hex:SaltLength`):
				// directly write instead of using o() so we get the error check in the correct place
				field := st.Field(i).Name()
				fmt.Fprintf(b, `// Only pack salt if value is not "-", i.e. empty
if rr.%s != "-" {
off, err = packStringHex(rr.%s, msg, off)
if err != nil {
return off, err
}
}
`, field, field)
				continue
			case strings.HasPrefix(st.Tag(i), `dns:"size-hex`): // size-hex can be packed just like hex
				fallthrough
			case st.Tag(i) == `dns:"hex"`:
				o("off, err = packStringHex(rr.%s, msg, off)\n")
			case st.Tag(i) == `dns:"any"`:
				o("off, err = packStringAny(rr.%s, msg, off)\n")
			case st.Tag(i) == `dns:"octet"`:
				o("off, err = packStringOctet(rr.%s, msg, off)\n")
			case st.Tag(i) == "":
				// Untagged fields are packed by their basic Go type.
				switch st.Field(i).Type().(*types.Basic).Kind() {
				case types.Uint8:
					o("off, err = packUint8(rr.%s, msg, off)\n")
				case types.Uint16:
					o("off, err = packUint16(rr.%s, msg, off)\n")
				case types.Uint32:
					o("off, err = packUint32(rr.%s, msg, off)\n")
				case types.Uint64:
					o("off, err = packUint64(rr.%s, msg, off)\n")
				case types.String:
					o("off, err = packString(rr.%s, msg, off)\n")
				default:
					log.Fatalln(name, st.Field(i).Name())
				}
			default:
				log.Fatalln(name, st.Field(i).Name(), st.Tag(i))
			}
		}
		fmt.Fprintln(b, "return off, nil }\n")
	}

	fmt.Fprint(b, "// unpack*() functions\n\n")
	for _, name := range namedTypes {
		o := scope.Lookup(name)
		st, _ := getTypeStruct(o.Type(), scope)

		fmt.Fprintf(b, "func (rr *%s) unpack(msg []byte, off int) (off1 int, err error) {\n", name)
		fmt.Fprint(b, `rdStart := off
_ = rdStart

`)
		for i := 1; i < st.NumFields(); i++ {
			// o emits the formatted unpack call followed by the standard
			// error check.
			o := func(s string) {
				fmt.Fprintf(b, s, st.Field(i).Name())
				fmt.Fprint(b, `if err != nil {
return off, err
}
`)
			}

			// size-* are special, because they reference a struct member we should use for the length.
			if strings.HasPrefix(st.Tag(i), `dns:"size-`) {
				structMember := structMember(st.Tag(i))
				structTag := structTag(st.Tag(i))
				switch structTag {
				case "hex":
					fmt.Fprintf(b, "rr.%s, off, err = unpackStringHex(msg, off, off + int(rr.%s))\n", st.Field(i).Name(), structMember)
				case "base32":
					fmt.Fprintf(b, "rr.%s, off, err = unpackStringBase32(msg, off, off + int(rr.%s))\n", st.Field(i).Name(), structMember)
				case "base64":
					fmt.Fprintf(b, "rr.%s, off, err = unpackStringBase64(msg, off, off + int(rr.%s))\n", st.Field(i).Name(), structMember)
				default:
					log.Fatalln(name, st.Field(i).Name(), st.Tag(i))
				}
				fmt.Fprint(b, `if err != nil {
return off, err
}
`)
				continue
			}

			if _, ok := st.Field(i).Type().(*types.Slice); ok {
				switch st.Tag(i) {
				case `dns:"-"`: // ignored
				case `dns:"txt"`:
					o("rr.%s, off, err = unpackStringTxt(msg, off)\n")
				case `dns:"opt"`:
					o("rr.%s, off, err = unpackDataOpt(msg, off)\n")
				case `dns:"nsec"`:
					o("rr.%s, off, err = unpackDataNsec(msg, off)\n")
				case `dns:"domain-name"`:
					o("rr.%s, off, err = unpackDataDomainNames(msg, off, rdStart + int(rr.Hdr.Rdlength))\n")
				default:
					log.Fatalln(name, st.Field(i).Name(), st.Tag(i))
				}
				continue
			}

			switch st.Tag(i) {
			case `dns:"-"`: // ignored
			case `dns:"cdomain-name"`:
				fallthrough
			case `dns:"domain-name"`:
				o("rr.%s, off, err = UnpackDomainName(msg, off)\n")
			case `dns:"a"`:
				o("rr.%s, off, err = unpackDataA(msg, off)\n")
			case `dns:"aaaa"`:
				o("rr.%s, off, err = unpackDataAAAA(msg, off)\n")
			case `dns:"uint48"`:
				o("rr.%s, off, err = unpackUint48(msg, off)\n")
			case `dns:"txt"`:
				o("rr.%s, off, err = unpackString(msg, off)\n")
			case `dns:"base32"`:
				o("rr.%s, off, err = unpackStringBase32(msg, off, rdStart + int(rr.Hdr.Rdlength))\n")
			case `dns:"base64"`:
				o("rr.%s, off, err = unpackStringBase64(msg, off, rdStart + int(rr.Hdr.Rdlength))\n")
			case `dns:"hex"`:
				o("rr.%s, off, err = unpackStringHex(msg, off, rdStart + int(rr.Hdr.Rdlength))\n")
			case `dns:"any"`:
				o("rr.%s, off, err = unpackStringAny(msg, off, rdStart + int(rr.Hdr.Rdlength))\n")
			case `dns:"octet"`:
				o("rr.%s, off, err = unpackStringOctet(msg, off)\n")
			case "":
				// Untagged fields are unpacked by their basic Go type.
				switch st.Field(i).Type().(*types.Basic).Kind() {
				case types.Uint8:
					o("rr.%s, off, err = unpackUint8(msg, off)\n")
				case types.Uint16:
					o("rr.%s, off, err = unpackUint16(msg, off)\n")
				case types.Uint32:
					o("rr.%s, off, err = unpackUint32(msg, off)\n")
				case types.Uint64:
					o("rr.%s, off, err = unpackUint64(msg, off)\n")
				case types.String:
					o("rr.%s, off, err = unpackString(msg, off)\n")
				default:
					log.Fatalln(name, st.Field(i).Name())
				}
			default:
				log.Fatalln(name, st.Field(i).Name(), st.Tag(i))
			}
			// If we've hit len(msg) we return without error.
			if i < st.NumFields()-1 {
				fmt.Fprintf(b, `if off == len(msg) {
return off, nil
}
`)
			}
		}
		fmt.Fprintf(b, "return off, nil }\n\n")
	}

	// gofmt
	res, err := format.Source(b.Bytes())
	if err != nil {
		// Dump the unformatted source to stderr to aid debugging.
		b.WriteTo(os.Stderr)
		log.Fatal(err)
	}

	// write result
	f, err := os.Create("zmsg.go")
	fatalIfErr(err)
	defer f.Close()
	f.Write(res)
}
|
||||
|
||||
// structMember will take a tag like dns:"size-base32:SaltLength" and return the last part of this string.
func structMember(s string) string {
	fields := strings.Split(s, ":")
	if len(fields) == 0 {
		return ""
	}
	last := fields[len(fields)-1]
	// last normally carries the tag's closing quote; strip it.
	if len(last) <= 1 {
		return last
	}
	return last[:len(last)-1]
}
|
||||
|
||||
// structTag will take a tag like dns:"size-base32:SaltLength" and return base32.
//
// After splitting on ":", the second field looks like `"size-base32` (the
// leading double quote of the tag literal is still attached); stripping the
// fixed `"size-` prefix leaves the encoding name. The added length guard
// returns "" for tags whose second field is too short to carry the prefix —
// the original sliced unconditionally and would panic on such input.
func structTag(s string) string {
	fields := strings.Split(s, ":")
	if len(fields) < 2 {
		return ""
	}
	const prefix = `"size-`
	if len(fields[1]) < len(prefix) {
		return ""
	}
	return fields[1][len(prefix):]
}
|
||||
|
||||
// fatalIfErr terminates the program via log.Fatal when err is non-nil;
// it is a no-op otherwise.
func fatalIfErr(err error) {
	if err == nil {
		return
	}
	log.Fatal(err)
}
|
||||
287
vendor/github.com/miekg/dns/types_generate.go
generated
vendored
Normal file
287
vendor/github.com/miekg/dns/types_generate.go
generated
vendored
Normal file
@@ -0,0 +1,287 @@
|
||||
//+build ignore
|
||||
|
||||
// types_generate.go is meant to run with go generate. It will use
|
||||
// go/{importer,types} to track down all the RR struct types. Then for each type
|
||||
// it will generate conversion tables (TypeToRR and TypeToString) and banal
|
||||
// methods (len, Header, copy) based on the struct tags. The generated source is
|
||||
// written to ztypes.go, and is meant to be checked into git.
|
||||
package main
|
||||
|
||||
import (
|
||||
"bytes"
|
||||
"fmt"
|
||||
"go/format"
|
||||
"go/importer"
|
||||
"go/types"
|
||||
"log"
|
||||
"os"
|
||||
"strings"
|
||||
"text/template"
|
||||
)
|
||||
|
||||
// skipLen lists RR type names for which the generator must not emit a len()
// method — presumably these have hand-written len implementations elsewhere
// in the package because their wire length cannot be derived from struct
// tags alone (NOTE(review): confirm before adding or removing entries).
var skipLen = map[string]struct{}{
	"NSEC":  {},
	"NSEC3": {},
	"OPT":   {},
	"CSYNC": {},
}
||||
|
||||
var packageHdr = `
|
||||
// Code generated by "go run types_generate.go"; DO NOT EDIT.
|
||||
|
||||
package dns
|
||||
|
||||
import (
|
||||
"encoding/base64"
|
||||
"net"
|
||||
)
|
||||
|
||||
`
|
||||
|
||||
var TypeToRR = template.Must(template.New("TypeToRR").Parse(`
|
||||
// TypeToRR is a map of constructors for each RR type.
|
||||
var TypeToRR = map[uint16]func() RR{
|
||||
{{range .}}{{if ne . "RFC3597"}} Type{{.}}: func() RR { return new({{.}}) },
|
||||
{{end}}{{end}} }
|
||||
|
||||
`))
|
||||
|
||||
var typeToString = template.Must(template.New("typeToString").Parse(`
|
||||
// TypeToString is a map of strings for each RR type.
|
||||
var TypeToString = map[uint16]string{
|
||||
{{range .}}{{if ne . "NSAPPTR"}} Type{{.}}: "{{.}}",
|
||||
{{end}}{{end}} TypeNSAPPTR: "NSAP-PTR",
|
||||
}
|
||||
|
||||
`))
|
||||
|
||||
var headerFunc = template.Must(template.New("headerFunc").Parse(`
|
||||
{{range .}} func (rr *{{.}}) Header() *RR_Header { return &rr.Hdr }
|
||||
{{end}}
|
||||
|
||||
`))
|
||||
|
||||
// getTypeStruct will take a type and the package scope, and return the
// (innermost) struct if the type is considered a RR type (currently defined as
// those structs beginning with a RR_Header, could be redefined as implementing
// the RR interface). The bool return value indicates if embedded structs were
// resolved.
//
// NOTE(review): st.Field(0) assumes the struct has at least one field; a
// zero-field struct would panic here — confirm callers only pass RR structs.
func getTypeStruct(t types.Type, scope *types.Scope) (*types.Struct, bool) {
	st, ok := t.Underlying().(*types.Struct)
	if !ok {
		// Not a struct type at all.
		return nil, false
	}
	if st.Field(0).Type() == scope.Lookup("RR_Header").Type() {
		// Direct RR type: first field is the RR_Header itself.
		return st, false
	}
	if st.Field(0).Anonymous() {
		// First field is embedded; recurse one level to locate the RR struct.
		st, _ := getTypeStruct(st.Field(0).Type(), scope)
		return st, true
	}
	return nil, false
}
|
||||
|
||||
// main drives the ztypes.go code generator: it type-checks the dns package,
// collects the TypeX constants and the RR struct types, then emits the
// TypeToRR / TypeToString tables, Header(), len() and copy() methods, runs
// the result through gofmt, and writes ztypes.go.
func main() {
	// Import and type-check the package
	pkg, err := importer.Default().Import("github.com/miekg/dns")
	fatalIfErr(err)
	scope := pkg.Scope()

	// Collect constants like TypeX (exported uint16 constants named Type*).
	var numberedTypes []string
	for _, name := range scope.Names() {
		o := scope.Lookup(name)
		if o == nil || !o.Exported() {
			continue
		}
		b, ok := o.Type().(*types.Basic)
		if !ok || b.Kind() != types.Uint16 {
			continue
		}
		if !strings.HasPrefix(o.Name(), "Type") {
			continue
		}
		name := strings.TrimPrefix(o.Name(), "Type")
		if name == "PrivateRR" {
			continue
		}
		numberedTypes = append(numberedTypes, name)
	}

	// Collect actual types (*X): exported structs that begin with RR_Header.
	var namedTypes []string
	for _, name := range scope.Names() {
		o := scope.Lookup(name)
		if o == nil || !o.Exported() {
			continue
		}
		if st, _ := getTypeStruct(o.Type(), scope); st == nil {
			continue
		}
		if name == "PrivateRR" {
			continue
		}

		// Check if corresponding TypeX exists (RFC3597 is the one exception).
		if scope.Lookup("Type"+o.Name()) == nil && o.Name() != "RFC3597" {
			log.Fatalf("Constant Type%s does not exist.", o.Name())
		}

		namedTypes = append(namedTypes, o.Name())
	}

	b := &bytes.Buffer{}
	b.WriteString(packageHdr)

	// Generate TypeToRR
	fatalIfErr(TypeToRR.Execute(b, namedTypes))

	// Generate typeToString
	fatalIfErr(typeToString.Execute(b, numberedTypes))

	// Generate headerFunc
	fatalIfErr(headerFunc.Execute(b, namedTypes))

	// Generate len(): per-field wire-length accounting driven by struct tags.
	fmt.Fprint(b, "// len() functions\n")
	for _, name := range namedTypes {
		if _, ok := skipLen[name]; ok {
			continue
		}
		o := scope.Lookup(name)
		st, isEmbedded := getTypeStruct(o.Type(), scope)
		if isEmbedded {
			continue
		}
		fmt.Fprintf(b, "func (rr *%s) len(off int, compression map[string]struct{}) int {\n", name)
		fmt.Fprintf(b, "l := rr.Hdr.len(off, compression)\n")
		// Field 0 is the RR_Header (handled above); walk the payload fields.
		for i := 1; i < st.NumFields(); i++ {
			// o emits one generated statement with the field name substituted.
			o := func(s string) { fmt.Fprintf(b, s, st.Field(i).Name()) }

			// Slice fields first: their length is a sum over elements.
			if _, ok := st.Field(i).Type().(*types.Slice); ok {
				switch st.Tag(i) {
				case `dns:"-"`:
					// ignored
				case `dns:"cdomain-name"`:
					o("for _, x := range rr.%s { l += domainNameLen(x, off+l, compression, true) }\n")
				case `dns:"domain-name"`:
					o("for _, x := range rr.%s { l += domainNameLen(x, off+l, compression, false) }\n")
				case `dns:"txt"`:
					o("for _, x := range rr.%s { l += len(x) + 1 }\n")
				default:
					log.Fatalln(name, st.Field(i).Name(), st.Tag(i))
				}
				continue
			}

			switch {
			case st.Tag(i) == `dns:"-"`:
				// ignored
			case st.Tag(i) == `dns:"cdomain-name"`:
				o("l += domainNameLen(rr.%s, off+l, compression, true)\n")
			case st.Tag(i) == `dns:"domain-name"`:
				o("l += domainNameLen(rr.%s, off+l, compression, false)\n")
			case st.Tag(i) == `dns:"octet"`:
				o("l += len(rr.%s)\n")
			case strings.HasPrefix(st.Tag(i), `dns:"size-base64`):
				fallthrough
			case st.Tag(i) == `dns:"base64"`:
				o("l += base64.StdEncoding.DecodedLen(len(rr.%s))\n")
			case strings.HasPrefix(st.Tag(i), `dns:"size-hex:`): // this has an extra field where the length is stored
				o("l += len(rr.%s)/2\n")
			case strings.HasPrefix(st.Tag(i), `dns:"size-hex`):
				fallthrough
			case st.Tag(i) == `dns:"hex"`:
				o("l += len(rr.%s)/2 + 1\n")
			case st.Tag(i) == `dns:"any"`:
				o("l += len(rr.%s)\n")
			case st.Tag(i) == `dns:"a"`:
				o("if len(rr.%s) != 0 { l += net.IPv4len }\n")
			case st.Tag(i) == `dns:"aaaa"`:
				o("if len(rr.%s) != 0 { l += net.IPv6len }\n")
			case st.Tag(i) == `dns:"txt"`:
				o("for _, t := range rr.%s { l += len(t) + 1 }\n")
			case st.Tag(i) == `dns:"uint48"`:
				o("l += 6 // %s\n")
			case st.Tag(i) == "":
				// Untagged basic fields use their fixed wire width.
				switch st.Field(i).Type().(*types.Basic).Kind() {
				case types.Uint8:
					o("l++ // %s\n")
				case types.Uint16:
					o("l += 2 // %s\n")
				case types.Uint32:
					o("l += 4 // %s\n")
				case types.Uint64:
					o("l += 8 // %s\n")
				case types.String:
					o("l += len(rr.%s) + 1\n")
				default:
					log.Fatalln(name, st.Field(i).Name())
				}
			default:
				log.Fatalln(name, st.Field(i).Name(), st.Tag(i))
			}
		}
		fmt.Fprintf(b, "return l }\n")
	}

	// Generate copy(): deep-copies slices and net.IP, shallow-copies the rest.
	fmt.Fprint(b, "// copy() functions\n")
	for _, name := range namedTypes {
		o := scope.Lookup(name)
		st, isEmbedded := getTypeStruct(o.Type(), scope)
		if isEmbedded {
			continue
		}
		fmt.Fprintf(b, "func (rr *%s) copy() RR {\n", name)
		fields := []string{"rr.Hdr"}
		for i := 1; i < st.NumFields(); i++ {
			f := st.Field(i).Name()
			if sl, ok := st.Field(i).Type().(*types.Slice); ok {
				// Reduce the element type to its bare (unqualified) name.
				t := sl.Underlying().String()
				t = strings.TrimPrefix(t, "[]")
				if strings.Contains(t, ".") {
					splits := strings.Split(t, ".")
					t = splits[len(splits)-1]
				}
				// For the EDNS0 interface (used in the OPT RR), we need to call the copy method on each element.
				if t == "EDNS0" {
					fmt.Fprintf(b, "%s := make([]%s, len(rr.%s));\nfor i,e := range rr.%s {\n %s[i] = e.copy()\n}\n",
						f, t, f, f, f)
					fields = append(fields, f)
					continue
				}
				fmt.Fprintf(b, "%s := make([]%s, len(rr.%s)); copy(%s, rr.%s)\n",
					f, t, f, f, f)
				fields = append(fields, f)
				continue
			}
			if st.Field(i).Type().String() == "net.IP" {
				fields = append(fields, "copyIP(rr."+f+")")
				continue
			}
			fields = append(fields, "rr."+f)
		}
		fmt.Fprintf(b, "return &%s{%s}\n", name, strings.Join(fields, ","))
		fmt.Fprintf(b, "}\n")
	}

	// gofmt; on failure dump the raw output to stderr to aid debugging.
	res, err := format.Source(b.Bytes())
	if err != nil {
		b.WriteTo(os.Stderr)
		log.Fatal(err)
	}

	// write result
	f, err := os.Create("ztypes.go")
	fatalIfErr(err)
	defer f.Close()
	f.Write(res)
}
|
||||
|
||||
// fatalIfErr terminates the program via log.Fatal when err is non-nil;
// it is a no-op otherwise.
func fatalIfErr(err error) {
	if err == nil {
		return
	}
	log.Fatal(err)
}
|
||||
202
vendor/github.com/open-policy-agent/opa/LICENSE
generated
vendored
Normal file
202
vendor/github.com/open-policy-agent/opa/LICENSE
generated
vendored
Normal file
@@ -0,0 +1,202 @@
|
||||
Apache License
|
||||
Version 2.0, January 2004
|
||||
http://www.apache.org/licenses/
|
||||
|
||||
TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
|
||||
|
||||
1. Definitions.
|
||||
|
||||
"License" shall mean the terms and conditions for use, reproduction,
|
||||
and distribution as defined by Sections 1 through 9 of this document.
|
||||
|
||||
"Licensor" shall mean the copyright owner or entity authorized by
|
||||
the copyright owner that is granting the License.
|
||||
|
||||
"Legal Entity" shall mean the union of the acting entity and all
|
||||
other entities that control, are controlled by, or are under common
|
||||
control with that entity. For the purposes of this definition,
|
||||
"control" means (i) the power, direct or indirect, to cause the
|
||||
direction or management of such entity, whether by contract or
|
||||
otherwise, or (ii) ownership of fifty percent (50%) or more of the
|
||||
outstanding shares, or (iii) beneficial ownership of such entity.
|
||||
|
||||
"You" (or "Your") shall mean an individual or Legal Entity
|
||||
exercising permissions granted by this License.
|
||||
|
||||
"Source" form shall mean the preferred form for making modifications,
|
||||
including but not limited to software source code, documentation
|
||||
source, and configuration files.
|
||||
|
||||
"Object" form shall mean any form resulting from mechanical
|
||||
transformation or translation of a Source form, including but
|
||||
not limited to compiled object code, generated documentation,
|
||||
and conversions to other media types.
|
||||
|
||||
"Work" shall mean the work of authorship, whether in Source or
|
||||
Object form, made available under the License, as indicated by a
|
||||
copyright notice that is included in or attached to the work
|
||||
(an example is provided in the Appendix below).
|
||||
|
||||
"Derivative Works" shall mean any work, whether in Source or Object
|
||||
form, that is based on (or derived from) the Work and for which the
|
||||
editorial revisions, annotations, elaborations, or other modifications
|
||||
represent, as a whole, an original work of authorship. For the purposes
|
||||
of this License, Derivative Works shall not include works that remain
|
||||
separable from, or merely link (or bind by name) to the interfaces of,
|
||||
the Work and Derivative Works thereof.
|
||||
|
||||
"Contribution" shall mean any work of authorship, including
|
||||
the original version of the Work and any modifications or additions
|
||||
to that Work or Derivative Works thereof, that is intentionally
|
||||
submitted to Licensor for inclusion in the Work by the copyright owner
|
||||
or by an individual or Legal Entity authorized to submit on behalf of
|
||||
the copyright owner. For the purposes of this definition, "submitted"
|
||||
means any form of electronic, verbal, or written communication sent
|
||||
to the Licensor or its representatives, including but not limited to
|
||||
communication on electronic mailing lists, source code control systems,
|
||||
and issue tracking systems that are managed by, or on behalf of, the
|
||||
Licensor for the purpose of discussing and improving the Work, but
|
||||
excluding communication that is conspicuously marked or otherwise
|
||||
designated in writing by the copyright owner as "Not a Contribution."
|
||||
|
||||
"Contributor" shall mean Licensor and any individual or Legal Entity
|
||||
on behalf of whom a Contribution has been received by Licensor and
|
||||
subsequently incorporated within the Work.
|
||||
|
||||
2. Grant of Copyright License. Subject to the terms and conditions of
|
||||
this License, each Contributor hereby grants to You a perpetual,
|
||||
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
|
||||
copyright license to reproduce, prepare Derivative Works of,
|
||||
publicly display, publicly perform, sublicense, and distribute the
|
||||
Work and such Derivative Works in Source or Object form.
|
||||
|
||||
3. Grant of Patent License. Subject to the terms and conditions of
|
||||
this License, each Contributor hereby grants to You a perpetual,
|
||||
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
|
||||
(except as stated in this section) patent license to make, have made,
|
||||
use, offer to sell, sell, import, and otherwise transfer the Work,
|
||||
where such license applies only to those patent claims licensable
|
||||
by such Contributor that are necessarily infringed by their
|
||||
Contribution(s) alone or by combination of their Contribution(s)
|
||||
with the Work to which such Contribution(s) was submitted. If You
|
||||
institute patent litigation against any entity (including a
|
||||
cross-claim or counterclaim in a lawsuit) alleging that the Work
|
||||
or a Contribution incorporated within the Work constitutes direct
|
||||
or contributory patent infringement, then any patent licenses
|
||||
granted to You under this License for that Work shall terminate
|
||||
as of the date such litigation is filed.
|
||||
|
||||
4. Redistribution. You may reproduce and distribute copies of the
|
||||
Work or Derivative Works thereof in any medium, with or without
|
||||
modifications, and in Source or Object form, provided that You
|
||||
meet the following conditions:
|
||||
|
||||
(a) You must give any other recipients of the Work or
|
||||
Derivative Works a copy of this License; and
|
||||
|
||||
(b) You must cause any modified files to carry prominent notices
|
||||
stating that You changed the files; and
|
||||
|
||||
(c) You must retain, in the Source form of any Derivative Works
|
||||
that You distribute, all copyright, patent, trademark, and
|
||||
attribution notices from the Source form of the Work,
|
||||
excluding those notices that do not pertain to any part of
|
||||
the Derivative Works; and
|
||||
|
||||
(d) If the Work includes a "NOTICE" text file as part of its
|
||||
distribution, then any Derivative Works that You distribute must
|
||||
include a readable copy of the attribution notices contained
|
||||
within such NOTICE file, excluding those notices that do not
|
||||
pertain to any part of the Derivative Works, in at least one
|
||||
of the following places: within a NOTICE text file distributed
|
||||
as part of the Derivative Works; within the Source form or
|
||||
documentation, if provided along with the Derivative Works; or,
|
||||
within a display generated by the Derivative Works, if and
|
||||
wherever such third-party notices normally appear. The contents
|
||||
of the NOTICE file are for informational purposes only and
|
||||
do not modify the License. You may add Your own attribution
|
||||
notices within Derivative Works that You distribute, alongside
|
||||
or as an addendum to the NOTICE text from the Work, provided
|
||||
that such additional attribution notices cannot be construed
|
||||
as modifying the License.
|
||||
|
||||
You may add Your own copyright statement to Your modifications and
|
||||
may provide additional or different license terms and conditions
|
||||
for use, reproduction, or distribution of Your modifications, or
|
||||
for any such Derivative Works as a whole, provided Your use,
|
||||
reproduction, and distribution of the Work otherwise complies with
|
||||
the conditions stated in this License.
|
||||
|
||||
5. Submission of Contributions. Unless You explicitly state otherwise,
|
||||
any Contribution intentionally submitted for inclusion in the Work
|
||||
by You to the Licensor shall be under the terms and conditions of
|
||||
this License, without any additional terms or conditions.
|
||||
Notwithstanding the above, nothing herein shall supersede or modify
|
||||
the terms of any separate license agreement you may have executed
|
||||
with Licensor regarding such Contributions.
|
||||
|
||||
6. Trademarks. This License does not grant permission to use the trade
|
||||
names, trademarks, service marks, or product names of the Licensor,
|
||||
except as required for reasonable and customary use in describing the
|
||||
origin of the Work and reproducing the content of the NOTICE file.
|
||||
|
||||
7. Disclaimer of Warranty. Unless required by applicable law or
|
||||
agreed to in writing, Licensor provides the Work (and each
|
||||
Contributor provides its Contributions) on an "AS IS" BASIS,
|
||||
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
|
||||
implied, including, without limitation, any warranties or conditions
|
||||
of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
|
||||
PARTICULAR PURPOSE. You are solely responsible for determining the
|
||||
appropriateness of using or redistributing the Work and assume any
|
||||
risks associated with Your exercise of permissions under this License.
|
||||
|
||||
8. Limitation of Liability. In no event and under no legal theory,
|
||||
whether in tort (including negligence), contract, or otherwise,
|
||||
unless required by applicable law (such as deliberate and grossly
|
||||
negligent acts) or agreed to in writing, shall any Contributor be
|
||||
liable to You for damages, including any direct, indirect, special,
|
||||
incidental, or consequential damages of any character arising as a
|
||||
result of this License or out of the use or inability to use the
|
||||
Work (including but not limited to damages for loss of goodwill,
|
||||
work stoppage, computer failure or malfunction, or any and all
|
||||
other commercial damages or losses), even if such Contributor
|
||||
has been advised of the possibility of such damages.
|
||||
|
||||
9. Accepting Warranty or Additional Liability. While redistributing
|
||||
the Work or Derivative Works thereof, You may choose to offer,
|
||||
and charge a fee for, acceptance of support, warranty, indemnity,
|
||||
or other liability obligations and/or rights consistent with this
|
||||
License. However, in accepting such obligations, You may act only
|
||||
on Your own behalf and on Your sole responsibility, not on behalf
|
||||
of any other Contributor, and only if You agree to indemnify,
|
||||
defend, and hold each Contributor harmless for any liability
|
||||
incurred by, or claims asserted against, such Contributor by reason
|
||||
of your accepting any such warranty or additional liability.
|
||||
|
||||
END OF TERMS AND CONDITIONS
|
||||
|
||||
APPENDIX: How to apply the Apache License to your work.
|
||||
|
||||
To apply the Apache License to your work, attach the following
|
||||
boilerplate notice, with the fields enclosed by brackets "{}"
|
||||
replaced with your own identifying information. (Don't include
|
||||
the brackets!) The text should be enclosed in the appropriate
|
||||
comment syntax for the file format. We also recommend that a
|
||||
file or class name and description of purpose be included on the
|
||||
same "printed page" as the copyright notice for easier
|
||||
identification within third-party archives.
|
||||
|
||||
Copyright {yyyy} {name of copyright owner}
|
||||
|
||||
Licensed under the Apache License, Version 2.0 (the "License");
|
||||
you may not use this file except in compliance with the License.
|
||||
You may obtain a copy of the License at
|
||||
|
||||
http://www.apache.org/licenses/LICENSE-2.0
|
||||
|
||||
Unless required by applicable law or agreed to in writing, software
|
||||
distributed under the License is distributed on an "AS IS" BASIS,
|
||||
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
See the License for the specific language governing permissions and
|
||||
limitations under the License.
|
||||
|
||||
1902
vendor/github.com/open-policy-agent/opa/ast/builtins.go
generated
vendored
Normal file
1902
vendor/github.com/open-policy-agent/opa/ast/builtins.go
generated
vendored
Normal file
File diff suppressed because it is too large
Load Diff
983
vendor/github.com/open-policy-agent/opa/ast/check.go
generated
vendored
Normal file
983
vendor/github.com/open-policy-agent/opa/ast/check.go
generated
vendored
Normal file
@@ -0,0 +1,983 @@
|
||||
// Copyright 2017 The OPA Authors. All rights reserved.
|
||||
// Use of this source code is governed by an Apache2
|
||||
// license that can be found in the LICENSE file.
|
||||
|
||||
package ast
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"sort"
|
||||
"strings"
|
||||
|
||||
"github.com/open-policy-agent/opa/types"
|
||||
"github.com/open-policy-agent/opa/util"
|
||||
)
|
||||
|
||||
// rewriteVars is a hook for rewriting refs before the ref checker resolves
// them (it is handed to newRefChecker) — presumably to map rewritten local
// vars back to their originals; confirm with callers.
type rewriteVars func(x Ref) Ref

// exprChecker defines the interface for executing type checking on a single
// expression. The exprChecker must update the provided TypeEnv with inferred
// types of vars.
type exprChecker func(*TypeEnv, *Expr) *Error

// typeChecker implements type checking on queries and rules. Errors are
// accumulated on the typeChecker so that a single run can report multiple
// issues.
type typeChecker struct {
	errs         Errors                 // errors accumulated across one checking run
	exprCheckers map[string]exprChecker // operator string -> specialized checker (e.g. "eq")
	varRewriter  rewriteVars            // optional; passed to newRefChecker, may be nil
}
|
||||
|
||||
// newTypeChecker returns a new typeChecker object that has no errors.
|
||||
func newTypeChecker() *typeChecker {
|
||||
tc := &typeChecker{}
|
||||
tc.exprCheckers = map[string]exprChecker{
|
||||
"eq": tc.checkExprEq,
|
||||
}
|
||||
return tc
|
||||
}
|
||||
|
||||
// WithVarRewriter sets the ref rewriter applied during checking (it is passed
// to newRefChecker in CheckBody) and returns tc to allow call chaining.
func (tc *typeChecker) WithVarRewriter(f rewriteVars) *typeChecker {
	tc.varRewriter = f
	return tc
}
|
||||
|
||||
// CheckBody runs type checking on the body and returns a TypeEnv if no errors
// are found. The resulting TypeEnv wraps the provided one. The resulting
// TypeEnv will be able to resolve types of vars contained in the body.
func (tc *typeChecker) CheckBody(env *TypeEnv, body Body) (*TypeEnv, Errors) {

	if env == nil {
		env = NewTypeEnv()
	} else {
		// Never mutate the caller's env; layer a child scope over it.
		env = env.wrap()
	}

	WalkExprs(body, func(expr *Expr) bool {

		// First check any comprehension closures nested in this expression.
		closureErrs := tc.checkClosures(env, expr)
		for _, err := range closureErrs {
			tc.err(err)
		}

		hasClosureErrors := len(closureErrs) > 0

		// Then check refs; this also records inferred var types into env.
		vis := newRefChecker(env, tc.varRewriter)
		NewGenericVisitor(vis.Visit).Walk(expr)
		for _, err := range vis.errs {
			tc.err(err)
		}

		hasRefErrors := len(vis.errs) > 0

		if err := tc.checkExpr(env, expr); err != nil {
			// Suppress this error if a more actionable one has occurred. In
			// this case, if an error occurred in a ref or closure contained in
			// this expression, and the error is due to a nil type, then it's
			// likely to be the result of the more specific error.
			skip := (hasClosureErrors || hasRefErrors) && causedByNilType(err)
			if !skip {
				tc.err(err)
			}
		}

		return true
	})

	return env, tc.errs
}
|
||||
|
||||
// CheckTypes runs type checking on the rules returns a TypeEnv if no errors
// are found. The resulting TypeEnv wraps the provided one. The resulting
// TypeEnv will be able to resolve types of refs that refer to rules.
// NOTE(review): each element of sorted is asserted to *Rule — a non-rule
// element would panic; confirm callers pass only rules here.
func (tc *typeChecker) CheckTypes(env *TypeEnv, sorted []util.T) (*TypeEnv, Errors) {
	if env == nil {
		env = NewTypeEnv()
	} else {
		// Wrap so the caller's env is not mutated.
		env = env.wrap()
	}
	for _, s := range sorted {
		tc.checkRule(env, s.(*Rule))
	}
	// Report accumulated errors in a stable order.
	tc.errs.Sort()
	return env, tc.errs
}
|
||||
|
||||
func (tc *typeChecker) checkClosures(env *TypeEnv, expr *Expr) Errors {
|
||||
var result Errors
|
||||
WalkClosures(expr, func(x interface{}) bool {
|
||||
switch x := x.(type) {
|
||||
case *ArrayComprehension:
|
||||
_, errs := newTypeChecker().WithVarRewriter(tc.varRewriter).CheckBody(env, x.Body)
|
||||
if len(errs) > 0 {
|
||||
result = errs
|
||||
return true
|
||||
}
|
||||
case *SetComprehension:
|
||||
_, errs := newTypeChecker().WithVarRewriter(tc.varRewriter).CheckBody(env, x.Body)
|
||||
if len(errs) > 0 {
|
||||
result = errs
|
||||
return true
|
||||
}
|
||||
case *ObjectComprehension:
|
||||
_, errs := newTypeChecker().WithVarRewriter(tc.varRewriter).CheckBody(env, x.Body)
|
||||
if len(errs) > 0 {
|
||||
result = errs
|
||||
return true
|
||||
}
|
||||
}
|
||||
return false
|
||||
})
|
||||
return result
|
||||
}
|
||||
|
||||
// checkLanguageBuiltins returns a TypeEnv (wrapping env when non-nil) seeded
// with the declared type of each built-in function, keyed by the built-in's
// ref, so that calls to them can be resolved during checking. The receiver's
// state is not used. Map iteration order is irrelevant here because each
// entry is stored under its own distinct ref.
func (tc *typeChecker) checkLanguageBuiltins(env *TypeEnv, builtins map[string]*Builtin) *TypeEnv {
	if env == nil {
		env = NewTypeEnv()
	} else {
		env = env.wrap()
	}
	for _, bi := range builtins {
		env.tree.Put(bi.Ref(), bi.Decl)
	}
	return env
}
|
||||
|
||||
// checkRule type checks a single rule: its body is checked first, and only
// when the body is error-free is a type inferred for the rule head and
// recorded in env under the rule's path. Body errors are accumulated on tc
// by CheckBody itself.
func (tc *typeChecker) checkRule(env *TypeEnv, rule *Rule) {

	cpy, err := tc.CheckBody(env, rule.Body)

	if len(err) == 0 {

		path := rule.Path()
		var tpe types.Type

		if len(rule.Head.Args) > 0 {
			// Function rule (rule with arguments).

			// If args are not referred to in body, infer as any.
			WalkVars(rule.Head.Args, func(v Var) bool {
				if cpy.Get(v) == nil {
					cpy.tree.PutOne(v, types.A)
				}
				return false
			})

			// Construct function type.
			args := make([]types.Type, len(rule.Head.Args))
			for i := 0; i < len(rule.Head.Args); i++ {
				args[i] = cpy.Get(rule.Head.Args[i])
			}

			f := types.NewFunction(args, cpy.Get(rule.Head.Value))

			// Union with existing.
			exist := env.tree.Get(path)
			tpe = types.Or(exist, f)

		} else {
			// Non-function rule: type depends on the document kind.
			switch rule.Head.DocKind() {
			case CompleteDoc:
				typeV := cpy.Get(rule.Head.Value)
				if typeV != nil {
					// Union with types from other rules defining the same path.
					exist := env.tree.Get(path)
					tpe = types.Or(typeV, exist)
				}
			case PartialObjectDoc:
				typeK := cpy.Get(rule.Head.Key)
				typeV := cpy.Get(rule.Head.Value)
				if typeK != nil && typeV != nil {
					// Union key/value types with any previously inferred
					// key/value types at the same path.
					exist := env.tree.Get(path)
					typeV = types.Or(types.Values(exist), typeV)
					typeK = types.Or(types.Keys(exist), typeK)
					tpe = types.NewObject(nil, types.NewDynamicProperty(typeK, typeV))
				}
			case PartialSetDoc:
				typeK := cpy.Get(rule.Head.Key)
				if typeK != nil {
					exist := env.tree.Get(path)
					typeK = types.Or(types.Keys(exist), typeK)
					tpe = types.NewSet(typeK)
				}
			}
		}

		// A nil tpe (e.g. untyped head value) records nothing.
		if tpe != nil {
			env.tree.Put(path, tpe)
		}
	}
}
|
||||
|
||||
func (tc *typeChecker) checkExpr(env *TypeEnv, expr *Expr) *Error {
|
||||
if !expr.IsCall() {
|
||||
return nil
|
||||
}
|
||||
|
||||
checker := tc.exprCheckers[expr.Operator().String()]
|
||||
if checker != nil {
|
||||
return checker(env, expr)
|
||||
}
|
||||
|
||||
return tc.checkExprBuiltin(env, expr)
|
||||
}
|
||||
|
||||
// checkExprBuiltin type checks a call expression against the declared type
// of its operator: the operator must resolve to a function type, the arity
// must fit, and each operand must unify with the corresponding declared
// argument type. When the function declares a result, it is treated as one
// optional trailing argument (the output position).
func (tc *typeChecker) checkExprBuiltin(env *TypeEnv, expr *Expr) *Error {

	args := expr.Operands()
	pre := getArgTypes(env, args)

	// NOTE(tsandall): undefined functions will have been caught earlier in the
	// compiler. We check for undefined functions before the safety check so
	// that references to non-existent functions result in undefined function
	// errors as opposed to unsafe var errors.
	//
	// We cannot run type checking before the safety check because part of the
	// type checker relies on reordering (in particular for references to local
	// vars).
	name := expr.Operator()
	tpe := env.Get(name)

	if tpe == nil {
		return NewError(TypeErr, expr.Location, "undefined function %v", name)
	}

	// A name that resolves to a non-function type gets the same error.
	ftpe, ok := tpe.(*types.Function)
	if !ok {
		return NewError(TypeErr, expr.Location, "undefined function %v", name)
	}

	maxArgs := len(ftpe.Args())
	expArgs := ftpe.Args()

	if ftpe.Result() != nil {
		// Callers may bind the result via an extra trailing operand.
		maxArgs++
		expArgs = append(expArgs, ftpe.Result())
	}

	if len(args) > maxArgs {
		return newArgError(expr.Location, name, "too many arguments", pre, expArgs)
	} else if len(args) < len(ftpe.Args()) {
		return newArgError(expr.Location, name, "too few arguments", pre, expArgs)
	}

	for i := range args {
		if !unify1(env, args[i], expArgs[i], false) {
			// Re-read operand types for the report: earlier unifications in
			// this loop may already have refined them in env.
			post := make([]types.Type, len(args))
			for i := range args {
				post[i] = env.Get(args[i])
			}
			return newArgError(expr.Location, name, "invalid argument(s)", post, expArgs)
		}
	}

	return nil
}
|
||||
|
||||
// checkExprEq type checks an eq (unification) expression: the arity is
// validated against the declared signature of the Equality built-in, then
// the two operands are unified bidirectionally; failure is reported as a
// match error carrying both operand types in the details.
func (tc *typeChecker) checkExprEq(env *TypeEnv, expr *Expr) *Error {

	pre := getArgTypes(env, expr.Operands())
	exp := Equality.Decl.Args()

	if len(pre) < len(exp) {
		return newArgError(expr.Location, expr.Operator(), "too few arguments", pre, exp)
	} else if len(exp) < len(pre) {
		return newArgError(expr.Location, expr.Operator(), "too many arguments", pre, exp)
	}

	a, b := expr.Operand(0), expr.Operand(1)
	typeA, typeB := env.Get(a), env.Get(b)

	if !unify2(env, a, typeA, b, typeB) {
		err := NewError(TypeErr, expr.Location, "match error")
		err.Details = &UnificationErrDetail{
			Left:  typeA,
			Right: typeB,
		}
		return err
	}

	return nil
}
|
||||
|
||||
// unify2 attempts bidirectional unification of terms a and b given their
// currently known types. If exactly one side is untyped (nil type), it is
// inferred from the other; if both are typed, the types just have to unify;
// if both are untyped, unification recurses on the terms' structure
// (arrays, objects, vars). Returns true on success; env may be updated with
// inferred var types as a side effect.
func unify2(env *TypeEnv, a *Term, typeA types.Type, b *Term, typeB types.Type) bool {

	nilA := types.Nil(typeA)
	nilB := types.Nil(typeB)

	if nilA && !nilB {
		return unify1(env, a, typeB, false)
	} else if nilB && !nilA {
		return unify1(env, b, typeA, false)
	} else if !nilA && !nilB {
		return unifies(typeA, typeB)
	}

	// Both sides untyped: recurse structurally. Array/object cases are
	// normalized so the composite term is always the first argument.
	switch a.Value.(type) {
	case Array:
		return unify2Array(env, a, typeA, b, typeB)
	case Object:
		return unify2Object(env, a, typeA, b, typeB)
	case Var:
		switch b.Value.(type) {
		case Var:
			// Type a as Any, then give b whatever a ended up with.
			return unify1(env, a, types.A, false) && unify1(env, b, env.Get(a), false)
		case Array:
			return unify2Array(env, b, typeB, a, typeA)
		case Object:
			return unify2Object(env, b, typeB, a, typeA)
		}
	}

	return false
}
|
||||
|
||||
func unify2Array(env *TypeEnv, a *Term, typeA types.Type, b *Term, typeB types.Type) bool {
|
||||
arr := a.Value.(Array)
|
||||
switch bv := b.Value.(type) {
|
||||
case Array:
|
||||
if len(arr) == len(bv) {
|
||||
for i := range arr {
|
||||
if !unify2(env, arr[i], env.Get(arr[i]), bv[i], env.Get(bv[i])) {
|
||||
return false
|
||||
}
|
||||
}
|
||||
return true
|
||||
}
|
||||
case Var:
|
||||
return unify1(env, a, types.A, false) && unify1(env, b, env.Get(a), false)
|
||||
}
|
||||
return false
|
||||
}
|
||||
|
||||
func unify2Object(env *TypeEnv, a *Term, typeA types.Type, b *Term, typeB types.Type) bool {
|
||||
obj := a.Value.(Object)
|
||||
switch bv := b.Value.(type) {
|
||||
case Object:
|
||||
cv := obj.Intersect(bv)
|
||||
if obj.Len() == bv.Len() && bv.Len() == len(cv) {
|
||||
for i := range cv {
|
||||
if !unify2(env, cv[i][1], env.Get(cv[i][1]), cv[i][2], env.Get(cv[i][2])) {
|
||||
return false
|
||||
}
|
||||
}
|
||||
return true
|
||||
}
|
||||
case Var:
|
||||
return unify1(env, a, types.A, false) && unify1(env, b, env.Get(a), false)
|
||||
}
|
||||
return false
|
||||
}
|
||||
|
||||
// unify1 unifies term with type tpe, propagating the type recursively onto
// the elements of composite values. Variables are bound in env; when union is
// true an existing binding is widened with types.Or instead of being checked
// for compatibility.
func unify1(env *TypeEnv, term *Term, tpe types.Type, union bool) bool {
	switch v := term.Value.(type) {
	case Array:
		switch tpe := tpe.(type) {
		case *types.Array:
			return unify1Array(env, v, tpe, union)
		case types.Any:
			if types.Compare(tpe, types.A) == 0 {
				// Unconstrained Any: type every element as Any.
				for i := range v {
					unify1(env, v[i], types.A, true)
				}
				return true
			}
			// Union type: the array unifies if it unifies with any disjunct.
			unifies := false
			for i := range tpe {
				unifies = unify1(env, term, tpe[i], true) || unifies
			}
			return unifies
		}
		return false
	case Object:
		switch tpe := tpe.(type) {
		case *types.Object:
			return unify1Object(env, v, tpe, union)
		case types.Any:
			if types.Compare(tpe, types.A) == 0 {
				// Unconstrained Any: type every key and value as Any.
				v.Foreach(func(key, value *Term) {
					unify1(env, key, types.A, true)
					unify1(env, value, types.A, true)
				})
				return true
			}
			// Union type: unify against each disjunct.
			unifies := false
			for i := range tpe {
				unifies = unify1(env, term, tpe[i], true) || unifies
			}
			return unifies
		}
		return false
	case Set:
		switch tpe := tpe.(type) {
		case *types.Set:
			return unify1Set(env, v, tpe, union)
		case types.Any:
			if types.Compare(tpe, types.A) == 0 {
				// Unconstrained Any: type every element as Any.
				v.Foreach(func(elem *Term) {
					unify1(env, elem, types.A, true)
				})
				return true
			}
			// Union type: unify against each disjunct.
			unifies := false
			for i := range tpe {
				unifies = unify1(env, term, tpe[i], true) || unifies
			}
			return unifies
		}
		return false
	case Ref, *ArrayComprehension, *ObjectComprehension, *SetComprehension:
		// Refs and comprehensions already have inferred types; just check
		// compatibility against tpe.
		return unifies(env.Get(v), tpe)
	case Var:
		if !union {
			if exist := env.Get(v); exist != nil {
				// Already bound: must be compatible.
				return unifies(exist, tpe)
			}
			env.tree.PutOne(term.Value, tpe)
		} else {
			// Widen any existing binding with the new type.
			env.tree.PutOne(term.Value, types.Or(env.Get(v), tpe))
		}
		return true
	default:
		// Remaining values must be scalar constants (Null, Boolean, Number,
		// String); anything else indicates a bug in the checker.
		if !IsConstant(v) {
			panic("unreachable")
		}
		return unifies(env.Get(term), tpe)
	}
}
|
||||
|
||||
func unify1Array(env *TypeEnv, val Array, tpe *types.Array, union bool) bool {
|
||||
if len(val) != tpe.Len() && tpe.Dynamic() == nil {
|
||||
return false
|
||||
}
|
||||
for i := range val {
|
||||
if !unify1(env, val[i], tpe.Select(i), union) {
|
||||
return false
|
||||
}
|
||||
}
|
||||
return true
|
||||
}
|
||||
|
||||
// unify1Object unifies the keys and values of an object value with an object
// type. Static object types require matching key counts; constant keys are
// matched against the type's declared properties.
func unify1Object(env *TypeEnv, val Object, tpe *types.Object, union bool) bool {
	if val.Len() != len(tpe.Keys()) && tpe.DynamicValue() == nil {
		return false
	}
	// Until stops at the first pair for which the callback returns true, so
	// "stop" here means a unification failure occurred.
	stop := val.Until(func(k, v *Term) bool {
		if IsConstant(k.Value) {
			if child := selectConstant(tpe, k); child != nil {
				if !unify1(env, v, child, union) {
					return true
				}
			} else {
				// Constant key not present in the type.
				return true
			}
		} else {
			// Inferring type of value under dynamic key would involve unioning
			// with all property values of tpe whose keys unify. For now, type
			// these values as Any. We can investigate stricter inference in
			// the future.
			unify1(env, v, types.A, union)
		}
		return false
	})
	return !stop
}
|
||||
|
||||
func unify1Set(env *TypeEnv, val Set, tpe *types.Set, union bool) bool {
|
||||
of := types.Values(tpe)
|
||||
return !val.Until(func(elem *Term) bool {
|
||||
return !unify1(env, elem, of, union)
|
||||
})
|
||||
}
|
||||
|
||||
// err records a type checking error on the checker's accumulated error list.
func (tc *typeChecker) err(err *Error) {
	tc.errs = append(tc.errs, err)
}
|
||||
|
||||
// refChecker type checks references against the type environment's tree,
// accumulating errors as it visits expressions.
type refChecker struct {
	env         *TypeEnv    // environment providing type info for refs/vars
	errs        Errors      // errors accumulated during checking
	varRewriter rewriteVars // rewrites vars in refs before error reporting
}
|
||||
|
||||
func newRefChecker(env *TypeEnv, f rewriteVars) *refChecker {
|
||||
|
||||
if f == nil {
|
||||
f = rewriteVarsNop
|
||||
}
|
||||
|
||||
return &refChecker{
|
||||
env: env,
|
||||
errs: nil,
|
||||
varRewriter: f,
|
||||
}
|
||||
}
|
||||
|
||||
func (rc *refChecker) Visit(x interface{}) bool {
|
||||
switch x := x.(type) {
|
||||
case *ArrayComprehension, *ObjectComprehension, *SetComprehension:
|
||||
return true
|
||||
case *Expr:
|
||||
switch terms := x.Terms.(type) {
|
||||
case []*Term:
|
||||
for i := 1; i < len(terms); i++ {
|
||||
NewGenericVisitor(rc.Visit).Walk(terms[i])
|
||||
}
|
||||
return true
|
||||
case *Term:
|
||||
NewGenericVisitor(rc.Visit).Walk(terms)
|
||||
return true
|
||||
}
|
||||
case Ref:
|
||||
if err := rc.checkApply(rc.env, x); err != nil {
|
||||
rc.errs = append(rc.errs, err)
|
||||
return true
|
||||
}
|
||||
if err := rc.checkRef(rc.env, rc.env.tree, x, 0); err != nil {
|
||||
rc.errs = append(rc.errs, err)
|
||||
}
|
||||
}
|
||||
return false
|
||||
}
|
||||
|
||||
func (rc *refChecker) checkApply(curr *TypeEnv, ref Ref) *Error {
|
||||
if tpe := curr.Get(ref); tpe != nil {
|
||||
if _, ok := tpe.(*types.Function); ok {
|
||||
return newRefErrUnsupported(ref[0].Location, rc.varRewriter(ref), len(ref)-1, tpe)
|
||||
}
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
// checkRef type checks ref against the type tree rooted at node, starting at
// operand position idx. Constant (string) operands descend the tree directly;
// dynamic operands (vars, refs) must be string-typed and cause checking to
// fan out over all of the node's children.
func (rc *refChecker) checkRef(curr *TypeEnv, node *typeTreeNode, ref Ref, idx int) *Error {

	if idx == len(ref) {
		return nil
	}

	head := ref[idx]

	// Handle constant ref operands, i.e., strings or the ref head.
	if _, ok := head.Value.(String); ok || idx == 0 {

		child := node.Child(head.Value)
		if child == nil {

			// No info in this environment; restart checking in the enclosing
			// environment if one exists.
			if curr.next != nil {
				next := curr.next
				return rc.checkRef(next, next.tree, ref, 0)
			}

			// Unknown root documents are treated as Any from here on.
			if RootDocumentNames.Contains(ref[0]) {
				return rc.checkRefLeaf(types.A, ref, 1)
			}

			return rc.checkRefLeaf(types.A, ref, 0)
		}

		if child.Leaf() {
			return rc.checkRefLeaf(child.Value(), ref, idx+1)
		}

		return rc.checkRef(curr, child, ref, idx+1)
	}

	// Handle dynamic ref operands.
	switch value := head.Value.(type) {

	case Var:

		if exist := rc.env.Get(value); exist != nil {
			// Keys over non-leaf nodes must be strings.
			if !unifies(types.S, exist) {
				return newRefErrInvalid(ref[0].Location, rc.varRewriter(ref), idx, exist, types.S, getOneOfForNode(node))
			}
		} else {
			// Untyped var: bind it to string.
			rc.env.tree.PutOne(value, types.S)
		}

	case Ref:

		exist := rc.env.Get(value)
		if exist == nil {
			// If ref type is unknown, an error will already be reported so
			// stop here.
			return nil
		}

		if !unifies(types.S, exist) {
			return newRefErrInvalid(ref[0].Location, rc.varRewriter(ref), idx, exist, types.S, getOneOfForNode(node))
		}

	// Catch other ref operand types here. Non-leaf nodes must be referred to
	// with string values.
	default:
		return newRefErrInvalid(ref[0].Location, rc.varRewriter(ref), idx, nil, types.S, getOneOfForNode(node))
	}

	// Run checking on remaining portion of the ref. Note, since the ref
	// potentially refers to data for which no type information exists,
	// checking should never fail.
	node.Children().Iter(func(_, child util.T) bool {
		rc.checkRef(curr, child.(*typeTreeNode), ref, idx+1)
		return false
	})

	return nil
}
|
||||
|
||||
// checkRefLeaf type checks the remainder of ref (from idx) against tpe, the
// type of the leaf document the ref has descended into.
func (rc *refChecker) checkRefLeaf(tpe types.Type, ref Ref, idx int) *Error {

	if idx == len(ref) {
		return nil
	}

	head := ref[idx]

	// Types without keys (scalars, etc.) cannot be dereferenced further.
	keys := types.Keys(tpe)
	if keys == nil {
		return newRefErrUnsupported(ref[0].Location, rc.varRewriter(ref), idx-1, tpe)
	}

	switch value := head.Value.(type) {

	case Var:
		if exist := rc.env.Get(value); exist != nil {
			if !unifies(exist, keys) {
				return newRefErrInvalid(ref[0].Location, rc.varRewriter(ref), idx, exist, keys, getOneOfForType(tpe))
			}
		} else {
			// Untyped var: bind it to the document's key type.
			rc.env.tree.PutOne(value, types.Keys(tpe))
		}

	case Ref:
		if exist := rc.env.Get(value); exist != nil {
			if !unifies(exist, keys) {
				return newRefErrInvalid(ref[0].Location, rc.varRewriter(ref), idx, exist, keys, getOneOfForType(tpe))
			}
		}

	case Array, Object, Set:
		// Composite references operands may only be used with a set.
		if !unifies(tpe, types.NewSet(types.A)) {
			return newRefErrInvalid(ref[0].Location, rc.varRewriter(ref), idx, tpe, types.NewSet(types.A), nil)
		}
		if !unify1(rc.env, head, keys, false) {
			return newRefErrInvalid(ref[0].Location, rc.varRewriter(ref), idx, rc.env.Get(head), keys, nil)
		}

	default:
		// Constant operand: select the corresponding value type directly.
		child := selectConstant(tpe, head)
		if child == nil {
			return newRefErrInvalid(ref[0].Location, rc.varRewriter(ref), idx, nil, types.Keys(tpe), getOneOfForType(tpe))
		}
		return rc.checkRefLeaf(child, ref, idx+1)
	}

	// Dynamic operand handled above: continue with the value type.
	return rc.checkRefLeaf(types.Values(tpe), ref, idx+1)
}
|
||||
|
||||
// unifies reports whether types a and b are compatible. nil (unknown) types
// never unify.
func unifies(a, b types.Type) bool {

	if a == nil || b == nil {
		return false
	}

	// Any on either side unifies if some disjunct unifies with the other.
	anyA, ok1 := a.(types.Any)
	if ok1 {
		if unifiesAny(anyA, b) {
			return true
		}
	}

	anyB, ok2 := b.(types.Any)
	if ok2 {
		if unifiesAny(anyB, a) {
			return true
		}
	}

	// Either side was an Any and neither direction unified: fail.
	if ok1 || ok2 {
		return false
	}

	switch a := a.(type) {
	case types.Null:
		_, ok := b.(types.Null)
		return ok
	case types.Boolean:
		_, ok := b.(types.Boolean)
		return ok
	case types.Number:
		_, ok := b.(types.Number)
		return ok
	case types.String:
		_, ok := b.(types.String)
		return ok
	case *types.Array:
		b, ok := b.(*types.Array)
		if !ok {
			return false
		}
		return unifiesArrays(a, b)
	case *types.Object:
		b, ok := b.(*types.Object)
		if !ok {
			return false
		}
		return unifiesObjects(a, b)
	case *types.Set:
		b, ok := b.(*types.Set)
		if !ok {
			return false
		}
		return unifies(types.Values(a), types.Values(b))
	case *types.Function:
		// TODO(tsandall): revisit once functions become first-class values.
		return false
	default:
		panic("unreachable")
	}
}
|
||||
|
||||
func unifiesAny(a types.Any, b types.Type) bool {
|
||||
if _, ok := b.(*types.Function); ok {
|
||||
return false
|
||||
}
|
||||
for i := range a {
|
||||
if unifies(a[i], b) {
|
||||
return true
|
||||
}
|
||||
}
|
||||
return len(a) == 0
|
||||
}
|
||||
|
||||
func unifiesArrays(a, b *types.Array) bool {
|
||||
|
||||
if !unifiesArraysStatic(a, b) {
|
||||
return false
|
||||
}
|
||||
|
||||
if !unifiesArraysStatic(b, a) {
|
||||
return false
|
||||
}
|
||||
|
||||
return a.Dynamic() == nil || b.Dynamic() == nil || unifies(a.Dynamic(), b.Dynamic())
|
||||
}
|
||||
|
||||
func unifiesArraysStatic(a, b *types.Array) bool {
|
||||
if a.Len() != 0 {
|
||||
for i := 0; i < a.Len(); i++ {
|
||||
if !unifies(a.Select(i), b.Select(i)) {
|
||||
return false
|
||||
}
|
||||
}
|
||||
}
|
||||
return true
|
||||
}
|
||||
|
||||
func unifiesObjects(a, b *types.Object) bool {
|
||||
if !unifiesObjectsStatic(a, b) {
|
||||
return false
|
||||
}
|
||||
|
||||
if !unifiesObjectsStatic(b, a) {
|
||||
return false
|
||||
}
|
||||
|
||||
return a.DynamicValue() == nil || b.DynamicValue() == nil || unifies(a.DynamicValue(), b.DynamicValue())
|
||||
}
|
||||
|
||||
func unifiesObjectsStatic(a, b *types.Object) bool {
|
||||
for _, k := range a.Keys() {
|
||||
if !unifies(a.Select(k), b.Select(k)) {
|
||||
return false
|
||||
}
|
||||
}
|
||||
return true
|
||||
}
|
||||
|
||||
// typeErrorCause defines an interface to determine the reason for a type
// error. The type error details implement this interface so that type checking
// can report more actionable errors.
type typeErrorCause interface {
	// nilType reports whether the error was caused by an unknown (nil) type.
	nilType() bool
}

// causedByNilType reports whether err carries details indicating it was
// caused by an unknown (nil) type.
func causedByNilType(err *Error) bool {
	cause, ok := err.Details.(typeErrorCause)
	if !ok {
		return false
	}
	return cause.nilType()
}
|
||||
|
||||
// ArgErrDetail represents a generic argument error.
type ArgErrDetail struct {
	Have []types.Type `json:"have"` // argument types supplied by the caller
	Want []types.Type `json:"want"` // argument types expected by the operator
}

// Lines returns the string representation of the detail.
func (d *ArgErrDetail) Lines() []string {
	lines := make([]string, 2)
	lines[0] = fmt.Sprint("have: ", formatArgs(d.Have))
	lines[1] = fmt.Sprint("want: ", formatArgs(d.Want))
	return lines
}

// nilType reports whether any of the supplied argument types is unknown.
func (d *ArgErrDetail) nilType() bool {
	for i := range d.Have {
		if types.Nil(d.Have[i]) {
			return true
		}
	}
	return false
}
|
||||
|
||||
// UnificationErrDetail describes a type mismatch error when two values are
// unified (e.g., x = [1,2,y]).
type UnificationErrDetail struct {
	Left  types.Type `json:"a"` // type of the left-hand side
	Right types.Type `json:"b"` // type of the right-hand side
}

// nilType reports whether either side of the unification has unknown type.
func (a *UnificationErrDetail) nilType() bool {
	return types.Nil(a.Left) || types.Nil(a.Right)
}

// Lines returns the string representation of the detail.
func (a *UnificationErrDetail) Lines() []string {
	lines := make([]string, 2)
	lines[0] = fmt.Sprint("left : ", types.Sprint(a.Left))
	lines[1] = fmt.Sprint("right : ", types.Sprint(a.Right))
	return lines
}
|
||||
|
||||
// RefErrUnsupportedDetail describes an undefined reference error where the
// referenced value does not support dereferencing (e.g., scalars).
type RefErrUnsupportedDetail struct {
	Ref  Ref        `json:"ref"`  // invalid ref
	Pos  int        `json:"pos"`  // invalid element
	Have types.Type `json:"have"` // referenced type
}

// Lines returns the string representation of the detail.
func (r *RefErrUnsupportedDetail) Lines() []string {
	lines := []string{
		r.Ref.String(),
		// Caret line underlining the ref up to and including the bad operand.
		strings.Repeat("^", len(r.Ref[:r.Pos+1].String())),
		fmt.Sprintf("have: %v", r.Have),
	}
	return lines
}
|
||||
|
||||
// RefErrInvalidDetail describes an undefined reference error where the referenced
// value does not support the reference operand (e.g., missing object key,
// invalid key type, etc.)
type RefErrInvalidDetail struct {
	Ref   Ref        `json:"ref"`            // invalid ref
	Pos   int        `json:"pos"`            // invalid element
	Have  types.Type `json:"have,omitempty"` // type of invalid element (for var/ref elements)
	Want  types.Type `json:"want"`           // allowed type (for non-object values)
	OneOf []Value    `json:"oneOf"`          // allowed values (e.g., for object keys)
}

// Lines returns the string representation of the detail.
func (r *RefErrInvalidDetail) Lines() []string {
	lines := []string{r.Ref.String()}
	// Pad so the caret lines up under the invalid operand.
	offset := len(r.Ref[:r.Pos].String()) + 1
	pad := strings.Repeat(" ", offset)
	lines = append(lines, fmt.Sprintf("%s^", pad))
	if r.Have != nil {
		lines = append(lines, fmt.Sprintf("%shave (type): %v", pad, r.Have))
	} else {
		lines = append(lines, fmt.Sprintf("%shave: %v", pad, r.Ref[r.Pos]))
	}
	if len(r.OneOf) > 0 {
		lines = append(lines, fmt.Sprintf("%swant (one of): %v", pad, r.OneOf))
	} else {
		lines = append(lines, fmt.Sprintf("%swant (type): %v", pad, r.Want))
	}
	return lines
}
|
||||
|
||||
func formatArgs(args []types.Type) string {
|
||||
buf := make([]string, len(args))
|
||||
for i := range args {
|
||||
buf[i] = types.Sprint(args[i])
|
||||
}
|
||||
return "(" + strings.Join(buf, ", ") + ")"
|
||||
}
|
||||
|
||||
// newRefErrInvalid constructs an undefined-ref error with invalid-operand
// details attached.
func newRefErrInvalid(loc *Location, ref Ref, idx int, have, want types.Type, oneOf []Value) *Error {
	err := newRefError(loc, ref)
	err.Details = &RefErrInvalidDetail{
		Ref:   ref,
		Pos:   idx,
		Have:  have,
		Want:  want,
		OneOf: oneOf,
	}
	return err
}

// newRefErrUnsupported constructs an undefined-ref error for refs that
// dereference a type which cannot be dereferenced (e.g., a scalar).
func newRefErrUnsupported(loc *Location, ref Ref, idx int, have types.Type) *Error {
	err := newRefError(loc, ref)
	err.Details = &RefErrUnsupportedDetail{
		Ref:  ref,
		Pos:  idx,
		Have: have,
	}
	return err
}

// newRefError constructs the base "undefined ref" type error.
func newRefError(loc *Location, ref Ref) *Error {
	return NewError(TypeErr, loc, "undefined ref: %v", ref)
}

// newArgError constructs a type error for a built-in call with invalid
// arguments, attaching have/want detail.
func newArgError(loc *Location, builtinName Ref, msg string, have []types.Type, want []types.Type) *Error {
	err := NewError(TypeErr, loc, "%v: %v", builtinName, msg)
	err.Details = &ArgErrDetail{
		Have: have,
		Want: want,
	}
	return err
}
|
||||
|
||||
func getOneOfForNode(node *typeTreeNode) (result []Value) {
|
||||
node.Children().Iter(func(k, _ util.T) bool {
|
||||
result = append(result, k.(Value))
|
||||
return false
|
||||
})
|
||||
|
||||
sortValueSlice(result)
|
||||
return result
|
||||
}
|
||||
|
||||
func getOneOfForType(tpe types.Type) (result []Value) {
|
||||
switch tpe := tpe.(type) {
|
||||
case *types.Object:
|
||||
for _, k := range tpe.Keys() {
|
||||
v, err := InterfaceToValue(k)
|
||||
if err != nil {
|
||||
panic(err)
|
||||
}
|
||||
result = append(result, v)
|
||||
}
|
||||
}
|
||||
sortValueSlice(result)
|
||||
return result
|
||||
}
|
||||
|
||||
func sortValueSlice(sl []Value) {
|
||||
sort.Slice(sl, func(i, j int) bool {
|
||||
return sl[i].Compare(sl[j]) < 0
|
||||
})
|
||||
}
|
||||
|
||||
func getArgTypes(env *TypeEnv, args []*Term) []types.Type {
|
||||
pre := make([]types.Type, len(args))
|
||||
for i := range args {
|
||||
pre[i] = env.Get(args[i])
|
||||
}
|
||||
return pre
|
||||
}
|
||||
327
vendor/github.com/open-policy-agent/opa/ast/compare.go
generated
vendored
Normal file
327
vendor/github.com/open-policy-agent/opa/ast/compare.go
generated
vendored
Normal file
@@ -0,0 +1,327 @@
|
||||
// Copyright 2016 The OPA Authors. All rights reserved.
|
||||
// Use of this source code is governed by an Apache2
|
||||
// license that can be found in the LICENSE file.
|
||||
|
||||
package ast
|
||||
|
||||
import (
|
||||
"encoding/json"
|
||||
"fmt"
|
||||
"math/big"
|
||||
)
|
||||
|
||||
// Compare returns an integer indicating whether two AST values are less than,
|
||||
// equal to, or greater than each other.
|
||||
//
|
||||
// If a is less than b, the return value is negative. If a is greater than b,
|
||||
// the return value is positive. If a is equal to b, the return value is zero.
|
||||
//
|
||||
// Different types are never equal to each other. For comparison purposes, types
|
||||
// are sorted as follows:
|
||||
//
|
||||
// nil < Null < Boolean < Number < String < Var < Ref < Array < Object < Set <
|
||||
// ArrayComprehension < ObjectComprehension < SetComprehension < Expr < SomeDecl
|
||||
// < With < Body < Rule < Import < Package < Module.
|
||||
//
|
||||
// Arrays and Refs are equal iff both a and b have the same length and all
|
||||
// corresponding elements are equal. If one element is not equal, the return
|
||||
// value is the same as for the first differing element. If all elements are
|
||||
// equal but a and b have different lengths, the shorter is considered less than
|
||||
// the other.
|
||||
//
|
||||
// Objects are considered equal iff both a and b have the same sorted (key,
|
||||
// value) pairs and are of the same length. Other comparisons are consistent but
|
||||
// not defined.
|
||||
//
|
||||
// Sets are considered equal iff the symmetric difference of a and b is empty.
|
||||
// Other comparisons are consistent but not defined.
|
||||
func Compare(a, b interface{}) int {

	// Unwrap terms to their underlying values; a nil *Term compares as nil.
	if t, ok := a.(*Term); ok {
		if t == nil {
			a = nil
		} else {
			a = t.Value
		}
	}

	if t, ok := b.(*Term); ok {
		if t == nil {
			b = nil
		} else {
			b = t.Value
		}
	}

	// nil sorts before everything else.
	if a == nil {
		if b == nil {
			return 0
		}
		return -1
	}
	if b == nil {
		return 1
	}

	// Values of different AST types are ordered by their fixed sort order.
	sortA := sortOrder(a)
	sortB := sortOrder(b)

	if sortA < sortB {
		return -1
	} else if sortB < sortA {
		return 1
	}

	// Same sort order implies same concrete type; the assertions on b below
	// are therefore safe.
	switch a := a.(type) {
	case Null:
		return 0
	case Boolean:
		b := b.(Boolean)
		if a.Equal(b) {
			return 0
		}
		if !a {
			return -1
		}
		return 1
	case Number:
		// Fast path: compare as int64 when both numbers parse exactly.
		if ai, err := json.Number(a).Int64(); err == nil {
			if bi, err := json.Number(b.(Number)).Int64(); err == nil {
				if ai == bi {
					return 0
				}
				if ai < bi {
					return -1
				}
				return 1
			}
		}

		// Slow path: arbitrary-precision float comparison.
		bigA, ok := new(big.Float).SetString(string(a))
		if !ok {
			panic("illegal value")
		}
		bigB, ok := new(big.Float).SetString(string(b.(Number)))
		if !ok {
			panic("illegal value")
		}
		return bigA.Cmp(bigB)
	case String:
		b := b.(String)
		if a.Equal(b) {
			return 0
		}
		if a < b {
			return -1
		}
		return 1
	case Var:
		b := b.(Var)
		if a.Equal(b) {
			return 0
		}
		if a < b {
			return -1
		}
		return 1
	case Ref:
		b := b.(Ref)
		return termSliceCompare(a, b)
	case Array:
		b := b.(Array)
		return termSliceCompare(a, b)
	case Object:
		b := b.(Object)
		return a.Compare(b)
	case Set:
		b := b.(Set)
		return a.Compare(b)
	case *ArrayComprehension:
		b := b.(*ArrayComprehension)
		if cmp := Compare(a.Term, b.Term); cmp != 0 {
			return cmp
		}
		return Compare(a.Body, b.Body)
	case *ObjectComprehension:
		b := b.(*ObjectComprehension)
		if cmp := Compare(a.Key, b.Key); cmp != 0 {
			return cmp
		}
		if cmp := Compare(a.Value, b.Value); cmp != 0 {
			return cmp
		}
		return Compare(a.Body, b.Body)
	case *SetComprehension:
		b := b.(*SetComprehension)
		if cmp := Compare(a.Term, b.Term); cmp != 0 {
			return cmp
		}
		return Compare(a.Body, b.Body)
	case Call:
		b := b.(Call)
		return termSliceCompare(a, b)
	case *Expr:
		b := b.(*Expr)
		return a.Compare(b)
	case *SomeDecl:
		b := b.(*SomeDecl)
		return a.Compare(b)
	case *With:
		b := b.(*With)
		return a.Compare(b)
	case Body:
		b := b.(Body)
		return a.Compare(b)
	case *Head:
		b := b.(*Head)
		return a.Compare(b)
	case *Rule:
		b := b.(*Rule)
		return a.Compare(b)
	case Args:
		b := b.(Args)
		return termSliceCompare(a, b)
	case *Import:
		b := b.(*Import)
		return a.Compare(b)
	case *Package:
		b := b.(*Package)
		return a.Compare(b)
	case *Module:
		b := b.(*Module)
		return a.Compare(b)
	}
	panic(fmt.Sprintf("illegal value: %T", a))
}
|
||||
|
||||
// termSlice implements sort.Interface to order terms by their values.
type termSlice []*Term

func (s termSlice) Less(i, j int) bool { return Compare(s[i].Value, s[j].Value) < 0 }
func (s termSlice) Swap(i, j int)      { x := s[i]; s[i] = s[j]; s[j] = x }
func (s termSlice) Len() int           { return len(s) }
|
||||
|
||||
// sortOrder returns the fixed ordinal used to order AST values of different
// types relative to each other (see the Compare doc comment). The specific
// numbers are part of the documented ordering and must not change.
func sortOrder(x interface{}) int {
	switch x.(type) {
	case Null:
		return 0
	case Boolean:
		return 1
	case Number:
		return 2
	case String:
		return 3
	case Var:
		return 4
	case Ref:
		return 5
	case Array:
		return 6
	case Object:
		return 7
	case Set:
		return 8
	case *ArrayComprehension:
		return 9
	case *ObjectComprehension:
		return 10
	case *SetComprehension:
		return 11
	case Call:
		return 12
	case Args:
		return 13
	case *Expr:
		return 100
	case *SomeDecl:
		return 101
	case *With:
		return 110
	case *Head:
		return 120
	case Body:
		return 200
	case *Rule:
		return 1000
	case *Import:
		return 1001
	case *Package:
		return 1002
	case *Module:
		return 10000
	}
	panic(fmt.Sprintf("illegal value: %T", x))
}
|
||||
|
||||
func importsCompare(a, b []*Import) int {
|
||||
minLen := len(a)
|
||||
if len(b) < minLen {
|
||||
minLen = len(b)
|
||||
}
|
||||
for i := 0; i < minLen; i++ {
|
||||
if cmp := a[i].Compare(b[i]); cmp != 0 {
|
||||
return cmp
|
||||
}
|
||||
}
|
||||
if len(a) < len(b) {
|
||||
return -1
|
||||
}
|
||||
if len(b) < len(a) {
|
||||
return 1
|
||||
}
|
||||
return 0
|
||||
}
|
||||
|
||||
func rulesCompare(a, b []*Rule) int {
|
||||
minLen := len(a)
|
||||
if len(b) < minLen {
|
||||
minLen = len(b)
|
||||
}
|
||||
for i := 0; i < minLen; i++ {
|
||||
if cmp := a[i].Compare(b[i]); cmp != 0 {
|
||||
return cmp
|
||||
}
|
||||
}
|
||||
if len(a) < len(b) {
|
||||
return -1
|
||||
}
|
||||
if len(b) < len(a) {
|
||||
return 1
|
||||
}
|
||||
return 0
|
||||
}
|
||||
|
||||
func termSliceCompare(a, b []*Term) int {
|
||||
minLen := len(a)
|
||||
if len(b) < minLen {
|
||||
minLen = len(b)
|
||||
}
|
||||
for i := 0; i < minLen; i++ {
|
||||
if cmp := Compare(a[i], b[i]); cmp != 0 {
|
||||
return cmp
|
||||
}
|
||||
}
|
||||
if len(a) < len(b) {
|
||||
return -1
|
||||
} else if len(b) < len(a) {
|
||||
return 1
|
||||
}
|
||||
return 0
|
||||
}
|
||||
|
||||
func withSliceCompare(a, b []*With) int {
|
||||
minLen := len(a)
|
||||
if len(b) < minLen {
|
||||
minLen = len(b)
|
||||
}
|
||||
for i := 0; i < minLen; i++ {
|
||||
if cmp := Compare(a[i], b[i]); cmp != 0 {
|
||||
return cmp
|
||||
}
|
||||
}
|
||||
if len(a) < len(b) {
|
||||
return -1
|
||||
} else if len(b) < len(a) {
|
||||
return 1
|
||||
}
|
||||
return 0
|
||||
}
|
||||
3419
vendor/github.com/open-policy-agent/opa/ast/compile.go
generated
vendored
Normal file
3419
vendor/github.com/open-policy-agent/opa/ast/compile.go
generated
vendored
Normal file
File diff suppressed because it is too large
Load Diff
42
vendor/github.com/open-policy-agent/opa/ast/compilehelper.go
generated
vendored
Normal file
42
vendor/github.com/open-policy-agent/opa/ast/compilehelper.go
generated
vendored
Normal file
@@ -0,0 +1,42 @@
|
||||
// Copyright 2016 The OPA Authors. All rights reserved.
|
||||
// Use of this source code is governed by an Apache2
|
||||
// license that can be found in the LICENSE file.
|
||||
|
||||
package ast
|
||||
|
||||
// CompileModules takes a set of Rego modules represented as strings and
|
||||
// compiles them for evaluation. The keys of the map are used as filenames.
|
||||
func CompileModules(modules map[string]string) (*Compiler, error) {
|
||||
|
||||
parsed := make(map[string]*Module, len(modules))
|
||||
|
||||
for f, module := range modules {
|
||||
var pm *Module
|
||||
var err error
|
||||
if pm, err = ParseModule(f, module); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
parsed[f] = pm
|
||||
}
|
||||
|
||||
compiler := NewCompiler()
|
||||
compiler.Compile(parsed)
|
||||
|
||||
if compiler.Failed() {
|
||||
return nil, compiler.Errors
|
||||
}
|
||||
|
||||
return compiler, nil
|
||||
}
|
||||
|
||||
// MustCompileModules compiles a set of Rego modules represented as strings. If
|
||||
// the compilation process fails, this function panics.
|
||||
func MustCompileModules(modules map[string]string) *Compiler {
|
||||
|
||||
compiler, err := CompileModules(modules)
|
||||
if err != nil {
|
||||
panic(err)
|
||||
}
|
||||
|
||||
return compiler
|
||||
}
|
||||
48
vendor/github.com/open-policy-agent/opa/ast/conflicts.go
generated
vendored
Normal file
48
vendor/github.com/open-policy-agent/opa/ast/conflicts.go
generated
vendored
Normal file
@@ -0,0 +1,48 @@
|
||||
// Copyright 2019 The OPA Authors. All rights reserved.
|
||||
// Use of this source code is governed by an Apache2
|
||||
// license that can be found in the LICENSE file.
|
||||
|
||||
package ast
|
||||
|
||||
import (
|
||||
"strings"
|
||||
)
|
||||
|
||||
// CheckPathConflicts returns a set of errors indicating paths that
|
||||
// are in conflict with the result of the provided callable.
|
||||
func CheckPathConflicts(c *Compiler, exists func([]string) (bool, error)) Errors {
|
||||
var errs Errors
|
||||
|
||||
root := c.RuleTree.Child(DefaultRootDocument.Value)
|
||||
if root == nil {
|
||||
return nil
|
||||
}
|
||||
|
||||
for _, node := range root.Children {
|
||||
errs = append(errs, checkDocumentConflicts(node, exists, nil)...)
|
||||
}
|
||||
|
||||
return errs
|
||||
}
|
||||
|
||||
// checkDocumentConflicts walks the rule tree depth-first, reporting an error
// for each rule whose data path already exists according to the supplied
// exists callable. path accumulates the string keys from the root.
func checkDocumentConflicts(node *TreeNode, exists func([]string) (bool, error), path []string) Errors {

	path = append(path, string(node.Key.(String)))

	// A node with values holds rules; a conflict at this path stops descent.
	if len(node.Values) > 0 {
		s := strings.Join(path, "/")
		if ok, err := exists(path); err != nil {
			return Errors{NewError(CompileErr, node.Values[0].(*Rule).Loc(), "conflict check for data path %v: %v", s, err.Error())}
		} else if ok {
			return Errors{NewError(CompileErr, node.Values[0].(*Rule).Loc(), "conflicting rule for data path %v found", s)}
		}
	}

	var errs Errors

	for _, child := range node.Children {
		errs = append(errs, checkDocumentConflicts(child, exists, path)...)
	}

	return errs
}
|
||||
36
vendor/github.com/open-policy-agent/opa/ast/doc.go
generated
vendored
Normal file
36
vendor/github.com/open-policy-agent/opa/ast/doc.go
generated
vendored
Normal file
@@ -0,0 +1,36 @@
|
||||
// Copyright 2016 The OPA Authors. All rights reserved.
|
||||
// Use of this source code is governed by an Apache2
|
||||
// license that can be found in the LICENSE file.
|
||||
|
||||
// Package ast declares Rego syntax tree types and also includes a parser and compiler for preparing policies for execution in the policy engine.
|
||||
//
|
||||
// Rego policies are defined using a relatively small set of types: modules, package and import declarations, rules, expressions, and terms. At their core, policies consist of rules that are defined by one or more expressions over documents available to the policy engine. The expressions are defined by intrinsic values (terms) such as strings, objects, variables, etc.
|
||||
//
|
||||
// Rego policies are typically defined in text files and then parsed and compiled by the policy engine at runtime. The parsing stage takes the text or string representation of the policy and converts it into an abstract syntax tree (AST) that consists of the types mentioned above. The AST is organized as follows:
|
||||
//
|
||||
// Module
|
||||
// |
|
||||
// +--- Package (Reference)
|
||||
// |
|
||||
// +--- Imports
|
||||
// | |
|
||||
// | +--- Import (Term)
|
||||
// |
|
||||
// +--- Rules
|
||||
// |
|
||||
// +--- Rule
|
||||
// |
|
||||
// +--- Head
|
||||
// | |
|
||||
// | +--- Name (Variable)
|
||||
// | |
|
||||
// | +--- Key (Term)
|
||||
// | |
|
||||
// | +--- Value (Term)
|
||||
// |
|
||||
// +--- Body
|
||||
// |
|
||||
// +--- Expression (Term | Terms | Variable Declaration)
|
||||
//
|
||||
// At query time, the policy engine expects policies to have been compiled. The compilation stage takes one or more modules and compiles them into a format that the policy engine supports.
|
||||
package ast
|
||||
323
vendor/github.com/open-policy-agent/opa/ast/env.go
generated
vendored
Normal file
323
vendor/github.com/open-policy-agent/opa/ast/env.go
generated
vendored
Normal file
@@ -0,0 +1,323 @@
|
||||
// Copyright 2017 The OPA Authors. All rights reserved.
|
||||
// Use of this source code is governed by an Apache2
|
||||
// license that can be found in the LICENSE file.
|
||||
|
||||
package ast
|
||||
|
||||
import (
|
||||
"github.com/open-policy-agent/opa/types"
|
||||
"github.com/open-policy-agent/opa/util"
|
||||
)
|
||||
|
||||
// TypeEnv contains type info for static analysis such as type checking.
|
||||
type TypeEnv struct {
|
||||
tree *typeTreeNode
|
||||
next *TypeEnv
|
||||
}
|
||||
|
||||
// NewTypeEnv returns an empty TypeEnv.
|
||||
func NewTypeEnv() *TypeEnv {
|
||||
return &TypeEnv{
|
||||
tree: newTypeTree(),
|
||||
}
|
||||
}
|
||||
|
||||
// Get returns the type of x.
|
||||
func (env *TypeEnv) Get(x interface{}) types.Type {
|
||||
|
||||
if term, ok := x.(*Term); ok {
|
||||
x = term.Value
|
||||
}
|
||||
|
||||
switch x := x.(type) {
|
||||
|
||||
// Scalars.
|
||||
case Null:
|
||||
return types.NewNull()
|
||||
case Boolean:
|
||||
return types.NewBoolean()
|
||||
case Number:
|
||||
return types.NewNumber()
|
||||
case String:
|
||||
return types.NewString()
|
||||
|
||||
// Composites.
|
||||
case Array:
|
||||
static := make([]types.Type, len(x))
|
||||
for i := range static {
|
||||
tpe := env.Get(x[i].Value)
|
||||
static[i] = tpe
|
||||
}
|
||||
|
||||
var dynamic types.Type
|
||||
if len(static) == 0 {
|
||||
dynamic = types.A
|
||||
}
|
||||
|
||||
return types.NewArray(static, dynamic)
|
||||
|
||||
case Object:
|
||||
static := []*types.StaticProperty{}
|
||||
var dynamic *types.DynamicProperty
|
||||
|
||||
x.Foreach(func(k, v *Term) {
|
||||
if IsConstant(k.Value) {
|
||||
kjson, err := JSON(k.Value)
|
||||
if err == nil {
|
||||
tpe := env.Get(v)
|
||||
static = append(static, types.NewStaticProperty(kjson, tpe))
|
||||
return
|
||||
}
|
||||
}
|
||||
// Can't handle it as a static property, fallback to dynamic
|
||||
typeK := env.Get(k.Value)
|
||||
typeV := env.Get(v.Value)
|
||||
dynamic = types.NewDynamicProperty(typeK, typeV)
|
||||
})
|
||||
|
||||
if len(static) == 0 && dynamic == nil {
|
||||
dynamic = types.NewDynamicProperty(types.A, types.A)
|
||||
}
|
||||
|
||||
return types.NewObject(static, dynamic)
|
||||
|
||||
case Set:
|
||||
var tpe types.Type
|
||||
x.Foreach(func(elem *Term) {
|
||||
other := env.Get(elem.Value)
|
||||
tpe = types.Or(tpe, other)
|
||||
})
|
||||
if tpe == nil {
|
||||
tpe = types.A
|
||||
}
|
||||
return types.NewSet(tpe)
|
||||
|
||||
// Comprehensions.
|
||||
case *ArrayComprehension:
|
||||
checker := newTypeChecker()
|
||||
cpy, errs := checker.CheckBody(env, x.Body)
|
||||
if len(errs) == 0 {
|
||||
return types.NewArray(nil, cpy.Get(x.Term))
|
||||
}
|
||||
return nil
|
||||
case *ObjectComprehension:
|
||||
checker := newTypeChecker()
|
||||
cpy, errs := checker.CheckBody(env, x.Body)
|
||||
if len(errs) == 0 {
|
||||
return types.NewObject(nil, types.NewDynamicProperty(cpy.Get(x.Key), cpy.Get(x.Value)))
|
||||
}
|
||||
return nil
|
||||
case *SetComprehension:
|
||||
checker := newTypeChecker()
|
||||
cpy, errs := checker.CheckBody(env, x.Body)
|
||||
if len(errs) == 0 {
|
||||
return types.NewSet(cpy.Get(x.Term))
|
||||
}
|
||||
return nil
|
||||
|
||||
// Refs.
|
||||
case Ref:
|
||||
return env.getRef(x)
|
||||
|
||||
// Vars.
|
||||
case Var:
|
||||
if node := env.tree.Child(x); node != nil {
|
||||
return node.Value()
|
||||
}
|
||||
if env.next != nil {
|
||||
return env.next.Get(x)
|
||||
}
|
||||
return nil
|
||||
|
||||
default:
|
||||
panic("unreachable")
|
||||
}
|
||||
}
|
||||
|
||||
func (env *TypeEnv) getRef(ref Ref) types.Type {
|
||||
|
||||
node := env.tree.Child(ref[0].Value)
|
||||
if node == nil {
|
||||
return env.getRefFallback(ref)
|
||||
}
|
||||
|
||||
return env.getRefRec(node, ref, ref[1:])
|
||||
}
|
||||
|
||||
func (env *TypeEnv) getRefFallback(ref Ref) types.Type {
|
||||
|
||||
if env.next != nil {
|
||||
return env.next.Get(ref)
|
||||
}
|
||||
|
||||
if RootDocumentNames.Contains(ref[0]) {
|
||||
return types.A
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
func (env *TypeEnv) getRefRec(node *typeTreeNode, ref, tail Ref) types.Type {
|
||||
if len(tail) == 0 {
|
||||
return env.getRefRecExtent(node)
|
||||
}
|
||||
|
||||
if node.Leaf() {
|
||||
return selectRef(node.Value(), tail)
|
||||
}
|
||||
|
||||
if !IsConstant(tail[0].Value) {
|
||||
return selectRef(env.getRefRecExtent(node), tail)
|
||||
}
|
||||
|
||||
child := node.Child(tail[0].Value)
|
||||
if child == nil {
|
||||
return env.getRefFallback(ref)
|
||||
}
|
||||
|
||||
return env.getRefRec(child, ref, tail[1:])
|
||||
}
|
||||
|
||||
func (env *TypeEnv) getRefRecExtent(node *typeTreeNode) types.Type {
|
||||
|
||||
if node.Leaf() {
|
||||
return node.Value()
|
||||
}
|
||||
|
||||
children := []*types.StaticProperty{}
|
||||
|
||||
node.Children().Iter(func(k, v util.T) bool {
|
||||
key := k.(Value)
|
||||
child := v.(*typeTreeNode)
|
||||
|
||||
tpe := env.getRefRecExtent(child)
|
||||
// TODO(tsandall): handle non-string keys?
|
||||
if s, ok := key.(String); ok {
|
||||
children = append(children, types.NewStaticProperty(string(s), tpe))
|
||||
}
|
||||
return false
|
||||
})
|
||||
|
||||
// TODO(tsandall): for now, these objects can have any dynamic properties
|
||||
// because we don't have schema for base docs. Once schemas are supported
|
||||
// we can improve this.
|
||||
return types.NewObject(children, types.NewDynamicProperty(types.S, types.A))
|
||||
}
|
||||
|
||||
func (env *TypeEnv) wrap() *TypeEnv {
|
||||
cpy := *env
|
||||
cpy.next = env
|
||||
cpy.tree = newTypeTree()
|
||||
return &cpy
|
||||
}
|
||||
|
||||
// typeTreeNode is used to store type information in a tree.
|
||||
type typeTreeNode struct {
|
||||
key Value
|
||||
value types.Type
|
||||
children *util.HashMap
|
||||
}
|
||||
|
||||
func newTypeTree() *typeTreeNode {
|
||||
return &typeTreeNode{
|
||||
key: nil,
|
||||
value: nil,
|
||||
children: util.NewHashMap(valueEq, valueHash),
|
||||
}
|
||||
}
|
||||
|
||||
func (n *typeTreeNode) Child(key Value) *typeTreeNode {
|
||||
value, ok := n.children.Get(key)
|
||||
if !ok {
|
||||
return nil
|
||||
}
|
||||
return value.(*typeTreeNode)
|
||||
}
|
||||
|
||||
func (n *typeTreeNode) Children() *util.HashMap {
|
||||
return n.children
|
||||
}
|
||||
|
||||
func (n *typeTreeNode) Get(path Ref) types.Type {
|
||||
curr := n
|
||||
for _, term := range path {
|
||||
child, ok := curr.children.Get(term.Value)
|
||||
if !ok {
|
||||
return nil
|
||||
}
|
||||
curr = child.(*typeTreeNode)
|
||||
}
|
||||
return curr.Value()
|
||||
}
|
||||
|
||||
func (n *typeTreeNode) Leaf() bool {
|
||||
return n.value != nil
|
||||
}
|
||||
|
||||
func (n *typeTreeNode) PutOne(key Value, tpe types.Type) {
|
||||
c, ok := n.children.Get(key)
|
||||
|
||||
var child *typeTreeNode
|
||||
if !ok {
|
||||
child = newTypeTree()
|
||||
child.key = key
|
||||
n.children.Put(key, child)
|
||||
} else {
|
||||
child = c.(*typeTreeNode)
|
||||
}
|
||||
|
||||
child.value = tpe
|
||||
}
|
||||
|
||||
func (n *typeTreeNode) Put(path Ref, tpe types.Type) {
|
||||
curr := n
|
||||
for _, term := range path {
|
||||
c, ok := curr.children.Get(term.Value)
|
||||
|
||||
var child *typeTreeNode
|
||||
if !ok {
|
||||
child = newTypeTree()
|
||||
child.key = term.Value
|
||||
curr.children.Put(child.key, child)
|
||||
} else {
|
||||
child = c.(*typeTreeNode)
|
||||
}
|
||||
|
||||
curr = child
|
||||
}
|
||||
curr.value = tpe
|
||||
}
|
||||
|
||||
func (n *typeTreeNode) Value() types.Type {
|
||||
return n.value
|
||||
}
|
||||
|
||||
// selectConstant returns the attribute of the type referred to by the term. If
|
||||
// the attribute type cannot be determined, nil is returned.
|
||||
func selectConstant(tpe types.Type, term *Term) types.Type {
|
||||
x, err := JSON(term.Value)
|
||||
if err == nil {
|
||||
return types.Select(tpe, x)
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
// selectRef returns the type of the nested attribute referred to by ref. If
|
||||
// the attribute type cannot be determined, nil is returned. If the ref
|
||||
// contains vars or refs, then the returned type will be a union of the
|
||||
// possible types.
|
||||
func selectRef(tpe types.Type, ref Ref) types.Type {
|
||||
|
||||
if tpe == nil || len(ref) == 0 {
|
||||
return tpe
|
||||
}
|
||||
|
||||
head, tail := ref[0], ref[1:]
|
||||
|
||||
switch head.Value.(type) {
|
||||
case Var, Ref, Array, Object, Set:
|
||||
return selectRef(types.Values(tpe), tail)
|
||||
default:
|
||||
return selectRef(selectConstant(tpe, head), tail)
|
||||
}
|
||||
}
|
||||
133
vendor/github.com/open-policy-agent/opa/ast/errors.go
generated
vendored
Normal file
133
vendor/github.com/open-policy-agent/opa/ast/errors.go
generated
vendored
Normal file
@@ -0,0 +1,133 @@
|
||||
// Copyright 2016 The OPA Authors. All rights reserved.
|
||||
// Use of this source code is governed by an Apache2
|
||||
// license that can be found in the LICENSE file.
|
||||
|
||||
package ast
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"sort"
|
||||
"strings"
|
||||
)
|
||||
|
||||
// Errors represents a series of errors encountered during parsing, compiling,
|
||||
// etc.
|
||||
type Errors []*Error
|
||||
|
||||
func (e Errors) Error() string {
|
||||
|
||||
if len(e) == 0 {
|
||||
return "no error(s)"
|
||||
}
|
||||
|
||||
if len(e) == 1 {
|
||||
return fmt.Sprintf("1 error occurred: %v", e[0].Error())
|
||||
}
|
||||
|
||||
s := []string{}
|
||||
for _, err := range e {
|
||||
s = append(s, err.Error())
|
||||
}
|
||||
|
||||
return fmt.Sprintf("%d errors occurred:\n%s", len(e), strings.Join(s, "\n"))
|
||||
}
|
||||
|
||||
// Sort sorts the error slice by location. If the locations are equal then the
|
||||
// error message is compared.
|
||||
func (e Errors) Sort() {
|
||||
sort.Slice(e, func(i, j int) bool {
|
||||
a := e[i]
|
||||
b := e[j]
|
||||
|
||||
if cmp := a.Location.Compare(b.Location); cmp != 0 {
|
||||
return cmp < 0
|
||||
}
|
||||
|
||||
return a.Error() < b.Error()
|
||||
})
|
||||
}
|
||||
|
||||
const (
|
||||
// ParseErr indicates an unclassified parse error occurred.
|
||||
ParseErr = "rego_parse_error"
|
||||
|
||||
// CompileErr indicates an unclassified compile error occurred.
|
||||
CompileErr = "rego_compile_error"
|
||||
|
||||
// TypeErr indicates a type error was caught.
|
||||
TypeErr = "rego_type_error"
|
||||
|
||||
// UnsafeVarErr indicates an unsafe variable was found during compilation.
|
||||
UnsafeVarErr = "rego_unsafe_var_error"
|
||||
|
||||
// RecursionErr indicates recursion was found during compilation.
|
||||
RecursionErr = "rego_recursion_error"
|
||||
)
|
||||
|
||||
// IsError returns true if err is an AST error with code.
|
||||
func IsError(code string, err error) bool {
|
||||
if err, ok := err.(*Error); ok {
|
||||
return err.Code == code
|
||||
}
|
||||
return false
|
||||
}
|
||||
|
||||
// ErrorDetails defines the interface for detailed error messages.
|
||||
type ErrorDetails interface {
|
||||
Lines() []string
|
||||
}
|
||||
|
||||
// Error represents a single error caught during parsing, compiling, etc.
|
||||
type Error struct {
|
||||
Code string `json:"code"`
|
||||
Message string `json:"message"`
|
||||
Location *Location `json:"location,omitempty"`
|
||||
Details ErrorDetails `json:"details,omitempty"`
|
||||
}
|
||||
|
||||
func (e *Error) Error() string {
|
||||
|
||||
var prefix string
|
||||
|
||||
if e.Location != nil {
|
||||
|
||||
if len(e.Location.File) > 0 {
|
||||
prefix += e.Location.File + ":" + fmt.Sprint(e.Location.Row)
|
||||
} else {
|
||||
prefix += fmt.Sprint(e.Location.Row) + ":" + fmt.Sprint(e.Location.Col)
|
||||
}
|
||||
}
|
||||
|
||||
msg := fmt.Sprintf("%v: %v", e.Code, e.Message)
|
||||
|
||||
if len(prefix) > 0 {
|
||||
msg = prefix + ": " + msg
|
||||
}
|
||||
|
||||
if e.Details != nil {
|
||||
for _, line := range e.Details.Lines() {
|
||||
msg += "\n\t" + line
|
||||
}
|
||||
}
|
||||
|
||||
return msg
|
||||
}
|
||||
|
||||
// NewError returns a new Error object.
|
||||
func NewError(code string, loc *Location, f string, a ...interface{}) *Error {
|
||||
return &Error{
|
||||
Code: code,
|
||||
Location: loc,
|
||||
Message: fmt.Sprintf(f, a...),
|
||||
}
|
||||
}
|
||||
|
||||
var (
|
||||
errPartialRuleAssignOperator = fmt.Errorf("partial rules must use = operator (not := operator)")
|
||||
errElseAssignOperator = fmt.Errorf("else keyword cannot be used on rule declared with := operator")
|
||||
errFunctionAssignOperator = fmt.Errorf("functions must use = operator (not := operator)")
|
||||
)
|
||||
|
||||
func errTermAssignOperator(x interface{}) error {
|
||||
return fmt.Errorf("cannot assign to %v", TypeName(x))
|
||||
}
|
||||
28
vendor/github.com/open-policy-agent/opa/ast/fuzz.go
generated
vendored
Normal file
28
vendor/github.com/open-policy-agent/opa/ast/fuzz.go
generated
vendored
Normal file
@@ -0,0 +1,28 @@
|
||||
// +build gofuzz
|
||||
|
||||
package ast
|
||||
|
||||
import (
|
||||
"regexp"
|
||||
)
|
||||
|
||||
// nested { and [ tokens cause the parse time to explode.
|
||||
// see: https://github.com/mna/pigeon/issues/75
|
||||
var blacklistRegexp = regexp.MustCompile(`[{(\[]{5,}`)
|
||||
|
||||
func Fuzz(data []byte) int {
|
||||
|
||||
if blacklistRegexp.Match(data) {
|
||||
return -1
|
||||
}
|
||||
|
||||
str := string(data)
|
||||
_, _, err := ParseStatements("", str)
|
||||
|
||||
if err == nil {
|
||||
CompileModules(map[string]string{"": str})
|
||||
return 1
|
||||
}
|
||||
|
||||
return 0
|
||||
}
|
||||
798
vendor/github.com/open-policy-agent/opa/ast/index.go
generated
vendored
Normal file
798
vendor/github.com/open-policy-agent/opa/ast/index.go
generated
vendored
Normal file
@@ -0,0 +1,798 @@
|
||||
// Copyright 2017 The OPA Authors. All rights reserved.
|
||||
// Use of this source code is governed by an Apache2
|
||||
// license that can be found in the LICENSE file.
|
||||
|
||||
package ast
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"io"
|
||||
"sort"
|
||||
"strings"
|
||||
|
||||
"github.com/open-policy-agent/opa/util"
|
||||
)
|
||||
|
||||
// RuleIndex defines the interface for rule indices.
|
||||
type RuleIndex interface {
|
||||
|
||||
// Build tries to construct an index for the given rules. If the index was
|
||||
// constructed, ok is true, otherwise false.
|
||||
Build(rules []*Rule) (ok bool)
|
||||
|
||||
// Lookup searches the index for rules that will match the provided
|
||||
// resolver. If the resolver returns an error, it is returned via err.
|
||||
Lookup(resolver ValueResolver) (result *IndexResult, err error)
|
||||
|
||||
// AllRules traverses the index and returns all rules that will match
|
||||
// the provided resolver without any optimizations (effectively with
|
||||
// indexing disabled). If the resolver returns an error, it is returned
|
||||
// via err.
|
||||
AllRules(resolver ValueResolver) (result *IndexResult, err error)
|
||||
}
|
||||
|
||||
// IndexResult contains the result of an index lookup.
|
||||
type IndexResult struct {
|
||||
Kind DocKind
|
||||
Rules []*Rule
|
||||
Else map[*Rule][]*Rule
|
||||
Default *Rule
|
||||
}
|
||||
|
||||
// NewIndexResult returns a new IndexResult object.
|
||||
func NewIndexResult(kind DocKind) *IndexResult {
|
||||
return &IndexResult{
|
||||
Kind: kind,
|
||||
Else: map[*Rule][]*Rule{},
|
||||
}
|
||||
}
|
||||
|
||||
// Empty returns true if there are no rules to evaluate.
|
||||
func (ir *IndexResult) Empty() bool {
|
||||
return len(ir.Rules) == 0 && ir.Default == nil
|
||||
}
|
||||
|
||||
type baseDocEqIndex struct {
|
||||
isVirtual func(Ref) bool
|
||||
root *trieNode
|
||||
defaultRule *Rule
|
||||
kind DocKind
|
||||
}
|
||||
|
||||
func newBaseDocEqIndex(isVirtual func(Ref) bool) *baseDocEqIndex {
|
||||
return &baseDocEqIndex{
|
||||
isVirtual: isVirtual,
|
||||
root: newTrieNodeImpl(),
|
||||
}
|
||||
}
|
||||
|
||||
func (i *baseDocEqIndex) Build(rules []*Rule) bool {
|
||||
if len(rules) == 0 {
|
||||
return false
|
||||
}
|
||||
|
||||
i.kind = rules[0].Head.DocKind()
|
||||
indices := newrefindices(i.isVirtual)
|
||||
|
||||
// build indices for each rule.
|
||||
for idx := range rules {
|
||||
WalkRules(rules[idx], func(rule *Rule) bool {
|
||||
if rule.Default {
|
||||
i.defaultRule = rule
|
||||
return false
|
||||
}
|
||||
for _, expr := range rule.Body {
|
||||
indices.Update(rule, expr)
|
||||
}
|
||||
return false
|
||||
})
|
||||
}
|
||||
|
||||
// build trie out of indices.
|
||||
for idx := range rules {
|
||||
var prio int
|
||||
WalkRules(rules[idx], func(rule *Rule) bool {
|
||||
if rule.Default {
|
||||
return false
|
||||
}
|
||||
node := i.root
|
||||
if indices.Indexed(rule) {
|
||||
for _, ref := range indices.Sorted() {
|
||||
node = node.Insert(ref, indices.Value(rule, ref), indices.Mapper(rule, ref))
|
||||
}
|
||||
}
|
||||
// Insert rule into trie with (insertion order, priority order)
|
||||
// tuple. Retaining the insertion order allows us to return rules
|
||||
// in the order they were passed to this function.
|
||||
node.rules = append(node.rules, &ruleNode{[...]int{idx, prio}, rule})
|
||||
prio++
|
||||
return false
|
||||
})
|
||||
|
||||
}
|
||||
|
||||
return true
|
||||
}
|
||||
|
||||
func (i *baseDocEqIndex) Lookup(resolver ValueResolver) (*IndexResult, error) {
|
||||
|
||||
tr := newTrieTraversalResult()
|
||||
|
||||
err := i.root.Traverse(resolver, tr)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
result := NewIndexResult(i.kind)
|
||||
result.Default = i.defaultRule
|
||||
result.Rules = make([]*Rule, 0, len(tr.ordering))
|
||||
|
||||
for _, pos := range tr.ordering {
|
||||
sort.Slice(tr.unordered[pos], func(i, j int) bool {
|
||||
return tr.unordered[pos][i].prio[1] < tr.unordered[pos][j].prio[1]
|
||||
})
|
||||
nodes := tr.unordered[pos]
|
||||
root := nodes[0].rule
|
||||
result.Rules = append(result.Rules, root)
|
||||
if len(nodes) > 1 {
|
||||
result.Else[root] = make([]*Rule, len(nodes)-1)
|
||||
for i := 1; i < len(nodes); i++ {
|
||||
result.Else[root][i-1] = nodes[i].rule
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return result, nil
|
||||
}
|
||||
|
||||
func (i *baseDocEqIndex) AllRules(resolver ValueResolver) (*IndexResult, error) {
|
||||
tr := newTrieTraversalResult()
|
||||
|
||||
// Walk over the rule trie and accumulate _all_ rules
|
||||
rw := &ruleWalker{result: tr}
|
||||
i.root.Do(rw)
|
||||
|
||||
result := NewIndexResult(i.kind)
|
||||
result.Default = i.defaultRule
|
||||
result.Rules = make([]*Rule, 0, len(tr.ordering))
|
||||
|
||||
for _, pos := range tr.ordering {
|
||||
sort.Slice(tr.unordered[pos], func(i, j int) bool {
|
||||
return tr.unordered[pos][i].prio[1] < tr.unordered[pos][j].prio[1]
|
||||
})
|
||||
nodes := tr.unordered[pos]
|
||||
root := nodes[0].rule
|
||||
result.Rules = append(result.Rules, root)
|
||||
if len(nodes) > 1 {
|
||||
result.Else[root] = make([]*Rule, len(nodes)-1)
|
||||
for i := 1; i < len(nodes); i++ {
|
||||
result.Else[root][i-1] = nodes[i].rule
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return result, nil
|
||||
}
|
||||
|
||||
type ruleWalker struct {
|
||||
result *trieTraversalResult
|
||||
}
|
||||
|
||||
func (r *ruleWalker) Do(x interface{}) trieWalker {
|
||||
tn := x.(*trieNode)
|
||||
for _, rn := range tn.rules {
|
||||
r.result.Add(rn)
|
||||
}
|
||||
return r
|
||||
}
|
||||
|
||||
type valueMapper func(Value) Value
|
||||
|
||||
type refindex struct {
|
||||
Ref Ref
|
||||
Value Value
|
||||
Mapper func(Value) Value
|
||||
}
|
||||
|
||||
type refindices struct {
|
||||
isVirtual func(Ref) bool
|
||||
rules map[*Rule][]*refindex
|
||||
frequency *util.HashMap
|
||||
sorted []Ref
|
||||
}
|
||||
|
||||
func newrefindices(isVirtual func(Ref) bool) *refindices {
|
||||
return &refindices{
|
||||
isVirtual: isVirtual,
|
||||
rules: map[*Rule][]*refindex{},
|
||||
frequency: util.NewHashMap(func(a, b util.T) bool {
|
||||
r1, r2 := a.(Ref), b.(Ref)
|
||||
return r1.Equal(r2)
|
||||
}, func(x util.T) int {
|
||||
return x.(Ref).Hash()
|
||||
}),
|
||||
}
|
||||
}
|
||||
|
||||
// Update attempts to update the refindices for the given expression in the
|
||||
// given rule. If the expression cannot be indexed the update does not affect
|
||||
// the indices.
|
||||
func (i *refindices) Update(rule *Rule, expr *Expr) {
|
||||
|
||||
if expr.Negated {
|
||||
return
|
||||
}
|
||||
|
||||
if len(expr.With) > 0 {
|
||||
// NOTE(tsandall): In the future, we may need to consider expressions
|
||||
// that have with statements applied to them.
|
||||
return
|
||||
}
|
||||
|
||||
op := expr.Operator()
|
||||
|
||||
if op.Equal(Equality.Ref()) || op.Equal(Equal.Ref()) {
|
||||
|
||||
i.updateEq(rule, expr)
|
||||
|
||||
} else if op.Equal(GlobMatch.Ref()) {
|
||||
|
||||
i.updateGlobMatch(rule, expr)
|
||||
}
|
||||
}
|
||||
|
||||
// Sorted returns a sorted list of references that the indices were built from.
|
||||
// References that appear more frequently in the indexed rules are ordered
|
||||
// before less frequently appearing references.
|
||||
func (i *refindices) Sorted() []Ref {
|
||||
|
||||
if i.sorted == nil {
|
||||
counts := make([]int, 0, i.frequency.Len())
|
||||
i.sorted = make([]Ref, 0, i.frequency.Len())
|
||||
|
||||
i.frequency.Iter(func(k, v util.T) bool {
|
||||
counts = append(counts, v.(int))
|
||||
i.sorted = append(i.sorted, k.(Ref))
|
||||
return false
|
||||
})
|
||||
|
||||
sort.Slice(i.sorted, func(i, j int) bool {
|
||||
return counts[i] > counts[j]
|
||||
})
|
||||
}
|
||||
|
||||
return i.sorted
|
||||
}
|
||||
|
||||
func (i *refindices) Indexed(rule *Rule) bool {
|
||||
return len(i.rules[rule]) > 0
|
||||
}
|
||||
|
||||
func (i *refindices) Value(rule *Rule, ref Ref) Value {
|
||||
if index := i.index(rule, ref); index != nil {
|
||||
return index.Value
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
func (i *refindices) Mapper(rule *Rule, ref Ref) valueMapper {
|
||||
if index := i.index(rule, ref); index != nil {
|
||||
return index.Mapper
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
func (i *refindices) updateEq(rule *Rule, expr *Expr) {
|
||||
a, b := expr.Operand(0), expr.Operand(1)
|
||||
if ref, value, ok := eqOperandsToRefAndValue(i.isVirtual, a, b); ok {
|
||||
i.insert(rule, &refindex{
|
||||
Ref: ref,
|
||||
Value: value,
|
||||
})
|
||||
} else if ref, value, ok := eqOperandsToRefAndValue(i.isVirtual, b, a); ok {
|
||||
i.insert(rule, &refindex{
|
||||
Ref: ref,
|
||||
Value: value,
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
func (i *refindices) updateGlobMatch(rule *Rule, expr *Expr) {
|
||||
|
||||
delim, ok := globDelimiterToString(expr.Operand(1))
|
||||
if !ok {
|
||||
return
|
||||
}
|
||||
|
||||
if arr := globPatternToArray(expr.Operand(0), delim); arr != nil {
|
||||
// The 3rd operand of glob.match is the value to match. We assume the
|
||||
// 3rd operand was a reference that has been rewritten and bound to a
|
||||
// variable earlier in the query.
|
||||
match := expr.Operand(2)
|
||||
if _, ok := match.Value.(Var); ok {
|
||||
for _, other := range i.rules[rule] {
|
||||
if _, ok := other.Value.(Var); ok && other.Value.Compare(match.Value) == 0 {
|
||||
i.insert(rule, &refindex{
|
||||
Ref: other.Ref,
|
||||
Value: arr.Value,
|
||||
Mapper: func(v Value) Value {
|
||||
if s, ok := v.(String); ok {
|
||||
return stringSliceToArray(splitStringEscaped(string(s), delim))
|
||||
}
|
||||
return v
|
||||
},
|
||||
})
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func (i *refindices) insert(rule *Rule, index *refindex) {
|
||||
|
||||
count, ok := i.frequency.Get(index.Ref)
|
||||
if !ok {
|
||||
count = 0
|
||||
}
|
||||
|
||||
i.frequency.Put(index.Ref, count.(int)+1)
|
||||
|
||||
for pos, other := range i.rules[rule] {
|
||||
if other.Ref.Equal(index.Ref) {
|
||||
i.rules[rule][pos] = index
|
||||
return
|
||||
}
|
||||
}
|
||||
|
||||
i.rules[rule] = append(i.rules[rule], index)
|
||||
}
|
||||
|
||||
func (i *refindices) index(rule *Rule, ref Ref) *refindex {
|
||||
for _, index := range i.rules[rule] {
|
||||
if index.Ref.Equal(ref) {
|
||||
return index
|
||||
}
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
type trieWalker interface {
|
||||
Do(x interface{}) trieWalker
|
||||
}
|
||||
|
||||
type trieTraversalResult struct {
|
||||
unordered map[int][]*ruleNode
|
||||
ordering []int
|
||||
}
|
||||
|
||||
func newTrieTraversalResult() *trieTraversalResult {
|
||||
return &trieTraversalResult{
|
||||
unordered: map[int][]*ruleNode{},
|
||||
}
|
||||
}
|
||||
|
||||
func (tr *trieTraversalResult) Add(node *ruleNode) {
|
||||
root := node.prio[0]
|
||||
nodes, ok := tr.unordered[root]
|
||||
if !ok {
|
||||
tr.ordering = append(tr.ordering, root)
|
||||
}
|
||||
tr.unordered[root] = append(nodes, node)
|
||||
}
|
||||
|
||||
type trieNode struct {
|
||||
ref Ref
|
||||
mapper valueMapper
|
||||
next *trieNode
|
||||
any *trieNode
|
||||
undefined *trieNode
|
||||
scalars map[Value]*trieNode
|
||||
array *trieNode
|
||||
rules []*ruleNode
|
||||
}
|
||||
|
||||
func (node *trieNode) String() string {
|
||||
var flags []string
|
||||
flags = append(flags, fmt.Sprintf("self:%p", node))
|
||||
if len(node.ref) > 0 {
|
||||
flags = append(flags, node.ref.String())
|
||||
}
|
||||
if node.next != nil {
|
||||
flags = append(flags, fmt.Sprintf("next:%p", node.next))
|
||||
}
|
||||
if node.any != nil {
|
||||
flags = append(flags, fmt.Sprintf("any:%p", node.any))
|
||||
}
|
||||
if node.undefined != nil {
|
||||
flags = append(flags, fmt.Sprintf("undefined:%p", node.undefined))
|
||||
}
|
||||
if node.array != nil {
|
||||
flags = append(flags, fmt.Sprintf("array:%p", node.array))
|
||||
}
|
||||
if len(node.scalars) > 0 {
|
||||
buf := []string{}
|
||||
for k, v := range node.scalars {
|
||||
buf = append(buf, fmt.Sprintf("scalar(%v):%p", k, v))
|
||||
}
|
||||
sort.Strings(buf)
|
||||
flags = append(flags, strings.Join(buf, " "))
|
||||
}
|
||||
if len(node.rules) > 0 {
|
||||
flags = append(flags, fmt.Sprintf("%d rule(s)", len(node.rules)))
|
||||
}
|
||||
if node.mapper != nil {
|
||||
flags = append(flags, "mapper")
|
||||
}
|
||||
return strings.Join(flags, " ")
|
||||
}
|
||||
|
||||
type ruleNode struct {
|
||||
prio [2]int
|
||||
rule *Rule
|
||||
}
|
||||
|
||||
func newTrieNodeImpl() *trieNode {
|
||||
return &trieNode{
|
||||
scalars: map[Value]*trieNode{},
|
||||
}
|
||||
}
|
||||
|
||||
func (node *trieNode) Do(walker trieWalker) {
|
||||
next := walker.Do(node)
|
||||
if next == nil {
|
||||
return
|
||||
}
|
||||
if node.any != nil {
|
||||
node.any.Do(next)
|
||||
}
|
||||
if node.undefined != nil {
|
||||
node.undefined.Do(next)
|
||||
}
|
||||
for _, child := range node.scalars {
|
||||
child.Do(next)
|
||||
}
|
||||
if node.array != nil {
|
||||
node.array.Do(next)
|
||||
}
|
||||
if node.next != nil {
|
||||
node.next.Do(next)
|
||||
}
|
||||
}
|
||||
|
||||
func (node *trieNode) Insert(ref Ref, value Value, mapper valueMapper) *trieNode {
|
||||
|
||||
if node.next == nil {
|
||||
node.next = newTrieNodeImpl()
|
||||
node.next.ref = ref
|
||||
}
|
||||
|
||||
node.next.mapper = mapper
|
||||
|
||||
return node.next.insertValue(value)
|
||||
}
|
||||
|
||||
func (node *trieNode) Traverse(resolver ValueResolver, tr *trieTraversalResult) error {
|
||||
|
||||
if node == nil {
|
||||
return nil
|
||||
}
|
||||
|
||||
for i := range node.rules {
|
||||
tr.Add(node.rules[i])
|
||||
}
|
||||
|
||||
return node.next.traverse(resolver, tr)
|
||||
}
|
||||
|
||||
func (node *trieNode) insertValue(value Value) *trieNode {
|
||||
|
||||
switch value := value.(type) {
|
||||
case nil:
|
||||
if node.undefined == nil {
|
||||
node.undefined = newTrieNodeImpl()
|
||||
}
|
||||
return node.undefined
|
||||
case Var:
|
||||
if node.any == nil {
|
||||
node.any = newTrieNodeImpl()
|
||||
}
|
||||
return node.any
|
||||
case Null, Boolean, Number, String:
|
||||
child, ok := node.scalars[value]
|
||||
if !ok {
|
||||
child = newTrieNodeImpl()
|
||||
node.scalars[value] = child
|
||||
}
|
||||
return child
|
||||
case Array:
|
||||
if node.array == nil {
|
||||
node.array = newTrieNodeImpl()
|
||||
}
|
||||
return node.array.insertArray(value)
|
||||
}
|
||||
|
||||
panic("illegal value")
|
||||
}
|
||||
|
||||
func (node *trieNode) insertArray(arr Array) *trieNode {
|
||||
|
||||
if len(arr) == 0 {
|
||||
return node
|
||||
}
|
||||
|
||||
switch head := arr[0].Value.(type) {
|
||||
case Var:
|
||||
if node.any == nil {
|
||||
node.any = newTrieNodeImpl()
|
||||
}
|
||||
return node.any.insertArray(arr[1:])
|
||||
case Null, Boolean, Number, String:
|
||||
child, ok := node.scalars[head]
|
||||
if !ok {
|
||||
child = newTrieNodeImpl()
|
||||
node.scalars[head] = child
|
||||
}
|
||||
return child.insertArray(arr[1:])
|
||||
}
|
||||
|
||||
panic("illegal value")
|
||||
}
|
||||
|
||||
func (node *trieNode) traverse(resolver ValueResolver, tr *trieTraversalResult) error {
|
||||
|
||||
if node == nil {
|
||||
return nil
|
||||
}
|
||||
|
||||
v, err := resolver.Resolve(node.ref)
|
||||
if err != nil {
|
||||
if IsUnknownValueErr(err) {
|
||||
return node.traverseUnknown(resolver, tr)
|
||||
}
|
||||
return err
|
||||
}
|
||||
|
||||
if node.undefined != nil {
|
||||
node.undefined.Traverse(resolver, tr)
|
||||
}
|
||||
|
||||
if v == nil {
|
||||
return nil
|
||||
}
|
||||
|
||||
if node.any != nil {
|
||||
node.any.Traverse(resolver, tr)
|
||||
}
|
||||
|
||||
if node.mapper != nil {
|
||||
v = node.mapper(v)
|
||||
}
|
||||
|
||||
return node.traverseValue(resolver, tr, v)
|
||||
}
|
||||
|
||||
func (node *trieNode) traverseValue(resolver ValueResolver, tr *trieTraversalResult, value Value) error {
|
||||
|
||||
switch value := value.(type) {
|
||||
case Array:
|
||||
if node.array == nil {
|
||||
return nil
|
||||
}
|
||||
return node.array.traverseArray(resolver, tr, value)
|
||||
|
||||
case Null, Boolean, Number, String:
|
||||
child, ok := node.scalars[value]
|
||||
if !ok {
|
||||
return nil
|
||||
}
|
||||
return child.Traverse(resolver, tr)
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
func (node *trieNode) traverseArray(resolver ValueResolver, tr *trieTraversalResult, arr Array) error {
|
||||
|
||||
if len(arr) == 0 {
|
||||
return node.Traverse(resolver, tr)
|
||||
}
|
||||
|
||||
head := arr[0].Value
|
||||
|
||||
if !IsScalar(head) {
|
||||
return nil
|
||||
}
|
||||
|
||||
if node.any != nil {
|
||||
node.any.traverseArray(resolver, tr, arr[1:])
|
||||
}
|
||||
|
||||
child, ok := node.scalars[head]
|
||||
if !ok {
|
||||
return nil
|
||||
}
|
||||
|
||||
return child.traverseArray(resolver, tr, arr[1:])
|
||||
}
|
||||
|
||||
func (node *trieNode) traverseUnknown(resolver ValueResolver, tr *trieTraversalResult) error {
|
||||
|
||||
if node == nil {
|
||||
return nil
|
||||
}
|
||||
|
||||
if err := node.Traverse(resolver, tr); err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
if err := node.undefined.traverseUnknown(resolver, tr); err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
if err := node.any.traverseUnknown(resolver, tr); err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
if err := node.array.traverseUnknown(resolver, tr); err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
for _, child := range node.scalars {
|
||||
if err := child.traverseUnknown(resolver, tr); err != nil {
|
||||
return err
|
||||
}
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
// triePrinter pretty-prints a trie for debugging, indenting each level by its
// depth. It implements the trie walker interface via Do.
type triePrinter struct {
	depth int       // current indentation depth (spaces per node level)
	w     io.Writer // destination for the printed representation
}
|
||||
|
||||
func (p triePrinter) Do(x interface{}) trieWalker {
|
||||
padding := strings.Repeat(" ", p.depth)
|
||||
fmt.Fprintf(p.w, "%v%v\n", padding, x)
|
||||
p.depth++
|
||||
return p
|
||||
}
|
||||
|
||||
// eqOperandsToRefAndValue checks whether an equality's operands form an
// indexable (ref, value) pair: a ground, non-nested, non-virtual reference
// into a root document on one side, and a scalar, var, or flat array on the
// other. Returns the ref, the value, and true on success.
func eqOperandsToRefAndValue(isVirtual func(Ref) bool, a, b *Term) (Ref, Value, bool) {

	// The first operand must be a reference rooted at one of the root documents.
	ref, ok := a.Value.(Ref)
	if !ok {
		return nil, nil, false
	}

	if !RootDocumentNames.Contains(ref[0]) {
		return nil, nil, false
	}

	// Refs that resolve to virtual documents cannot be indexed by stored value.
	if isVirtual(ref) {
		return nil, nil, false
	}

	// Nested or non-ground refs require evaluation before their value is known.
	if ref.IsNested() || !ref.IsGround() {
		return nil, nil, false
	}

	switch b := b.Value.(type) {
	case Null, Boolean, Number, String, Var:
		return ref, b, true
	case Array:
		// Accept the array only if it contains nothing that requires
		// evaluation. The visitor's first callback receives the array itself,
		// so skip it via the `first` flag before rejecting nested structures.
		stop := false
		first := true
		vis := NewGenericVisitor(func(x interface{}) bool {
			if first {
				first = false
				return false
			}
			switch x.(type) {
			// No nested structures or values that require evaluation (other than var).
			case Array, Object, Set, *ArrayComprehension, *ObjectComprehension, *SetComprehension, Ref:
				stop = true
			}
			return stop
		})
		vis.Walk(b)
		if !stop {
			return ref, b, true
		}
	}

	return nil, nil, false
}
|
||||
|
||||
func globDelimiterToString(delim *Term) (string, bool) {
|
||||
|
||||
arr, ok := delim.Value.(Array)
|
||||
if !ok {
|
||||
return "", false
|
||||
}
|
||||
|
||||
var result string
|
||||
|
||||
if len(arr) == 0 {
|
||||
result = "."
|
||||
} else {
|
||||
for _, term := range arr {
|
||||
s, ok := term.Value.(String)
|
||||
if !ok {
|
||||
return "", false
|
||||
}
|
||||
result += string(s)
|
||||
}
|
||||
}
|
||||
|
||||
return result, true
|
||||
}
|
||||
|
||||
// globPatternToArray converts a glob pattern string into an array term of its
// delimiter-separated parts. A bare "*" part becomes a wildcard var; parts
// containing unsupported glob metacharacters cause nil to be returned.
func globPatternToArray(pattern *Term, delim string) *Term {

	s, ok := pattern.Value.(String)
	if !ok {
		return nil
	}

	parts := splitStringEscaped(string(s), delim)
	result := make(Array, len(parts))

	for i := range parts {
		if parts[i] == "*" {
			// "*" matches exactly one part; model it as a fresh wildcard var.
			result[i] = VarTerm("$globwildcard")
		} else {
			// Reject parts containing unescaped glob metacharacters; escaped
			// ones (preceded by an odd number of backslashes) are literal.
			var escaped bool
			for _, c := range parts[i] {
				if c == '\\' {
					escaped = !escaped
					continue
				}
				if !escaped {
					switch c {
					case '[', '?', '{', '*':
						// TODO(tsandall): super glob and character pattern
						// matching not supported yet.
						return nil
					}
				}
				escaped = false
			}
			result[i] = StringTerm(parts[i])
		}
	}

	return NewTerm(result)
}
|
||||
|
||||
// splitStringEscaped splits s on any character contained in delim, except
// where the delimiter character has been escaped with a reverse solidus.
// Escape characters are preserved in the returned parts.
func splitStringEscaped(s string, delim string) []string {

	var parts []string
	start := 0
	escaped := false

	for i := 0; i < len(s); i++ {
		switch {
		case escaped:
			// Previous byte was an unconsumed backslash: this byte is literal.
			escaped = false
		case s[i] == '\\':
			escaped = true
		case strings.ContainsRune(delim, rune(s[i])):
			parts = append(parts, s[start:i])
			start = i + 1
		}
	}

	return append(parts, s[start:])
}
|
||||
|
||||
func stringSliceToArray(s []string) (result Array) {
|
||||
result = make(Array, len(s))
|
||||
for i := range s {
|
||||
result[i] = StringTerm(s[i])
|
||||
}
|
||||
return
|
||||
}
|
||||
133
vendor/github.com/open-policy-agent/opa/ast/map.go
generated
vendored
Normal file
133
vendor/github.com/open-policy-agent/opa/ast/map.go
generated
vendored
Normal file
@@ -0,0 +1,133 @@
|
||||
// Copyright 2016 The OPA Authors. All rights reserved.
|
||||
// Use of this source code is governed by an Apache2
|
||||
// license that can be found in the LICENSE file.
|
||||
|
||||
package ast
|
||||
|
||||
import (
|
||||
"encoding/json"
|
||||
|
||||
"github.com/open-policy-agent/opa/util"
|
||||
)
|
||||
|
||||
// ValueMap represents a key/value map between AST term values. Any type of term
// can be used as a key in the map.
//
// A nil *ValueMap is treated as an empty, read-only map by the methods below;
// only Put panics on a nil receiver.
type ValueMap struct {
	hashMap *util.HashMap // underlying hash map keyed by valueHash/valueEq
}
|
||||
|
||||
// NewValueMap returns a new ValueMap.
|
||||
func NewValueMap() *ValueMap {
|
||||
vs := &ValueMap{
|
||||
hashMap: util.NewHashMap(valueEq, valueHash),
|
||||
}
|
||||
return vs
|
||||
}
|
||||
|
||||
// MarshalJSON provides a custom marshaller for the ValueMap which
// will include the key, value, and value type.
//
// NOTE(review): element order follows hashMap iteration order, which is
// presumably not deterministic — confirm before relying on stable output.
func (vs *ValueMap) MarshalJSON() ([]byte, error) {
	var tmp []map[string]interface{}
	vs.Iter(func(k Value, v Value) bool {
		tmp = append(tmp, map[string]interface{}{
			"name": k.String(),
			"type": TypeName(v),
			"value": v,
		})
		// Returning false continues iteration over all entries.
		return false
	})
	return json.Marshal(tmp)
}
|
||||
|
||||
// Copy returns a shallow copy of the ValueMap.
|
||||
func (vs *ValueMap) Copy() *ValueMap {
|
||||
if vs == nil {
|
||||
return nil
|
||||
}
|
||||
cpy := NewValueMap()
|
||||
cpy.hashMap = vs.hashMap.Copy()
|
||||
return cpy
|
||||
}
|
||||
|
||||
// Equal returns true if this ValueMap equals the other.
|
||||
func (vs *ValueMap) Equal(other *ValueMap) bool {
|
||||
if vs == nil {
|
||||
return other == nil || other.Len() == 0
|
||||
}
|
||||
if other == nil {
|
||||
return vs == nil || vs.Len() == 0
|
||||
}
|
||||
return vs.hashMap.Equal(other.hashMap)
|
||||
}
|
||||
|
||||
// Len returns the number of elements in the map. A nil map has length zero.
func (vs *ValueMap) Len() int {
	if vs == nil {
		return 0
	}
	return vs.hashMap.Len()
}
|
||||
|
||||
// Get returns the value in the map for k, or nil if k is absent (or the map
// itself is nil).
func (vs *ValueMap) Get(k Value) Value {
	if vs != nil {
		if v, ok := vs.hashMap.Get(k); ok {
			return v.(Value)
		}
	}
	return nil
}
|
||||
|
||||
// Hash returns a hash code for this ValueMap. A nil map hashes to zero.
func (vs *ValueMap) Hash() int {
	if vs == nil {
		return 0
	}
	return vs.hashMap.Hash()
}
|
||||
|
||||
// Iter calls the iter function for each key/value pair in the map. If the iter
|
||||
// function returns true, iteration stops.
|
||||
func (vs *ValueMap) Iter(iter func(Value, Value) bool) bool {
|
||||
if vs == nil {
|
||||
return false
|
||||
}
|
||||
return vs.hashMap.Iter(func(kt, vt util.T) bool {
|
||||
k := kt.(Value)
|
||||
v := vt.(Value)
|
||||
return iter(k, v)
|
||||
})
|
||||
}
|
||||
|
||||
// Put inserts a key k into the map with value v.
// Put panics on a nil receiver: a usable map must come from NewValueMap.
func (vs *ValueMap) Put(k, v Value) {
	if vs == nil {
		panic("put on nil value map")
	}
	vs.hashMap.Put(k, v)
}
|
||||
|
||||
// Delete removes a key k from the map. Deleting from a nil map is a no-op.
func (vs *ValueMap) Delete(k Value) {
	if vs == nil {
		return
	}
	vs.hashMap.Delete(k)
}
|
||||
|
||||
// String returns a string representation of the map. A nil map prints as "{}".
func (vs *ValueMap) String() string {
	if vs == nil {
		return "{}"
	}
	return vs.hashMap.String()
}
|
||||
|
||||
// valueHash is the hash function supplied to the underlying util.HashMap; it
// delegates to the AST value's own Hash method.
func valueHash(v util.T) int {
	return v.(Value).Hash()
}
|
||||
|
||||
func valueEq(a, b util.T) bool {
|
||||
av := a.(Value)
|
||||
bv := b.(Value)
|
||||
return av.Compare(bv) == 0
|
||||
}
|
||||
5150
vendor/github.com/open-policy-agent/opa/ast/parser.go
generated
vendored
Normal file
5150
vendor/github.com/open-policy-agent/opa/ast/parser.go
generated
vendored
Normal file
File diff suppressed because it is too large
Load Diff
838
vendor/github.com/open-policy-agent/opa/ast/parser_ext.go
generated
vendored
Normal file
838
vendor/github.com/open-policy-agent/opa/ast/parser_ext.go
generated
vendored
Normal file
@@ -0,0 +1,838 @@
|
||||
// Copyright 2016 The OPA Authors. All rights reserved.
|
||||
// Use of this source code is governed by an Apache2
|
||||
// license that can be found in the LICENSE file.
|
||||
|
||||
// This file contains extra functions for parsing Rego.
|
||||
// Most of the parsing is handled by the auto-generated code in
|
||||
// parser.go, however, there are additional utilities that are
|
||||
// helpful for dealing with Rego source inputs (e.g., REPL
|
||||
// statements, source files, etc.)
|
||||
|
||||
package ast
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"sort"
|
||||
"strings"
|
||||
"unicode"
|
||||
|
||||
"github.com/pkg/errors"
|
||||
)
|
||||
|
||||
// MustParseBody returns a parsed body.
|
||||
// If an error occurs during parsing, panic.
|
||||
func MustParseBody(input string) Body {
|
||||
parsed, err := ParseBody(input)
|
||||
if err != nil {
|
||||
panic(err)
|
||||
}
|
||||
return parsed
|
||||
}
|
||||
|
||||
// MustParseExpr returns a parsed expression.
|
||||
// If an error occurs during parsing, panic.
|
||||
func MustParseExpr(input string) *Expr {
|
||||
parsed, err := ParseExpr(input)
|
||||
if err != nil {
|
||||
panic(err)
|
||||
}
|
||||
return parsed
|
||||
}
|
||||
|
||||
// MustParseImports returns a slice of imports.
|
||||
// If an error occurs during parsing, panic.
|
||||
func MustParseImports(input string) []*Import {
|
||||
parsed, err := ParseImports(input)
|
||||
if err != nil {
|
||||
panic(err)
|
||||
}
|
||||
return parsed
|
||||
}
|
||||
|
||||
// MustParseModule returns a parsed module.
|
||||
// If an error occurs during parsing, panic.
|
||||
func MustParseModule(input string) *Module {
|
||||
parsed, err := ParseModule("", input)
|
||||
if err != nil {
|
||||
panic(err)
|
||||
}
|
||||
return parsed
|
||||
}
|
||||
|
||||
// MustParsePackage returns a Package.
|
||||
// If an error occurs during parsing, panic.
|
||||
func MustParsePackage(input string) *Package {
|
||||
parsed, err := ParsePackage(input)
|
||||
if err != nil {
|
||||
panic(err)
|
||||
}
|
||||
return parsed
|
||||
}
|
||||
|
||||
// MustParseStatements returns a slice of parsed statements.
|
||||
// If an error occurs during parsing, panic.
|
||||
func MustParseStatements(input string) []Statement {
|
||||
parsed, _, err := ParseStatements("", input)
|
||||
if err != nil {
|
||||
panic(err)
|
||||
}
|
||||
return parsed
|
||||
}
|
||||
|
||||
// MustParseStatement returns exactly one statement.
|
||||
// If an error occurs during parsing, panic.
|
||||
func MustParseStatement(input string) Statement {
|
||||
parsed, err := ParseStatement(input)
|
||||
if err != nil {
|
||||
panic(err)
|
||||
}
|
||||
return parsed
|
||||
}
|
||||
|
||||
// MustParseRef returns a parsed reference.
|
||||
// If an error occurs during parsing, panic.
|
||||
func MustParseRef(input string) Ref {
|
||||
parsed, err := ParseRef(input)
|
||||
if err != nil {
|
||||
panic(err)
|
||||
}
|
||||
return parsed
|
||||
}
|
||||
|
||||
// MustParseRule returns a parsed rule.
|
||||
// If an error occurs during parsing, panic.
|
||||
func MustParseRule(input string) *Rule {
|
||||
parsed, err := ParseRule(input)
|
||||
if err != nil {
|
||||
panic(err)
|
||||
}
|
||||
return parsed
|
||||
}
|
||||
|
||||
// MustParseTerm returns a parsed term.
|
||||
// If an error occurs during parsing, panic.
|
||||
func MustParseTerm(input string) *Term {
|
||||
parsed, err := ParseTerm(input)
|
||||
if err != nil {
|
||||
panic(err)
|
||||
}
|
||||
return parsed
|
||||
}
|
||||
|
||||
// ParseRuleFromBody returns a rule if the body can be interpreted as a rule
|
||||
// definition. Otherwise, an error is returned.
|
||||
func ParseRuleFromBody(module *Module, body Body) (*Rule, error) {
|
||||
|
||||
if len(body) != 1 {
|
||||
return nil, fmt.Errorf("multiple expressions cannot be used for rule head")
|
||||
}
|
||||
|
||||
return ParseRuleFromExpr(module, body[0])
|
||||
}
|
||||
|
||||
// ParseRuleFromExpr returns a rule if the expression can be interpreted as a
// rule definition. It tries, in order: partial set (single ref term),
// assignment (:=), equality (=) as complete doc / function / partial object,
// and finally a bare call treated as a function returning true.
func ParseRuleFromExpr(module *Module, expr *Expr) (*Rule, error) {

	if len(expr.With) > 0 {
		return nil, fmt.Errorf("expressions using with keyword cannot be used for rule head")
	}

	if expr.Negated {
		return nil, fmt.Errorf("negated expressions cannot be used for rule head")
	}

	if _, ok := expr.Terms.(*SomeDecl); ok {
		return nil, errors.New("some declarations cannot be used for rule head")
	}

	// A single term can only define a partial set document (e.g. p[x]).
	if term, ok := expr.Terms.(*Term); ok {
		switch v := term.Value.(type) {
		case Ref:
			return ParsePartialSetDocRuleFromTerm(module, term)
		default:
			return nil, fmt.Errorf("%v cannot be used for rule name", TypeName(v))
		}
	}

	if _, ok := expr.Terms.([]*Term); !ok {
		// This is a defensive check in case other kinds of expression terms are
		// introduced in the future.
		return nil, errors.New("expression cannot be used for rule head")
	}

	if expr.IsAssignment() {

		lhs, rhs := expr.Operand(0), expr.Operand(1)
		rule, err := ParseCompleteDocRuleFromAssignmentExpr(module, lhs, rhs)

		// On failure, report the most specific error for the LHS shape:
		// calls and refs have dedicated messages.
		if err == nil {
			return rule, nil
		} else if _, ok := lhs.Value.(Call); ok {
			return nil, errFunctionAssignOperator
		} else if _, ok := lhs.Value.(Ref); ok {
			return nil, errPartialRuleAssignOperator
		}

		return nil, errTermAssignOperator(lhs.Value)
	}

	if expr.IsEquality() {

		// Equality may define a complete document, a function, or a partial
		// object document; try each interpretation in turn.
		lhs, rhs := expr.Operand(0), expr.Operand(1)
		rule, err := ParseCompleteDocRuleFromEqExpr(module, lhs, rhs)

		if err == nil {
			return rule, nil
		}

		rule, err = ParseRuleFromCallEqExpr(module, lhs, rhs)
		if err == nil {
			return rule, nil
		}

		return ParsePartialObjectDocRuleFromEqExpr(module, lhs, rhs)
	}

	if _, ok := BuiltinMap[expr.Operator().String()]; ok {
		return nil, fmt.Errorf("rule name conflicts with built-in function")
	}

	return ParseRuleFromCallExpr(module, expr.Terms.([]*Term))
}
|
||||
|
||||
// ParseCompleteDocRuleFromAssignmentExpr returns a rule if the expression can
|
||||
// be interpreted as a complete document definition declared with the assignment
|
||||
// operator.
|
||||
func ParseCompleteDocRuleFromAssignmentExpr(module *Module, lhs, rhs *Term) (*Rule, error) {
|
||||
|
||||
rule, err := ParseCompleteDocRuleFromEqExpr(module, lhs, rhs)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
rule.Head.Assign = true
|
||||
|
||||
return rule, nil
|
||||
}
|
||||
|
||||
// ParseCompleteDocRuleFromEqExpr returns a rule if the expression can be
// interpreted as a complete document definition: the LHS must be a var or a
// root document ref, and the RHS becomes the rule's value with a body of
// `true`.
func ParseCompleteDocRuleFromEqExpr(module *Module, lhs, rhs *Term) (*Rule, error) {

	var name Var

	if RootDocumentRefs.Contains(lhs) {
		// Root documents (e.g. input) may be redefined; use the root
		// document's var as the rule name.
		name = lhs.Value.(Ref)[0].Value.(Var)
	} else if v, ok := lhs.Value.(Var); ok {
		name = v
	} else {
		return nil, fmt.Errorf("%v cannot be used for rule name", TypeName(lhs.Value))
	}

	rule := &Rule{
		Location: rhs.Location,
		Head: &Head{
			Location: rhs.Location,
			Name:     name,
			Value:    rhs,
		},
		// The generated body is just `true`, located at the RHS for error
		// reporting purposes.
		Body: NewBody(
			NewExpr(BooleanTerm(true).SetLocation(rhs.Location)).SetLocation(rhs.Location),
		),
		Module: module,
	}

	return rule, nil
}
|
||||
|
||||
// ParsePartialObjectDocRuleFromEqExpr returns a rule if the expression can be
// interpreted as a partial object document definition (e.g. p[k] = v).
func ParsePartialObjectDocRuleFromEqExpr(module *Module, lhs, rhs *Term) (*Rule, error) {

	ref, ok := lhs.Value.(Ref)
	if !ok || len(ref) != 2 {
		return nil, fmt.Errorf("%v cannot be used for rule name", TypeName(lhs.Value))
	}

	// NOTE(review): assumes the ref head is a Var — presumably guaranteed by
	// the grammar for parsed input; a non-var head would panic here. Confirm
	// for programmatically constructed terms.
	name := ref[0].Value.(Var)
	key := ref[1]

	rule := &Rule{
		Location: rhs.Location,
		Head: &Head{
			Location: rhs.Location,
			Name:     name,
			Key:      key,
			Value:    rhs,
		},
		Body: NewBody(
			NewExpr(BooleanTerm(true).SetLocation(rhs.Location)).SetLocation(rhs.Location),
		),
		Module: module,
	}

	return rule, nil
}
|
||||
|
||||
// ParsePartialSetDocRuleFromTerm returns a rule if the term can be interpreted
// as a partial set document definition (e.g. p[x]): a two-element ref whose
// second element becomes the set key.
func ParsePartialSetDocRuleFromTerm(module *Module, term *Term) (*Rule, error) {

	ref, ok := term.Value.(Ref)
	if !ok {
		return nil, fmt.Errorf("%vs cannot be used for rule head", TypeName(term.Value))
	}

	// Longer refs would define nested documents, which rules cannot do.
	if len(ref) != 2 {
		return nil, fmt.Errorf("refs cannot be used for rule")
	}

	rule := &Rule{
		Location: term.Location,
		Head: &Head{
			Location: term.Location,
			// NOTE(review): assumes the ref head is a Var — presumably
			// guaranteed by the grammar; a non-var head would panic here.
			Name: ref[0].Value.(Var),
			Key:  ref[1],
		},
		Body: NewBody(
			NewExpr(BooleanTerm(true).SetLocation(term.Location)).SetLocation(term.Location),
		),
		Module: module,
	}

	return rule, nil
}
|
||||
|
||||
// ParseRuleFromCallEqExpr returns a rule if the term can be interpreted as a
// function definition (e.g., f(x) = y => f(x) = y { true }).
func ParseRuleFromCallEqExpr(module *Module, lhs, rhs *Term) (*Rule, error) {

	call, ok := lhs.Value.(Call)
	if !ok {
		return nil, fmt.Errorf("must be call")
	}

	rule := &Rule{
		Location: lhs.Location,
		Head: &Head{
			Location: lhs.Location,
			// NOTE(review): assumes the call operator (call[0]) is a ref whose
			// head is a var — presumably guaranteed by the grammar; any other
			// shape would panic here.
			Name:  call[0].Value.(Ref)[0].Value.(Var),
			Args:  Args(call[1:]),
			Value: rhs,
		},
		Body:   NewBody(NewExpr(BooleanTerm(true).SetLocation(rhs.Location)).SetLocation(rhs.Location)),
		Module: module,
	}

	return rule, nil
}
|
||||
|
||||
// ParseRuleFromCallExpr returns a rule if the terms can be interpreted as a
// function returning true or some value (e.g., f(x) => f(x) = true { true }).
func ParseRuleFromCallExpr(module *Module, terms []*Term) (*Rule, error) {

	if len(terms) <= 1 {
		return nil, fmt.Errorf("rule argument list must take at least one argument")
	}

	loc := terms[0].Location
	args := terms[1:]
	// Functions declared without an explicit value default to true.
	value := BooleanTerm(true).SetLocation(loc)

	rule := &Rule{
		Location: loc,
		Head: &Head{
			Location: loc,
			Name:     Var(terms[0].String()),
			Args:     args,
			Value:    value,
		},
		Module: module,
		Body:   NewBody(NewExpr(BooleanTerm(true).SetLocation(loc)).SetLocation(loc)),
	}
	return rule, nil
}
|
||||
|
||||
// ParseImports returns a slice of Import objects.
|
||||
func ParseImports(input string) ([]*Import, error) {
|
||||
stmts, _, err := ParseStatements("", input)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
result := []*Import{}
|
||||
for _, stmt := range stmts {
|
||||
if imp, ok := stmt.(*Import); ok {
|
||||
result = append(result, imp)
|
||||
} else {
|
||||
return nil, fmt.Errorf("expected import but got %T", stmt)
|
||||
}
|
||||
}
|
||||
return result, nil
|
||||
}
|
||||
|
||||
// ParseModule returns a parsed Module object.
|
||||
// For details on Module objects and their fields, see policy.go.
|
||||
// Empty input will return nil, nil.
|
||||
func ParseModule(filename, input string) (*Module, error) {
|
||||
stmts, comments, err := ParseStatements(filename, input)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
return parseModule(filename, stmts, comments)
|
||||
}
|
||||
|
||||
// ParseBody returns exactly one body.
// If multiple bodies are parsed, an error is returned.
func ParseBody(input string) (Body, error) {
	stmts, _, err := ParseStatements("", input)
	if err != nil {
		return nil, err
	}

	result := Body{}

	for _, stmt := range stmts {
		switch stmt := stmt.(type) {
		case Body:
			// Multiple parsed bodies are concatenated into one.
			result = append(result, stmt...)
		case *Comment:
			// skip
		default:
			return nil, fmt.Errorf("expected body but got %T", stmt)
		}
	}

	// Assign expression indices across the concatenated result.
	setExprIndices(result)

	return result, nil
}
|
||||
|
||||
// ParseExpr returns exactly one expression.
|
||||
// If multiple expressions are parsed, an error is returned.
|
||||
func ParseExpr(input string) (*Expr, error) {
|
||||
body, err := ParseBody(input)
|
||||
if err != nil {
|
||||
return nil, errors.Wrap(err, "failed to parse expression")
|
||||
}
|
||||
if len(body) != 1 {
|
||||
return nil, fmt.Errorf("expected exactly one expression but got: %v", body)
|
||||
}
|
||||
return body[0], nil
|
||||
}
|
||||
|
||||
// ParsePackage returns exactly one Package.
|
||||
// If multiple statements are parsed, an error is returned.
|
||||
func ParsePackage(input string) (*Package, error) {
|
||||
stmt, err := ParseStatement(input)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
pkg, ok := stmt.(*Package)
|
||||
if !ok {
|
||||
return nil, fmt.Errorf("expected package but got %T", stmt)
|
||||
}
|
||||
return pkg, nil
|
||||
}
|
||||
|
||||
// ParseTerm returns exactly one term.
|
||||
// If multiple terms are parsed, an error is returned.
|
||||
func ParseTerm(input string) (*Term, error) {
|
||||
body, err := ParseBody(input)
|
||||
if err != nil {
|
||||
return nil, errors.Wrap(err, "failed to parse term")
|
||||
}
|
||||
if len(body) != 1 {
|
||||
return nil, fmt.Errorf("expected exactly one term but got: %v", body)
|
||||
}
|
||||
term, ok := body[0].Terms.(*Term)
|
||||
if !ok {
|
||||
return nil, fmt.Errorf("expected term but got %v", body[0].Terms)
|
||||
}
|
||||
return term, nil
|
||||
}
|
||||
|
||||
// ParseRef returns exactly one reference.
|
||||
func ParseRef(input string) (Ref, error) {
|
||||
term, err := ParseTerm(input)
|
||||
if err != nil {
|
||||
return nil, errors.Wrap(err, "failed to parse ref")
|
||||
}
|
||||
ref, ok := term.Value.(Ref)
|
||||
if !ok {
|
||||
return nil, fmt.Errorf("expected ref but got %v", term)
|
||||
}
|
||||
return ref, nil
|
||||
}
|
||||
|
||||
// ParseRule returns exactly one rule.
|
||||
// If multiple rules are parsed, an error is returned.
|
||||
func ParseRule(input string) (*Rule, error) {
|
||||
stmts, _, err := ParseStatements("", input)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
if len(stmts) != 1 {
|
||||
return nil, fmt.Errorf("expected exactly one statement (rule)")
|
||||
}
|
||||
rule, ok := stmts[0].(*Rule)
|
||||
if !ok {
|
||||
return nil, fmt.Errorf("expected rule but got %T", stmts[0])
|
||||
}
|
||||
return rule, nil
|
||||
}
|
||||
|
||||
// ParseStatement returns exactly one statement.
|
||||
// A statement might be a term, expression, rule, etc. Regardless,
|
||||
// this function expects *exactly* one statement. If multiple
|
||||
// statements are parsed, an error is returned.
|
||||
func ParseStatement(input string) (Statement, error) {
|
||||
stmts, _, err := ParseStatements("", input)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
if len(stmts) != 1 {
|
||||
return nil, fmt.Errorf("expected exactly one statement")
|
||||
}
|
||||
return stmts[0], nil
|
||||
}
|
||||
|
||||
// CommentsOption returns a parser option to initialize the comments store within
// the parser. Comments are collected into a map keyed by their source
// position so duplicates at the same position collapse to one entry.
func CommentsOption() Option {
	return GlobalStore(commentsKey, map[commentKey]*Comment{})
}
|
||||
|
||||
// commentKey uniquely identifies a comment by its source position.
type commentKey struct {
	File string
	Row  int
	Col  int
}
|
||||
|
||||
func (a commentKey) Compare(other commentKey) int {
|
||||
if a.File < other.File {
|
||||
return -1
|
||||
} else if a.File > other.File {
|
||||
return 1
|
||||
} else if a.Row < other.Row {
|
||||
return -1
|
||||
} else if a.Row > other.Row {
|
||||
return 1
|
||||
} else if a.Col < other.Col {
|
||||
return -1
|
||||
} else if a.Col > other.Col {
|
||||
return 1
|
||||
}
|
||||
return 0
|
||||
}
|
||||
|
||||
// ParseStatements returns a slice of parsed statements.
// This is the default return value from the parser.
func ParseStatements(filename, input string) ([]Statement, []*Comment, error) {

	bs := []byte(input)

	parsed, err := Parse(filename, bs, GlobalStore(filenameKey, filename), CommentsOption())
	if err != nil {
		return nil, nil, formatParserErrors(filename, bs, err)
	}

	var comments []*Comment
	var sl []interface{}
	if p, ok := parsed.(program); ok {
		sl = p.buf
		// Comments were collected in a map keyed by position; sort the keys
		// so the returned slice is in source order.
		commentMap := p.comments.(map[commentKey]*Comment)
		commentKeys := []commentKey{}
		for k := range commentMap {
			commentKeys = append(commentKeys, k)
		}
		sort.Slice(commentKeys, func(i, j int) bool {
			return commentKeys[i].Compare(commentKeys[j]) < 0
		})
		for _, k := range commentKeys {
			comments = append(comments, commentMap[k])
		}
	} else {
		sl = parsed.([]interface{})
	}
	stmts := make([]Statement, 0, len(sl))

	for _, x := range sl {
		if rules, ok := x.([]*Rule); ok {
			// A single source-level rule may parse to multiple Rule values;
			// flatten them into the statement list.
			for _, rule := range rules {
				stmts = append(stmts, rule)
			}
		} else {
			// Unchecked cast should be safe. A panic indicates grammar is
			// out-of-sync.
			stmts = append(stmts, x.(Statement))
		}
	}

	return stmts, comments, postProcess(filename, stmts)
}
|
||||
|
||||
// formatParserErrors converts the pigeon parser's errList into this package's
// Errors, attaching a location and source-line details to each entry.
func formatParserErrors(filename string, bs []byte, err error) error {
	// Errors returned by the parser are always of type errList and the errList
	// always contains *parserError.
	// https://godoc.org/github.com/mna/pigeon#hdr-Error_reporting.
	errs := err.(errList)
	r := make(Errors, len(errs))
	for i, e := range errs {
		r[i] = formatParserError(filename, bs, e.(*parserError))
	}
	return r
}
|
||||
|
||||
func formatParserError(filename string, bs []byte, e *parserError) *Error {
|
||||
loc := NewLocation(nil, filename, e.pos.line, e.pos.col)
|
||||
inner := e.Inner.Error()
|
||||
idx := strings.Index(inner, "no match found")
|
||||
if idx >= 0 {
|
||||
// Match errors end with "no match found, expected: ...". We do not want to
|
||||
// include ", expected: ..." as it does not provide any value, so truncate the
|
||||
// string here.
|
||||
inner = inner[:idx+14]
|
||||
}
|
||||
err := NewError(ParseErr, loc, inner)
|
||||
err.Details = newParserErrorDetail(bs, e.pos)
|
||||
return err
|
||||
}
|
||||
|
||||
// parseModule assembles a Module from parsed statements: the first statement
// must be the package declaration; subsequent statements become imports and
// rules. Bare bodies are reinterpreted as rule definitions where possible.
// All errors are accumulated and returned together.
func parseModule(filename string, stmts []Statement, comments []*Comment) (*Module, error) {

	if len(stmts) == 0 {
		return nil, NewError(ParseErr, &Location{File: filename}, "empty module")
	}

	var errs Errors

	_package, ok := stmts[0].(*Package)
	if !ok {
		loc := stmts[0].(Statement).Loc()
		errs = append(errs, NewError(ParseErr, loc, "package expected"))
	}

	mod := &Module{
		Package: _package,
	}

	// The comments slice only holds comments that were not their own statements.
	mod.Comments = append(mod.Comments, comments...)

	for _, stmt := range stmts[1:] {
		switch stmt := stmt.(type) {
		case *Import:
			mod.Imports = append(mod.Imports, stmt)
		case *Rule:
			setRuleModule(stmt, mod)
			mod.Rules = append(mod.Rules, stmt)
		case Body:
			// A bare body such as `p = 1` is shorthand for a rule definition.
			rule, err := ParseRuleFromBody(mod, stmt)
			if err != nil {
				errs = append(errs, NewError(ParseErr, stmt[0].Location, err.Error()))
			} else {
				mod.Rules = append(mod.Rules, rule)
			}
		case *Package:
			errs = append(errs, NewError(ParseErr, stmt.Loc(), "unexpected package"))
		case *Comment: // Ignore comments, they're handled above.
		default:
			panic("illegal value") // Indicates grammar is out-of-sync with code.
		}
	}

	if len(errs) == 0 {
		return mod, nil
	}

	return nil, errs
}
|
||||
|
||||
// postProcess rewrites freshly parsed statements in place: the data and input
// root document vars become full refs, wildcards get unique names, and
// expression indices are assigned.
func postProcess(filename string, stmts []Statement) error {

	if err := mangleDataVars(stmts); err != nil {
		return err
	}

	if err := mangleInputVars(stmts); err != nil {
		return err
	}

	mangleWildcards(stmts)
	mangleExprIndices(stmts)

	return nil
}
|
||||
|
||||
func mangleDataVars(stmts []Statement) error {
|
||||
for i := range stmts {
|
||||
vt := newVarToRefTransformer(DefaultRootDocument.Value.(Var), DefaultRootRef.Copy())
|
||||
stmt, err := Transform(vt, stmts[i])
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
stmts[i] = stmt.(Statement)
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
func mangleInputVars(stmts []Statement) error {
|
||||
for i := range stmts {
|
||||
vt := newVarToRefTransformer(InputRootDocument.Value.(Var), InputRootRef.Copy())
|
||||
stmt, err := Transform(vt, stmts[i])
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
stmts[i] = stmt.(Statement)
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
func mangleExprIndices(stmts []Statement) {
|
||||
for _, stmt := range stmts {
|
||||
setExprIndices(stmt)
|
||||
}
|
||||
}
|
||||
|
||||
// setExprIndices walks every body under x and records each expression's
// position within its body on the expression itself.
func setExprIndices(x interface{}) {
	WalkBodies(x, func(b Body) bool {
		for i, expr := range b {
			expr.Index = i
		}
		// Returning false continues walking nested bodies.
		return false
	})
}
|
||||
|
||||
// mangleWildcards rewrites each wildcard var into a uniquely named variable.
// A single mangler is shared across all statements so the generated names are
// unique across the whole input.
func mangleWildcards(stmts []Statement) {
	m := &wildcardMangler{}
	for i := range stmts {
		// The error is safely ignored: wildcardMangler.Transform below never
		// returns a non-nil error.
		stmt, _ := Transform(m, stmts[i])
		stmts[i] = stmt.(Statement)
	}
}
|
||||
|
||||
// wildcardMangler renames wildcard variables using a monotonically increasing
// counter so that each occurrence becomes a distinct variable.
type wildcardMangler struct {
	c int // number of wildcards renamed so far; used to build unique names
}
|
||||
|
||||
func (m *wildcardMangler) Transform(x interface{}) (interface{}, error) {
|
||||
if term, ok := x.(Var); ok {
|
||||
if term.Equal(Wildcard.Value) {
|
||||
name := fmt.Sprintf("%s%d", WildcardPrefix, m.c)
|
||||
m.c++
|
||||
return Var(name), nil
|
||||
}
|
||||
}
|
||||
return x, nil
|
||||
}
|
||||
|
||||
func setRuleModule(rule *Rule, module *Module) {
|
||||
rule.Module = module
|
||||
if rule.Else != nil {
|
||||
setRuleModule(rule.Else, module)
|
||||
}
|
||||
}
|
||||
|
||||
// varToRefTransformer replaces occurrences of a var (e.g. data or input) with
// a full reference to the corresponding root document.
type varToRefTransformer struct {
	orig   Var // the var to replace
	target Ref // the replacement ref
	// skip set to true to avoid recursively processing the result of
	// transformation.
	skip bool
}
|
||||
|
||||
func newVarToRefTransformer(orig Var, target Ref) *varToRefTransformer {
|
||||
return &varToRefTransformer{
|
||||
orig: orig,
|
||||
target: target,
|
||||
skip: false,
|
||||
}
|
||||
}
|
||||
|
||||
// Transform replaces matching vars with the target ref, skipping positions
// where a var does not denote a document: rule names, ref heads, and the
// freshly inserted replacement itself. It never returns an error.
func (vt *varToRefTransformer) Transform(x interface{}) (interface{}, error) {
	if vt.skip {
		vt.skip = false
		return x, nil
	}
	switch x := x.(type) {
	case *Head:
		// The next AST node will be the rule name (which should not be
		// transformed).
		vt.skip = true
	case Ref:
		// The next AST node will be the ref head (which should not be
		// transformed).
		vt.skip = true
	case Var:
		if x.Equal(vt.orig) {
			// Skip the replacement so its own head var is not rewritten again.
			vt.skip = true
			return vt.target, nil
		}
	}
	return x, nil
}
|
||||
|
||||
// ParserErrorDetail holds additional details for parser errors.
type ParserErrorDetail struct {
	Line string `json:"line"` // source line on which the error occurred
	Idx  int    `json:"idx"`  // byte index of the error within Line
}
|
||||
|
||||
func newParserErrorDetail(bs []byte, pos position) *ParserErrorDetail {
|
||||
|
||||
offset := pos.offset
|
||||
|
||||
// Find first non-space character at or before offset position.
|
||||
if offset >= len(bs) {
|
||||
offset = len(bs) - 1
|
||||
} else if offset < 0 {
|
||||
offset = 0
|
||||
}
|
||||
|
||||
for offset > 0 && unicode.IsSpace(rune(bs[offset])) {
|
||||
offset--
|
||||
}
|
||||
|
||||
// Find beginning of line containing offset.
|
||||
begin := offset
|
||||
|
||||
for begin > 0 && !isNewLineChar(bs[begin]) {
|
||||
begin--
|
||||
}
|
||||
|
||||
if isNewLineChar(bs[begin]) {
|
||||
begin++
|
||||
}
|
||||
|
||||
// Find end of line containing offset.
|
||||
end := offset
|
||||
|
||||
for end < len(bs) && !isNewLineChar(bs[end]) {
|
||||
end++
|
||||
}
|
||||
|
||||
if begin > end {
|
||||
begin = end
|
||||
}
|
||||
|
||||
// Extract line and compute index of offset byte in line.
|
||||
line := bs[begin:end]
|
||||
index := offset - begin
|
||||
|
||||
return &ParserErrorDetail{
|
||||
Line: string(line),
|
||||
Idx: index,
|
||||
}
|
||||
}
|
||||
|
||||
// Lines returns the pretty formatted line output for the error details.
|
||||
func (d ParserErrorDetail) Lines() []string {
|
||||
line := strings.TrimLeft(d.Line, "\t") // remove leading tabs
|
||||
tabCount := len(d.Line) - len(line)
|
||||
return []string{line, strings.Repeat(" ", d.Idx-tabCount) + "^"}
|
||||
}
|
||||
|
||||
// isNewLineChar reports whether b is a carriage return or line feed.
func isNewLineChar(b byte) bool {
	switch b {
	case '\r', '\n':
		return true
	}
	return false
}
|
||||
620
vendor/github.com/open-policy-agent/opa/ast/parser_internal.go
generated
vendored
Normal file
620
vendor/github.com/open-policy-agent/opa/ast/parser_internal.go
generated
vendored
Normal file
@@ -0,0 +1,620 @@
|
||||
// Copyright 2018 The OPA Authors. All rights reserved.
|
||||
// Use of this source code is governed by an Apache2
|
||||
// license that can be found in the LICENSE file.
|
||||
|
||||
package ast
|
||||
|
||||
import (
|
||||
"bytes"
|
||||
"encoding/json"
|
||||
"fmt"
|
||||
"math/big"
|
||||
)
|
||||
|
||||
// Global-store keys used by the generated parser to share state between
// grammar actions.
const (
	// commentsKey is the global map key for the comments slice.
	commentsKey = "comments"

	// filenameKey is the global map key for the filename.
	filenameKey = "filename"
)
|
||||
|
||||
// program is the intermediate result of parsing a policy: the parsed
// statements plus the comments accumulated during the parse.
type program struct {
	buf      []interface{} // parsed statements in source order
	comments interface{}   // comment map collected in the global store
}

// ruleExt captures a rule extension parsed after a rule's initial body:
// either an 'else' clause (term set) or a chained body (term nil).
type ruleExt struct {
	loc  *Location // location of the extension
	term *Term     // else value; nil for a chained body
	body Body      // body of the extension
}
|
||||
|
||||
// currentLocation converts the parser context to a Location object using
// the matched text, the filename from the global store, and the current
// line/column position.
func currentLocation(c *current) *Location {
	return NewLocation(c.text, c.globalStore[filenameKey].(string), c.pos.line, c.pos.col)
}
|
||||
|
||||
func makeProgram(c *current, vals interface{}) (interface{}, error) {
|
||||
var buf []interface{}
|
||||
if vals == nil {
|
||||
return buf, nil
|
||||
}
|
||||
ifaceSlice := vals.([]interface{})
|
||||
head := ifaceSlice[0]
|
||||
buf = append(buf, head)
|
||||
for _, tail := range ifaceSlice[1].([]interface{}) {
|
||||
stmt := tail.([]interface{})[1]
|
||||
buf = append(buf, stmt)
|
||||
}
|
||||
return program{buf, c.globalStore[commentsKey]}, nil
|
||||
}
|
||||
|
||||
// makePackage builds a *Package from the parsed package path, prefixing it
// with the default root document. Ref paths must be ground and may contain
// only string operands after the head.
func makePackage(loc *Location, value interface{}) (interface{}, error) {
	// All packages are implicitly declared under the default root document.
	term := value.(*Term)
	path := Ref{DefaultRootDocument.Copy().SetLocation(term.Location)}
	switch v := term.Value.(type) {
	case Ref:
		// Convert head of package Ref to String because it will be prefixed
		// with the root document variable.
		head := StringTerm(string(v[0].Value.(Var))).SetLocation(v[0].Location)
		tail := v[1:]
		if !tail.IsGround() {
			return nil, fmt.Errorf("package name cannot contain variables: %v", v)
		}

		// We do not allow non-string values in package names.
		// Because documents are typically represented as JSON, non-string keys are
		// not allowed for now.
		// TODO(tsandall): consider special syntax for namespacing under arrays.
		for _, p := range tail {
			_, ok := p.Value.(String)
			if !ok {
				return nil, fmt.Errorf("package name cannot contain non-string values: %v", v)
			}
		}
		path = append(path, head)
		path = append(path, tail...)
	case Var:
		// A bare variable package name becomes a single string component.
		s := StringTerm(string(v)).SetLocation(term.Location)
		path = append(path, s)
	}
	pkg := &Package{Location: loc, Path: path}
	return pkg, nil
}
|
||||
|
||||
func makeImport(loc *Location, path, alias interface{}) (interface{}, error) {
|
||||
imp := &Import{}
|
||||
imp.Location = loc
|
||||
imp.Path = path.(*Term)
|
||||
if err := IsValidImportPath(imp.Path.Value); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
if alias == nil {
|
||||
return imp, nil
|
||||
}
|
||||
aliasSlice := alias.([]interface{})
|
||||
// Import definition above describes the "alias" slice. We only care about the "Var" element.
|
||||
imp.Alias = aliasSlice[3].(*Term).Value.(Var)
|
||||
return imp, nil
|
||||
}
|
||||
|
||||
// makeDefaultRule builds the rule list for a `default` declaration. Default
// rules must use the = operator and their value must be constant: refs and
// variables are rejected except inside comprehension closures.
func makeDefaultRule(loc *Location, name, operator, value interface{}) (interface{}, error) {

	if string(operator.([]uint8)) == Assign.Infix {
		return nil, fmt.Errorf("default rules must use = operator (not := operator)")
	}

	term := value.(*Term)
	var err error

	// Walk the value looking for refs/vars; comprehensions are closures and
	// are skipped entirely.
	vis := NewGenericVisitor(func(x interface{}) bool {
		if err != nil {
			return true
		}
		switch x.(type) {
		case *ArrayComprehension, *ObjectComprehension, *SetComprehension: // skip closures
			return true
		case Ref, Var:
			err = fmt.Errorf("default rule value cannot contain %v", TypeName(x))
			return true
		}
		return false
	})

	vis.Walk(term)

	if err != nil {
		return nil, err
	}

	// Default rules carry a trivially true body.
	body := NewBody(NewExpr(BooleanTerm(true).SetLocation(loc)))

	rule := &Rule{
		Location: loc,
		Default:  true,
		Head: &Head{
			Location: loc,
			Name:     name.(*Term).Value.(Var),
			Value:    value.(*Term),
		},
		Body: body,
	}
	rule.Body[0].Location = loc

	return []*Rule{rule}, nil
}
|
||||
|
||||
// makeRule assembles the rules produced by a single rule definition: the
// initial head/body plus any extensions — chained bodies (head repeated for
// another body) appended to the result, or ordered 'else' clauses linked
// through the Else field.
func makeRule(loc *Location, head, rest interface{}) (interface{}, error) {

	if head == nil {
		return nil, nil
	}

	sl := rest.([]interface{})

	rules := []*Rule{
		{
			Location: loc,
			Head:     head.(*Head),
			Body:     sl[0].(Body),
		},
	}

	var ordered bool // set once an 'else' extension has been seen
	prev := rules[0]

	for i, elem := range sl[1].([]interface{}) {

		next := elem.([]interface{})
		re := next[1].(ruleExt)

		// Rules declared with := do not admit extensions.
		if rules[0].Head.Assign {
			return nil, errElseAssignOperator
		}

		if re.term == nil {
			// Chained body: duplicate the head with a new body. Not allowed
			// once an else chain has started.
			if ordered {
				return nil, fmt.Errorf("expected 'else' keyword")
			}
			rules = append(rules, &Rule{
				Location: re.loc,
				Head:     prev.Head.Copy(),
				Body:     re.body,
			})
		} else {
			// 'else' extension: only complete rules support else, and once
			// the chain is ordered every further extension must be an else.
			if (rules[0].Head.DocKind() != CompleteDoc) || (i != 0 && !ordered) {
				return nil, fmt.Errorf("unexpected 'else' keyword")
			}
			ordered = true
			curr := &Rule{
				Location: re.loc,
				Head: &Head{
					Name:     prev.Head.Name,
					Args:     prev.Head.Args.Copy(),
					Value:    re.term,
					Location: re.term.Location,
				},
				Body: re.body,
			}
			prev.Else = curr
			prev = curr
		}
	}

	return rules, nil
}
|
||||
|
||||
// makeRuleHead builds a *Head from the parsed rule name plus optional
// argument list, partial-rule key, and value. args and key are mutually
// exclusive; when both key and value are absent the value defaults to true.
func makeRuleHead(loc *Location, name, args, key, value interface{}) (interface{}, error) {

	head := &Head{}

	head.Location = loc
	head.Name = name.(*Term).Value.(Var)

	if args != nil && key != nil {
		return nil, fmt.Errorf("partial rules cannot take arguments")
	}

	if args != nil {
		argSlice := args.([]interface{})
		head.Args = argSlice[3].(Args)
	}

	if key != nil {
		keySlice := key.([]interface{})
		// Head definition above describes the "key" slice. We care about the "Term" element.
		head.Key = keySlice[3].(*Term)
	}

	if value != nil {
		valueSlice := value.([]interface{})
		operator := string(valueSlice[1].([]uint8))

		// := is only legal on complete rules without arguments.
		if operator == Assign.Infix {
			if head.Key != nil {
				return nil, errPartialRuleAssignOperator
			} else if len(head.Args) > 0 {
				return nil, errFunctionAssignOperator
			}
			head.Assign = true
		}

		// Head definition above describes the "value" slice. We care about the "Term" element.
		head.Value = valueSlice[len(valueSlice)-1].(*Term)
	}

	if key == nil && value == nil {
		// Bare rule head: p { ... } means p = true { ... }.
		head.Value = BooleanTerm(true).SetLocation(head.Location)
	}

	if key != nil && value != nil {
		// Partial object keys are restricted to strings, vars, and refs.
		switch head.Key.Value.(type) {
		case Var, String, Ref: // nop
		default:
			return nil, fmt.Errorf("object key must be string, var, or ref, not %v", TypeName(head.Key.Value))
		}
	}

	return head, nil
}
|
||||
|
||||
func makeArgs(list interface{}) (interface{}, error) {
|
||||
termSlice := list.([]*Term)
|
||||
args := make(Args, len(termSlice))
|
||||
for i := 0; i < len(args); i++ {
|
||||
args[i] = termSlice[i]
|
||||
}
|
||||
return args, nil
|
||||
}
|
||||
|
||||
func makeRuleExt(loc *Location, val, b interface{}) (interface{}, error) {
|
||||
bs := b.([]interface{})
|
||||
body := bs[1].(Body)
|
||||
|
||||
if val == nil {
|
||||
term := BooleanTerm(true)
|
||||
term.Location = loc
|
||||
return ruleExt{term.Location, term, body}, nil
|
||||
}
|
||||
|
||||
vs := val.([]interface{})
|
||||
t := vs[3].(*Term)
|
||||
return ruleExt{loc, t, body}, nil
|
||||
}
|
||||
|
||||
func makeLiteral(negated, value, with interface{}) (interface{}, error) {
|
||||
|
||||
expr := value.(*Expr)
|
||||
|
||||
expr.Negated = negated.(bool)
|
||||
|
||||
if with != nil {
|
||||
expr.With = with.([]*With)
|
||||
}
|
||||
|
||||
return expr, nil
|
||||
}
|
||||
|
||||
func makeLiteralExpr(loc *Location, lhs, rest interface{}) (interface{}, error) {
|
||||
|
||||
if rest == nil {
|
||||
if call, ok := lhs.(*Term).Value.(Call); ok {
|
||||
return NewExpr([]*Term(call)).SetLocation(loc), nil
|
||||
}
|
||||
return NewExpr(lhs).SetLocation(loc), nil
|
||||
}
|
||||
|
||||
termSlice := rest.([]interface{})
|
||||
terms := []*Term{
|
||||
termSlice[1].(*Term),
|
||||
lhs.(*Term),
|
||||
termSlice[3].(*Term),
|
||||
}
|
||||
|
||||
expr := NewExpr(terms).SetLocation(loc)
|
||||
|
||||
return expr, nil
|
||||
}
|
||||
|
||||
// makeSomeDeclLiteral wraps the parsed some-declaration symbols in an
// expression.
func makeSomeDeclLiteral(loc *Location, sl interface{}) (interface{}, error) {
	symbols := sl.([]*Term)
	return NewExpr(&SomeDecl{Location: loc, Symbols: symbols}).SetLocation(loc), nil
}
|
||||
|
||||
func makeSomeDeclSymbols(head interface{}, rest interface{}) (interface{}, error) {
|
||||
|
||||
var symbols []*Term
|
||||
|
||||
symbols = append(symbols, head.(*Term))
|
||||
|
||||
if sl1, ok := rest.([]interface{}); ok {
|
||||
for i := range sl1 {
|
||||
if sl2, ok := sl1[i].([]interface{}); ok {
|
||||
symbols = append(symbols, sl2[3].(*Term))
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return symbols, nil
|
||||
}
|
||||
|
||||
func makeWithKeywordList(head, tail interface{}) (interface{}, error) {
|
||||
var withs []*With
|
||||
|
||||
if head == nil {
|
||||
return withs, nil
|
||||
}
|
||||
|
||||
sl := tail.([]interface{})
|
||||
|
||||
withs = make([]*With, 0, len(sl)+1)
|
||||
withs = append(withs, head.(*With))
|
||||
|
||||
for i := range sl {
|
||||
withSlice := sl[i].([]interface{})
|
||||
withs = append(withs, withSlice[1].(*With))
|
||||
}
|
||||
|
||||
return withs, nil
|
||||
}
|
||||
|
||||
func makeWithKeyword(loc *Location, target, value interface{}) (interface{}, error) {
|
||||
w := &With{
|
||||
Target: target.(*Term),
|
||||
Value: value.(*Term),
|
||||
}
|
||||
return w.SetLocation(loc), nil
|
||||
}
|
||||
|
||||
func makeExprTerm(loc *Location, lhs, rest interface{}) (interface{}, error) {
|
||||
|
||||
if rest == nil {
|
||||
return lhs, nil
|
||||
}
|
||||
|
||||
sl := rest.([]interface{})
|
||||
|
||||
if len(sl) == 0 {
|
||||
return lhs, nil
|
||||
}
|
||||
|
||||
for i := range sl {
|
||||
termSlice := sl[i].([]interface{})
|
||||
call := Call{
|
||||
termSlice[1].(*Term),
|
||||
lhs.(*Term),
|
||||
termSlice[3].(*Term),
|
||||
}
|
||||
lhs = NewTerm(call).SetLocation(loc)
|
||||
}
|
||||
|
||||
return lhs, nil
|
||||
}
|
||||
|
||||
func makeCall(loc *Location, operator, args interface{}) (interface{}, error) {
|
||||
|
||||
termSlice := args.([]*Term)
|
||||
termOperator := operator.(*Term)
|
||||
|
||||
call := make(Call, len(termSlice)+1)
|
||||
|
||||
if _, ok := termOperator.Value.(Var); ok {
|
||||
termOperator = RefTerm(termOperator).SetLocation(loc)
|
||||
}
|
||||
|
||||
call[0] = termOperator
|
||||
|
||||
for i := 1; i < len(call); i++ {
|
||||
call[i] = termSlice[i-1]
|
||||
}
|
||||
|
||||
return NewTerm(call).SetLocation(loc), nil
|
||||
}
|
||||
|
||||
func makeBraceEnclosedBody(loc *Location, body interface{}) (interface{}, error) {
|
||||
if body != nil {
|
||||
return body, nil
|
||||
}
|
||||
return NewBody(NewExpr(ObjectTerm().SetLocation(loc)).SetLocation(loc)), nil
|
||||
}
|
||||
|
||||
// makeBody assembles a Body from the head expression and the tail matches,
// where pos is the index of the *Expr inside each tail match slice.
func makeBody(head, tail interface{}, pos int) (interface{}, error) {

	sl := tail.([]interface{})
	body := make(Body, len(sl)+1)
	body[0] = head.(*Expr)

	for i := 1; i < len(body); i++ {
		body[i] = sl[i-1].([]interface{})[pos].(*Expr)
	}

	return body, nil
}
|
||||
|
||||
func makeExprTermList(head, tail interface{}) (interface{}, error) {
|
||||
|
||||
var terms []*Term
|
||||
|
||||
if head == nil {
|
||||
return terms, nil
|
||||
}
|
||||
|
||||
sl := tail.([]interface{})
|
||||
|
||||
terms = make([]*Term, 0, len(sl)+1)
|
||||
terms = append(terms, head.(*Term))
|
||||
|
||||
for i := range sl {
|
||||
termSlice := sl[i].([]interface{})
|
||||
terms = append(terms, termSlice[3].(*Term))
|
||||
}
|
||||
|
||||
return terms, nil
|
||||
}
|
||||
|
||||
func makeExprTermPairList(head, tail interface{}) (interface{}, error) {
|
||||
|
||||
var terms [][2]*Term
|
||||
|
||||
if head == nil {
|
||||
return terms, nil
|
||||
}
|
||||
|
||||
sl := tail.([]interface{})
|
||||
|
||||
terms = make([][2]*Term, 0, len(sl)+1)
|
||||
terms = append(terms, head.([2]*Term))
|
||||
|
||||
for i := range sl {
|
||||
termSlice := sl[i].([]interface{})
|
||||
terms = append(terms, termSlice[3].([2]*Term))
|
||||
}
|
||||
|
||||
return terms, nil
|
||||
}
|
||||
|
||||
// makeExprTermPair packages a parsed key and value term as a pair.
func makeExprTermPair(key, value interface{}) (interface{}, error) {
	return [2]*Term{key.(*Term), value.(*Term)}, nil
}
|
||||
|
||||
func makeInfixOperator(loc *Location, text []byte) (interface{}, error) {
|
||||
op := string(text)
|
||||
for _, b := range Builtins {
|
||||
if string(b.Infix) == op {
|
||||
op = string(b.Name)
|
||||
}
|
||||
}
|
||||
operator := RefTerm(VarTerm(op).SetLocation(loc)).SetLocation(loc)
|
||||
return operator, nil
|
||||
}
|
||||
|
||||
// makeArray builds an array term from the parsed element terms.
func makeArray(loc *Location, list interface{}) (interface{}, error) {
	termSlice := list.([]*Term)
	return ArrayTerm(termSlice...).SetLocation(loc), nil
}

// makeObject builds an object term from the parsed key/value pairs.
func makeObject(loc *Location, list interface{}) (interface{}, error) {
	termPairSlice := list.([][2]*Term)
	return ObjectTerm(termPairSlice...).SetLocation(loc), nil
}

// makeSet builds a set term from the parsed element terms.
func makeSet(loc *Location, list interface{}) (interface{}, error) {
	termSlice := list.([]*Term)
	return SetTerm(termSlice...).SetLocation(loc), nil
}
|
||||
|
||||
// makeArrayComprehension builds an array comprehension term from the parsed
// head term and body.
func makeArrayComprehension(loc *Location, head, body interface{}) (interface{}, error) {
	return ArrayComprehensionTerm(head.(*Term), body.(Body)).SetLocation(loc), nil
}

// makeSetComprehension builds a set comprehension term from the parsed head
// term and body.
func makeSetComprehension(loc *Location, head, body interface{}) (interface{}, error) {
	return SetComprehensionTerm(head.(*Term), body.(Body)).SetLocation(loc), nil
}

// makeObjectComprehension builds an object comprehension term from the
// parsed key/value head pair and body.
func makeObjectComprehension(loc *Location, head, body interface{}) (interface{}, error) {
	pair := head.([2]*Term)
	return ObjectComprehensionTerm(pair[0], pair[1], body.(Body)).SetLocation(loc), nil
}
|
||||
|
||||
func makeRef(loc *Location, head, rest interface{}) (interface{}, error) {
|
||||
|
||||
headTerm := head.(*Term)
|
||||
ifaceSlice := rest.([]interface{})
|
||||
|
||||
if len(ifaceSlice) == 0 {
|
||||
return headTerm, nil
|
||||
}
|
||||
|
||||
ref := make(Ref, len(ifaceSlice)+1)
|
||||
ref[0] = headTerm
|
||||
|
||||
for i := 1; i < len(ref); i++ {
|
||||
ref[i] = ifaceSlice[i-1].(*Term)
|
||||
}
|
||||
|
||||
return NewTerm(ref).SetLocation(loc), nil
|
||||
}
|
||||
|
||||
// makeRefOperandDot converts a dot-access variable into a string term so
// that a.b and a["b"] are represented identically.
func makeRefOperandDot(loc *Location, val interface{}) (interface{}, error) {
	return StringTerm(string(val.(*Term).Value.(Var))).SetLocation(loc), nil
}
|
||||
|
||||
func makeVar(loc *Location, text interface{}) (interface{}, error) {
|
||||
str := string(text.([]byte))
|
||||
return VarTerm(str).SetLocation(loc), nil
|
||||
}
|
||||
|
||||
// makeNumber builds a number term from the matched source text, rejecting
// numbers whose binary exponent is extreme.
func makeNumber(loc *Location, text interface{}) (interface{}, error) {
	f, ok := new(big.Float).SetString(string(text.([]byte)))
	if !ok {
		// This indicates the grammar is out-of-sync with what the string
		// representation of floating point numbers. This should not be
		// possible.
		panic("illegal value")
	}

	// Put limit on size of exponent to prevent non-linear cost of String()
	// function on big.Float from causing denial of service: https://github.com/golang/go/issues/11068
	//
	// n == sign * mantissa * 2^exp
	// 0.5 <= mantissa < 1.0
	//
	// The limit is arbitrary.
	exp := f.MantExp(nil)
	if exp > 1e5 || exp < -1e5 {
		return nil, fmt.Errorf("number too big")
	}

	return NumberTerm(json.Number(f.String())).SetLocation(loc), nil
}
|
||||
|
||||
// makeString builds a string term from a quoted string literal, using the
// JSON decoder to process escape sequences.
func makeString(loc *Location, text interface{}) (interface{}, error) {
	var v string
	err := json.Unmarshal(text.([]byte), &v)
	return StringTerm(v).SetLocation(loc), err
}

// makeRawString builds a string term from a backtick-quoted raw string; no
// escape processing is performed.
func makeRawString(loc *Location, text interface{}) (interface{}, error) {
	s := string(text.([]byte))
	s = s[1 : len(s)-1] // Trim surrounding quotes.
	return StringTerm(s).SetLocation(loc), nil
}

// makeNonterminatedString returns the partial string term together with an
// error reporting the missing closing quote.
func makeNonterminatedString(loc *Location, s string) (interface{}, error) {
	return StringTerm(s).SetLocation(loc), fmt.Errorf("found non-terminated string literal")
}
|
||||
|
||||
func makeBool(loc *Location, text interface{}) (interface{}, error) {
|
||||
var term *Term
|
||||
if string(text.([]byte)) == "true" {
|
||||
term = BooleanTerm(true)
|
||||
} else {
|
||||
term = BooleanTerm(false)
|
||||
}
|
||||
return term.SetLocation(loc), nil
|
||||
}
|
||||
|
||||
func makeNull(loc *Location) (interface{}, error) {
|
||||
return NullTerm().SetLocation(loc), nil
|
||||
}
|
||||
|
||||
func makeComments(c *current, text interface{}) (interface{}, error) {
|
||||
|
||||
var buf bytes.Buffer
|
||||
for _, x := range text.([]interface{}) {
|
||||
buf.Write(x.([]byte))
|
||||
}
|
||||
|
||||
comment := NewComment(buf.Bytes())
|
||||
comment.Location = currentLocation(c)
|
||||
comments := c.globalStore[commentsKey].(map[commentKey]*Comment)
|
||||
key := commentKey{
|
||||
File: comment.Location.File,
|
||||
Row: comment.Location.Row,
|
||||
Col: comment.Location.Col,
|
||||
}
|
||||
comments[key] = comment
|
||||
return comment, nil
|
||||
}
|
||||
1352
vendor/github.com/open-policy-agent/opa/ast/policy.go
generated
vendored
Normal file
1352
vendor/github.com/open-policy-agent/opa/ast/policy.go
generated
vendored
Normal file
File diff suppressed because it is too large
Load Diff
82
vendor/github.com/open-policy-agent/opa/ast/pretty.go
generated
vendored
Normal file
82
vendor/github.com/open-policy-agent/opa/ast/pretty.go
generated
vendored
Normal file
@@ -0,0 +1,82 @@
|
||||
// Copyright 2018 The OPA Authors. All rights reserved.
|
||||
// Use of this source code is governed by an Apache2
|
||||
// license that can be found in the LICENSE file.
|
||||
|
||||
package ast
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"io"
|
||||
"strings"
|
||||
)
|
||||
|
||||
// Pretty writes a pretty representation of the AST rooted at x to w.
//
// This function is intended for debug purposes when inspecting ASTs.
func Pretty(w io.Writer, x interface{}) {
	// depth starts at -1 so the root node prints at column zero after the
	// first pre-visit increment.
	pp := &prettyPrinter{
		depth: -1,
		w:     w,
	}
	NewBeforeAfterVisitor(pp.Before, pp.After).Walk(x)
}
|
||||
|
||||
// prettyPrinter renders AST nodes one per line, indented by tree depth.
type prettyPrinter struct {
	depth int       // current indentation depth
	w     io.Writer // destination for rendered output
}
|
||||
|
||||
// Before is invoked on the way down the walk. It prints one line for the
// visited node and returns false in every case. *Term nodes are
// transparent: they neither print nor change the depth.
func (pp *prettyPrinter) Before(x interface{}) bool {
	// Terms do not contribute an indentation level; everything else does.
	switch x.(type) {
	case *Term:
	default:
		pp.depth++
	}

	switch x := x.(type) {
	case *Term:
		return false
	case Args:
		// Suppress empty argument lists entirely.
		if len(x) == 0 {
			return false
		}
		pp.writeType(x)
	case *Expr:
		// Expressions also show their negation flag and body index.
		extras := []string{}
		if x.Negated {
			extras = append(extras, "negated")
		}
		extras = append(extras, fmt.Sprintf("index=%d", x.Index))
		pp.writeIndent("%v %v", TypeName(x), strings.Join(extras, " "))
	case Null, Boolean, Number, String, Var:
		// Scalars and variables print their value directly.
		pp.writeValue(x)
	default:
		pp.writeType(x)
	}
	return false
}
|
||||
|
||||
func (pp *prettyPrinter) After(x interface{}) {
|
||||
switch x.(type) {
|
||||
case *Term:
|
||||
default:
|
||||
pp.depth--
|
||||
}
|
||||
}
|
||||
|
||||
func (pp *prettyPrinter) writeValue(x interface{}) {
|
||||
pp.writeIndent(fmt.Sprint(x))
|
||||
}
|
||||
|
||||
// writeType prints the node's type name at the current indentation.
func (pp *prettyPrinter) writeType(x interface{}) {
	pp.writeIndent(TypeName(x))
}

// writeIndent prints a line prefixed with pp.depth spaces. Note that f is
// interpreted as a Printf format string by write.
func (pp *prettyPrinter) writeIndent(f string, a ...interface{}) {
	pad := strings.Repeat(" ", pp.depth)
	pp.write(pad+f, a...)
}

// write prints the formatted line followed by a newline to the output writer.
func (pp *prettyPrinter) write(f string, a ...interface{}) {
	fmt.Fprintf(pp.w, f+"\n", a...)
}
|
||||
311
vendor/github.com/open-policy-agent/opa/ast/rego.peg
generated
vendored
Normal file
311
vendor/github.com/open-policy-agent/opa/ast/rego.peg
generated
vendored
Normal file
@@ -0,0 +1,311 @@
|
||||
{
|
||||
package ast
|
||||
}
|
||||
|
||||
Program <- _ vals:(Stmt (ws Stmt)*)? _ EOF {
|
||||
return makeProgram(c, vals)
|
||||
}
|
||||
|
||||
Stmt <- val:(Package / Import / Rules / Body / Comment) {
|
||||
return val, nil
|
||||
}
|
||||
|
||||
Package <- "package" ws val:(Ref / Var) {
|
||||
return makePackage(currentLocation(c), val)
|
||||
}
|
||||
|
||||
Import <- "import" ws path:(Ref / Var) alias:(ws "as" ws Var)? {
|
||||
return makeImport(currentLocation(c), path, alias)
|
||||
}
|
||||
|
||||
Rules <- DefaultRules / NormalRules
|
||||
|
||||
DefaultRules <- "default" ws name:Var _ operator:( ":=" / "=" ) _ value:Term {
|
||||
return makeDefaultRule(currentLocation(c), name, operator, value)
|
||||
}
|
||||
|
||||
NormalRules <- head:(PartialRuleHead / RuleHead) _ rest:(NonEmptyBraceEnclosedBody ( _ RuleExt)* ) {
|
||||
return makeRule(currentLocation(c), head, rest)
|
||||
}
|
||||
|
||||
PartialRuleHead <- name:Var args:( _ "(" _ Args _ ")" _ ) value:( _ ( ":=" / "=" ) _ ExprTerm )? {
|
||||
return makeRuleHead(currentLocation(c), name, args, nil, value)
|
||||
}
|
||||
|
||||
RuleHead <- name:Var key:( _ "[" _ ExprTerm _ "]" _ )? value:( _ ( ":=" / "=" ) _ ExprTerm )? {
|
||||
return makeRuleHead(currentLocation(c), name, nil, key, value)
|
||||
}
|
||||
|
||||
Args <- list:ExprTermList {
|
||||
return makeArgs(list)
|
||||
}
|
||||
|
||||
Else <- "else" value:( _ "=" _ Term )? body:( _ NonEmptyBraceEnclosedBody ) {
|
||||
return makeRuleExt(currentLocation(c), value, body)
|
||||
}
|
||||
|
||||
RuleDup <- b:NonEmptyBraceEnclosedBody {
|
||||
return ruleExt{loc: currentLocation(c), body: b.(Body)}, nil
|
||||
}
|
||||
|
||||
RuleExt <- Else / RuleDup
|
||||
|
||||
Body <- NonWhitespaceBody / BraceEnclosedBody
|
||||
|
||||
NonEmptyBraceEnclosedBody <- "{" _ val:WhitespaceBody? _ "}" {
|
||||
if val == nil {
|
||||
return NewBody(), fmt.Errorf("found empty body")
|
||||
}
|
||||
return val, nil
|
||||
}
|
||||
|
||||
BraceEnclosedBody <- "{" _ val:WhitespaceBody? _ "}" {
|
||||
return makeBraceEnclosedBody(currentLocation(c), val)
|
||||
}
|
||||
|
||||
WhitespaceBody <- head:Literal tail:(WhitespaceLiteralSeparator _ Literal)* {
|
||||
return makeBody(head, tail, 2)
|
||||
}
|
||||
|
||||
NonWhitespaceBody <- head:Literal tail:( _ NonWhitespaceLiteralSeparator _ Literal)* {
|
||||
return makeBody(head, tail, 3)
|
||||
}
|
||||
|
||||
WhitespaceLiteralSeparator <- [ \t]* ((NonWhitespaceLiteralSeparator Comment?) / (Comment? [\r\n]))
|
||||
|
||||
NonWhitespaceLiteralSeparator <- ";"
|
||||
|
||||
Literal <- TermExpr / SomeDecl
|
||||
|
||||
SomeDecl <- "some" ws symbols:SomeDeclList {
|
||||
return makeSomeDeclLiteral(currentLocation(c), symbols)
|
||||
}
|
||||
|
||||
SomeDeclList <- head:Var rest:( _ ',' _ Var)* {
|
||||
return makeSomeDeclSymbols(head, rest)
|
||||
}
|
||||
|
||||
TermExpr <- negated:NotKeyword? value:LiteralExpr with:WithKeywordList? {
|
||||
return makeLiteral(negated, value, with)
|
||||
}
|
||||
|
||||
LiteralExpr <- lhs:ExprTerm rest:( _ LiteralExprOperator _ ExprTerm)? {
|
||||
return makeLiteralExpr(currentLocation(c), lhs, rest)
|
||||
}
|
||||
|
||||
LiteralExprOperator <- val:( ":=" / "=" ) {
|
||||
return makeInfixOperator(currentLocation(c), c.text)
|
||||
}
|
||||
|
||||
NotKeyword <- val:("not" ws)? {
|
||||
return val != nil, nil
|
||||
}
|
||||
|
||||
WithKeywordList <- ws head:WithKeyword rest:( ws WithKeyword )* {
|
||||
return makeWithKeywordList(head, rest)
|
||||
}
|
||||
|
||||
WithKeyword <- "with" ws target:ExprTerm ws "as" ws value:ExprTerm {
|
||||
return makeWithKeyword(currentLocation(c), target, value)
|
||||
}
|
||||
|
||||
ExprTerm <- lhs:RelationExpr rest:( _ RelationOperator _ RelationExpr )* {
|
||||
return makeExprTerm(currentLocation(c), lhs, rest)
|
||||
}
|
||||
|
||||
ExprTermPairList <- head:ExprTermPair? tail:( _ ',' _ ExprTermPair )* _ ","? {
|
||||
return makeExprTermPairList(head, tail)
|
||||
}
|
||||
|
||||
ExprTermList <- head:ExprTerm? tail:( _ ',' _ ExprTerm )* _ ","? {
|
||||
return makeExprTermList(head, tail)
|
||||
}
|
||||
|
||||
ExprTermPair <- key:ExprTerm _ ':' _ value:ExprTerm {
|
||||
return makeExprTermPair(key, value)
|
||||
}
|
||||
|
||||
RelationOperator <- val:("==" / "!=" / "<=" / ">=" / ">" / "<") {
|
||||
return makeInfixOperator(currentLocation(c), c.text)
|
||||
}
|
||||
|
||||
RelationExpr <- lhs:BitwiseOrExpr rest:( _ BitwiseOrOperator _ BitwiseOrExpr)* {
|
||||
return makeExprTerm(currentLocation(c), lhs, rest)
|
||||
}
|
||||
|
||||
BitwiseOrOperator <- val:"|" {
|
||||
return makeInfixOperator(currentLocation(c), c.text)
|
||||
}
|
||||
|
||||
BitwiseOrExpr <- lhs:BitwiseAndExpr rest:( _ BitwiseAndOperator _ BitwiseAndExpr)* {
|
||||
return makeExprTerm(currentLocation(c), lhs, rest)
|
||||
}
|
||||
|
||||
BitwiseAndOperator <- val:"&" {
|
||||
return makeInfixOperator(currentLocation(c), c.text)
|
||||
}
|
||||
|
||||
BitwiseAndExpr <- lhs:ArithExpr rest:( _ ArithOperator _ ArithExpr)* {
|
||||
return makeExprTerm(currentLocation(c), lhs, rest)
|
||||
}
|
||||
|
||||
ArithOperator <- val:("+" / "-") {
|
||||
return makeInfixOperator(currentLocation(c), c.text)
|
||||
}
|
||||
|
||||
ArithExpr <- lhs:FactorExpr rest:( _ FactorOperator _ FactorExpr )* {
|
||||
return makeExprTerm(currentLocation(c), lhs, rest)
|
||||
}
|
||||
|
||||
FactorOperator <- val:("*" / "/" / "%"){
|
||||
return makeInfixOperator(currentLocation(c), c.text)
|
||||
}
|
||||
|
||||
FactorExpr <- ( "(" _ expr:ExprTerm _ ")" ) {
|
||||
return expr, nil
|
||||
} / term:Term {
|
||||
return term, nil
|
||||
}
|
||||
|
||||
Call <- operator:(Ref / Var) "(" _ args:ExprTermList _ ")" {
|
||||
return makeCall(currentLocation(c), operator, args)
|
||||
}
|
||||
|
||||
Term <- val:( Comprehension / Composite / Scalar / Call / Var ) refs:RefOperand* {
|
||||
return makeRef(currentLocation(c), val, refs)
|
||||
}
|
||||
|
||||
TermPair <- key:Term _ ":" _ value:Term {
|
||||
return makeExprTermPair(key, value)
|
||||
}
|
||||
|
||||
Comprehension <- ArrayComprehension / ObjectComprehension / SetComprehension
|
||||
|
||||
ArrayComprehension <- "[" _ head:Term _ "|" _ body:WhitespaceBody _ "]" {
|
||||
return makeArrayComprehension(currentLocation(c), head, body)
|
||||
}
|
||||
|
||||
ObjectComprehension <- "{" _ head:TermPair _ "|" _ body:WhitespaceBody _ "}" {
|
||||
return makeObjectComprehension(currentLocation(c), head, body)
|
||||
}
|
||||
|
||||
SetComprehension <- "{" _ head:Term _ "|" _ body:WhitespaceBody _ "}" {
|
||||
return makeSetComprehension(currentLocation(c), head, body)
|
||||
}
|
||||
|
||||
Composite <- Object / Array / Set
|
||||
|
||||
Scalar <- Number / String / Bool / Null
|
||||
|
||||
Object <- '{' _ list:ExprTermPairList _ '}' {
|
||||
return makeObject(currentLocation(c), list)
|
||||
}
|
||||
|
||||
Array <- '[' _ list:ExprTermList _ ']' {
|
||||
return makeArray(currentLocation(c), list)
|
||||
}
|
||||
|
||||
Set <- SetEmpty / SetNonEmpty
|
||||
|
||||
SetEmpty <- "set(" _ ")" {
|
||||
var empty []*Term
|
||||
return makeSet(currentLocation(c), empty)
|
||||
}
|
||||
|
||||
SetNonEmpty <- '{' _ list:ExprTermList _ '}' {
|
||||
return makeSet(currentLocation(c), list)
|
||||
}
|
||||
|
||||
Ref <- head:(Composite / Var) rest:RefOperand+ {
|
||||
return makeRef(currentLocation(c), head, rest)
|
||||
}
|
||||
|
||||
RefOperand <- RefOperandDot / RefOperandCanonical
|
||||
|
||||
RefOperandDot <- "." val:Var {
|
||||
return makeRefOperandDot(currentLocation(c), val)
|
||||
}
|
||||
|
||||
RefOperandCanonical <- "[" val:ExprTerm "]" {
|
||||
return val, nil
|
||||
}
|
||||
|
||||
Var <- val:VarChecked {
|
||||
return val.([]interface{})[0], nil
|
||||
}
|
||||
|
||||
VarChecked <- val:VarUnchecked !{
|
||||
return IsKeyword(string(val.(*Term).Value.(Var))), nil
|
||||
}
|
||||
|
||||
VarUnchecked <- VarStart VarChar* {
|
||||
return makeVar(currentLocation(c), c.text)
|
||||
}
|
||||
|
||||
Number <- '-'? ( Float / Integer ) {
|
||||
return makeNumber(currentLocation(c), c.text)
|
||||
}
|
||||
|
||||
Float <- ExponentFloat / PointFloat
|
||||
|
||||
ExponentFloat <- ( PointFloat / Integer ) Exponent
|
||||
|
||||
PointFloat <- Integer? Fraction
|
||||
|
||||
Fraction <- '.' DecimalDigit+
|
||||
|
||||
Exponent <- 'e'i [+-]? DecimalDigit+
|
||||
|
||||
Integer <- '0' / ( NonZeroDecimalDigit DecimalDigit* )
|
||||
|
||||
String <- QuotedString / RawString
|
||||
|
||||
QuotedString <- '"' Char* '"' {
|
||||
return makeString(currentLocation(c), c.text)
|
||||
} / '"' Char* !'"' {
|
||||
return makeNonterminatedString(currentLocation(c), string(c.text))
|
||||
}
|
||||
|
||||
RawString <- '`' [^`]* '`' {
|
||||
return makeRawString(currentLocation(c), c.text)
|
||||
}
|
||||
|
||||
Bool <- val:("true" / "false") !VarChar {
|
||||
return makeBool(currentLocation(c), c.text)
|
||||
}
|
||||
|
||||
Null <- "null" !VarChar {
|
||||
return makeNull(currentLocation(c))
|
||||
}
|
||||
|
||||
VarStart <- AsciiLetter
|
||||
|
||||
VarChar <- AsciiLetter / DecimalDigit
|
||||
|
||||
AsciiLetter <- [A-Za-z_]
|
||||
|
||||
Char <- ( !EscapedChar . ) / ( '\\' EscapeSequence )
|
||||
|
||||
EscapedChar <- [\x00-\x1f"\\]
|
||||
|
||||
EscapeSequence <- SingleCharEscape / UnicodeEscape
|
||||
|
||||
SingleCharEscape <- [ " \\ / b f n r t ]
|
||||
|
||||
UnicodeEscape <- 'u' HexDigit HexDigit HexDigit HexDigit
|
||||
|
||||
DecimalDigit <- [0-9]
|
||||
|
||||
NonZeroDecimalDigit <- [1-9]
|
||||
|
||||
HexDigit <- [0-9a-fA-F]
|
||||
|
||||
ws "whitespace" <- [ \t\r\n]+
|
||||
|
||||
_ "whitespace" <- ( [ \t\r\n] / Comment )*
|
||||
|
||||
Comment <- [ \t]* "#" text:[^\r\n]* {
|
||||
return makeComments(c, text)
|
||||
}
|
||||
|
||||
EOF <- !.
|
||||
15
vendor/github.com/open-policy-agent/opa/ast/strings.go
generated
vendored
Normal file
15
vendor/github.com/open-policy-agent/opa/ast/strings.go
generated
vendored
Normal file
@@ -0,0 +1,15 @@
|
||||
// Copyright 2016 The OPA Authors. All rights reserved.
|
||||
// Use of this source code is governed by an Apache2
|
||||
// license that can be found in the LICENSE file.
|
||||
|
||||
package ast
|
||||
|
||||
import (
|
||||
"reflect"
|
||||
"strings"
|
||||
)
|
||||
|
||||
// TypeName returns a human readable name for the AST element type.
|
||||
func TypeName(x interface{}) string {
|
||||
return strings.ToLower(reflect.Indirect(reflect.ValueOf(x)).Type().Name())
|
||||
}
|
||||
2572
vendor/github.com/open-policy-agent/opa/ast/term.go
generated
vendored
Normal file
2572
vendor/github.com/open-policy-agent/opa/ast/term.go
generated
vendored
Normal file
File diff suppressed because it is too large
Load Diff
382
vendor/github.com/open-policy-agent/opa/ast/transform.go
generated
vendored
Normal file
382
vendor/github.com/open-policy-agent/opa/ast/transform.go
generated
vendored
Normal file
@@ -0,0 +1,382 @@
|
||||
// Copyright 2016 The OPA Authors. All rights reserved.
|
||||
// Use of this source code is governed by an Apache2
|
||||
// license that can be found in the LICENSE file.
|
||||
|
||||
package ast
|
||||
|
||||
import "fmt"
|
||||
|
||||
// Transformer defines the interface for transforming AST elements. If the
|
||||
// transformer returns nil and does not indicate an error, the AST element will
|
||||
// be set to nil and no transformations will be applied to children of the
|
||||
// element.
|
||||
type Transformer interface {
|
||||
Transform(v interface{}) (interface{}, error)
|
||||
}
|
||||
|
||||
// Transform iterates the AST and calls the Transform function on the
|
||||
// Transformer t for x before recursing.
|
||||
func Transform(t Transformer, x interface{}) (interface{}, error) {
|
||||
|
||||
if term, ok := x.(*Term); ok {
|
||||
return Transform(t, term.Value)
|
||||
}
|
||||
|
||||
y, err := t.Transform(x)
|
||||
if err != nil {
|
||||
return x, err
|
||||
}
|
||||
|
||||
if y == nil {
|
||||
return nil, nil
|
||||
}
|
||||
|
||||
var ok bool
|
||||
switch y := y.(type) {
|
||||
case *Module:
|
||||
p, err := Transform(t, y.Package)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
if y.Package, ok = p.(*Package); !ok {
|
||||
return nil, fmt.Errorf("illegal transform: %T != %T", y.Package, p)
|
||||
}
|
||||
for i := range y.Imports {
|
||||
imp, err := Transform(t, y.Imports[i])
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
if y.Imports[i], ok = imp.(*Import); !ok {
|
||||
return nil, fmt.Errorf("illegal transform: %T != %T", y.Imports[i], imp)
|
||||
}
|
||||
}
|
||||
for i := range y.Rules {
|
||||
rule, err := Transform(t, y.Rules[i])
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
if y.Rules[i], ok = rule.(*Rule); !ok {
|
||||
return nil, fmt.Errorf("illegal transform: %T != %T", y.Rules[i], rule)
|
||||
}
|
||||
}
|
||||
for i := range y.Comments {
|
||||
comment, err := Transform(t, y.Comments[i])
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
if y.Comments[i], ok = comment.(*Comment); !ok {
|
||||
return nil, fmt.Errorf("illegal transform: %T != %T", y.Comments[i], comment)
|
||||
}
|
||||
}
|
||||
return y, nil
|
||||
case *Package:
|
||||
ref, err := Transform(t, y.Path)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
if y.Path, ok = ref.(Ref); !ok {
|
||||
return nil, fmt.Errorf("illegal transform: %T != %T", y.Path, ref)
|
||||
}
|
||||
return y, nil
|
||||
case *Import:
|
||||
y.Path, err = transformTerm(t, y.Path)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
if y.Alias, err = transformVar(t, y.Alias); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
return y, nil
|
||||
case *Rule:
|
||||
if y.Head, err = transformHead(t, y.Head); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
if y.Body, err = transformBody(t, y.Body); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
if y.Else != nil {
|
||||
rule, err := Transform(t, y.Else)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
if y.Else, ok = rule.(*Rule); !ok {
|
||||
return nil, fmt.Errorf("illegal transform: %T != %T", y.Else, rule)
|
||||
}
|
||||
}
|
||||
return y, nil
|
||||
case *Head:
|
||||
if y.Name, err = transformVar(t, y.Name); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
if y.Args, err = transformArgs(t, y.Args); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
if y.Key != nil {
|
||||
if y.Key, err = transformTerm(t, y.Key); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
}
|
||||
if y.Value != nil {
|
||||
if y.Value, err = transformTerm(t, y.Value); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
}
|
||||
return y, nil
|
||||
case Args:
|
||||
for i := range y {
|
||||
if y[i], err = transformTerm(t, y[i]); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
}
|
||||
return y, nil
|
||||
case Body:
|
||||
for i, e := range y {
|
||||
e, err := Transform(t, e)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
if y[i], ok = e.(*Expr); !ok {
|
||||
return nil, fmt.Errorf("illegal transform: %T != %T", y[i], e)
|
||||
}
|
||||
}
|
||||
return y, nil
|
||||
case *Expr:
|
||||
switch ts := y.Terms.(type) {
|
||||
case *SomeDecl:
|
||||
decl, err := Transform(t, ts)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
if y.Terms, ok = decl.(*SomeDecl); !ok {
|
||||
return nil, fmt.Errorf("illegal transform: %T != %T", y, decl)
|
||||
}
|
||||
return y, nil
|
||||
case []*Term:
|
||||
for i := range ts {
|
||||
if ts[i], err = transformTerm(t, ts[i]); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
}
|
||||
case *Term:
|
||||
if y.Terms, err = transformTerm(t, ts); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
}
|
||||
for i, w := range y.With {
|
||||
w, err := Transform(t, w)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
if y.With[i], ok = w.(*With); !ok {
|
||||
return nil, fmt.Errorf("illegal transform: %T != %T", y.With[i], w)
|
||||
}
|
||||
}
|
||||
return y, nil
|
||||
case *With:
|
||||
if y.Target, err = transformTerm(t, y.Target); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
if y.Value, err = transformTerm(t, y.Value); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
return y, nil
|
||||
case Ref:
|
||||
for i, term := range y {
|
||||
if y[i], err = transformTerm(t, term); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
}
|
||||
return y, nil
|
||||
case Object:
|
||||
return y.Map(func(k, v *Term) (*Term, *Term, error) {
|
||||
k, err := transformTerm(t, k)
|
||||
if err != nil {
|
||||
return nil, nil, err
|
||||
}
|
||||
v, err = transformTerm(t, v)
|
||||
if err != nil {
|
||||
return nil, nil, err
|
||||
}
|
||||
return k, v, nil
|
||||
})
|
||||
case Array:
|
||||
for i := range y {
|
||||
if y[i], err = transformTerm(t, y[i]); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
}
|
||||
return y, nil
|
||||
case Set:
|
||||
y, err = y.Map(func(term *Term) (*Term, error) {
|
||||
return transformTerm(t, term)
|
||||
})
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
return y, nil
|
||||
case *ArrayComprehension:
|
||||
if y.Term, err = transformTerm(t, y.Term); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
if y.Body, err = transformBody(t, y.Body); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
return y, nil
|
||||
case *ObjectComprehension:
|
||||
if y.Key, err = transformTerm(t, y.Key); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
if y.Value, err = transformTerm(t, y.Value); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
if y.Body, err = transformBody(t, y.Body); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
return y, nil
|
||||
case *SetComprehension:
|
||||
if y.Term, err = transformTerm(t, y.Term); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
if y.Body, err = transformBody(t, y.Body); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
return y, nil
|
||||
case Call:
|
||||
for i := range y {
|
||||
if y[i], err = transformTerm(t, y[i]); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
}
|
||||
return y, nil
|
||||
default:
|
||||
return y, nil
|
||||
}
|
||||
}
|
||||
|
||||
// TransformRefs calls the function f on all references under x.
|
||||
func TransformRefs(x interface{}, f func(Ref) (Value, error)) (interface{}, error) {
|
||||
t := &GenericTransformer{func(x interface{}) (interface{}, error) {
|
||||
if r, ok := x.(Ref); ok {
|
||||
return f(r)
|
||||
}
|
||||
return x, nil
|
||||
}}
|
||||
return Transform(t, x)
|
||||
}
|
||||
|
||||
// TransformVars calls the function f on all vars under x.
|
||||
func TransformVars(x interface{}, f func(Var) (Value, error)) (interface{}, error) {
|
||||
t := &GenericTransformer{func(x interface{}) (interface{}, error) {
|
||||
if v, ok := x.(Var); ok {
|
||||
return f(v)
|
||||
}
|
||||
return x, nil
|
||||
}}
|
||||
return Transform(t, x)
|
||||
}
|
||||
|
||||
// TransformComprehensions calls the functio nf on all comprehensions under x.
|
||||
func TransformComprehensions(x interface{}, f func(interface{}) (Value, error)) (interface{}, error) {
|
||||
t := &GenericTransformer{func(x interface{}) (interface{}, error) {
|
||||
switch x := x.(type) {
|
||||
case *ArrayComprehension:
|
||||
return f(x)
|
||||
case *SetComprehension:
|
||||
return f(x)
|
||||
case *ObjectComprehension:
|
||||
return f(x)
|
||||
}
|
||||
return x, nil
|
||||
}}
|
||||
return Transform(t, x)
|
||||
}
|
||||
|
||||
// GenericTransformer implements the Transformer interface to provide a utility
|
||||
// to transform AST nodes using a closure.
|
||||
type GenericTransformer struct {
|
||||
f func(x interface{}) (interface{}, error)
|
||||
}
|
||||
|
||||
// NewGenericTransformer returns a new GenericTransformer that will transform
|
||||
// AST nodes using the function f.
|
||||
func NewGenericTransformer(f func(x interface{}) (interface{}, error)) *GenericTransformer {
|
||||
return &GenericTransformer{
|
||||
f: f,
|
||||
}
|
||||
}
|
||||
|
||||
// Transform calls the function f on the GenericTransformer.
|
||||
func (t *GenericTransformer) Transform(x interface{}) (interface{}, error) {
|
||||
return t.f(x)
|
||||
}
|
||||
|
||||
func transformHead(t Transformer, head *Head) (*Head, error) {
|
||||
y, err := Transform(t, head)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
h, ok := y.(*Head)
|
||||
if !ok {
|
||||
return nil, fmt.Errorf("illegal transform: %T != %T", head, y)
|
||||
}
|
||||
return h, nil
|
||||
}
|
||||
func transformArgs(t Transformer, args Args) (Args, error) {
|
||||
y, err := Transform(t, args)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
a, ok := y.(Args)
|
||||
if !ok {
|
||||
return nil, fmt.Errorf("illegal transform: %T != %T", args, y)
|
||||
}
|
||||
return a, nil
|
||||
}
|
||||
|
||||
func transformBody(t Transformer, body Body) (Body, error) {
|
||||
y, err := Transform(t, body)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
r, ok := y.(Body)
|
||||
if !ok {
|
||||
return nil, fmt.Errorf("illegal transform: %T != %T", body, y)
|
||||
}
|
||||
return r, nil
|
||||
}
|
||||
|
||||
func transformTerm(t Transformer, term *Term) (*Term, error) {
|
||||
v, err := transformValue(t, term.Value)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
r := &Term{
|
||||
Value: v,
|
||||
Location: term.Location,
|
||||
}
|
||||
return r, nil
|
||||
}
|
||||
|
||||
func transformValue(t Transformer, v Value) (Value, error) {
|
||||
v1, err := Transform(t, v)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
r, ok := v1.(Value)
|
||||
if !ok {
|
||||
return nil, fmt.Errorf("illegal transform: %T != %T", v, v1)
|
||||
}
|
||||
return r, nil
|
||||
}
|
||||
|
||||
func transformVar(t Transformer, v Var) (Var, error) {
|
||||
v1, err := Transform(t, v)
|
||||
if err != nil {
|
||||
return "", err
|
||||
}
|
||||
r, ok := v1.(Var)
|
||||
if !ok {
|
||||
return "", fmt.Errorf("illegal transform: %T != %T", v, v1)
|
||||
}
|
||||
return r, nil
|
||||
}
|
||||
184
vendor/github.com/open-policy-agent/opa/ast/unify.go
generated
vendored
Normal file
184
vendor/github.com/open-policy-agent/opa/ast/unify.go
generated
vendored
Normal file
@@ -0,0 +1,184 @@
|
||||
// Copyright 2016 The OPA Authors. All rights reserved.
|
||||
// Use of this source code is governed by an Apache2
|
||||
// license that can be found in the LICENSE file.
|
||||
|
||||
package ast
|
||||
|
||||
// Unify returns a set of variables that will be unified when the equality expression defined by
|
||||
// terms a and b is evaluated. The unifier assumes that variables in the VarSet safe are already
|
||||
// unified.
|
||||
func Unify(safe VarSet, a *Term, b *Term) VarSet {
|
||||
u := &unifier{
|
||||
safe: safe,
|
||||
unified: VarSet{},
|
||||
unknown: map[Var]VarSet{},
|
||||
}
|
||||
u.unify(a, b)
|
||||
return u.unified
|
||||
}
|
||||
|
||||
type unifier struct {
|
||||
safe VarSet
|
||||
unified VarSet
|
||||
unknown map[Var]VarSet
|
||||
}
|
||||
|
||||
func (u *unifier) isSafe(x Var) bool {
|
||||
return u.safe.Contains(x) || u.unified.Contains(x)
|
||||
}
|
||||
|
||||
func (u *unifier) unify(a *Term, b *Term) {
|
||||
|
||||
switch a := a.Value.(type) {
|
||||
|
||||
case Var:
|
||||
switch b := b.Value.(type) {
|
||||
case Var:
|
||||
if u.isSafe(b) {
|
||||
u.markSafe(a)
|
||||
} else if u.isSafe(a) {
|
||||
u.markSafe(b)
|
||||
} else {
|
||||
u.markUnknown(a, b)
|
||||
u.markUnknown(b, a)
|
||||
}
|
||||
case Array, Object:
|
||||
u.unifyAll(a, b)
|
||||
case Ref:
|
||||
if u.isSafe(b[0].Value.(Var)) {
|
||||
u.markSafe(a)
|
||||
}
|
||||
default:
|
||||
u.markSafe(a)
|
||||
}
|
||||
|
||||
case Ref:
|
||||
if u.isSafe(a[0].Value.(Var)) {
|
||||
switch b := b.Value.(type) {
|
||||
case Var:
|
||||
u.markSafe(b)
|
||||
case Array, Object:
|
||||
u.markAllSafe(b)
|
||||
}
|
||||
}
|
||||
|
||||
case *ArrayComprehension:
|
||||
switch b := b.Value.(type) {
|
||||
case Var:
|
||||
u.markSafe(b)
|
||||
case Array:
|
||||
u.markAllSafe(b)
|
||||
}
|
||||
case *ObjectComprehension:
|
||||
switch b := b.Value.(type) {
|
||||
case Var:
|
||||
u.markSafe(b)
|
||||
case Object:
|
||||
u.markAllSafe(b)
|
||||
}
|
||||
case *SetComprehension:
|
||||
switch b := b.Value.(type) {
|
||||
case Var:
|
||||
u.markSafe(b)
|
||||
}
|
||||
|
||||
case Array:
|
||||
switch b := b.Value.(type) {
|
||||
case Var:
|
||||
u.unifyAll(b, a)
|
||||
case Ref, *ArrayComprehension, *ObjectComprehension, *SetComprehension:
|
||||
u.markAllSafe(a)
|
||||
case Array:
|
||||
if len(a) == len(b) {
|
||||
for i := range a {
|
||||
u.unify(a[i], b[i])
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
case Object:
|
||||
switch b := b.Value.(type) {
|
||||
case Var:
|
||||
u.unifyAll(b, a)
|
||||
case Ref:
|
||||
u.markAllSafe(a)
|
||||
case Object:
|
||||
if a.Len() == b.Len() {
|
||||
a.Iter(func(k, v *Term) error {
|
||||
if v2 := b.Get(k); v2 != nil {
|
||||
u.unify(v, v2)
|
||||
}
|
||||
return nil
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
default:
|
||||
switch b := b.Value.(type) {
|
||||
case Var:
|
||||
u.markSafe(b)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func (u *unifier) markAllSafe(x Value) {
|
||||
vis := u.varVisitor()
|
||||
vis.Walk(x)
|
||||
for v := range vis.Vars() {
|
||||
u.markSafe(v)
|
||||
}
|
||||
}
|
||||
|
||||
func (u *unifier) markSafe(x Var) {
|
||||
u.unified.Add(x)
|
||||
|
||||
// Add dependencies of 'x' to safe set
|
||||
vs := u.unknown[x]
|
||||
delete(u.unknown, x)
|
||||
for v := range vs {
|
||||
u.markSafe(v)
|
||||
}
|
||||
|
||||
// Add dependants of 'x' to safe set if they have no more
|
||||
// dependencies.
|
||||
for v, deps := range u.unknown {
|
||||
if deps.Contains(x) {
|
||||
delete(deps, x)
|
||||
if len(deps) == 0 {
|
||||
u.markSafe(v)
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func (u *unifier) markUnknown(a, b Var) {
|
||||
if _, ok := u.unknown[a]; !ok {
|
||||
u.unknown[a] = NewVarSet()
|
||||
}
|
||||
u.unknown[a].Add(b)
|
||||
}
|
||||
|
||||
func (u *unifier) unifyAll(a Var, b Value) {
|
||||
if u.isSafe(a) {
|
||||
u.markAllSafe(b)
|
||||
} else {
|
||||
vis := u.varVisitor()
|
||||
vis.Walk(b)
|
||||
unsafe := vis.Vars().Diff(u.safe).Diff(u.unified)
|
||||
if len(unsafe) == 0 {
|
||||
u.markSafe(a)
|
||||
} else {
|
||||
for v := range unsafe {
|
||||
u.markUnknown(a, v)
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func (u *unifier) varVisitor() *VarVisitor {
|
||||
return NewVarVisitor().WithParams(VarVisitorParams{
|
||||
SkipRefHead: true,
|
||||
SkipObjectKeys: true,
|
||||
SkipClosures: true,
|
||||
})
|
||||
}
|
||||
100
vendor/github.com/open-policy-agent/opa/ast/varset.go
generated
vendored
Normal file
100
vendor/github.com/open-policy-agent/opa/ast/varset.go
generated
vendored
Normal file
@@ -0,0 +1,100 @@
|
||||
// Copyright 2016 The OPA Authors. All rights reserved.
|
||||
// Use of this source code is governed by an Apache2
|
||||
// license that can be found in the LICENSE file.
|
||||
|
||||
package ast
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"sort"
|
||||
)
|
||||
|
||||
// VarSet represents a set of variables.
|
||||
type VarSet map[Var]struct{}
|
||||
|
||||
// NewVarSet returns a new VarSet containing the specified variables.
|
||||
func NewVarSet(vs ...Var) VarSet {
|
||||
s := VarSet{}
|
||||
for _, v := range vs {
|
||||
s.Add(v)
|
||||
}
|
||||
return s
|
||||
}
|
||||
|
||||
// Add updates the set to include the variable "v".
|
||||
func (s VarSet) Add(v Var) {
|
||||
s[v] = struct{}{}
|
||||
}
|
||||
|
||||
// Contains returns true if the set contains the variable "v".
|
||||
func (s VarSet) Contains(v Var) bool {
|
||||
_, ok := s[v]
|
||||
return ok
|
||||
}
|
||||
|
||||
// Copy returns a shallow copy of the VarSet.
|
||||
func (s VarSet) Copy() VarSet {
|
||||
cpy := VarSet{}
|
||||
for v := range s {
|
||||
cpy.Add(v)
|
||||
}
|
||||
return cpy
|
||||
}
|
||||
|
||||
// Diff returns a VarSet containing variables in s that are not in vs.
|
||||
func (s VarSet) Diff(vs VarSet) VarSet {
|
||||
r := VarSet{}
|
||||
for v := range s {
|
||||
if !vs.Contains(v) {
|
||||
r.Add(v)
|
||||
}
|
||||
}
|
||||
return r
|
||||
}
|
||||
|
||||
// Equal returns true if s contains exactly the same elements as vs.
|
||||
func (s VarSet) Equal(vs VarSet) bool {
|
||||
if len(s.Diff(vs)) > 0 {
|
||||
return false
|
||||
}
|
||||
return len(vs.Diff(s)) == 0
|
||||
}
|
||||
|
||||
// Intersect returns a VarSet containing variables in s that are in vs.
|
||||
func (s VarSet) Intersect(vs VarSet) VarSet {
|
||||
r := VarSet{}
|
||||
for v := range s {
|
||||
if vs.Contains(v) {
|
||||
r.Add(v)
|
||||
}
|
||||
}
|
||||
return r
|
||||
}
|
||||
|
||||
// Sorted returns a sorted slice of vars from s.
|
||||
func (s VarSet) Sorted() []Var {
|
||||
sorted := make([]Var, 0, len(s))
|
||||
for v := range s {
|
||||
sorted = append(sorted, v)
|
||||
}
|
||||
sort.Slice(sorted, func(i, j int) bool {
|
||||
return sorted[i].Compare(sorted[j]) < 0
|
||||
})
|
||||
return sorted
|
||||
}
|
||||
|
||||
// Update merges the other VarSet into this VarSet.
|
||||
func (s VarSet) Update(vs VarSet) {
|
||||
for v := range vs {
|
||||
s.Add(v)
|
||||
}
|
||||
}
|
||||
|
||||
func (s VarSet) String() string {
|
||||
tmp := []string{}
|
||||
for v := range s {
|
||||
tmp = append(tmp, string(v))
|
||||
}
|
||||
sort.Strings(tmp)
|
||||
return fmt.Sprintf("%v", tmp)
|
||||
}
|
||||
686
vendor/github.com/open-policy-agent/opa/ast/visit.go
generated
vendored
Normal file
686
vendor/github.com/open-policy-agent/opa/ast/visit.go
generated
vendored
Normal file
@@ -0,0 +1,686 @@
|
||||
// Copyright 2016 The OPA Authors. All rights reserved.
|
||||
// Use of this source code is governed by an Apache2
|
||||
// license that can be found in the LICENSE file.
|
||||
|
||||
package ast
|
||||
|
||||
// Visitor defines the interface for iterating AST elements. The Visit function
|
||||
// can return a Visitor w which will be used to visit the children of the AST
|
||||
// element v. If the Visit function returns nil, the children will not be
|
||||
// visited. This is deprecated.
|
||||
type Visitor interface {
|
||||
Visit(v interface{}) (w Visitor)
|
||||
}
|
||||
|
||||
// BeforeAndAfterVisitor wraps Visitor to provide hooks for being called before
|
||||
// and after the AST has been visited. This is deprecated.
|
||||
type BeforeAndAfterVisitor interface {
|
||||
Visitor
|
||||
Before(x interface{})
|
||||
After(x interface{})
|
||||
}
|
||||
|
||||
// Walk iterates the AST by calling the Visit function on the Visitor
|
||||
// v for x before recursing. This is deprecated.
|
||||
func Walk(v Visitor, x interface{}) {
|
||||
if bav, ok := v.(BeforeAndAfterVisitor); !ok {
|
||||
walk(v, x)
|
||||
} else {
|
||||
bav.Before(x)
|
||||
defer bav.After(x)
|
||||
walk(bav, x)
|
||||
}
|
||||
}
|
||||
|
||||
// WalkBeforeAndAfter iterates the AST by calling the Visit function on the
|
||||
// Visitor v for x before recursing. This is deprecated.
|
||||
func WalkBeforeAndAfter(v BeforeAndAfterVisitor, x interface{}) {
|
||||
Walk(v, x)
|
||||
}
|
||||
|
||||
func walk(v Visitor, x interface{}) {
|
||||
w := v.Visit(x)
|
||||
if w == nil {
|
||||
return
|
||||
}
|
||||
switch x := x.(type) {
|
||||
case *Module:
|
||||
Walk(w, x.Package)
|
||||
for _, i := range x.Imports {
|
||||
Walk(w, i)
|
||||
}
|
||||
for _, r := range x.Rules {
|
||||
Walk(w, r)
|
||||
}
|
||||
for _, c := range x.Comments {
|
||||
Walk(w, c)
|
||||
}
|
||||
case *Package:
|
||||
Walk(w, x.Path)
|
||||
case *Import:
|
||||
Walk(w, x.Path)
|
||||
Walk(w, x.Alias)
|
||||
case *Rule:
|
||||
Walk(w, x.Head)
|
||||
Walk(w, x.Body)
|
||||
if x.Else != nil {
|
||||
Walk(w, x.Else)
|
||||
}
|
||||
case *Head:
|
||||
Walk(w, x.Name)
|
||||
Walk(w, x.Args)
|
||||
if x.Key != nil {
|
||||
Walk(w, x.Key)
|
||||
}
|
||||
if x.Value != nil {
|
||||
Walk(w, x.Value)
|
||||
}
|
||||
case Body:
|
||||
for _, e := range x {
|
||||
Walk(w, e)
|
||||
}
|
||||
case Args:
|
||||
for _, t := range x {
|
||||
Walk(w, t)
|
||||
}
|
||||
case *Expr:
|
||||
switch ts := x.Terms.(type) {
|
||||
case *SomeDecl:
|
||||
Walk(w, ts)
|
||||
case []*Term:
|
||||
for _, t := range ts {
|
||||
Walk(w, t)
|
||||
}
|
||||
case *Term:
|
||||
Walk(w, ts)
|
||||
}
|
||||
for i := range x.With {
|
||||
Walk(w, x.With[i])
|
||||
}
|
||||
case *With:
|
||||
Walk(w, x.Target)
|
||||
Walk(w, x.Value)
|
||||
case *Term:
|
||||
Walk(w, x.Value)
|
||||
case Ref:
|
||||
for _, t := range x {
|
||||
Walk(w, t)
|
||||
}
|
||||
case Object:
|
||||
x.Foreach(func(k, vv *Term) {
|
||||
Walk(w, k)
|
||||
Walk(w, vv)
|
||||
})
|
||||
case Array:
|
||||
for _, t := range x {
|
||||
Walk(w, t)
|
||||
}
|
||||
case Set:
|
||||
x.Foreach(func(t *Term) {
|
||||
Walk(w, t)
|
||||
})
|
||||
case *ArrayComprehension:
|
||||
Walk(w, x.Term)
|
||||
Walk(w, x.Body)
|
||||
case *ObjectComprehension:
|
||||
Walk(w, x.Key)
|
||||
Walk(w, x.Value)
|
||||
Walk(w, x.Body)
|
||||
case *SetComprehension:
|
||||
Walk(w, x.Term)
|
||||
Walk(w, x.Body)
|
||||
case Call:
|
||||
for _, t := range x {
|
||||
Walk(w, t)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// WalkVars calls the function f on all vars under x. If the function f
|
||||
// returns true, AST nodes under the last node will not be visited.
|
||||
func WalkVars(x interface{}, f func(Var) bool) {
|
||||
vis := &GenericVisitor{func(x interface{}) bool {
|
||||
if v, ok := x.(Var); ok {
|
||||
return f(v)
|
||||
}
|
||||
return false
|
||||
}}
|
||||
vis.Walk(x)
|
||||
}
|
||||
|
||||
// WalkClosures calls the function f on all closures under x. If the function f
|
||||
// returns true, AST nodes under the last node will not be visited.
|
||||
func WalkClosures(x interface{}, f func(interface{}) bool) {
|
||||
vis := &GenericVisitor{func(x interface{}) bool {
|
||||
switch x.(type) {
|
||||
case *ArrayComprehension, *ObjectComprehension, *SetComprehension:
|
||||
return f(x)
|
||||
}
|
||||
return false
|
||||
}}
|
||||
vis.Walk(x)
|
||||
}
|
||||
|
||||
// WalkRefs calls the function f on all references under x. If the function f
|
||||
// returns true, AST nodes under the last node will not be visited.
|
||||
func WalkRefs(x interface{}, f func(Ref) bool) {
|
||||
vis := &GenericVisitor{func(x interface{}) bool {
|
||||
if r, ok := x.(Ref); ok {
|
||||
return f(r)
|
||||
}
|
||||
return false
|
||||
}}
|
||||
vis.Walk(x)
|
||||
}
|
||||
|
||||
// WalkTerms calls the function f on all terms under x. If the function f
|
||||
// returns true, AST nodes under the last node will not be visited.
|
||||
func WalkTerms(x interface{}, f func(*Term) bool) {
|
||||
vis := &GenericVisitor{func(x interface{}) bool {
|
||||
if term, ok := x.(*Term); ok {
|
||||
return f(term)
|
||||
}
|
||||
return false
|
||||
}}
|
||||
vis.Walk(x)
|
||||
}
|
||||
|
||||
// WalkWiths calls the function f on all with modifiers under x. If the function f
|
||||
// returns true, AST nodes under the last node will not be visited.
|
||||
func WalkWiths(x interface{}, f func(*With) bool) {
|
||||
vis := &GenericVisitor{func(x interface{}) bool {
|
||||
if w, ok := x.(*With); ok {
|
||||
return f(w)
|
||||
}
|
||||
return false
|
||||
}}
|
||||
vis.Walk(x)
|
||||
}
|
||||
|
||||
// WalkExprs calls the function f on all expressions under x. If the function f
|
||||
// returns true, AST nodes under the last node will not be visited.
|
||||
func WalkExprs(x interface{}, f func(*Expr) bool) {
|
||||
vis := &GenericVisitor{func(x interface{}) bool {
|
||||
if r, ok := x.(*Expr); ok {
|
||||
return f(r)
|
||||
}
|
||||
return false
|
||||
}}
|
||||
vis.Walk(x)
|
||||
}
|
||||
|
||||
// WalkBodies calls the function f on all bodies under x. If the function f
|
||||
// returns true, AST nodes under the last node will not be visited.
|
||||
func WalkBodies(x interface{}, f func(Body) bool) {
|
||||
vis := &GenericVisitor{func(x interface{}) bool {
|
||||
if b, ok := x.(Body); ok {
|
||||
return f(b)
|
||||
}
|
||||
return false
|
||||
}}
|
||||
vis.Walk(x)
|
||||
}
|
||||
|
||||
// WalkRules calls the function f on all rules under x. If the function f
|
||||
// returns true, AST nodes under the last node will not be visited.
|
||||
func WalkRules(x interface{}, f func(*Rule) bool) {
|
||||
vis := &GenericVisitor{func(x interface{}) bool {
|
||||
if r, ok := x.(*Rule); ok {
|
||||
stop := f(r)
|
||||
// NOTE(tsandall): since rules cannot be embedded inside of queries
|
||||
// we can stop early if there is no else block.
|
||||
if stop || r.Else == nil {
|
||||
return true
|
||||
}
|
||||
}
|
||||
return false
|
||||
}}
|
||||
vis.Walk(x)
|
||||
}
|
||||
|
||||
// WalkNodes calls the function f on all nodes under x. If the function f
|
||||
// returns true, AST nodes under the last node will not be visited.
|
||||
func WalkNodes(x interface{}, f func(Node) bool) {
|
||||
vis := &GenericVisitor{func(x interface{}) bool {
|
||||
if n, ok := x.(Node); ok {
|
||||
return f(n)
|
||||
}
|
||||
return false
|
||||
}}
|
||||
vis.Walk(x)
|
||||
}
|
||||
|
||||
// GenericVisitor provides a utility to walk over AST nodes using a
|
||||
// closure. If the closure returns true, the visitor will not walk
|
||||
// over AST nodes under x.
|
||||
type GenericVisitor struct {
|
||||
f func(x interface{}) bool
|
||||
}
|
||||
|
||||
// NewGenericVisitor returns a new GenericVisitor that will invoke the function
|
||||
// f on AST nodes.
|
||||
func NewGenericVisitor(f func(x interface{}) bool) *GenericVisitor {
|
||||
return &GenericVisitor{f}
|
||||
}
|
||||
|
||||
// Walk iterates the AST by calling the function f on the
|
||||
// GenericVisitor before recursing. Contrary to the generic Walk, this
|
||||
// does not require allocating the visitor from heap.
|
||||
func (vis *GenericVisitor) Walk(x interface{}) {
|
||||
if vis.f(x) {
|
||||
return
|
||||
}
|
||||
|
||||
switch x := x.(type) {
|
||||
case *Module:
|
||||
vis.Walk(x.Package)
|
||||
for _, i := range x.Imports {
|
||||
vis.Walk(i)
|
||||
}
|
||||
for _, r := range x.Rules {
|
||||
vis.Walk(r)
|
||||
}
|
||||
for _, c := range x.Comments {
|
||||
vis.Walk(c)
|
||||
}
|
||||
case *Package:
|
||||
vis.Walk(x.Path)
|
||||
case *Import:
|
||||
vis.Walk(x.Path)
|
||||
vis.Walk(x.Alias)
|
||||
case *Rule:
|
||||
vis.Walk(x.Head)
|
||||
vis.Walk(x.Body)
|
||||
if x.Else != nil {
|
||||
vis.Walk(x.Else)
|
||||
}
|
||||
case *Head:
|
||||
vis.Walk(x.Name)
|
||||
vis.Walk(x.Args)
|
||||
if x.Key != nil {
|
||||
vis.Walk(x.Key)
|
||||
}
|
||||
if x.Value != nil {
|
||||
vis.Walk(x.Value)
|
||||
}
|
||||
case Body:
|
||||
for _, e := range x {
|
||||
vis.Walk(e)
|
||||
}
|
||||
case Args:
|
||||
for _, t := range x {
|
||||
vis.Walk(t)
|
||||
}
|
||||
case *Expr:
|
||||
switch ts := x.Terms.(type) {
|
||||
case *SomeDecl:
|
||||
vis.Walk(ts)
|
||||
case []*Term:
|
||||
for _, t := range ts {
|
||||
vis.Walk(t)
|
||||
}
|
||||
case *Term:
|
||||
vis.Walk(ts)
|
||||
}
|
||||
for i := range x.With {
|
||||
vis.Walk(x.With[i])
|
||||
}
|
||||
case *With:
|
||||
vis.Walk(x.Target)
|
||||
vis.Walk(x.Value)
|
||||
case *Term:
|
||||
vis.Walk(x.Value)
|
||||
case Ref:
|
||||
for _, t := range x {
|
||||
vis.Walk(t)
|
||||
}
|
||||
case Object:
|
||||
for _, k := range x.Keys() {
|
||||
vis.Walk(k)
|
||||
vis.Walk(x.Get(k))
|
||||
}
|
||||
case Array:
|
||||
for _, t := range x {
|
||||
vis.Walk(t)
|
||||
}
|
||||
case Set:
|
||||
for _, t := range x.Slice() {
|
||||
vis.Walk(t)
|
||||
}
|
||||
case *ArrayComprehension:
|
||||
vis.Walk(x.Term)
|
||||
vis.Walk(x.Body)
|
||||
case *ObjectComprehension:
|
||||
vis.Walk(x.Key)
|
||||
vis.Walk(x.Value)
|
||||
vis.Walk(x.Body)
|
||||
case *SetComprehension:
|
||||
vis.Walk(x.Term)
|
||||
vis.Walk(x.Body)
|
||||
case Call:
|
||||
for _, t := range x {
|
||||
vis.Walk(t)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// BeforeAfterVisitor provides a utility to walk over AST nodes using
|
||||
// closures. If the before closure returns true, the visitor will not
|
||||
// walk over AST nodes under x. The after closure is invoked always
|
||||
// after visiting a node.
|
||||
type BeforeAfterVisitor struct {
|
||||
before func(x interface{}) bool
|
||||
after func(x interface{})
|
||||
}
|
||||
|
||||
// NewBeforeAfterVisitor returns a new BeforeAndAfterVisitor that
|
||||
// will invoke the functions before and after AST nodes.
|
||||
func NewBeforeAfterVisitor(before func(x interface{}) bool, after func(x interface{})) *BeforeAfterVisitor {
|
||||
return &BeforeAfterVisitor{before, after}
|
||||
}
|
||||
|
||||
// Walk iterates the AST by calling the functions on the
|
||||
// BeforeAndAfterVisitor before and after recursing. Contrary to the
|
||||
// generic Walk, this does not require allocating the visitor from
|
||||
// heap.
|
||||
func (vis *BeforeAfterVisitor) Walk(x interface{}) {
|
||||
defer vis.after(x)
|
||||
if vis.before(x) {
|
||||
return
|
||||
}
|
||||
|
||||
switch x := x.(type) {
|
||||
case *Module:
|
||||
vis.Walk(x.Package)
|
||||
for _, i := range x.Imports {
|
||||
vis.Walk(i)
|
||||
}
|
||||
for _, r := range x.Rules {
|
||||
vis.Walk(r)
|
||||
}
|
||||
for _, c := range x.Comments {
|
||||
vis.Walk(c)
|
||||
}
|
||||
case *Package:
|
||||
vis.Walk(x.Path)
|
||||
case *Import:
|
||||
vis.Walk(x.Path)
|
||||
vis.Walk(x.Alias)
|
||||
case *Rule:
|
||||
vis.Walk(x.Head)
|
||||
vis.Walk(x.Body)
|
||||
if x.Else != nil {
|
||||
vis.Walk(x.Else)
|
||||
}
|
||||
case *Head:
|
||||
vis.Walk(x.Name)
|
||||
vis.Walk(x.Args)
|
||||
if x.Key != nil {
|
||||
vis.Walk(x.Key)
|
||||
}
|
||||
if x.Value != nil {
|
||||
vis.Walk(x.Value)
|
||||
}
|
||||
case Body:
|
||||
for _, e := range x {
|
||||
vis.Walk(e)
|
||||
}
|
||||
case Args:
|
||||
for _, t := range x {
|
||||
vis.Walk(t)
|
||||
}
|
||||
case *Expr:
|
||||
switch ts := x.Terms.(type) {
|
||||
case *SomeDecl:
|
||||
vis.Walk(ts)
|
||||
case []*Term:
|
||||
for _, t := range ts {
|
||||
vis.Walk(t)
|
||||
}
|
||||
case *Term:
|
||||
vis.Walk(ts)
|
||||
}
|
||||
for i := range x.With {
|
||||
vis.Walk(x.With[i])
|
||||
}
|
||||
case *With:
|
||||
vis.Walk(x.Target)
|
||||
vis.Walk(x.Value)
|
||||
case *Term:
|
||||
vis.Walk(x.Value)
|
||||
case Ref:
|
||||
for _, t := range x {
|
||||
vis.Walk(t)
|
||||
}
|
||||
case Object:
|
||||
for _, k := range x.Keys() {
|
||||
vis.Walk(k)
|
||||
vis.Walk(x.Get(k))
|
||||
}
|
||||
case Array:
|
||||
for _, t := range x {
|
||||
vis.Walk(t)
|
||||
}
|
||||
case Set:
|
||||
for _, t := range x.Slice() {
|
||||
vis.Walk(t)
|
||||
}
|
||||
case *ArrayComprehension:
|
||||
vis.Walk(x.Term)
|
||||
vis.Walk(x.Body)
|
||||
case *ObjectComprehension:
|
||||
vis.Walk(x.Key)
|
||||
vis.Walk(x.Value)
|
||||
vis.Walk(x.Body)
|
||||
case *SetComprehension:
|
||||
vis.Walk(x.Term)
|
||||
vis.Walk(x.Body)
|
||||
case Call:
|
||||
for _, t := range x {
|
||||
vis.Walk(t)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// VarVisitor walks AST nodes under a given node and collects all encountered
// variables. The collected variables can be controlled by specifying
// VarVisitorParams when creating the visitor.
type VarVisitor struct {
	params VarVisitorParams // controls which vars are skipped during the walk
	vars   VarSet           // accumulates every Var encountered
}

// VarVisitorParams contains settings for a VarVisitor.
type VarVisitorParams struct {
	SkipRefHead     bool // skip the first term (head) of references
	SkipRefCallHead bool // skip the operator ref head in call expressions
	SkipObjectKeys  bool // skip vars appearing in object keys (values still walked)
	SkipClosures    bool // do not descend into comprehensions
	SkipWithTarget  bool // skip the target of `with` modifiers (value still walked)
	SkipSets        bool // do not descend into set values
}

// NewVarVisitor returns a new VarVisitor object.
func NewVarVisitor() *VarVisitor {
	return &VarVisitor{
		vars: NewVarSet(),
	}
}

// WithParams sets the parameters in params on vis and returns vis for chaining.
func (vis *VarVisitor) WithParams(params VarVisitorParams) *VarVisitor {
	vis.params = params
	return vis
}

// Vars returns a VarSet that contains collected vars.
func (vis *VarVisitor) Vars() VarSet {
	return vis.vars
}
|
||||
|
||||
// visit applies the skip rules in vis.params to v. It returns true when v has
// been fully handled here (the caller must not descend into it again) and
// false when the generic traversal in Walk should continue. As a side effect,
// any Var encountered is added to vis.vars.
func (vis *VarVisitor) visit(v interface{}) bool {
	if vis.params.SkipObjectKeys {
		if o, ok := v.(Object); ok {
			// Walk only the values; keys are intentionally skipped.
			for _, k := range o.Keys() {
				vis.Walk(o.Get(k))
			}
			return true
		}
	}
	if vis.params.SkipRefHead {
		if r, ok := v.(Ref); ok {
			// Walk everything except the first term (the ref head).
			for _, t := range r[1:] {
				vis.Walk(t)
			}
			return true
		}
	}
	if vis.params.SkipClosures {
		switch v.(type) {
		case *ArrayComprehension, *ObjectComprehension, *SetComprehension:
			// Do not descend into comprehension bodies at all.
			return true
		}
	}
	if vis.params.SkipWithTarget {
		if v, ok := v.(*With); ok {
			// Walk only the value; the `with` target is skipped.
			vis.Walk(v.Value)
			return true
		}
	}
	if vis.params.SkipSets {
		if _, ok := v.(Set); ok {
			return true
		}
	}
	if vis.params.SkipRefCallHead {
		switch v := v.(type) {
		case *Expr:
			if terms, ok := v.Terms.([]*Term); ok {
				// NOTE(review): terms[0].Value is asserted to be a Ref without
				// a check and will panic otherwise — presumably call operators
				// are always refs by construction; confirm upstream.
				for _, t := range terms[0].Value.(Ref)[1:] {
					vis.Walk(t)
				}
				// Walk all operands and any `with` modifiers.
				for i := 1; i < len(terms); i++ {
					vis.Walk(terms[i])
				}
				for _, w := range v.With {
					vis.Walk(w)
				}
				return true
			}
		case Call:
			// Skip the head of the operator ref but walk its remaining
			// terms and all call arguments.
			operator := v[0].Value.(Ref)
			for i := 1; i < len(operator); i++ {
				vis.Walk(operator[i])
			}
			for i := 1; i < len(v); i++ {
				vis.Walk(v[i])
			}
			return true
		}
	}
	// Base case: record any variable we reach.
	if v, ok := v.(Var); ok {
		vis.vars.Add(v)
	}
	return false
}
|
||||
|
||||
// Walk iterates the AST rooted at x and collects variables according to the
// visitor's params. Contrary to the generic Walk, this does not require
// allocating the visitor from heap.
func (vis *VarVisitor) Walk(x interface{}) {
	// visit applies the skip rules; when it returns true the node has
	// already been handled and must not be descended into again.
	if vis.visit(x) {
		return
	}

	switch x := x.(type) {
	case *Module:
		vis.Walk(x.Package)
		for _, i := range x.Imports {
			vis.Walk(i)
		}
		for _, r := range x.Rules {
			vis.Walk(r)
		}
		for _, c := range x.Comments {
			vis.Walk(c)
		}
	case *Package:
		vis.Walk(x.Path)
	case *Import:
		vis.Walk(x.Path)
		vis.Walk(x.Alias)
	case *Rule:
		vis.Walk(x.Head)
		vis.Walk(x.Body)
		if x.Else != nil {
			vis.Walk(x.Else)
		}
	case *Head:
		vis.Walk(x.Name)
		vis.Walk(x.Args)
		if x.Key != nil {
			vis.Walk(x.Key)
		}
		if x.Value != nil {
			vis.Walk(x.Value)
		}
	case Body:
		for _, e := range x {
			vis.Walk(e)
		}
	case Args:
		for _, t := range x {
			vis.Walk(t)
		}
	case *Expr:
		// Expression terms may be a declaration, a call (term slice), or a
		// single term.
		switch ts := x.Terms.(type) {
		case *SomeDecl:
			vis.Walk(ts)
		case []*Term:
			for _, t := range ts {
				vis.Walk(t)
			}
		case *Term:
			vis.Walk(ts)
		}
		for i := range x.With {
			vis.Walk(x.With[i])
		}
	case *With:
		vis.Walk(x.Target)
		vis.Walk(x.Value)
	case *Term:
		vis.Walk(x.Value)
	case Ref:
		for _, t := range x {
			vis.Walk(t)
		}
	case Object:
		for _, k := range x.Keys() {
			vis.Walk(k)
			vis.Walk(x.Get(k))
		}
	case Array:
		for _, t := range x {
			vis.Walk(t)
		}
	case Set:
		for _, t := range x.Slice() {
			vis.Walk(t)
		}
	case *ArrayComprehension:
		vis.Walk(x.Term)
		vis.Walk(x.Body)
	case *ObjectComprehension:
		vis.Walk(x.Key)
		vis.Walk(x.Value)
		vis.Walk(x.Body)
	case *SetComprehension:
		vis.Walk(x.Term)
		vis.Walk(x.Body)
	case Call:
		for _, t := range x {
			vis.Walk(t)
		}
	}
}
|
||||
493
vendor/github.com/open-policy-agent/opa/bundle/bundle.go
generated
vendored
Normal file
493
vendor/github.com/open-policy-agent/opa/bundle/bundle.go
generated
vendored
Normal file
@@ -0,0 +1,493 @@
|
||||
// Copyright 2018 The OPA Authors. All rights reserved.
|
||||
// Use of this source code is governed by an Apache2
|
||||
// license that can be found in the LICENSE file.
|
||||
|
||||
// Package bundle implements bundle loading.
|
||||
package bundle
|
||||
|
||||
import (
|
||||
"archive/tar"
|
||||
"bytes"
|
||||
"compress/gzip"
|
||||
"encoding/json"
|
||||
"fmt"
|
||||
"io"
|
||||
"net/url"
|
||||
"path/filepath"
|
||||
"reflect"
|
||||
"strings"
|
||||
|
||||
"github.com/open-policy-agent/opa/internal/file/archive"
|
||||
"github.com/open-policy-agent/opa/internal/merge"
|
||||
"github.com/open-policy-agent/opa/metrics"
|
||||
|
||||
"github.com/pkg/errors"
|
||||
|
||||
"github.com/open-policy-agent/opa/ast"
|
||||
"github.com/open-policy-agent/opa/util"
|
||||
)
|
||||
|
||||
// Common file extensions and file names.
const (
	RegoExt      = ".rego"        // Rego policy source file extension
	WasmFile     = "/policy.wasm" // path of the compiled policy binary inside a bundle
	manifestExt  = ".manifest"    // manifest file suffix
	dataFile     = "data.json"    // JSON data document file name
	yamlDataFile = "data.yaml"    // YAML data document file name
)

const bundleLimitBytes = (1024 * 1024 * 1024) + 1 // limit bundle reads to 1GB to protect against gzip bombs
|
||||
|
||||
// Bundle represents a loaded bundle. The bundle can contain data and policies.
type Bundle struct {
	Manifest Manifest               // bundle metadata (revision, roots)
	Data     map[string]interface{} // merged data documents from data.json/data.yaml files
	Modules  []ModuleFile           // Rego policy modules found in the bundle
	Wasm     []byte                 // contents of /policy.wasm, if present
}

// Manifest represents the manifest from a bundle. The manifest may contain
// metadata such as the bundle revision.
type Manifest struct {
	Revision string    `json:"revision"`
	Roots    *[]string `json:"roots,omitempty"` // nil means default root (see Init)
}
|
||||
|
||||
// Init initializes the manifest. If you instantiate a manifest
|
||||
// manually, call Init to ensure that the roots are set properly.
|
||||
func (m *Manifest) Init() {
|
||||
if m.Roots == nil {
|
||||
defaultRoots := []string{""}
|
||||
m.Roots = &defaultRoots
|
||||
}
|
||||
}
|
||||
|
||||
// validateAndInjectDefaults initializes the manifest roots (see Init) and
// then validates that the bundle's contents stay within them: no two roots
// may overlap, every module's package path must fall under some root, and
// every data value must be reachable under some root.
func (m *Manifest) validateAndInjectDefaults(b Bundle) error {

	m.Init()

	// Validate roots in bundle.
	roots := *m.Roots

	// Standardize the roots (no starting or trailing slash)
	for i := range roots {
		roots[i] = strings.Trim(roots[i], "/")
	}

	// Pairwise check that no two roots overlap.
	for i := 0; i < len(roots)-1; i++ {
		for j := i + 1; j < len(roots); j++ {
			if RootPathsOverlap(roots[i], roots[j]) {
				return fmt.Errorf("manifest has overlapped roots: %v and %v", roots[i], roots[j])
			}
		}
	}

	// Validate modules in bundle.
	// NOTE(review): strings.HasPrefix compares raw characters rather than
	// path segments, so a root "foo" would also admit a package under
	// "foobar" — confirm whether segment-boundary matching is intended.
	for _, module := range b.Modules {
		found := false
		if path, err := module.Parsed.Package.Path.Ptr(); err == nil {
			for i := range roots {
				if strings.HasPrefix(path, roots[i]) {
					found = true
					break
				}
			}
		}
		if !found {
			return fmt.Errorf("manifest roots %v do not permit '%v' in module '%v'", roots, module.Parsed.Package, module.Path)
		}
	}

	// Validate data in bundle. Walk the data tree: a node is permitted once
	// its path lies under some root; traversal continues through intermediate
	// objects that are on the way down to a root; anything else is an error.
	return dfs(b.Data, "", func(path string, node interface{}) (bool, error) {
		path = strings.Trim(path, "/")
		for i := range roots {
			if strings.HasPrefix(path, roots[i]) {
				return true, nil
			}
		}
		if _, ok := node.(map[string]interface{}); ok {
			for i := range roots {
				if strings.HasPrefix(roots[i], path) {
					return false, nil
				}
			}
		}
		return false, fmt.Errorf("manifest roots %v do not permit data at path '/%s' (hint: check bundle directory structure)", roots, path)
	})
}
|
||||
|
||||
// ModuleFile represents a single module contained a bundle.
type ModuleFile struct {
	Path   string      // file path of the module within the bundle
	Raw    []byte      // raw, unparsed module bytes
	Parsed *ast.Module // parsed representation of Raw
}

// Reader contains the reader to load the bundle from.
type Reader struct {
	loader                DirectoryLoader // source of the bundle's files
	includeManifestInData bool            // whether manifest metadata is copied into Data
	metrics               metrics.Metrics // records parse timings during Read
	baseDir               string          // optional prefix for module file paths
}
|
||||
|
||||
// NewReader returns a new Reader which is configured for reading tarballs.
|
||||
func NewReader(r io.Reader) *Reader {
|
||||
return NewCustomReader(NewTarballLoader(r))
|
||||
}
|
||||
|
||||
// NewCustomReader returns a new Reader configured to use the
|
||||
// specified DirectoryLoader.
|
||||
func NewCustomReader(loader DirectoryLoader) *Reader {
|
||||
nr := Reader{
|
||||
loader: loader,
|
||||
metrics: metrics.New(),
|
||||
}
|
||||
return &nr
|
||||
}
|
||||
|
||||
// IncludeManifestInData sets whether the manifest metadata should be
// included in the bundle's data.
func (r *Reader) IncludeManifestInData(includeManifestInData bool) *Reader {
	r.includeManifestInData = includeManifestInData
	return r
}

// WithMetrics sets the metrics object to be used while loading bundles.
func (r *Reader) WithMetrics(m metrics.Metrics) *Reader {
	r.metrics = m
	return r
}

// WithBaseDir sets a base directory for file paths of loaded Rego
// modules. This will *NOT* affect the loaded path of data files.
func (r *Reader) WithBaseDir(dir string) *Reader {
	r.baseDir = dir
	return r
}
|
||||
|
||||
// Read returns a new Bundle loaded from the reader. Each file produced by
// the DirectoryLoader is dispatched on its name: *.rego files are parsed as
// modules, /policy.wasm becomes the wasm payload, data.json / data.yaml
// documents are merged into the data tree keyed by their directory, and
// *.manifest is decoded into the manifest. The manifest is validated
// against the loaded contents before returning.
func (r *Reader) Read() (Bundle, error) {

	var bundle Bundle

	bundle.Data = map[string]interface{}{}

	for {
		f, err := r.loader.NextFile()
		if err == io.EOF {
			break
		}
		if err != nil {
			return bundle, errors.Wrap(err, "bundle read failed")
		}

		// Bound each file read to bundleLimitBytes to protect against
		// gzip bombs.
		var buf bytes.Buffer
		n, err := f.Read(&buf, bundleLimitBytes)
		f.Close() // always close, even on error
		if err != nil && err != io.EOF {
			return bundle, err
		} else if err == nil && n >= bundleLimitBytes {
			return bundle, fmt.Errorf("bundle exceeded max size (%v bytes)", bundleLimitBytes-1)
		}

		// Normalize the paths to use `/` separators
		path := filepath.ToSlash(f.Path())

		if strings.HasSuffix(path, RegoExt) {
			fullPath := r.fullPath(path)
			r.metrics.Timer(metrics.RegoModuleParse).Start()
			module, err := ast.ParseModule(fullPath, buf.String())
			r.metrics.Timer(metrics.RegoModuleParse).Stop()
			if err != nil {
				return bundle, err
			}

			mf := ModuleFile{
				Path:   fullPath,
				Raw:    buf.Bytes(),
				Parsed: module,
			}
			bundle.Modules = append(bundle.Modules, mf)

		} else if path == WasmFile {
			bundle.Wasm = buf.Bytes()

		} else if filepath.Base(path) == dataFile {
			var value interface{}

			r.metrics.Timer(metrics.RegoDataParse).Start()
			err := util.NewJSONDecoder(&buf).Decode(&value)
			r.metrics.Timer(metrics.RegoDataParse).Stop()

			if err != nil {
				return bundle, errors.Wrapf(err, "bundle load failed on %v", r.fullPath(path))
			}

			if err := insertValue(&bundle, path, value); err != nil {
				return bundle, err
			}

		} else if filepath.Base(path) == yamlDataFile {

			var value interface{}

			r.metrics.Timer(metrics.RegoDataParse).Start()
			err := util.Unmarshal(buf.Bytes(), &value)
			r.metrics.Timer(metrics.RegoDataParse).Stop()

			if err != nil {
				return bundle, errors.Wrapf(err, "bundle load failed on %v", r.fullPath(path))
			}

			if err := insertValue(&bundle, path, value); err != nil {
				return bundle, err
			}

		} else if strings.HasSuffix(path, manifestExt) {
			if err := util.NewJSONDecoder(&buf).Decode(&bundle.Manifest); err != nil {
				return bundle, errors.Wrap(err, "bundle load failed on manifest decode")
			}
		}
	}

	if err := bundle.Manifest.validateAndInjectDefaults(bundle); err != nil {
		return bundle, err
	}

	if r.includeManifestInData {
		var metadata map[string]interface{}

		// Round-trip the manifest through JSON to get a generic map that can
		// be merged into the data tree.
		b, err := json.Marshal(&bundle.Manifest)
		if err != nil {
			return bundle, errors.Wrap(err, "bundle load failed on manifest marshal")
		}

		err = util.UnmarshalJSON(b, &metadata)
		if err != nil {
			return bundle, errors.Wrap(err, "bundle load failed on manifest unmarshal")
		}

		// For backwards compatibility always write to the old unnamed manifest path
		// This will *not* be correct if >1 bundle is in use...
		if err := bundle.insert(legacyManifestStoragePath, metadata); err != nil {
			return bundle, errors.Wrapf(err, "bundle load failed on %v", legacyRevisionStoragePath)
		}
	}

	return bundle, nil
}
|
||||
|
||||
func (r *Reader) fullPath(path string) string {
|
||||
if r.baseDir != "" {
|
||||
path = filepath.Join(r.baseDir, path)
|
||||
}
|
||||
return path
|
||||
}
|
||||
|
||||
// Write serializes the Bundle and writes it to w as a gzipped tarball:
// the data document to data.json, each module to its own path, the wasm
// payload (if any) to WasmFile, and the manifest to the manifestExt name.
func Write(w io.Writer, bundle Bundle) error {
	gw := gzip.NewWriter(w)
	tw := tar.NewWriter(gw)

	var buf bytes.Buffer

	if err := json.NewEncoder(&buf).Encode(bundle.Data); err != nil {
		return err
	}

	if err := archive.WriteFile(tw, "data.json", buf.Bytes()); err != nil {
		return err
	}

	for _, module := range bundle.Modules {
		if err := archive.WriteFile(tw, module.Path, module.Raw); err != nil {
			return err
		}
	}

	if err := writeWasm(tw, bundle); err != nil {
		return err
	}

	if err := writeManifest(tw, bundle); err != nil {
		return err
	}

	// Close the tar stream before the gzip stream so both flush completely.
	if err := tw.Close(); err != nil {
		return err
	}

	return gw.Close()
}

// writeWasm writes the bundle's wasm payload into the archive; it is a
// no-op when the bundle carries no wasm.
func writeWasm(tw *tar.Writer, bundle Bundle) error {
	if len(bundle.Wasm) == 0 {
		return nil
	}

	return archive.WriteFile(tw, WasmFile, bundle.Wasm)
}

// writeManifest JSON-encodes the bundle manifest and writes it into the
// archive under the manifestExt file name.
func writeManifest(tw *tar.Writer, bundle Bundle) error {

	var buf bytes.Buffer

	if err := json.NewEncoder(&buf).Encode(bundle.Manifest); err != nil {
		return err
	}

	return archive.WriteFile(tw, manifestExt, buf.Bytes())
}
|
||||
|
||||
// ParsedModules returns a map of parsed modules with names that are
// unique and human readable for the given a bundle name.
func (b *Bundle) ParsedModules(bundleName string) map[string]*ast.Module {

	mods := make(map[string]*ast.Module, len(b.Modules))

	for _, mf := range b.Modules {
		mods[modulePathWithPrefix(bundleName, mf.Path)] = mf.Parsed
	}

	return mods
}

// Equal returns true if this bundle's contents equal the other bundle's
// contents. Data is compared deeply; modules are compared order-sensitively
// by path, parsed form, and raw bytes; wasm payloads must match.
func (b Bundle) Equal(other Bundle) bool {
	if !reflect.DeepEqual(b.Data, other.Data) {
		return false
	}
	if len(b.Modules) != len(other.Modules) {
		return false
	}
	for i := range b.Modules {
		if b.Modules[i].Path != other.Modules[i].Path {
			return false
		}
		if !b.Modules[i].Parsed.Equal(other.Modules[i].Parsed) {
			return false
		}
		if !bytes.Equal(b.Modules[i].Raw, other.Modules[i].Raw) {
			return false
		}
	}
	// Exactly one nil wasm payload means unequal, even though bytes.Equal
	// would treat nil and empty slices as the same.
	if (b.Wasm == nil && other.Wasm != nil) || (b.Wasm != nil && other.Wasm == nil) {
		return false
	}

	return bytes.Equal(b.Wasm, other.Wasm)
}
|
||||
|
||||
// insert merges value into the bundle's data tree at the location given by
// key (a sequence of path segments; an empty key merges at the root).
func (b *Bundle) insert(key []string, value interface{}) error {
	// Build an object with the full structure for the value
	obj, err := mktree(key, value)
	if err != nil {
		return err
	}

	// Merge the new data in with the current bundle data object
	merged, ok := merge.InterfaceMaps(b.Data, obj)
	if !ok {
		return fmt.Errorf("failed to insert data file from path %s", filepath.Join(key...))
	}

	b.Data = merged

	return nil
}
|
||||
|
||||
// mktree wraps value in nested single-key objects following path, so that
// mktree([]string{"a","b"}, v) yields {"a": {"b": v}}. A zero-length path
// returns value itself, which must then already be an object.
func mktree(path []string, value interface{}) (map[string]interface{}, error) {
	if len(path) == 0 {
		// For 0 length path the value is the full tree.
		obj, ok := value.(map[string]interface{})
		if !ok {
			return nil, fmt.Errorf("root value must be object")
		}
		return obj, nil
	}

	// Wrap the value from the innermost segment outward.
	node := value
	for i := len(path) - 1; i >= 1; i-- {
		node = map[string]interface{}{path[i]: node}
	}
	return map[string]interface{}{path[0]: node}, nil
}
|
||||
|
||||
// RootPathsOverlap takes in two bundle root paths and returns
// true if they overlap, i.e. one is a segment-wise prefix of the other.
// Empty roots cover everything and therefore overlap with any path.
func RootPathsOverlap(pathA string, pathB string) bool {
	// Special case for empty prefixes, they always overlap
	if pathA == "" || pathB == "" {
		return true
	}

	segsA := strings.Split(pathA, "/")
	segsB := strings.Split(pathB, "/")

	// Compare segment by segment over the shorter of the two paths; any
	// divergence means no overlap.
	limit := len(segsA)
	if len(segsB) < limit {
		limit = len(segsB)
	}
	for i := 0; i < limit; i++ {
		if segsA[i] != segsB[i] {
			return false
		}
	}
	return true
}
|
||||
|
||||
// insertValue merges a parsed data document into the bundle's data tree,
// keyed by the directory portion of the file's path.
func insertValue(b *Bundle, path string, value interface{}) error {

	// Remove leading / and . characters from the directory path. If the bundle
	// was written with OPA then the paths will contain a leading slash. On the
	// other hand, if the path is empty, filepath.Dir will return '.'.
	// Note: filepath.Dir can return paths with '\' separators, always use
	// filepath.ToSlash to keep them normalized.
	dirpath := strings.TrimLeft(filepath.ToSlash(filepath.Dir(path)), "/.")
	var key []string
	if dirpath != "" {
		key = strings.Split(dirpath, "/")
	}
	if err := b.insert(key, value); err != nil {
		return errors.Wrapf(err, "bundle load failed on %v", path)
	}

	return nil
}
|
||||
|
||||
// dfs performs a depth-first traversal of a generic JSON-like tree, calling
// fn on every node with its slash-separated path. fn returning true prunes
// the subtree below that node; a non-nil error aborts the traversal.
func dfs(value interface{}, path string, fn func(string, interface{}) (bool, error)) error {
	stop, err := fn(path, value)
	if err != nil || stop {
		return err
	}

	// Only object nodes have children to descend into.
	obj, ok := value.(map[string]interface{})
	if !ok {
		return nil
	}
	for key, child := range obj {
		if err := dfs(child, path+"/"+key, fn); err != nil {
			return err
		}
	}
	return nil
}
|
||||
|
||||
// modulePathWithPrefix builds a unique, human-readable name for a module by
// joining the bundle name (with any URL scheme stripped) and the module path.
func modulePathWithPrefix(bundleName string, modulePath string) string {
	// Default prefix is just the bundle name.
	prefix := bundleName

	// Bundle names are sometimes just file paths, some of which
	// are full urls (file:///foo/). Parse these and only use the path.
	if parsed, err := url.Parse(bundleName); err == nil {
		prefix = filepath.Join(parsed.Host, parsed.Path)
	}

	return filepath.Join(prefix, modulePath)
}
|
||||
166
vendor/github.com/open-policy-agent/opa/bundle/file.go
generated
vendored
Normal file
166
vendor/github.com/open-policy-agent/opa/bundle/file.go
generated
vendored
Normal file
@@ -0,0 +1,166 @@
|
||||
package bundle
|
||||
|
||||
import (
|
||||
"archive/tar"
|
||||
"compress/gzip"
|
||||
"io"
|
||||
"os"
|
||||
"path/filepath"
|
||||
"strings"
|
||||
"sync"
|
||||
|
||||
"github.com/pkg/errors"
|
||||
)
|
||||
|
||||
// Descriptor contains information about a file and
// can be used to read the file contents.
type Descriptor struct {
	path      string
	reader    io.Reader
	closer    io.Closer  // optional; nil when the reader needs no closing
	closeOnce *sync.Once // guards closer so Close is idempotent
}

// newDescriptor returns a Descriptor over reader with no attached closer.
func newDescriptor(path string, reader io.Reader) *Descriptor {
	return &Descriptor{
		path:   path,
		reader: reader,
	}
}

// withCloser attaches a closer to the descriptor and arms the sync.Once
// that makes Close idempotent.
func (d *Descriptor) withCloser(closer io.Closer) *Descriptor {
	d.closer = closer
	d.closeOnce = new(sync.Once)
	return d
}

// Path returns the path of the file.
func (d *Descriptor) Path() string {
	return d.path
}

// Read will read all the contents from the file the Descriptor refers to
// into the dest writer up n bytes. Will return an io.EOF error
// if EOF is encountered before n bytes are read.
func (d *Descriptor) Read(dest io.Writer, n int64) (int64, error) {
	n, err := io.CopyN(dest, d.reader, n)
	return n, err
}

// Close the file, on some Loader implementations this might be a no-op.
// It should *always* be called regardless of file.
func (d *Descriptor) Close() error {
	var err error
	if d.closer != nil {
		// sync.Once guarantees the underlying closer runs at most once even
		// if Close is called repeatedly.
		d.closeOnce.Do(func() {
			err = d.closer.Close()
		})
	}
	return err
}
|
||||
|
||||
// DirectoryLoader defines an interface which can be used to load
// files from a directory by iterating over each one in the tree.
type DirectoryLoader interface {
	// NextFile must return io.EOF if there is no next value. The returned
	// descriptor should *always* be closed when no longer needed.
	NextFile() (*Descriptor, error)
}
|
||||
|
||||
// dirLoader implements DirectoryLoader over a filesystem directory tree.
type dirLoader struct {
	root  string   // directory the loader is rooted at
	files []string // lazily-built list of regular files under root
	idx   int      // index of the next file to return
}

// NewDirectoryLoader returns a basic DirectoryLoader implementation
// that will load files from a given root directory path.
func NewDirectoryLoader(root string) DirectoryLoader {
	d := dirLoader{
		root: root,
	}
	return &d
}

// NextFile iterates to the next file in the directory tree
// and returns a file Descriptor for the file.
func (d *dirLoader) NextFile() (*Descriptor, error) {
	// build a list of all files we will iterate over and read, but only one time
	if d.files == nil {
		d.files = []string{}
		// NOTE(review): the walk callback ignores its incoming err argument,
		// so unreadable directories are silently skipped — confirm that
		// best-effort listing is the intent.
		err := filepath.Walk(d.root, func(path string, info os.FileInfo, err error) error {
			if info != nil && info.Mode().IsRegular() {
				d.files = append(d.files, filepath.ToSlash(path))
			}
			return nil
		})
		if err != nil {
			return nil, errors.Wrap(err, "failed to list files")
		}
	}

	// If done reading files then just return io.EOF
	// errors for each NextFile() call
	if d.idx >= len(d.files) {
		return nil, io.EOF
	}

	fileName := d.files[d.idx]
	d.idx++
	fh, err := os.Open(fileName)
	if err != nil {
		return nil, errors.Wrapf(err, "failed to open file %s", fileName)
	}

	// Trim off the root directory and return path as if chrooted
	cleanedPath := strings.TrimPrefix(fileName, d.root)
	if !strings.HasPrefix(cleanedPath, "/") {
		cleanedPath = "/" + cleanedPath
	}

	// Attach the file handle as closer so callers release it via Close.
	f := newDescriptor(cleanedPath, fh).withCloser(fh)
	return f, nil
}
|
||||
|
||||
// tarballLoader implements DirectoryLoader over a gzipped tar stream.
type tarballLoader struct {
	r  io.Reader   // raw gzipped tarball stream
	tr *tar.Reader // lazily-initialized tar reader over the gunzipped stream
}

// NewTarballLoader returns a new DirectoryLoader that reads
// files out of a gzipped tar archive.
func NewTarballLoader(r io.Reader) DirectoryLoader {
	l := tarballLoader{
		r: r,
	}
	return &l
}

// NextFile iterates to the next file in the directory tree
// and returns a file Descriptor for the file.
func (t *tarballLoader) NextFile() (*Descriptor, error) {
	// Initialize the gzip/tar readers on the first call.
	if t.tr == nil {
		gr, err := gzip.NewReader(t.r)
		if err != nil {
			return nil, errors.Wrap(err, "archive read failed")
		}

		t.tr = tar.NewReader(gr)
	}

	for {
		header, err := t.tr.Next()
		// Eventually we will get an io.EOF error when finished
		// iterating through the archive
		if err != nil {
			return nil, err
		}

		// Keep iterating on the archive until we find a normal file
		if header.Typeflag == tar.TypeReg {
			// no need to close this descriptor after reading: it reads
			// directly from the shared tar stream and owns no handle
			f := newDescriptor(header.Name, t.tr)
			return f, nil
		}
	}
}
|
||||
526
vendor/github.com/open-policy-agent/opa/bundle/store.go
generated
vendored
Normal file
526
vendor/github.com/open-policy-agent/opa/bundle/store.go
generated
vendored
Normal file
@@ -0,0 +1,526 @@
|
||||
// Copyright 2019 The OPA Authors. All rights reserved.
|
||||
// Use of this source code is governed by an Apache2
|
||||
// license that can be found in the LICENSE file.
|
||||
|
||||
package bundle
|
||||
|
||||
import (
|
||||
"context"
|
||||
"fmt"
|
||||
"strings"
|
||||
|
||||
"github.com/open-policy-agent/opa/metrics"
|
||||
|
||||
"github.com/open-policy-agent/opa/ast"
|
||||
|
||||
"github.com/open-policy-agent/opa/storage"
|
||||
"github.com/open-policy-agent/opa/util"
|
||||
)
|
||||
|
||||
// bundlesBasePath is the root storage path under which per-bundle metadata
// (manifest, revision, roots) is recorded.
var bundlesBasePath = storage.MustParsePath("/system/bundles")

// Note: As needed these helpers could be memoized.
|
||||
|
||||
// NOTE(review): these helpers append to the shared bundlesBasePath slice.
// That is only safe while the base path has no spare capacity (so append
// allocates a fresh backing array) — presumably true for the value produced
// by storage.MustParsePath; verify before memoizing or pre-growing it.

// ManifestStoragePath is the storage path used for the given named bundle manifest.
func ManifestStoragePath(name string) storage.Path {
	return append(bundlesBasePath, name, "manifest")
}

// namedBundlePath returns the storage path holding all metadata for the named bundle.
func namedBundlePath(name string) storage.Path {
	return append(bundlesBasePath, name)
}

// rootsPath returns the storage path of the named bundle's manifest roots.
func rootsPath(name string) storage.Path {
	return append(bundlesBasePath, name, "manifest", "roots")
}

// revisionPath returns the storage path of the named bundle's manifest revision.
func revisionPath(name string) storage.Path {
	return append(bundlesBasePath, name, "manifest", "revision")
}
|
||||
|
||||
// ReadBundleNamesFromStore will return a list of bundle names which have had their metadata stored.
|
||||
func ReadBundleNamesFromStore(ctx context.Context, store storage.Store, txn storage.Transaction) ([]string, error) {
|
||||
value, err := store.Read(ctx, txn, bundlesBasePath)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
bundleMap, ok := value.(map[string]interface{})
|
||||
if !ok {
|
||||
return nil, fmt.Errorf("corrupt manifest roots")
|
||||
}
|
||||
|
||||
bundles := make([]string, len(bundleMap))
|
||||
idx := 0
|
||||
for name := range bundleMap {
|
||||
bundles[idx] = name
|
||||
idx++
|
||||
}
|
||||
return bundles, nil
|
||||
}
|
||||
|
||||
// WriteManifestToStore will write the manifest into the storage. This function is called when
// the bundle is activated.
func WriteManifestToStore(ctx context.Context, store storage.Store, txn storage.Transaction, name string, manifest Manifest) error {
	return write(ctx, store, txn, ManifestStoragePath(name), manifest)
}

// write converts the manifest to a generic document, ensures the parent
// directory exists in the store, and writes the value at path.
func write(ctx context.Context, store storage.Store, txn storage.Transaction, path storage.Path, manifest Manifest) error {
	// Convert the struct into plain map/slice/scalar values — presumably a
	// JSON round trip, given the helper's name; confirm in util.RoundTrip.
	var value interface{} = manifest
	if err := util.RoundTrip(&value); err != nil {
		return err
	}

	// The parent of the write target must exist before the AddOp.
	var dir []string
	if len(path) > 1 {
		dir = path[:len(path)-1]
	}

	if err := storage.MakeDir(ctx, store, txn, dir); err != nil {
		return err
	}

	return store.Write(ctx, txn, storage.AddOp, path, value)
}
|
||||
|
||||
// EraseManifestFromStore will remove the manifest from storage. This function is called
// when the bundle is deactivated. Note that the path removed is the bundle's
// entire metadata subtree, not just the manifest leaf. A NotFound error is
// tolerated: erasing a bundle that was never stored is a no-op.
func EraseManifestFromStore(ctx context.Context, store storage.Store, txn storage.Transaction, name string) error {
	path := namedBundlePath(name)
	err := store.Write(ctx, txn, storage.RemoveOp, path, nil)
	if err != nil && !storage.IsNotFound(err) {
		return err
	}
	return nil
}
|
||||
|
||||
// ReadBundleRootsFromStore returns the roots in the specified bundle.
// If the bundle is not activated, this function will return
// storage NotFound error.
func ReadBundleRootsFromStore(ctx context.Context, store storage.Store, txn storage.Transaction, name string) ([]string, error) {
	value, err := store.Read(ctx, txn, rootsPath(name))
	if err != nil {
		return nil, err
	}

	// The stored document is generic JSON, so it must be a []interface{}
	// whose elements are all strings.
	sl, ok := value.([]interface{})
	if !ok {
		return nil, fmt.Errorf("corrupt manifest roots")
	}

	roots := make([]string, len(sl))

	for i := range sl {
		roots[i], ok = sl[i].(string)
		if !ok {
			return nil, fmt.Errorf("corrupt manifest root")
		}
	}

	return roots, nil
}
|
||||
|
||||
// ReadBundleRevisionFromStore returns the revision in the specified bundle.
// If the bundle is not activated, this function will return
// storage NotFound error.
func ReadBundleRevisionFromStore(ctx context.Context, store storage.Store, txn storage.Transaction, name string) (string, error) {
	return readRevisionFromStore(ctx, store, txn, revisionPath(name))
}

// readRevisionFromStore reads the document at path and asserts that it is a
// string revision value.
func readRevisionFromStore(ctx context.Context, store storage.Store, txn storage.Transaction, path storage.Path) (string, error) {
	value, err := store.Read(ctx, txn, path)
	if err != nil {
		return "", err
	}

	str, ok := value.(string)
	if !ok {
		return "", fmt.Errorf("corrupt manifest revision")
	}

	return str, nil
}
|
||||
|
||||
// ActivateOpts defines options for the Activate API call.
type ActivateOpts struct {
	Ctx          context.Context
	Store        storage.Store
	Txn          storage.Transaction
	Compiler     *ast.Compiler
	Metrics      metrics.Metrics
	Bundles      map[string]*Bundle     // Optional
	ExtraModules map[string]*ast.Module // Optional

	// legacy presumably selects the old unnamed storage layout; confirm
	// against activateBundles.
	legacy bool
}

// Activate the bundle(s) by loading into the given Store. This will load policies, data, and record
// the manifest in storage. The compiler provided will have had the polices compiled on it.
func Activate(opts *ActivateOpts) error {
	opts.legacy = false
	return activateBundles(opts)
}

// DeactivateOpts defines options for the Deactivate API call.
type DeactivateOpts struct {
	Ctx         context.Context
	Store       storage.Store
	Txn         storage.Transaction
	BundleNames map[string]struct{}
}

// Deactivate the bundle(s). This will erase associated data, policies, and the manifest entry from the store.
func Deactivate(opts *DeactivateOpts) error {
	// Collect the roots of every named bundle so their data can be erased;
	// bundles that were never activated (NotFound) are skipped.
	erase := map[string]struct{}{}
	for name := range opts.BundleNames {
		if roots, err := ReadBundleRootsFromStore(opts.Ctx, opts.Store, opts.Txn, name); err == nil {
			for _, root := range roots {
				erase[root] = struct{}{}
			}
		} else if !storage.IsNotFound(err) {
			return err
		}
	}
	_, err := eraseBundles(opts.Ctx, opts.Store, opts.Txn, opts.BundleNames, erase)
	return err
}
|
||||
|
||||
// activateBundles performs the shared activation flow: it erases data,
// policies, and manifests under the old and new roots, writes the new bundle
// data, compiles and upserts all modules, and finally records the manifests.
// All work happens inside the caller-supplied transaction.
func activateBundles(opts *ActivateOpts) error {

	// Build collections of bundle names, modules, and roots to erase
	erase := map[string]struct{}{}
	names := map[string]struct{}{}

	for name, b := range opts.Bundles {
		names[name] = struct{}{}

		// Old roots of a bundle being replaced must be erased too; a missing
		// manifest just means the bundle was never activated.
		if roots, err := ReadBundleRootsFromStore(opts.Ctx, opts.Store, opts.Txn, name); err == nil {
			for _, root := range roots {
				erase[root] = struct{}{}
			}
		} else if !storage.IsNotFound(err) {
			return err
		}

		// Erase data at new roots to prepare for writing the new data
		for _, root := range *b.Manifest.Roots {
			erase[root] = struct{}{}
		}
	}

	// Before changing anything make sure the roots don't collide with any
	// other bundles that already are activated or other bundles being activated.
	err := hasRootsOverlap(opts.Ctx, opts.Store, opts.Txn, opts.Bundles)
	if err != nil {
		return err
	}

	// Erase data and policies at new + old roots, and remove the old
	// manifests before activating the new bundles.
	remaining, err := eraseBundles(opts.Ctx, opts.Store, opts.Txn, names, erase)
	if err != nil {
		return err
	}

	for _, b := range opts.Bundles {
		// Write data from each new bundle into the store. Only write under the
		// roots contained in their manifest. This should be done *before* the
		// policies so that path conflict checks can occur.
		if err := writeData(opts.Ctx, opts.Store, opts.Txn, *b.Manifest.Roots, b.Data); err != nil {
			return err
		}
	}

	// Write and compile the modules all at once to avoid having to re-do work.
	remainingAndExtra := make(map[string]*ast.Module)
	for name, mod := range remaining {
		remainingAndExtra[name] = mod
	}
	for name, mod := range opts.ExtraModules {
		remainingAndExtra[name] = mod
	}

	err = writeModules(opts.Ctx, opts.Store, opts.Txn, opts.Compiler, opts.Metrics, opts.Bundles, remainingAndExtra, opts.legacy)
	if err != nil {
		return err
	}

	for name, b := range opts.Bundles {
		// Always write manifests to the named location. If the plugin is in the older style config
		// then also write to the old legacy unnamed location.
		if err := WriteManifestToStore(opts.Ctx, opts.Store, opts.Txn, name, b.Manifest); err != nil {
			return err
		}
		if opts.legacy {
			if err := LegacyWriteManifestToStore(opts.Ctx, opts.Store, opts.Txn, b.Manifest); err != nil {
				return err
			}
		}
	}

	return nil
}
|
||||
|
||||
// erase bundles by name and roots. This will clear all policies and data at its roots and remove its
// manifest from storage. Policies that were parsed but not deleted are
// returned keyed by policy id so callers can recompile them.
func eraseBundles(ctx context.Context, store storage.Store, txn storage.Transaction, names map[string]struct{}, roots map[string]struct{}) (map[string]*ast.Module, error) {

	if err := eraseData(ctx, store, txn, roots); err != nil {
		return nil, err
	}

	remaining, err := erasePolicies(ctx, store, txn, roots)
	if err != nil {
		return nil, err
	}

	for name := range names {
		if err := EraseManifestFromStore(ctx, store, txn, name); err != nil && !storage.IsNotFound(err) {
			return nil, err
		}

		// NOTE(review): the legacy (unnamed) manifest location does not depend
		// on name, so this erase repeats once per name — harmless but
		// redundant.
		if err := LegacyEraseManifestFromStore(ctx, store, txn); err != nil && !storage.IsNotFound(err) {
			return nil, err
		}
	}

	return remaining, nil
}
|
||||
|
||||
func eraseData(ctx context.Context, store storage.Store, txn storage.Transaction, roots map[string]struct{}) error {
|
||||
for root := range roots {
|
||||
path, ok := storage.ParsePathEscaped("/" + root)
|
||||
if !ok {
|
||||
return fmt.Errorf("manifest root path invalid: %v", root)
|
||||
}
|
||||
if len(path) > 0 {
|
||||
if err := store.Write(ctx, txn, storage.RemoveOp, path, nil); err != nil {
|
||||
if !storage.IsNotFound(err) {
|
||||
return err
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
// erasePolicies deletes every stored policy whose package path falls under one
// of the given roots. Policies that survive are parsed and returned keyed by
// policy id so they can be recompiled alongside the incoming bundles.
func erasePolicies(ctx context.Context, store storage.Store, txn storage.Transaction, roots map[string]struct{}) (map[string]*ast.Module, error) {

	ids, err := store.ListPolicies(ctx, txn)
	if err != nil {
		return nil, err
	}

	remaining := map[string]*ast.Module{}

	for _, id := range ids {
		bs, err := store.GetPolicy(ctx, txn, id)
		if err != nil {
			return nil, err
		}
		// Parse the stored policy to recover its package path; parsing also
		// yields the module returned for surviving policies.
		module, err := ast.ParseModule(id, string(bs))
		if err != nil {
			return nil, err
		}
		path, err := module.Package.Path.Ptr()
		if err != nil {
			return nil, err
		}
		deleted := false
		for root := range roots {
			// NOTE(review): plain string prefix matching — root "a" would also
			// match path "ab/…"; confirm whether segment-aware matching is
			// intended here.
			if strings.HasPrefix(path, root) {
				if err := store.DeletePolicy(ctx, txn, id); err != nil {
					return nil, err
				}
				deleted = true
				break
			}
		}
		if !deleted {
			remaining[id] = module
		}
	}

	return remaining, nil
}
|
||||
|
||||
// writeData copies the portions of data that fall under each manifest root
// into the store. Roots with no corresponding value in data are skipped.
func writeData(ctx context.Context, store storage.Store, txn storage.Transaction, roots []string, data map[string]interface{}) error {
	for _, root := range roots {
		path, ok := storage.ParsePathEscaped("/" + root)
		if !ok {
			return fmt.Errorf("manifest root path invalid: %v", root)
		}
		if value, ok := lookup(path, data); ok {
			if len(path) > 0 {
				// Ensure the parent path exists before writing the value.
				if err := storage.MakeDir(ctx, store, txn, path[:len(path)-1]); err != nil {
					return err
				}
			}
			if err := store.Write(ctx, txn, storage.AddOp, path, value); err != nil {
				return err
			}
		}
	}
	return nil
}
|
||||
|
||||
// writeModules compiles the new bundle modules together with the remaining and
// extra modules as a single set and, on success, upserts every bundle module
// into the store. Compile time is recorded on the supplied metrics object.
func writeModules(ctx context.Context, store storage.Store, txn storage.Transaction, compiler *ast.Compiler, m metrics.Metrics, bundles map[string]*Bundle, extraModules map[string]*ast.Module, legacy bool) error {

	m.Timer(metrics.RegoModuleCompile).Start()
	defer m.Timer(metrics.RegoModuleCompile).Stop()

	modules := map[string]*ast.Module{}

	// preserve any modules already on the compiler
	for name, module := range compiler.Modules {
		modules[name] = module
	}

	// preserve any modules passed in from the store
	for name, module := range extraModules {
		modules[name] = module
	}

	// include all the new bundle modules
	for bundleName, b := range bundles {
		if legacy {
			// Legacy mode keys modules by their unprefixed file path.
			for _, mf := range b.Modules {
				modules[mf.Path] = mf.Parsed
			}
		} else {
			for name, module := range b.ParsedModules(bundleName) {
				modules[name] = module
			}
		}
	}

	if compiler.Compile(modules); compiler.Failed() {
		return compiler.Errors
	}
	for bundleName, b := range bundles {
		for _, mf := range b.Modules {
			var path string

			// For backwards compatibility, in legacy mode, upsert policies to
			// the unprefixed path.
			if legacy {
				path = mf.Path
			} else {
				path = modulePathWithPrefix(bundleName, mf.Path)
			}

			if err := store.UpsertPolicy(ctx, txn, path, mf.Raw); err != nil {
				return err
			}
		}
	}
	return nil
}
|
||||
|
||||
func lookup(path storage.Path, data map[string]interface{}) (interface{}, bool) {
|
||||
if len(path) == 0 {
|
||||
return data, true
|
||||
}
|
||||
for i := 0; i < len(path)-1; i++ {
|
||||
value, ok := data[path[i]]
|
||||
if !ok {
|
||||
return nil, false
|
||||
}
|
||||
obj, ok := value.(map[string]interface{})
|
||||
if !ok {
|
||||
return nil, false
|
||||
}
|
||||
data = obj
|
||||
}
|
||||
value, ok := data[path[len(path)-1]]
|
||||
return value, ok
|
||||
}
|
||||
|
||||
// hasRootsOverlap returns an error when the roots of the bundles being
// activated overlap with each other or with bundles already stored. For a
// bundle being re-activated, its stored roots are replaced by the new ones
// before checking.
func hasRootsOverlap(ctx context.Context, store storage.Store, txn storage.Transaction, bundles map[string]*Bundle) error {
	collisions := map[string][]string{}
	allBundles, err := ReadBundleNamesFromStore(ctx, store, txn)
	if err != nil && !storage.IsNotFound(err) {
		return err
	}

	allRoots := map[string][]string{}

	// Build a map of roots for existing bundles already in the system
	for _, name := range allBundles {
		roots, err := ReadBundleRootsFromStore(ctx, store, txn, name)
		if err != nil && !storage.IsNotFound(err) {
			return err
		}
		allRoots[name] = roots
	}

	// Add in any bundles that are being activated, overwrite existing roots
	// with new ones where bundles are in both groups.
	for name, bundle := range bundles {
		allRoots[name] = *bundle.Manifest.Roots
	}

	// Now check for each new bundle if it conflicts with any of the others
	for name, bundle := range bundles {
		for otherBundle, otherRoots := range allRoots {
			if name == otherBundle {
				// Skip the current bundle being checked
				continue
			}

			// Compare the "new" roots with other existing (or a different bundles new roots)
			for _, newRoot := range *bundle.Manifest.Roots {
				for _, otherRoot := range otherRoots {
					if RootPathsOverlap(newRoot, otherRoot) {
						collisions[otherBundle] = append(collisions[otherBundle], newRoot)
					}
				}
			}
		}
	}

	if len(collisions) > 0 {
		var bundleNames []string
		for name := range collisions {
			bundleNames = append(bundleNames, name)
		}
		// NOTE(review): map iteration order is random, so the order of names
		// in this message varies from run to run.
		return fmt.Errorf("detected overlapping roots in bundle manifest with: %s", bundleNames)
	}
	return nil
}
|
||||
|
||||
// Helpers for the older single (unnamed) bundle style manifest storage.

// legacyManifestStoragePath is the older unnamed bundle path for manifests to be stored.
// Deprecated: Use ManifestStoragePath and named bundles instead.
var legacyManifestStoragePath = storage.MustParsePath("/system/bundle/manifest")

// legacyRevisionStoragePath locates the revision value inside the legacy manifest.
var legacyRevisionStoragePath = append(legacyManifestStoragePath, "revision")
|
||||
|
||||
// LegacyWriteManifestToStore will write the bundle manifest to the older single (unnamed) bundle manifest location
// (see legacyManifestStoragePath).
// Deprecated: Use WriteManifestToStore and named bundles instead.
func LegacyWriteManifestToStore(ctx context.Context, store storage.Store, txn storage.Transaction, manifest Manifest) error {
	return write(ctx, store, txn, legacyManifestStoragePath, manifest)
}
|
||||
|
||||
// LegacyEraseManifestFromStore will erase the bundle manifest from the older single (unnamed) bundle manifest location.
|
||||
// Deprecated: Use WriteManifestToStore and named bundles instead.
|
||||
func LegacyEraseManifestFromStore(ctx context.Context, store storage.Store, txn storage.Transaction) error {
|
||||
err := store.Write(ctx, txn, storage.RemoveOp, legacyManifestStoragePath, nil)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
// LegacyReadRevisionFromStore will read the bundle manifest revision from the older single (unnamed) bundle manifest
// location (see legacyRevisionStoragePath).
// Deprecated: Use ReadBundleRevisionFromStore and named bundles instead.
func LegacyReadRevisionFromStore(ctx context.Context, store storage.Store, txn storage.Transaction) (string, error) {
	return readRevisionFromStore(ctx, store, txn, legacyRevisionStoragePath)
}
|
||||
|
||||
// ActivateLegacy calls Activate for the bundles but will also write their manifest to the older unnamed store location.
// Deprecated: Use Activate with named bundles instead.
func ActivateLegacy(opts *ActivateOpts) error {
	opts.legacy = true
	return activateBundles(opts)
}
|
||||
25
vendor/github.com/open-policy-agent/opa/internal/compiler/wasm/opa/opa.go
generated
vendored
Normal file
25
vendor/github.com/open-policy-agent/opa/internal/compiler/wasm/opa/opa.go
generated
vendored
Normal file
File diff suppressed because one or more lines are too long
BIN
vendor/github.com/open-policy-agent/opa/internal/compiler/wasm/opa/opa.wasm
generated
vendored
Normal file
BIN
vendor/github.com/open-policy-agent/opa/internal/compiler/wasm/opa/opa.wasm
generated
vendored
Normal file
Binary file not shown.
1013
vendor/github.com/open-policy-agent/opa/internal/compiler/wasm/wasm.go
generated
vendored
Normal file
1013
vendor/github.com/open-policy-agent/opa/internal/compiler/wasm/wasm.go
generated
vendored
Normal file
File diff suppressed because it is too large
Load Diff
42
vendor/github.com/open-policy-agent/opa/internal/file/archive/tarball.go
generated
vendored
Normal file
42
vendor/github.com/open-policy-agent/opa/internal/file/archive/tarball.go
generated
vendored
Normal file
@@ -0,0 +1,42 @@
|
||||
package archive
|
||||
|
||||
import (
|
||||
"archive/tar"
|
||||
"bytes"
|
||||
"compress/gzip"
|
||||
"strings"
|
||||
)
|
||||
|
||||
// MustWriteTarGz builds a gzipped tarball containing the given files and
// returns it as an in-memory buffer. Each element of files is a {name,
// content} pair. It panics if writing any entry fails.
func MustWriteTarGz(files [][2]string) *bytes.Buffer {
	buf := new(bytes.Buffer)
	gw := gzip.NewWriter(buf)
	tw := tar.NewWriter(gw)
	for _, f := range files {
		if err := WriteFile(tw, f[0], []byte(f[1])); err != nil {
			panic(err)
		}
	}
	// Close the tar stream first so its trailer is compressed, then the gzip
	// stream so its footer lands in buf before the buffer is used.
	tw.Close()
	gw.Close()
	return buf
}

// WriteFile appends a single regular-file entry to tw, named path normalized
// to exactly one leading slash, with contents bs and mode 0600.
func WriteFile(tw *tar.Writer, path string, bs []byte) error {
	hdr := tar.Header{
		Name:     "/" + strings.TrimLeft(path, "/"),
		Mode:     0600,
		Typeflag: tar.TypeReg,
		Size:     int64(len(bs)),
	}
	if err := tw.WriteHeader(&hdr); err != nil {
		return err
	}
	_, err := tw.Write(bs)
	return err
}
|
||||
42
vendor/github.com/open-policy-agent/opa/internal/file/url/url.go
generated
vendored
Normal file
42
vendor/github.com/open-policy-agent/opa/internal/file/url/url.go
generated
vendored
Normal file
@@ -0,0 +1,42 @@
|
||||
// Copyright 2019 The OPA Authors. All rights reserved.
|
||||
// Use of this source code is governed by an Apache2
|
||||
// license that can be found in the LICENSE file.
|
||||
|
||||
// Package url contains helpers for dealing with file paths and URLs.
|
||||
package url
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"net/url"
|
||||
"runtime"
|
||||
"strings"
|
||||
)
|
||||
|
||||
// goos mirrors runtime.GOOS via a package variable (presumably so tests can
// override it — confirm).
var goos = runtime.GOOS

// Clean returns a cleaned file path that may or may not be a URL. Only
// file:// URLs are accepted; any other scheme yields an error.
func Clean(path string) (string, error) {
	if !strings.Contains(path, "://") {
		return path, nil
	}

	u, err := url.Parse(path)
	if err != nil {
		return "", err
	}
	if u.Scheme != "file" {
		return "", fmt.Errorf("unsupported URL scheme: %v", path)
	}

	cleaned := u.Path

	// Trim leading slash on Windows if present. The url.Path field returned
	// by url.Parse has leading slash that causes CreateFile() calls to fail
	// on Windows. See https://github.com/golang/go/issues/6027 for details.
	if goos == "windows" && strings.HasPrefix(cleaned, "/") {
		cleaned = cleaned[1:]
	}

	return cleaned, nil
}
|
||||
395
vendor/github.com/open-policy-agent/opa/internal/ir/ir.go
generated
vendored
Normal file
395
vendor/github.com/open-policy-agent/opa/internal/ir/ir.go
generated
vendored
Normal file
@@ -0,0 +1,395 @@
|
||||
// Copyright 2018 The OPA Authors. All rights reserved.
|
||||
// Use of this source code is governed by an Apache2
|
||||
// license that can be found in the LICENSE file.
|
||||
|
||||
// Package ir defines an intermediate representation (IR) for Rego.
|
||||
//
|
||||
// The IR specifies an imperative execution model for Rego policies similar to a
|
||||
// query plan in traditional databases.
|
||||
package ir
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
)
|
||||
|
||||
type (
	// Policy represents a planned policy query.
	Policy struct {
		Static *Static
		Plan   *Plan
		Funcs  *Funcs
	}

	// Static represents a static data segment that is indexed into by the policy.
	Static struct {
		Strings      []*StringConst
		BuiltinFuncs []*BuiltinFunc
	}

	// BuiltinFunc represents a built-in function that may be required by the
	// policy.
	BuiltinFunc struct {
		Name string
	}

	// Funcs represents a collection of planned functions to include in the
	// policy.
	Funcs struct {
		Funcs []*Func
	}

	// Func represents a named plan (function) that can be invoked. Functions
	// accept one or more parameters and return a value. By convention, the
	// input document and data documents are always passed as the first and
	// second arguments (respectively).
	Func struct {
		Name   string
		Params []Local
		Return Local
		Blocks []*Block // TODO(tsandall): should this be a plan?
	}

	// Plan represents an ordered series of blocks to execute. Plan execution
	// stops when a return statement is reached. Blocks are executed in-order.
	Plan struct {
		Blocks []*Block
	}

	// Block represents an ordered sequence of statements to execute. Blocks are
	// executed until a return statement is encountered, a statement is undefined,
	// or there are no more statements. If all statements are defined but no return
	// statement is encountered, the block is undefined.
	Block struct {
		Stmts []Stmt
	}

	// Stmt represents an operation (e.g., comparison, loop, dot, etc.) to execute.
	Stmt interface {
	}

	// Local represents a plan-scoped variable.
	//
	// TODO(tsandall): should this be int32 for safety?
	Local int

	// Const represents a constant value from the policy.
	Const interface {
		typeMarker()
	}

	// NullConst represents a null value.
	NullConst struct{}

	// BooleanConst represents a boolean value.
	BooleanConst struct {
		Value bool
	}

	// StringConst represents a string value.
	StringConst struct {
		Value string
	}

	// IntConst represents an integer constant.
	IntConst struct {
		Value int64
	}

	// FloatConst represents a floating-point constant.
	FloatConst struct {
		Value float64
	}
)
|
||||
|
||||
// Well-known locals reserved by convention.
const (
	// Input is the local variable that refers to the global input document.
	Input Local = iota

	// Data is the local variable that refers to the global data document.
	Data

	// Unused is the free local variable that can be allocated in a plan.
	Unused
)
|
||||
|
||||
// String returns a fixed human-readable label for the policy.
func (a *Policy) String() string {
	return "Policy"
}

// String summarizes the static segment by its string count.
func (a *Static) String() string {
	return fmt.Sprintf("Static (%d strings)", len(a.Strings))
}

// String summarizes the function collection by its size.
func (a *Funcs) String() string {
	return fmt.Sprintf("Funcs (%d funcs)", len(a.Funcs))
}

// String summarizes a function by name, parameters, and block count.
func (a *Func) String() string {
	return fmt.Sprintf("%v (%d params: %v, %d blocks)", a.Name, len(a.Params), a.Params, len(a.Blocks))
}

// String summarizes a plan by its block count.
func (a *Plan) String() string {
	return fmt.Sprintf("Plan (%d blocks)", len(a.Blocks))
}

// String summarizes a block by its statement count.
func (a *Block) String() string {
	return fmt.Sprintf("Block (%d statements)", len(a.Stmts))
}
|
||||
|
||||
// typeMarker implementations restrict the Const interface to the types below.
func (a *BooleanConst) typeMarker() {}
func (a *NullConst) typeMarker()    {}
func (a *IntConst) typeMarker()     {}
func (a *FloatConst) typeMarker()   {}
func (a *StringConst) typeMarker()  {}
|
||||
|
||||
// ReturnLocalStmt represents a return statement that yields a local value.
type ReturnLocalStmt struct {
	Source Local
}

// CallStmt represents a named function call. The result should be stored in the
// result local.
type CallStmt struct {
	Func   string
	Args   []Local
	Result Local
}

// BlockStmt represents a nested block. Nested blocks and break statements can
// be used to short-circuit execution.
type BlockStmt struct {
	Blocks []*Block
}

// String summarizes the statement by its block count.
func (a *BlockStmt) String() string {
	return fmt.Sprintf("BlockStmt (%d blocks)", len(a.Blocks))
}

// BreakStmt represents a jump out of the current block. The index specifies how
// many blocks to jump starting from zero (the current block). Execution will
// continue from the end of the block that is jumped to.
type BreakStmt struct {
	Index uint32
}

// DotStmt represents a lookup operation on a value (e.g., array, object, etc.)
// The source of a DotStmt may be a scalar value in which case the statement
// will be undefined.
type DotStmt struct {
	Source Local
	Key    Local
	Target Local
}

// LenStmt represents a length() operation on a local variable. The
// result is stored in the target local variable.
type LenStmt struct {
	Source Local
	Target Local
}

// ScanStmt represents a linear scan over a composite value. The
// source may be a scalar in which case the block will never execute.
type ScanStmt struct {
	Source Local
	Key    Local
	Value  Local
	Block  *Block
}

// NotStmt represents a negated statement.
type NotStmt struct {
	Block *Block
}

// AssignBooleanStmt represents an assignment of a boolean value to a local variable.
type AssignBooleanStmt struct {
	Value  bool
	Target Local
}

// AssignIntStmt represents an assignment of an integer value to a
// local variable.
type AssignIntStmt struct {
	Value  int64
	Target Local
}

// AssignVarStmt represents an assignment of one local variable to another.
type AssignVarStmt struct {
	Source Local
	Target Local
}

// AssignVarOnceStmt represents an assignment of one local variable to another.
// If the target is defined, execution aborts with a conflict error.
//
// TODO(tsandall): is there a better name for this?
type AssignVarOnceStmt struct {
	Target Local
	Source Local
}

// MakeStringStmt constructs a local variable that refers to a string constant.
type MakeStringStmt struct {
	Index  int
	Target Local
}

// MakeNullStmt constructs a local variable that refers to a null value.
type MakeNullStmt struct {
	Target Local
}

// MakeBooleanStmt constructs a local variable that refers to a boolean value.
type MakeBooleanStmt struct {
	Value  bool
	Target Local
}

// MakeNumberFloatStmt constructs a local variable that refers to a
// floating-point number value.
type MakeNumberFloatStmt struct {
	Value  float64
	Target Local
}

// MakeNumberIntStmt constructs a local variable that refers to an integer value.
type MakeNumberIntStmt struct {
	Value  int64
	Target Local
}

// MakeNumberRefStmt constructs a local variable that refers to a number stored as a string.
type MakeNumberRefStmt struct {
	Index  int
	Target Local
}

// MakeArrayStmt constructs a local variable that refers to an array value.
type MakeArrayStmt struct {
	Capacity int32
	Target   Local
}

// MakeObjectStmt constructs a local variable that refers to an object value.
type MakeObjectStmt struct {
	Target Local
}

// MakeSetStmt constructs a local variable that refers to a set value.
type MakeSetStmt struct {
	Target Local
}

// EqualStmt represents a value-equality check of two local variables.
type EqualStmt struct {
	A Local
	B Local
}

// LessThanStmt represents a < check of two local variables.
type LessThanStmt struct {
	A Local
	B Local
}

// LessThanEqualStmt represents a <= check of two local variables.
type LessThanEqualStmt struct {
	A Local
	B Local
}

// GreaterThanStmt represents a > check of two local variables.
type GreaterThanStmt struct {
	A Local
	B Local
}

// GreaterThanEqualStmt represents a >= check of two local variables.
type GreaterThanEqualStmt struct {
	A Local
	B Local
}

// NotEqualStmt represents a != check of two local variables.
type NotEqualStmt struct {
	A Local
	B Local
}

// IsArrayStmt represents a dynamic type check on a local variable.
type IsArrayStmt struct {
	Source Local
}

// IsObjectStmt represents a dynamic type check on a local variable.
type IsObjectStmt struct {
	Source Local
}

// IsDefinedStmt represents a check of whether a local variable is defined.
type IsDefinedStmt struct {
	Source Local
}

// IsUndefinedStmt represents a check of whether local variable is undefined.
type IsUndefinedStmt struct {
	Source Local
}

// ArrayAppendStmt represents a dynamic append operation of a value
// onto an array.
type ArrayAppendStmt struct {
	Value Local
	Array Local
}

// ObjectInsertStmt represents a dynamic insert operation of a
// key/value pair into an object.
type ObjectInsertStmt struct {
	Key    Local
	Value  Local
	Object Local
}

// ObjectInsertOnceStmt represents a dynamic insert operation of a key/value
// pair into an object. If the key already exists and the value differs,
// execution aborts with a conflict error.
type ObjectInsertOnceStmt struct {
	Key    Local
	Value  Local
	Object Local
}

// ObjectMergeStmt performs a recursive merge of two object values. If either of
// the locals refer to non-object values this operation will abort with a
// conflict error. Overlapping object keys are merged recursively.
type ObjectMergeStmt struct {
	A      Local
	B      Local
	Target Local
}

// SetAddStmt represents a dynamic add operation of an element into a set.
type SetAddStmt struct {
	Value Local
	Set   Local
}

// WithStmt replaces the Local or a portion of the document referred to by the
// Local with the Value and executes the contained block. If the Path is
// non-empty, the Value is upserted into the Local. If the intermediate nodes in
// the Local referred to by the Path do not exist, they will be created. When
// the WithStmt finishes the Local is reset to its original value.
type WithStmt struct {
	Local Local
	Path  []int
	Value Local
	Block *Block
}

// ResultSetAdd adds a value into the result set returned by the query plan.
type ResultSetAdd struct {
	Value Local
}
|
||||
44
vendor/github.com/open-policy-agent/opa/internal/ir/pretty.go
generated
vendored
Normal file
44
vendor/github.com/open-policy-agent/opa/internal/ir/pretty.go
generated
vendored
Normal file
@@ -0,0 +1,44 @@
|
||||
// Copyright 2018 The OPA Authors. All rights reserved.
|
||||
// Use of this source code is governed by an Apache2
|
||||
// license that can be found in the LICENSE file.
|
||||
|
||||
package ir
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"io"
|
||||
"strings"
|
||||
)
|
||||
|
||||
// Pretty writes a human-readable representation of an IR object to w.
func Pretty(w io.Writer, x interface{}) {

	// Depth starts at -1 because the visitor's Before hook increments it
	// before each node is printed, so the root prints with no indentation.
	pp := &prettyPrinter{
		depth: -1,
		w:     w,
	}
	// NOTE(review): Walk's error return is discarded; prettyPrinter.Visit
	// never returns an error today, but confirm if that ever changes.
	Walk(pp, x)
}
|
||||
|
||||
// prettyPrinter prints each visited node at an indentation level that tracks
// the traversal depth.
type prettyPrinter struct {
	depth int       // current traversal depth; drives indentation
	w     io.Writer // destination for the rendered lines
}

// Before increments the depth ahead of visiting a node.
func (pp *prettyPrinter) Before(x interface{}) {
	pp.depth++
}

// After restores the depth once a node (and its children) has been visited.
func (pp *prettyPrinter) After(x interface{}) {
	pp.depth--
}

// Visit prints the node's type and value and continues using the same printer
// for its children.
func (pp *prettyPrinter) Visit(x interface{}) (Visitor, error) {
	pp.writeIndent("%T %+v", x, x)
	return pp, nil
}

// writeIndent writes one formatted line prefixed with "| " per depth level.
func (pp *prettyPrinter) writeIndent(f string, a ...interface{}) {
	pad := strings.Repeat("| ", pp.depth)
	fmt.Fprintf(pp.w, pad+f+"\n", a...)
}
|
||||
84
vendor/github.com/open-policy-agent/opa/internal/ir/walk.go
generated
vendored
Normal file
84
vendor/github.com/open-policy-agent/opa/internal/ir/walk.go
generated
vendored
Normal file
@@ -0,0 +1,84 @@
|
||||
// Copyright 2018 The OPA Authors. All rights reserved.
|
||||
// Use of this source code is governed by an Apache2
|
||||
// license that can be found in the LICENSE file.
|
||||
|
||||
package ir
|
||||
|
||||
// Visitor defines the interface for visiting IR nodes.
type Visitor interface {
	// Before is called on each node prior to Visit.
	Before(x interface{})
	// Visit inspects x. The returned Visitor is used for x's children; a nil
	// Visitor skips the children, and a non-nil error stops the walk.
	Visit(x interface{}) (Visitor, error)
	// After is called on each node after its children have been walked.
	After(x interface{})
}
|
||||
|
||||
// Walk invokes the visitor for nodes under x. It returns the first error
// reported by the visitor, if any.
func Walk(vis Visitor, x interface{}) error {
	impl := walkerImpl{
		vis: vis,
	}
	impl.walk(x)
	return impl.err
}
|
||||
|
||||
// walkerImpl carries the active visitor and the first error encountered while
// walking.
type walkerImpl struct {
	vis Visitor
	err error
}
|
||||
|
||||
// walk recursively applies the visitor to x and its children. An error from
// Visit stops the traversal; a nil visitor returned by Visit skips x's
// children and restores the previous visitor.
func (w *walkerImpl) walk(x interface{}) {

	if x == nil {
		return
	}

	prev := w.vis
	w.vis.Before(x)
	// The receiver for After is bound at this defer statement, so After fires
	// on the visitor that was current when x was entered.
	defer w.vis.After(x)
	w.vis, w.err = w.vis.Visit(x)
	if w.err != nil {
		return
	} else if w.vis == nil {
		// Visit asked to skip children; restore the previous visitor.
		w.vis = prev
		return
	}

	// Recurse into children. Leaf statement types fall through with no
	// children to walk.
	switch x := x.(type) {
	case *Policy:
		w.walk(x.Static)
		w.walk(x.Plan)
		w.walk(x.Funcs)
	case *Static:
		for _, s := range x.Strings {
			w.walk(s)
		}
		for _, f := range x.BuiltinFuncs {
			w.walk(f)
		}
	case *Funcs:
		for _, fn := range x.Funcs {
			w.walk(fn)
		}
	case *Func:
		for _, b := range x.Blocks {
			w.walk(b)
		}
	case *Plan:
		for _, b := range x.Blocks {
			w.walk(b)
		}
	case *Block:
		for _, s := range x.Stmts {
			w.walk(s)
		}
	case *BlockStmt:
		for _, b := range x.Blocks {
			w.walk(b)
		}
	case *ScanStmt:
		w.walk(x.Block)
	case *NotStmt:
		w.walk(x.Block)
	case *WithStmt:
		w.walk(x.Block)
	}
}
|
||||
170
vendor/github.com/open-policy-agent/opa/internal/leb128/leb128.go
generated
vendored
Normal file
170
vendor/github.com/open-policy-agent/opa/internal/leb128/leb128.go
generated
vendored
Normal file
@@ -0,0 +1,170 @@
|
||||
// Copyright 2018 The OPA Authors. All rights reserved.
|
||||
// Use of this source code is governed by an Apache2
|
||||
// license that can be found in the LICENSE file.
|
||||
|
||||
// Package leb128 implements LEB128 integer encoding.
|
||||
package leb128
|
||||
|
||||
import (
|
||||
"io"
|
||||
)
|
||||
|
||||
// MustReadVarInt32 returns an int32 from r or panics.
|
||||
func MustReadVarInt32(r io.Reader) int32 {
|
||||
i32, err := ReadVarInt32(r)
|
||||
if err != nil {
|
||||
panic(err)
|
||||
}
|
||||
return i32
|
||||
}
|
||||
|
||||
// MustReadVarInt64 returns an int64 from r or panics.
|
||||
func MustReadVarInt64(r io.Reader) int64 {
|
||||
i64, err := ReadVarInt64(r)
|
||||
if err != nil {
|
||||
panic(err)
|
||||
}
|
||||
return i64
|
||||
}
|
||||
|
||||
// MustReadVarUint32 returns an uint32 from r or panics.
|
||||
func MustReadVarUint32(r io.Reader) uint32 {
|
||||
u32, err := ReadVarUint32(r)
|
||||
if err != nil {
|
||||
panic(err)
|
||||
}
|
||||
return u32
|
||||
}
|
||||
|
||||
// MustReadVarUint64 returns an uint64 from r or panics.
|
||||
func MustReadVarUint64(r io.Reader) uint64 {
|
||||
u64, err := ReadVarUint64(r)
|
||||
if err != nil {
|
||||
panic(err)
|
||||
}
|
||||
return u64
|
||||
}
|
||||
|
||||
// Copied from http://dwarfstd.org/doc/Dwarf3.pdf.
|
||||
|
||||
// ReadVarUint32 tries to read a uint32 from r.
|
||||
func ReadVarUint32(r io.Reader) (uint32, error) {
|
||||
u64, err := ReadVarUint64(r)
|
||||
if err != nil {
|
||||
return 0, err
|
||||
}
|
||||
return uint32(u64), nil
|
||||
}
|
||||
|
||||
// ReadVarUint64 tries to read a uint64 from r.
|
||||
func ReadVarUint64(r io.Reader) (uint64, error) {
|
||||
var result uint64
|
||||
var shift uint64
|
||||
buf := make([]byte, 1)
|
||||
for {
|
||||
if _, err := r.Read(buf); err != nil {
|
||||
return 0, err
|
||||
}
|
||||
v := uint64(buf[0])
|
||||
result |= (v & 0x7F) << shift
|
||||
if v&0x80 == 0 {
|
||||
return result, nil
|
||||
}
|
||||
shift += 7
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
// ReadVarInt32 tries to read a int32 from r.
|
||||
func ReadVarInt32(r io.Reader) (int32, error) {
|
||||
i64, err := ReadVarInt64(r)
|
||||
if err != nil {
|
||||
return 0, err
|
||||
}
|
||||
return int32(i64), nil
|
||||
}
|
||||
|
||||
// ReadVarInt64 tries to read a int64 from r.
|
||||
func ReadVarInt64(r io.Reader) (int64, error) {
|
||||
var result int64
|
||||
var shift uint64
|
||||
size := uint64(32)
|
||||
buf := make([]byte, 1)
|
||||
for {
|
||||
if _, err := r.Read(buf); err != nil {
|
||||
return 0, err
|
||||
}
|
||||
v := int64(buf[0])
|
||||
result |= (v & 0x7F) << shift
|
||||
shift += 7
|
||||
if v&0x80 == 0 {
|
||||
if (shift < size) && (v&0x40 != 0) {
|
||||
result |= (^0 << shift)
|
||||
}
|
||||
return result, nil
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// WriteVarUint32 writes u to w.
|
||||
func WriteVarUint32(w io.Writer, u uint32) error {
|
||||
var b []byte
|
||||
_, err := w.Write(appendUleb128(b, uint64(u)))
|
||||
return err
|
||||
}
|
||||
|
||||
// WriteVarUint64 writes u to w.
|
||||
func WriteVarUint64(w io.Writer, u uint64) error {
|
||||
var b []byte
|
||||
_, err := w.Write(appendUleb128(b, u))
|
||||
return err
|
||||
}
|
||||
|
||||
// WriteVarInt32 writes u to w.
|
||||
func WriteVarInt32(w io.Writer, i int32) error {
|
||||
var b []byte
|
||||
_, err := w.Write(appendSleb128(b, int64(i)))
|
||||
return err
|
||||
}
|
||||
|
||||
// WriteVarInt64 writes u to w.
|
||||
func WriteVarInt64(w io.Writer, i int64) error {
|
||||
var b []byte
|
||||
_, err := w.Write(appendSleb128(b, i))
|
||||
return err
|
||||
}
|
||||
|
||||
// Copied from https://github.com/golang/go/blob/master/src/cmd/internal/dwarf/dwarf.go.
|
||||
|
||||
// appendUleb128 appends v to b using DWARF's unsigned LEB128 encoding and
// returns the extended slice.
func appendUleb128(b []byte, v uint64) []byte {
	for {
		group := uint8(v & 0x7f)
		v >>= 7
		if v != 0 {
			// More groups follow: set the continuation bit.
			group |= 0x80
		}
		b = append(b, group)
		if group&0x80 == 0 {
			return b
		}
	}
}
|
||||
|
||||
// appendSleb128 appends v to b using DWARF's signed LEB128 encoding and
// returns the extended slice.
func appendSleb128(b []byte, v int64) []byte {
	for {
		group := uint8(v & 0x7f)
		sign := group & 0x40
		v >>= 7 // arithmetic shift: preserves the sign
		// Encoding terminates once the remaining value is pure sign bits
		// and the sign bit of the last group agrees with it.
		done := (v == 0 && sign == 0) || (v == -1 && sign != 0)
		if !done {
			group |= 0x80
		}
		b = append(b, group)
		if done {
			return b
		}
	}
}
|
||||
40
vendor/github.com/open-policy-agent/opa/internal/merge/merge.go
generated
vendored
Normal file
40
vendor/github.com/open-policy-agent/opa/internal/merge/merge.go
generated
vendored
Normal file
@@ -0,0 +1,40 @@
|
||||
// Copyright 2017 The OPA Authors. All rights reserved.
|
||||
// Use of this source code is governed by an Apache2
|
||||
// license that can be found in the LICENSE file.
|
||||
|
||||
// Package merge contains helpers to merge data structures
|
||||
// frequently encountered in OPA.
|
||||
package merge
|
||||
|
||||
// InterfaceMaps returns the result of merging a and b. If a and b cannot be
// merged because of conflicting key-value pairs, ok is false. Nested maps
// are merged recursively; a key present in both with non-map values (or a
// map on only one side) is a conflict.
func InterfaceMaps(a map[string]interface{}, b map[string]interface{}) (c map[string]interface{}, ok bool) {

	c = make(map[string]interface{}, len(a)+len(b))
	for k, v := range a {
		c[k] = v
	}

	for k, add := range b {

		existing, present := c[k]
		if !present {
			c[k] = add
			continue
		}

		existingObj, existingIsMap := existing.(map[string]interface{})
		addObj, addIsMap := add.(map[string]interface{})
		if !existingIsMap || !addIsMap {
			return nil, false
		}

		merged, mergeable := InterfaceMaps(existingObj, addObj)
		if !mergeable {
			return nil, false
		}
		c[k] = merged
	}

	return c, true
}
|
||||
1758
vendor/github.com/open-policy-agent/opa/internal/planner/planner.go
generated
vendored
Normal file
1758
vendor/github.com/open-policy-agent/opa/internal/planner/planner.go
generated
vendored
Normal file
File diff suppressed because it is too large
Load Diff
156
vendor/github.com/open-policy-agent/opa/internal/planner/rules.go
generated
vendored
Normal file
156
vendor/github.com/open-policy-agent/opa/internal/planner/rules.go
generated
vendored
Normal file
@@ -0,0 +1,156 @@
|
||||
package planner
|
||||
|
||||
import (
|
||||
"sort"
|
||||
|
||||
"github.com/open-policy-agent/opa/ast"
|
||||
)
|
||||
|
||||
// funcstack implements a simple map structure used to keep track of virtual
// document => planned function names. The structure supports Push and Pop
// operations so that the planner can shadow planned functions when 'with'
// statements are found.
type funcstack struct {
	stack []map[string]string // innermost mapping is at the end
	gen   int                 // generation counter, bumped on every Push/Pop
}
|
||||
|
||||
func newFuncstack() *funcstack {
|
||||
return &funcstack{
|
||||
stack: []map[string]string{
|
||||
map[string]string{},
|
||||
},
|
||||
gen: 0,
|
||||
}
|
||||
}
|
||||
|
||||
func (p funcstack) Add(key, value string) {
|
||||
p.stack[len(p.stack)-1][key] = value
|
||||
}
|
||||
|
||||
func (p funcstack) Get(key string) (string, bool) {
|
||||
value, ok := p.stack[len(p.stack)-1][key]
|
||||
return value, ok
|
||||
}
|
||||
|
||||
func (p *funcstack) Push(funcs map[string]string) {
|
||||
p.stack = append(p.stack, funcs)
|
||||
p.gen++
|
||||
}
|
||||
|
||||
func (p *funcstack) Pop() map[string]string {
|
||||
last := p.stack[len(p.stack)-1]
|
||||
p.stack = p.stack[:len(p.stack)-1]
|
||||
p.gen++
|
||||
return last
|
||||
}
|
||||
|
||||
// ruletrie implements a simple trie structure for organizing rules that may be
// planned. The trie nodes are keyed by the rule path. The ruletrie supports
// Push and Pop operations that allow the planner to shadow subtrees when 'with'
// statements are found.
type ruletrie struct {
	// children maps each key to a stack of nodes; the last element is the
	// active node and a nil tail marks the key as shadowed (see Push/Pop).
	children map[ast.Value][]*ruletrie
	rules    []*ast.Rule
}
|
||||
|
||||
func newRuletrie() *ruletrie {
|
||||
return &ruletrie{
|
||||
children: map[ast.Value][]*ruletrie{},
|
||||
}
|
||||
}
|
||||
|
||||
func (t *ruletrie) Arity() int {
|
||||
rules := t.Rules()
|
||||
if len(rules) > 0 {
|
||||
return len(rules[0].Head.Args)
|
||||
}
|
||||
return 0
|
||||
}
|
||||
|
||||
func (t *ruletrie) Rules() []*ast.Rule {
|
||||
if t != nil {
|
||||
return t.rules
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
// Push shadows the subtree at key by appending a nil node to the stack for
// the final key element; Get then reports the key as absent until Pop is
// called. If any intermediate element is missing there is nothing to
// shadow and Push is a no-op.
func (t *ruletrie) Push(key ast.Ref) {
	node := t
	for i := 0; i < len(key)-1; i++ {
		node = node.Get(key[i].Value)
		if node == nil {
			return
		}
	}
	elem := key[len(key)-1]
	node.children[elem.Value] = append(node.children[elem.Value], nil)
}
|
||||
|
||||
// Pop undoes a matching Push by removing the last (shadowing) node from the
// stack for the final key element. If any intermediate element is missing,
// Pop is a no-op (mirrors Push).
func (t *ruletrie) Pop(key ast.Ref) {
	node := t
	for i := 0; i < len(key)-1; i++ {
		node = node.Get(key[i].Value)
		if node == nil {
			return
		}
	}
	elem := key[len(key)-1]
	sl := node.children[elem.Value]
	node.children[elem.Value] = sl[:len(sl)-1]
}
|
||||
|
||||
func (t *ruletrie) Insert(key ast.Ref) *ruletrie {
|
||||
node := t
|
||||
for _, elem := range key {
|
||||
child := node.Get(elem.Value)
|
||||
if child == nil {
|
||||
child = newRuletrie()
|
||||
node.children[elem.Value] = append(node.children[elem.Value], child)
|
||||
}
|
||||
node = child
|
||||
}
|
||||
return node
|
||||
}
|
||||
|
||||
func (t *ruletrie) Lookup(key ast.Ref) *ruletrie {
|
||||
node := t
|
||||
for _, elem := range key {
|
||||
node = node.Get(elem.Value)
|
||||
if node == nil {
|
||||
return nil
|
||||
}
|
||||
}
|
||||
return node
|
||||
}
|
||||
|
||||
func (t *ruletrie) LookupOrInsert(key ast.Ref) *ruletrie {
|
||||
if val := t.Lookup(key); val != nil {
|
||||
return val
|
||||
}
|
||||
return t.Insert(key)
|
||||
}
|
||||
|
||||
func (t *ruletrie) Children() []ast.Value {
|
||||
sorted := make([]ast.Value, 0, len(t.children))
|
||||
for key := range t.children {
|
||||
if t.Get(key) != nil {
|
||||
sorted = append(sorted, key)
|
||||
}
|
||||
}
|
||||
sort.Slice(sorted, func(i, j int) bool {
|
||||
return sorted[i].Compare(sorted[j]) < 0
|
||||
})
|
||||
return sorted
|
||||
}
|
||||
|
||||
func (t *ruletrie) Get(k ast.Value) *ruletrie {
|
||||
if t == nil {
|
||||
return nil
|
||||
}
|
||||
nodes := t.children[k]
|
||||
if len(nodes) == 0 {
|
||||
return nil
|
||||
}
|
||||
return nodes[len(nodes)-1]
|
||||
}
|
||||
58
vendor/github.com/open-policy-agent/opa/internal/planner/varstack.go
generated
vendored
Normal file
58
vendor/github.com/open-policy-agent/opa/internal/planner/varstack.go
generated
vendored
Normal file
@@ -0,0 +1,58 @@
|
||||
// Copyright 2019 The OPA Authors. All rights reserved.
|
||||
// Use of this source code is governed by an Apache2
|
||||
// license that can be found in the LICENSE file.
|
||||
|
||||
package planner
|
||||
|
||||
import (
|
||||
"github.com/open-policy-agent/opa/ast"
|
||||
"github.com/open-policy-agent/opa/internal/ir"
|
||||
)
|
||||
|
||||
// varstack is a stack of frames mapping Rego variables to IR locals;
// lookups search from the innermost frame outward (see Get).
type varstack []map[ast.Var]ir.Local
|
||||
|
||||
func newVarstack(frames ...map[ast.Var]ir.Local) *varstack {
|
||||
vs := &varstack{}
|
||||
for _, f := range frames {
|
||||
vs.Push(f)
|
||||
}
|
||||
return vs
|
||||
}
|
||||
|
||||
func (vs varstack) GetOrElse(k ast.Var, orElse func() ir.Local) ir.Local {
|
||||
l, ok := vs.Get(k)
|
||||
if !ok {
|
||||
l = orElse()
|
||||
vs.Put(k, l)
|
||||
}
|
||||
return l
|
||||
}
|
||||
|
||||
func (vs varstack) GetOrEmpty(k ast.Var) ir.Local {
|
||||
l, _ := vs.Get(k)
|
||||
return l
|
||||
}
|
||||
|
||||
func (vs varstack) Get(k ast.Var) (ir.Local, bool) {
|
||||
for i := len(vs) - 1; i >= 0; i-- {
|
||||
if l, ok := vs[i][k]; ok {
|
||||
return l, true
|
||||
}
|
||||
}
|
||||
return 0, false
|
||||
}
|
||||
|
||||
func (vs varstack) Put(k ast.Var, v ir.Local) {
|
||||
vs[len(vs)-1][k] = v
|
||||
}
|
||||
|
||||
func (vs *varstack) Push(frame map[ast.Var]ir.Local) {
|
||||
*vs = append(*vs, frame)
|
||||
}
|
||||
|
||||
func (vs *varstack) Pop() map[ast.Var]ir.Local {
|
||||
sl := *vs
|
||||
last := sl[len(sl)-1]
|
||||
*vs = sl[:len(sl)-1]
|
||||
return last
|
||||
}
|
||||
40
vendor/github.com/open-policy-agent/opa/internal/version/version.go
generated
vendored
Normal file
40
vendor/github.com/open-policy-agent/opa/internal/version/version.go
generated
vendored
Normal file
@@ -0,0 +1,40 @@
|
||||
// Copyright 2019 The OPA Authors. All rights reserved.
|
||||
// Use of this source code is governed by an Apache2
|
||||
// license that can be found in the LICENSE file.
|
||||
|
||||
// Package version implements helper functions for the stored version.
|
||||
package version
|
||||
|
||||
import (
|
||||
"context"
|
||||
"fmt"
|
||||
"runtime"
|
||||
|
||||
"github.com/open-policy-agent/opa/storage"
|
||||
"github.com/open-policy-agent/opa/version"
|
||||
)
|
||||
|
||||
// versionPath is the storage location under which Write records the build
// version document.
var versionPath = storage.MustParsePath("/system/version")
|
||||
|
||||
// Write the build version information into storage. This makes the
// version information available to the REPL and the HTTP server.
// The caller owns txn; Write does not commit or abort it.
func Write(ctx context.Context, store storage.Store, txn storage.Transaction) error {

	// Ensure the /system/version path exists before writing to it.
	if err := storage.MakeDir(ctx, store, txn, versionPath); err != nil {
		return err
	}

	if err := store.Write(ctx, txn, storage.AddOp, versionPath, map[string]interface{}{
		"version":         version.Version,
		"build_commit":    version.Vcs,
		"build_timestamp": version.Timestamp,
		"build_hostname":  version.Hostname,
	}); err != nil {
		return err
	}

	return nil
}
|
||||
|
||||
// UserAgent defines the current OPA instance's User-Agent default header
// value, embedding the build version and the runtime OS/architecture.
var UserAgent = fmt.Sprintf("Open Policy Agent/%s (%s, %s)", version.Version, runtime.GOOS, runtime.GOARCH)
|
||||
67
vendor/github.com/open-policy-agent/opa/internal/wasm/constant/constant.go
generated
vendored
Normal file
67
vendor/github.com/open-policy-agent/opa/internal/wasm/constant/constant.go
generated
vendored
Normal file
@@ -0,0 +1,67 @@
|
||||
// Copyright 2018 The OPA Authors. All rights reserved.
|
||||
// Use of this source code is governed by an Apache2
|
||||
// license that can be found in the LICENSE file.
|
||||
|
||||
// Package constant contains WASM constant definitions.
|
||||
package constant
|
||||
|
||||
// Magic bytes at the beginning of every WASM file ("\0asm"), little-endian.
const Magic = uint32(0x6D736100)

// Version defines the WASM version.
const Version = uint32(1)

// WASM module section IDs.
const (
	CustomSectionID uint8 = iota
	TypeSectionID
	ImportSectionID
	FunctionSectionID
	TableSectionID
	MemorySectionID
	GlobalSectionID
	ExportSectionID
	StartSectionID
	ElementSectionID
	CodeSectionID
	DataSectionID
)

// FunctionTypeID indicates the start of a function type definition.
const FunctionTypeID = byte(0x60)

// ValueType represents an intrinsic value type in WASM.
const (
	ValueTypeF64 byte = iota + 0x7C
	ValueTypeF32
	ValueTypeI64
	ValueTypeI32
)

// WASM import descriptor types.
const (
	ImportDescType byte = iota
	ImportDescTable
	ImportDescMem
	ImportDescGlobal
)

// WASM export descriptor types.
const (
	ExportDescType byte = iota
	ExportDescTable
	ExportDescMem
	ExportDescGlobal
)

// ElementTypeAnyFunc indicates the type of a table import.
const ElementTypeAnyFunc byte = 0x70

// BlockTypeEmpty represents a block type.
const BlockTypeEmpty byte = 0x40

// WASM global variable mutability flag.
const (
	Const byte = iota
	Mutable
)
|
||||
6
vendor/github.com/open-policy-agent/opa/internal/wasm/encoding/doc.go
generated
vendored
Normal file
6
vendor/github.com/open-policy-agent/opa/internal/wasm/encoding/doc.go
generated
vendored
Normal file
@@ -0,0 +1,6 @@
|
||||
// Copyright 2018 The OPA Authors. All rights reserved.
|
||||
// Use of this source code is governed by an Apache2
|
||||
// license that can be found in the LICENSE file.
|
||||
|
||||
// Package encoding implements WASM module reading and writing.
|
||||
package encoding
|
||||
809
vendor/github.com/open-policy-agent/opa/internal/wasm/encoding/reader.go
generated
vendored
Normal file
809
vendor/github.com/open-policy-agent/opa/internal/wasm/encoding/reader.go
generated
vendored
Normal file
@@ -0,0 +1,809 @@
|
||||
// Copyright 2018 The OPA Authors. All rights reserved.
|
||||
// Use of this source code is governed by an Apache2
|
||||
// license that can be found in the LICENSE file.
|
||||
|
||||
package encoding
|
||||
|
||||
import (
|
||||
"bytes"
|
||||
"encoding/binary"
|
||||
"fmt"
|
||||
"io"
|
||||
|
||||
"github.com/pkg/errors"
|
||||
|
||||
"github.com/open-policy-agent/opa/internal/leb128"
|
||||
"github.com/open-policy-agent/opa/internal/wasm/constant"
|
||||
"github.com/open-policy-agent/opa/internal/wasm/instruction"
|
||||
"github.com/open-policy-agent/opa/internal/wasm/module"
|
||||
"github.com/open-policy-agent/opa/internal/wasm/opcode"
|
||||
"github.com/open-policy-agent/opa/internal/wasm/types"
|
||||
)
|
||||
|
||||
// ReadModule reads a binary-encoded WASM module from r. Errors are wrapped
// with the byte offset at which decoding failed.
func ReadModule(r io.Reader) (*module.Module, error) {

	// Wrap r so the number of bytes consumed is tracked for error messages.
	wr := &reader{r: r, n: 0}
	module, err := readModule(wr)
	if err != nil {
		return nil, errors.Wrapf(err, "offset 0x%x", wr.n)
	}

	return module, nil
}
|
||||
|
||||
// ReadCodeEntry reads a binary-encoded WASM code entry from r. Errors are
// wrapped with the byte offset at which decoding failed.
func ReadCodeEntry(r io.Reader) (*module.CodeEntry, error) {

	wr := &reader{r: r, n: 0}
	entry, err := readCodeEntry(wr)
	if err != nil {
		return nil, errors.Wrapf(err, "offset 0x%x", wr.n)
	}

	return entry, nil
}
|
||||
|
||||
// CodeEntries returns the WASM code entries contained in r.
|
||||
func CodeEntries(m *module.Module) ([]*module.CodeEntry, error) {
|
||||
|
||||
entries := make([]*module.CodeEntry, len(m.Code.Segments))
|
||||
|
||||
for i, s := range m.Code.Segments {
|
||||
buf := bytes.NewBuffer(s.Code)
|
||||
entry, err := ReadCodeEntry(buf)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
entries[i] = entry
|
||||
}
|
||||
|
||||
return entries, nil
|
||||
}
|
||||
|
||||
// reader wraps an io.Reader and counts the bytes consumed so decode errors
// can report the offset at which they occurred.
type reader struct {
	r io.Reader
	n int // total bytes read so far
}
|
||||
|
||||
// Read delegates to the wrapped reader and accumulates the byte count.
func (r *reader) Read(bs []byte) (int, error) {
	n, err := r.r.Read(bs)
	r.n += n
	return n, err
}
|
||||
|
||||
// readModule decodes the magic header, the version, and then all sections
// from r into a fresh module.
func readModule(r io.Reader) (*module.Module, error) {

	if err := readMagic(r); err != nil {
		return nil, err
	}

	if err := readVersion(r); err != nil {
		return nil, err
	}

	var m module.Module

	// io.EOF marks the normal end of the section stream, not a failure.
	if err := readSections(r, &m); err != nil && err != io.EOF {
		return nil, err
	}

	return &m, nil
}
|
||||
|
||||
// readCodeEntry decodes a single code entry: its local variable
// declarations followed by the function body expression.
func readCodeEntry(r io.Reader) (*module.CodeEntry, error) {

	var entry module.CodeEntry

	if err := readLocals(r, &entry.Func.Locals); err != nil {
		return nil, errors.Wrapf(err, "local declarations")
	}

	return &entry, readExpr(r, &entry.Func.Expr)
}
|
||||
|
||||
func readMagic(r io.Reader) error {
|
||||
var v uint32
|
||||
if err := binary.Read(r, binary.LittleEndian, &v); err != nil {
|
||||
return err
|
||||
} else if v != constant.Magic {
|
||||
return fmt.Errorf("illegal magic value")
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
func readVersion(r io.Reader) error {
|
||||
var v uint32
|
||||
if err := binary.Read(r, binary.LittleEndian, &v); err != nil {
|
||||
return err
|
||||
} else if v != constant.Version {
|
||||
return fmt.Errorf("illegal wasm version")
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
// readSections reads consecutive section records (one-byte id, LEB128 size,
// then size bytes of payload) from r until EOF, decoding each recognized
// section into m. Custom, start, and memory sections are skipped. Returns
// io.EOF at the natural end of the stream (filtered by the caller).
func readSections(r io.Reader, m *module.Module) error {
	for {
		id, err := readByte(r)
		if err != nil {
			return err
		}

		size, err := leb128.ReadVarUint32(r)
		if err != nil {
			return err
		}

		// Read the complete section payload up front so each section is
		// decoded from its own bounded reader.
		buf := make([]byte, size)
		if _, err := io.ReadFull(r, buf); err != nil {
			return err
		}

		bufr := bytes.NewReader(buf)

		switch id {
		case constant.CustomSectionID, constant.StartSectionID, constant.MemorySectionID:
			continue
		case constant.TypeSectionID:
			if err := readTypeSection(bufr, &m.Type); err != nil {
				return errors.Wrap(err, "type section")
			}
		case constant.ImportSectionID:
			if err := readImportSection(bufr, &m.Import); err != nil {
				return errors.Wrap(err, "import section")
			}
		case constant.GlobalSectionID:
			if err := readGlobalSection(bufr, &m.Global); err != nil {
				return errors.Wrap(err, "global section")
			}
		case constant.TableSectionID:
			if err := readTableSection(bufr, &m.Table); err != nil {
				return errors.Wrap(err, "table section")
			}
		case constant.FunctionSectionID:
			if err := readFunctionSection(bufr, &m.Function); err != nil {
				return errors.Wrap(err, "function section")
			}
		case constant.ExportSectionID:
			if err := readExportSection(bufr, &m.Export); err != nil {
				return errors.Wrap(err, "export section")
			}
		case constant.ElementSectionID:
			if err := readElementSection(bufr, &m.Element); err != nil {
				return errors.Wrap(err, "element section")
			}
		case constant.DataSectionID:
			if err := readDataSection(bufr, &m.Data); err != nil {
				return errors.Wrap(err, "data section")
			}
		case constant.CodeSectionID:
			if err := readRawCodeSection(bufr, &m.Code); err != nil {
				return errors.Wrap(err, "code section")
			}
		default:
			return fmt.Errorf("illegal section id")
		}
	}
}
|
||||
|
||||
// readTypeSection decodes a LEB128 count followed by that many function
// type definitions into s.
func readTypeSection(r io.Reader, s *module.TypeSection) error {

	n, err := leb128.ReadVarUint32(r)
	if err != nil {
		return err
	}

	for i := uint32(0); i < n; i++ {

		var ftype module.FunctionType
		if err := readFunctionType(r, &ftype); err != nil {
			return err
		}

		s.Functions = append(s.Functions, ftype)
	}

	return nil
}
|
||||
|
||||
// readImportSection decodes a LEB128 count followed by that many imports
// into s.
func readImportSection(r io.Reader, s *module.ImportSection) error {

	n, err := leb128.ReadVarUint32(r)
	if err != nil {
		return err
	}

	for i := uint32(0); i < n; i++ {

		var imp module.Import

		if err := readImport(r, &imp); err != nil {
			return err
		}

		s.Imports = append(s.Imports, imp)
	}

	return nil
}
|
||||
|
||||
// readTableSection decodes a LEB128 count followed by that many table
// definitions into s. Only the anyfunc element type is accepted.
func readTableSection(r io.Reader, s *module.TableSection) error {

	n, err := leb128.ReadVarUint32(r)
	if err != nil {
		return err
	}

	for i := uint32(0); i < n; i++ {

		var table module.Table

		if elem, err := readByte(r); err != nil {
			return err
		} else if elem != constant.ElementTypeAnyFunc {
			return fmt.Errorf("illegal element type")
		} else {
			table.Type = types.Anyfunc
		}

		if err := readLimits(r, &table.Lim); err != nil {
			return err
		}

		s.Tables = append(s.Tables, table)
	}

	return nil
}
|
||||
|
||||
// readGlobalSection decodes a LEB128 count followed by that many global
// definitions into s.
func readGlobalSection(r io.Reader, s *module.GlobalSection) error {

	n, err := leb128.ReadVarUint32(r)
	if err != nil {
		return err
	}

	for i := uint32(0); i < n; i++ {

		var global module.Global

		if err := readGlobal(r, &global); err != nil {
			return err
		}

		s.Globals = append(s.Globals, global)
	}

	return nil
}
|
||||
|
||||
// readFunctionSection decodes the function section: a vector of type
// indices, one per locally-defined function.
func readFunctionSection(r io.Reader, s *module.FunctionSection) error {
	return readVarUint32Vector(r, &s.TypeIndices)
}
|
||||
|
||||
// readExportSection decodes a LEB128 count followed by that many exports
// into s.
func readExportSection(r io.Reader, s *module.ExportSection) error {

	n, err := leb128.ReadVarUint32(r)
	if err != nil {
		return err
	}

	for i := uint32(0); i < n; i++ {

		var exp module.Export

		if err := readExport(r, &exp); err != nil {
			return err
		}

		s.Exports = append(s.Exports, exp)
	}

	return nil
}
|
||||
|
||||
// readElementSection decodes a LEB128 count followed by that many element
// segments into s.
func readElementSection(r io.Reader, s *module.ElementSection) error {

	n, err := leb128.ReadVarUint32(r)
	if err != nil {
		return err
	}

	for i := uint32(0); i < n; i++ {

		var seg module.ElementSegment

		if err := readElementSegment(r, &seg); err != nil {
			return err
		}

		s.Segments = append(s.Segments, seg)
	}

	return nil
}
|
||||
|
||||
// readDataSection decodes a LEB128 count followed by that many data
// segments into s.
func readDataSection(r io.Reader, s *module.DataSection) error {

	n, err := leb128.ReadVarUint32(r)
	if err != nil {
		return err
	}

	for i := uint32(0); i < n; i++ {

		var seg module.DataSegment

		if err := readDataSegment(r, &seg); err != nil {
			return err
		}

		s.Segments = append(s.Segments, seg)
	}

	return nil
}
|
||||
|
||||
// readRawCodeSection decodes a LEB128 count followed by that many raw
// (undecoded) code segments into s; the bodies are decoded later by
// CodeEntries.
func readRawCodeSection(r io.Reader, s *module.RawCodeSection) error {

	n, err := leb128.ReadVarUint32(r)
	if err != nil {
		return err
	}

	for i := uint32(0); i < n; i++ {
		var seg module.RawCodeSegment

		if err := readRawCodeSegment(r, &seg); err != nil {
			return err
		}

		s.Segments = append(s.Segments, seg)
	}

	return nil
}
|
||||
|
||||
// readFunctionType decodes a function type: the 0x60 type id byte, then
// the parameter value types, then the result value types.
func readFunctionType(r io.Reader, ftype *module.FunctionType) error {

	if b, err := readByte(r); err != nil {
		return err
	} else if b != constant.FunctionTypeID {
		return fmt.Errorf("illegal function type id 0x%x", b)
	}

	if err := readValueTypeVector(r, &ftype.Params); err != nil {
		return err
	}

	return readValueTypeVector(r, &ftype.Results)
}
|
||||
|
||||
// readGlobal decodes a global definition: its value type, a mutability
// flag (0 or 1), and its constant initializer expression.
func readGlobal(r io.Reader, global *module.Global) error {

	if err := readValueType(r, &global.Type); err != nil {
		return err
	}

	b, err := readByte(r)
	if err != nil {
		return err
	}

	if b == 1 {
		global.Mutable = true
	} else if b != 0 {
		return fmt.Errorf("illegal mutability flag")
	}

	if err := readConstantExpr(r, &global.Init); err != nil {
		return err
	}

	return nil
}
|
||||
|
||||
func readImport(r io.Reader, imp *module.Import) error {
|
||||
|
||||
if err := readByteVectorString(r, &imp.Module); err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
if err := readByteVectorString(r, &imp.Name); err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
b, err := readByte(r)
|
||||
if err != nil {
|
||||
return err
|
||||
|
||||
}
|
||||
|
||||
if b == constant.ImportDescType {
|
||||
index, err := leb128.ReadVarUint32(r)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
imp.Descriptor = module.FunctionImport{
|
||||
Func: index,
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
if b == constant.ImportDescTable {
|
||||
if elem, err := readByte(r); err != nil {
|
||||
return err
|
||||
} else if elem != constant.ElementTypeAnyFunc {
|
||||
return fmt.Errorf("illegal element type")
|
||||
}
|
||||
desc := module.TableImport{
|
||||
Type: types.Anyfunc,
|
||||
}
|
||||
if err := readLimits(r, &desc.Lim); err != nil {
|
||||
return err
|
||||
}
|
||||
imp.Descriptor = desc
|
||||
return nil
|
||||
}
|
||||
|
||||
if b == constant.ImportDescMem {
|
||||
desc := module.MemoryImport{}
|
||||
if err := readLimits(r, &desc.Mem.Lim); err != nil {
|
||||
return err
|
||||
}
|
||||
imp.Descriptor = desc
|
||||
return nil
|
||||
}
|
||||
|
||||
if b == constant.ImportDescGlobal {
|
||||
desc := module.GlobalImport{}
|
||||
if err := readValueType(r, &desc.Type); err != nil {
|
||||
return err
|
||||
}
|
||||
b, err := readByte(r)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
if b == 1 {
|
||||
desc.Mutable = true
|
||||
} else if b != 0 {
|
||||
return fmt.Errorf("illegal mutability flag")
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
return fmt.Errorf("illegal import descriptor type")
|
||||
}
|
||||
|
||||
// readExport decodes a single export: its name, a one-byte descriptor
// kind, and the exported index.
func readExport(r io.Reader, exp *module.Export) error {

	if err := readByteVectorString(r, &exp.Name); err != nil {
		return err
	}

	b, err := readByte(r)
	if err != nil {
		return err
	}

	switch b {
	case constant.ExportDescType:
		exp.Descriptor.Type = module.FunctionExportType
	case constant.ExportDescTable:
		exp.Descriptor.Type = module.TableExportType
	case constant.ExportDescMem:
		exp.Descriptor.Type = module.MemoryExportType
	case constant.ExportDescGlobal:
		exp.Descriptor.Type = module.GlobalExportType
	default:
		return fmt.Errorf("illegal export descriptor type")
	}

	exp.Descriptor.Index, err = leb128.ReadVarUint32(r)
	if err != nil {
		return err
	}

	return nil
}
|
||||
|
||||
// readElementSegment decodes an element segment: table index, constant
// offset expression, and the vector of function indices.
func readElementSegment(r io.Reader, seg *module.ElementSegment) error {

	if err := readVarUint32(r, &seg.Index); err != nil {
		return err
	}

	if err := readConstantExpr(r, &seg.Offset); err != nil {
		return err
	}

	if err := readVarUint32Vector(r, &seg.Indices); err != nil {
		return err
	}

	return nil
}
|
||||
|
||||
// readDataSegment decodes a data segment: memory index, constant offset
// expression, and the raw initializer bytes.
func readDataSegment(r io.Reader, seg *module.DataSegment) error {

	if err := readVarUint32(r, &seg.Index); err != nil {
		return err
	}

	if err := readConstantExpr(r, &seg.Offset); err != nil {
		return err
	}

	if err := readByteVector(r, &seg.Init); err != nil {
		return err
	}

	return nil
}
|
||||
|
||||
// readRawCodeSegment reads a length-prefixed byte vector containing one
// undecoded code entry.
func readRawCodeSegment(r io.Reader, seg *module.RawCodeSegment) error {
	return readByteVector(r, &seg.Code)
}
|
||||
|
||||
// readConstantExpr decodes a constant initializer expression terminated by
// the End opcode. Only i32.const and i64.const are supported.
func readConstantExpr(r io.Reader, expr *module.Expr) error {

	instrs := make([]instruction.Instruction, 0)

	for {
		b, err := readByte(r)
		if err != nil {
			return err
		}

		switch opcode.Opcode(b) {
		case opcode.I32Const:
			i32, err := leb128.ReadVarInt32(r)
			if err != nil {
				return err
			}
			instrs = append(instrs, instruction.I32Const{Value: i32})
		case opcode.I64Const:
			i64, err := leb128.ReadVarInt64(r)
			if err != nil {
				return err
			}
			instrs = append(instrs, instruction.I64Const{Value: i64})
		case opcode.End:
			// Only publish the instructions once the expression decoded
			// completely.
			expr.Instrs = instrs
			return nil
		default:
			return fmt.Errorf("illegal constant expr opcode 0x%x", b)
		}
	}
}
|
||||
|
||||
// readExpr decodes a function body expression. readInstructions uses the
// panicking leb128.MustReadVar* helpers, so panics are converted back into
// an error return here via recover.
func readExpr(r io.Reader, expr *module.Expr) (err error) {

	defer func() {
		// NOTE: r here shadows the io.Reader parameter; it is the
		// recovered panic value.
		if r := recover(); r != nil {
			switch r := r.(type) {
			case error:
				err = r
			default:
				err = fmt.Errorf("unknown panic")
			}
		}
	}()

	return readInstructions(r, &expr.Instrs)
}
|
||||
|
||||
// readInstructions decodes instructions until the matching end opcode
// and stores the result in *instrs. Immediate arguments are read with
// the MustRead* helpers, which panic on malformed input; callers are
// expected to recover (see readExpr). Nested block/loop bodies are
// decoded recursively.
func readInstructions(r io.Reader, instrs *[]instruction.Instruction) error {

	ret := make([]instruction.Instruction, 0)

	for {
		b, err := readByte(r)
		if err != nil {
			return err
		}

		switch opcode.Opcode(b) {
		case opcode.I32Const:
			ret = append(ret, instruction.I32Const{Value: leb128.MustReadVarInt32(r)})
		case opcode.I64Const:
			ret = append(ret, instruction.I64Const{Value: leb128.MustReadVarInt64(r)})
		case opcode.I32Eqz:
			ret = append(ret, instruction.I32Eqz{})
		case opcode.GetLocal:
			ret = append(ret, instruction.GetLocal{Index: leb128.MustReadVarUint32(r)})
		case opcode.SetLocal:
			ret = append(ret, instruction.SetLocal{Index: leb128.MustReadVarUint32(r)})
		case opcode.Call:
			ret = append(ret, instruction.Call{Index: leb128.MustReadVarUint32(r)})
		case opcode.BrIf:
			ret = append(ret, instruction.BrIf{Index: leb128.MustReadVarUint32(r)})
		case opcode.Return:
			ret = append(ret, instruction.Return{})
		case opcode.Block:
			block := instruction.Block{}
			// NOTE(review): block.Type is a nil *types.ValueType here, so
			// readBlockValueType dereferences nil for any non-empty block
			// result type — confirm whether typed blocks can occur in the
			// inputs this decoder sees (a panic would be recovered by
			// readExpr, but the type information is lost either way).
			if err := readBlockValueType(r, block.Type); err != nil {
				return err
			}
			if err := readInstructions(r, &block.Instrs); err != nil {
				return err
			}
			ret = append(ret, block)
		case opcode.Loop:
			loop := instruction.Loop{}
			// NOTE(review): same nil-pointer concern as the Block case above.
			if err := readBlockValueType(r, loop.Type); err != nil {
				return err
			}
			if err := readInstructions(r, &loop.Instrs); err != nil {
				return err
			}
			ret = append(ret, loop)
		case opcode.End:
			*instrs = ret
			return nil
		default:
			return fmt.Errorf("illegal opcode 0x%x", b)
		}
	}
}
|
||||
|
||||
func readLimits(r io.Reader, l *module.Limit) error {
|
||||
|
||||
b, err := readByte(r)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
min, err := leb128.ReadVarUint32(r)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
l.Min = min
|
||||
|
||||
if b == 1 {
|
||||
max, err := leb128.ReadVarUint32(r)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
l.Max = &max
|
||||
} else if b != 0 {
|
||||
return fmt.Errorf("illegal limit flag")
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
func readLocals(r io.Reader, locals *[]module.LocalDeclaration) error {
|
||||
|
||||
n, err := leb128.ReadVarUint32(r)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
ret := make([]module.LocalDeclaration, n)
|
||||
|
||||
for i := uint32(0); i < n; i++ {
|
||||
if err := readVarUint32(r, &ret[i].Count); err != nil {
|
||||
return err
|
||||
}
|
||||
if err := readValueType(r, &ret[i].Type); err != nil {
|
||||
return err
|
||||
}
|
||||
}
|
||||
|
||||
*locals = ret
|
||||
return nil
|
||||
}
|
||||
|
||||
// readByteVector reads a LEB128 length prefix followed by that many raw
// bytes, storing the result in *v. *v is only assigned on success.
//
// NOTE(review): the length comes straight from the input, so a malformed
// stream can request an allocation of up to 4 GiB — confirm whether an
// upper bound should be enforced for untrusted input.
func readByteVector(r io.Reader, v *[]byte) error {

	n, err := leb128.ReadVarUint32(r)
	if err != nil {
		return err
	}

	buf := make([]byte, n)
	// ReadFull guarantees exactly n bytes or an error.
	if _, err := io.ReadFull(r, buf); err != nil {
		return err
	}

	*v = buf
	return nil
}
|
||||
|
||||
func readByteVectorString(r io.Reader, v *string) error {
|
||||
|
||||
var buf []byte
|
||||
|
||||
if err := readByteVector(r, &buf); err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
*v = string(buf)
|
||||
return nil
|
||||
}
|
||||
|
||||
func readVarUint32Vector(r io.Reader, v *[]uint32) error {
|
||||
|
||||
n, err := leb128.ReadVarUint32(r)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
ret := make([]uint32, n)
|
||||
|
||||
for i := uint32(0); i < n; i++ {
|
||||
if err := readVarUint32(r, &ret[i]); err != nil {
|
||||
return err
|
||||
}
|
||||
}
|
||||
|
||||
*v = ret
|
||||
return nil
|
||||
}
|
||||
|
||||
func readValueTypeVector(r io.Reader, v *[]types.ValueType) error {
|
||||
|
||||
n, err := leb128.ReadVarUint32(r)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
ret := make([]types.ValueType, n)
|
||||
|
||||
for i := uint32(0); i < n; i++ {
|
||||
if err := readValueType(r, &ret[i]); err != nil {
|
||||
return err
|
||||
}
|
||||
}
|
||||
|
||||
*v = ret
|
||||
return nil
|
||||
}
|
||||
|
||||
func readVarUint32(r io.Reader, v *uint32) error {
|
||||
var err error
|
||||
*v, err = leb128.ReadVarUint32(r)
|
||||
return err
|
||||
}
|
||||
|
||||
func readValueType(r io.Reader, v *types.ValueType) error {
|
||||
if b, err := readByte(r); err != nil {
|
||||
return err
|
||||
} else if b == constant.ValueTypeI32 {
|
||||
*v = types.I32
|
||||
} else if b == constant.ValueTypeI64 {
|
||||
*v = types.I64
|
||||
} else if b == constant.ValueTypeF32 {
|
||||
*v = types.F32
|
||||
} else if b == constant.ValueTypeF64 {
|
||||
*v = types.F64
|
||||
} else {
|
||||
return fmt.Errorf("illegal value type: 0x%x", b)
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
func readBlockValueType(r io.Reader, v *types.ValueType) error {
|
||||
if b, err := readByte(r); err != nil {
|
||||
return err
|
||||
} else if b == constant.ValueTypeI32 {
|
||||
*v = types.I32
|
||||
} else if b == constant.ValueTypeI64 {
|
||||
*v = types.I64
|
||||
} else if b == constant.ValueTypeF32 {
|
||||
*v = types.F32
|
||||
} else if b == constant.ValueTypeF64 {
|
||||
*v = types.F64
|
||||
} else if b != constant.BlockTypeEmpty {
|
||||
return fmt.Errorf("illegal value type: 0x%x", b)
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
func readByte(r io.Reader) (byte, error) {
|
||||
buf := make([]byte, 1)
|
||||
_, err := r.Read(buf)
|
||||
return buf[0], err
|
||||
}
|
||||
615
vendor/github.com/open-policy-agent/opa/internal/wasm/encoding/writer.go
generated
vendored
Normal file
615
vendor/github.com/open-policy-agent/opa/internal/wasm/encoding/writer.go
generated
vendored
Normal file
@@ -0,0 +1,615 @@
|
||||
// Copyright 2018 The OPA Authors. All rights reserved.
|
||||
// Use of this source code is governed by an Apache2
|
||||
// license that can be found in the LICENSE file.
|
||||
|
||||
package encoding
|
||||
|
||||
import (
|
||||
"bytes"
|
||||
"encoding/binary"
|
||||
"fmt"
|
||||
"io"
|
||||
"math"
|
||||
|
||||
"github.com/open-policy-agent/opa/internal/leb128"
|
||||
"github.com/open-policy-agent/opa/internal/wasm/constant"
|
||||
"github.com/open-policy-agent/opa/internal/wasm/instruction"
|
||||
"github.com/open-policy-agent/opa/internal/wasm/module"
|
||||
"github.com/open-policy-agent/opa/internal/wasm/opcode"
|
||||
"github.com/open-policy-agent/opa/internal/wasm/types"
|
||||
)
|
||||
|
||||
// WriteModule writes a binary-encoded representation of module to w.
|
||||
func WriteModule(w io.Writer, module *module.Module) error {
|
||||
|
||||
if err := writeMagic(w); err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
if err := writeVersion(w); err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
if module == nil {
|
||||
return nil
|
||||
}
|
||||
|
||||
if err := writeTypeSection(w, module.Type); err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
if err := writeImportSection(w, module.Import); err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
if err := writeFunctionSection(w, module.Function); err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
if err := writeTableSection(w, module.Table); err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
if err := writeGlobalSection(w, module.Global); err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
if err := writeExportSection(w, module.Export); err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
if err := writeElementSection(w, module.Element); err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
if err := writeRawCodeSection(w, module.Code); err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
if err := writeDataSection(w, module.Data); err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
// WriteCodeEntry writes a binary encoded representation of entry to w.
|
||||
func WriteCodeEntry(w io.Writer, entry *module.CodeEntry) error {
|
||||
|
||||
if err := leb128.WriteVarUint32(w, uint32(len(entry.Func.Locals))); err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
for _, local := range entry.Func.Locals {
|
||||
|
||||
if err := leb128.WriteVarUint32(w, local.Count); err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
if err := writeValueType(w, local.Type); err != nil {
|
||||
return err
|
||||
}
|
||||
}
|
||||
|
||||
return writeInstructions(w, entry.Func.Expr.Instrs)
|
||||
}
|
||||
|
||||
// writeMagic emits the WASM magic number in little-endian order.
func writeMagic(w io.Writer) error {
	return binary.Write(w, binary.LittleEndian, constant.Magic)
}

// writeVersion emits the WASM binary format version in little-endian order.
func writeVersion(w io.Writer) error {
	return binary.Write(w, binary.LittleEndian, constant.Version)
}
|
||||
|
||||
// writeTypeSection encodes the type section: the section ID byte, a
// size-prefixed payload containing the function-signature count, then
// each signature. An empty section is omitted entirely.
func writeTypeSection(w io.Writer, s module.TypeSection) error {

	if len(s.Functions) == 0 {
		return nil
	}

	if err := writeByte(w, constant.TypeSectionID); err != nil {
		return err
	}

	// The payload is staged in a buffer because the section header needs
	// the payload's byte size up front (see writeRawSection).
	var buf bytes.Buffer

	if err := leb128.WriteVarUint32(&buf, uint32(len(s.Functions))); err != nil {
		return err
	}

	for _, fsig := range s.Functions {
		if err := writeFunctionType(&buf, fsig); err != nil {
			return err
		}
	}

	return writeRawSection(w, &buf)
}

// writeImportSection encodes the import section; empty sections are
// omitted. Same buffer-then-size scheme as writeTypeSection.
func writeImportSection(w io.Writer, s module.ImportSection) error {

	if len(s.Imports) == 0 {
		return nil
	}

	if err := writeByte(w, constant.ImportSectionID); err != nil {
		return err
	}

	var buf bytes.Buffer

	if err := leb128.WriteVarUint32(&buf, uint32(len(s.Imports))); err != nil {
		return err
	}

	for _, imp := range s.Imports {
		if err := writeImport(&buf, imp); err != nil {
			return err
		}
	}

	return writeRawSection(w, &buf)
}

// writeGlobalSection encodes the global section; empty sections are
// omitted.
func writeGlobalSection(w io.Writer, s module.GlobalSection) error {

	if len(s.Globals) == 0 {
		return nil
	}

	if err := writeByte(w, constant.GlobalSectionID); err != nil {
		return err
	}

	var buf bytes.Buffer

	if err := leb128.WriteVarUint32(&buf, uint32(len(s.Globals))); err != nil {
		return err
	}

	for _, global := range s.Globals {
		if err := writeGlobal(&buf, global); err != nil {
			return err
		}
	}

	return writeRawSection(w, &buf)
}

// writeFunctionSection encodes the function section: one type index per
// defined function. Empty sections are omitted.
func writeFunctionSection(w io.Writer, s module.FunctionSection) error {

	if len(s.TypeIndices) == 0 {
		return nil
	}

	if err := writeByte(w, constant.FunctionSectionID); err != nil {
		return err
	}

	var buf bytes.Buffer

	if err := leb128.WriteVarUint32(&buf, uint32(len(s.TypeIndices))); err != nil {
		return err
	}

	for _, idx := range s.TypeIndices {
		if err := leb128.WriteVarUint32(&buf, uint32(idx)); err != nil {
			return err
		}
	}

	return writeRawSection(w, &buf)
}
|
||||
|
||||
// writeTableSection encodes the table section; empty sections are
// omitted. Only the anyfunc element type is supported.
func writeTableSection(w io.Writer, s module.TableSection) error {

	if len(s.Tables) == 0 {
		return nil
	}

	if err := writeByte(w, constant.TableSectionID); err != nil {
		return err
	}

	var buf bytes.Buffer

	if err := leb128.WriteVarUint32(&buf, uint32(len(s.Tables))); err != nil {
		return err
	}

	for _, table := range s.Tables {
		// Each table is its element type byte followed by its limits.
		switch table.Type {
		case types.Anyfunc:
			if err := writeByte(&buf, constant.ElementTypeAnyFunc); err != nil {
				return err
			}
		default:
			return fmt.Errorf("illegal table element type")
		}
		if err := writeLimits(&buf, table.Lim); err != nil {
			return err
		}
	}

	return writeRawSection(w, &buf)

}
|
||||
|
||||
// writeExportSection encodes the export section; empty sections are
// omitted. Each export is a name, a descriptor-type byte, and an index.
func writeExportSection(w io.Writer, s module.ExportSection) error {

	if len(s.Exports) == 0 {
		return nil
	}

	if err := writeByte(w, constant.ExportSectionID); err != nil {
		return err
	}

	var buf bytes.Buffer

	if err := leb128.WriteVarUint32(&buf, uint32(len(s.Exports))); err != nil {
		return err
	}

	for _, exp := range s.Exports {
		if err := writeByteVector(&buf, []byte(exp.Name)); err != nil {
			return err
		}
		var tpe byte
		switch exp.Descriptor.Type {
		case module.FunctionExportType:
			// NOTE(review): the function descriptor constant is named
			// ExportDescType rather than ExportDescFunc — presumably the
			// function-export tag; confirm against the constant package.
			tpe = constant.ExportDescType
		case module.TableExportType:
			tpe = constant.ExportDescTable
		case module.MemoryExportType:
			tpe = constant.ExportDescMem
		case module.GlobalExportType:
			tpe = constant.ExportDescGlobal
		default:
			return fmt.Errorf("illegal export descriptor type 0x%x", exp.Descriptor.Type)
		}
		if err := writeByte(&buf, tpe); err != nil {
			return err
		}
		if err := leb128.WriteVarUint32(&buf, exp.Descriptor.Index); err != nil {
			return err
		}
	}

	return writeRawSection(w, &buf)
}
|
||||
|
||||
// writeElementSection encodes the element section; empty sections are
// omitted. Each segment is a table index, a constant offset expression
// (terminated by the end opcode via writeInstructions), and a vector of
// function indices.
func writeElementSection(w io.Writer, s module.ElementSection) error {

	if len(s.Segments) == 0 {
		return nil
	}

	if err := writeByte(w, constant.ElementSectionID); err != nil {
		return err
	}

	var buf bytes.Buffer

	if err := leb128.WriteVarUint32(&buf, uint32(len(s.Segments))); err != nil {
		return err
	}

	for _, seg := range s.Segments {
		if err := leb128.WriteVarUint32(&buf, seg.Index); err != nil {
			return err
		}
		if err := writeInstructions(&buf, seg.Offset.Instrs); err != nil {
			return err
		}
		if err := writeVarUint32Vector(&buf, seg.Indices); err != nil {
			return err
		}
	}

	return writeRawSection(w, &buf)
}

// writeRawCodeSection encodes the code section from pre-encoded code
// blobs: each segment is written as a size prefix plus its raw bytes,
// without re-encoding the instructions.
func writeRawCodeSection(w io.Writer, s module.RawCodeSection) error {

	if len(s.Segments) == 0 {
		return nil
	}

	if err := writeByte(w, constant.CodeSectionID); err != nil {
		return err
	}

	var buf bytes.Buffer

	if err := leb128.WriteVarUint32(&buf, uint32(len(s.Segments))); err != nil {
		return err
	}

	for _, seg := range s.Segments {
		if err := leb128.WriteVarUint32(&buf, uint32(len(seg.Code))); err != nil {
			return err
		}
		if _, err := buf.Write(seg.Code); err != nil {
			return err
		}
	}

	return writeRawSection(w, &buf)
}

// writeDataSection encodes the data section; empty sections are omitted.
// Each segment is a memory index, a constant offset expression, and the
// raw initialization bytes.
func writeDataSection(w io.Writer, s module.DataSection) error {

	if len(s.Segments) == 0 {
		return nil
	}

	if err := writeByte(w, constant.DataSectionID); err != nil {
		return err
	}

	var buf bytes.Buffer

	if err := leb128.WriteVarUint32(&buf, uint32(len(s.Segments))); err != nil {
		return err
	}

	for _, seg := range s.Segments {
		if err := leb128.WriteVarUint32(&buf, seg.Index); err != nil {
			return err
		}
		if err := writeInstructions(&buf, seg.Offset.Instrs); err != nil {
			return err
		}
		if err := writeByteVector(&buf, seg.Init); err != nil {
			return err
		}
	}

	return writeRawSection(w, &buf)
}
|
||||
|
||||
// writeFunctionType encodes a function signature: the function-type tag
// byte followed by the parameter and result value-type vectors.
func writeFunctionType(w io.Writer, fsig module.FunctionType) error {

	if err := writeByte(w, constant.FunctionTypeID); err != nil {
		return err
	}

	if err := writeValueTypeVector(w, fsig.Params); err != nil {
		return err
	}

	return writeValueTypeVector(w, fsig.Results)
}
|
||||
|
||||
// writeImport encodes a single import: module name, entity name, then a
// descriptor whose layout depends on the import kind (function, table,
// memory, or global).
func writeImport(w io.Writer, imp module.Import) error {

	if err := writeByteVector(w, []byte(imp.Module)); err != nil {
		return err
	}

	if err := writeByteVector(w, []byte(imp.Name)); err != nil {
		return err
	}

	switch desc := imp.Descriptor.(type) {
	case module.FunctionImport:
		// Function import: kind byte + type index.
		if err := writeByte(w, constant.ImportDescType); err != nil {
			return err
		}
		return leb128.WriteVarUint32(w, desc.Func)
	case module.TableImport:
		// Table import: kind byte + element type + limits. Only anyfunc
		// tables exist in this format version.
		if err := writeByte(w, constant.ImportDescTable); err != nil {
			return err
		}
		if err := writeByte(w, constant.ElementTypeAnyFunc); err != nil {
			return err
		}
		return writeLimits(w, desc.Lim)
	case module.MemoryImport:
		// Memory import: kind byte + limits.
		if err := writeByte(w, constant.ImportDescMem); err != nil {
			return err
		}
		return writeLimits(w, desc.Mem.Lim)
	case module.GlobalImport:
		// Global import: kind byte + value type + mutability flag.
		if err := writeByte(w, constant.ImportDescGlobal); err != nil {
			return err
		}
		if err := writeValueType(w, desc.Type); err != nil {
			return err
		}
		if desc.Mutable {
			return writeByte(w, constant.Mutable)
		}
		return writeByte(w, constant.Const)
	default:
		return fmt.Errorf("illegal import descriptor type")
	}
}
|
||||
|
||||
func writeGlobal(w io.Writer, global module.Global) error {
|
||||
|
||||
if err := writeValueType(w, global.Type); err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
var err error
|
||||
|
||||
if global.Mutable {
|
||||
err = writeByte(w, constant.Mutable)
|
||||
} else {
|
||||
err = writeByte(w, constant.Const)
|
||||
}
|
||||
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
if err := writeInstructions(w, global.Init.Instrs); err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
// writeInstructions encodes instrs to w and appends the terminating end
// opcode. Each instruction is its opcode byte, its immediate arguments
// (encoded by Go type), and — for structured instructions — a block
// type followed by the recursively encoded nested body.
func writeInstructions(w io.Writer, instrs []instruction.Instruction) error {

	for i, instr := range instrs {

		_, err := w.Write([]byte{byte(instr.Op())})
		if err != nil {
			return err
		}

		for _, arg := range instr.ImmediateArgs() {
			var err error
			switch arg := arg.(type) {
			case int32:
				err = leb128.WriteVarInt32(w, arg)
			case int64:
				err = leb128.WriteVarInt64(w, arg)
			case uint32:
				err = leb128.WriteVarUint32(w, arg)
			case uint64:
				err = leb128.WriteVarUint64(w, arg)
			case float32:
				// Floats are written as their raw IEEE 754 bits,
				// little-endian, not LEB128.
				u32 := math.Float32bits(arg)
				err = binary.Write(w, binary.LittleEndian, u32)
			case float64:
				u64 := math.Float64bits(arg)
				err = binary.Write(w, binary.LittleEndian, u64)
			default:
				return fmt.Errorf("illegal immediate argument type on instruction %d", i)
			}
			if err != nil {
				return err
			}
		}

		// Structured instructions (block, loop) carry a result type and a
		// nested instruction sequence; the recursive call emits the
		// nested body's own end opcode.
		if si, ok := instr.(instruction.StructuredInstruction); ok {
			if err := writeBlockValueType(w, si.BlockType()); err != nil {
				return err
			}
			if err := writeInstructions(w, si.Instructions()); err != nil {
				return err
			}
		}

	}

	_, err := w.Write([]byte{byte(opcode.End)})
	return err
}
|
||||
|
||||
func writeLimits(w io.Writer, lim module.Limit) error {
|
||||
if lim.Max == nil {
|
||||
if err := writeByte(w, 0); err != nil {
|
||||
return err
|
||||
}
|
||||
} else {
|
||||
if err := writeByte(w, 1); err != nil {
|
||||
return err
|
||||
}
|
||||
}
|
||||
if err := leb128.WriteVarUint32(w, lim.Min); err != nil {
|
||||
return err
|
||||
}
|
||||
if lim.Max != nil {
|
||||
return leb128.WriteVarUint32(w, *lim.Max)
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
func writeVarUint32Vector(w io.Writer, v []uint32) error {
|
||||
|
||||
if err := leb128.WriteVarUint32(w, uint32(len(v))); err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
for i := range v {
|
||||
if err := leb128.WriteVarUint32(w, v[i]); err != nil {
|
||||
return err
|
||||
}
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
func writeValueTypeVector(w io.Writer, v []types.ValueType) error {
|
||||
|
||||
if err := leb128.WriteVarUint32(w, uint32(len(v))); err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
for i := range v {
|
||||
if err := writeValueType(w, v[i]); err != nil {
|
||||
return err
|
||||
}
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
func writeBlockValueType(w io.Writer, v *types.ValueType) error {
|
||||
var b byte
|
||||
if v != nil {
|
||||
switch *v {
|
||||
case types.I32:
|
||||
b = constant.ValueTypeI32
|
||||
case types.I64:
|
||||
b = constant.ValueTypeI64
|
||||
case types.F32:
|
||||
b = constant.ValueTypeF32
|
||||
case types.F64:
|
||||
b = constant.ValueTypeF64
|
||||
}
|
||||
} else {
|
||||
b = constant.BlockTypeEmpty
|
||||
}
|
||||
return writeByte(w, b)
|
||||
}
|
||||
|
||||
func writeValueType(w io.Writer, v types.ValueType) error {
|
||||
var b byte
|
||||
switch v {
|
||||
case types.I32:
|
||||
b = constant.ValueTypeI32
|
||||
case types.I64:
|
||||
b = constant.ValueTypeI64
|
||||
case types.F32:
|
||||
b = constant.ValueTypeF32
|
||||
case types.F64:
|
||||
b = constant.ValueTypeF64
|
||||
}
|
||||
return writeByte(w, b)
|
||||
}
|
||||
|
||||
func writeRawSection(w io.Writer, buf *bytes.Buffer) error {
|
||||
|
||||
size := buf.Len()
|
||||
|
||||
if err := leb128.WriteVarUint32(w, uint32(size)); err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
_, err := io.Copy(w, buf)
|
||||
return err
|
||||
}
|
||||
|
||||
// writeByteVector emits bs preceded by its length as a LEB128 uint32.
func writeByteVector(w io.Writer, bs []byte) error {

	if err := leb128.WriteVarUint32(w, uint32(len(bs))); err != nil {
		return err
	}

	_, err := w.Write(bs)
	return err
}
|
||||
|
||||
func writeByte(w io.Writer, b byte) error {
|
||||
buf := make([]byte, 1)
|
||||
buf[0] = b
|
||||
_, err := w.Write(buf)
|
||||
return err
|
||||
}
|
||||
139
vendor/github.com/open-policy-agent/opa/internal/wasm/instruction/control.go
generated
vendored
Normal file
139
vendor/github.com/open-policy-agent/opa/internal/wasm/instruction/control.go
generated
vendored
Normal file
@@ -0,0 +1,139 @@
|
||||
// Copyright 2018 The OPA Authors. All rights reserved.
|
||||
// Use of this source code is governed by an Apache2
|
||||
// license that can be found in the LICENSE file.
|
||||
|
||||
package instruction
|
||||
|
||||
import (
|
||||
"github.com/open-policy-agent/opa/internal/wasm/opcode"
|
||||
"github.com/open-policy-agent/opa/internal/wasm/types"
|
||||
)
|
||||
|
||||
// Unreachable represents a WASM unreachable instruction.
type Unreachable struct {
	NoImmediateArgs
}

// Op returns the opcode of the instruction.
func (Unreachable) Op() opcode.Opcode {
	return opcode.Unreachable
}

// Nop represents a WASM no-op instruction.
type Nop struct {
	NoImmediateArgs
}

// Op returns the opcode of the instruction.
func (Nop) Op() opcode.Opcode {
	return opcode.Nop
}
|
||||
|
||||
// Block represents a WASM block instruction. It is a structured
// instruction: it carries an optional result type and a nested
// instruction sequence.
type Block struct {
	NoImmediateArgs
	Type   *types.ValueType // nil means the empty block type
	Instrs []Instruction
}

// Op returns the opcode of the instruction.
func (Block) Op() opcode.Opcode {
	return opcode.Block
}

// BlockType returns the type of the block's return value, or nil for
// the empty block type.
func (i Block) BlockType() *types.ValueType {
	return i.Type
}

// Instructions returns the instructions contained in the block.
func (i Block) Instructions() []Instruction {
	return i.Instrs
}
|
||||
|
||||
// Loop represents a WASM loop instruction. It is a structured
// instruction: it carries an optional result type and a nested
// instruction sequence.
type Loop struct {
	NoImmediateArgs
	Type   *types.ValueType // nil means the empty block type
	Instrs []Instruction
}

// Op returns the opcode of the instruction.
func (Loop) Op() opcode.Opcode {
	return opcode.Loop
}

// BlockType returns the type of the loop's return value, or nil for the
// empty block type.
func (i Loop) BlockType() *types.ValueType {
	return i.Type
}

// Instructions returns the instructions contained in the loop.
func (i Loop) Instructions() []Instruction {
	return i.Instrs
}
|
||||
|
||||
// Br represents a WASM br instruction.
type Br struct {
	Index uint32 // relative depth of the target block
}

// Op returns the opcode of the instruction.
func (Br) Op() opcode.Opcode {
	return opcode.Br
}

// ImmediateArgs returns the block index to break to.
func (i Br) ImmediateArgs() []interface{} {
	return []interface{}{i.Index}
}

// BrIf represents a WASM br_if instruction.
type BrIf struct {
	Index uint32 // relative depth of the target block
}

// Op returns the opcode of the instruction.
func (BrIf) Op() opcode.Opcode {
	return opcode.BrIf
}

// ImmediateArgs returns the block index to break to.
func (i BrIf) ImmediateArgs() []interface{} {
	return []interface{}{i.Index}
}

// Call represents a WASM call instruction.
type Call struct {
	Index uint32 // index of the callee in the function index space
}

// Op returns the opcode of the instruction.
func (Call) Op() opcode.Opcode {
	return opcode.Call
}

// ImmediateArgs returns the function index.
func (i Call) ImmediateArgs() []interface{} {
	return []interface{}{i.Index}
}
|
||||
|
||||
// Return represents a WASM return instruction.
type Return struct {
	NoImmediateArgs
}

// Op returns the opcode of the instruction.
func (Return) Op() opcode.Opcode {
	return opcode.Return
}

// End represents the special WASM end instruction that terminates a
// block, loop, or function body.
type End struct {
	NoImmediateArgs
}

// Op returns the opcode of the instruction.
func (End) Op() opcode.Opcode {
	return opcode.End
}
|
||||
33
vendor/github.com/open-policy-agent/opa/internal/wasm/instruction/instruction.go
generated
vendored
Normal file
33
vendor/github.com/open-policy-agent/opa/internal/wasm/instruction/instruction.go
generated
vendored
Normal file
@@ -0,0 +1,33 @@
|
||||
// Copyright 2018 The OPA Authors. All rights reserved.
|
||||
// Use of this source code is governed by an Apache2
|
||||
// license that can be found in the LICENSE file.
|
||||
|
||||
// Package instruction defines WASM instruction types.
|
||||
package instruction
|
||||
|
||||
import (
|
||||
"github.com/open-policy-agent/opa/internal/wasm/opcode"
|
||||
"github.com/open-policy-agent/opa/internal/wasm/types"
|
||||
)
|
||||
|
||||
// NoImmediateArgs indicates the instruction has no immediate arguments.
// Embed it to get a nil-returning ImmediateArgs implementation for free.
type NoImmediateArgs struct {
}

// ImmediateArgs returns the immediate arguments of an instruction.
func (NoImmediateArgs) ImmediateArgs() []interface{} {
	return nil
}

// Instruction represents a single WASM instruction.
type Instruction interface {
	Op() opcode.Opcode
	ImmediateArgs() []interface{}
}

// StructuredInstruction represents a structured control instruction —
// one that carries a block result type and a nested instruction
// sequence, such as block or loop.
type StructuredInstruction interface {
	Instruction
	BlockType() *types.ValueType
	Instructions() []Instruction
}
|
||||
39
vendor/github.com/open-policy-agent/opa/internal/wasm/instruction/memory.go
generated
vendored
Normal file
39
vendor/github.com/open-policy-agent/opa/internal/wasm/instruction/memory.go
generated
vendored
Normal file
@@ -0,0 +1,39 @@
|
||||
// Copyright 2019 The OPA Authors. All rights reserved.
|
||||
// Use of this source code is governed by an Apache2
|
||||
// license that can be found in the LICENSE file.
|
||||
|
||||
package instruction
|
||||
|
||||
import "github.com/open-policy-agent/opa/internal/wasm/opcode"
|
||||
|
||||
// I32Load represents the WASM i32.load instruction.
type I32Load struct {
	Offset int32
	Align  int32 // expressed as a power of two
}

// Op returns the opcode of the instruction.
func (I32Load) Op() opcode.Opcode {
	return opcode.I32Load
}

// ImmediateArgs returns the static offset and alignment operands, in
// the binary-format order: alignment first, then offset.
func (i I32Load) ImmediateArgs() []interface{} {
	return []interface{}{i.Align, i.Offset}
}

// I32Store represents the WASM i32.store instruction.
type I32Store struct {
	Offset int32
	Align  int32 // expressed as a power of two
}

// Op returns the opcode of the instruction.
func (I32Store) Op() opcode.Opcode {
	return opcode.I32Store
}

// ImmediateArgs returns the static offset and alignment operands, in
// the binary-format order: alignment first, then offset.
func (i I32Store) ImmediateArgs() []interface{} {
	return []interface{}{i.Align, i.Offset}
}
|
||||
139
vendor/github.com/open-policy-agent/opa/internal/wasm/instruction/numeric.go
generated
vendored
Normal file
139
vendor/github.com/open-policy-agent/opa/internal/wasm/instruction/numeric.go
generated
vendored
Normal file
@@ -0,0 +1,139 @@
|
||||
// Copyright 2018 The OPA Authors. All rights reserved.
|
||||
// Use of this source code is governed by an Apache2
|
||||
// license that can be found in the LICENSE file.
|
||||
|
||||
package instruction
|
||||
|
||||
import (
|
||||
"github.com/open-policy-agent/opa/internal/wasm/opcode"
|
||||
)
|
||||
|
||||
// I32Const represents the WASM i32.const instruction.
type I32Const struct {
	Value int32
}

// Op returns the opcode of the instruction.
func (I32Const) Op() opcode.Opcode {
	return opcode.I32Const
}

// ImmediateArgs returns the i32 value to push onto the stack.
func (i I32Const) ImmediateArgs() []interface{} {
	return []interface{}{i.Value}
}

// I64Const represents the WASM i64.const instruction.
type I64Const struct {
	Value int64
}

// Op returns the opcode of the instruction.
func (I64Const) Op() opcode.Opcode {
	return opcode.I64Const
}

// ImmediateArgs returns the i64 value to push onto the stack.
func (i I64Const) ImmediateArgs() []interface{} {
	return []interface{}{i.Value}
}

// F32Const represents the WASM f32.const instruction.
//
// NOTE(review): Value is declared int32 rather than float32, so the
// writer encodes it as a LEB128 varint instead of 4 raw IEEE 754 bytes
// — confirm against the binary-format spec for f32.const.
type F32Const struct {
	Value int32
}

// Op returns the opcode of the instruction.
func (F32Const) Op() opcode.Opcode {
	return opcode.F32Const
}

// ImmediateArgs returns the f32 value to push onto the stack.
func (i F32Const) ImmediateArgs() []interface{} {
	return []interface{}{i.Value}
}

// F64Const represents the WASM f64.const instruction.
type F64Const struct {
	Value float64
}

// Op returns the opcode of the instruction.
func (F64Const) Op() opcode.Opcode {
	return opcode.F64Const
}

// ImmediateArgs returns the f64 value to push onto the stack.
func (i F64Const) ImmediateArgs() []interface{} {
	return []interface{}{i.Value}
}
|
||||
|
||||
// I32Eqz represents the WASM i32.eqz instruction.
type I32Eqz struct {
	NoImmediateArgs
}

// Op returns the opcode of the instruction.
func (I32Eqz) Op() opcode.Opcode {
	return opcode.I32Eqz
}

// I32Eq represents the WASM i32.eq instruction.
type I32Eq struct {
	NoImmediateArgs
}

// Op returns the opcode of the instruction.
func (I32Eq) Op() opcode.Opcode {
	return opcode.I32Eq
}

// I32Ne represents the WASM i32.ne instruction.
type I32Ne struct {
	NoImmediateArgs
}

// Op returns the opcode of the instruction.
func (I32Ne) Op() opcode.Opcode {
	return opcode.I32Ne
}

// I32GtS represents the WASM i32.gt_s (signed greater-than) instruction.
type I32GtS struct {
	NoImmediateArgs
}

// Op returns the opcode of the instruction.
func (I32GtS) Op() opcode.Opcode {
	return opcode.I32GtS
}

// I32GeS represents the WASM i32.ge_s (signed greater-or-equal) instruction.
type I32GeS struct {
	NoImmediateArgs
}

// Op returns the opcode of the instruction.
func (I32GeS) Op() opcode.Opcode {
	return opcode.I32GeS
}

// I32LtS represents the WASM i32.lt_s (signed less-than) instruction.
type I32LtS struct {
	NoImmediateArgs
}

// Op returns the opcode of the instruction.
func (I32LtS) Op() opcode.Opcode {
	return opcode.I32LtS
}

// I32LeS represents the WASM i32.le_s (signed less-or-equal) instruction.
type I32LeS struct {
	NoImmediateArgs
}

// Op returns the opcode of the instruction.
func (I32LeS) Op() opcode.Opcode {
	return opcode.I32LeS
}
|
||||
Some files were not shown because too many files have changed in this diff Show More
Reference in New Issue
Block a user