Major rewrite

* use dep for vendoring
* Let's Encrypt support
* moved the web frontend to the transfer.sh-web repo
* single-command install
* added first tests
Remco
2017-03-22 18:09:21 +01:00
parent 6d68ad982f
commit cb6e5cb0c7
1917 changed files with 424197 additions and 260688 deletions

10 vendor/github.com/golang/gddo/.travis.yml generated vendored Normal file

@@ -0,0 +1,10 @@
language: go
install:
- curl -sSo gae_sdk.zip https://storage.googleapis.com/appengine-sdks/featured/go_appengine_sdk_linux_amd64-1.9.40.zip
- unzip -q gae_sdk.zip
script:
- pushd talksapp
- ./setup.sh
- ../go_appengine/goapp get .
- ../go_appengine/goapp test -v .
- popd

23 vendor/github.com/golang/gddo/CONTRIBUTING.md generated vendored Normal file

@@ -0,0 +1,23 @@
# Contributing to godoc.org
Contributions to this project are welcome, though please
[file an issue](https://github.com/golang/gddo/issues/new)
before starting work on anything major.
**We do not accept GitHub pull requests**
(we use [an instance](https://go-review.googlesource.com/) of the
[Gerrit](https://www.gerritcodereview.com/) code review system instead).
To get started contributing to this project,
clone the repository from its canonical location:

    git clone https://go.googlesource.com/gddo

or update the `origin` of your existing clone:

    git remote rm origin
    git remote add origin https://go.googlesource.com/gddo
then read the [Contribution Guidelines](https://golang.org/doc/contribute.html)
to learn how to send patches using Gerrit.

41 vendor/github.com/golang/gddo/Dockerfile generated vendored Normal file

@@ -0,0 +1,41 @@
FROM golang:latest
# Install redis, nginx, daemontools, etc.
RUN echo deb http://http.debian.net/debian wheezy-backports main > /etc/apt/sources.list.d/backports.list && \
apt-get update && \
apt-get install -y --no-install-recommends -t wheezy-backports redis-server && \
apt-get install -y --no-install-recommends graphviz nginx-full daemontools unzip
# Configure redis.
ADD deploy/redis.conf /etc/redis/redis.conf
# Configure nginx.
RUN echo "daemon off;" >> /etc/nginx/nginx.conf && \
rm /etc/nginx/sites-enabled/default
ADD deploy/gddo.conf /etc/nginx/sites-enabled/gddo.conf
# Configure daemontools services.
ADD deploy/services /services
# Manually fetch and install gddo-server dependencies (faster than "go get").
ADD https://github.com/garyburd/redigo/archive/779af66db5668074a96f522d9025cb0a5ef50d89.zip /x/redigo.zip
ADD https://github.com/golang/snappy/archive/master.zip /x/snappy-go.zip
RUN unzip /x/redigo.zip -d /x && unzip /x/snappy-go.zip -d /x && \
mkdir -p /go/src/github.com/garyburd && \
mkdir -p /go/src/github.com/golang && \
mv /x/redigo-* /go/src/github.com/garyburd/redigo && \
mv /x/snappy-master /go/src/github.com/golang/snappy && \
rm -rf /x
# Build the local gddo files.
ADD . /go/src/github.com/golang/gddo
RUN go get github.com/golang/gddo/gddo-server
# Exposed ports and volumes.
# /ssl should contain SSL certs.
# /data should contain the Redis database, "dump.rdb".
EXPOSE 80 443
VOLUME ["/ssl", "/data"]
# How to start it all.
CMD svscan /services

485 vendor/github.com/golang/gddo/Godeps/Godeps.json generated vendored Normal file

@@ -0,0 +1,485 @@
{
"ImportPath": "github.com/golang/gddo",
"GoVersion": "go1.7",
"GodepVersion": "v74",
"Packages": [
"github.com/golang/gddo/gddo-server",
"github.com/golang/gddo/talksapp",
"github.com/golang/gddo/lintapp"
],
"Deps": [
{
"ImportPath": "cloud.google.com/go/compute/metadata",
"Comment": "v0.2.0-10-g5af4269",
"Rev": "5af4269f950e91e917bab77f1138139023c868c2"
},
{
"ImportPath": "cloud.google.com/go/internal",
"Comment": "v0.2.0-10-g5af4269",
"Rev": "5af4269f950e91e917bab77f1138139023c868c2"
},
{
"ImportPath": "cloud.google.com/go/internal/bundler",
"Comment": "v0.2.0-10-g5af4269",
"Rev": "5af4269f950e91e917bab77f1138139023c868c2"
},
{
"ImportPath": "cloud.google.com/go/logging",
"Comment": "v0.2.0-10-g5af4269",
"Rev": "5af4269f950e91e917bab77f1138139023c868c2"
},
{
"ImportPath": "cloud.google.com/go/logging/apiv2",
"Comment": "v0.2.0-10-g5af4269",
"Rev": "5af4269f950e91e917bab77f1138139023c868c2"
},
{
"ImportPath": "cloud.google.com/go/logging/internal",
"Comment": "v0.2.0-10-g5af4269",
"Rev": "5af4269f950e91e917bab77f1138139023c868c2"
},
{
"ImportPath": "github.com/bradfitz/gomemcache/memcache",
"Comment": "release.r60-41-gfb1f79c",
"Rev": "fb1f79c6b65acda83063cbc69f6bba1522558bfc"
},
{
"ImportPath": "github.com/fsnotify/fsnotify",
"Comment": "v1.4.2-2-gfd9ec7d",
"Rev": "fd9ec7deca8bf46ecd2a795baaacf2b3a9be1197"
},
{
"ImportPath": "github.com/garyburd/redigo/internal",
"Comment": "v1.0.0-5-gffa8d46",
"Rev": "ffa8d46ada782d81cfda81a0fbd9f45ceae448e8"
},
{
"ImportPath": "github.com/garyburd/redigo/redis",
"Comment": "v1.0.0-5-gffa8d46",
"Rev": "ffa8d46ada782d81cfda81a0fbd9f45ceae448e8"
},
{
"ImportPath": "github.com/go-stack/stack",
"Comment": "v1.5.2",
"Rev": "100eb0c0a9c5b306ca2fb4f165df21d80ada4b82"
},
{
"ImportPath": "github.com/golang/lint",
"Rev": "3390df4df2787994aea98de825b964ac7944b817"
},
{
"ImportPath": "github.com/golang/protobuf/proto",
"Rev": "4bd1920723d7b7c925de087aa32e2187708897f7"
},
{
"ImportPath": "github.com/golang/protobuf/ptypes",
"Rev": "4bd1920723d7b7c925de087aa32e2187708897f7"
},
{
"ImportPath": "github.com/golang/protobuf/ptypes/any",
"Rev": "4bd1920723d7b7c925de087aa32e2187708897f7"
},
{
"ImportPath": "github.com/golang/protobuf/ptypes/duration",
"Rev": "4bd1920723d7b7c925de087aa32e2187708897f7"
},
{
"ImportPath": "github.com/golang/protobuf/ptypes/empty",
"Rev": "4bd1920723d7b7c925de087aa32e2187708897f7"
},
{
"ImportPath": "github.com/golang/protobuf/ptypes/struct",
"Rev": "4bd1920723d7b7c925de087aa32e2187708897f7"
},
{
"ImportPath": "github.com/golang/protobuf/ptypes/timestamp",
"Rev": "4bd1920723d7b7c925de087aa32e2187708897f7"
},
{
"ImportPath": "github.com/golang/protobuf/ptypes/wrappers",
"Rev": "4bd1920723d7b7c925de087aa32e2187708897f7"
},
{
"ImportPath": "github.com/golang/snappy",
"Rev": "d9eb7a3d35ec988b8585d4a0068e462c27d28380"
},
{
"ImportPath": "github.com/googleapis/gax-go",
"Rev": "ed6ab759ab548d1e6e070f53f9d1105d2d8128b0"
},
{
"ImportPath": "github.com/gregjones/httpcache",
"Rev": "413781778738c08fdbb98e1dd65f5abffe8832d0"
},
{
"ImportPath": "github.com/gregjones/httpcache/memcache",
"Rev": "413781778738c08fdbb98e1dd65f5abffe8832d0"
},
{
"ImportPath": "github.com/hashicorp/hcl",
"Rev": "c3e054bfd4dcf77b9965ed2b79b22afa2f41d4eb"
},
{
"ImportPath": "github.com/hashicorp/hcl/hcl/ast",
"Rev": "c3e054bfd4dcf77b9965ed2b79b22afa2f41d4eb"
},
{
"ImportPath": "github.com/hashicorp/hcl/hcl/parser",
"Rev": "c3e054bfd4dcf77b9965ed2b79b22afa2f41d4eb"
},
{
"ImportPath": "github.com/hashicorp/hcl/hcl/scanner",
"Rev": "c3e054bfd4dcf77b9965ed2b79b22afa2f41d4eb"
},
{
"ImportPath": "github.com/hashicorp/hcl/hcl/strconv",
"Rev": "c3e054bfd4dcf77b9965ed2b79b22afa2f41d4eb"
},
{
"ImportPath": "github.com/hashicorp/hcl/hcl/token",
"Rev": "c3e054bfd4dcf77b9965ed2b79b22afa2f41d4eb"
},
{
"ImportPath": "github.com/hashicorp/hcl/json/parser",
"Rev": "c3e054bfd4dcf77b9965ed2b79b22afa2f41d4eb"
},
{
"ImportPath": "github.com/hashicorp/hcl/json/scanner",
"Rev": "c3e054bfd4dcf77b9965ed2b79b22afa2f41d4eb"
},
{
"ImportPath": "github.com/hashicorp/hcl/json/token",
"Rev": "c3e054bfd4dcf77b9965ed2b79b22afa2f41d4eb"
},
{
"ImportPath": "github.com/inconshreveable/log15",
"Comment": "v2.3-88-g46a701a",
"Rev": "46a701a619de90c65a78c04d1a58bf02585e9701"
},
{
"ImportPath": "github.com/inconshreveable/log15/term",
"Comment": "v2.3-88-g46a701a",
"Rev": "46a701a619de90c65a78c04d1a58bf02585e9701"
},
{
"ImportPath": "github.com/magiconair/properties",
"Comment": "v1.7.0-5-g0723e35",
"Rev": "0723e352fa358f9322c938cc2dadda874e9151a9"
},
{
"ImportPath": "github.com/mattn/go-colorable",
"Comment": "v0.0.6-6-g6c903ff",
"Rev": "6c903ff4aa50920ca86087a280590b36b3152b9c"
},
{
"ImportPath": "github.com/mattn/go-isatty",
"Rev": "66b8e73f3f5cda9f96b69efd03dd3d7fc4a5cdb8"
},
{
"ImportPath": "github.com/mitchellh/mapstructure",
"Rev": "f3009df150dadf309fdee4a54ed65c124afad715"
},
{
"ImportPath": "github.com/pelletier/go-buffruneio",
"Rev": "df1e16fde7fc330a0ca68167c23bf7ed6ac31d6d"
},
{
"ImportPath": "github.com/pelletier/go-toml",
"Comment": "v0.3.5-16-g45932ad",
"Rev": "45932ad32dfdd20826f5671da37a5f3ce9f26a8d"
},
{
"ImportPath": "github.com/spf13/afero",
"Rev": "06b7e5f50606ecd49148a01a6008942d9b669217"
},
{
"ImportPath": "github.com/spf13/afero/mem",
"Rev": "06b7e5f50606ecd49148a01a6008942d9b669217"
},
{
"ImportPath": "github.com/spf13/cast",
"Rev": "24b6558033ffe202bf42f0f3b870dcc798dd2ba8"
},
{
"ImportPath": "github.com/spf13/jwalterweatherman",
"Rev": "33c24e77fb80341fe7130ee7c594256ff08ccc46"
},
{
"ImportPath": "github.com/spf13/pflag",
"Rev": "5ccb023bc27df288a957c5e994cd44fd19619465"
},
{
"ImportPath": "github.com/spf13/viper",
"Rev": "651d9d916abc3c3d6a91a12549495caba5edffd2"
},
{
"ImportPath": "golang.org/x/net/context",
"Rev": "0e2717dc3cc05907dc23096ef3a9086ea93f567f"
},
{
"ImportPath": "golang.org/x/net/context/ctxhttp",
"Rev": "0e2717dc3cc05907dc23096ef3a9086ea93f567f"
},
{
"ImportPath": "golang.org/x/net/http2",
"Rev": "0e2717dc3cc05907dc23096ef3a9086ea93f567f"
},
{
"ImportPath": "golang.org/x/net/http2/hpack",
"Rev": "0e2717dc3cc05907dc23096ef3a9086ea93f567f"
},
{
"ImportPath": "golang.org/x/net/idna",
"Rev": "0e2717dc3cc05907dc23096ef3a9086ea93f567f"
},
{
"ImportPath": "golang.org/x/net/internal/timeseries",
"Rev": "0e2717dc3cc05907dc23096ef3a9086ea93f567f"
},
{
"ImportPath": "golang.org/x/net/lex/httplex",
"Rev": "0e2717dc3cc05907dc23096ef3a9086ea93f567f"
},
{
"ImportPath": "golang.org/x/net/trace",
"Rev": "0e2717dc3cc05907dc23096ef3a9086ea93f567f"
},
{
"ImportPath": "golang.org/x/oauth2",
"Rev": "1e695b1c8febf17aad3bfa7bf0a819ef94b98ad5"
},
{
"ImportPath": "golang.org/x/oauth2/google",
"Rev": "1e695b1c8febf17aad3bfa7bf0a819ef94b98ad5"
},
{
"ImportPath": "golang.org/x/oauth2/internal",
"Rev": "1e695b1c8febf17aad3bfa7bf0a819ef94b98ad5"
},
{
"ImportPath": "golang.org/x/oauth2/jws",
"Rev": "1e695b1c8febf17aad3bfa7bf0a819ef94b98ad5"
},
{
"ImportPath": "golang.org/x/oauth2/jwt",
"Rev": "1e695b1c8febf17aad3bfa7bf0a819ef94b98ad5"
},
{
"ImportPath": "golang.org/x/sys/unix",
"Rev": "b699b7032584f0953262cb2788a0ca19bb494703"
},
{
"ImportPath": "golang.org/x/text/transform",
"Rev": "a263ba8db058568bb9beba166777d9c9dbe75d68"
},
{
"ImportPath": "golang.org/x/text/unicode/norm",
"Rev": "a263ba8db058568bb9beba166777d9c9dbe75d68"
},
{
"ImportPath": "golang.org/x/tools/go/gcimporter15",
"Rev": "5e2ae75eb72a62985e086eed33a5982a929e4fff"
},
{
"ImportPath": "golang.org/x/tools/present",
"Rev": "5e2ae75eb72a62985e086eed33a5982a929e4fff"
},
{
"ImportPath": "google.golang.org/api/internal",
"Rev": "3cf64a039723963488f603d140d0aec154fdcd20"
},
{
"ImportPath": "google.golang.org/api/iterator",
"Rev": "3cf64a039723963488f603d140d0aec154fdcd20"
},
{
"ImportPath": "google.golang.org/api/option",
"Rev": "3cf64a039723963488f603d140d0aec154fdcd20"
},
{
"ImportPath": "google.golang.org/api/transport",
"Rev": "3cf64a039723963488f603d140d0aec154fdcd20"
},
{
"ImportPath": "google.golang.org/appengine",
"Comment": "v1.0.0-16-gc7b8227",
"Rev": "c7b8227c83007befd67b324a64c969ebc1d7475d"
},
{
"ImportPath": "google.golang.org/appengine/aetest",
"Comment": "v1.0.0-16-gc7b8227",
"Rev": "c7b8227c83007befd67b324a64c969ebc1d7475d"
},
{
"ImportPath": "google.golang.org/appengine/datastore",
"Comment": "v1.0.0-16-gc7b8227",
"Rev": "c7b8227c83007befd67b324a64c969ebc1d7475d"
},
{
"ImportPath": "google.golang.org/appengine/internal",
"Comment": "v1.0.0-16-gc7b8227",
"Rev": "c7b8227c83007befd67b324a64c969ebc1d7475d"
},
{
"ImportPath": "google.golang.org/appengine/internal/app_identity",
"Comment": "v1.0.0-16-gc7b8227",
"Rev": "c7b8227c83007befd67b324a64c969ebc1d7475d"
},
{
"ImportPath": "google.golang.org/appengine/internal/base",
"Comment": "v1.0.0-16-gc7b8227",
"Rev": "c7b8227c83007befd67b324a64c969ebc1d7475d"
},
{
"ImportPath": "google.golang.org/appengine/internal/datastore",
"Comment": "v1.0.0-16-gc7b8227",
"Rev": "c7b8227c83007befd67b324a64c969ebc1d7475d"
},
{
"ImportPath": "google.golang.org/appengine/internal/log",
"Comment": "v1.0.0-16-gc7b8227",
"Rev": "c7b8227c83007befd67b324a64c969ebc1d7475d"
},
{
"ImportPath": "google.golang.org/appengine/internal/memcache",
"Comment": "v1.0.0-16-gc7b8227",
"Rev": "c7b8227c83007befd67b324a64c969ebc1d7475d"
},
{
"ImportPath": "google.golang.org/appengine/internal/modules",
"Comment": "v1.0.0-16-gc7b8227",
"Rev": "c7b8227c83007befd67b324a64c969ebc1d7475d"
},
{
"ImportPath": "google.golang.org/appengine/internal/remote_api",
"Comment": "v1.0.0-16-gc7b8227",
"Rev": "c7b8227c83007befd67b324a64c969ebc1d7475d"
},
{
"ImportPath": "google.golang.org/appengine/internal/search",
"Comment": "v1.0.0-16-gc7b8227",
"Rev": "c7b8227c83007befd67b324a64c969ebc1d7475d"
},
{
"ImportPath": "google.golang.org/appengine/internal/urlfetch",
"Comment": "v1.0.0-16-gc7b8227",
"Rev": "c7b8227c83007befd67b324a64c969ebc1d7475d"
},
{
"ImportPath": "google.golang.org/appengine/internal/user",
"Comment": "v1.0.0-16-gc7b8227",
"Rev": "c7b8227c83007befd67b324a64c969ebc1d7475d"
},
{
"ImportPath": "google.golang.org/appengine/log",
"Comment": "v1.0.0-16-gc7b8227",
"Rev": "c7b8227c83007befd67b324a64c969ebc1d7475d"
},
{
"ImportPath": "google.golang.org/appengine/memcache",
"Comment": "v1.0.0-16-gc7b8227",
"Rev": "c7b8227c83007befd67b324a64c969ebc1d7475d"
},
{
"ImportPath": "google.golang.org/appengine/search",
"Comment": "v1.0.0-16-gc7b8227",
"Rev": "c7b8227c83007befd67b324a64c969ebc1d7475d"
},
{
"ImportPath": "google.golang.org/appengine/urlfetch",
"Comment": "v1.0.0-16-gc7b8227",
"Rev": "c7b8227c83007befd67b324a64c969ebc1d7475d"
},
{
"ImportPath": "google.golang.org/appengine/user",
"Comment": "v1.0.0-16-gc7b8227",
"Rev": "c7b8227c83007befd67b324a64c969ebc1d7475d"
},
{
"ImportPath": "google.golang.org/genproto/googleapis/api/label",
"Rev": "9359a8d303c45e3212571b77610f1cefb0c6f3eb"
},
{
"ImportPath": "google.golang.org/genproto/googleapis/api/metric",
"Rev": "9359a8d303c45e3212571b77610f1cefb0c6f3eb"
},
{
"ImportPath": "google.golang.org/genproto/googleapis/api/monitoredres",
"Rev": "9359a8d303c45e3212571b77610f1cefb0c6f3eb"
},
{
"ImportPath": "google.golang.org/genproto/googleapis/api/serviceconfig",
"Rev": "9359a8d303c45e3212571b77610f1cefb0c6f3eb"
},
{
"ImportPath": "google.golang.org/genproto/googleapis/logging/type",
"Rev": "9359a8d303c45e3212571b77610f1cefb0c6f3eb"
},
{
"ImportPath": "google.golang.org/genproto/googleapis/logging/v2",
"Rev": "9359a8d303c45e3212571b77610f1cefb0c6f3eb"
},
{
"ImportPath": "google.golang.org/genproto/googleapis/rpc/status",
"Rev": "9359a8d303c45e3212571b77610f1cefb0c6f3eb"
},
{
"ImportPath": "google.golang.org/genproto/protobuf",
"Rev": "9359a8d303c45e3212571b77610f1cefb0c6f3eb"
},
{
"ImportPath": "google.golang.org/grpc",
"Comment": "v1.0.2-24-g2131fed",
"Rev": "2131fedea9b3fe419b5c06200a674cdbc7bf986d"
},
{
"ImportPath": "google.golang.org/grpc/codes",
"Comment": "v1.0.2-24-g2131fed",
"Rev": "2131fedea9b3fe419b5c06200a674cdbc7bf986d"
},
{
"ImportPath": "google.golang.org/grpc/credentials",
"Comment": "v1.0.2-24-g2131fed",
"Rev": "2131fedea9b3fe419b5c06200a674cdbc7bf986d"
},
{
"ImportPath": "google.golang.org/grpc/credentials/oauth",
"Comment": "v1.0.2-24-g2131fed",
"Rev": "2131fedea9b3fe419b5c06200a674cdbc7bf986d"
},
{
"ImportPath": "google.golang.org/grpc/grpclog",
"Comment": "v1.0.2-24-g2131fed",
"Rev": "2131fedea9b3fe419b5c06200a674cdbc7bf986d"
},
{
"ImportPath": "google.golang.org/grpc/internal",
"Comment": "v1.0.2-24-g2131fed",
"Rev": "2131fedea9b3fe419b5c06200a674cdbc7bf986d"
},
{
"ImportPath": "google.golang.org/grpc/metadata",
"Comment": "v1.0.2-24-g2131fed",
"Rev": "2131fedea9b3fe419b5c06200a674cdbc7bf986d"
},
{
"ImportPath": "google.golang.org/grpc/naming",
"Comment": "v1.0.2-24-g2131fed",
"Rev": "2131fedea9b3fe419b5c06200a674cdbc7bf986d"
},
{
"ImportPath": "google.golang.org/grpc/peer",
"Comment": "v1.0.2-24-g2131fed",
"Rev": "2131fedea9b3fe419b5c06200a674cdbc7bf986d"
},
{
"ImportPath": "google.golang.org/grpc/transport",
"Comment": "v1.0.2-24-g2131fed",
"Rev": "2131fedea9b3fe419b5c06200a674cdbc7bf986d"
},
{
"ImportPath": "gopkg.in/yaml.v2",
"Rev": "a5b47d31c556af34a302ce5d659e6fea44d90de0"
}
]
}

5 vendor/github.com/golang/gddo/Godeps/Readme generated vendored Normal file

@@ -0,0 +1,5 @@
This directory tree is generated automatically by godep.
Please do not edit.
See https://github.com/tools/godep for more information.

27 vendor/github.com/golang/gddo/LICENSE generated vendored Normal file

@@ -0,0 +1,27 @@
Copyright (c) 2013 The Go Authors. All rights reserved.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are
met:
* Redistributions of source code must retain the above copyright
notice, this list of conditions and the following disclaimer.
* Redistributions in binary form must reproduce the above
copyright notice, this list of conditions and the following disclaimer
in the documentation and/or other materials provided with the
distribution.
* Neither the name of Google Inc. nor the names of its
contributors may be used to endorse or promote products derived from
this software without specific prior written permission.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

44 vendor/github.com/golang/gddo/README.markdown generated vendored Normal file

@@ -0,0 +1,44 @@
This project is the source for http://godoc.org/
[![GoDoc](https://godoc.org/github.com/golang/gddo?status.svg)](http://godoc.org/github.com/golang/gddo)
[![Build Status](https://travis-ci.org/golang/gddo.svg?branch=master)](https://travis-ci.org/golang/gddo)
The code in this project is designed to be used by godoc.org. Send mail to
golang-dev@googlegroups.com if you want to discuss other uses of the code.
## Feedback
Send ideas and questions to golang-dev@googlegroups.com. Request features and
report bugs using the [GitHub Issue Tracker](https://github.com/golang/gddo/issues/new).
## Contributions
Contributions to this project are welcome, though please
[file an issue](https://github.com/golang/gddo/issues/new)
before starting work on anything major.
**We do not accept GitHub pull requests**
Please refer to the [Contribution Guidelines](https://golang.org/doc/contribute.html) for how to submit changes.
We use https://go-review.googlesource.com to review change submissions.
## Getting the Source
To get started contributing to this project, clone the repository from its
canonical location
```
git clone https://go.googlesource.com/gddo $GOPATH/src/github.com/golang/gddo
```
Information on how to set up a local environment is available at
https://github.com/golang/gddo/wiki/Development-Environment-Setup.
## More Documentation
More documentation about this project is available on the
[wiki](https://github.com/golang/gddo/wiki).

1242 vendor/github.com/golang/gddo/database/database.go generated vendored Normal file

File diff suppressed because it is too large.

274 vendor/github.com/golang/gddo/database/database_test.go generated vendored Normal file

@@ -0,0 +1,274 @@
// Copyright 2013 The Go Authors. All rights reserved.
//
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file or at
// https://developers.google.com/open-source/licenses/bsd.
package database
import (
"math"
"reflect"
"strconv"
"testing"
"time"
"github.com/garyburd/redigo/redis"
"golang.org/x/net/context"
"google.golang.org/appengine/aetest"
"github.com/golang/gddo/doc"
)
func newDB(t *testing.T) *Database {
p := redis.NewPool(func() (redis.Conn, error) {
c, err := redis.DialTimeout("tcp", ":6379", 0, 1*time.Second, 1*time.Second)
if err != nil {
return nil, err
}
_, err = c.Do("SELECT", "9")
if err != nil {
c.Close()
return nil, err
}
return c, nil
}, 1)
c := p.Get()
defer c.Close()
n, err := redis.Int(c.Do("DBSIZE"))
if n != 0 || err != nil {
t.Errorf("DBSIZE returned %d, %v", n, err)
}
return &Database{Pool: p}
}
func closeDB(db *Database) {
c := db.Pool.Get()
c.Do("FLUSHDB")
c.Close()
}
func TestPutGet(t *testing.T) {
var nextCrawl = time.Unix(time.Now().Add(time.Hour).Unix(), 0).UTC()
ctx, done, err := aetest.NewContext()
if err != nil {
t.Fatal(err)
}
defer done()
bgCtx = func() context.Context {
return ctx
}
db := newDB(t)
defer closeDB(db)
pdoc := &doc.Package{
ImportPath: "github.com/user/repo/foo/bar",
Name: "bar",
Synopsis: "hello",
ProjectRoot: "github.com/user/repo",
ProjectName: "foo",
Updated: time.Now().Add(-time.Hour),
Imports: []string{"C", "errors", "github.com/user/repo/foo/bar"}, // self import for testing convenience.
}
if err := db.Put(pdoc, nextCrawl, false); err != nil {
t.Errorf("db.Put() returned error %v", err)
}
if err := db.Put(pdoc, time.Time{}, false); err != nil {
t.Errorf("second db.Put() returned error %v", err)
}
actualPdoc, actualSubdirs, actualCrawl, err := db.Get("github.com/user/repo/foo/bar")
if err != nil {
t.Fatalf("db.Get(.../foo/bar) returned %v", err)
}
if len(actualSubdirs) != 0 {
t.Errorf("db.Get(.../foo/bar) returned subdirs %v, want none", actualSubdirs)
}
if !reflect.DeepEqual(actualPdoc, pdoc) {
t.Errorf("db.Get(.../foo/bar) returned doc %v, want %v", actualPdoc, pdoc)
}
if !nextCrawl.Equal(actualCrawl) {
t.Errorf("db.Get(.../foo/bar) returned crawl %v, want %v", actualCrawl, nextCrawl)
}
before := time.Now().Unix()
if err := db.BumpCrawl(pdoc.ProjectRoot); err != nil {
t.Errorf("db.BumpCrawl() returned %v", err)
}
after := time.Now().Unix()
_, _, actualCrawl, _ = db.Get("github.com/user/repo/foo/bar")
if actualCrawl.Unix() < before || after < actualCrawl.Unix() {
t.Errorf("actualCrawl=%v, expect value between %v and %v", actualCrawl.Unix(), before, after)
}
// Popular
if err := db.IncrementPopularScore(pdoc.ImportPath); err != nil {
t.Errorf("db.IncrementPopularScore() returned %v", err)
}
// Get "-"
actualPdoc, _, _, err = db.Get("-")
if err != nil {
t.Fatalf("db.Get(-) returned %v", err)
}
if !reflect.DeepEqual(actualPdoc, pdoc) {
t.Errorf("db.Get(-) returned doc %v, want %v", actualPdoc, pdoc)
}
actualPdoc, actualSubdirs, _, err = db.Get("github.com/user/repo/foo")
if err != nil {
t.Fatalf("db.Get(.../foo) returned %v", err)
}
if actualPdoc != nil {
t.Errorf("db.Get(.../foo) returned doc %v, want %v", actualPdoc, nil)
}
expectedSubdirs := []Package{{Path: "github.com/user/repo/foo/bar", Synopsis: "hello"}}
if !reflect.DeepEqual(actualSubdirs, expectedSubdirs) {
t.Errorf("db.Get(.../foo) returned subdirs %v, want %v", actualSubdirs, expectedSubdirs)
}
actualImporters, err := db.Importers("github.com/user/repo/foo/bar")
if err != nil {
t.Fatalf("db.Importers() returned error %v", err)
}
expectedImporters := []Package{{Path: "github.com/user/repo/foo/bar", Synopsis: "hello"}}
if !reflect.DeepEqual(actualImporters, expectedImporters) {
t.Errorf("db.Importers() = %v, want %v", actualImporters, expectedImporters)
}
actualImports, err := db.Packages(pdoc.Imports)
if err != nil {
t.Fatalf("db.Imports() returned error %v", err)
}
for i := range actualImports {
if actualImports[i].Path == "C" {
actualImports[i].Synopsis = ""
}
}
expectedImports := []Package{
{Path: "C", Synopsis: ""},
{Path: "errors", Synopsis: ""},
{Path: "github.com/user/repo/foo/bar", Synopsis: "hello"},
}
if !reflect.DeepEqual(actualImports, expectedImports) {
t.Errorf("db.Imports() = %v, want %v", actualImports, expectedImports)
}
importerCount, _ := db.ImporterCount("github.com/user/repo/foo/bar")
if importerCount != 1 {
t.Errorf("db.ImporterCount() = %d, want %d", importerCount, 1)
}
if err := db.Delete("github.com/user/repo/foo/bar"); err != nil {
t.Errorf("db.Delete() returned error %v", err)
}
db.Query("bar")
if err := db.Put(pdoc, time.Time{}, false); err != nil {
t.Errorf("db.Put() returned error %v", err)
}
if err := db.Block("github.com/user/repo"); err != nil {
t.Errorf("db.Block() returned error %v", err)
}
blocked, err := db.IsBlocked("github.com/user/repo/foo/bar")
if !blocked || err != nil {
t.Errorf("db.IsBlocked(github.com/user/repo/foo/bar) returned %v, %v, want true, nil", blocked, err)
}
blocked, err = db.IsBlocked("github.com/foo/bar")
if blocked || err != nil {
t.Errorf("db.IsBlocked(github.com/foo/bar) returned %v, %v, want false, nil", blocked, err)
}
c := db.Pool.Get()
defer c.Close()
c.Send("DEL", "maxQueryId")
c.Send("DEL", "maxPackageId")
c.Send("DEL", "block")
c.Send("DEL", "popular:0")
c.Send("DEL", "newCrawl")
keys, err := redis.Values(c.Do("HKEYS", "ids"))
if err != nil {
t.Fatal(err)
}
for _, key := range keys {
t.Errorf("unexpected id %s", key)
}
keys, err = redis.Values(c.Do("KEYS", "*"))
if err != nil {
t.Fatal(err)
}
for _, key := range keys {
t.Errorf("unexpected key %s", key)
}
}
const epsilon = 0.000001
func TestPopular(t *testing.T) {
db := newDB(t)
defer closeDB(db)
c := db.Pool.Get()
defer c.Close()
// Add scores for packages. On each iteration, add half-life to time and
// divide the score by two. All packages should have the same score.
now := time.Now()
score := float64(4048)
for id := 12; id >= 0; id-- {
path := "github.com/user/repo/p" + strconv.Itoa(id)
c.Do("HSET", "ids", path, id)
err := db.incrementPopularScoreInternal(path, score, now)
if err != nil {
t.Fatal(err)
}
now = now.Add(popularHalfLife)
score /= 2
}
values, _ := redis.Values(c.Do("ZRANGE", "popular", "0", "100000", "WITHSCORES"))
if len(values) != 26 {
t.Fatalf("Expected 26 values, got %d", len(values))
}
// Check for equal scores.
score, err := redis.Float64(values[1], nil)
if err != nil {
t.Fatal(err)
}
for i := 3; i < len(values); i += 2 {
s, _ := redis.Float64(values[i], nil)
if math.Abs(score-s)/score > epsilon {
t.Errorf("Bad score, score[1]=%g, score[%d]=%g", score, i, s)
}
}
}
func TestCounter(t *testing.T) {
db := newDB(t)
defer closeDB(db)
const key = "127.0.0.1"
now := time.Now()
n, err := db.incrementCounterInternal(key, 1, now)
if err != nil {
t.Fatal(err)
}
if math.Abs(n-1.0) > epsilon {
t.Errorf("1: got n=%g, want 1", n)
}
n, err = db.incrementCounterInternal(key, 1, now)
if err != nil {
t.Fatal(err)
}
if math.Abs(n-2.0)/2.0 > epsilon {
t.Errorf("2: got n=%g, want 2", n)
}
now = now.Add(counterHalflife)
n, err = db.incrementCounterInternal(key, 1, now)
if err != nil {
t.Fatal(err)
}
if math.Abs(n-2.0)/2.0 > epsilon {
t.Errorf("3: got n=%g, want 2", n)
}
}
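
The two tests above rely on a decay model that this file only implies (the scoring internals live in database.go, whose diff is suppressed above). A hedged reading: a raw score s recorded at time t0 decays exponentially with half-life T, so advancing the record time by one half-life while halving the raw score leaves the decayed value unchanged, which is exactly why TestPopular expects every package to end up with the same score:

```latex
s(t) = s(t_0)\,2^{-(t - t_0)/T}
\qquad\Rightarrow\qquad
\tfrac{s_0}{2}\,2^{-(t - (t_0 + T))/T} = s_0\,2^{-(t - t_0)/T}
```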

243 vendor/github.com/golang/gddo/database/index.go generated vendored Normal file

@@ -0,0 +1,243 @@
// Copyright 2013 The Go Authors. All rights reserved.
//
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file or at
// https://developers.google.com/open-source/licenses/bsd.
package database
import (
"path"
"regexp"
"strings"
"unicode"
"github.com/golang/gddo/doc"
"github.com/golang/gddo/gosrc"
)
func isStandardPackage(path string) bool {
return strings.Index(path, ".") < 0
}
func isTermSep(r rune) bool {
return unicode.IsSpace(r) ||
r != '.' && unicode.IsPunct(r) ||
unicode.IsSymbol(r)
}
func normalizeProjectRoot(projectRoot string) string {
if projectRoot == "" {
return "go"
}
return projectRoot
}
var synonyms = map[string]string{
"redis": "redisdb", // append db to avoid stemming to 'red'
"rand": "random",
"postgres": "postgresql",
"mongo": "mongodb",
}
func term(s string) string {
s = strings.ToLower(s)
if x, ok := synonyms[s]; ok {
s = x
}
// Trim the trailing period at the end of any sentence.
return stem(strings.TrimSuffix(s, "."))
}
var httpPat = regexp.MustCompile(`https?://\S+`)
func collectSynopsisTerms(terms map[string]bool, synopsis string) {
synopsis = httpPat.ReplaceAllLiteralString(synopsis, "")
fields := strings.FieldsFunc(synopsis, isTermSep)
for i := range fields {
fields[i] = strings.ToLower(fields[i])
}
// Ignore boilerplate in the following common patterns:
// Package foo ...
// Command foo ...
// Package foo implements ... (and provides, contains)
// The foo package ...
// The foo package implements ...
// The foo command ...
checkPackageVerb := false
switch {
case len(fields) >= 1 && fields[0] == "package":
fields = fields[1:]
checkPackageVerb = true
case len(fields) >= 1 && fields[0] == "command":
fields = fields[1:]
case len(fields) >= 3 && fields[0] == "the" && fields[2] == "package":
fields[2] = fields[1]
fields = fields[2:]
checkPackageVerb = true
case len(fields) >= 3 && fields[0] == "the" && fields[2] == "command":
fields[2] = fields[1]
fields = fields[2:]
}
if checkPackageVerb && len(fields) >= 2 &&
(fields[1] == "implements" || fields[1] == "provides" || fields[1] == "contains") {
fields[1] = fields[0]
fields = fields[1:]
}
for _, s := range fields {
if !stopWord[s] {
terms[term(s)] = true
}
}
}
func termSlice(terms map[string]bool) []string {
result := make([]string, 0, len(terms))
for term := range terms {
result = append(result, term)
}
return result
}
func documentTerms(pdoc *doc.Package, score float64) []string {
terms := make(map[string]bool)
// Project root
projectRoot := normalizeProjectRoot(pdoc.ProjectRoot)
terms["project:"+projectRoot] = true
if strings.HasPrefix(pdoc.ImportPath, "golang.org/x/") {
terms["project:subrepo"] = true
}
// Imports
for _, path := range pdoc.Imports {
if gosrc.IsValidPath(path) {
terms["import:"+path] = true
}
}
if score > 0 {
for _, term := range parseQuery(pdoc.ImportPath) {
terms[term] = true
}
if !isStandardPackage(pdoc.ImportPath) {
terms["all:"] = true
for _, term := range parseQuery(pdoc.ProjectName) {
terms[term] = true
}
for _, term := range parseQuery(pdoc.Name) {
terms[term] = true
}
}
// Synopsis
collectSynopsisTerms(terms, pdoc.Synopsis)
}
return termSlice(terms)
}
// vendorPat matches the path of a vendored package.
var vendorPat = regexp.MustCompile(
// match directories used by tools to vendor packages.
`/(?:_?third_party|vendors|Godeps/_workspace/src)/` +
// match a domain name.
`[^./]+\.[^/]+`)
func documentScore(pdoc *doc.Package) float64 {
if pdoc.Name == "" ||
pdoc.Status != gosrc.Active ||
len(pdoc.Errors) > 0 ||
strings.HasSuffix(pdoc.ImportPath, ".go") ||
strings.HasPrefix(pdoc.ImportPath, "gist.github.com/") ||
strings.HasSuffix(pdoc.ImportPath, "/internal") ||
strings.Contains(pdoc.ImportPath, "/internal/") ||
vendorPat.MatchString(pdoc.ImportPath) {
return 0
}
for _, p := range pdoc.Imports {
if strings.HasSuffix(p, ".go") {
return 0
}
}
r := 1.0
if pdoc.IsCmd {
if pdoc.Doc == "" {
// Do not include command in index if it does not have documentation.
return 0
}
if !importsGoPackages(pdoc) {
// Penalize commands that don't use the "go/*" packages.
r *= 0.9
}
} else {
if !pdoc.Truncated &&
len(pdoc.Consts) == 0 &&
len(pdoc.Vars) == 0 &&
len(pdoc.Funcs) == 0 &&
len(pdoc.Types) == 0 &&
len(pdoc.Examples) == 0 {
// Do not include package in index if it does not have exports.
return 0
}
if pdoc.Doc == "" {
// Penalty for no documentation.
r *= 0.95
}
if path.Base(pdoc.ImportPath) != pdoc.Name {
// Penalty for last element of path != package name.
r *= 0.9
}
for i := 0; i < strings.Count(pdoc.ImportPath[len(pdoc.ProjectRoot):], "/"); i++ {
// Penalty for deeply nested packages.
r *= 0.99
}
if strings.Index(pdoc.ImportPath[len(pdoc.ProjectRoot):], "/src/") > 0 {
r *= 0.95
}
for _, p := range pdoc.Imports {
if vendorPat.MatchString(p) {
// Penalize packages that import vendored packages.
r *= 0.1
break
}
}
}
return r
}
func parseQuery(q string) []string {
var terms []string
q = strings.ToLower(q)
for _, s := range strings.FieldsFunc(q, isTermSep) {
if !stopWord[s] {
terms = append(terms, term(s))
}
}
return terms
}
func importsGoPackages(pdoc *doc.Package) bool {
for _, m := range pdoc.Imports {
if strings.HasPrefix(m, "go/") {
return true
}
}
return false
}
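
To make the compounding of penalties in documentScore concrete, here is a small, hypothetical test-style sketch. It is not part of the commit: the file name, the package literal, and the expected value are assumptions for illustration (it would have to live in package database, since documentScore is unexported); the field names and constants mirror the code above.

```go
// score_example_test.go (hypothetical illustration, not part of this commit)
package database

import (
	"math"
	"testing"

	"github.com/golang/gddo/doc"
	"github.com/golang/gddo/gosrc"
)

func TestDocumentScoreExample(t *testing.T) {
	pdoc := &doc.Package{
		ImportPath:  "github.com/user/repo/sub/dir/pkgname",
		ProjectRoot: "github.com/user/repo",
		Name:        "mypkg",         // differs from the path base "pkgname": *0.9
		Status:      gosrc.Active,
		Funcs:       []*doc.Func{{}}, // at least one export, so the package is indexed
		// Doc left empty: *0.95
	}
	// Three path elements below the project root ("/sub/dir/pkgname"): *0.99 three times.
	got := documentScore(pdoc)
	want := 0.95 * 0.9 * 0.99 * 0.99 * 0.99 // ~0.830
	if math.Abs(got-want) > 1e-9 {
		t.Errorf("documentScore() = %g, want %g", got, want)
	}
}
```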

202 vendor/github.com/golang/gddo/database/index_test.go generated vendored Normal file

@@ -0,0 +1,202 @@
// Copyright 2013 The Go Authors. All rights reserved.
//
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file or at
// https://developers.google.com/open-source/licenses/bsd.
package database
import (
"reflect"
"sort"
"testing"
"github.com/golang/gddo/doc"
)
var indexTests = []struct {
pdoc *doc.Package
terms []string
}{
{&doc.Package{
ImportPath: "strconv",
ProjectRoot: "",
ProjectName: "Go",
Name: "strconv",
Synopsis: "Package strconv implements conversions to and from string representations of basic data types.",
Doc: "Package strconv implements conversions to and from string representations\nof basic data types.",
Imports: []string{"errors", "math", "unicode/utf8"},
Funcs: []*doc.Func{{}},
},
[]string{
"bas",
"convert",
"dat",
"import:errors",
"import:math",
"import:unicode/utf8",
"project:go",
"repres",
"strconv",
"string",
"typ"},
},
{&doc.Package{
ImportPath: "github.com/user/repo/dir",
ProjectRoot: "github.com/user/repo",
ProjectName: "go-oauth",
ProjectURL: "https://github.com/user/repo/",
Name: "dir",
Synopsis: "Package dir implements a subset of the OAuth client interface as defined in RFC 5849.",
Doc: "Package oauth implements a subset of the OAuth client interface as defined in RFC 5849.\n\n" +
"This package assumes that the application writes request URL paths to the\nnetwork using " +
"the encoding implemented by the net/url URL RequestURI method.\n" +
"The HTTP client in the standard net/http package uses this encoding.",
IsCmd: false,
Imports: []string{
"bytes",
"crypto/hmac",
"crypto/sha1",
"encoding/base64",
"encoding/binary",
"errors",
"fmt",
"io",
"io/ioutil",
"net/http",
"net/url",
"regexp",
"sort",
"strconv",
"strings",
"sync",
"time",
},
TestImports: []string{"bytes", "net/url", "testing"},
Funcs: []*doc.Func{{}},
},
[]string{
"all:",
"5849", "cly", "defin", "dir", "github.com", "go",
"import:bytes", "import:crypto/hmac", "import:crypto/sha1",
"import:encoding/base64", "import:encoding/binary", "import:errors",
"import:fmt", "import:io", "import:io/ioutil", "import:net/http",
"import:net/url", "import:regexp", "import:sort", "import:strconv",
"import:strings", "import:sync", "import:time", "interfac",
"oau", "project:github.com/user/repo", "repo", "rfc", "subset", "us",
},
},
}
func TestDocTerms(t *testing.T) {
for _, tt := range indexTests {
score := documentScore(tt.pdoc)
terms := documentTerms(tt.pdoc, score)
sort.Strings(terms)
sort.Strings(tt.terms)
if !reflect.DeepEqual(terms, tt.terms) {
t.Errorf("documentTerms(%s) ->\n got: %#v\nwant: %#v", tt.pdoc.ImportPath, terms, tt.terms)
}
}
}
var vendorPatTests = []struct {
path string
match bool
}{
{"camlistore.org/third_party/github.com/user/repo", true},
{"camlistore.org/third_party/dir", false},
{"camlistore.org/third_party", false},
{"camlistore.org/xthird_party/github.com/user/repo", false},
{"camlistore.org/third_partyx/github.com/user/repo", false},
{"example.org/_third_party/github.com/user/repo/dir", true},
{"example.org/_third_party/dir", false},
{"github.com/user/repo/Godeps/_workspace/src/github.com/user/repo", true},
{"github.com/user/repo/Godeps/_workspace/src/dir", false},
{"github.com/user/repo", false},
}
func TestVendorPat(t *testing.T) {
for _, tt := range vendorPatTests {
match := vendorPat.MatchString(tt.path)
if match != tt.match {
t.Errorf("match(%q) = %v, want %v", tt.path, match, match)
}
}
}
var synopsisTermTests = []struct {
synopsis string
terms []string
}{
{
"Package foo implements bar.",
[]string{"bar", "foo"},
},
{
"Package foo provides bar.",
[]string{"bar", "foo"},
},
{
"The foo package provides bar.",
[]string{"bar", "foo"},
},
{
"Package foo contains an implementation of bar.",
[]string{"bar", "foo", "impl"},
},
{
"Package foo is awesome",
[]string{"awesom", "foo"},
},
{
"The foo package is awesome",
[]string{"awesom", "foo"},
},
{
"The foo command is awesome",
[]string{"awesom", "foo"},
},
{
"Command foo is awesome",
[]string{"awesom", "foo"},
},
{
"The foo package",
[]string{"foo"},
},
{
"Package foo",
[]string{"foo"},
},
{
"Command foo",
[]string{"foo"},
},
{
"Package",
[]string{},
},
{
"Command",
[]string{},
},
}
func TestSynopsisTerms(t *testing.T) {
for _, tt := range synopsisTermTests {
terms := make(map[string]bool)
collectSynopsisTerms(terms, tt.synopsis)
actual := termSlice(terms)
expected := tt.terms
sort.Strings(actual)
sort.Strings(expected)
if !reflect.DeepEqual(actual, expected) {
t.Errorf("%q ->\n got: %#v\nwant: %#v", tt.synopsis, actual, expected)
}
}
}

207 vendor/github.com/golang/gddo/database/indexae.go generated vendored Normal file

@@ -0,0 +1,207 @@
// Copyright 2016 The Go Authors. All rights reserved.
//
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file or at
// https://developers.google.com/open-source/licenses/bsd.
package database
import (
"bytes"
"errors"
"fmt"
"log"
"math"
"strings"
"unicode"
"golang.org/x/net/context"
"google.golang.org/appengine/search"
"github.com/golang/gddo/doc"
)
func (p *Package) Load(fields []search.Field, meta *search.DocumentMetadata) error {
for _, f := range fields {
switch f.Name {
case "Name":
if v, ok := f.Value.(search.Atom); ok {
p.Name = string(v)
}
case "Path":
if v, ok := f.Value.(string); ok {
p.Path = v
}
case "Synopsis":
if v, ok := f.Value.(string); ok {
p.Synopsis = v
}
case "ImportCount":
if v, ok := f.Value.(float64); ok {
p.ImportCount = int(v)
}
case "Stars":
if v, ok := f.Value.(float64); ok {
p.Stars = int(v)
}
case "Score":
if v, ok := f.Value.(float64); ok {
p.Score = v
}
}
}
if p.Path == "" {
return errors.New("Invalid document: missing Path field")
}
for _, f := range meta.Facets {
if f.Name == "Fork" {
p.Fork = f.Value.(search.Atom) == "true"
}
}
return nil
}
func (p *Package) Save() ([]search.Field, *search.DocumentMetadata, error) {
fields := []search.Field{
{Name: "Name", Value: search.Atom(p.Name)},
{Name: "Path", Value: p.Path},
{Name: "Synopsis", Value: p.Synopsis},
{Name: "Score", Value: p.Score},
{Name: "ImportCount", Value: float64(p.ImportCount)},
{Name: "Stars", Value: float64(p.Stars)},
}
fork := fmt.Sprint(p.Fork) // "true" or "false"
meta := &search.DocumentMetadata{
// Customize the rank property by the product of the package score and
// natural logarithm of the import count. Rank must be a positive integer.
// Use 1 as minimum rank and keep 3 digits of precision to distinguish
// close ranks.
Rank: int(math.Max(1, 1000*p.Score*math.Log(math.E+float64(p.ImportCount)))),
Facets: []search.Facet{
{Name: "Fork", Value: search.Atom(fork)},
},
}
return fields, meta, nil
}
// PutIndex creates or updates a package entry in the search index. id identifies the document in the index.
// If pdoc is non-nil, PutIndex will update the package's name, path and synopsis supplied by pdoc.
// pdoc must be non-nil for a package's first call to PutIndex.
// PutIndex updates the Score to score, if non-negative.
func PutIndex(c context.Context, pdoc *doc.Package, id string, score float64, importCount int) error {
if id == "" {
return errors.New("indexae: no id assigned")
}
idx, err := search.Open("packages")
if err != nil {
return err
}
var pkg Package
if err := idx.Get(c, id, &pkg); err != nil {
if err != search.ErrNoSuchDocument {
return err
} else if pdoc == nil {
// Cannot update a non-existing document.
return errors.New("indexae: cannot create new document with nil pdoc")
}
// No such document in the index, fall through.
}
// Update document information accordingly.
if pdoc != nil {
pkg.Name = pdoc.Name
pkg.Path = pdoc.ImportPath
pkg.Synopsis = pdoc.Synopsis
pkg.Stars = pdoc.Stars
pkg.Fork = pdoc.Fork
}
if score >= 0 {
pkg.Score = score
}
pkg.ImportCount = importCount
if _, err := idx.Put(c, id, &pkg); err != nil {
return err
}
return nil
}
// Search searches the packages index for a given query. A path-like query string
// will be passed in unchanged, whereas single words will be stemmed.
func Search(c context.Context, q string) ([]Package, error) {
index, err := search.Open("packages")
if err != nil {
return nil, err
}
var pkgs []Package
opt := &search.SearchOptions{
Limit: 100,
}
for it := index.Search(c, parseQuery2(q), opt); ; {
var p Package
_, err := it.Next(&p)
if err == search.Done {
break
}
if err != nil {
return nil, err
}
pkgs = append(pkgs, p)
}
return pkgs, nil
}
func parseQuery2(q string) string {
var buf bytes.Buffer
for _, s := range strings.FieldsFunc(q, isTermSep2) {
if strings.ContainsAny(s, "./") {
// Quote terms with / or . for path like query.
fmt.Fprintf(&buf, "%q ", s)
} else {
// Stem for single word terms.
fmt.Fprintf(&buf, "~%v ", s)
}
}
return buf.String()
}
func isTermSep2(r rune) bool {
return unicode.IsSpace(r) ||
r != '.' && r != '/' && unicode.IsPunct(r) ||
unicode.IsSymbol(r)
}
func deleteIndex(c context.Context, id string) error {
idx, err := search.Open("packages")
if err != nil {
return err
}
return idx.Delete(c, id)
}
// PurgeIndex deletes all the packages from the search index.
func PurgeIndex(c context.Context) error {
idx, err := search.Open("packages")
if err != nil {
return err
}
n := 0
for it := idx.List(c, &search.ListOptions{IDsOnly: true}); ; n++ {
var pkg Package
id, err := it.Next(&pkg)
if err == search.Done {
break
}
if err != nil {
return err
}
if err := idx.Delete(c, id); err != nil {
log.Printf("Failed to delete package %s: %v", id, err)
continue
}
}
log.Printf("Purged %d packages from the search index.", n)
return nil
}
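
As a hedged usage sketch of the two exported entry points above: only the PutIndex and Search signatures and the Package fields come from this file; the caller package, the choice of the import path as document id, and the score value are assumptions. With Score 0.99 and ImportCount 1, the Rank computed in Save works out to int(1000 * 0.99 * ln(e+1)), about 1300.

```go
// Hypothetical caller, for illustration only. ctx must be an App Engine
// context (the tests below obtain one from aetest.NewContext).
package indexer

import (
	"log"

	"golang.org/x/net/context"

	"github.com/golang/gddo/database"
	"github.com/golang/gddo/doc"
)

func indexAndQuery(ctx context.Context, pdoc *doc.Package, importCount int) {
	// Reusing the import path as the search-document id is an assumption of
	// this sketch; PutIndex only requires a non-empty id.
	if err := database.PutIndex(ctx, pdoc, pdoc.ImportPath, 0.99, importCount); err != nil {
		log.Printf("PutIndex(%s): %v", pdoc.ImportPath, err)
		return
	}

	// Single-word queries are stemmed ("~oauth"); terms containing '.' or '/'
	// are quoted and passed through as path-like queries.
	pkgs, err := database.Search(ctx, "oauth")
	if err != nil {
		log.Printf("Search: %v", err)
		return
	}
	for _, p := range pkgs {
		log.Printf("%s %q (score %.2f, %d importers)", p.Path, p.Synopsis, p.Score, p.ImportCount)
	}
}
```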

126 vendor/github.com/golang/gddo/database/indexae_test.go generated vendored Normal file

@@ -0,0 +1,126 @@
// Copyright 2016 The Go Authors. All rights reserved.
//
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file or at
// https://developers.google.com/open-source/licenses/bsd.
package database
import (
"math"
"strconv"
"testing"
"google.golang.org/appengine/aetest"
"google.golang.org/appengine/search"
"github.com/golang/gddo/doc"
)
var pdoc = &doc.Package{
ImportPath: "github.com/golang/test",
Name: "test",
Synopsis: "This is a test package.",
Fork: true,
Stars: 10,
}
func TestPutIndexWithEmptyId(t *testing.T) {
c, done, err := aetest.NewContext()
if err != nil {
t.Fatal(err)
}
defer done()
if err := PutIndex(c, nil, "", 0, 0); err == nil {
t.Errorf("PutIndex succeeded unexpectedly")
}
}
func TestPutIndexCreateNilDoc(t *testing.T) {
c, done, err := aetest.NewContext()
if err != nil {
t.Fatal(err)
}
defer done()
if err := PutIndex(c, nil, "12345", -1, 2); err == nil {
t.Errorf("PutIndex succeeded unexpectedly")
}
}
func TestPutIndexNewPackageAndUpdate(t *testing.T) {
c, done, err := aetest.NewContext()
if err != nil {
t.Fatal(err)
}
defer done()
// Put a new package into search index.
if err := PutIndex(c, pdoc, "12345", 0.99, 1); err != nil {
t.Fatal(err)
}
// Verify the package that was put in is as expected.
idx, err := search.Open("packages")
if err != nil {
t.Fatal(err)
}
var got Package
if err = idx.Get(c, "12345", &got); err != nil && err != search.ErrNoSuchDocument {
t.Fatal(err)
}
wanted := Package{
Name: pdoc.Name,
Path: pdoc.ImportPath,
Synopsis: pdoc.Synopsis,
ImportCount: 1,
Fork: true,
Stars: 10,
Score: 0.99,
}
if got != wanted {
t.Errorf("PutIndex got %v, want %v", got, wanted)
}
// Update the import count of the package.
if err := PutIndex(c, nil, "12345", -1, 2); err != nil {
t.Fatal(err)
}
if err := idx.Get(c, "12345", &got); err != nil && err != search.ErrNoSuchDocument {
t.Fatal(err)
}
wanted.ImportCount = 2
if got != wanted {
t.Errorf("PutIndex got %v, want %v", got, wanted)
}
}
func TestSearchResultSorted(t *testing.T) {
c, done, err := aetest.NewContext()
if err != nil {
t.Fatal(err)
}
defer done()
// Put multiple packages into the search index and the search result
// should be sorted properly.
id := "1"
for i := 2; i < 6; i++ {
id += strconv.Itoa(i)
pdoc.Synopsis = id
if err := PutIndex(c, pdoc, id, math.Pow(0.9, float64(i)), 10*i); err != nil {
t.Fatal(err)
}
}
got, err := Search(c, "test")
if err != nil {
t.Fatal(err)
}
wanted := []string{"123", "12", "1234", "12345"}
for i, p := range got {
if p.Synopsis != wanted[i] {
t.Errorf("Search got %v, want %v", p.Synopsis, wanted[i])
}
}
}

123 vendor/github.com/golang/gddo/database/stem.go generated vendored Normal file

@@ -0,0 +1,123 @@
// Copyright 2013 The Go Authors. All rights reserved.
//
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file or at
// https://developers.google.com/open-source/licenses/bsd.
// This file implements the Paice/Husk stemming algorithm.
// http://www.comp.lancs.ac.uk/computing/research/stemming/Links/paice.htm
package database
import (
"bytes"
"regexp"
"strconv"
)
const stemRuleText = `
ai*2. a*1.
bb1.
city3s. ci2> cn1t>
dd1. dei3y> deec2ss. dee1. de2> dooh4>
e1>
feil1v. fi2>
gni3> gai3y. ga2> gg1.
ht*2. hsiug5ct. hsi3>
i*1. i1y>
ji1d. juf1s. ju1d. jo1d. jeh1r. jrev1t. jsim2t. jn1d. j1s.
lbaifi6. lbai4y. lba3> lbi3. lib2l> lc1. lufi4y. luf3> lu2. lai3> lau3> la2> ll1.
mui3. mu*2. msi3> mm1.
nois4j> noix4ct. noi3> nai3> na2> nee0. ne2> nn1.
pihs4> pp1.
re2> rae0. ra2. ro2> ru2> rr1. rt1> rei3y>
sei3y> sis2. si2> ssen4> ss0. suo3> su*2. s*1> s0.
tacilp4y. ta2> tnem4> tne3> tna3> tpir2b. tpro2b. tcud1. tpmus2. tpec2iv. tulo2v. tsis0. tsi3> tt1.
uqi3. ugo1.
vis3j> vie0. vi2>
ylb1> yli3y> ylp0. yl2> ygo1. yhp1. ymo1. ypo1. yti3> yte3> ytl2. yrtsi5. yra3> yro3> yfi3. ycn2t> yca3>
zi2> zy1s.
`
type stemRule struct {
text string
suffix []byte
intact bool
remove int
append []byte
more bool
}
func parseStemRules() map[byte][]*stemRule {
rules := make(map[byte][]*stemRule)
for _, m := range regexp.MustCompile(`(?m)(?:^| )([a-zA-Z]*)(\*?)([0-9])([a-zA-z]*)([.>])`).FindAllStringSubmatch(stemRuleText, -1) {
suffix := []byte(m[1])
for i := 0; i < len(suffix)/2; i++ {
j := len(suffix) - 1 - i
suffix[i], suffix[j] = suffix[j], suffix[i]
}
remove, _ := strconv.Atoi(m[3])
r := &stemRule{
text: m[0],
suffix: suffix,
intact: m[2] == "*",
remove: remove,
append: []byte(m[4]),
more: m[5] == ">",
}
c := suffix[len(suffix)-1]
rules[c] = append(rules[c], r)
}
return rules
}
var stemRules = parseStemRules()
func firstVowel(offset int, p []byte) int {
for i, b := range p {
switch b {
case 'a', 'e', 'i', 'o', 'u':
return offset + i
case 'y':
if offset+i > 0 {
return offset + i
}
}
}
return -1
}
func acceptableStem(a, b []byte) bool {
i := firstVowel(0, a)
if i < 0 {
i = firstVowel(len(a), b)
}
l := len(a) + len(b)
if i == 0 {
return l > 1
}
return i >= 0 && l > 2
}
func stem(s string) string {
stem := bytes.ToLower([]byte(s))
intact := true
run := acceptableStem(stem, []byte{})
for run {
run = false
for _, rule := range stemRules[stem[len(stem)-1]] {
if bytes.HasSuffix(stem, rule.suffix) &&
(intact || !rule.intact) &&
acceptableStem(stem[:len(stem)-rule.remove], rule.append) {
stem = append(stem[:len(stem)-rule.remove], rule.append...)
intact = false
run = rule.more
break
}
}
}
return string(stem)
}

31 vendor/github.com/golang/gddo/database/stem_test.go generated vendored Normal file

@@ -0,0 +1,31 @@
// Copyright 2013 The Go Authors. All rights reserved.
//
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file or at
// https://developers.google.com/open-source/licenses/bsd.
package database
import (
"testing"
)
var stemTests = []struct {
s, expected string
}{
{"html", "html"},
{"strings", "string"},
{"ballroom", "ballroom"},
{"mechanicalization", "mech"},
{"pragmaticality", "pragm"},
{"rationalistically", "rat"},
}
func TestStem(t *testing.T) {
for _, tt := range stemTests {
actual := stem(tt.s)
if actual != tt.expected {
t.Errorf("stem(%q) = %q, want %q", tt.s, actual, tt.expected)
}
}
}

141 vendor/github.com/golang/gddo/database/stop.go generated vendored Normal file

@@ -0,0 +1,141 @@
// Copyright 2013 The Go Authors. All rights reserved.
//
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file or at
// https://developers.google.com/open-source/licenses/bsd.
package database
import (
"strings"
)
var stopWord = createStopWordMap()
func createStopWordMap() map[string]bool {
m := make(map[string]bool)
for _, s := range strings.Fields(stopText) {
m[s] = true
}
return m
}
const stopText = `
a
about
after
all
also
am
an
and
another
any
are
as
at
b
be
because
been
before
being
between
both
but
by
c
came
can
come
could
d
did
do
e
each
f
for
from
g
get
got
h
had
has
have
he
her
here
him
himself
his
how
i
if
in
into
is
it
j
k
l
like
m
make
many
me
might
more
most
much
must
my
n
never
now
o
of
on
only
or
other
our
out
over
p
q
r
s
said
same
see
should
since
some
still
such
t
take
than
that
the
their
them
then
there
these
they
this
those
through
to
too
u
under
v
w
x
y
z
`

37 vendor/github.com/golang/gddo/deploy/gddo.conf generated vendored Normal file

@@ -0,0 +1,37 @@
log_format verbose '$remote_addr\t[$time_local]\t$host\t$request\t$status\t$body_bytes_sent\t$http_referer\t$http_user_agent\t$request_time\t$upstream_response_time';
server {
server_name go.pkgdoc.org pkgdoc.org www.pkgdoc.org www.godoc.org;
rewrite ^ http://godoc.org$request_uri? permanent;
}
server {
server_name talks.godoc.org;
rewrite ^ http://go-talks.appspot.com$request_uri? permanent;
}
server {
listen 80 default_server;
listen 443 ssl default_server;
ssl_certificate /ssl/godoc_org.crt;
ssl_certificate_key /ssl/godoc_org.key;
server_name _ godoc.org api.godoc.org;
access_log /var/log/nginx/gddo.log verbose;
gzip on;
gzip_proxied any;
gzip_types text/css text/plain text/javascript application/javascript;
if ($http_user_agent ~ (seek\.io|Wotbox|Exabot|HTTrack|TurnitinBot|Ezooms|PaperLiBot|Sogou) ) {
return 444;
}
location / {
proxy_pass http://127.0.0.1:8080;
proxy_set_header Host $http_host;
proxy_set_header X-Scheme $scheme;
proxy_set_header X-Real-IP $remote_addr;
}
}

695 vendor/github.com/golang/gddo/deploy/redis.conf generated vendored Normal file

@@ -0,0 +1,695 @@
# Redis configuration file example
# Note on units: when memory size is needed, it is possible to specify
# it in the usual form of 1k 5GB 4M and so forth:
#
# 1k => 1000 bytes
# 1kb => 1024 bytes
# 1m => 1000000 bytes
# 1mb => 1024*1024 bytes
# 1g => 1000000000 bytes
# 1gb => 1024*1024*1024 bytes
#
# units are case insensitive so 1GB 1Gb 1gB are all the same.
# By default Redis does not run as a daemon. Use 'yes' if you need it.
# Note that Redis will write a pid file in /var/run/redis.pid when daemonized.
daemonize no
# When running daemonized, Redis writes a pid file in /var/run/redis.pid by
# default. You can specify a custom pid file location here.
pidfile /var/run/redis.pid
# Accept connections on the specified port, default is 6379.
# If port 0 is specified Redis will not listen on a TCP socket.
port 6379
# By default Redis listens for connections from all the network interfaces
# available on the server. It is possible to listen to just one or multiple
# interfaces using the "bind" configuration directive, followed by one or
# more IP addresses.
#
# Examples:
#
# bind 192.168.1.100 10.0.0.1
# bind 127.0.0.1
# Specify the path for the unix socket that will be used to listen for
# incoming connections. There is no default, so Redis will not listen
# on a unix socket when not specified.
#
# unixsocket /tmp/redis.sock
# unixsocketperm 755
# Close the connection after a client is idle for N seconds (0 to disable)
timeout 0
# TCP keepalive.
#
# If non-zero, use SO_KEEPALIVE to send TCP ACKs to clients in absence
# of communication. This is useful for two reasons:
#
# 1) Detect dead peers.
# 2) Keep the connection alive from the point of view of network
# equipment in the middle.
#
# On Linux, the specified value (in seconds) is the period used to send ACKs.
# Note that to close the connection the double of the time is needed.
# On other kernels the period depends on the kernel configuration.
#
# A reasonable value for this option is 60 seconds.
tcp-keepalive 0
# Specify the server verbosity level.
# This can be one of:
# debug (a lot of information, useful for development/testing)
# verbose (many rarely useful info, but not a mess like the debug level)
# notice (moderately verbose, what you want in production probably)
# warning (only very important / critical messages are logged)
loglevel notice
# Specify the log file name. Also the empty string can be used to force
# Redis to log on the standard output. Note that if you use standard
# output for logging but daemonize, logs will be sent to /dev/null
logfile ""
# To enable logging to the system logger, just set 'syslog-enabled' to yes,
# and optionally update the other syslog parameters to suit your needs.
# syslog-enabled no
# Specify the syslog identity.
# syslog-ident redis
# Specify the syslog facility. Must be USER or between LOCAL0-LOCAL7.
# syslog-facility local0
# Set the number of databases. The default database is DB 0, you can select
# a different one on a per-connection basis using SELECT <dbid> where
# dbid is a number between 0 and 'databases'-1
databases 16
################################ SNAPSHOTTING #################################
#
# Save the DB on disk:
#
# save <seconds> <changes>
#
# Will save the DB if both the given number of seconds and the given
# number of write operations against the DB occurred.
#
# In the example below the behaviour will be to save:
# after 900 sec (15 min) if at least 1 key changed
# after 300 sec (5 min) if at least 10 keys changed
# after 60 sec if at least 10000 keys changed
#
# Note: you can disable saving at all commenting all the "save" lines.
#
# It is also possible to remove all the previously configured save
# points by adding a save directive with a single empty string argument
# like in the following example:
#
# save ""
save 900 1
save 300 10
save 60 10000
# By default Redis will stop accepting writes if RDB snapshots are enabled
# (at least one save point) and the latest background save failed.
# This will make the user aware (in a hard way) that data is not persisting
# on disk properly, otherwise chances are that no one will notice and some
# disaster will happen.
#
# If the background saving process will start working again Redis will
# automatically allow writes again.
#
# However if you have setup your proper monitoring of the Redis server
# and persistence, you may want to disable this feature so that Redis will
# continue to work as usually even if there are problems with disk,
# permissions, and so forth.
stop-writes-on-bgsave-error yes
# Compress string objects using LZF when dumping .rdb databases?
# By default that's set to 'yes', as it's almost always a win.
# If you want to save some CPU in the saving child set it to 'no' but
# the dataset will likely be bigger if you have compressible values or keys.
rdbcompression yes
# Since version 5 of RDB a CRC64 checksum is placed at the end of the file.
# This makes the format more resistant to corruption but there is a performance
# hit to pay (around 10%) when saving and loading RDB files, so you can disable it
# for maximum performances.
#
# RDB files created with checksum disabled have a checksum of zero that will
# tell the loading code to skip the check.
rdbchecksum yes
# The filename where to dump the DB
dbfilename dump.rdb
# The working directory.
#
# The DB will be written inside this directory, with the filename specified
# above using the 'dbfilename' configuration directive.
#
# The Append Only File will also be created inside this directory.
#
# Note that you must specify a directory here, not a file name.
dir /data
################################# REPLICATION #################################
# Master-Slave replication. Use slaveof to make a Redis instance a copy of
# another Redis server. Note that the configuration is local to the slave
# so for example it is possible to configure the slave to save the DB with a
# different interval, or to listen to another port, and so on.
#
# slaveof <masterip> <masterport>
# If the master is password protected (using the "requirepass" configuration
# directive below) it is possible to tell the slave to authenticate before
# starting the replication synchronization process, otherwise the master will
# refuse the slave request.
#
# masterauth <master-password>
# When a slave loses its connection with the master, or when the replication
# is still in progress, the slave can act in two different ways:
#
# 1) if slave-serve-stale-data is set to 'yes' (the default) the slave will
# still reply to client requests, possibly with out of date data, or the
# data set may just be empty if this is the first synchronization.
#
# 2) if slave-serve-stale-data is set to 'no' the slave will reply with
# an error "SYNC with master in progress" to all the kind of commands
# but to INFO and SLAVEOF.
#
slave-serve-stale-data yes
# You can configure a slave instance to accept writes or not. Writing against
# a slave instance may be useful to store some ephemeral data (because data
# written on a slave will be easily deleted after resync with the master) but
# may also cause problems if clients are writing to it because of a
# misconfiguration.
#
# Since Redis 2.6 by default slaves are read-only.
#
# Note: read only slaves are not designed to be exposed to untrusted clients
# on the internet. It's just a protection layer against misuse of the instance.
# Still a read only slave exports by default all the administrative commands
# such as CONFIG, DEBUG, and so forth. To a limited extent you can improve
# security of read only slaves using 'rename-command' to shadow all the
# administrative / dangerous commands.
slave-read-only yes
# Slaves send PINGs to the server at a predefined interval. It's possible to change
# this interval with the repl_ping_slave_period option. The default value is 10
# seconds.
#
# repl-ping-slave-period 10
# The following option sets the replication timeout for:
#
# 1) Bulk transfer I/O during SYNC, from the point of view of slave.
# 2) Master timeout from the point of view of slaves (data, pings).
# 3) Slave timeout from the point of view of masters (REPLCONF ACK pings).
#
# It is important to make sure that this value is greater than the value
# specified for repl-ping-slave-period otherwise a timeout will be detected
# every time there is low traffic between the master and the slave.
#
# repl-timeout 60
# Disable TCP_NODELAY on the slave socket after SYNC?
#
# If you select "yes" Redis will use a smaller number of TCP packets and
# less bandwidth to send data to slaves. But this can add a delay for
# the data to appear on the slave side, up to 40 milliseconds with
# Linux kernels using a default configuration.
#
# If you select "no" the delay for data to appear on the slave side will
# be reduced but more bandwidth will be used for replication.
#
# By default we optimize for low latency, but in very high traffic conditions
# or when the master and slaves are many hops away, turning this to "yes" may
# be a good idea.
repl-disable-tcp-nodelay no
# Set the replication backlog size. The backlog is a buffer that accumulates
# slave data when slaves are disconnected for some time, so that when a slave
# wants to reconnect again, often a full resync is not needed, but a partial
# resync is enough, just passing the portion of data the slave missed while
# disconnected.
#
# The bigger the replication backlog, the longer the slave can be
# disconnected and still be able to perform a partial resynchronization later.
#
# The backlog is only allocated once there is at least one slave connected.
#
# repl-backlog-size 1mb
# After a master has had no connected slaves for some time, the backlog
# will be freed. The following option configures the amount of seconds that
# need to elapse, starting from the time the last slave disconnected, for
# the backlog buffer to be freed.
#
# A value of 0 means to never release the backlog.
#
# repl-backlog-ttl 3600
# The slave priority is an integer number published by Redis in the INFO output.
# It is used by Redis Sentinel in order to select a slave to promote into a
# master if the master is no longer working correctly.
#
# A slave with a low priority number is considered better for promotion, so
# for instance if there are three slaves with priority 10, 100, 25 Sentinel will
# pick the one with priority 10, that is the lowest.
#
# However a special priority of 0 marks the slave as not able to perform the
# role of master, so a slave with priority of 0 will never be selected by
# Redis Sentinel for promotion.
#
# By default the priority is 100.
slave-priority 100
# It is possible for a master to stop accepting writes if there are fewer than
# N slaves connected, with a lag less than or equal to M seconds.
#
# The N slaves need to be in "online" state.
#
# The lag in seconds, which must be <= the specified value, is calculated from
# the last ping received from the slave, which is usually sent every second.
#
# This option does not GUARANTEE that N replicas will accept the write, but
# it limits the window of exposure for lost writes, in case not enough slaves
# are available, to the specified number of seconds.
#
# For example to require at least 3 slaves with a lag <= 10 seconds use:
#
# min-slaves-to-write 3
# min-slaves-max-lag 10
#
# Setting one or the other to 0 disables the feature.
#
# By default min-slaves-to-write is set to 0 (feature disabled) and
# min-slaves-max-lag is set to 10.
################################## SECURITY ###################################
# Require clients to issue AUTH <PASSWORD> before processing any other
# commands. This might be useful in environments in which you do not trust
# others with access to the host running redis-server.
#
# This should stay commented out for backward compatibility and because most
# people do not need auth (e.g. they run their own servers).
#
# Warning: since Redis is pretty fast an outside user can try up to
# 150k passwords per second against a good box. This means that you should
# use a very strong password otherwise it will be very easy to break.
#
# requirepass foobared
# Command renaming.
#
# It is possible to change the name of dangerous commands in a shared
# environment. For instance the CONFIG command may be renamed into something
# hard to guess so that it will still be available for internal-use tools
# but not available for general clients.
#
# Example:
#
# rename-command CONFIG b840fc02d524045429941cc15f59e41cb7be6c52
#
# It is also possible to completely kill a command by renaming it into
# an empty string:
#
# rename-command CONFIG ""
#
# Please note that changing the name of commands that are logged into the
# AOF file or transmitted to slaves may cause problems.
################################### LIMITS ####################################
# Set the max number of connected clients at the same time. By default
# this limit is set to 10000 clients, however if the Redis server is not
# able to configure the process file limit to allow for the specified limit
# the max number of allowed clients is set to the current file limit
# minus 32 (as Redis reserves a few file descriptors for internal uses).
#
# Once the limit is reached Redis will close all the new connections sending
# an error 'max number of clients reached'.
#
# maxclients 10000
# Don't use more memory than the specified amount of bytes.
# When the memory limit is reached Redis will try to remove keys
# according to the eviction policy selected (see maxmemory-policy).
#
# If Redis can't remove keys according to the policy, or if the policy is
# set to 'noeviction', Redis will start to reply with errors to commands
# that would use more memory, like SET, LPUSH, and so on, and will continue
# to reply to read-only commands like GET.
#
# This option is usually useful when using Redis as an LRU cache, or to set
# a hard memory limit for an instance (using the 'noeviction' policy).
#
# WARNING: If you have slaves attached to an instance with maxmemory on,
# the size of the output buffers needed to feed the slaves is subtracted
# from the used memory count, so that network problems / resyncs will
# not trigger a loop where keys are evicted, and in turn the output
# buffer of slaves is full with DELs of keys evicted triggering the deletion
# of more keys, and so forth until the database is completely emptied.
#
# In short... if you have slaves attached it is suggested that you set a lower
# limit for maxmemory so that there is some free RAM on the system for slave
# output buffers (but this is not needed if the policy is 'noeviction').
#
# maxmemory <bytes>
# MAXMEMORY POLICY: how Redis will select what to remove when maxmemory
# is reached. You can select among five behaviors:
#
# volatile-lru -> remove the key with an expire set using an LRU algorithm
# allkeys-lru -> remove any key according to the LRU algorithm
# volatile-random -> remove a random key with an expire set
# allkeys-random -> remove a random key, any key
# volatile-ttl -> remove the key with the nearest expire time (smallest TTL)
# noeviction -> don't expire at all, just return an error on write operations
#
# Note: with any of the above policies, Redis will return an error on write
# operations when there are no suitable keys for eviction.
#
# At the date of writing these commands are: set setnx setex append
# incr decr rpush lpush rpushx lpushx linsert lset rpoplpush sadd
# sinter sinterstore sunion sunionstore sdiff sdiffstore zadd zincrby
# zunionstore zinterstore hset hsetnx hmset hincrby incrby decrby
# getset mset msetnx exec sort
#
# The default is:
#
# maxmemory-policy volatile-lru
# LRU and minimal TTL algorithms are not precise algorithms but approximated
# algorithms (in order to save memory), so you can also select the sample
# size to check. For instance by default Redis will check three keys and
# pick the one that was used least recently; you can change the sample size
# using the following configuration directive.
#
# maxmemory-samples 3
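#
# For example, a hypothetical cache-only instance might combine the three
# directives above as follows (illustrative values, not recommendations):
#
# maxmemory 256mb
# maxmemory-policy allkeys-lru
# maxmemory-samples 5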
############################## APPEND ONLY MODE ###############################
# By default Redis asynchronously dumps the dataset on disk. This mode is
# good enough in many applications, but an issue with the Redis process or
# a power outage may result in a few minutes of writes lost (depending on
# the configured save points).
#
# The Append Only File is an alternative persistence mode that provides
# much better durability. For instance using the default data fsync policy
# (see later in the config file) Redis can lose just one second of writes in a
# dramatic event like a server power outage, or a single write if something
# goes wrong with the Redis process itself but the operating system is
# still running correctly.
#
# AOF and RDB persistence can be enabled at the same time without problems.
# If the AOF is enabled on startup Redis will load the AOF, that is the file
# with the better durability guarantees.
#
# Please check http://redis.io/topics/persistence for more information.
appendonly no
# The name of the append only file (default: "appendonly.aof")
# appendfilename appendonly.aof
# The fsync() call tells the Operating System to actually write data on disk
# instead of waiting for more data in the output buffer. Some OSes will really
# flush data to disk, others will just try to do it ASAP.
#
# Redis supports three different modes:
#
# no: don't fsync, just let the OS flush the data when it wants. Faster.
# always: fsync after every write to the append only log. Slow, safest.
# everysec: fsync only one time every second. Compromise.
#
# The default is "everysec", as that's usually the right compromise between
# speed and data safety. It's up to you to understand if you can relax this to
# "no" that will let the operating system flush the output buffer when
# it wants, for better performances (but if you can live with the idea of
# some data loss consider the default persistence mode that's snapshotting),
# or on the contrary, use "always" that's very slow but a bit safer than
# everysec.
#
# For more details please check the following article:
# http://antirez.com/post/redis-persistence-demystified.html
#
# If unsure, use "everysec".
# appendfsync always
appendfsync everysec
# appendfsync no
# When the AOF fsync policy is set to always or everysec, and a background
# saving process (a background save or AOF log background rewriting) is
# performing a lot of I/O against the disk, in some Linux configurations
# Redis may block too long on the fsync() call. Note that there is no fix for
# this currently, as even performing fsync in a different thread will block
# our synchronous write(2) call.
#
# In order to mitigate this problem it's possible to use the following option
# that will prevent fsync() from being called in the main process while a
# BGSAVE or BGREWRITEAOF is in progress.
#
# This means that while another child is saving, the durability of Redis is
# the same as "appendfsync none". In practical terms, this means that it is
# possible to lose up to 30 seconds of log in the worst scenario (with the
# default Linux settings).
#
# If you have latency problems turn this to "yes". Otherwise leave it as
# "no" that is the safest pick from the point of view of durability.
no-appendfsync-on-rewrite no
# Automatic rewrite of the append only file.
# Redis is able to automatically rewrite the log file implicitly calling
# BGREWRITEAOF when the AOF log size grows by the specified percentage.
#
# This is how it works: Redis remembers the size of the AOF file after the
# latest rewrite (if no rewrite has happened since the restart, the size of
# the AOF at startup is used).
#
# This base size is compared to the current size. If the current size is
# bigger than the base size by the specified percentage, the rewrite is
# triggered. You also need to specify a minimal size for the AOF file to be
# rewritten; this is useful to avoid rewriting the AOF file even if the
# percentage increase is reached but the file is still pretty small.
#
# Specify a percentage of zero in order to disable the automatic AOF
# rewrite feature.
auto-aof-rewrite-percentage 100
auto-aof-rewrite-min-size 64mb
################################ LUA SCRIPTING ###############################
# Max execution time of a Lua script in milliseconds.
#
# If the maximum execution time is reached Redis will log that a script is
# still in execution after the maximum allowed time and will start to
# reply to queries with an error.
#
# When a long running script exceeds the maximum execution time only the
# SCRIPT KILL and SHUTDOWN NOSAVE commands are available. The first can be
# used to stop a script that has not yet called any write commands. The second
# is the only way to shut down the server when a write command has already
# been issued by the script but the user doesn't want to wait for the natural
# termination of the script.
#
# Set it to 0 or a negative value for unlimited execution without warnings.
lua-time-limit 5000
################################## SLOW LOG ###################################
# The Redis Slow Log is a system to log queries that exceeded a specified
# execution time. The execution time does not include the I/O operations
# like talking with the client, sending the reply and so forth,
# but just the time needed to actually execute the command (this is the only
# stage of command execution where the thread is blocked and can not serve
# other requests in the meantime).
#
# You can configure the slow log with two parameters: one tells Redis
# what is the execution time, in microseconds, to exceed in order for the
# command to get logged, and the other parameter is the length of the
# slow log. When a new command is logged the oldest one is removed from the
# queue of logged commands.
# The following time is expressed in microseconds, so 1000000 is equivalent
# to one second. Note that a negative number disables the slow log, while
# a value of zero forces the logging of every command.
slowlog-log-slower-than 10000
# There is no limit to this length. Just be aware that it will consume memory.
# You can reclaim memory used by the slow log with SLOWLOG RESET.
slowlog-max-len 128
############################# Event notification ##############################
# Redis can notify Pub/Sub clients about events happening in the key space.
# This feature is documented at http://redis.io/topics/keyspace-events
#
# For instance if keyspace event notifications are enabled, and a client
# performs a DEL operation on the key "foo" stored in database 0, two
# messages will be published via Pub/Sub:
#
# PUBLISH __keyspace@0__:foo del
# PUBLISH __keyevent@0__:del foo
#
# It is possible to select the events that Redis will notify among a set
# of classes. Every class is identified by a single character:
#
# K Keyspace events, published with __keyspace@<db>__ prefix.
# E Keyevent events, published with __keyevent@<db>__ prefix.
# g Generic commands (non-type specific) like DEL, EXPIRE, RENAME, ...
# $ String commands
# l List commands
# s Set commands
# h Hash commands
# z Sorted set commands
# x Expired events (events generated every time a key expires)
# e Evicted events (events generated when a key is evicted for maxmemory)
# A Alias for g$lshzxe, so that the "AKE" string means all the events.
#
# The "notify-keyspace-events" takes as argument a string that is composed
# by zero or multiple characters. The empty string means that notifications
# are disabled at all.
#
# Example: to enable list and generic events, from the point of view of the
# event name, use:
#
# notify-keyspace-events Elg
#
# Example 2: to get the stream of expired keys, subscribing to the channel
# named __keyevent@0__:expired, use:
#
# notify-keyspace-events Ex
#
# By default all notifications are disabled because most users don't need
# this feature and the feature has some overhead. Note that if you don't
# specify at least one of K or E, no events will be delivered.
notify-keyspace-events ""
############################### ADVANCED CONFIG ###############################
# Hashes are encoded using a memory efficient data structure when they have a
# small number of entries, and the biggest entry does not exceed a given
# threshold. These thresholds can be configured using the following directives.
hash-max-ziplist-entries 512
hash-max-ziplist-value 64
# Similarly to hashes, small lists are also encoded in a special way in order
# to save a lot of space. The special representation is only used when
# you are under the following limits:
list-max-ziplist-entries 512
list-max-ziplist-value 64
# Sets have a special encoding in just one case: when a set is composed
# of just strings that happen to be integers in radix 10 in the range
# of 64 bit signed integers.
# The following configuration setting sets the limit on the size of the
# set in order to use this special memory saving encoding.
set-max-intset-entries 512
# Similarly to hashes and lists, sorted sets are also specially encoded in
# order to save a lot of space. This encoding is only used when the length and
# elements of a sorted set are below the following limits:
zset-max-ziplist-entries 128
zset-max-ziplist-value 64
# Active rehashing uses 1 millisecond every 100 milliseconds of CPU time in
# order to help rehashing the main Redis hash table (the one mapping top-level
# keys to values). The hash table implementation Redis uses (see dict.c)
# performs lazy rehashing: the more operations you run against a hash table
# that is rehashing, the more rehashing "steps" are performed, so if the
# server is idle the rehashing is never complete and some more memory is used
# by the hash table.
#
# The default is to use this millisecond 10 times every second in order to
# actively rehash the main dictionaries, freeing memory when possible.
#
# If unsure:
# use "activerehashing no" if you have hard latency requirements and it is
# not a good thing in your environment that Redis can reply from time to time
# to queries with a 2 millisecond delay.
#
# use "activerehashing yes" if you don't have such hard requirements but
# want to free memory asap when possible.
activerehashing yes
# The client output buffer limits can be used to force disconnection of clients
# that are not reading data from the server fast enough for some reason (a
# common reason is that a Pub/Sub client can't consume messages as fast as the
# publisher can produce them).
#
# The limit can be set differently for the three different classes of clients:
#
# normal -> normal clients
# slave -> slave clients and MONITOR clients
# pubsub -> clients subscribed to at least one pubsub channel or pattern
#
# The syntax of every client-output-buffer-limit directive is the following:
#
# client-output-buffer-limit <class> <hard limit> <soft limit> <soft seconds>
#
# A client is immediately disconnected once the hard limit is reached, or if
# the soft limit is reached and remains reached for the specified number of
# seconds (continuously).
# So for instance if the hard limit is 32 megabytes and the soft limit is
# 16 megabytes / 10 seconds, the client will get disconnected immediately
# if the size of the output buffers reaches 32 megabytes, but will also get
# disconnected if the client reaches 16 megabytes and continuously exceeds
# the limit for 10 seconds.
#
# By default normal clients are not limited because they don't receive data
# without asking (in a push way), but just after a request, so only
# asynchronous clients may create a scenario where data is requested faster
# than it can be read.
#
# Instead there is a default limit for pubsub and slave clients, since
# subscribers and slaves receive data in a push fashion.
#
# Both the hard and the soft limit can be disabled by setting them to zero.
client-output-buffer-limit normal 0 0 0
client-output-buffer-limit slave 256mb 64mb 60
client-output-buffer-limit pubsub 32mb 8mb 60
# Redis calls an internal function to perform many background tasks, like
# closing connections of clients in timeout, purging expired keys that are
# never requested, and so forth.
#
# Not all tasks are performed with the same frequency, but Redis checks for
# tasks to perform according to the specified "hz" value.
#
# By default "hz" is set to 10. Raising the value will use more CPU when
# Redis is idle, but at the same time will make Redis more responsive when
# there are many keys expiring at the same time, and timeouts may be
# handled with more precision.
#
# The range is between 1 and 500, however a value over 100 is usually not
# a good idea. Most users should use the default of 10 and raise this up to
# 100 only in environments where very low latency is required.
hz 10
# When a child rewrites the AOF file, if the following option is enabled
# the file will be fsync-ed every 32 MB of data generated. This is useful
# in order to commit the file to the disk more incrementally and avoid
# big latency spikes.
aof-rewrite-incremental-fsync yes
################################## INCLUDES ###################################
# Include one or more other config files here. This is useful if you
# have a standard template that goes to all Redis servers but also need
# to customize a few per-server settings. Include files can include
# other files, so use this wisely.
#
# include /path/to/local.conf
# include /path/to/other.conf
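
The settings above are consumed from Go through the github.com/garyburd/redigo/redis client, the same package imported by the gddo-admin tool later in this diff. As a rough sketch only (the address and channel name are assumptions, and the stock file above ships with notifications disabled, so the server would need notify-keyspace-events "Ex"), a watcher for expired-key events could look like:

    package main

    import (
        "log"

        "github.com/garyburd/redigo/redis"
    )

    func main() {
        // Connect to the Redis instance described by the configuration above.
        c, err := redis.Dial("tcp", "localhost:6379")
        if err != nil {
            log.Fatal(err)
        }
        defer c.Close()

        // Subscribe to the expired-key event channel for database 0.
        psc := redis.PubSubConn{Conn: c}
        if err := psc.Subscribe("__keyevent@0__:expired"); err != nil {
            log.Fatal(err)
        }
        for {
            switch v := psc.Receive().(type) {
            case redis.Message:
                log.Printf("expired key: %s", v.Data)
            case redis.Subscription:
                log.Printf("subscribed to %s", v.Channel)
            case error:
                log.Fatal(v)
            }
        }
    }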


@@ -0,0 +1,2 @@
#!/bin/sh
exec /go/bin/gddo-server 2>&1

2
vendor/github.com/golang/gddo/deploy/services/nginx/run generated vendored Executable file

@@ -0,0 +1,2 @@
#!/bin/sh
exec /usr/sbin/nginx 2>&1

2
vendor/github.com/golang/gddo/deploy/services/redis/run generated vendored Executable file

@@ -0,0 +1,2 @@
#!/bin/sh
exec /usr/bin/redis-server 2>&1

656
vendor/github.com/golang/gddo/doc/builder.go generated vendored Normal file

@@ -0,0 +1,656 @@
// Copyright 2013 The Go Authors. All rights reserved.
//
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file or at
// https://developers.google.com/open-source/licenses/bsd.
package doc
import (
"bytes"
"errors"
"go/ast"
"go/build"
"go/doc"
"go/format"
"go/parser"
"go/token"
"regexp"
"sort"
"strings"
"time"
"unicode"
"unicode/utf8"
"github.com/golang/gddo/gosrc"
)
func startsWithUppercase(s string) bool {
r, _ := utf8.DecodeRuneInString(s)
return unicode.IsUpper(r)
}
var badSynopsisPrefixes = []string{
"Autogenerated by Thrift Compiler",
"Automatically generated ",
"Auto-generated by ",
"Copyright ",
"COPYRIGHT ",
`THE SOFTWARE IS PROVIDED "AS IS"`,
"TODO: ",
"vim:",
}
// synopsis extracts the first sentence from s. All runs of whitespace are
// replaced by a single space.
func synopsis(s string) string {
parts := strings.SplitN(s, "\n\n", 2)
s = parts[0]
var buf []byte
const (
other = iota
period
space
)
last := space
Loop:
for i := 0; i < len(s); i++ {
b := s[i]
switch b {
case ' ', '\t', '\r', '\n':
switch last {
case period:
break Loop
case other:
buf = append(buf, ' ')
last = space
}
case '.':
last = period
buf = append(buf, b)
default:
last = other
buf = append(buf, b)
}
}
// Ensure that synopsis fits an App Engine datastore text property.
const m = 400
if len(buf) > m {
buf = buf[:m]
if i := bytes.LastIndex(buf, []byte{' '}); i >= 0 {
buf = buf[:i]
}
buf = append(buf, " ..."...)
}
s = string(buf)
r, n := utf8.DecodeRuneInString(s)
if n < 0 || unicode.IsPunct(r) || unicode.IsSymbol(r) {
// ignore Markdown headings, editor settings, Go build constraints, and * in poorly formatted block comments.
s = ""
} else {
for _, prefix := range badSynopsisPrefixes {
if strings.HasPrefix(s, prefix) {
s = ""
break
}
}
}
return s
}
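// For illustration (hypothetical input, not taken from this package): given
// the doc comment "Package qux implements parsing.\nSee the README for more.",
// scanning stops at the first period that is followed by whitespace, so
// synopsis returns "Package qux implements parsing."; any whitespace runs
// inside that first sentence would have been collapsed to single spaces.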
var referencesPats = []*regexp.Regexp{
regexp.MustCompile(`"([-a-zA-Z0-9~+_./]+)"`), // quoted path
regexp.MustCompile(`https://drone\.io/([-a-zA-Z0-9~+_./]+)/status\.png`),
regexp.MustCompile(`\b(?:` + strings.Join([]string{
`go\s+get\s+`,
`goinstall\s+`,
regexp.QuoteMeta("http://godoc.org/"),
regexp.QuoteMeta("http://gopkgdoc.appspot.com/pkg/"),
regexp.QuoteMeta("http://go.pkgdoc.org/"),
regexp.QuoteMeta("http://gowalker.org/"),
}, "|") + `)([-a-zA-Z0-9~+_./]+)`),
}
// addReferences adds packages referenced in plain text s.
func addReferences(references map[string]bool, s []byte) {
for _, pat := range referencesPats {
for _, m := range pat.FindAllSubmatch(s, -1) {
p := string(m[1])
if gosrc.IsValidRemotePath(p) {
references[p] = true
}
}
}
}
type byFuncName []*doc.Func
func (s byFuncName) Len() int { return len(s) }
func (s byFuncName) Swap(i, j int) { s[i], s[j] = s[j], s[i] }
func (s byFuncName) Less(i, j int) bool { return s[i].Name < s[j].Name }
func removeAssociations(dpkg *doc.Package) {
for _, t := range dpkg.Types {
dpkg.Funcs = append(dpkg.Funcs, t.Funcs...)
t.Funcs = nil
}
sort.Sort(byFuncName(dpkg.Funcs))
}
// builder holds the state used when building the documentation.
type builder struct {
srcs map[string]*source
fset *token.FileSet
examples []*doc.Example
buf []byte // scratch space for printNode method.
}
type Value struct {
Decl Code
Pos Pos
Doc string
}
func (b *builder) values(vdocs []*doc.Value) []*Value {
var result []*Value
for _, d := range vdocs {
result = append(result, &Value{
Decl: b.printDecl(d.Decl),
Pos: b.position(d.Decl),
Doc: d.Doc,
})
}
return result
}
type Note struct {
Pos Pos
UID string
Body string
}
type posNode token.Pos
func (p posNode) Pos() token.Pos { return token.Pos(p) }
func (p posNode) End() token.Pos { return token.Pos(p) }
func (b *builder) notes(gnotes map[string][]*doc.Note) map[string][]*Note {
if len(gnotes) == 0 {
return nil
}
notes := make(map[string][]*Note)
for tag, gvalues := range gnotes {
values := make([]*Note, len(gvalues))
for i := range gvalues {
values[i] = &Note{
Pos: b.position(posNode(gvalues[i].Pos)),
UID: gvalues[i].UID,
Body: strings.TrimSpace(gvalues[i].Body),
}
}
notes[tag] = values
}
return notes
}
type Example struct {
Name string
Doc string
Code Code
Play string
Output string
}
var exampleOutputRx = regexp.MustCompile(`(?i)//[[:space:]]*output:`)
func (b *builder) getExamples(name string) []*Example {
var docs []*Example
for _, e := range b.examples {
if !strings.HasPrefix(e.Name, name) {
continue
}
n := e.Name[len(name):]
if n != "" {
if i := strings.LastIndex(n, "_"); i != 0 {
continue
}
n = n[1:]
if startsWithUppercase(n) {
continue
}
n = strings.Title(n)
}
code, output := b.printExample(e)
play := ""
if e.Play != nil {
b.buf = b.buf[:0]
if err := format.Node(sliceWriter{&b.buf}, b.fset, e.Play); err != nil {
play = err.Error()
} else {
play = string(b.buf)
}
}
docs = append(docs, &Example{
Name: n,
Doc: e.Doc,
Code: code,
Output: output,
Play: play})
}
return docs
}
type Func struct {
Decl Code
Pos Pos
Doc string
Name string
Recv string // Actual receiver "T" or "*T".
Orig string // Original receiver "T" or "*T". This can be different from Recv due to embedding.
Examples []*Example
}
func (b *builder) funcs(fdocs []*doc.Func) []*Func {
var result []*Func
for _, d := range fdocs {
var exampleName string
switch {
case d.Recv == "":
exampleName = d.Name
case d.Recv[0] == '*':
exampleName = d.Recv[1:] + "_" + d.Name
default:
exampleName = d.Recv + "_" + d.Name
}
result = append(result, &Func{
Decl: b.printDecl(d.Decl),
Pos: b.position(d.Decl),
Doc: d.Doc,
Name: d.Name,
Recv: d.Recv,
Orig: d.Orig,
Examples: b.getExamples(exampleName),
})
}
return result
}
type Type struct {
Doc string
Name string
Decl Code
Pos Pos
Consts []*Value
Vars []*Value
Funcs []*Func
Methods []*Func
Examples []*Example
}
func (b *builder) types(tdocs []*doc.Type) []*Type {
var result []*Type
for _, d := range tdocs {
result = append(result, &Type{
Doc: d.Doc,
Name: d.Name,
Decl: b.printDecl(d.Decl),
Pos: b.position(d.Decl),
Consts: b.values(d.Consts),
Vars: b.values(d.Vars),
Funcs: b.funcs(d.Funcs),
Methods: b.funcs(d.Methods),
Examples: b.getExamples(d.Name),
})
}
return result
}
var packageNamePats = []*regexp.Regexp{
// Last element with .suffix removed.
regexp.MustCompile(`/([^-./]+)[-.](?:git|svn|hg|bzr|v\d+)$`),
// Last element with "go" prefix or suffix removed.
regexp.MustCompile(`/([^-./]+)[-.]go$`),
regexp.MustCompile(`/go[-.]([^-./]+)$`),
// Special cases for popular repos.
regexp.MustCompile(`^code\.google\.com/p/google-api-go-client/([^/]+)/v[^/]+$`),
regexp.MustCompile(`^code\.google\.com/p/biogo\.([^/]+)$`),
// It's also common for the last element of the path to contain an
// extra "go" prefix, but not always. TODO: examine unresolved ids to
// detect when trimming the "go" prefix is appropriate.
// Last component of path.
regexp.MustCompile(`([^/]+)$`),
}
func simpleImporter(imports map[string]*ast.Object, path string) (*ast.Object, error) {
pkg := imports[path]
if pkg != nil {
return pkg, nil
}
// Guess the package name without importing it.
for _, pat := range packageNamePats {
m := pat.FindStringSubmatch(path)
if m != nil {
pkg = ast.NewObj(ast.Pkg, m[1])
pkg.Data = ast.NewScope(nil)
imports[path] = pkg
return pkg, nil
}
}
return nil, errors.New("package not found")
}
type File struct {
Name string
URL string
}
type Pos struct {
Line int32 // 0 if not valid.
N uint16 // number of lines - 1
File int16 // index in Package.Files
}
type source struct {
name string
browseURL string
data []byte
index int
}
// PackageVersion is modified when previously stored packages are invalid.
const PackageVersion = "8"
type Package struct {
// The import path for this package.
ImportPath string
// Import path prefix for all packages in the project.
ProjectRoot string
// Name of the project.
ProjectName string
// Project home page.
ProjectURL string
// Errors found when fetching or parsing this package.
Errors []string
// Packages referenced in README files.
References []string
// Version control system: git, hg, bzr, ...
VCS string
// Version control: active or suppressed.
Status gosrc.DirectoryStatus
// Whether the package is a fork of another one.
Fork bool
// How many stars (for a GitHub project) or followers (for a BitBucket
// project) the repository of this package has.
Stars int
// The time this object was created.
Updated time.Time
// Cache validation tag. This tag is not necessarily an HTTP entity tag.
// The tag is "" if there is no meaningful cache validation for the VCS.
Etag string
// Subdirectories, possibly containing Go code.
Subdirectories []string
// Package name or "" if no package for this import path. The proceeding
// fields are set even if a package is not found for the import path.
Name string
// Synopsis and full documentation for the package.
Synopsis string
Doc string
// Format this package as a command.
IsCmd bool
// True if package documentation is incomplete.
Truncated bool
// Environment
GOOS, GOARCH string
// Top-level declarations.
Consts []*Value
Funcs []*Func
Types []*Type
Vars []*Value
// Package examples
Examples []*Example
Notes map[string][]*Note
// Source.
LineFmt string
BrowseURL string
Files []*File
TestFiles []*File
// Source size in bytes.
SourceSize int
TestSourceSize int
// Imports
Imports []string
TestImports []string
XTestImports []string
}
var goEnvs = []struct{ GOOS, GOARCH string }{
{"linux", "amd64"},
{"darwin", "amd64"},
{"windows", "amd64"},
{"linux", "js"},
}
// SetDefaultGOOS sets the given GOOS value as the default one to use when
// building package documents. SetDefaultGOOS has no effect on some
// windows-only packages.
func SetDefaultGOOS(goos string) {
if goos == "" {
return
}
var i int
for ; i < len(goEnvs); i++ {
if goEnvs[i].GOOS == goos {
break
}
}
switch i {
case 0:
return
case len(goEnvs):
env := goEnvs[0]
env.GOOS = goos
goEnvs = append(goEnvs, env)
}
goEnvs[0], goEnvs[i] = goEnvs[i], goEnvs[0]
}
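// For illustration (hypothetical call, not made anywhere in this package):
// SetDefaultGOOS("darwin") swaps {"darwin", "amd64"} to the front of goEnvs,
// so documentation is built for darwin/amd64 first and the remaining
// GOOS/GOARCH pairs are only tried when that build finds no Go files.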
var windowsOnlyPackages = map[string]bool{
"internal/syscall/windows": true,
"internal/syscall/windows/registry": true,
"golang.org/x/exp/shiny/driver/internal/win32": true,
"golang.org/x/exp/shiny/driver/windriver": true,
"golang.org/x/sys/windows": true,
"golang.org/x/sys/windows/registry": true,
}
func newPackage(dir *gosrc.Directory) (*Package, error) {
pkg := &Package{
Updated: time.Now().UTC(),
LineFmt: dir.LineFmt,
ImportPath: dir.ImportPath,
ProjectRoot: dir.ProjectRoot,
ProjectName: dir.ProjectName,
ProjectURL: dir.ProjectURL,
BrowseURL: dir.BrowseURL,
Etag: PackageVersion + "-" + dir.Etag,
VCS: dir.VCS,
Status: dir.Status,
Subdirectories: dir.Subdirectories,
Fork: dir.Fork,
Stars: dir.Stars,
}
var b builder
b.srcs = make(map[string]*source)
references := make(map[string]bool)
for _, file := range dir.Files {
if strings.HasSuffix(file.Name, ".go") {
gosrc.OverwriteLineComments(file.Data)
b.srcs[file.Name] = &source{name: file.Name, browseURL: file.BrowseURL, data: file.Data}
} else {
addReferences(references, file.Data)
}
}
for r := range references {
pkg.References = append(pkg.References, r)
}
if len(b.srcs) == 0 {
return pkg, nil
}
b.fset = token.NewFileSet()
// Find the package and associated files.
ctxt := build.Context{
GOOS: "linux",
GOARCH: "amd64",
CgoEnabled: true,
ReleaseTags: build.Default.ReleaseTags,
BuildTags: build.Default.BuildTags,
Compiler: "gc",
}
var err error
var bpkg *build.Package
for _, env := range goEnvs {
// Some packages should always be displayed as GOOS=windows (see issue #16509 for details).
// TODO: remove this once issue #16509 is resolved.
if windowsOnlyPackages[dir.ImportPath] && env.GOOS != "windows" {
continue
}
ctxt.GOOS = env.GOOS
ctxt.GOARCH = env.GOARCH
bpkg, err = dir.Import(&ctxt, build.ImportComment)
if _, ok := err.(*build.NoGoError); !ok {
break
}
}
if err != nil {
if _, ok := err.(*build.NoGoError); !ok {
pkg.Errors = append(pkg.Errors, err.Error())
}
return pkg, nil
}
if bpkg.ImportComment != "" && bpkg.ImportComment != dir.ImportPath {
return nil, gosrc.NotFoundError{
Message: "not at canonical import path",
Redirect: bpkg.ImportComment,
}
}
// Parse the Go files
files := make(map[string]*ast.File)
names := append(bpkg.GoFiles, bpkg.CgoFiles...)
sort.Strings(names)
pkg.Files = make([]*File, len(names))
for i, name := range names {
file, err := parser.ParseFile(b.fset, name, b.srcs[name].data, parser.ParseComments)
if err != nil {
pkg.Errors = append(pkg.Errors, err.Error())
} else {
files[name] = file
}
src := b.srcs[name]
src.index = i
pkg.Files[i] = &File{Name: name, URL: src.browseURL}
pkg.SourceSize += len(src.data)
}
apkg, _ := ast.NewPackage(b.fset, files, simpleImporter, nil)
// Find examples in the test files.
names = append(bpkg.TestGoFiles, bpkg.XTestGoFiles...)
sort.Strings(names)
pkg.TestFiles = make([]*File, len(names))
for i, name := range names {
file, err := parser.ParseFile(b.fset, name, b.srcs[name].data, parser.ParseComments)
if err != nil {
pkg.Errors = append(pkg.Errors, err.Error())
} else {
b.examples = append(b.examples, doc.Examples(file)...)
}
pkg.TestFiles[i] = &File{Name: name, URL: b.srcs[name].browseURL}
pkg.TestSourceSize += len(b.srcs[name].data)
}
b.vetPackage(pkg, apkg)
mode := doc.Mode(0)
if pkg.ImportPath == "builtin" {
mode |= doc.AllDecls
}
dpkg := doc.New(apkg, pkg.ImportPath, mode)
if pkg.ImportPath == "builtin" {
removeAssociations(dpkg)
}
pkg.Name = dpkg.Name
pkg.Doc = strings.TrimRight(dpkg.Doc, " \t\n\r")
pkg.Synopsis = synopsis(pkg.Doc)
pkg.Examples = b.getExamples("")
pkg.IsCmd = bpkg.IsCommand()
pkg.GOOS = ctxt.GOOS
pkg.GOARCH = ctxt.GOARCH
pkg.Consts = b.values(dpkg.Consts)
pkg.Funcs = b.funcs(dpkg.Funcs)
pkg.Types = b.types(dpkg.Types)
pkg.Vars = b.values(dpkg.Vars)
pkg.Notes = b.notes(dpkg.Notes)
pkg.Imports = bpkg.Imports
pkg.TestImports = bpkg.TestImports
pkg.XTestImports = bpkg.XTestImports
return pkg, nil
}

114
vendor/github.com/golang/gddo/doc/builder_test.go generated vendored Normal file

@@ -0,0 +1,114 @@
// Copyright 2013 The Go Authors. All rights reserved.
//
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file or at
// https://developers.google.com/open-source/licenses/bsd.
package doc
import (
"go/ast"
"testing"
)
var badSynopsis = []string{
"+build !release",
"COPYRIGHT Jimmy Bob",
"### Markdown heading",
"-*- indent-tabs-mode: nil -*-",
"vim:set ts=2 sw=2 et ai ft=go:",
}
func TestBadSynopsis(t *testing.T) {
for _, s := range badSynopsis {
if synopsis(s) != "" {
t.Errorf(`synopsis(%q) did not return ""`, s)
}
}
}
const readme = `
$ go get github.com/user/repo/pkg1
[foo](http://gopkgdoc.appspot.com/pkg/github.com/user/repo/pkg2)
[foo](http://go.pkgdoc.org/github.com/user/repo/pkg3)
[foo](http://godoc.org/github.com/user/repo/pkg4)
<http://go.pkgdoc.org/github.com/user/repo/pkg5>
[foo](http://godoc.org/github.com/user/repo/pkg6#Export)
http://gowalker.org/github.com/user/repo/pkg7
Build Status: [![Build Status](https://drone.io/github.com/user/repo1/status.png)](https://drone.io/github.com/user/repo1/latest)
'go get example.org/package1' will install package1.
(http://go.pkgdoc.org/example.org/package2 "Package2's documentation on GoPkgDoc").
import "example.org/package3"
`
var expectedReferences = []string{
"github.com/user/repo/pkg1",
"github.com/user/repo/pkg2",
"github.com/user/repo/pkg3",
"github.com/user/repo/pkg4",
"github.com/user/repo/pkg5",
"github.com/user/repo/pkg6",
"github.com/user/repo/pkg7",
"github.com/user/repo1",
"example.org/package1",
"example.org/package2",
"example.org/package3",
}
func TestReferences(t *testing.T) {
references := make(map[string]bool)
addReferences(references, []byte(readme))
for _, r := range expectedReferences {
if !references[r] {
t.Errorf("missing %s", r)
}
delete(references, r)
}
for r := range references {
t.Errorf("extra %s", r)
}
}
var simpleImporterTests = []struct {
path string
name string
}{
// Last element with .suffix removed.
{"example.com/user/name.git", "name"},
{"example.com/user/name.svn", "name"},
{"example.com/user/name.hg", "name"},
{"example.com/user/name.bzr", "name"},
{"example.com/name.v0", "name"},
{"example.com/user/repo/name.v11", "name"},
// Last element with "go" prefix or suffix removed.
{"github.com/user/go-name", "name"},
{"github.com/user/go.name", "name"},
{"github.com/user/name.go", "name"},
{"github.com/user/name-go", "name"},
// Special cases for popular repos.
{"code.google.com/p/biogo.name", "name"},
{"code.google.com/p/google-api-go-client/name/v3", "name"},
// Use last element of path.
{"example.com/user/name.other", "name.other"},
{"example.com/.v0", ".v0"},
{"example.com/user/repo.v2/name", "name"},
{"github.com/user/namev0", "namev0"},
{"github.com/user/goname", "goname"},
{"github.com/user/namego", "namego"},
{"github.com/user/name", "name"},
{"name", "name"},
{"user/name", "name"},
}
func TestSimpleImporter(t *testing.T) {
for _, tt := range simpleImporterTests {
m := make(map[string]*ast.Object)
obj, _ := simpleImporter(m, tt.path)
if obj.Name != tt.name {
t.Errorf("simpleImporter(%q) = %q, want %q", tt.path, obj.Name, tt.name)
}
}
}

359
vendor/github.com/golang/gddo/doc/code.go generated vendored Normal file

@@ -0,0 +1,359 @@
// Copyright 2013 The Go Authors. All rights reserved.
//
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file or at
// https://developers.google.com/open-source/licenses/bsd.
package doc
import (
"bytes"
"fmt"
"go/ast"
"go/doc"
"go/printer"
"go/scanner"
"go/token"
"math"
"strconv"
)
const (
notPredeclared = iota
predeclaredType
predeclaredConstant
predeclaredFunction
)
// predeclared represents the set of all predeclared identifiers.
var predeclared = map[string]int{
"bool": predeclaredType,
"byte": predeclaredType,
"complex128": predeclaredType,
"complex64": predeclaredType,
"error": predeclaredType,
"float32": predeclaredType,
"float64": predeclaredType,
"int16": predeclaredType,
"int32": predeclaredType,
"int64": predeclaredType,
"int8": predeclaredType,
"int": predeclaredType,
"rune": predeclaredType,
"string": predeclaredType,
"uint16": predeclaredType,
"uint32": predeclaredType,
"uint64": predeclaredType,
"uint8": predeclaredType,
"uint": predeclaredType,
"uintptr": predeclaredType,
"true": predeclaredConstant,
"false": predeclaredConstant,
"iota": predeclaredConstant,
"nil": predeclaredConstant,
"append": predeclaredFunction,
"cap": predeclaredFunction,
"close": predeclaredFunction,
"complex": predeclaredFunction,
"copy": predeclaredFunction,
"delete": predeclaredFunction,
"imag": predeclaredFunction,
"len": predeclaredFunction,
"make": predeclaredFunction,
"new": predeclaredFunction,
"panic": predeclaredFunction,
"print": predeclaredFunction,
"println": predeclaredFunction,
"real": predeclaredFunction,
"recover": predeclaredFunction,
}
type AnnotationKind int16
const (
// Link to export in package specified by Paths[PathIndex] with fragment
// Text[strings.LastIndex(Text[Pos:End], ".")+1:End].
LinkAnnotation AnnotationKind = iota
// Anchor with name specified by Text[Pos:End] or typeName + "." +
// Text[Pos:End] for type declarations.
AnchorAnnotation
// Comment.
CommentAnnotation
// Link to package specified by Paths[PathIndex].
PackageLinkAnnotation
// Link to builtin entity with name Text[Pos:End].
BuiltinAnnotation
)
type Annotation struct {
Pos, End int32
Kind AnnotationKind
PathIndex int16
}
type Code struct {
Text string
Annotations []Annotation
Paths []string
}
// declVisitor modifies a declaration AST for printing and collects annotations.
type declVisitor struct {
annotations []Annotation
paths []string
pathIndex map[string]int
comments []*ast.CommentGroup
}
func (v *declVisitor) add(kind AnnotationKind, importPath string) {
pathIndex := -1
if importPath != "" {
var ok bool
pathIndex, ok = v.pathIndex[importPath]
if !ok {
pathIndex = len(v.paths)
v.paths = append(v.paths, importPath)
v.pathIndex[importPath] = pathIndex
}
}
v.annotations = append(v.annotations, Annotation{Kind: kind, PathIndex: int16(pathIndex)})
}
func (v *declVisitor) ignoreName() {
v.add(-1, "")
}
func (v *declVisitor) Visit(n ast.Node) ast.Visitor {
switch n := n.(type) {
case *ast.TypeSpec:
v.ignoreName()
switch n := n.Type.(type) {
case *ast.InterfaceType:
for _, f := range n.Methods.List {
for _ = range f.Names {
v.add(AnchorAnnotation, "")
}
ast.Walk(v, f.Type)
}
case *ast.StructType:
for _, f := range n.Fields.List {
for _ = range f.Names {
v.add(AnchorAnnotation, "")
}
ast.Walk(v, f.Type)
}
default:
ast.Walk(v, n)
}
case *ast.FuncDecl:
if n.Recv != nil {
ast.Walk(v, n.Recv)
}
v.ignoreName()
ast.Walk(v, n.Type)
case *ast.Field:
for _ = range n.Names {
v.ignoreName()
}
ast.Walk(v, n.Type)
case *ast.ValueSpec:
for _ = range n.Names {
v.add(AnchorAnnotation, "")
}
if n.Type != nil {
ast.Walk(v, n.Type)
}
for _, x := range n.Values {
ast.Walk(v, x)
}
case *ast.Ident:
switch {
case n.Obj == nil && predeclared[n.Name] != notPredeclared:
v.add(BuiltinAnnotation, "")
case n.Obj != nil && ast.IsExported(n.Name):
v.add(LinkAnnotation, "")
default:
v.ignoreName()
}
case *ast.SelectorExpr:
if x, _ := n.X.(*ast.Ident); x != nil {
if obj := x.Obj; obj != nil && obj.Kind == ast.Pkg {
if spec, _ := obj.Decl.(*ast.ImportSpec); spec != nil {
if path, err := strconv.Unquote(spec.Path.Value); err == nil {
v.add(PackageLinkAnnotation, path)
if path == "C" {
v.ignoreName()
} else {
v.add(LinkAnnotation, path)
}
return nil
}
}
}
}
ast.Walk(v, n.X)
v.ignoreName()
case *ast.BasicLit:
if n.Kind == token.STRING && len(n.Value) > 128 {
v.comments = append(v.comments,
&ast.CommentGroup{List: []*ast.Comment{{
Slash: n.Pos(),
Text: fmt.Sprintf("/* %d byte string literal not displayed */", len(n.Value)),
}}})
n.Value = `""`
} else {
return v
}
case *ast.CompositeLit:
if len(n.Elts) > 100 {
if n.Type != nil {
ast.Walk(v, n.Type)
}
v.comments = append(v.comments,
&ast.CommentGroup{List: []*ast.Comment{{
Slash: n.Lbrace,
Text: fmt.Sprintf("/* %d elements not displayed */", len(n.Elts)),
}}})
n.Elts = n.Elts[:0]
} else {
return v
}
default:
return v
}
return nil
}
func (b *builder) printDecl(decl ast.Decl) (d Code) {
v := &declVisitor{pathIndex: make(map[string]int)}
ast.Walk(v, decl)
b.buf = b.buf[:0]
err := (&printer.Config{Mode: printer.UseSpaces, Tabwidth: 4}).Fprint(
sliceWriter{&b.buf},
b.fset,
&printer.CommentedNode{Node: decl, Comments: v.comments})
if err != nil {
return Code{Text: err.Error()}
}
var annotations []Annotation
var s scanner.Scanner
fset := token.NewFileSet()
file := fset.AddFile("", fset.Base(), len(b.buf))
s.Init(file, b.buf, nil, scanner.ScanComments)
prevTok := token.ILLEGAL
loop:
for {
pos, tok, lit := s.Scan()
switch tok {
case token.EOF:
break loop
case token.COMMENT:
p := file.Offset(pos)
e := p + len(lit)
if prevTok == token.COMMENT {
annotations[len(annotations)-1].End = int32(e)
} else {
annotations = append(annotations, Annotation{Kind: CommentAnnotation, Pos: int32(p), End: int32(e)})
}
case token.IDENT:
if len(v.annotations) == 0 {
// Oops!
break loop
}
annotation := v.annotations[0]
v.annotations = v.annotations[1:]
if annotation.Kind == -1 {
continue
}
p := file.Offset(pos)
e := p + len(lit)
annotation.Pos = int32(p)
annotation.End = int32(e)
annotations = append(annotations, annotation)
}
prevTok = tok
}
return Code{Text: string(b.buf), Annotations: annotations, Paths: v.paths}
}
func (b *builder) position(n ast.Node) Pos {
var position Pos
pos := b.fset.Position(n.Pos())
src := b.srcs[pos.Filename]
if src != nil {
position.File = int16(src.index)
position.Line = int32(pos.Line)
end := b.fset.Position(n.End())
if src == b.srcs[end.Filename] {
n := end.Line - pos.Line
if n >= 0 && n <= math.MaxUint16 {
position.N = uint16(n)
}
}
}
return position
}
func (b *builder) printExample(e *doc.Example) (code Code, output string) {
output = e.Output
b.buf = b.buf[:0]
var n interface{}
if _, ok := e.Code.(*ast.File); ok {
n = e.Play
} else {
n = &printer.CommentedNode{Node: e.Code, Comments: e.Comments}
}
err := (&printer.Config{Mode: printer.UseSpaces, Tabwidth: 4}).Fprint(sliceWriter{&b.buf}, b.fset, n)
if err != nil {
return Code{Text: err.Error()}, output
}
// additional formatting if this is a function body
if i := len(b.buf); i >= 2 && b.buf[0] == '{' && b.buf[i-1] == '}' {
// remove surrounding braces
b.buf = b.buf[1 : i-1]
// unindent
b.buf = bytes.Replace(b.buf, []byte("\n "), []byte("\n"), -1)
// remove output comment
if j := exampleOutputRx.FindIndex(b.buf); j != nil {
b.buf = bytes.TrimSpace(b.buf[:j[0]])
}
} else {
// drop output, as the output comment will appear in the code
output = ""
}
var annotations []Annotation
var s scanner.Scanner
fset := token.NewFileSet()
file := fset.AddFile("", fset.Base(), len(b.buf))
s.Init(file, b.buf, nil, scanner.ScanComments)
prevTok := token.ILLEGAL
scanLoop:
for {
pos, tok, lit := s.Scan()
switch tok {
case token.EOF:
break scanLoop
case token.COMMENT:
p := file.Offset(pos)
e := p + len(lit)
if prevTok == token.COMMENT {
annotations[len(annotations)-1].End = int32(e)
} else {
annotations = append(annotations, Annotation{Kind: CommentAnnotation, Pos: int32(p), End: int32(e)})
}
}
prevTok = tok
}
return Code{Text: string(b.buf), Annotations: annotations}, output
}
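
The Code values produced by printDecl and printExample are plain text plus offset ranges; a presentation layer decides what each annotation becomes. The following is a minimal sketch of such a consumer (the renderHTML helper and the <span> markup are assumptions for illustration, not part of gddo):

    package main

    import (
        "bytes"
        "fmt"
        "html"

        "github.com/golang/gddo/doc"
    )

    // renderHTML wraps every annotated span of c.Text in a placeholder tag.
    func renderHTML(c doc.Code) string {
        var buf bytes.Buffer
        last := int32(0)
        for _, a := range c.Annotations {
            buf.WriteString(html.EscapeString(c.Text[last:a.Pos]))
            buf.WriteString("<span>")
            buf.WriteString(html.EscapeString(c.Text[a.Pos:a.End]))
            buf.WriteString("</span>")
            last = a.End
        }
        buf.WriteString(html.EscapeString(c.Text[last:]))
        return buf.String()
    }

    func main() {
        c := doc.Code{
            Text:        "func Hello() string",
            Annotations: []doc.Annotation{{Pos: 5, End: 10, Kind: doc.AnchorAnnotation}},
        }
        fmt.Println(renderHTML(c)) // func <span>Hello</span>() string
    }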

55
vendor/github.com/golang/gddo/doc/get.go generated vendored Normal file

@@ -0,0 +1,55 @@
// Copyright 2013 The Go Authors. All rights reserved.
//
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file or at
// https://developers.google.com/open-source/licenses/bsd.
// Package doc fetches Go package documentation from version control services.
package doc
import (
"github.com/golang/gddo/gosrc"
"go/doc"
"net/http"
"strings"
)
func Get(client *http.Client, importPath string, etag string) (*Package, error) {
const versionPrefix = PackageVersion + "-"
if strings.HasPrefix(etag, versionPrefix) {
etag = etag[len(versionPrefix):]
} else {
etag = ""
}
dir, err := gosrc.Get(client, importPath, etag)
if err != nil {
return nil, err
}
pdoc, err := newPackage(dir)
if err != nil {
return pdoc, err
}
if pdoc.Synopsis == "" &&
pdoc.Doc == "" &&
!pdoc.IsCmd &&
pdoc.Name != "" &&
dir.ImportPath == dir.ProjectRoot &&
len(pdoc.Errors) == 0 {
project, err := gosrc.GetProject(client, dir.ResolvedPath)
switch {
case err == nil:
pdoc.Synopsis = doc.Synopsis(project.Description)
case gosrc.IsNotFound(err):
// ok
default:
return nil, err
}
}
return pdoc, nil
}

69
vendor/github.com/golang/gddo/doc/goprint.go generated vendored Normal file

@@ -0,0 +1,69 @@
// Copyright 2013 The Go Authors. All rights reserved.
//
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file or at
// https://developers.google.com/open-source/licenses/bsd.
// +build ignore
// Command goprint prints the AST for a package.
//
// Usage: go run goprint.go path
package main
import (
"flag"
"go/ast"
"go/build"
"go/doc"
"go/parser"
"go/token"
"io/ioutil"
"log"
"path/filepath"
"strings"
"github.com/davecgh/go-spew/spew"
)
func importer(imports map[string]*ast.Object, path string) (*ast.Object, error) {
pkg := imports[path]
if pkg == nil {
name := path[strings.LastIndex(path, "/")+1:]
pkg = ast.NewObj(ast.Pkg, name)
pkg.Data = ast.NewScope(nil) // required by ast.NewPackage for dot-import
imports[path] = pkg
}
return pkg, nil
}
func main() {
flag.Parse()
if len(flag.Args()) != 1 {
log.Fatal("Usage: go run goprint.go path")
}
bpkg, err := build.Default.Import(flag.Args()[0], ".", 0)
if err != nil {
log.Fatal(err)
}
fset := token.NewFileSet()
files := make(map[string]*ast.File)
for _, fname := range bpkg.GoFiles {
p, err := ioutil.ReadFile(filepath.Join(bpkg.SrcRoot, bpkg.ImportPath, fname))
if err != nil {
log.Fatal(err)
}
file, err := parser.ParseFile(fset, fname, p, parser.ParseComments)
if err != nil {
log.Fatal(err)
}
files[fname] = file
}
c := spew.NewDefaultConfig()
c.DisableMethods = true
apkg, _ := ast.NewPackage(fset, files, importer, nil)
c.Dump(apkg)
ast.Print(fset, apkg)
dpkg := doc.New(apkg, bpkg.ImportPath, 0)
c.Dump(dpkg)
}

50
vendor/github.com/golang/gddo/doc/print.go generated vendored Normal file

@@ -0,0 +1,50 @@
// Copyright 2013 The Go Authors. All rights reserved.
//
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file or at
// https://developers.google.com/open-source/licenses/bsd.
// +build ignore
// Command print fetches and prints package documentation.
//
// Usage: go run print.go importPath
package main
import (
"flag"
"log"
"net/http"
"os"
"github.com/davecgh/go-spew/spew"
"github.com/golang/gddo/doc"
"github.com/golang/gddo/gosrc"
)
var (
etag = flag.String("etag", "", "Etag")
local = flag.Bool("local", false, "Get package from local directory.")
)
func main() {
flag.Parse()
if len(flag.Args()) != 1 {
log.Fatal("Usage: go run print.go importPath")
}
path := flag.Args()[0]
var (
pdoc *doc.Package
err error
)
if *local {
gosrc.SetLocalDevMode(os.Getenv("GOPATH"))
}
pdoc, err = doc.Get(http.DefaultClient, path, *etag)
if err != nil {
log.Fatal(err)
}
spew.Dump(pdoc)
}

14
vendor/github.com/golang/gddo/doc/util.go generated vendored Normal file

@@ -0,0 +1,14 @@
// Copyright 2013 The Go Authors. All rights reserved.
//
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file or at
// https://developers.google.com/open-source/licenses/bsd.
package doc
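// sliceWriter adapts a pointer to a byte slice to the io.Writer interface so
// that go/printer and go/format can write straight into builder.buf, whose
// backing array is reused between calls.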
type sliceWriter struct{ p *[]byte }
func (w sliceWriter) Write(p []byte) (int, error) {
*w.p = append(*w.p, p...)
return len(p), nil
}

81
vendor/github.com/golang/gddo/doc/vet.go generated vendored Normal file

@@ -0,0 +1,81 @@
// Copyright 2013 The Go Authors. All rights reserved.
//
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file or at
// https://developers.google.com/open-source/licenses/bsd.
package doc
import (
"fmt"
"go/ast"
"go/token"
"strconv"
"strings"
"github.com/golang/gddo/gosrc"
)
// This list of deprecated exports is used to find code that has not been
// updated for Go 1.
var deprecatedExports = map[string][]string{
`"bytes"`: {"Add"},
`"crypto/aes"`: {"Cipher"},
`"crypto/hmac"`: {"NewSHA1", "NewSHA256"},
`"crypto/rand"`: {"Seed"},
`"encoding/json"`: {"MarshalForHTML"},
`"encoding/xml"`: {"Marshaler", "NewParser", "Parser"},
`"html"`: {"NewTokenizer", "Parse"},
`"image"`: {"Color", "NRGBAColor", "RGBAColor"},
`"io"`: {"Copyn"},
`"log"`: {"Exitf"},
`"math"`: {"Fabs", "Fmax", "Fmod"},
`"os"`: {"Envs", "Error", "Getenverror", "NewError", "Time", "UnixSignal", "Wait"},
`"reflect"`: {"MapValue", "Typeof"},
`"runtime"`: {"UpdateMemStats"},
`"strconv"`: {"Atob", "Atof32", "Atof64", "AtofN", "Atoi64", "Atoui", "Atoui64", "Btoui64", "Ftoa64", "Itoa64", "Uitoa", "Uitoa64"},
`"time"`: {"LocalTime", "Nanoseconds", "NanosecondsToLocalTime", "Seconds", "SecondsToLocalTime", "SecondsToUTC"},
`"unicode/utf8"`: {"NewString"},
}
type vetVisitor struct {
errors map[string]token.Pos
}
func (v *vetVisitor) Visit(n ast.Node) ast.Visitor {
if sel, ok := n.(*ast.SelectorExpr); ok {
if x, _ := sel.X.(*ast.Ident); x != nil {
if obj := x.Obj; obj != nil && obj.Kind == ast.Pkg {
if spec, _ := obj.Decl.(*ast.ImportSpec); spec != nil {
for _, name := range deprecatedExports[spec.Path.Value] {
if name == sel.Sel.Name {
v.errors[fmt.Sprintf("%s.%s not found", spec.Path.Value, sel.Sel.Name)] = n.Pos()
return nil
}
}
}
}
}
}
return v
}
func (b *builder) vetPackage(pkg *Package, apkg *ast.Package) {
errors := make(map[string]token.Pos)
for _, file := range apkg.Files {
for _, is := range file.Imports {
importPath, _ := strconv.Unquote(is.Path.Value)
if !gosrc.IsValidPath(importPath) &&
!strings.HasPrefix(importPath, "exp/") &&
!strings.HasPrefix(importPath, "appengine") {
errors[fmt.Sprintf("Unrecognized import path %q", importPath)] = is.Pos()
}
}
v := vetVisitor{errors: errors}
ast.Walk(&v, file)
}
for message, pos := range errors {
pkg.Errors = append(pkg.Errors,
fmt.Sprintf("%s (%s)", message, b.fset.Position(pos)))
}
}
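// For illustration (hypothetical file, not part of this package): a source
// file that imports "old/template" and calls math.Fabs would contribute
// errors roughly of the form
//
//	Unrecognized import path "old/template" (demo.go:5:2)
//	"math".Fabs not found (demo.go:12:9)
//
// to pkg.Errors, flagging code that predates Go 1.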

33
vendor/github.com/golang/gddo/gddo-admin/block.go generated vendored Normal file

@@ -0,0 +1,33 @@
// Copyright 2013 The Go Authors. All rights reserved.
//
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file or at
// https://developers.google.com/open-source/licenses/bsd.
package main
import (
"github.com/golang/gddo/database"
"log"
"os"
)
var blockCommand = &command{
name: "block",
run: block,
usage: "block path",
}
func block(c *command) {
if len(c.flag.Args()) != 1 {
c.printUsage()
os.Exit(1)
}
db, err := database.New()
if err != nil {
log.Fatal(err)
}
if err := db.Block(c.flag.Args()[0]); err != nil {
log.Fatal(err)
}
}

65
vendor/github.com/golang/gddo/gddo-admin/crawl.go generated vendored Normal file

@@ -0,0 +1,65 @@
// Copyright 2013 The Go Authors. All rights reserved.
//
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file or at
// https://developers.google.com/open-source/licenses/bsd.
package main
import (
"fmt"
"io/ioutil"
"log"
"os"
"strings"
"github.com/garyburd/redigo/redis"
"github.com/golang/gddo/database"
)
var crawlCommand = &command{
name: "crawl",
run: crawl,
usage: "crawl [new]",
}
func crawl(c *command) {
if len(c.flag.Args()) > 1 {
c.printUsage()
os.Exit(1)
}
db, err := database.New()
if err != nil {
log.Fatal(err)
}
if len(c.flag.Args()) == 1 {
p, err := ioutil.ReadFile(c.flag.Args()[0])
if err != nil {
log.Fatal(err)
}
for _, p := range strings.Fields(string(p)) {
db.AddNewCrawl(p)
}
}
conn := db.Pool.Get()
defer conn.Close()
paths, err := redis.Strings(conn.Do("SMEMBERS", "newCrawl"))
if err != nil {
log.Fatal(err)
}
fmt.Println("NEW")
for _, path := range paths {
fmt.Println(path)
}
paths, err = redis.Strings(conn.Do("SMEMBERS", "badCrawl"))
if err != nil {
log.Fatal(err)
}
fmt.Println("BAD")
for _, path := range paths {
fmt.Println(path)
}
}
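
For reference, the crawl command above queues paths with AddNewCrawl and then lists the newCrawl and badCrawl Redis sets through redigo. A minimal sketch of the same set read, assuming a locally running Redis; the address and import path are placeholders:

package main

import (
    "fmt"
    "log"

    "github.com/garyburd/redigo/redis"
)

func main() {
    // Address is illustrative; gddo takes the Redis address from its own config.
    conn, err := redis.Dial("tcp", "localhost:6379")
    if err != nil {
        log.Fatal(err)
    }
    defer conn.Close()

    // Seed one member directly (the admin command goes through db.AddNewCrawl),
    // then list the set the same way the crawl command does.
    if _, err := conn.Do("SADD", "newCrawl", "github.com/example/pkg"); err != nil {
        log.Fatal(err)
    }
    paths, err := redis.Strings(conn.Do("SMEMBERS", "newCrawl"))
    if err != nil {
        log.Fatal(err)
    }
    fmt.Println(paths)
}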

59
vendor/github.com/golang/gddo/gddo-admin/dangle.go generated vendored Normal file
View File

@@ -0,0 +1,59 @@
// Copyright 2013 The Go Authors. All rights reserved.
//
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file or at
// https://developers.google.com/open-source/licenses/bsd.
package main
import (
"fmt"
"log"
"os"
"github.com/golang/gddo/database"
"github.com/golang/gddo/gosrc"
)
var dangleCommand = &command{
name: "dangle",
run: dangle,
usage: "dangle",
}
func dangle(c *command) {
if len(c.flag.Args()) != 0 {
c.printUsage()
os.Exit(1)
}
db, err := database.New()
if err != nil {
log.Fatal(err)
}
m := make(map[string]int)
err = db.Do(func(pi *database.PackageInfo) error {
m[pi.PDoc.ImportPath] |= 1
for _, p := range pi.PDoc.Imports {
if gosrc.IsValidPath(p) {
m[p] |= 2
}
}
for _, p := range pi.PDoc.TestImports {
if gosrc.IsValidPath(p) {
m[p] |= 2
}
}
for _, p := range pi.PDoc.XTestImports {
if gosrc.IsValidPath(p) {
m[p] |= 2
}
}
return nil
})
for p, v := range m {
if v == 2 {
fmt.Println(p)
}
}
}
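
For reference, the bitmask above marks each path with 1 when it is indexed and with 2 when some indexed package imports it, so a final value of exactly 2 means the path is imported but never indexed, i.e. dangling. A tiny standalone sketch of the same bookkeeping with made-up paths:

package main

import "fmt"

func main() {
    // 1 = present in the index, 2 = referenced by an import.
    m := make(map[string]int)
    indexed := []string{"a", "b"}
    imports := map[string][]string{"a": {"b", "c"}, "b": {"c"}}

    for _, p := range indexed {
        m[p] |= 1
    }
    for _, deps := range imports {
        for _, d := range deps {
            m[d] |= 2
        }
    }
    for p, v := range m {
        if v == 2 { // referenced but never indexed
            fmt.Println("dangling:", p) // prints: dangling: c
        }
    }
}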

34
vendor/github.com/golang/gddo/gddo-admin/delete.go generated vendored Normal file
View File

@@ -0,0 +1,34 @@
// Copyright 2013 The Go Authors. All rights reserved.
//
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file or at
// https://developers.google.com/open-source/licenses/bsd.
package main
import (
"log"
"os"
"github.com/golang/gddo/database"
)
var deleteCommand = &command{
name: "delete",
run: del,
usage: "delete path",
}
func del(c *command) {
if len(c.flag.Args()) != 1 {
c.printUsage()
os.Exit(1)
}
db, err := database.New()
if err != nil {
log.Fatal(err)
}
if err := db.Delete(c.flag.Args()[0]); err != nil {
log.Fatal(err)
}
}

70
vendor/github.com/golang/gddo/gddo-admin/main.go generated vendored Normal file
View File

@@ -0,0 +1,70 @@
// Copyright 2013 The Go Authors. All rights reserved.
//
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file or at
// https://developers.google.com/open-source/licenses/bsd.
// Command gddo-admin is the GoDoc.org command line administration tool.
package main
import (
"flag"
"fmt"
"os"
"strings"
)
type command struct {
name string
run func(c *command)
flag flag.FlagSet
usage string
}
func (c *command) printUsage() {
fmt.Fprintf(os.Stderr, "%s %s\n", os.Args[0], c.usage)
c.flag.PrintDefaults()
}
var commands = []*command{
blockCommand,
reindexCommand,
deleteCommand,
popularCommand,
dangleCommand,
crawlCommand,
statsCommand,
}
func printUsage() {
var n []string
for _, c := range commands {
n = append(n, c.name)
}
fmt.Fprintf(os.Stderr, "%s %s\n", os.Args[0], strings.Join(n, "|"))
flag.PrintDefaults()
for _, c := range commands {
c.printUsage()
}
}
func main() {
flag.Usage = printUsage
flag.Parse()
args := flag.Args()
if len(args) >= 1 {
for _, c := range commands {
if args[0] == c.name {
c.flag.Usage = func() {
c.printUsage()
os.Exit(2)
}
c.flag.Parse(args[1:])
c.run(c)
return
}
}
}
printUsage()
os.Exit(2)
}

44
vendor/github.com/golang/gddo/gddo-admin/popular.go generated vendored Normal file
View File

@@ -0,0 +1,44 @@
// Copyright 2013 The Go Authors. All rights reserved.
//
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file or at
// https://developers.google.com/open-source/licenses/bsd.
package main
import (
"fmt"
"log"
"os"
"github.com/golang/gddo/database"
)
var (
popularCommand = &command{
name: "popular",
usage: "popular",
}
)
func init() {
popularCommand.run = popular
}
func popular(c *command) {
if len(c.flag.Args()) != 0 {
c.printUsage()
os.Exit(1)
}
db, err := database.New()
if err != nil {
log.Fatal(err)
}
pkgs, err := db.PopularWithScores()
if err != nil {
log.Fatal(err)
}
for _, pkg := range pkgs {
fmt.Println(pkg.Path, pkg.Synopsis)
}
}

68
vendor/github.com/golang/gddo/gddo-admin/reindex.go generated vendored Normal file
View File

@@ -0,0 +1,68 @@
// Copyright 2013 The Go Authors. All rights reserved.
//
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file or at
// https://developers.google.com/open-source/licenses/bsd.
package main
import (
"log"
"os"
"time"
"github.com/golang/gddo/database"
"github.com/golang/gddo/doc"
)
var reindexCommand = &command{
name: "reindex",
run: reindex,
usage: "reindex",
}
func fix(pdoc *doc.Package) {
/*
for _, v := range pdoc.Consts {
}
for _, v := range pdoc.Vars {
}
for _, v := range pdoc.Funcs {
}
for _, t := range pdoc.Types {
for _, v := range t.Consts {
}
for _, v := range t.Vars {
}
for _, v := range t.Funcs {
}
for _, v := range t.Methods {
}
}
for _, notes := range pdoc.Notes {
for _, v := range notes {
}
}
*/
}
func reindex(c *command) {
if len(c.flag.Args()) != 0 {
c.printUsage()
os.Exit(1)
}
db, err := database.New()
if err != nil {
log.Fatal(err)
}
var n int
err = db.Do(func(pi *database.PackageInfo) error {
n++
fix(pi.PDoc)
return db.Put(pi.PDoc, time.Time{}, false)
})
if err != nil {
log.Fatal(err)
}
log.Printf("Updated %d documents", n)
}

78
vendor/github.com/golang/gddo/gddo-admin/stats.go generated vendored Normal file
View File

@@ -0,0 +1,78 @@
// Copyright 2013 The Go Authors. All rights reserved.
//
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file or at
// https://developers.google.com/open-source/licenses/bsd.
package main
import (
"fmt"
"log"
"os"
"sort"
"github.com/golang/gddo/database"
)
var statsCommand = &command{
name: "stats",
run: stats,
usage: "stats",
}
type itemSize struct {
path string
size int
}
type bySizeDesc []itemSize
func (p bySizeDesc) Len() int { return len(p) }
func (p bySizeDesc) Less(i, j int) bool { return p[i].size > p[j].size }
func (p bySizeDesc) Swap(i, j int) { p[i], p[j] = p[j], p[i] }
func stats(c *command) {
if len(c.flag.Args()) != 0 {
c.printUsage()
os.Exit(1)
}
db, err := database.New()
if err != nil {
log.Fatal(err)
}
var packageSizes []itemSize
var truncatedPackages []string
projectSizes := make(map[string]int)
err = db.Do(func(pi *database.PackageInfo) error {
packageSizes = append(packageSizes, itemSize{pi.PDoc.ImportPath, pi.Size})
projectSizes[pi.PDoc.ProjectRoot] += pi.Size
if pi.PDoc.Truncated {
truncatedPackages = append(truncatedPackages, pi.PDoc.ImportPath)
}
return nil
})
var sizes []itemSize
for path, size := range projectSizes {
sizes = append(sizes, itemSize{path, size})
}
sort.Sort(bySizeDesc(sizes))
fmt.Println("PROJECT SIZES")
for _, size := range sizes {
fmt.Printf("%6d %s\n", size.size, size.path)
}
sort.Sort(bySizeDesc(packageSizes))
fmt.Println("PACKAGE SIZES")
for _, size := range packageSizes {
fmt.Printf("%6d %s\n", size.size, size.path)
}
sort.Sort(sort.StringSlice(truncatedPackages))
fmt.Println("TRUNCATED PACKAGES")
for _, p := range truncatedPackages {
fmt.Printf("%s\n", p)
}
}

10
vendor/github.com/golang/gddo/gddo-server/app.yaml generated vendored Normal file
View File

@@ -0,0 +1,10 @@
# This YAML file is used for local deployment with the GAE development environment.
runtime: go
vm: true
api_version: 1
threadsafe: true
handlers:
- url: /.*
script: IGNORED
secure: always

View File

@@ -0,0 +1,4 @@
<?xml version="1.0"?>
<users>
<user>6F3E495D5591D0B1308072CA245E8849</user>
</users>

View File

@@ -0,0 +1,2 @@
User-agent: *
Disallow: *

Binary file not shown (new file, 4.9 KiB).

Binary file not shown (new file, 6.3 KiB).

Binary file not shown (new file, 3.3 KiB).

Binary file not shown (new file, 6.3 KiB).

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

Binary file not shown (new file, 22 KiB).

View File

@@ -0,0 +1 @@
google-site-verification: google3d2f3cd4cc2bb44b.html

View File

File diff suppressed because one or more lines are too long

View File

@@ -0,0 +1,8 @@
User-agent: *
Disallow: /*?imports
Disallow: /*?importers
Disallow: /*?import-graph*
Disallow: /*?gosrc*
Disallow: /*?file*
Disallow: /*?play*
Disallow: /*?tools

View File

@@ -0,0 +1,82 @@
.container { max-width: 970px; }
.section-header {
padding-bottom: 4px;
margin: 20px 0 10px;
border-bottom: 1px solid #eeeeee;
}
/* Sidebar navigation (copied from bootstrap docs.css) */
/* First level of nav */
.gddo-sidebar {
margin-top: 5px;
margin-bottom: 30px;
padding-bottom: 10px;
text-shadow: 0 1px 0 #fff;
border-radius: 5px;
}
/* By default it's not affixed in mobile views, so undo that */
.gddo-sidebar .nav.affix {
position: static;
}
.gddo-sidebar .nav {
overflow: auto;
height: 95%;
}
/* All levels of nav */
.gddo-sidebar .nav > li > a {
display: block;
color: #716b7a;
padding: 5px 0px;
}
.gddo-sidebar .nav > li > a:hover,
.gddo-sidebar .nav > li > a:focus {
text-decoration: none;
background-color: #e5e3e9;
}
.gddo-sidebar .nav > .active > a,
.gddo-sidebar .nav > .active:hover > a,
.gddo-sidebar .nav > .active:focus > a {
font-weight: bold;
color: #563d7c;
background-color: transparent;
}
/* Nav: second level (shown on .active) */
.gddo-sidebar .nav .nav {
display: none; /* Hide by default, but at >768px, show it */
margin-bottom: 8px;
}
.gddo-sidebar .nav .nav > li > a {
padding-top: 3px;
padding-bottom: 3px;
padding-left: 15px;
font-size: 90%;
}
/* Show and affix the side nav when space allows it */
@media screen and (min-width: 992px) {
.gddo-sidebar .nav > .active > ul {
display: block;
}
/* Widen the fixed sidebar */
.gddo-sidebar .nav.affix,
.gddo-sidebar .nav.affix-bottom {
width: 213px;
}
.gddo-sidebar .nav.affix {
position: fixed; /* Undo the static from mobile first approach */
top: 10px;
}
.gddo-sidebar .nav.affix-bottom {
position: absolute; /* Undo the static from mobile first approach */
}
.gddo-sidebar .nav.affix-bottom .bs-sidenav,
.gddo-sidebar .nav.affix .bs-sidenav {
margin-top: 0;
margin-bottom: 0;
}
}

View File

@@ -0,0 +1,139 @@
html { background-color: whitesmoke; }
body { background-color: white; }
h4 { margin-top: 20px; }
.container { max-width: 728px; }
#x-projnav {
min-height: 20px;
margin-bottom: 20px;
background-color: #eee;
padding: 9px;
border-radius: 3px;
}
#x-footer {
padding-top: 14px;
padding-bottom: 15px;
margin-top: 5px;
background-color: #eee;
border-top-style: solid;
border-top-width: 1px;
}
.highlighted {
background-color: #FDFF9E;
}
#x-pkginfo {
margin-top: 25px;
border-top: 1px solid #ccc;
padding-top: 20px;
margin-bottom: 15px;
}
code {
background-color: inherit;
border: none;
color: #222;
padding: 0;
}
pre {
color: #222;
overflow: auto;
white-space: pre;
word-break: normal;
word-wrap: normal;
}
.funcdecl > pre {
white-space: pre-wrap;
word-break: break-all;
word-wrap: break-word;
}
pre .com {
color: #006600;
}
.decl {
position: relative;
}
.decl > a {
position: absolute;
top: 0px;
right: 0px;
display: none;
border: 1px solid #ccc;
border-top-right-radius: 4px;
border-bottom-left-radius: 4px;
padding-left: 4px;
padding-right: 4px;
}
.decl > a:hover {
background-color: white;
text-decoration: none;
}
.decl:hover > a {
display: block;
}
a, .navbar-default .navbar-brand {
color: #375eab;
}
.navbar-default, #x-footer {
background-color: hsl(209, 51%, 92%);
border-color: hsl(209, 51%, 88%);
}
.navbar-default .navbar-nav > .active > a,
.navbar-default .navbar-nav > .active > a:hover,
.navbar-default .navbar-nav > .active > a:focus {
background-color: hsl(209, 51%, 88%);
}
.navbar-default .navbar-nav > li > a:hover,
.navbar-default .navbar-nav > li > a:focus {
color: #000;
}
.panel-default > .panel-heading {
color: #333;
background-color: transparent;
}
a.permalink {
display: none;
}
a.uses {
display: none;
color: #666;
font-size: 0.8em;
}
h1:hover .permalink, h2:hover .permalink, h3:hover .permalink, h4:hover .permalink, h5:hover .permalink, h6:hover .permalink, h1:hover .uses, h2:hover .uses, h3:hover .uses, h4:hover .uses, h5:hover .uses, h6:hover .uses {
display: inline;
}
@media (max-width : 768px) {
.form-control {
font-size:16px;
}
}
.synopsis {
opacity: 0.87;
}
.additional-info {
display: block;
opacity: 0.54;
text-transform: uppercase;
font-size: 0.75em;
}

View File

@@ -0,0 +1,231 @@
// jump modal
$(function() {
var all;
var visible;
var active = -1;
var lastFilter = '';
var $body = $('#x-jump-body');
var $list = $('#x-jump-list');
var $filter = $('#x-jump-filter');
var $modal = $('#x-jump');
var update = function(filter) {
lastFilter = filter;
if (active >= 0) {
visible[active].e.removeClass('active');
active = -1;
}
visible = []
var re = new RegExp(filter.replace(/([.*+?^=!:${}()|\[\]\/\\])/g, "\\$1"), "gi");
all.forEach(function (id) {
id.e.detach();
var text = id.text;
if (filter) {
text = id.text.replace(re, function (s) { return '<b>' + s + '</b>'; });
if (text == id.text) {
return
}
}
id.e.html(text + ' ' + '<i>' + id.kind + '</i>');
visible.push(id);
});
$body.scrollTop(0);
if (visible.length > 0) {
active = 0;
visible[active].e.addClass('active');
}
$list.append($.map(visible, function(identifier) { return identifier.e; }));
}
var incrActive = function(delta) {
if (visible.length == 0) {
return
}
visible[active].e.removeClass('active');
active += delta;
if (active < 0) {
active = 0;
$body.scrollTop(0);
} else if (active >= visible.length) {
active = visible.length - 1;
$body.scrollTop($body[0].scrollHeight - $body[0].clientHeight);
} else {
var $e = visible[active].e;
var t = $e.position().top;
var b = t + $e.outerHeight(false);
if (t <= 0) {
$body.scrollTop($body.scrollTop() + t);
} else if (b >= $body.outerHeight(false)) {
$body.scrollTop($body.scrollTop() + b - $body.outerHeight(false));
}
}
visible[active].e.addClass('active');
}
$modal.on('show.bs.modal', function() {
if (!all) {
all = []
var kinds = {'c': 'constant', 'v': 'variable', 'f': 'function', 't': 'type', 'd': 'field', 'm': 'method'}
$('*[id]').each(function() {
var e = $(this);
var id = e.attr('id');
if (/^[^_][^-]*$/.test(id)) {
all.push({
text: id,
ltext: id.toLowerCase(),
kind: kinds[e.closest('[data-kind]').attr('data-kind')],
e: $('<a/>', {href: '#' + id, 'class': 'list-group-item', tabindex: '-1'})
});
}
});
all.sort(function (a, b) {
if (a.ltext > b.ltext) { return 1; }
if (a.ltext < b.ltext) { return -1; }
return 0
});
}
}).on('shown.bs.modal', function() {
update('');
$filter.val('').focus();
}).on('hide.bs.modal', function() {
$filter.blur();
}).on('click', '.list-group-item', function() {
$modal.modal('hide');
});
$filter.on('change keyup', function() {
var filter = $filter.val();
if (filter.toUpperCase() != lastFilter.toUpperCase()) {
update(filter);
}
}).on('keydown', function(e) {
switch(e.which) {
case 38: // up
incrActive(-1);
e.preventDefault();
break;
case 40: // down
incrActive(1);
e.preventDefault();
break;
case 13: // enter
if (active >= 0) {
visible[active].e[0].click();
}
break
}
});
});
$(function() {
if ("onhashchange" in window) {
var highlightedSel = "";
window.onhashchange = function() {
if (highlightedSel) {
$(highlightedSel).removeClass("highlighted");
}
highlightedSel = window.location.hash.replace( /(:|\.|\[|\]|,)/g, "\\$1" );
if (highlightedSel && (highlightedSel.indexOf("example-") == -1)) {
$(highlightedSel).addClass("highlighted");
}
};
window.onhashchange();
}
});
// keyboard shortcuts
$(function() {
var prevCh = null, prevTime = 0, modal = false;
$('.modal').on({
show: function() { modal = true; },
hidden: function() { modal = false; }
});
$(document).on('keypress', function(e) {
var combo = e.timeStamp - prevTime <= 1000;
prevTime = 0;
if (modal) {
return true;
}
var t = e.target.tagName
if (t == 'INPUT' ||
t == 'SELECT' ||
t == 'TEXTAREA' ) {
return true;
}
if (e.target.contentEditable && e.target.contentEditable == 'true') {
return true;
}
if (e.metaKey || e.ctrlKey) {
return true;
}
var ch = String.fromCharCode(e.which);
if (combo) {
switch (prevCh + ch) {
case "gg":
$('html,body').animate({scrollTop: 0},'fast');
return false;
case "gb":
$('html,body').animate({scrollTop: $(document).height()},'fast');
return false;
case "gi":
if ($('#pkg-index').length > 0) {
$('html,body').animate({scrollTop: $("#pkg-index").offset().top},'fast');
return false;
}
case "ge":
if ($('#pkg-examples').length > 0) {
$('html,body').animate({scrollTop: $("#pkg-examples").offset().top},'fast');
return false;
}
}
}
switch (ch) {
case "/":
$('#x-search-query').focus();
return false;
case "?":
$('#x-shortcuts').modal();
return false;
case "f":
if ($('#x-jump').length > 0) {
$('#x-jump').modal();
return false;
}
}
prevCh = ch
prevTime = e.timeStamp
return true;
});
});
// misc
$(function() {
$('span.timeago').timeago();
if (window.location.hash.substring(0, 9) == '#example-') {
var id = '#ex-' + window.location.hash.substring(9);
$(id).addClass('in').removeClass('collapse').height('auto');
}
$(document).on("click", "input.click-select", function(e) {
$(e.target).select();
});
$('body').scrollspy({
target: '.gddo-sidebar',
offset: 10
});
});

Binary file not shown (new file, 1.3 KiB).

View File

@@ -0,0 +1 @@
<svg xmlns="http://www.w3.org/2000/svg" width="109" height="20"><linearGradient id="a" x2="0" y2="100%"><stop offset="0" stop-color="#bbb" stop-opacity=".1"/><stop offset="1" stop-opacity=".1"/></linearGradient><rect rx="3" width="109" height="20" fill="#555"/><rect rx="3" x="44" width="65" height="20" fill="#5272B4"/><path fill="#5272B4" d="M44 0h4v20h-4z"/><rect rx="3" width="109" height="20" fill="url(#a)"/><g fill="#fff" text-anchor="middle" font-family="DejaVu Sans,Verdana,Geneva,sans-serif" font-size="11"><text x="23" y="15" fill="#010101" fill-opacity=".3">godoc</text><text x="23" y="14">godoc</text><text x="75.5" y="15" fill="#010101" fill-opacity=".3">reference</text><text x="75.5" y="14">reference</text></g></svg>


View File

@@ -0,0 +1,72 @@
{{define "Head"}}<title>About - GoDoc</title>{{end}}
{{define "Body"}}
<h1>About</h1>
<p>GoDoc hosts documentation for <a href="http://golang.org/">Go</a>
packages on <a href="https://bitbucket.org/">Bitbucket</a>, <a
href="https://github.com/">GitHub</a>, <a
href="https://launchpad.net/">Launchpad</a> and <a
href="http://code.google.com/hosting/">Google Project Hosting</a>.
<p>The source code for GoDoc is available <a
href="https://github.com/golang/gddo">on GitHub</a>.
<p>GoDoc displays documentation for GOOS=linux unless otherwise noted at the
bottom of the documentation page.
<h4 id="howto">Add a package to GoDoc</h4>
<p>GoDoc generates documentation from Go source code. The <a
href="http://blog.golang.org/godoc-documenting-go-code">guidelines</a>
for writing documentation for the <a
href="http://golang.org/cmd/godoc/">godoc</a> tool apply to GoDoc.
<p>It's important to write a good summary of the package in the first sentence
of the package comment. GoDoc indexes the first sentence and displays the first
sentence in package lists.
<p>To add a package to GoDoc, <a href="/">search</a> for the package by import
path. If GoDoc does not already have the documentation for the package, then
GoDoc will fetch the source from the version control system on the fly and add
the documentation.
<p>GoDoc checks for package updates once per day. You can force GoDoc to update
the documentation immediately by clicking the refresh link at the bottom of the
package documentation page.
<p>GoDoc crawls package imports and child directories to find new packages.
<h4 id="remove">Remove a package from GoDoc</h4>
GoDoc automatically removes packages deleted from the version control system
when GoDoc checks for updates to the package. You can force GoDoc to remove a
deleted package immediately by clicking the refresh link at the bottom of the
package documentation page.
If you do not want GoDoc to display documentation for your package, send mail
to golang-dev@googlegroups.com with the import path of the package
that you want to remove.
<h4 id="feedback">Feedback</h4>
<p>Send your ideas, feature requests and questions to the <a href="https://groups.google.com/group/golang-dev">golang-dev mailing list</a>.
Report bugs using the <a href="https://github.com/golang/gddo/issues/new">GitHub Issue Tracker</a>.
<h4 id="shortcuts">Keyboard Shortcuts</h4>
<p>GoDoc has keyboard shortcuts for navigating package documentation
pages. Type '?' on a package page for help.
<h4 id="bookmarklet">Bookmarklet</h4>
<p>The GoDoc bookmarklet navigates from pages on Bitbucket, GitHub, Launchpad
and Google Project Hosting to the package documentation. To install the
bookmarklet, click and drag the following link to your bookmark bar: <a
href="javascript:window.location='http://{{.Host}}/?q='+encodeURIComponent(window.location)">GoDoc</a>
<h4>More Documentation</h4>
<p>More documentation about GoDoc is available on <a href="https://github.com/golang/gddo/wiki">the project's GitHub wiki</a>.
{{end}}

View File

@@ -0,0 +1,6 @@
{{define "Head"}}<title>Bot - GoDoc</title>{{end}}
{{define "Body"}}
<p>GoDocBot is godoc.org's robot for fetching Go documentation from version control systems.
<p>Contact: golang-dev@googlegroups.com
{{end}}

View File

@@ -0,0 +1,9 @@
{{define "Head"}}{{template "PkgCmdHeader" $}}{{end}}
{{define "Body"}}
{{template "ProjectNav" $}}
<h2>Command {{$.pdoc.PageName}}</h2>
{{$.pdoc.Doc|comment}}
{{template "PkgFiles" $}}
{{template "PkgCmdFooter" $}}
{{end}}

View File

@@ -0,0 +1,5 @@
{{define "ROOT"}}{{with .pdoc}}
COMMAND DOCUMENTATION
{{.Doc|comment}}
{{template "Subdirs" $}}{{end}}{{end}}

View File

@@ -0,0 +1,126 @@
{{define "Analytics"}}{{with gaAccount}}<script type="text/javascript">
var _gaq = _gaq || [];
_gaq.push(['_setAccount', '{{.}}']);
_gaq.push(['_trackPageview']);
(function() {
var ga = document.createElement('script'); ga.type = 'text/javascript'; ga.async = true;
ga.src = ('https:' == document.location.protocol ? 'https://ssl' : 'http://www') + '.google-analytics.com/ga.js';
var s = document.getElementsByTagName('script')[0]; s.parentNode.insertBefore(ga, s);
})();
</script>{{end}}{{end}}
{{define "SearchBox"}}
<form>
<div class="input-group">
<input class="form-control" name="q" autofocus="autofocus" value="{{.}}" placeholder="Search for package by import path or keyword." type="text">
<span class="input-group-btn">
<button class="btn btn-default" type="submit">Go!</button>
</span>
</div>
</form>
{{end}}
{{define "ProjectNav"}}{{template "FlashMessages" .flashMessages}}<div class="clearfix" id="x-projnav">
{{if .pdoc.ProjectRoot}}{{if .pdoc.ProjectURL}}<a href="{{.pdoc.ProjectURL}}"><strong>{{.pdoc.ProjectName}}:</strong></a>{{else}}<strong>{{.pdoc.ProjectName}}:</strong>{{end}}{{else}}<a href="/-/go">Go:</a>{{end}}
{{.pdoc.Breadcrumbs templateName}}
{{if and .pdoc.Name (or templateName "pkg.html" templateName "cmd.html")}}
<span class="pull-right">
{{if not .pdoc.IsCmd}}
<a href="#pkg-index">Index</a>
{{if .pdoc.AllExamples}}<span class="text-muted">|</span> <a href="#pkg-examples">Examples</a>{{end}}
<span class="text-muted">|</span>
{{end}}
<a href="#pkg-files">Files</a>
{{if .pkgs}}<span class="text-muted">|</span> <a href="#pkg-subdirectories">Directories</a>{{end}}
</span>
{{end}}
</div>{{end}}
{{define "Pkgs"}}
<table class="table table-condensed">
<thead><tr><th>Path</th><th>Synopsis</th></tr></thead>
<tbody>{{range .}}<tr><td>{{if .Path|isValidImportPath}}<a href="/{{.Path}}">{{.Path|importPath}}</a>{{else}}{{.Path|importPath}}{{end}}</td><td>{{.Synopsis|importPath}}</td></tr>
{{end}}</tbody>
</table>
{{end}}
{{define "SearchPkgs"}}
<table class="table table-condensed">
<thead><tr><th>Path</th><th>Synopsis</th></tr></thead>
<tbody>{{range .}}
<tr><td>
{{if .Path|isValidImportPath}}
<a href="/{{.Path}}">{{.Path|importPath}}</a>
<ul class="list-inline">
<li class="additional-info">{{.ImportCount}} imports</li>
{{if .Fork}}<li class="additional-info">· fork</li>{{end}}
{{if .Stars}}<li class="additional-info">· {{.Stars}} stars</li>{{end}}
</ul>
{{else}}{{.Path|importPath}}</td>
{{end}}
<td class="synopsis">{{.Synopsis|importPath}}</td></tr>
{{end}}</tbody>
</table>
{{end}}
{{define "PkgCmdHeader"}}{{with .pdoc}}
<title>{{.PageName}} - GoDoc</title>
{{if .Synopsis}}
<meta name="twitter:title" content="{{if .IsCmd}}Command{{else}}Package{{end}} {{.PageName}}">
<meta property="og:title" content="{{if .IsCmd}}Command{{else}}Package{{end}} {{.PageName}}">
<meta name="description" content="{{.Synopsis}}">
<meta name="twitter:description" content="{{.Synopsis}}">
<meta property="og:description" content="{{.Synopsis}}">
<meta name="twitter:card" content="summary">
<meta name="twitter:site" content="@golang">
{{end}}
{{if .Errors}}<meta name="robots" content="NOINDEX">{{end}}
{{end}}{{end}}
{{define "PkgFiles"}}{{with .pdoc}}
<h4 id="pkg-files">
{{with .BrowseURL}}<a href="{{.}}">Package Files</a>{{else}}Package Files{{end}}
<a class="permalink" href="#pkg-files">&para;</a>
</h4>
<p>{{range .Files}}{{if .URL}}<a href="{{.URL}}">{{.Name}}</a>{{else}}{{.Name}}{{end}} {{end}}</p>
{{end}}{{end}}
{{define "PkgCmdFooter"}}
<!-- Bugs -->
{{with .pdoc}}{{with .Notes}}{{with .BUG}}
<h3 id="pkg-note-bug">Bugs <a class="permalink" href="#pkg-note-bug">&para;</a></h3>{{range .}}<p>{{$.pdoc.SourceLink .Pos "☞" true}} {{.Body}}{{end}}
{{end}}{{end}}{{end}}
{{if $.pkgs}}<h3 id="pkg-subdirectories">Directories <a class="permalink" href="#pkg-subdirectories">&para;</a></h3>
<table class="table table-condensed">
<thead><tr><th>Path</th><th>Synopsis</th></tr></thead>
<tbody>{{range $.pkgs}}<tr><td><a href="/{{.Path}}">{{relativePath .Path $.pdoc.ImportPath}}</a><td>{{.Synopsis}}</td></tr>{{end}}</tbody>
</table>
{{end}}
<div id="x-pkginfo">
{{with $.pdoc}}
<form name="x-refresh" method="POST" action="/-/refresh"><input type="hidden" name="path" value="{{.ImportPath}}"></form>
<p>{{if or .Imports $.importerCount}}Package {{.Name}} {{if .Imports}}imports <a href="?imports">{{.Imports|len}} packages</a> (<a href="?import-graph">graph</a>){{end}}{{if and .Imports $.importerCount}} and {{end}}{{if $.importerCount}}is imported by <a href="?importers">{{$.importerCount}} packages</a>{{end}}.{{end}}
{{if not .Updated.IsZero}}Updated <span class="timeago" title="{{.Updated.Format "2006-01-02T15:04:05Z"}}">{{.Updated.Format "2006-01-02"}}</span>{{if or (equal .GOOS "windows") (equal .GOOS "darwin")}} with GOOS={{.GOOS}}{{end}}.{{end}}
<a href="javascript:document.getElementsByName('x-refresh')[0].submit();" title="Refresh this page from the source.">Refresh now</a>.
<a href="?tools">Tools</a> for package owners.
{{.StatusDescription}}
{{end}}
{{with $.pdoc.Errors}}
<p>The <a href="http://golang.org/cmd/go/#Download_and_install_packages_and_dependencies">go get</a>
command cannot install this package because of the following issues:
<ul>
{{range .}}<li>{{.}}{{end}}
</ul>
{{end}}
</div>
{{end}}
{{define "FlashMessages"}}{{range .}}
{{if eq .ID "redir"}}{{if eq (len .Args) 1}}<div class="alert alert-warning">Redirected from {{index .Args 0}}.</div>{{end}}
{{else if eq .ID "refresh"}}{{if eq (len .Args) 1}}<div class="alert alert-danger">Error refreshing package: {{index .Args 0}}</div>{{end}}
{{end}}
{{end}}{{end}}
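
For reference, the PkgCmdFooter template above submits a hidden form to /-/refresh with a single path field, which is what the "Refresh now" link triggers. A minimal sketch of driving the same form programmatically; the host and import path are placeholders:

package main

import (
    "fmt"
    "net/http"
    "net/url"
)

func main() {
    // POST the same field the hidden refresh form submits.
    resp, err := http.PostForm("https://godoc.org/-/refresh",
        url.Values{"path": {"github.com/example/pkg"}})
    if err != nil {
        fmt.Println("refresh failed:", err)
        return
    }
    defer resp.Body.Close()
    fmt.Println("status:", resp.Status)
}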

View File

@@ -0,0 +1,3 @@
{{define "Subdirs"}}{{with $.pkgs}}SUBDIRECTORIES
{{range .}}
{{.Path}}{{end}}{{end}}{{end}}

View File

@@ -0,0 +1,10 @@
{{define "Head"}}
{{template "PkgCmdHeader" $}}
<meta name="robots" content="NOINDEX">
{{end}}
{{define "Body"}}
{{template "ProjectNav" $}}
{{template "PkgCmdFooter" $}}
{{end}}

View File

@@ -0,0 +1 @@
{{define "ROOT"}}{{with .pdoc}}{{template "Subdirs" $}}{{end}}{{end}}

View File

@@ -0,0 +1,23 @@
{{define "ROOT"}}<!DOCTYPE html><html lang="en">
<head>
<title>{{.pdoc.PageName}} graph - GoDoc</title>
<meta name="robots" content="NOINDEX, NOFOLLOW">
<link href="{{staticPath "/-/bootstrap.min.css"}}" rel="stylesheet">
<link href="{{staticPath "/-/site.css"}}" rel="stylesheet">
</head>
<body>
<div class="well-small">
Package <a href="/{{.pdoc.ImportPath}}">{{.pdoc.Name}}</a>
{{if .pdoc.ProjectRoot}}<span class="text-muted">|</span>
{{if .hide}}
<a href="?import-graph">Show</a>
{{else}}
<a href="?import-graph&hide=1">Hide</a> (<a href="?import-graph&hide=2">all</a>)
{{end}}
standard package dependencies.
{{end}}
</div>
{{.svg}}
</body>
{{template "Analytics"}}
</html>{{end}}

View File

@@ -0,0 +1,37 @@
{{define "Head"}}<title>GoDoc</title>
{{/* <link type="application/opensearchdescription+xml" rel="search" href="/-/opensearch.xml?v={{fileHash "templates/opensearch.xml"}}"/> */}}{{end}}
{{define "Body"}}
<div class="jumbotron">
<h2>Search for Go Packages</h2>
{{template "SearchBox" ""}}
</div>
<p>GoDoc hosts documentation for <a href="http://golang.org/">Go</a> packages
on Bitbucket, GitHub, Google Project Hosting and Launchpad. Read the <a
href="/-/about">About Page</a> for information about adding packages to GoDoc
and more.
<div class="row">
<div class="col-sm-6">
{{with .Popular}}
<h4>Popular Packages</h4>
<ul class="list-unstyled">
{{range .}}<li><a href="/{{.Path}}">{{.Path}}</a>{{end}}
</ul>
{{end}}
</div>
<div class="col-sm-6">
<h4>More Packages</h4>
<ul class="list-unstyled">
<li><a href="/-/go">Go Standard Packages</a>
<li><a href="/-/subrepo">Go Sub-repository Packages</a>
<li><a href="https://golang.org/wiki/Projects">Projects @ go-wiki</a>
<li><a href="https://github.com/search?o=desc&amp;q=language%3Ago&amp;s=stars&amp;type=Repositories">Most stars</a>,
<a href="https://github.com/search?o=desc&amp;q=language%3Ago&amp;s=forks&amp;type=Repositories">most forks</a>,
<a href="https://github.com/search?o=desc&amp;q=language%3Ago&amp;s=updated&amp;type=Repositories">recently updated</a> on GitHub
</ul>
</div>
</div>
{{end}}

View File

@@ -0,0 +1,2 @@
{{define "ROOT"}}
{{end}}

View File

@@ -0,0 +1,7 @@
{{define "Head"}}<title>{{.pdoc.PageName}} importers - GoDoc</title><meta name="robots" content="NOINDEX, NOFOLLOW">{{end}}
{{define "Body"}}
{{template "ProjectNav" $}}
<h3>Packages that import {{$.pdoc.Name}}</h3>
{{template "Pkgs" $.pkgs}}
{{end}}

View File

@@ -0,0 +1,10 @@
{{define "Head"}}<title>{{.pdoc.PageName}} importers - GoDoc</title><meta name="robots" content="NOINDEX, NOFOLLOW">{{end}}
{{define "Body"}}
{{template "ProjectNav" $}}
<h3>Packages that import {{$.pdoc.Name}}</h3>
<table class="table table-condensed">
<thead><tr><th>Path</th><th>Synopsis</th></tr></thead>
<tbody>{{range .pkgs}}<tr><td>{{.Path|importPath}}</td><td>{{.Synopsis|importPath}}</td></tr>{{end}}</tbody>
</table>
{{end}}

View File

@@ -0,0 +1,7 @@
{{define "Head"}}<title>{{.pdoc.PageName}} imports - GoDoc</title><meta name="robots" content="NOINDEX, NOFOLLOW">{{end}}
{{define "Body"}}
{{template "ProjectNav" $}}
<h3>Packages imported by {{.pdoc.Name}}</h3>
{{template "Pkgs" $.pkgs}}
{{end}}

View File

@@ -0,0 +1,73 @@
{{define "ROOT"}}<!DOCTYPE html><html lang="en">
<head profile="http://a9.com/-/spec/opensearch/1.1/">
<meta charset="utf-8">
<meta name="viewport" content="width=device-width, initial-scale=1.0">
<link href="{{staticPath "/-/bootstrap.min.css"}}" rel="stylesheet">
<link href="{{staticPath "/-/site.css"}}" rel="stylesheet">
{{template "Head" $}}
</head>
<body>
<nav class="navbar navbar-default" role="navigation">
<div class="container">
<div class="navbar-header">
<button type="button" class="navbar-toggle" data-toggle="collapse" data-target=".navbar-collapse">
<span class="sr-only">Toggle navigation</span>
<span class="icon-bar"></span>
<span class="icon-bar"></span>
<span class="icon-bar"></span>
</button>
<a class="navbar-brand" href="/"><strong>GoDoc</strong></a>
</div>
<div class="collapse navbar-collapse">
<ul class="nav navbar-nav">
<li{{if equal "home.html" templateName}} class="active"{{end}}><a href="/">Home</a></li>
<li{{if equal "about.html" templateName}} class="active"{{end}}><a href="/-/about">About</a></li>
</ul>
<form class="navbar-nav navbar-form navbar-right" id="x-search" action="/" role="search"><input class="form-control" id="x-search-query" type="text" name="q" placeholder="Search"></form>
</div>
</div>
</nav>
<div class="container">
{{template "Body" $}}
</div>
<div id="x-footer" class="clearfix">
<div class="container">
<a href="https://github.com/golang/gddo/issues">Website Issues</a>
<span class="text-muted">|</span> <a href="http://golang.org/">Go Language</a>
<span class="pull-right"><a href="#">Back to top</a></span>
</div>
</div>
<div id="x-shortcuts" tabindex="-1" class="modal">
<div class="modal-dialog">
<div class="modal-content">
<div class="modal-header">
<button type="button" class="close" data-dismiss="modal" aria-hidden="true">&times;</button>
<h4 class="modal-title">Keyboard shortcuts</h4>
</div>
<div class="modal-body">
<table>{{$mutePkg := not (equal "pkg.html" templateName)}}
<tr><td align="right"><b>?</b></td><td> : This menu</td></tr>
<tr><td align="right"><b>/</b></td><td> : Search site</td></tr>
<tr{{if $mutePkg}} class="text-muted"{{end}}><td align="right"><b>f</b></td><td> : Jump to identifier</td></tr>
<tr><td align="right"><b>g</b> then <b>g</b></td><td> : Go to top of page</td></tr>
<tr><td align="right"><b>g</b> then <b>b</b></td><td> : Go to end of page</td></tr>
<tr{{if $mutePkg}} class="text-muted"{{end}}><td align="right"><b>g</b> then <b>i</b></td><td> : Go to index</td></tr>
<tr{{if $mutePkg}} class="text-muted"{{end}}><td align="right"><b>g</b> then <b>e</b></td><td> : Go to examples</td></tr>
</table>
</div>
<div class="modal-footer">
<button type="button" class="btn" data-dismiss="modal">Close</button>
</div>
</div>
</div>
</div>
<script src="{{staticPath "/-/jquery-2.0.3.min.js"}}"></script>
<script src="{{staticPath "/-/bootstrap.min.js"}}"></script>
<script src="{{staticPath "/-/site.js"}}"></script>
{{template "Analytics"}}
</body>
</html>
{{end}}

View File

@@ -0,0 +1,10 @@
{{define "Head"}}<title>Not Found - GoDoc</title>{{end}}
{{define "Body"}}
{{template "FlashMessages" .flashMessages}}
<h1>Not Found</h1>
<p>Oh snap! Our team of gophers could not find the web page you are looking for. Try one of these pages:
<ul>
<li><a href="/">Home</a>
</ul>
{{end}}

View File

@@ -0,0 +1,2 @@
{{define "ROOT"}}NOT FOUND
{{end}}

View File

@@ -0,0 +1,9 @@
{{define "ROOT"}}<?xml version="1.0"?>
<OpenSearchDescription xmlns="http://a9.com/-/spec/opensearch/1.1/">
<InputEncoding>UTF-8</InputEncoding>
<ShortName>GoDoc</ShortName>
<Description>GoDoc: Go Documentation Service</Description>
<Url type="text/html" method="get" template="http://{{.}}/?q={searchTerms}"/>
<Url type="application/x-suggestions+json" template="http://{{.}}/-/suggest?q={searchTerms}"/>
</OpenSearchDescription>
{{end}}

View File

@@ -0,0 +1,183 @@
{{define "Head"}}
{{template "PkgCmdHeader" $}}
{{if sidebarEnabled}}
<link href="{{staticPath "/-/sidebar.css"}}" rel="stylesheet">
{{end}}
{{end}}
{{define "Body"}}
{{with .pdoc}}
{{if sidebarEnabled}}
<div class="row">
<!-- Sidebar -->
<div class="gddo-sidebar col-md-3 hidden-xs hidden-sm">
<ul id="sidebar-nav" class="nav" data-spy="affix" data-offset-top="70">
<li class="active"><a href="#pkg-overview">Overview</a></li>
<li><a href="#pkg-index">Index</a></li>
{{if .Examples}}<li><a href="#pkg-examples">Examples</a></li>{{end}}
{{if .Consts}}<li><a href="#pkg-constants">Constants</a></li>{{end}}
{{if .Vars}}<li><a href="#pkg-variables">Variables</a></li>{{end}}
{{if .Funcs}}
<li>
<a href="#pkg-functions">Functions</a>
<ul class="nav">
{{range .Funcs}}<li><a href="#{{.Name}}">{{.Name}}</a></li>{{end}}
</ul>
</li>
{{end}}
{{if .Types}}
<li>
<a href="#pkg-types">Types</a>
<ul class="nav">
{{range .Types}}<li><a href="#{{.Name}}">{{.Name}}</a></li>{{end}}
</ul>
</li>
{{end}}
{{if .Notes.BUG}}<li><a href="#pkg-note-bug">Bugs</a></li>{{end}}
{{if $.pkgs}}<li><a href="#pkg-subdirectories">Directories</a></li>{{end}}
</ul>
</div>
<!-- Content -->
<div class="col-md-9">
{{end}}<!-- end sidebarEnabled -->
{{template "ProjectNav" $}}
<h2 id="pkg-overview">package {{.Name}}</h2>
<p><code>import "{{.ImportPath}}"</code>
{{.Doc|comment}}
{{template "Examples" .|$.pdoc.ObjExamples}}
<!-- Index -->
<h3 id="pkg-index" class="section-header">Index <a class="permalink" href="#pkg-index">&para;</a></h3>
{{if .Truncated}}
<div class="alert">The documentation displayed here is incomplete. Use the godoc command to read the complete documentation.</div>
{{end}}
<ul class="list-unstyled">
{{if .Consts}}<li><a href="#pkg-constants">Constants</a></li>{{end}}
{{if .Vars}}<li><a href="#pkg-variables">Variables</a></li>{{end}}
{{range .Funcs}}<li><a href="#{{.Name}}">{{.Decl.Text}}</a></li>{{end}}
{{range $t := .Types}}
<li><a href="#{{.Name}}">type {{.Name}}</a></li>
{{if or .Funcs .Methods}}<ul>{{end}}
{{range .Funcs}}<li><a href="#{{.Name}}">{{.Decl.Text}}</a></li>{{end}}
{{range .Methods}}<li><a href="#{{$t.Name}}.{{.Name}}">{{.Decl.Text}}</a></li>{{end}}
{{if or .Funcs .Methods}}</ul>{{end}}
{{end}}
{{if .Notes.BUG}}<li><a href="#pkg-note-bug">Bugs</a></li>{{end}}
</ul>
<!-- Examples -->
{{with .AllExamples}}
<h4 id="pkg-examples">Examples <a class="permalink" href="#pkg-examples">&para;</a></h4>
<ul class="list-unstyled">
{{range . }}<li><a href="#example-{{.ID}}" onclick="$('#ex-{{.ID}}').addClass('in').removeClass('collapse').height('auto')">{{.Label}}</a></li>{{end}}
</ul>
{{else}}
<span id="pkg-examples"></span>
{{end}}
<!-- Files -->
{{template "PkgFiles" $}}
<!-- Constants -->
{{if .Consts}}
<h3 id="pkg-constants">Constants <a class="permalink" href="#pkg-constants">&para;</a></h3>
{{range .Consts}}<div class="decl" data-kind="c">{{$.pdoc.SourceLink .Pos "\u2756" false}}{{code .Decl nil}}</div>{{.Doc|comment}}{{end}}
{{end}}
<!-- Variables -->
{{if .Vars}}
<h3 id="pkg-variables">Variables <a class="permalink" href="#pkg-variables">&para;</a></h3>
{{range .Vars}}<div class="decl" data-kind="v">{{$.pdoc.SourceLink .Pos "\u2756" false}}{{code .Decl nil}}</div>{{.Doc|comment}}{{end}}
{{end}}
<!-- Functions -->
{{if sidebarEnabled}}{{if .Funcs}}
<h3 id="pkg-functions" class="section-header">Functions <a class="permalink" href="#pkg-functions">&para;</a></h3>
{{end}}{{end}}
{{range .Funcs}}
<h3 id="{{.Name}}" data-kind="f">func {{$.pdoc.SourceLink .Pos .Name true}} <a class="permalink" href="#{{.Name}}">&para;</a> {{$.pdoc.UsesLink "List Function Callers" .Name}}</h3>
<div class="funcdecl decl">{{$.pdoc.SourceLink .Pos "\u2756" false}}{{code .Decl nil}}</div>{{.Doc|comment}}
{{template "Examples" .|$.pdoc.ObjExamples}}
{{end}}
<!-- Types -->
{{if sidebarEnabled}}{{if .Types}}
<h3 id="pkg-types" class="section-header">Types <a class="permalink" href="#pkg-types">&para;</a></h3>
{{end}}{{end}}
{{range $t := .Types}}
<h3 id="{{.Name}}" data-kind="t">type {{$.pdoc.SourceLink .Pos .Name true}} <a class="permalink" href="#{{.Name}}">&para;</a> {{$.pdoc.UsesLink "List Uses of This Type" .Name}}</h3>
<div class="decl" data-kind="{{if isInterface $t}}m{{else}}d{{end}}">{{$.pdoc.SourceLink .Pos "\u2756" false}}{{code .Decl $t}}</div>{{.Doc|comment}}
{{range .Consts}}<div class="decl" data-kind="c">{{$.pdoc.SourceLink .Pos "\u2756" false}}{{code .Decl nil}}</div>{{.Doc|comment}}{{end}}
{{range .Vars}}<div class="decl" data-kind="v">{{$.pdoc.SourceLink .Pos "\u2756" false}}{{code .Decl nil}}</div>{{.Doc|comment}}{{end}}
{{template "Examples" .|$.pdoc.ObjExamples}}
{{range .Funcs}}
<h4 id="{{.Name}}" data-kind="f">func {{$.pdoc.SourceLink .Pos .Name true}} <a class="permalink" href="#{{.Name}}">&para;</a> {{$.pdoc.UsesLink "List Function Callers" .Name}}</h4>
<div class="funcdecl decl">{{$.pdoc.SourceLink .Pos "\u2756" false}}{{code .Decl nil}}</div>{{.Doc|comment}}
{{template "Examples" .|$.pdoc.ObjExamples}}
{{end}}
{{range .Methods}}
<h4 id="{{$t.Name}}.{{.Name}}" data-kind="m">func ({{.Recv}}) {{$.pdoc.SourceLink .Pos .Name true}} <a class="permalink" href="#{{$t.Name}}.{{.Name}}">&para;</a> {{$.pdoc.UsesLink "List Method Callers" .Orig .Recv .Name}}</h4>
<div class="funcdecl decl">{{$.pdoc.SourceLink .Pos "\u2756" false}}{{code .Decl nil}}</div>{{.Doc|comment}}
{{template "Examples" .|$.pdoc.ObjExamples}}
{{end}}
{{end}}
{{template "PkgCmdFooter" $}}
<div id="x-jump" tabindex="-1" class="modal">
<div class="modal-dialog">
<div class="modal-content">
<div class="modal-header">
<h4 class="modal-title">Jump to identifier</h4>
<br class="clearfix">
<input id="x-jump-filter" class="form-control" autocomplete="off" type="text">
</div>
<div id="x-jump-body" class="modal-body" style="height: 260px; overflow: auto;">
<div id="x-jump-list" class="list-group" style="margin-bottom: 0;"></div>
</div>
<div class="modal-footer">
<button type="button" class="btn" data-dismiss="modal">Close</button>
</div>
</div>
</div>
</div>
{{if sidebarEnabled}}
</div>
</div>
{{end}}
{{end}}
{{end}}
{{define "Examples"}}
{{if .}}
<div class="panel-group">
{{range .}}
<div class="panel panel-default" id="example-{{.ID}}">
<div class="panel-heading"><a class="accordion-toggle" data-toggle="collapse" href="#ex-{{.ID}}">Example{{with .Example.Name}} ({{.}}){{end}}</a></div>
<div id="ex-{{.ID}}" class="panel-collapse collapse"><div class="panel-body">
{{with .Example.Doc}}<p>{{.|comment}}{{end}}
<p>Code:{{if .Play}}<span class="pull-right"><a href="?play={{.ID}}">play</a>&nbsp;</span>{{end}}
{{code .Example.Code nil}}
{{with .Example.Output}}<p>Output:<pre>{{.}}</pre>{{end}}
</div></div>
</div>
{{end}}
</div>
{{end}}
{{end}}

View File

@@ -0,0 +1,38 @@
{{define "ROOT"}}{{with .pdoc}}PACKAGE{{if .Name}}
package {{.Name}}
import "{{.ImportPath}}"
{{.Doc|comment}}
{{if .Consts}}
CONSTANTS
{{range .Consts}}{{.Decl.Text}}
{{.Doc|comment}}{{end}}
{{end}}{{if .Vars}}
VARIABLES
{{range .Vars}}{{.Decl.Text}}
{{.Doc|comment}}{{end}}
{{end}}{{if .Funcs}}
FUNCTIONS
{{range .Funcs}}{{.Decl.Text}}
{{.Doc|comment}}
{{end}}{{end}}{{if .Types}}
TYPES
{{range .Types}}{{.Decl.Text}}
{{.Doc|comment}}
{{range .Consts}}{{.Decl.Text}}
{{.Doc|comment}}
{{end}}{{range .Vars}}{{.Decl.Text}}
{{.Doc|comment}}
{{end}}{{range .Funcs}}{{.Decl.Text}}
{{.Doc|comment}}
{{end}}{{range .Methods}}{{.Decl.Text}}
{{.Doc|comment}}
{{end}}{{end}}
{{end}}
{{template "Subdirs" $}}
{{end}}{{end}}{{end}}

View File

@@ -0,0 +1,14 @@
{{define "Head"}}<title>{{.q}} - GoDoc</title><meta name="robots" content="NOINDEX">{{end}}
{{define "Body"}}
<div class="well">
{{template "SearchBox" .q}}
</div>
<p>Try this search on <a href="http://go-search.org/search?q={{.q}}">Go-Search</a>
or <a href="https://github.com/search?q={{.q}}+language:go">GitHub</a>.
{{if .pkgs}}
{{template "SearchPkgs" .pkgs}}
{{else}}
<p>No packages found.
{{end}}
{{end}}

View File

@@ -0,0 +1,2 @@
{{define "ROOT"}}{{range .pkgs}}{{.Path}} {{.Synopsis}}
{{end}}{{end}}

View File

@@ -0,0 +1,8 @@
{{define "Head"}}<title>Standard Packages - GoDoc</title><meta name="robots" content="NOINDEX">{{end}}
{{define "Body"}}
<h1>Go Standard Packages</h1>
{{template "Pkgs" .pkgs}}
<p>View the official documentation at <a href="http://golang.org/pkg/">golang.org</a>.
{{end}}

View File

@@ -0,0 +1,23 @@
{{define "Head"}}<title>Go Sub-Repository Packages - GoDoc</title><meta name="robots" content="NOINDEX">{{end}}
{{define "Body"}}
<h1>Go Sub-repository Packages</h1>
These packages are part of the Go Project but outside the main Go tree. They are developed under looser compatibility requirements than the Go core.
<h2>Repositories</h2>
<ul class="list-unstyled">
{{template "subrepo" map "name" "blog" "desc" "the content and server program for blog.golang.org."}}
{{template "subrepo" map "name" "crypto" "desc" "additional cryptography packages."}}
{{template "subrepo" map "name" "exp" "desc" "experimental code (handle with care)."}}
{{template "subrepo" map "name" "image" "desc" "additional imaging packages."}}
{{template "subrepo" map "name" "mobile" "desc" "libraries and build tools for Go on Android."}}
{{template "subrepo" map "name" "net" "desc" "additional networking packages."}}
{{template "subrepo" map "name" "sys" "desc" "for low-level interactions with the operating system."}}
{{template "subrepo" map "name" "talks" "desc" "the content and server program for talks.golang.org."}}
{{template "subrepo" map "name" "text" "desc" "packages for working with text."}}
{{template "subrepo" map "name" "tools" "desc" "godoc, vet, cover, and other tools."}}
</ul>
<h2>Packages</h2>
{{template "Pkgs" .pkgs}}
{{end}}
{{define "subrepo"}}<li><a href="https://go.googlesource.com/{{.name}}/+/master">golang.org/x/{{.name}}</a> — {{.desc}}{{end}}

View File

@@ -0,0 +1,36 @@
{{define "Head"}}<title>{{.pdoc.PageName}} tools - GoDoc</title><meta name="robots" content="NOINDEX, NOFOLLOW">{{end}}
{{define "Body"}}
{{template "ProjectNav" $}}
<h2>Tools for {{$.pdoc.PageName}}</h2>
<h3>Badge</h3>
<p><a href="{{.uri}}"><img src="{{.uri}}?status.svg" alt="GoDoc"></a>
<p>Use one of the snippets below to add a link to GoDoc from your project
website or README file:
<h5>HTML</h5>
<input type="text" value='<a href="{{.uri}}"><img src="{{.uri}}?status.svg" alt="GoDoc"></a>' class="click-select form-control">
<h5>Markdown</h5>
<input type="text" value="[![GoDoc]({{.uri}}?status.svg)]({{.uri}})" class="click-select form-control">
{{if .pdoc.Name}}
<h3>Lint</h3>
<form name="x-lint" method="POST" action="http://go-lint.appspot.com/-/refresh"><input name="importPath" type="hidden" value="{{.pdoc.ImportPath}}"></form>
<p><a href="javascript:document.getElementsByName('x-lint')[0].submit();">Run lint</a> on {{.pdoc.PageName}}.
{{if and (not .pdoc.IsCmd) (not .pdoc.Doc)}}
<p>The {{.pdoc.Name}} package does not have a package declaration
comment. See the <a
href="http://blog.golang.org/godoc-documenting-go-code">Go
documentation guidelines</a> for information on how to write a package
comment. It's important to write a good summary of the package in the
first sentence of the package comment. GoDoc indexes the first sentence
and displays the first sentence in package lists.
{{end}}
{{end}}
<p>&nbsp;
{{end}}

View File

@@ -0,0 +1,184 @@
/**
* Timeago is a jQuery plugin that makes it easy to support automatically
* updating fuzzy timestamps (e.g. "4 minutes ago" or "about 1 day ago").
*
* @name timeago
* @version 1.1.0
* @requires jQuery v1.2.3+
* @author Ryan McGeary
* @license MIT License - http://www.opensource.org/licenses/mit-license.php
*
* For usage and examples, visit:
* http://timeago.yarp.com/
*
* Copyright (c) 2008-2013, Ryan McGeary (ryan -[at]- mcgeary [*dot*] org)
*/
(function (factory) {
if (typeof define === 'function' && define.amd) {
// AMD. Register as an anonymous module.
define(['jquery'], factory);
} else {
// Browser globals
factory(jQuery);
}
}(function ($) {
$.timeago = function(timestamp) {
if (timestamp instanceof Date) {
return inWords(timestamp);
} else if (typeof timestamp === "string") {
return inWords($.timeago.parse(timestamp));
} else if (typeof timestamp === "number") {
return inWords(new Date(timestamp));
} else {
return inWords($.timeago.datetime(timestamp));
}
};
var $t = $.timeago;
$.extend($.timeago, {
settings: {
refreshMillis: 60000,
allowFuture: false,
localeTitle: false,
strings: {
prefixAgo: null,
prefixFromNow: null,
suffixAgo: "ago",
suffixFromNow: "from now",
seconds: "less than a minute",
minute: "about a minute",
minutes: "%d minutes",
hour: "about an hour",
hours: "about %d hours",
day: "a day",
days: "%d days",
month: "about a month",
months: "%d months",
year: "about a year",
years: "%d years",
wordSeparator: " ",
numbers: []
}
},
inWords: function(distanceMillis) {
var $l = this.settings.strings;
var prefix = $l.prefixAgo;
var suffix = $l.suffixAgo;
if (this.settings.allowFuture) {
if (distanceMillis < 0) {
prefix = $l.prefixFromNow;
suffix = $l.suffixFromNow;
}
}
var seconds = Math.abs(distanceMillis) / 1000;
var minutes = seconds / 60;
var hours = minutes / 60;
var days = hours / 24;
var years = days / 365;
function substitute(stringOrFunction, number) {
var string = $.isFunction(stringOrFunction) ? stringOrFunction(number, distanceMillis) : stringOrFunction;
var value = ($l.numbers && $l.numbers[number]) || number;
return string.replace(/%d/i, value);
}
var words = seconds < 45 && substitute($l.seconds, Math.round(seconds)) ||
seconds < 90 && substitute($l.minute, 1) ||
minutes < 45 && substitute($l.minutes, Math.round(minutes)) ||
minutes < 90 && substitute($l.hour, 1) ||
hours < 24 && substitute($l.hours, Math.round(hours)) ||
hours < 42 && substitute($l.day, 1) ||
days < 30 && substitute($l.days, Math.round(days)) ||
days < 45 && substitute($l.month, 1) ||
days < 365 && substitute($l.months, Math.round(days / 30)) ||
years < 1.5 && substitute($l.year, 1) ||
substitute($l.years, Math.round(years));
var separator = $l.wordSeparator || "";
if ($l.wordSeparator === undefined) { separator = " "; }
return $.trim([prefix, words, suffix].join(separator));
},
parse: function(iso8601) {
var s = $.trim(iso8601);
s = s.replace(/\.\d+/,""); // remove milliseconds
s = s.replace(/-/,"/").replace(/-/,"/");
s = s.replace(/T/," ").replace(/Z/," UTC");
s = s.replace(/([\+\-]\d\d)\:?(\d\d)/," $1$2"); // -04:00 -> -0400
return new Date(s);
},
datetime: function(elem) {
var iso8601 = $t.isTime(elem) ? $(elem).attr("datetime") : $(elem).attr("title");
return $t.parse(iso8601);
},
isTime: function(elem) {
// jQuery's `is()` doesn't play well with HTML5 in IE
return $(elem).get(0).tagName.toLowerCase() === "time"; // $(elem).is("time");
}
});
// functions that can be called via $(el).timeago('action')
// init is default when no action is given
// functions are called with context of a single element
var functions = {
init: function(){
var refresh_el = $.proxy(refresh, this);
refresh_el();
var $s = $t.settings;
if ($s.refreshMillis > 0) {
setInterval(refresh_el, $s.refreshMillis);
}
},
update: function(time){
$(this).data('timeago', { datetime: $t.parse(time) });
refresh.apply(this);
}
};
$.fn.timeago = function(action, options) {
var fn = action ? functions[action] : functions.init;
if(!fn){
throw new Error("Unknown function name '"+ action +"' for timeago");
}
// each over objects here and call the requested function
this.each(function(){
fn.call(this, options);
});
return this;
};
function refresh() {
var data = prepareData(this);
if (!isNaN(data.datetime)) {
$(this).text(inWords(data.datetime));
}
return this;
}
function prepareData(element) {
element = $(element);
if (!element.data("timeago")) {
element.data("timeago", { datetime: $t.datetime(element) });
var text = $.trim(element.text());
if ($t.settings.localeTitle) {
element.attr("title", element.data('timeago').datetime.toLocaleString());
} else if (text.length > 0 && !($t.isTime(element) && element.attr("title"))) {
element.attr("title", text);
}
}
return element.data("timeago");
}
function inWords(date) {
return $t.inWords(distance(date));
}
function distance(date) {
return (new Date().getTime() - date.getTime());
}
// fix for IE6 suckage
document.createElement("abbr");
document.createElement("time");
}));

134
vendor/github.com/golang/gddo/gddo-server/background.go generated vendored Normal file
View File

@@ -0,0 +1,134 @@
// Copyright 2017 The Go Authors. All rights reserved.
//
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file or at
// https://developers.google.com/open-source/licenses/bsd.
package main
import (
"log"
"time"
"github.com/spf13/viper"
"google.golang.org/appengine"
"github.com/golang/gddo/database"
"github.com/golang/gddo/gosrc"
)
type BackgroundTask struct {
name string
fn func() error
interval time.Duration
next time.Time
}
func runBackgroundTasks() {
defer log.Println("ERROR: Background exiting!")
var backgroundTasks = []*BackgroundTask{
{
name: "GitHub updates",
fn: readGitHubUpdates,
interval: viper.GetDuration(ConfigGithubInterval),
},
{
name: "Crawl",
fn: doCrawl,
interval: viper.GetDuration(ConfigCrawlInterval),
},
}
sleep := time.Minute
for _, task := range backgroundTasks {
if task.interval > 0 && sleep > task.interval {
sleep = task.interval
}
}
for {
for _, task := range backgroundTasks {
start := time.Now()
if task.interval > 0 && start.After(task.next) {
if err := task.fn(); err != nil {
log.Printf("Task %s: %v", task.name, err)
}
task.next = time.Now().Add(task.interval)
}
}
time.Sleep(sleep)
}
}
func doCrawl() error {
// Look for new package to crawl.
importPath, hasSubdirs, err := db.PopNewCrawl()
if err != nil {
log.Printf("db.PopNewCrawl() returned error %v", err)
return nil
}
if importPath != "" {
if pdoc, err := crawlDoc("new", importPath, nil, hasSubdirs, time.Time{}); pdoc == nil && err == nil {
if err := db.AddBadCrawl(importPath); err != nil {
log.Printf("ERROR db.AddBadCrawl(%q): %v", importPath, err)
}
}
return nil
}
// Crawl existing doc.
pdoc, pkgs, nextCrawl, err := db.Get("-")
if err != nil {
log.Printf("db.Get(\"-\") returned error %v", err)
return nil
}
if pdoc == nil || nextCrawl.After(time.Now()) {
return nil
}
if _, err = crawlDoc("crawl", pdoc.ImportPath, pdoc, len(pkgs) > 0, nextCrawl); err != nil {
// Touch package so that crawl advances to next package.
if err := db.SetNextCrawl(pdoc.ImportPath, time.Now().Add(viper.GetDuration(ConfigMaxAge)/3)); err != nil {
log.Printf("ERROR db.SetNextCrawl(%q): %v", pdoc.ImportPath, err)
}
}
return nil
}
func readGitHubUpdates() error {
const key = "gitHubUpdates"
var last string
if err := db.GetGob(key, &last); err != nil {
return err
}
last, names, err := gosrc.GetGitHubUpdates(httpClient, last)
if err != nil {
return err
}
for _, name := range names {
log.Printf("bump crawl github.com/%s", name)
if err := db.BumpCrawl("github.com/" + name); err != nil {
log.Println("ERROR force crawl:", err)
}
}
if err := db.PutGob(key, last); err != nil {
return err
}
return nil
}
func reindex() {
c := appengine.BackgroundContext()
if err := db.Reindex(c); err != nil {
log.Println("reindex:", err)
}
}
func purgeIndex() {
c := appengine.BackgroundContext()
if err := database.PurgeIndex(c); err != nil {
log.Println("purgeIndex:", err)
}
}
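
For reference, runBackgroundTasks above polls at the shorter of one minute and the smallest configured interval, runs any task whose next deadline has passed, and stamps a new deadline after each run. A stripped-down sketch of the same fixed-interval scheduler with a dummy task:

package main

import (
    "log"
    "time"
)

type task struct {
    name     string
    fn       func() error
    interval time.Duration
    next     time.Time
}

func main() {
    tasks := []*task{
        {name: "tick", fn: func() error { log.Println("tick"); return nil }, interval: 2 * time.Second},
    }

    // Poll at the shortest interval so no task is checked late.
    sleep := time.Minute
    for _, t := range tasks {
        if t.interval > 0 && sleep > t.interval {
            sleep = t.interval
        }
    }

    for {
        for _, t := range tasks {
            if t.interval > 0 && time.Now().After(t.next) {
                if err := t.fn(); err != nil {
                    log.Printf("task %s: %v", t.name, err)
                }
                t.next = time.Now().Add(t.interval)
            }
        }
        time.Sleep(sleep)
    }
}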

97
vendor/github.com/golang/gddo/gddo-server/browse.go generated vendored Normal file
View File

@@ -0,0 +1,97 @@
// Copyright 2013 The Go Authors. All rights reserved.
//
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file or at
// https://developers.google.com/open-source/licenses/bsd.
package main
import (
"net/url"
"path"
"regexp"
"strings"
)
func importPathFromGoogleBrowse(m []string) string {
project := m[1]
dir := m[2]
if dir == "" {
dir = "/"
} else if dir[len(dir)-1] == '/' {
dir = dir[:len(dir)-1]
}
subrepo := ""
if len(m[3]) > 0 {
v, _ := url.ParseQuery(m[3][1:])
subrepo = v.Get("repo")
if len(subrepo) > 0 {
subrepo = "." + subrepo
}
}
if strings.HasPrefix(m[4], "#hg%2F") {
d, _ := url.QueryUnescape(m[4][len("#hg%2f"):])
if i := strings.IndexRune(d, '%'); i >= 0 {
d = d[:i]
}
dir = dir + "/" + d
}
return "code.google.com/p/" + project + subrepo + dir
}
var browsePatterns = []struct {
pat *regexp.Regexp
fn func([]string) string
}{
{
// GitHub tree browser.
regexp.MustCompile(`^https?://(github\.com/[^/]+/[^/]+)(?:/tree/[^/]+(/.*))?$`),
func(m []string) string { return m[1] + m[2] },
},
{
// GitHub file browser.
regexp.MustCompile(`^https?://(github\.com/[^/]+/[^/]+)/blob/[^/]+/(.*)$`),
func(m []string) string {
d := path.Dir(m[2])
if d == "." {
return m[1]
}
return m[1] + "/" + d
},
},
{
// GitHub issues, pulls, etc.
regexp.MustCompile(`^https?://(github\.com/[^/]+/[^/]+)(.*)$`),
func(m []string) string { return m[1] },
},
{
// Bitbucket source browser.
regexp.MustCompile(`^https?://(bitbucket\.org/[^/]+/[^/]+)(?:/src/[^/]+(/[^?]+)?)?`),
func(m []string) string { return m[1] + m[2] },
},
{
// Google Project Hosting source browser.
regexp.MustCompile(`^http:/+code\.google\.com/p/([^/]+)/source/browse(/[^?#]*)?(\?[^#]*)?(#.*)?$`),
importPathFromGoogleBrowse,
},
{
// Launchpad source browser.
regexp.MustCompile(`^https?:/+bazaar\.(launchpad\.net/.*)/files$`),
func(m []string) string { return m[1] },
},
{
regexp.MustCompile(`^https?://(.+)$`),
func(m []string) string { return strings.Trim(m[1], "/") },
},
}
// isBrowseURL returns importPath and true if s looks like a URL for a VCS
// source browser.
func isBrowseURL(s string) (importPath string, ok bool) {
for _, c := range browsePatterns {
if m := c.pat.FindStringSubmatch(s); m != nil {
return c.fn(m), true
}
}
return "", false
}

View File

@@ -0,0 +1,40 @@
// Copyright 2013 The Go Authors. All rights reserved.
//
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file or at
// https://developers.google.com/open-source/licenses/bsd.
package main
import (
"testing"
)
var isBrowseURLTests = []struct {
s string
importPath string
ok bool
}{
{"https://github.com/garyburd/gddo/blob/master/doc/code.go", "github.com/garyburd/gddo/doc", true},
{"https://github.com/garyburd/go-oauth/blob/master/.gitignore", "github.com/garyburd/go-oauth", true},
{"https://github.com/garyburd/gddo/issues/154", "github.com/garyburd/gddo", true},
{"https://bitbucket.org/user/repo/src/bd0b661a263e/p1/p2?at=default", "bitbucket.org/user/repo/p1/p2", true},
{"https://bitbucket.org/user/repo/src", "bitbucket.org/user/repo", true},
{"https://bitbucket.org/user/repo", "bitbucket.org/user/repo", true},
{"https://github.com/user/repo", "github.com/user/repo", true},
{"https://github.com/user/repo/tree/master/p1", "github.com/user/repo/p1", true},
{"http://code.google.com/p/project", "code.google.com/p/project", true},
}
func TestIsBrowseURL(t *testing.T) {
for _, tt := range isBrowseURLTests {
importPath, ok := isBrowseURL(tt.s)
if tt.ok {
if importPath != tt.importPath || ok != true {
t.Errorf("IsBrowseURL(%q) = %q, %v; want %q %v", tt.s, importPath, ok, tt.importPath, true)
}
} else if ok {
t.Errorf("IsBrowseURL(%q) = %q, %v; want _, false", tt.s, importPath, ok)
}
}
}

60
vendor/github.com/golang/gddo/gddo-server/client.go generated vendored Normal file
View File

@@ -0,0 +1,60 @@
// Copyright 2017 The Go Authors. All rights reserved.
//
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file or at
// https://developers.google.com/open-source/licenses/bsd.
// This file implements an http.Client with request timeouts set by command
// line flags.
package main
import (
"fmt"
"net"
"net/http"
"os"
"github.com/gregjones/httpcache"
"github.com/gregjones/httpcache/memcache"
"github.com/spf13/viper"
"github.com/golang/gddo/httputil"
)
func newHTTPClient() *http.Client {
t := newCacheTransport()
requestTimeout := viper.GetDuration(ConfigRequestTimeout)
t.Transport = &http.Transport{
Proxy: http.ProxyFromEnvironment,
Dial: (&net.Dialer{
Timeout: viper.GetDuration(ConfigDialTimeout),
KeepAlive: requestTimeout / 2,
}).Dial,
ResponseHeaderTimeout: requestTimeout / 2,
TLSHandshakeTimeout: requestTimeout / 2,
}
return &http.Client{
// Wrap the cached transport with GitHub authentication.
Transport: httputil.NewAuthTransport(t),
Timeout: requestTimeout,
}
}
func newCacheTransport() *httpcache.Transport {
// The memcached host and port are set by the GAE Flex runtime; they can be left blank locally.
host := os.Getenv("MEMCACHE_PORT_11211_TCP_ADDR")
if host == "" {
host = "localhost"
}
port := os.Getenv("MEMCACHE_PORT_11211_TCP_PORT")
if port == "" {
port = "11211"
}
addr := fmt.Sprintf("%s:%s", host, port)
return httpcache.NewTransport(
memcache.New(addr),
)
}
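For illustration, a minimal, self-contained sketch of the same caching-client construction, with the viper-configured timeouts replaced by hard-coded values and a memcache instance assumed at localhost:11211; the GitHub auth wrapping is omitted:
package main

import (
	"fmt"
	"net"
	"net/http"
	"time"

	"github.com/gregjones/httpcache"
	"github.com/gregjones/httpcache/memcache"
)

func main() {
	// Cache HTTP responses in memcached; repeat fetches of unchanged
	// resources become cheap conditional requests.
	t := httpcache.NewTransport(memcache.New("localhost:11211"))
	t.Transport = &http.Transport{
		Proxy: http.ProxyFromEnvironment,
		Dial:  (&net.Dialer{Timeout: 5 * time.Second}).Dial,
	}
	client := &http.Client{Transport: t, Timeout: 20 * time.Second}

	resp, err := client.Get("https://api.github.com/")
	if err != nil {
		fmt.Println("request failed:", err)
		return
	}
	defer resp.Body.Close()
	fmt.Println("status:", resp.Status)
}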

127
vendor/github.com/golang/gddo/gddo-server/config.go generated vendored Normal file
View File

@@ -0,0 +1,127 @@
package main
import (
"context"
"os"
"path/filepath"
"strings"
"time"
"github.com/golang/gddo/database"
"github.com/golang/gddo/log"
"github.com/spf13/pflag"
"github.com/spf13/viper"
)
const (
gaeProjectEnvVar = "GAE_LONG_APP_ID"
)
const (
ConfigMaxAge = "max_age"
ConfigGetTimeout = "get_timeout"
ConfigRobotThreshold = "robot"
ConfigAssetsDir = "assets"
ConfigFirstGetTimeout = "first_get_timeout"
ConfigBindAddress = "http"
ConfigProject = "project"
ConfigTrustProxyHeaders = "trust_proxy_headers"
ConfigSidebar = "sidebar"
ConfigDefaultGOOS = "default_goos"
ConfigSourcegraphURL = "sourcegraph_url"
ConfigGithubInterval = "github_interval"
ConfigCrawlInterval = "crawl_interval"
ConfigDialTimeout = "dial_timeout"
ConfigRequestTimeout = "request_timeout"
)
// Initialize configuration
func init() {
ctx := context.Background()
// Automatically detect if we are on App Engine.
if os.Getenv(gaeProjectEnvVar) != "" {
viper.Set("on_appengine", true)
} else {
viper.Set("on_appengine", false)
}
// Set up command-line flags
flags := buildFlags()
flags.Parse(os.Args)
if err := viper.BindPFlags(flags); err != nil {
panic(err)
}
// Also fetch values from the environment
viper.SetEnvPrefix("gddo")
viper.SetEnvKeyReplacer(strings.NewReplacer("-", "_"))
viper.AutomaticEnv()
// Automatically get project ID from env on Google App Engine
viper.BindEnv(ConfigProject, gaeProjectEnvVar)
// Read from config.
readViperConfig(ctx)
log.Info(ctx, "config values loaded", "values", viper.AllSettings())
}
func buildFlags() *pflag.FlagSet {
flags := pflag.NewFlagSet("default", pflag.ExitOnError)
flags.StringP("config", "c", "", "path to gddo config file")
flags.String("project", "", "Google Cloud Platform project used for Google services")
// TODO(stephenmw): flags.Bool("enable-admin-pages", false, "When true, enables /admin pages")
flags.Float64(ConfigRobotThreshold, 100, "Request counter threshold for robots.")
flags.String(ConfigAssetsDir, filepath.Join(defaultBase("github.com/golang/gddo/gddo-server"), "assets"), "Base directory for templates and static files.")
flags.Duration(ConfigGetTimeout, 8*time.Second, "Time to wait for package update from the VCS.")
flags.Duration(ConfigFirstGetTimeout, 5*time.Second, "Time to wait for first fetch of package from the VCS.")
flags.Duration(ConfigMaxAge, 24*time.Hour, "Update package documents older than this age.")
flags.String(ConfigBindAddress, ":8080", "Listen for HTTP connections on this address.")
flags.Bool(ConfigSidebar, false, "Enable package page sidebar.")
flags.String(ConfigDefaultGOOS, "", "Default GOOS to use when building package documents.")
flags.Bool(ConfigTrustProxyHeaders, false, "If enabled, identify the remote address of the request using the X-Real-Ip header.")
flags.String(ConfigSourcegraphURL, "https://sourcegraph.com", "Link to global uses on Sourcegraph based at this URL (no need for trailing slash).")
flags.Duration(ConfigGithubInterval, 0, "Github updates crawler sleeps for this duration between fetches. Zero disables the crawler.")
flags.Duration(ConfigCrawlInterval, 0, "Package updater sleeps for this duration between package updates. Zero disables updates.")
flags.Duration(ConfigDialTimeout, 5*time.Second, "Timeout for dialing an HTTP connection.")
flags.Duration(ConfigRequestTimeout, 20*time.Second, "Time out for roundtripping an HTTP request.")
// TODO(stephenmw): pass these variables at database creation time.
flags.StringVar(&database.RedisServer, "db-server", database.RedisServer, "URI of Redis server.")
flags.DurationVar(&database.RedisIdleTimeout, "db-idle-timeout", database.RedisIdleTimeout, "Close Redis connections after remaining idle for this duration.")
flags.BoolVar(&database.RedisLog, "db-log", database.RedisLog, "Log database commands")
return flags
}
// readViperConfig finds and then parses a config file. It calls log.Fatal if a
// config file was explicitly specified but could not be loaded, or if one was found
// but could not be parsed. Otherwise it only warns that it failed to load a config.
func readViperConfig(ctx context.Context) {
viper.AddConfigPath(".")
viper.AddConfigPath("/etc")
viper.SetConfigName("gddo")
if viper.GetString("config") != "" {
viper.SetConfigFile(viper.GetString("config"))
}
if err := viper.ReadInConfig(); err != nil {
// If a config exists but could not be parsed, we should bail.
if _, ok := err.(viper.ConfigParseError); ok {
log.Fatal(ctx, "failed to parse config", "error", err)
}
// If the user specified a config file location in flags or env and
// we failed to load it, we should bail. If not, it is just a warning.
if viper.GetString("config") != "" {
log.Fatal(ctx, "failed to load configuration file", "error", err)
} else {
log.Warn(ctx, "failed to load configuration file", "error", err)
}
} else {
log.Info(ctx, "loaded configuration file successfully", "path", viper.ConfigFileUsed())
}
}
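As a rough standalone sketch of the precedence this wiring gives one setting (an explicit --request_timeout flag wins, then a GDDO_REQUEST_TIMEOUT environment variable, then the flag default; the config-file layer is omitted and the names are only illustrative):
package main

import (
	"fmt"
	"os"
	"strings"
	"time"

	"github.com/spf13/pflag"
	"github.com/spf13/viper"
)

func main() {
	flags := pflag.NewFlagSet("example", pflag.ExitOnError)
	flags.Duration("request_timeout", 20*time.Second, "Time out for roundtripping an HTTP request.")
	flags.Parse(os.Args[1:])
	if err := viper.BindPFlags(flags); err != nil {
		panic(err)
	}

	// GDDO_REQUEST_TIMEOUT=30s is consulted when the flag is left at its
	// default, mirroring the env prefix and key replacer set up above.
	viper.SetEnvPrefix("gddo")
	viper.SetEnvKeyReplacer(strings.NewReplacer("-", "_"))
	viper.AutomaticEnv()

	fmt.Println("request_timeout:", viper.GetDuration("request_timeout"))
}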

144
vendor/github.com/golang/gddo/gddo-server/crawl.go generated vendored Normal file
View File

@@ -0,0 +1,144 @@
// Copyright 2013 The Go Authors. All rights reserved.
//
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file or at
// https://developers.google.com/open-source/licenses/bsd.
package main
import (
"fmt"
"log"
"regexp"
"strings"
"time"
"github.com/spf13/viper"
"github.com/golang/gddo/doc"
"github.com/golang/gddo/gosrc"
)
var testdataPat = regexp.MustCompile(`/testdata(?:/|$)`)
// crawlDoc fetches the package documentation from the VCS and updates the database.
func crawlDoc(source string, importPath string, pdoc *doc.Package, hasSubdirs bool, nextCrawl time.Time) (*doc.Package, error) {
message := []interface{}{source}
defer func() {
message = append(message, importPath)
log.Println(message...)
}()
if !nextCrawl.IsZero() {
d := time.Since(nextCrawl) / time.Hour
if d > 0 {
message = append(message, "late:", int64(d))
}
}
etag := ""
if pdoc != nil {
etag = pdoc.Etag
message = append(message, "etag:", etag)
}
start := time.Now()
var err error
if strings.HasPrefix(importPath, "code.google.com/p/go.") {
// Old import path for Go sub-repository.
pdoc = nil
err = gosrc.NotFoundError{Message: "old Go sub-repo", Redirect: "golang.org/x/" + importPath[len("code.google.com/p/go."):]}
} else if blocked, e := db.IsBlocked(importPath); blocked && e == nil {
pdoc = nil
err = gosrc.NotFoundError{Message: "blocked."}
} else if testdataPat.MatchString(importPath) {
pdoc = nil
err = gosrc.NotFoundError{Message: "testdata."}
} else {
var pdocNew *doc.Package
pdocNew, err = doc.Get(httpClient, importPath, etag)
message = append(message, "fetch:", int64(time.Since(start)/time.Millisecond))
if err == nil && pdocNew.Name == "" && !hasSubdirs {
for _, e := range pdocNew.Errors {
message = append(message, "err:", e)
}
pdoc = nil
err = gosrc.NotFoundError{Message: "no Go files or subdirs"}
} else if _, ok := err.(gosrc.NotModifiedError); !ok {
pdoc = pdocNew
}
}
maxAge := viper.GetDuration(ConfigMaxAge)
nextCrawl = start.Add(maxAge)
switch {
case strings.HasPrefix(importPath, "github.com/") || (pdoc != nil && len(pdoc.Errors) > 0):
nextCrawl = start.Add(maxAge * 7)
case strings.HasPrefix(importPath, "gist.github.com/"):
// Don't spend time on gists. It's a silly thing to do.
nextCrawl = start.Add(maxAge * 30)
}
if err == nil {
message = append(message, "put:", pdoc.Etag)
if err := put(pdoc, nextCrawl); err != nil {
log.Println(err)
}
return pdoc, nil
} else if e, ok := err.(gosrc.NotModifiedError); ok {
if pdoc.Status == gosrc.Active && !isActivePkg(importPath, e.Status) {
if e.Status == gosrc.NoRecentCommits {
e.Status = gosrc.Inactive
}
message = append(message, "archive", e)
pdoc.Status = e.Status
if err := db.Put(pdoc, nextCrawl, false); err != nil {
log.Printf("ERROR db.Put(%q): %v", importPath, err)
}
} else {
// Touch the package without updating and move on to next one.
message = append(message, "touch")
if err := db.SetNextCrawl(importPath, nextCrawl); err != nil {
log.Printf("ERROR db.SetNextCrawl(%q): %v", importPath, err)
}
}
return pdoc, nil
} else if e, ok := err.(gosrc.NotFoundError); ok {
message = append(message, "notfound:", e)
if err := db.Delete(importPath); err != nil {
log.Printf("ERROR db.Delete(%q): %v", importPath, err)
}
return nil, e
} else {
message = append(message, "ERROR:", err)
return nil, err
}
}
func put(pdoc *doc.Package, nextCrawl time.Time) error {
if pdoc.Status == gosrc.NoRecentCommits &&
isActivePkg(pdoc.ImportPath, gosrc.NoRecentCommits) {
pdoc.Status = gosrc.Active
}
if err := db.Put(pdoc, nextCrawl, false); err != nil {
return fmt.Errorf("ERROR db.Put(%q): %v", pdoc.ImportPath, err)
}
return nil
}
// isActivePkg reports whether a package is considered active,
// either because its directory is active or because it is imported by another package.
func isActivePkg(pkg string, status gosrc.DirectoryStatus) bool {
switch status {
case gosrc.Active:
return true
case gosrc.NoRecentCommits:
// It should be considered inactive only if it also has no importers.
n, err := db.ImporterCount(pkg)
if err != nil {
log.Printf("ERROR db.ImporterCount(%q): %v", pkg, err)
}
return n > 0
}
return false
}
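To make the recrawl scheduling concrete, a small standalone illustration assuming the 24h max_age default from config.go and ignoring the package-errors case:
package main

import (
	"fmt"
	"strings"
	"time"
)

// nextCrawlDelay mirrors the switch in crawlDoc in simplified form.
func nextCrawlDelay(importPath string, maxAge time.Duration) time.Duration {
	switch {
	case strings.HasPrefix(importPath, "gist.github.com/"):
		return maxAge * 30
	case strings.HasPrefix(importPath, "github.com/"):
		return maxAge * 7
	default:
		return maxAge
	}
}

func main() {
	const maxAge = 24 * time.Hour
	for _, p := range []string{
		"github.com/golang/gddo",
		"gist.github.com/somegist",
		"example.org/some/pkg",
	} {
		fmt.Printf("%-28s recrawl in %v\n", p, nextCrawlDelay(p, maxAge))
	}
}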

48
vendor/github.com/golang/gddo/gddo-server/graph.go generated vendored Normal file
View File

@@ -0,0 +1,48 @@
// Copyright 2013 The Go Authors. All rights reserved.
//
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file or at
// https://developers.google.com/open-source/licenses/bsd.
package main
import (
"bytes"
"errors"
"fmt"
"os/exec"
"strings"
"github.com/golang/gddo/database"
"github.com/golang/gddo/doc"
)
func renderGraph(pdoc *doc.Package, pkgs []database.Package, edges [][2]int) ([]byte, error) {
var in, out bytes.Buffer
fmt.Fprintf(&in, "digraph %s { \n", pdoc.Name)
for i, pkg := range pkgs {
fmt.Fprintf(&in, " n%d [label=\"%s\", URL=\"/%s\", tooltip=\"%s\"];\n",
i, pkg.Path, pkg.Path,
strings.Replace(pkg.Synopsis, `"`, `\"`, -1))
}
for _, edge := range edges {
fmt.Fprintf(&in, " n%d -> n%d;\n", edge[0], edge[1])
}
in.WriteString("}")
cmd := exec.Command("dot", "-Tsvg")
cmd.Stdin = &in
cmd.Stdout = &out
if err := cmd.Run(); err != nil {
return nil, err
}
p := out.Bytes()
i := bytes.Index(p, []byte("<svg"))
if i < 0 {
return nil, errors.New("<svg not found")
}
p = p[i:]
return p, nil
}
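A hypothetical caller, sketched as a test in the same package; the package values are invented, and Graphviz's dot must be on PATH (the Dockerfile installs graphviz):
package main

import (
	"testing"

	"github.com/golang/gddo/database"
	"github.com/golang/gddo/doc"
)

func TestRenderGraphSketch(t *testing.T) {
	pdoc := &doc.Package{Name: "bar"}
	pkgs := []database.Package{
		{Path: "github.com/foo/bar", Synopsis: "Package bar does things."},
		{Path: "github.com/foo/baz", Synopsis: "Package baz helps bar."},
	}
	edges := [][2]int{{0, 1}} // bar -> baz
	svg, err := renderGraph(pdoc, pkgs, edges)
	if err != nil {
		t.Skipf("dot unavailable or failed: %v", err)
	}
	if len(svg) == 0 {
		t.Error("expected non-empty SVG output")
	}
}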

77
vendor/github.com/golang/gddo/gddo-server/logging.go generated vendored Normal file
View File

@@ -0,0 +1,77 @@
// Copyright 2016 The Go Authors. All rights reserved.
//
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file or at
// https://developers.google.com/open-source/licenses/bsd.
package main
import (
"crypto/rand"
"encoding/hex"
"log"
"net/http"
"time"
"cloud.google.com/go/logging"
"github.com/golang/gddo/database"
)
// newGCELogger returns a handler that wraps h but logs each request
// using Google Cloud Logging service.
func newGCELogger(cli *logging.Logger) *GCELogger {
return &GCELogger{cli}
}
type GCELogger struct {
cli *logging.Logger
}
// LogEvent creates an entry in Cloud Logging to record the user's behavior. We should
// only use this to log events we are interested in. General request logs are handled
// automatically by GAE in request_log and stderr.
func (g *GCELogger) LogEvent(w http.ResponseWriter, r *http.Request, content interface{}) {
const sessionCookieName = "GODOC_ORG_SESSION_ID"
cookie, err := r.Cookie(sessionCookieName)
if err != nil {
// Generates a random session id and sends it in response.
rs, err := randomString()
if err != nil {
log.Println("error generating a random session id: ", err)
return
}
// This cookie is intentionally short-lived and contains no information
// that might identify the user. Its sole purpose is to tie query
// terms and destination pages together to measure search quality.
cookie = &http.Cookie{
Name: sessionCookieName,
Value: rs,
Expires: time.Now().Add(time.Hour),
}
http.SetCookie(w, cookie)
}
// We must not record the client's IP address, or any other information
// that might compromise the user's privacy.
payload := map[string]interface{}{
sessionCookieName: cookie.Value,
"path": r.URL.RequestURI(),
"method": r.Method,
"referer": r.Referer(),
}
if pkgs, ok := content.([]database.Package); ok {
payload["packages"] = pkgs
}
// Log queues the entry in its internal buffer, or discards the entry
// if the buffer is full.
g.cli.Log(logging.Entry{
Payload: payload,
})
}
func randomString() (string, error) {
b := make([]byte, 8)
_, err := rand.Read(b)
return hex.EncodeToString(b), err
}

997
vendor/github.com/golang/gddo/gddo-server/main.go generated vendored Normal file
View File

@@ -0,0 +1,997 @@
// Copyright 2017 The Go Authors. All rights reserved.
//
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file or at
// https://developers.google.com/open-source/licenses/bsd.
// Command gddo-server is the GoPkgDoc server.
package main
import (
"bytes"
"crypto/md5"
"encoding/json"
"errors"
"fmt"
"go/build"
"html/template"
"io"
"log"
"net/http"
"os"
"path"
"regexp"
"runtime/debug"
"sort"
"strconv"
"strings"
"time"
"cloud.google.com/go/compute/metadata"
"cloud.google.com/go/logging"
"github.com/spf13/viper"
"golang.org/x/net/context"
"google.golang.org/appengine"
"github.com/golang/gddo/database"
"github.com/golang/gddo/doc"
"github.com/golang/gddo/gosrc"
"github.com/golang/gddo/httputil"
)
const (
jsonMIMEType = "application/json; charset=utf-8"
textMIMEType = "text/plain; charset=utf-8"
htmlMIMEType = "text/html; charset=utf-8"
)
var errUpdateTimeout = errors.New("refresh timeout")
type httpError struct {
status int // HTTP status code.
err error // Optional reason for the HTTP error.
}
func (err *httpError) Error() string {
if err.err != nil {
return fmt.Sprintf("status %d, reason %s", err.status, err.err.Error())
}
return fmt.Sprintf("Status %d", err.status)
}
const (
humanRequest = iota
robotRequest
queryRequest
refreshRequest
apiRequest
)
type crawlResult struct {
pdoc *doc.Package
err error
}
// getDoc gets the package documentation from the database or from the version
// control system as needed.
func getDoc(path string, requestType int) (*doc.Package, []database.Package, error) {
if path == "-" {
// A hack in the database package uses the path "-" to represent the
// next document to crawl. Block "-" here so that requests to /- always
// return not found.
return nil, nil, &httpError{status: http.StatusNotFound}
}
pdoc, pkgs, nextCrawl, err := db.Get(path)
if err != nil {
return nil, nil, err
}
needsCrawl := false
switch requestType {
case queryRequest, apiRequest:
needsCrawl = nextCrawl.IsZero() && len(pkgs) == 0
case humanRequest:
needsCrawl = nextCrawl.Before(time.Now())
case robotRequest:
needsCrawl = nextCrawl.IsZero() && len(pkgs) > 0
}
if !needsCrawl {
return pdoc, pkgs, nil
}
c := make(chan crawlResult, 1)
go func() {
pdoc, err := crawlDoc("web ", path, pdoc, len(pkgs) > 0, nextCrawl)
c <- crawlResult{pdoc, err}
}()
timeout := viper.GetDuration(ConfigGetTimeout)
if pdoc == nil {
timeout = viper.GetDuration(ConfigFirstGetTimeout)
}
select {
case cr := <-c:
err = cr.err
if err == nil {
pdoc = cr.pdoc
}
case <-time.After(timeout):
err = errUpdateTimeout
}
switch {
case err == nil:
return pdoc, pkgs, nil
case gosrc.IsNotFound(err):
return nil, nil, err
case pdoc != nil:
log.Printf("Serving %q from database after error getting doc: %v", path, err)
return pdoc, pkgs, nil
case err == errUpdateTimeout:
log.Printf("Serving %q as not found after timeout getting doc", path)
return nil, nil, &httpError{status: http.StatusNotFound}
default:
return nil, nil, err
}
}
func templateExt(req *http.Request) string {
if httputil.NegotiateContentType(req, []string{"text/html", "text/plain"}, "text/html") == "text/plain" {
return ".txt"
}
return ".html"
}
var (
robotPat = regexp.MustCompile(`(:?\+https?://)|(?:\Wbot\W)|(?:^Python-urllib)|(?:^Go )|(?:^Java/)`)
)
func isRobot(req *http.Request) bool {
if robotPat.MatchString(req.Header.Get("User-Agent")) {
return true
}
host := httputil.StripPort(req.RemoteAddr)
n, err := db.IncrementCounter(host, 1)
if err != nil {
log.Printf("error incrementing counter for %s, %v", host, err)
return false
}
if n > viper.GetFloat64(ConfigRobotThreshold) {
log.Printf("robot %.2f %s %s", n, host, req.Header.Get("User-Agent"))
return true
}
return false
}
func popularLinkReferral(req *http.Request) bool {
return strings.HasSuffix(req.Header.Get("Referer"), "//"+req.Host+"/")
}
func isView(req *http.Request, key string) bool {
rq := req.URL.RawQuery
return strings.HasPrefix(rq, key) &&
(len(rq) == len(key) || rq[len(key)] == '=' || rq[len(key)] == '&')
}
// httpEtag returns the package entity tag used in HTTP transactions.
func httpEtag(pdoc *doc.Package, pkgs []database.Package, importerCount int, flashMessages []flashMessage) string {
b := make([]byte, 0, 128)
b = strconv.AppendInt(b, pdoc.Updated.Unix(), 16)
b = append(b, 0)
b = append(b, pdoc.Etag...)
if importerCount >= 8 {
importerCount = 8
}
b = append(b, 0)
b = strconv.AppendInt(b, int64(importerCount), 16)
for _, pkg := range pkgs {
b = append(b, 0)
b = append(b, pkg.Path...)
b = append(b, 0)
b = append(b, pkg.Synopsis...)
}
if viper.GetBool(ConfigSidebar) {
b = append(b, "\000xsb"...)
}
for _, m := range flashMessages {
b = append(b, 0)
b = append(b, m.ID...)
for _, a := range m.Args {
b = append(b, 1)
b = append(b, a...)
}
}
h := md5.New()
h.Write(b)
b = h.Sum(b[:0])
return fmt.Sprintf("\"%x\"", b)
}
func servePackage(resp http.ResponseWriter, req *http.Request) error {
p := path.Clean(req.URL.Path)
if strings.HasPrefix(p, "/pkg/") {
p = p[len("/pkg"):]
}
if p != req.URL.Path {
http.Redirect(resp, req, p, http.StatusMovedPermanently)
return nil
}
if isView(req, "status.svg") {
statusImageHandlerSVG.ServeHTTP(resp, req)
return nil
}
if isView(req, "status.png") {
statusImageHandlerPNG.ServeHTTP(resp, req)
return nil
}
requestType := humanRequest
if isRobot(req) {
requestType = robotRequest
}
importPath := strings.TrimPrefix(req.URL.Path, "/")
pdoc, pkgs, err := getDoc(importPath, requestType)
if e, ok := err.(gosrc.NotFoundError); ok && e.Redirect != "" {
// To prevent dumb clients from following redirect loops, respond with
// status 404 if the target document is not found.
if _, _, err := getDoc(e.Redirect, requestType); gosrc.IsNotFound(err) {
return &httpError{status: http.StatusNotFound}
}
u := "/" + e.Redirect
if req.URL.RawQuery != "" {
u += "?" + req.URL.RawQuery
}
setFlashMessages(resp, []flashMessage{{ID: "redir", Args: []string{importPath}}})
http.Redirect(resp, req, u, http.StatusFound)
return nil
}
if err != nil {
return err
}
flashMessages := getFlashMessages(resp, req)
if pdoc == nil {
if len(pkgs) == 0 {
return &httpError{status: http.StatusNotFound}
}
pdocChild, _, _, err := db.Get(pkgs[0].Path)
if err != nil {
return err
}
pdoc = &doc.Package{
ProjectName: pdocChild.ProjectName,
ProjectRoot: pdocChild.ProjectRoot,
ProjectURL: pdocChild.ProjectURL,
ImportPath: importPath,
}
}
switch {
case len(req.Form) == 0:
importerCount := 0
if pdoc.Name != "" {
importerCount, err = db.ImporterCount(importPath)
if err != nil {
return err
}
}
etag := httpEtag(pdoc, pkgs, importerCount, flashMessages)
status := http.StatusOK
if req.Header.Get("If-None-Match") == etag {
status = http.StatusNotModified
}
if requestType == humanRequest &&
pdoc.Name != "" && // not a directory
pdoc.ProjectRoot != "" && // not a standard package
!pdoc.IsCmd &&
len(pdoc.Errors) == 0 &&
!popularLinkReferral(req) {
if err := db.IncrementPopularScore(pdoc.ImportPath); err != nil {
log.Printf("ERROR db.IncrementPopularScore(%s): %v", pdoc.ImportPath, err)
}
}
if gceLogger != nil {
gceLogger.LogEvent(resp, req, nil)
}
template := "dir"
switch {
case pdoc.IsCmd:
template = "cmd"
case pdoc.Name != "":
template = "pkg"
}
template += templateExt(req)
return executeTemplate(resp, template, status, http.Header{"Etag": {etag}}, map[string]interface{}{
"flashMessages": flashMessages,
"pkgs": pkgs,
"pdoc": newTDoc(pdoc),
"importerCount": importerCount,
})
case isView(req, "imports"):
if pdoc.Name == "" {
break
}
pkgs, err = db.Packages(pdoc.Imports)
if err != nil {
return err
}
return executeTemplate(resp, "imports.html", http.StatusOK, nil, map[string]interface{}{
"flashMessages": flashMessages,
"pkgs": pkgs,
"pdoc": newTDoc(pdoc),
})
case isView(req, "tools"):
proto := "http"
if req.Host == "godoc.org" {
proto = "https"
}
return executeTemplate(resp, "tools.html", http.StatusOK, nil, map[string]interface{}{
"flashMessages": flashMessages,
"uri": fmt.Sprintf("%s://%s/%s", proto, req.Host, importPath),
"pdoc": newTDoc(pdoc),
})
case isView(req, "importers"):
if pdoc.Name == "" {
break
}
pkgs, err = db.Importers(importPath)
if err != nil {
return err
}
template := "importers.html"
if requestType == robotRequest {
// Hide back links from robots.
template = "importers_robot.html"
}
return executeTemplate(resp, template, http.StatusOK, nil, map[string]interface{}{
"flashMessages": flashMessages,
"pkgs": pkgs,
"pdoc": newTDoc(pdoc),
})
case isView(req, "import-graph"):
if requestType == robotRequest {
return &httpError{status: http.StatusForbidden}
}
if pdoc.Name == "" {
break
}
hide := database.ShowAllDeps
switch req.Form.Get("hide") {
case "1":
hide = database.HideStandardDeps
case "2":
hide = database.HideStandardAll
}
pkgs, edges, err := db.ImportGraph(pdoc, hide)
if err != nil {
return err
}
b, err := renderGraph(pdoc, pkgs, edges)
if err != nil {
return err
}
return executeTemplate(resp, "graph.html", http.StatusOK, nil, map[string]interface{}{
"flashMessages": flashMessages,
"svg": template.HTML(b),
"pdoc": newTDoc(pdoc),
"hide": hide,
})
case isView(req, "play"):
u, err := playURL(pdoc, req.Form.Get("play"), req.Header.Get("X-AppEngine-Country"))
if err != nil {
return err
}
http.Redirect(resp, req, u, http.StatusMovedPermanently)
return nil
case req.Form.Get("view") != "":
// Redirect deprecated view= queries.
var q string
switch view := req.Form.Get("view"); view {
case "imports", "importers":
q = view
case "import-graph":
if req.Form.Get("hide") == "1" {
q = "import-graph&hide=1"
} else {
q = "import-graph"
}
}
if q != "" {
u := *req.URL
u.RawQuery = q
http.Redirect(resp, req, u.String(), http.StatusMovedPermanently)
return nil
}
}
return &httpError{status: http.StatusNotFound}
}
func serveRefresh(resp http.ResponseWriter, req *http.Request) error {
importPath := req.Form.Get("path")
_, pkgs, _, err := db.Get(importPath)
if err != nil {
return err
}
c := make(chan error, 1)
go func() {
_, err := crawlDoc("rfrsh", importPath, nil, len(pkgs) > 0, time.Time{})
c <- err
}()
select {
case err = <-c:
case <-time.After(viper.GetDuration(ConfigGetTimeout)):
err = errUpdateTimeout
}
if e, ok := err.(gosrc.NotFoundError); ok && e.Redirect != "" {
setFlashMessages(resp, []flashMessage{{ID: "redir", Args: []string{importPath}}})
importPath = e.Redirect
err = nil
} else if err != nil {
setFlashMessages(resp, []flashMessage{{ID: "refresh", Args: []string{errorText(err)}}})
}
http.Redirect(resp, req, "/"+importPath, http.StatusFound)
return nil
}
func serveGoIndex(resp http.ResponseWriter, req *http.Request) error {
pkgs, err := db.GoIndex()
if err != nil {
return err
}
return executeTemplate(resp, "std.html", http.StatusOK, nil, map[string]interface{}{
"pkgs": pkgs,
})
}
func serveGoSubrepoIndex(resp http.ResponseWriter, req *http.Request) error {
pkgs, err := db.GoSubrepoIndex()
if err != nil {
return err
}
return executeTemplate(resp, "subrepo.html", http.StatusOK, nil, map[string]interface{}{
"pkgs": pkgs,
})
}
func runReindex(resp http.ResponseWriter, req *http.Request) {
fmt.Fprintln(resp, "Reindexing...")
go reindex()
}
func runPurgeIndex(resp http.ResponseWriter, req *http.Request) {
fmt.Fprintln(resp, "Purging the search index...")
go purgeIndex()
}
type byPath struct {
pkgs []database.Package
rank []int
}
func (bp *byPath) Len() int { return len(bp.pkgs) }
func (bp *byPath) Less(i, j int) bool { return bp.pkgs[i].Path < bp.pkgs[j].Path }
func (bp *byPath) Swap(i, j int) {
bp.pkgs[i], bp.pkgs[j] = bp.pkgs[j], bp.pkgs[i]
bp.rank[i], bp.rank[j] = bp.rank[j], bp.rank[i]
}
type byRank struct {
pkgs []database.Package
rank []int
}
func (br *byRank) Len() int { return len(br.pkgs) }
func (br *byRank) Less(i, j int) bool { return br.rank[i] < br.rank[j] }
func (br *byRank) Swap(i, j int) {
br.pkgs[i], br.pkgs[j] = br.pkgs[j], br.pkgs[i]
br.rank[i], br.rank[j] = br.rank[j], br.rank[i]
}
func popular() ([]database.Package, error) {
const n = 25
pkgs, err := db.Popular(2 * n)
if err != nil {
return nil, err
}
rank := make([]int, len(pkgs))
for i := range pkgs {
rank[i] = i
}
sort.Sort(&byPath{pkgs, rank})
j := 0
prev := "."
for i, pkg := range pkgs {
if strings.HasPrefix(pkg.Path, prev) {
if rank[j-1] < rank[i] {
rank[j-1] = rank[i]
}
continue
}
prev = pkg.Path + "/"
pkgs[j] = pkg
rank[j] = rank[i]
j++
}
pkgs = pkgs[:j]
sort.Sort(&byRank{pkgs, rank})
if len(pkgs) > n {
pkgs = pkgs[:n]
}
sort.Sort(&byPath{pkgs, rank})
return pkgs, nil
}
func serveHome(resp http.ResponseWriter, req *http.Request) error {
if req.URL.Path != "/" {
return servePackage(resp, req)
}
q := strings.TrimSpace(req.Form.Get("q"))
if q == "" {
pkgs, err := popular()
if err != nil {
return err
}
return executeTemplate(resp, "home"+templateExt(req), http.StatusOK, nil,
map[string]interface{}{"Popular": pkgs})
}
if path, ok := isBrowseURL(q); ok {
q = path
}
if gosrc.IsValidRemotePath(q) || (strings.Contains(q, "/") && gosrc.IsGoRepoPath(q)) {
pdoc, pkgs, err := getDoc(q, queryRequest)
if e, ok := err.(gosrc.NotFoundError); ok && e.Redirect != "" {
http.Redirect(resp, req, "/"+e.Redirect, http.StatusFound)
return nil
}
if err == nil && (pdoc != nil || len(pkgs) > 0) {
http.Redirect(resp, req, "/"+q, http.StatusFound)
return nil
}
}
ctx := appengine.NewContext(req)
pkgs, err := database.Search(ctx, q)
if err != nil {
return err
}
if gceLogger != nil {
// Log up to top 10 packages we served upon a search.
logPkgs := pkgs
if len(pkgs) > 10 {
logPkgs = pkgs[:10]
}
gceLogger.LogEvent(resp, req, logPkgs)
}
return executeTemplate(resp, "results"+templateExt(req), http.StatusOK, nil,
map[string]interface{}{"q": q, "pkgs": pkgs})
}
func serveAbout(resp http.ResponseWriter, req *http.Request) error {
return executeTemplate(resp, "about.html", http.StatusOK, nil,
map[string]interface{}{"Host": req.Host})
}
func serveBot(resp http.ResponseWriter, req *http.Request) error {
return executeTemplate(resp, "bot.html", http.StatusOK, nil, nil)
}
func serveHealthCheck(resp http.ResponseWriter, req *http.Request) {
resp.Write([]byte("Health check: ok\n"))
}
func logError(req *http.Request, err error, rv interface{}) {
if err != nil {
var buf bytes.Buffer
fmt.Fprintf(&buf, "Error serving %s: %v\n", req.URL, err)
if rv != nil {
fmt.Fprintln(&buf, rv)
buf.Write(debug.Stack())
}
log.Print(buf.String())
}
}
func serveAPISearch(resp http.ResponseWriter, req *http.Request) error {
q := strings.TrimSpace(req.Form.Get("q"))
var pkgs []database.Package
if gosrc.IsValidRemotePath(q) || (strings.Contains(q, "/") && gosrc.IsGoRepoPath(q)) {
pdoc, _, err := getDoc(q, apiRequest)
if e, ok := err.(gosrc.NotFoundError); ok && e.Redirect != "" {
pdoc, _, err = getDoc(e.Redirect, robotRequest)
}
if err == nil && pdoc != nil {
pkgs = []database.Package{{Path: pdoc.ImportPath, Synopsis: pdoc.Synopsis}}
}
}
if pkgs == nil {
var err error
ctx := appengine.NewContext(req)
pkgs, err = database.Search(ctx, q)
if err != nil {
return err
}
}
var data = struct {
Results []database.Package `json:"results"`
}{
pkgs,
}
resp.Header().Set("Content-Type", jsonMIMEType)
return json.NewEncoder(resp).Encode(&data)
}
func serveAPIPackages(resp http.ResponseWriter, req *http.Request) error {
pkgs, err := db.AllPackages()
if err != nil {
return err
}
data := struct {
Results []database.Package `json:"results"`
}{
pkgs,
}
resp.Header().Set("Content-Type", jsonMIMEType)
return json.NewEncoder(resp).Encode(&data)
}
func serveAPIImporters(resp http.ResponseWriter, req *http.Request) error {
importPath := strings.TrimPrefix(req.URL.Path, "/importers/")
pkgs, err := db.Importers(importPath)
if err != nil {
return err
}
data := struct {
Results []database.Package `json:"results"`
}{
pkgs,
}
resp.Header().Set("Content-Type", jsonMIMEType)
return json.NewEncoder(resp).Encode(&data)
}
func serveAPIImports(resp http.ResponseWriter, req *http.Request) error {
importPath := strings.TrimPrefix(req.URL.Path, "/imports/")
pdoc, _, err := getDoc(importPath, robotRequest)
if err != nil {
return err
}
if pdoc == nil || pdoc.Name == "" {
return &httpError{status: http.StatusNotFound}
}
imports, err := db.Packages(pdoc.Imports)
if err != nil {
return err
}
testImports, err := db.Packages(pdoc.TestImports)
if err != nil {
return err
}
data := struct {
Imports []database.Package `json:"imports"`
TestImports []database.Package `json:"testImports"`
}{
imports,
testImports,
}
resp.Header().Set("Content-Type", jsonMIMEType)
return json.NewEncoder(resp).Encode(&data)
}
func serveAPIHome(resp http.ResponseWriter, req *http.Request) error {
return &httpError{status: http.StatusNotFound}
}
func runHandler(resp http.ResponseWriter, req *http.Request,
fn func(resp http.ResponseWriter, req *http.Request) error, errfn httputil.Error) {
defer func() {
if rv := recover(); rv != nil {
err := errors.New("handler panic")
logError(req, err, rv)
errfn(resp, req, http.StatusInternalServerError, err)
}
}()
// TODO(stephenmw): choose headers based on if we are on App Engine
if viper.GetBool(ConfigTrustProxyHeaders) {
// If running on GAE, use X-Appengine-User-Ip to identify the real IP of the request.
if s := req.Header.Get("X-Appengine-User-Ip"); s != "" {
req.RemoteAddr = s
} else if s := req.Header.Get("X-Real-Ip"); s != "" {
req.RemoteAddr = s
}
}
req.Body = http.MaxBytesReader(resp, req.Body, 2048)
req.ParseForm()
var rb httputil.ResponseBuffer
err := fn(&rb, req)
if err == nil {
rb.WriteTo(resp)
} else if e, ok := err.(*httpError); ok {
if e.status >= 500 {
logError(req, err, nil)
}
errfn(resp, req, e.status, e.err)
} else if gosrc.IsNotFound(err) {
errfn(resp, req, http.StatusNotFound, nil)
} else {
logError(req, err, nil)
errfn(resp, req, http.StatusInternalServerError, err)
}
}
type handler func(resp http.ResponseWriter, req *http.Request) error
func (h handler) ServeHTTP(resp http.ResponseWriter, req *http.Request) {
runHandler(resp, req, h, handleError)
}
type apiHandler func(resp http.ResponseWriter, req *http.Request) error
func (h apiHandler) ServeHTTP(resp http.ResponseWriter, req *http.Request) {
runHandler(resp, req, h, handleAPIError)
}
func errorText(err error) string {
if err == errUpdateTimeout {
return "Timeout getting package files from the version control system."
}
if e, ok := err.(*gosrc.RemoteError); ok {
return "Error getting package files from " + e.Host + "."
}
return "Internal server error."
}
func handleError(resp http.ResponseWriter, req *http.Request, status int, err error) {
switch status {
case http.StatusNotFound:
executeTemplate(resp, "notfound"+templateExt(req), status, nil, map[string]interface{}{
"flashMessages": getFlashMessages(resp, req),
})
default:
resp.Header().Set("Content-Type", textMIMEType)
resp.WriteHeader(http.StatusInternalServerError)
io.WriteString(resp, errorText(err))
}
}
func handleAPIError(resp http.ResponseWriter, req *http.Request, status int, err error) {
var data struct {
Error struct {
Message string `json:"message"`
} `json:"error"`
}
data.Error.Message = http.StatusText(status)
resp.Header().Set("Content-Type", jsonMIMEType)
resp.WriteHeader(status)
json.NewEncoder(resp).Encode(&data)
}
type rootHandler []struct {
prefix string
h http.Handler
}
func (m rootHandler) ServeHTTP(resp http.ResponseWriter, req *http.Request) {
var h http.Handler
for _, ph := range m {
if strings.HasPrefix(req.Host, ph.prefix) {
h = ph.h
break
}
}
h.ServeHTTP(resp, req)
}
// otherDomainHandler redirects to another domain, keeping the rest of the URL intact.
type otherDomainHandler struct {
scheme string
targetDomain string
}
func (h otherDomainHandler) ServeHTTP(w http.ResponseWriter, req *http.Request) {
u := *req.URL
u.Scheme = h.scheme
u.Host = h.targetDomain
http.Redirect(w, req, u.String(), http.StatusFound)
}
func defaultBase(path string) string {
p, err := build.Default.Import(path, "", build.FindOnly)
if err != nil {
return "."
}
return p.Dir
}
var (
db *database.Database
httpClient *http.Client
statusImageHandlerPNG http.Handler
statusImageHandlerSVG http.Handler
gceLogger *GCELogger
)
func main() {
doc.SetDefaultGOOS(viper.GetString(ConfigDefaultGOOS))
httpClient = newHTTPClient()
var (
gceLogName string
projID string
)
// TODO(stephenmw): merge into viper config infrastructure.
if metadata.OnGCE() {
acct, err := metadata.ProjectAttributeValue("ga-account")
if err != nil {
log.Printf("querying metadata for ga-account: %v", err)
} else {
gaAccount = acct
}
// Get the log name on GCE and setup context for creating a GCE log client.
if name, err := metadata.ProjectAttributeValue("gce-log-name"); err != nil {
log.Printf("querying metadata for gce-log-name: %v", err)
} else {
gceLogName = name
if id, err := metadata.ProjectID(); err != nil {
log.Printf("querying metadata for project ID: %v", err)
} else {
projID = id
}
}
} else {
gaAccount = os.Getenv("GA_ACCOUNT")
}
if err := parseHTMLTemplates([][]string{
{"about.html", "common.html", "layout.html"},
{"bot.html", "common.html", "layout.html"},
{"cmd.html", "common.html", "layout.html"},
{"dir.html", "common.html", "layout.html"},
{"home.html", "common.html", "layout.html"},
{"importers.html", "common.html", "layout.html"},
{"importers_robot.html", "common.html", "layout.html"},
{"imports.html", "common.html", "layout.html"},
{"notfound.html", "common.html", "layout.html"},
{"pkg.html", "common.html", "layout.html"},
{"results.html", "common.html", "layout.html"},
{"tools.html", "common.html", "layout.html"},
{"std.html", "common.html", "layout.html"},
{"subrepo.html", "common.html", "layout.html"},
{"graph.html", "common.html"},
}); err != nil {
log.Fatal(err)
}
if err := parseTextTemplates([][]string{
{"cmd.txt", "common.txt"},
{"dir.txt", "common.txt"},
{"home.txt", "common.txt"},
{"notfound.txt", "common.txt"},
{"pkg.txt", "common.txt"},
{"results.txt", "common.txt"},
}); err != nil {
log.Fatal(err)
}
var err error
db, err = database.New()
if err != nil {
log.Fatalf("Error opening database: %v", err)
}
go runBackgroundTasks()
staticServer := httputil.StaticServer{
Dir: viper.GetString(ConfigAssetsDir),
MaxAge: time.Hour,
MIMETypes: map[string]string{
".css": "text/css; charset=utf-8",
".js": "text/javascript; charset=utf-8",
},
}
statusImageHandlerPNG = staticServer.FileHandler("status.png")
statusImageHandlerSVG = staticServer.FileHandler("status.svg")
apiMux := http.NewServeMux()
apiMux.Handle("/favicon.ico", staticServer.FileHandler("favicon.ico"))
apiMux.Handle("/google3d2f3cd4cc2bb44b.html", staticServer.FileHandler("google3d2f3cd4cc2bb44b.html"))
apiMux.Handle("/humans.txt", staticServer.FileHandler("humans.txt"))
apiMux.Handle("/robots.txt", staticServer.FileHandler("apiRobots.txt"))
apiMux.Handle("/search", apiHandler(serveAPISearch))
apiMux.Handle("/packages", apiHandler(serveAPIPackages))
apiMux.Handle("/importers/", apiHandler(serveAPIImporters))
apiMux.Handle("/imports/", apiHandler(serveAPIImports))
apiMux.Handle("/", apiHandler(serveAPIHome))
mux := http.NewServeMux()
mux.Handle("/-/site.js", staticServer.FilesHandler(
"third_party/jquery.timeago.js",
"site.js"))
mux.Handle("/-/site.css", staticServer.FilesHandler("site.css"))
mux.Handle("/-/bootstrap.min.css", staticServer.FilesHandler("bootstrap.min.css"))
mux.Handle("/-/bootstrap.min.js", staticServer.FilesHandler("bootstrap.min.js"))
mux.Handle("/-/jquery-2.0.3.min.js", staticServer.FilesHandler("jquery-2.0.3.min.js"))
if viper.GetBool(ConfigSidebar) {
mux.Handle("/-/sidebar.css", staticServer.FilesHandler("sidebar.css"))
}
mux.Handle("/-/", http.NotFoundHandler())
mux.Handle("/-/about", handler(serveAbout))
mux.Handle("/-/bot", handler(serveBot))
mux.Handle("/-/go", handler(serveGoIndex))
mux.Handle("/-/subrepo", handler(serveGoSubrepoIndex))
mux.Handle("/-/refresh", handler(serveRefresh))
mux.Handle("/-/admin/reindex", http.HandlerFunc(runReindex))
mux.Handle("/-/admin/purgeindex", http.HandlerFunc(runPurgeIndex))
mux.Handle("/about", http.RedirectHandler("/-/about", http.StatusMovedPermanently))
mux.Handle("/favicon.ico", staticServer.FileHandler("favicon.ico"))
mux.Handle("/google3d2f3cd4cc2bb44b.html", staticServer.FileHandler("google3d2f3cd4cc2bb44b.html"))
mux.Handle("/humans.txt", staticServer.FileHandler("humans.txt"))
mux.Handle("/robots.txt", staticServer.FileHandler("robots.txt"))
mux.Handle("/BingSiteAuth.xml", staticServer.FileHandler("BingSiteAuth.xml"))
mux.Handle("/C", http.RedirectHandler("http://golang.org/doc/articles/c_go_cgo.html", http.StatusMovedPermanently))
mux.Handle("/code.jquery.com/", http.NotFoundHandler())
mux.Handle("/_ah/health", http.HandlerFunc(serveHealthCheck))
mux.Handle("/_ah/", http.NotFoundHandler())
mux.Handle("/", handler(serveHome))
cacheBusters.Handler = mux
var root http.Handler = rootHandler{
{"api.", apiMux},
{"talks.godoc.org", otherDomainHandler{"https", "go-talks.appspot.com"}},
{"www.godoc.org", otherDomainHandler{"https", "godoc.org"}},
{"", mux},
}
if gceLogName != "" {
ctx := context.Background()
logc, err := logging.NewClient(ctx, projID)
if err != nil {
log.Fatalf("Failed to create cloud logging client: %v", err)
}
logger := logc.Logger(gceLogName)
if err := logc.Ping(ctx); err != nil {
log.Fatalf("Failed to ping Google Cloud Logging: %v", err)
}
gceLogger = newGCELogger(logger)
}
http.Handle("/", root)
appengine.Main()
}

33
vendor/github.com/golang/gddo/gddo-server/main_test.go generated vendored Normal file
View File

@@ -0,0 +1,33 @@
// Copyright 2013 The Go Authors. All rights reserved.
//
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file or at
// https://developers.google.com/open-source/licenses/bsd.
package main
import (
"net/http"
"testing"
)
var robotTests = []string{
"Mozilla/5.0 (compatible; TweetedTimes Bot/1.0; +http://tweetedtimes.com)",
"Mozilla/5.0 (compatible; YandexBot/3.0; +http://yandex.com/bots)",
"Mozilla/5.0 (compatible; MJ12bot/v1.4.3; http://www.majestic12.co.uk/bot.php?+)",
"Go 1.1 package http",
"Java/1.7.0_25 0.003 0.003",
"Python-urllib/2.6",
"Mozilla/5.0 (compatible; archive.org_bot +http://www.archive.org/details/archive.org_bot)",
"Mozilla/5.0 (compatible; Ezooms/1.0; ezooms.bot@gmail.com)",
"Mozilla/5.0 (compatible; Googlebot/2.1; +http://www.google.com/bot.html)",
}
func TestRobots(t *testing.T) {
for _, tt := range robotTests {
req := http.Request{Header: http.Header{"User-Agent": {tt}}}
if !isRobot(&req) {
t.Errorf("%s not a robot", tt)
}
}
}

91
vendor/github.com/golang/gddo/gddo-server/play.go generated vendored Normal file
View File

@@ -0,0 +1,91 @@
// Copyright 2013 The Go Authors. All rights reserved.
//
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file or at
// https://developers.google.com/open-source/licenses/bsd.
package main
import (
"fmt"
"io/ioutil"
"net/http"
"regexp"
"strings"
"github.com/golang/gddo/doc"
)
func findExamples(pdoc *doc.Package, export, method string) []*doc.Example {
if "package" == export {
return pdoc.Examples
}
for _, f := range pdoc.Funcs {
if f.Name == export {
return f.Examples
}
}
for _, t := range pdoc.Types {
for _, f := range t.Funcs {
if f.Name == export {
return f.Examples
}
}
if t.Name == export {
if method == "" {
return t.Examples
}
for _, m := range t.Methods {
if method == m.Name {
return m.Examples
}
}
return nil
}
}
return nil
}
func findExample(pdoc *doc.Package, export, method, name string) *doc.Example {
for _, e := range findExamples(pdoc, export, method) {
if name == e.Name {
return e
}
}
return nil
}
var exampleIDPat = regexp.MustCompile(`([^-]+)(?:-([^-]*)(?:-(.*))?)?`)
func playURL(pdoc *doc.Package, id, countryHeader string) (string, error) {
if m := exampleIDPat.FindStringSubmatch(id); m != nil {
if e := findExample(pdoc, m[1], m[2], m[3]); e != nil && e.Play != "" {
req, err := http.NewRequest("POST", "https://play.golang.org/share", strings.NewReader(e.Play))
if err != nil {
return "", err
}
req.Header.Set("Content-Type", "text/plain")
if countryHeader != "" {
// Forward the App Engine country header.
req.Header.Set("X-AppEngine-Country", countryHeader)
}
resp, err := httpClient.Do(req)
if err != nil {
return "", err
}
defer resp.Body.Close()
p, err := ioutil.ReadAll(resp.Body)
if err != nil {
return "", err
}
if resp.StatusCode > 399 {
return "", &httpError{
status: resp.StatusCode,
err: fmt.Errorf("Error from play.golang.org: %s", p),
}
}
return fmt.Sprintf("http://play.golang.org/p/%s", p), nil
}
}
return "", &httpError{status: http.StatusNotFound}
}
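The id parsed here follows the example IDs built by addExamples in template.go: "package", "Func", "Type-Method", "Type-Method-Name", and "Func--Name" for named function examples. A small standalone illustration of how the regexp splits sample IDs:
package main

import (
	"fmt"
	"regexp"
)

var exampleIDPat = regexp.MustCompile(`([^-]+)(?:-([^-]*)(?:-(.*))?)?`)

func main() {
	for _, id := range []string{"package", "Decode", "Decoder-Decode", "Decoder-Decode-Stream", "Decode--Basic"} {
		m := exampleIDPat.FindStringSubmatch(id)
		fmt.Printf("%-24s export=%q method=%q name=%q\n", id, m[1], m[2], m[3])
	}
}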

575
vendor/github.com/golang/gddo/gddo-server/template.go generated vendored Normal file
View File

@@ -0,0 +1,575 @@
// Copyright 2013 The Go Authors. All rights reserved.
//
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file or at
// https://developers.google.com/open-source/licenses/bsd.
package main
import (
"bytes"
"encoding/base64"
"errors"
"fmt"
godoc "go/doc"
htemp "html/template"
"io"
"net/http"
"net/url"
"path"
"path/filepath"
"reflect"
"regexp"
"sort"
"strings"
ttemp "text/template"
"time"
"github.com/spf13/viper"
"github.com/golang/gddo/doc"
"github.com/golang/gddo/gosrc"
"github.com/golang/gddo/httputil"
)
var cacheBusters httputil.CacheBusters
type flashMessage struct {
ID string
Args []string
}
// getFlashMessages retrieves flash messages from the request and clears the flash cookie if needed.
func getFlashMessages(resp http.ResponseWriter, req *http.Request) []flashMessage {
c, err := req.Cookie("flash")
if err == http.ErrNoCookie {
return nil
}
http.SetCookie(resp, &http.Cookie{Name: "flash", Path: "/", MaxAge: -1, Expires: time.Now().Add(-100 * 24 * time.Hour)})
if err != nil {
return nil
}
p, err := base64.URLEncoding.DecodeString(c.Value)
if err != nil {
return nil
}
var messages []flashMessage
for _, s := range strings.Split(string(p), "\000") {
idArgs := strings.Split(s, "\001")
messages = append(messages, flashMessage{ID: idArgs[0], Args: idArgs[1:]})
}
return messages
}
// setFlashMessages sets a cookie with the given flash messages.
func setFlashMessages(resp http.ResponseWriter, messages []flashMessage) {
var buf []byte
for i, message := range messages {
if i > 0 {
buf = append(buf, '\000')
}
buf = append(buf, message.ID...)
for _, arg := range message.Args {
buf = append(buf, '\001')
buf = append(buf, arg...)
}
}
value := base64.URLEncoding.EncodeToString(buf)
http.SetCookie(resp, &http.Cookie{Name: "flash", Value: value, Path: "/"})
}
type tdoc struct {
*doc.Package
allExamples []*texample
}
type texample struct {
ID string
Label string
Example *doc.Example
Play bool
obj interface{}
}
func newTDoc(pdoc *doc.Package) *tdoc {
return &tdoc{Package: pdoc}
}
func (pdoc *tdoc) SourceLink(pos doc.Pos, text string, textOnlyOK bool) htemp.HTML {
if pos.Line == 0 || pdoc.LineFmt == "" || pdoc.Files[pos.File].URL == "" {
if textOnlyOK {
return htemp.HTML(htemp.HTMLEscapeString(text))
}
return ""
}
return htemp.HTML(fmt.Sprintf(`<a title="View Source" href="%s">%s</a>`,
htemp.HTMLEscapeString(fmt.Sprintf(pdoc.LineFmt, pdoc.Files[pos.File].URL, pos.Line)),
htemp.HTMLEscapeString(text)))
}
// UsesLink generates a link to uses of a symbol definition.
// title is used as the tooltip. defParts are parts of the symbol definition name.
func (pdoc *tdoc) UsesLink(title string, defParts ...string) htemp.HTML {
if viper.GetString(ConfigSourcegraphURL) == "" {
return ""
}
var def string
switch len(defParts) {
case 1:
// Funcs and types have one def part.
def = defParts[0]
case 3:
// Methods have three def parts: the original receiver name, the actual receiver name, and the method name.
orig, recv, methodName := defParts[0], defParts[1], defParts[2]
if orig == "" {
// TODO: Remove this fallback after 2016-08-05. It's only needed temporarily to backfill data.
// Actual receiver is not needed, it's only used because original receiver value
// was recently added to gddo/doc package and will be blank until next package rebuild.
//
// Use actual receiver as fallback.
orig = recv
}
// Trim "*" from "*T" if it's a pointer receiver method.
typeName := strings.TrimPrefix(orig, "*")
def = typeName + "/" + methodName
default:
panic(fmt.Errorf("%v defParts, want 1 or 3", len(defParts)))
}
q := url.Values{
"repo": {pdoc.ProjectRoot},
"pkg": {pdoc.ImportPath},
"def": {def},
}
u := viper.GetString(ConfigSourcegraphURL) + "/-/godoc/refs?" + q.Encode()
return htemp.HTML(fmt.Sprintf(`<a class="uses" title="%s" href="%s">Uses</a>`, htemp.HTMLEscapeString(title), htemp.HTMLEscapeString(u)))
}
func (pdoc *tdoc) PageName() string {
if pdoc.Name != "" && !pdoc.IsCmd {
return pdoc.Name
}
_, name := path.Split(pdoc.ImportPath)
return name
}
func (pdoc *tdoc) addExamples(obj interface{}, export, method string, examples []*doc.Example) {
label := export
id := export
if method != "" {
label += "." + method
id += "-" + method
}
for _, e := range examples {
te := &texample{
Label: label,
ID: id,
Example: e,
obj: obj,
// Only show play links for packages within the standard library.
Play: e.Play != "" && gosrc.IsGoRepoPath(pdoc.ImportPath),
}
if e.Name != "" {
te.Label += " (" + e.Name + ")"
if method == "" {
te.ID += "-"
}
te.ID += "-" + e.Name
}
pdoc.allExamples = append(pdoc.allExamples, te)
}
}
type byExampleID []*texample
func (e byExampleID) Len() int { return len(e) }
func (e byExampleID) Less(i, j int) bool { return e[i].ID < e[j].ID }
func (e byExampleID) Swap(i, j int) { e[i], e[j] = e[j], e[i] }
func (pdoc *tdoc) AllExamples() []*texample {
if pdoc.allExamples != nil {
return pdoc.allExamples
}
pdoc.allExamples = make([]*texample, 0)
pdoc.addExamples(pdoc, "package", "", pdoc.Examples)
for _, f := range pdoc.Funcs {
pdoc.addExamples(f, f.Name, "", f.Examples)
}
for _, t := range pdoc.Types {
pdoc.addExamples(t, t.Name, "", t.Examples)
for _, f := range t.Funcs {
pdoc.addExamples(f, f.Name, "", f.Examples)
}
for _, m := range t.Methods {
if len(m.Examples) > 0 {
pdoc.addExamples(m, t.Name, m.Name, m.Examples)
}
}
}
sort.Sort(byExampleID(pdoc.allExamples))
return pdoc.allExamples
}
func (pdoc *tdoc) ObjExamples(obj interface{}) []*texample {
var examples []*texample
for _, e := range pdoc.allExamples {
if e.obj == obj {
examples = append(examples, e)
}
}
return examples
}
func (pdoc *tdoc) Breadcrumbs(templateName string) htemp.HTML {
if !strings.HasPrefix(pdoc.ImportPath, pdoc.ProjectRoot) {
return ""
}
var buf bytes.Buffer
i := 0
j := len(pdoc.ProjectRoot)
if j == 0 {
j = strings.IndexRune(pdoc.ImportPath, '/')
if j < 0 {
j = len(pdoc.ImportPath)
}
}
for {
if i != 0 {
buf.WriteString(`<span class="text-muted">/</span>`)
}
link := j < len(pdoc.ImportPath) ||
(templateName != "dir.html" && templateName != "cmd.html" && templateName != "pkg.html")
if link {
buf.WriteString(`<a href="`)
buf.WriteString(formatPathFrag(pdoc.ImportPath[:j], ""))
buf.WriteString(`">`)
} else {
buf.WriteString(`<span class="text-muted">`)
}
buf.WriteString(htemp.HTMLEscapeString(pdoc.ImportPath[i:j]))
if link {
buf.WriteString("</a>")
} else {
buf.WriteString("</span>")
}
i = j + 1
if i >= len(pdoc.ImportPath) {
break
}
j = strings.IndexRune(pdoc.ImportPath[i:], '/')
if j < 0 {
j = len(pdoc.ImportPath)
} else {
j += i
}
}
return htemp.HTML(buf.String())
}
func (pdoc *tdoc) StatusDescription() htemp.HTML {
desc := ""
switch pdoc.Package.Status {
case gosrc.DeadEndFork:
desc = "This is a dead-end fork (no commits since the fork)."
case gosrc.QuickFork:
desc = "This is a quick bug-fix fork (has fewer than three commits, and only during the week it was created)."
case gosrc.Inactive:
desc = "This is an inactive package (no imports and no commits in at least two years)."
}
return htemp.HTML(desc)
}
func formatPathFrag(path, fragment string) string {
if len(path) > 0 && path[0] != '/' {
path = "/" + path
}
u := url.URL{Path: path, Fragment: fragment}
return u.String()
}
func hostFn(urlStr string) string {
u, err := url.Parse(urlStr)
if err != nil {
return ""
}
return u.Host
}
func mapFn(kvs ...interface{}) (map[string]interface{}, error) {
if len(kvs)%2 != 0 {
return nil, errors.New("map requires even number of arguments")
}
m := make(map[string]interface{})
for i := 0; i < len(kvs); i += 2 {
s, ok := kvs[i].(string)
if !ok {
return nil, errors.New("even args to map must be strings")
}
m[s] = kvs[i+1]
}
return m, nil
}
// relativePathFn returns path with the parentPath prefix removed, when parentPath is a prefix of path.
func relativePathFn(path string, parentPath interface{}) string {
if p, ok := parentPath.(string); ok && p != "" && strings.HasPrefix(path, p) {
path = path[len(p)+1:]
}
return path
}
// importPathFn formats an import path as HTML, inserting zero-width space characters after "/" in long paths to allow line breaks.
func importPathFn(path string) htemp.HTML {
path = htemp.HTMLEscapeString(path)
if len(path) > 45 {
// Allow long import paths to break following "/"
path = strings.Replace(path, "/", "/&#8203;", -1)
}
return htemp.HTML(path)
}
var (
h3Pat = regexp.MustCompile(`<h3 id="([^"]+)">([^<]+)</h3>`)
rfcPat = regexp.MustCompile(`RFC\s+(\d{3,4})(,?\s+[Ss]ection\s+(\d+(\.\d+)*))?`)
packagePat = regexp.MustCompile(`\s+package\s+([-a-z0-9]\S+)`)
)
func replaceAll(src []byte, re *regexp.Regexp, replace func(out, src []byte, m []int) []byte) []byte {
var out []byte
for len(src) > 0 {
m := re.FindSubmatchIndex(src)
if m == nil {
break
}
out = append(out, src[:m[0]]...)
out = replace(out, src, m)
src = src[m[1]:]
}
if out == nil {
return src
}
return append(out, src...)
}
// commentFn formats a source code comment as HTML.
func commentFn(v string) htemp.HTML {
var buf bytes.Buffer
godoc.ToHTML(&buf, v, nil)
p := buf.Bytes()
p = replaceAll(p, h3Pat, func(out, src []byte, m []int) []byte {
out = append(out, `<h4 id="`...)
out = append(out, src[m[2]:m[3]]...)
out = append(out, `">`...)
out = append(out, src[m[4]:m[5]]...)
out = append(out, ` <a class="permalink" href="#`...)
out = append(out, src[m[2]:m[3]]...)
&para</a></h4>`...)">
out = append(out, `">&para;</a></h4>`...)
return out
})
p = replaceAll(p, rfcPat, func(out, src []byte, m []int) []byte {
out = append(out, `<a href="http://tools.ietf.org/html/rfc`...)
out = append(out, src[m[2]:m[3]]...)
// If available, add section fragment
if m[4] != -1 {
out = append(out, `#section-`...)
out = append(out, src[m[6]:m[7]]...)
}
out = append(out, `">`...)
out = append(out, src[m[0]:m[1]]...)
out = append(out, `</a>`...)
return out
})
p = replaceAll(p, packagePat, func(out, src []byte, m []int) []byte {
path := bytes.TrimRight(src[m[2]:m[3]], ".!?:")
if !gosrc.IsValidPath(string(path)) {
return append(out, src[m[0]:m[1]]...)
}
out = append(out, src[m[0]:m[2]]...)
out = append(out, `<a href="/`...)
out = append(out, path...)
out = append(out, `">`...)
out = append(out, path...)
out = append(out, `</a>`...)
out = append(out, src[m[2]+len(path):m[1]]...)
return out
})
return htemp.HTML(p)
}
// commentTextFn formats a source code comment as text.
func commentTextFn(v string) string {
const indent = " "
var buf bytes.Buffer
godoc.ToText(&buf, v, indent, "\t", 80-2*len(indent))
p := buf.Bytes()
return string(p)
}
var period = []byte{'.'}
func codeFn(c doc.Code, typ *doc.Type) htemp.HTML {
var buf bytes.Buffer
last := 0
src := []byte(c.Text)
buf.WriteString("<pre>")
for _, a := range c.Annotations {
htemp.HTMLEscape(&buf, src[last:a.Pos])
switch a.Kind {
case doc.PackageLinkAnnotation:
buf.WriteString(`<a href="`)
buf.WriteString(formatPathFrag(c.Paths[a.PathIndex], ""))
buf.WriteString(`">`)
htemp.HTMLEscape(&buf, src[a.Pos:a.End])
buf.WriteString(`</a>`)
case doc.LinkAnnotation, doc.BuiltinAnnotation:
var p string
if a.Kind == doc.BuiltinAnnotation {
p = "builtin"
} else if a.PathIndex >= 0 {
p = c.Paths[a.PathIndex]
}
n := src[a.Pos:a.End]
n = n[bytes.LastIndex(n, period)+1:]
buf.WriteString(`<a href="`)
buf.WriteString(formatPathFrag(p, string(n)))
buf.WriteString(`">`)
htemp.HTMLEscape(&buf, src[a.Pos:a.End])
buf.WriteString(`</a>`)
case doc.CommentAnnotation:
buf.WriteString(`<span class="com">`)
htemp.HTMLEscape(&buf, src[a.Pos:a.End])
buf.WriteString(`</span>`)
case doc.AnchorAnnotation:
buf.WriteString(`<span id="`)
if typ != nil {
htemp.HTMLEscape(&buf, []byte(typ.Name))
buf.WriteByte('.')
}
htemp.HTMLEscape(&buf, src[a.Pos:a.End])
buf.WriteString(`">`)
htemp.HTMLEscape(&buf, src[a.Pos:a.End])
buf.WriteString(`</span>`)
default:
htemp.HTMLEscape(&buf, src[a.Pos:a.End])
}
last = int(a.End)
}
htemp.HTMLEscape(&buf, src[last:])
buf.WriteString("</pre>")
return htemp.HTML(buf.String())
}
var isInterfacePat = regexp.MustCompile(`^type [^ ]+ interface`)
func isInterfaceFn(t *doc.Type) bool {
return isInterfacePat.MatchString(t.Decl.Text)
}
var gaAccount string
func gaAccountFn() string {
return gaAccount
}
func noteTitleFn(s string) string {
return strings.Title(strings.ToLower(s))
}
func htmlCommentFn(s string) htemp.HTML {
return htemp.HTML("<!-- " + s + " -->")
}
var mimeTypes = map[string]string{
".html": htmlMIMEType,
".txt": textMIMEType,
}
func executeTemplate(resp http.ResponseWriter, name string, status int, header http.Header, data interface{}) error {
for k, v := range header {
resp.Header()[k] = v
}
mimeType, ok := mimeTypes[path.Ext(name)]
if !ok {
mimeType = textMIMEType
}
resp.Header().Set("Content-Type", mimeType)
t := templates[name]
if t == nil {
return fmt.Errorf("template %s not found", name)
}
resp.WriteHeader(status)
if status == http.StatusNotModified {
return nil
}
return t.Execute(resp, data)
}
var templates = map[string]interface {
Execute(io.Writer, interface{}) error
}{}
func joinTemplateDir(base string, files []string) []string {
result := make([]string, len(files))
for i := range files {
result[i] = filepath.Join(base, "templates", files[i])
}
return result
}
func parseHTMLTemplates(sets [][]string) error {
for _, set := range sets {
templateName := set[0]
t := htemp.New("")
t.Funcs(htemp.FuncMap{
"code": codeFn,
"comment": commentFn,
"equal": reflect.DeepEqual,
"gaAccount": gaAccountFn,
"host": hostFn,
"htmlComment": htmlCommentFn,
"importPath": importPathFn,
"isInterface": isInterfaceFn,
"isValidImportPath": gosrc.IsValidPath,
"map": mapFn,
"noteTitle": noteTitleFn,
"relativePath": relativePathFn,
"sidebarEnabled": func() bool { return viper.GetBool(ConfigSidebar) },
"staticPath": func(p string) string { return cacheBusters.AppendQueryParam(p, "v") },
"templateName": func() string { return templateName },
})
if _, err := t.ParseFiles(joinTemplateDir(viper.GetString(ConfigAssetsDir), set)...); err != nil {
return err
}
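// Every set must define a template named ROOT; executeTemplate runs that entry point.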
t = t.Lookup("ROOT")
if t == nil {
return fmt.Errorf("ROOT template not found in %v", set)
}
templates[set[0]] = t
}
return nil
}
func parseTextTemplates(sets [][]string) error {
for _, set := range sets {
t := ttemp.New("")
t.Funcs(ttemp.FuncMap{
"comment": commentTextFn,
})
if _, err := t.ParseFiles(joinTemplateDir(viper.GetString(ConfigAssetsDir), set)...); err != nil {
return err
}
t = t.Lookup("ROOT")
if t == nil {
return fmt.Errorf("ROOT template not found in %v", set)
}
templates[set[0]] = t
}
return nil
}

View File

@@ -0,0 +1,33 @@
// Copyright 2014 The Go Authors. All rights reserved.
//
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file or at
// https://developers.google.com/open-source/licenses/bsd.
package main
import (
"net/http"
"net/http/httptest"
"reflect"
"strings"
"testing"
)
func TestFlashMessages(t *testing.T) {
resp := httptest.NewRecorder()
expectedMessages := []flashMessage{
{ID: "a", Args: []string{"one"}},
{ID: "b", Args: []string{"two", "three"}},
{ID: "c", Args: []string{}},
}
setFlashMessages(resp, expectedMessages)
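// Round-trip the flash cookie: copy the Set-Cookie value just written into a request's Cookie header.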
req := &http.Request{Header: http.Header{"Cookie": {strings.Split(resp.Header().Get("Set-Cookie"), ";")[0]}}}
actualMessages := getFlashMessages(resp, req)
if !reflect.DeepEqual(actualMessages, expectedMessages) {
t.Errorf("got messages %+v, want %+v", actualMessages, expectedMessages)
}
}

27
vendor/github.com/golang/gddo/gosrc/LICENSE generated vendored Normal file
View File

@@ -0,0 +1,27 @@
Copyright (c) 2013 The Go Authors. All rights reserved.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are
met:
* Redistributions of source code must retain the above copyright
notice, this list of conditions and the following disclaimer.
* Redistributions in binary form must reproduce the above
copyright notice, this list of conditions and the following disclaimer
in the documentation and/or other materials provided with the
distribution.
* Neither the name of Google Inc. nor the names of its
contributors may be used to endorse or promote products derived from
this software without specific prior written permission.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

9
vendor/github.com/golang/gddo/gosrc/README.markdown generated vendored Normal file
View File

@@ -0,0 +1,9 @@
Package gosrc fetches Go package source code from version control services.
Contributions
-------------
Contributions to this project are welcome, though please send mail before
starting work on anything major. Contributors retain their copyright, so we
need you to fill out a short form before we can accept your contribution:
https://developers.google.com/open-source/cla/individual
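For orientation (a sketch, not code in the package): each supported host registers itself in an init function with a URL pattern, a path prefix, and a fetch function, as the Bitbucket file below does. A hypothetical host would be wired up like this:
func init() {
	addService(&service{
		pattern: regexp.MustCompile(`^example\.org/(?P<repo>[a-z0-9A-Z_.\-]+)(?P<dir>/[a-z0-9A-Z_.\-/]*)?$`),
		prefix:  "example.org/",
		// getExampleDir is hypothetical; it has the same shape as getBitbucketDir below:
		// func(client *http.Client, match map[string]string, savedEtag string) (*Directory, error)
		get: getExampleDir,
	})
}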

171
vendor/github.com/golang/gddo/gosrc/bitbucket.go generated vendored Normal file
View File

@@ -0,0 +1,171 @@
// Copyright 2013 The Go Authors. All rights reserved.
//
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file or at
// https://developers.google.com/open-source/licenses/bsd.
package gosrc
import (
"log"
"net/http"
"path"
"regexp"
"time"
)
func init() {
addService(&service{
pattern: regexp.MustCompile(`^bitbucket\.org/(?P<owner>[a-z0-9A-Z_.\-]+)/(?P<repo>[a-z0-9A-Z_.\-]+)(?P<dir>/[a-z0-9A-Z_.\-/]*)?$`),
prefix: "bitbucket.org/",
get: getBitbucketDir,
})
}
var bitbucketEtagRe = regexp.MustCompile(`^(hg|git)-`)
type bitbucketRepo struct {
Scm string
CreatedOn string `json:"created_on"`
LastUpdated string `json:"last_updated"`
ForkOf struct {
Scm string
} `json:"fork_of"`
Followers int `json:"followers_count"`
IsFork bool `json:"is_fork"`
}
type bitbucketNode struct {
Node string `json:"node"`
Timestamp string `json:"utctimestamp"`
}
func getBitbucketDir(client *http.Client, match map[string]string, savedEtag string) (*Directory, error) {
var repo *bitbucketRepo
c := &httpClient{client: client}
if m := bitbucketEtagRe.FindStringSubmatch(savedEtag); m != nil {
match["vcs"] = m[1]
} else {
var err error
repo, err = getBitbucketRepo(c, match)
if err != nil {
return nil, err
}
match["vcs"] = repo.Scm
}
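// Record the head commit and timestamp of every branch and tag; these drive the etag comparison and NotModified reporting below.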
tags := make(map[string]string)
timestamps := make(map[string]time.Time)
for _, nodeType := range []string{"branches", "tags"} {
var nodes map[string]bitbucketNode
if _, err := c.getJSON(expand("https://api.bitbucket.org/1.0/repositories/{owner}/{repo}/{0}", match, nodeType), &nodes); err != nil {
return nil, err
}
for t, n := range nodes {
tags[t] = n.Node
const timeFormat = "2006-01-02 15:04:05Z07:00"
committed, err := time.Parse(timeFormat, n.Timestamp)
if err != nil {
log.Println("error parsing timestamp:", n.Timestamp)
continue
}
timestamps[t] = committed
}
}
var err error
tag, commit, err := bestTag(tags, defaultTags[match["vcs"]])
if err != nil {
return nil, err
}
match["tag"] = tag
match["commit"] = commit
etag := expand("{vcs}-{commit}", match)
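// If the chosen tag still points at the same commit as last time, report NotModified with that tag's commit time.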
if etag == savedEtag {
return nil, NotModifiedError{Since: timestamps[tag]}
}
if repo == nil {
repo, err = getBitbucketRepo(c, match)
if err != nil {
return nil, err
}
}
var contents struct {
Directories []string
Files []struct {
Path string
}
}
if _, err := c.getJSON(expand("https://api.bitbucket.org/1.0/repositories/{owner}/{repo}/src/{tag}{dir}/", match), &contents); err != nil {
return nil, err
}
var files []*File
var dataURLs []string
for _, f := range contents.Files {
_, name := path.Split(f.Path)
if isDocFile(name) {
files = append(files, &File{Name: name, BrowseURL: expand("https://bitbucket.org/{owner}/{repo}/src/{tag}/{0}", match, f.Path)})
dataURLs = append(dataURLs, expand("https://api.bitbucket.org/1.0/repositories/{owner}/{repo}/raw/{tag}/{0}", match, f.Path))
}
}
if err := c.getFiles(dataURLs, files); err != nil {
return nil, err
}
status := Active
if isBitbucketDeadEndFork(repo) {
status = DeadEndFork
}
return &Directory{
BrowseURL: expand("https://bitbucket.org/{owner}/{repo}/src/{tag}{dir}", match),
Etag: etag,
Files: files,
LineFmt: "%s#cl-%d",
ProjectName: match["repo"],
ProjectRoot: expand("bitbucket.org/{owner}/{repo}", match),
ProjectURL: expand("https://bitbucket.org/{owner}/{repo}/", match),
Subdirectories: contents.Directories,
VCS: match["vcs"],
Status: status,
Fork: repo.IsFork,
Stars: repo.Followers,
}, nil
}
func getBitbucketRepo(c *httpClient, match map[string]string) (*bitbucketRepo, error) {
var repo bitbucketRepo
if _, err := c.getJSON(expand("https://api.bitbucket.org/1.0/repositories/{owner}/{repo}", match), &repo); err != nil {
return nil, err
}
return &repo, nil
}
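// isBitbucketDeadEndFork reports whether the repository is a fork that has not been updated since it was created.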
func isBitbucketDeadEndFork(repo *bitbucketRepo) bool {
l := "2006-01-02T15:04:05.999999999"
created, err := time.Parse(l, repo.CreatedOn)
if err != nil {
return false
}
updated, err := time.Parse(l, repo.LastUpdated)
if err != nil {
return false
}
isDeadEndFork := false
if repo.ForkOf.Scm != "" && created.Unix() >= updated.Unix() {
isDeadEndFork = true
}
return isDeadEndFork
}

62
vendor/github.com/golang/gddo/gosrc/build.go generated vendored Normal file
View File

@@ -0,0 +1,62 @@
// Copyright 2013 The Go Authors. All rights reserved.
//
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file or at
// https://developers.google.com/open-source/licenses/bsd.
package gosrc
import (
"bytes"
"go/build"
"io"
"io/ioutil"
"os"
"path"
"strings"
"time"
)
// Import returns details about the package in the directory.
func (dir *Directory) Import(ctx *build.Context, mode build.ImportMode) (*build.Package, error) {
safeCopy := *ctx
ctx = &safeCopy
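// Work on a copy of the caller's build.Context and override its file system hooks so go/build reads the in-memory Files instead of the local disk.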
ctx.JoinPath = path.Join
ctx.IsAbsPath = path.IsAbs
ctx.SplitPathList = func(list string) []string { return strings.Split(list, ":") }
ctx.IsDir = func(path string) bool { return false }
ctx.HasSubdir = func(root, dir string) (rel string, ok bool) { return "", false }
ctx.ReadDir = dir.readDir
ctx.OpenFile = dir.openFile
return ctx.ImportDir(".", mode)
}
type fileInfo struct{ f *File }
func (fi fileInfo) Name() string { return fi.f.Name }
func (fi fileInfo) Size() int64 { return int64(len(fi.f.Data)) }
func (fi fileInfo) Mode() os.FileMode { return 0 }
func (fi fileInfo) ModTime() time.Time { return time.Time{} }
func (fi fileInfo) IsDir() bool { return false }
func (fi fileInfo) Sys() interface{} { return nil }
func (dir *Directory) readDir(name string) ([]os.FileInfo, error) {
if name != "." {
return nil, os.ErrNotExist
}
fis := make([]os.FileInfo, len(dir.Files))
for i, f := range dir.Files {
fis[i] = fileInfo{f}
}
return fis, nil
}
func (dir *Directory) openFile(path string) (io.ReadCloser, error) {
name := strings.TrimPrefix(path, "./")
for _, f := range dir.Files {
if f.Name == name {
return ioutil.NopCloser(bytes.NewReader(f.Data)), nil
}
}
return nil, os.ErrNotExist
}
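As a usage sketch (not part of the package): given a *Directory returned by one of the service getters, Import can be driven with the standard build.Default context. The function and variable names here are illustrative.
func describeDir(dir *Directory) error {
	// build.Default is copied inside Import, so passing its address is safe.
	bpkg, err := dir.Import(&build.Default, 0)
	if err != nil {
		return err
	}
	log.Printf("package %s imports %v", bpkg.Name, bpkg.Imports)
	return nil
}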

145
vendor/github.com/golang/gddo/gosrc/client.go generated vendored Normal file
View File

@@ -0,0 +1,145 @@
// Copyright 2013 The Go Authors. All rights reserved.
//
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file or at
// https://developers.google.com/open-source/licenses/bsd.
package gosrc
import (
"encoding/json"
"fmt"
"io"
"io/ioutil"
"net/http"
)
type httpClient struct {
errFn func(*http.Response) error
header http.Header
client *http.Client
}
func (c *httpClient) err(resp *http.Response) error {
if resp.StatusCode == 404 {
return NotFoundError{Message: "Resource not found: " + resp.Request.URL.String()}
}
if c.errFn != nil {
return c.errFn(resp)
}
return &RemoteError{resp.Request.URL.Host, fmt.Errorf("%d: (%s)", resp.StatusCode, resp.Request.URL.String())}
}
// get issues a GET to the specified URL.
func (c *httpClient) get(url string) (*http.Response, error) {
req, err := http.NewRequest("GET", url, nil)
if err != nil {
return nil, err
}
for k, vs := range c.header {
req.Header[k] = vs
}
resp, err := c.client.Do(req)
if err != nil {
return nil, &RemoteError{req.URL.Host, err}
}
return resp, err
}
// getNoFollow issues a GET to the specified URL without following redirects.
func (c *httpClient) getNoFollow(url string) (*http.Response, error) {
req, err := http.NewRequest("GET", url, nil)
if err != nil {
return nil, err
}
for k, vs := range c.header {
req.Header[k] = vs
}
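// Use the transport's RoundTrip directly; unlike Client.Do it does not follow redirects.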
t := c.client.Transport
if t == nil {
t = http.DefaultTransport
}
resp, err := t.RoundTrip(req)
if err != nil {
return nil, &RemoteError{req.URL.Host, err}
}
return resp, err
}
func (c *httpClient) getBytes(url string) ([]byte, error) {
resp, err := c.get(url)
if err != nil {
return nil, err
}
defer resp.Body.Close()
if resp.StatusCode != 200 {
return nil, c.err(resp)
}
p, err := ioutil.ReadAll(resp.Body)
return p, err
}
func (c *httpClient) getReader(url string) (io.ReadCloser, error) {
resp, err := c.get(url)
if err != nil {
return nil, err
}
if resp.StatusCode != 200 {
err = c.err(resp)
resp.Body.Close()
return nil, err
}
return resp.Body, nil
}
func (c *httpClient) getJSON(url string, v interface{}) (*http.Response, error) {
resp, err := c.get(url)
if err != nil {
return resp, err
}
defer resp.Body.Close()
if resp.StatusCode != 200 {
return resp, c.err(resp)
}
err = json.NewDecoder(resp.Body).Decode(v)
if _, ok := err.(*json.SyntaxError); ok {
err = NotFoundError{Message: "JSON syntax error at " + url}
}
return resp, err
}
func (c *httpClient) getFiles(urls []string, files []*File) error {
ch := make(chan error, len(files))
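// Fetch every file in its own goroutine; the channel is buffered so each goroutine can send its result without blocking.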
for i := range files {
go func(i int) {
resp, err := c.get(urls[i])
if err != nil {
ch <- err
return
}
defer resp.Body.Close()
if resp.StatusCode != 200 {
var err error
if c.errFn != nil {
err = c.errFn(resp)
} else {
err = &RemoteError{resp.Request.URL.Host, fmt.Errorf("get %s -> %d", urls[i], resp.StatusCode)}
}
ch <- err
return
}
files[i].Data, err = ioutil.ReadAll(resp.Body)
if err != nil {
ch <- &RemoteError{resp.Request.URL.Host, err}
return
}
ch <- nil
}(i)
}
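// Wait for all fetches. Returning on the first error is safe because the buffered channel lets the remaining goroutines complete their sends.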
for range files {
if err := <-ch; err != nil {
return err
}
}
return nil
}

Some files were not shown because too many files have changed in this diff.