Mirror of https://github.com/grafana/grafana.git, synced 2026-01-06 17:33:49 +08:00

Compare commits
2 Commits

| Author | SHA1 | Date |
|---|---|---|
|  | 6e1beea1f2 |  |
|  | a362c84fdf |  |

@@ -1,7 +1,7 @@
[run]
init_cmds = [
  ["go", "build", "-o", "./bin/grafana-server", "./pkg/cmd/grafana-server"],
  ["./bin/grafana-server", "cfg:app_mode=development"]
  ["./bin/grafana-server"]
]
watch_all = true
watch_dirs = [
@@ -9,9 +9,9 @@ watch_dirs = [
  "$WORKDIR/public/views",
  "$WORKDIR/conf",
]
watch_exts = [".go", ".ini", ".toml"]
watch_exts = [".go", ".ini", ".toml", ".html"]
build_delay = 1500
cmds = [
  ["go", "build", "-o", "./bin/grafana-server", "./pkg/cmd/grafana-server"],
  ["./bin/grafana-server", "cfg:app_mode=development"]
  ["./bin/grafana-server"]
]

@@ -1,20 +1,19 @@
# http://editorconfig.org
root = true

[*.go]
indent_style = tabs
indent_size = 2
charset = utf-8
trim_trailing_whitespace = true
insert_final_newline = true

[*]
indent_style = space
indent_size = 2
charset = utf-8
trim_trailing_whitespace = true
insert_final_newline = true
max_line_length = 120

[*.go]
indent_style = tab
indent_size = 4
charset = utf-8
trim_trailing_whitespace = true
insert_final_newline = true

[*.md]
trim_trailing_whitespace = false

12 .flooignore Normal file
@@ -0,0 +1,12 @@
#*
*.o
*.pyc
*.pyo
*~
extern/
node_modules/
tmp/
data/
vendor/
public_gen/
dist/

6 .github/CONTRIBUTING.md vendored
@@ -12,11 +12,11 @@ grunt karma:dev

### Run tests for backend assets before commit
```
test -z "$(gofmt -s -l . | grep -v -E 'vendor/(github.com|golang.org|gopkg.in)' | tee /dev/stderr)"
test -z "$(gofmt -s -l . | grep -v Godeps/_workspace/src/ | tee /dev/stderr)"
```

### Run tests for frontend assets before commit
```
npm test
go test -v ./pkg/...
grunt test
godep go test -v ./pkg/...
```

17 .github/ISSUE_TEMPLATE.md vendored
@@ -1,8 +1,7 @@
Read before posting:

- Questions should be posted to https://community.grafana.com. Please search there and here on GitHub for similar issues before creating a new issue.
- Check out the FAQ: https://community.grafana.com/c/howto/faq
- Check out How to troubleshoot metric query issues: https://community.grafana.com/t/how-to-troubleshoot-metric-query-issues/50
* **I'm submitting a ...**
- [ ] Bug report
- [ ] Feature request
- [ ] Question / Support request: **Please do not** open a github issue. [Support Options](http://grafana.org/support/)

Please include this information:
- What Grafana version are you using?
@@ -11,6 +10,8 @@ Please include this information:
- What did you do?
- What was the expected result?
- What happened instead?
- If related to metric query / data viz:
  - Include the raw network request & response: get these by opening Chrome Dev Tools (F12, Ctrl+Shift+I on Windows, Cmd+Opt+I on Mac) and going to the network tab.

**IMPORTANT** If it relates to metric data viz:
- An image or text representation of your metric query
- The raw query and response for the network request (check this in the Chrome dev tools network tab, where you can see metric requests and other requests; please include the request body and request response)

4 .github/PULL_REQUEST_TEMPLATE.md vendored
@@ -1,4 +1,2 @@
* Link the PR to an issue for new features
* Rebase your PR if it gets out of sync with master

**REMOVE THE TEXT ABOVE BEFORE CREATING THE PULL REQUEST**
* Rebase your PR if it gets out of sync with master

31 .gitignore vendored
@@ -4,21 +4,18 @@ coverage/
.aws-config.json
awsconfig
/dist
/public/build
/public/views/index.html
/emails/dist
/public_gen
/public/vendor/npm
/tmp
tools/phantomjs/phantomjs
tools/phantomjs/phantomjs.exe
profile.out
coverage.txt
vendor/phantomjs/phantomjs

docs/AWS_S3_BUCKET
docs/GIT_BRANCH
docs/VERSION
docs/GITCOMMIT
docs/changed-files
docs/changed-files

# locally required config files
public/css/*.min.css
@@ -28,9 +25,6 @@ public/css/*.min.css
*.swp
.idea/
*.iml
*.tmp
.DS_Store
.vscode/

/data/*
/bin/*
@@ -38,26 +32,9 @@ public/css/*.min.css
conf/custom.ini
fig.yml
docker-compose.yml
docker-compose.yaml
/conf/provisioning/**/custom.yaml
profile.cov
/grafana
.notouch
/pkg/cmd/grafana-cli/grafana-cli
/pkg/cmd/grafana-server/grafana-server
/pkg/cmd/grafana-server/debug
debug.test
/examples/*/dist
/packaging/**/*.rpm
/packaging/**/*.deb

# Ignore OSX indexing
.DS_Store

/vendor/**/*.py
/vendor/**/*.xml
/vendor/**/*.yml
/vendor/**/*_test.go
/vendor/**/.editorconfig
/vendor/**/appengine*
*.orig
/examples/*/dist

10 .hooks/pre-commit Executable file
@@ -0,0 +1,10 @@
#!/usr/bin/env bash

test -z "$(gofmt -s -l . | grep -v Godeps/_workspace/src/ | tee /dev/stderr)"
if [ $? -gt 0 ]; then
  echo "Some files aren't formatted, please run 'go fmt ./pkg/...' to format your source code before committing"
  exit 1
fi

grunt test

21 .jsfmtrc Normal file
@@ -0,0 +1,21 @@
{
  "preset" : "default",

  "lineBreak" : {
    "before" : {
      "VariableDeclarationWithoutInit" : 0,
    },

    "after": {
      "AssignmentOperator": -1,
      "ArgumentListArrayExpression": ">=1"
    }
  },

  "whiteSpace" : {
    "before" : {
    },
    "after" : {
    }
  }
}

@@ -1,10 +1,10 @@
{
  "browser": true,
  "esversion": 6,

  "bitwise": false,
  "curly": true,
  "eqnull": true,
  "strict": false,
  "strict": true,
  "devel": true,
  "eqeqeq": true,
  "forin": false,

676 CHANGELOG.md
@@ -1,673 +1,3 @@
# 5.0.0-beta5 (2018-02-26)

### Fixes

- **Orgs** Unable to switch org when too many orgs listed [#10774](https://github.com/grafana/grafana/issues/10774)
- **Folders** Make it easier/explicit to access/modify folders using the API [#10630](https://github.com/grafana/grafana/issues/10630)
- **Dashboard** Scrollbar works incorrectly in Grafana 5.0 Beta4 in some cases [#10982](https://github.com/grafana/grafana/issues/10982)
- **ElasticSearch** Custom aggregation sizes no longer allowed for Elasticsearch [#10124](https://github.com/grafana/grafana/issues/10124)
- **oauth** Github OAuth with allowed organizations fails to login [#10964](https://github.com/grafana/grafana/issues/10964)
- **heatmap** Heatmap panel has partially hidden legend [#10793](https://github.com/grafana/grafana/issues/10793)
- **snapshots** Expired snapshots not being cleaned up [#10996](https://github.com/grafana/grafana/pull/10996)

# 5.0.0-beta4 (2018-02-19)

### Fixes

- **Dashboard** Fixed dashboard overwrite permission issue [#10814](https://github.com/grafana/grafana/issues/10814)
- **Keyboard shortcuts** Fixed Esc key when in panel edit/view mode [#10945](https://github.com/grafana/grafana/issues/10945)
- **Save dashboard** Fixed issue with time range & variable reset after saving [#10946](https://github.com/grafana/grafana/issues/10946)

# 5.0.0-beta3 (2018-02-16)

### Fixes

- **MySQL** Fixed new migration issue with index length [#10931](https://github.com/grafana/grafana/issues/10931)
- **Modal** Escape key now closes modals everywhere, fixes [#10887](https://github.com/grafana/grafana/issues/10887)
- **Row repeats** Fix for repeating rows issue, fixes [#10932](https://github.com/grafana/grafana/issues/10932)
- **Docs** Team API documented, fixes [#10832](https://github.com/grafana/grafana/issues/10832)
- **Plugins** Plugin info page broken, fixes [#10943](https://github.com/grafana/grafana/issues/10943)

# 5.0.0-beta2 (2018-02-15)

### Fixes

- **Permissions** Fixed search permissions issues [#10822](https://github.com/grafana/grafana/issues/10822)
- **Permissions** Fixed issues displaying permissions lists [#10864](https://github.com/grafana/grafana/issues/10864)
- **PNG-Rendering** Fixed problem rendering legend to the right [#10526](https://github.com/grafana/grafana/issues/10526)
- **Reset password** Fixed problem with reset password form [#10870](https://github.com/grafana/grafana/issues/10870)
- **Light theme** Fixed problem with light theme in Safari, [#10869](https://github.com/grafana/grafana/issues/10869)
- **Provisioning** Now handles deletes when dashboard json files are removed from disk [#10865](https://github.com/grafana/grafana/issues/10865)
- **MySQL** Fixed issue with schema migration on old mysql (index too long) [#10779](https://github.com/grafana/grafana/issues/10779)
- **Github OAuth** Fixed fetching github orgs from private github org [#10823](https://github.com/grafana/grafana/issues/10823)
- **Embedding** Fixed issues embedding panel [#10787](https://github.com/grafana/grafana/issues/10787)

# 5.0.0-beta1 (2018-02-05)

Grafana v5.0 is going to be the biggest and most foundational release Grafana has ever had, coming with a ton of UX improvements, a new dashboard grid engine, dashboard folders, user teams and permissions. Check out this [video preview](https://www.youtube.com/watch?v=Izr0IBgoTZQ) of Grafana v5.

### New Major Features
- **Dashboards** Dashboard folders, [#1611](https://github.com/grafana/grafana/issues/1611)
- **Teams** User groups (teams) implemented. Can be used in folder & dashboard permission list.
- **Dashboard grid**: Panels are now laid out in a two-dimensional grid (with x, y, w, h). [#9093](https://github.com/grafana/grafana/issues/9093).
- **Templating**: Vertical repeat direction for panel repeats.
- **UX**: Major update to page header and navigation
- **Dashboard settings**: Combine dashboard settings views into one with side menu, [#9750](https://github.com/grafana/grafana/issues/9750)
- **Persistent dashboard URLs**: New URLs for dashboards that allow renaming dashboards without breaking links. [#7883](https://github.com/grafana/grafana/issues/7883)

## Breaking changes

* **[dashboard.json]** has been replaced with [dashboard provisioning](http://docs.grafana.org/administration/provisioning/).
  Config files for provisioning datasources have changed from `/conf/datasources` to `/conf/provisioning/datasources`,
  and from `/etc/grafana/datasources` to `/etc/grafana/provisioning/datasources` when installed with deb/rpm packages.

* **Pagerduty** The notifier now defaults to not auto resolve incidents. More details at [#10222](https://github.com/grafana/grafana/issues/10222)

* **HTTP API**
  - `GET /api/alerts` property dashboardUri renamed to url and is now the full url (that is, including the app sub url).

## New Dashboard Grid

The new grid engine is a major upgrade for how you can position and move panels. It enables new layouts and a much easier dashboard building experience. The change is backward compatible, so you can upgrade your current version to 5.0 without breaking dashboards, but you cannot downgrade from 5.0 to previous versions. Grafana will automatically upgrade your dashboards to the new schema and position panels to match your existing layout. There might be minor differences in panel height. If you upgrade to 5.0 and for some reason want to roll back to the previous version, you can restore dashboards to previous versions using dashboard history. But that should only be seen as an emergency solution.

Dashboard panels and rows are positioned using a gridPos object `{x: 0, y: 0, w: 24, h: 5}`. Units are in grid dimensions (24 columns, 1 height unit = 30px). Rows and Panels objects exist (together) in a flat array directly on the dashboard root object. Rows are not needed for layouts anymore and are mainly there for backward compatibility. Some panel plugins that do not respect their panel height might require an update.
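
To make the layout model above concrete, here is a minimal TypeScript sketch of a panel entry carrying a gridPos object. The gridPos fields and units come from the description above; the surrounding panel fields (id, type, title) are illustrative assumptions, not the exact dashboard schema.

```typescript
// Sketch of the v5 layout model described above (not the exact dashboard schema).
// Units: x/w in grid columns (24 wide), y/h in height units (1 unit = 30px).
interface GridPos {
  x: number; // column offset
  y: number; // vertical offset, in height units
  w: number; // width in columns, up to 24
  h: number; // height, in 30px units
}

// Full-width, 5-unit-tall panel at the top of the dashboard.
const panelGridPos: GridPos = { x: 0, y: 0, w: 24, h: 5 };

// Hypothetical entry in the flat `panels` array on the dashboard root object.
const panel = {
  id: 1,             // illustrative fields, assumed for the example
  type: "graph",
  title: "Requests",
  gridPos: panelGridPos,
};

console.log(JSON.stringify(panel, null, 2));
```
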
## New Features
* **Alerting**: Add support for internal image store [#6922](https://github.com/grafana/grafana/issues/6922), thx [@FunkyM](https://github.com/FunkyM)
* **Data Source Proxy**: Add support for whitelisting specified cookies that will be passed through to the data source when proxying data source requests [#5457](https://github.com/grafana/grafana/issues/5457), thanks [@robingustafsson](https://github.com/robingustafsson)
* **Postgres/MySQL**: add __timeGroup macro for mysql [#9596](https://github.com/grafana/grafana/pull/9596), thanks [@svenklemm](https://github.com/svenklemm)
* **Text**: Text panels are now edited in the Ace editor. [#9698](https://github.com/grafana/grafana/pull/9698), thx [@mtanda](https://github.com/mtanda)
* **Teams**: Add Microsoft Teams notifier [#8523](https://github.com/grafana/grafana/issues/8523), thx [@anthu](https://github.com/anthu)
* **Datasources**: It's now possible to configure datasources with config files [#1789](https://github.com/grafana/grafana/issues/1789)
* **Graphite**: Query editor updated to support new query by tag features [#9230](https://github.com/grafana/grafana/issues/9230)
* **Dashboard history**: New config file option versions_to_keep sets how many versions per dashboard to store, [#9671](https://github.com/grafana/grafana/issues/9671)
* **Dashboard as cfg**: Load dashboards from file into Grafana on startup/change [#9654](https://github.com/grafana/grafana/issues/9654) [#5269](https://github.com/grafana/grafana/issues/5269)
* **Prometheus**: Grafana can now send alerts to Prometheus Alertmanager while firing [#7481](https://github.com/grafana/grafana/issues/7481), thx [@Thib17](https://github.com/Thib17) and [@mtanda](https://github.com/mtanda)
* **Table**: Support multiple table formatted queries in table panel [#9170](https://github.com/grafana/grafana/issues/9170), thx [@davkal](https://github.com/davkal)
* **Security**: Protect against brute force (frequent) login attempts [#7616](https://github.com/grafana/grafana/issues/7616)

## Minor
* **Graph**: Don't hide graph display options (Lines/Points) when draw mode is unchecked [#9770](https://github.com/grafana/grafana/issues/9770), thx [@Jonnymcc](https://github.com/Jonnymcc)
* **Prometheus**: Show label name in parentheses after by/without/on/ignoring/group_left/group_right [#9664](https://github.com/grafana/grafana/pull/9664), thx [@mtanda](https://github.com/mtanda)
* **Alert panel**: Adds placeholder text when no alerts are within the time range [#9624](https://github.com/grafana/grafana/issues/9624), thx [@straend](https://github.com/straend)
* **Mysql**: MySQL enables MaxOpenCon and MaxIdleCon regardless of how the connection string is configured. [#9784](https://github.com/grafana/grafana/issues/9784), thx [@dfredell](https://github.com/dfredell)
* **Cloudwatch**: Fixes broken query inspector for cloudwatch [#9661](https://github.com/grafana/grafana/issues/9661), thx [@mtanda](https://github.com/mtanda)
* **Dashboard**: Make it possible to start dashboards from search and dashboard list panel [#1871](https://github.com/grafana/grafana/issues/1871)
* **Annotations**: Posting annotations now returns the id of the annotation [#9798](https://github.com/grafana/grafana/issues/9798)
* **Systemd**: Use systemd notification ready flag [#10024](https://github.com/grafana/grafana/issues/10024), thx [@jgrassler](https://github.com/jgrassler)
* **Github**: Use organizations_url provided from github to verify user belongs in org. [#10111](https://github.com/grafana/grafana/issues/10111), thx [@adiletmaratov](https://github.com/adiletmaratov)
* **Backend**: Fixed bug where Grafana exited before all subroutines were finished [#10131](https://github.com/grafana/grafana/issues/10131)
* **Azure**: Adds support for Azure blob storage as external image store [#8955](https://github.com/grafana/grafana/issues/8955), thx [@saada](https://github.com/saada)
* **Telegram**: Add support for inline image uploads to telegram notifier plugin [#9967](https://github.com/grafana/grafana/pull/9967), thx [@rburchell](https://github.com/rburchell)

## Fixes
* **Sensu**: Send alert message to sensu output [#9551](https://github.com/grafana/grafana/issues/9551), thx [@cjchand](https://github.com/cjchand)
* **Singlestat**: Suppress error when result contains no datapoints [#9636](https://github.com/grafana/grafana/issues/9636), thx [@utkarshcmu](https://github.com/utkarshcmu)
* **Postgres/MySQL**: Control quoting in SQL-queries when using template variables [#9030](https://github.com/grafana/grafana/issues/9030), thanks [@svenklemm](https://github.com/svenklemm)
* **Pagerduty**: Pagerduty doesn't auto resolve incidents by default anymore. [#10222](https://github.com/grafana/grafana/issues/10222)
* **Cloudwatch**: Fix for multi-valued templated queries. [#9903](https://github.com/grafana/grafana/issues/9903)

## Tech
* **RabbitMq**: Remove support for publishing events to RabbitMQ [#9645](https://github.com/grafana/grafana/issues/9645)

## Deprecation notes

### HTTP API
The following operations have been deprecated and will be removed in a future release:
- `GET /api/dashboards/db/:slug` -> Use `GET /api/dashboards/uid/:uid` instead
- `DELETE /api/dashboards/db/:slug` -> Use `DELETE /api/dashboards/uid/:uid` instead

The following properties have been deprecated and will be removed in a future release:
- `uri` property in `GET /api/search` -> Use new `url` or `uid` property instead
- `meta.slug` property in `GET /api/dashboards/uid/:uid` and `GET /api/dashboards/db/:slug` -> Use new `meta.url` or `dashboard.uid` property instead
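
As an illustration of the endpoint migration above, here is a minimal TypeScript sketch of fetching a dashboard by uid instead of the deprecated slug route. The paths come from the deprecation list; the base URL and the API-token Authorization header are assumptions about a typical setup, not something this changelog specifies.

```typescript
// Sketch: read a dashboard via the uid endpoint rather than the deprecated slug one.
// GRAFANA_URL and API_TOKEN are placeholders, assumed for this example.
const GRAFANA_URL = "http://localhost:3000";
const API_TOKEN = "<api token>";

async function getDashboardByUid(uid: string): Promise<unknown> {
  // Deprecated: GET /api/dashboards/db/:slug
  // Preferred:  GET /api/dashboards/uid/:uid
  const res = await fetch(`${GRAFANA_URL}/api/dashboards/uid/${uid}`, {
    headers: { Authorization: `Bearer ${API_TOKEN}` },
  });
  if (!res.ok) {
    throw new Error(`dashboard lookup failed: ${res.status}`);
  }
  return res.json();
}
```
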
# 4.6.3 (2017-12-14)
|
||||
|
||||
## Fixes
|
||||
* **Gzip**: Fixes bug gravatar images when gzip was enabled [#5952](https://github.com/grafana/grafana/issues/5952)
|
||||
* **Alert list**: Now shows alert state changes even after adding manual annotations on dashboard [#9951](https://github.com/grafana/grafana/issues/9951)
|
||||
* **Alerting**: Fixes bug where rules evaluated as firing when all conditions were false and using the OR operator. [#9318](https://github.com/grafana/grafana/issues/9318)
|
||||
* **Cloudwatch**: CloudWatch no longer displays metrics' default alias [#10151](https://github.com/grafana/grafana/issues/10151), thx [@mtanda](https://github.com/mtanda)
|
||||
|
||||
# 4.6.2 (2017-11-16)
|
||||
|
||||
## Important
|
||||
* **Prometheus**: Fixes bug with new prometheus alerts in Grafana. Make sure to download this version if you're using Prometheus for alerting. More details in the issue. [#9777](https://github.com/grafana/grafana/issues/9777)
|
||||
|
||||
## Fixes
|
||||
* **Color picker**: Bug after using textbox input field to change/paste color string [#9769](https://github.com/grafana/grafana/issues/9769)
|
||||
* **Cloudwatch**: Fix for cloudwatch templating query `ec2_instance_attribute` [#9667](https://github.com/grafana/grafana/issues/9667), thanks [@mtanda](https://github.com/mtanda)
|
||||
* **Heatmap**: Fixed tooltip for "time series buckets" mode [#9332](https://github.com/grafana/grafana/issues/9332)
|
||||
* **InfluxDB**: Fixed query editor issue when using `>` or `<` operators in WHERE clause [#9871](https://github.com/grafana/grafana/issues/9871)
|
||||
|
||||
|
||||
# 4.6.1 (2017-11-01)
|
||||
|
||||
* **Singlestat**: Lost thresholds when using save dashboard as [#9681](https://github.com/grafana/grafana/issues/9681)
|
||||
* **Graph**: Fix for series override color picker [#9715](https://github.com/grafana/grafana/issues/9715)
|
||||
* **Go**: build using golang 1.9.2 [#9713](https://github.com/grafana/grafana/issues/9713)
|
||||
* **Plugins**: Fixed problem with loading plugin js files behind auth proxy [#9509](https://github.com/grafana/grafana/issues/9509)
|
||||
* **Graphite**: Annotation tooltip should render empty string when undefined [#9707](https://github.com/grafana/grafana/issues/9707)
|
||||
|
||||
# 4.6.0 (2017-10-26)
|
||||
|
||||
## Fixes
|
||||
* **Alerting**: Viewer can no longer pause alert rules [#9640](https://github.com/grafana/grafana/issues/9640)
|
||||
* **Playlist**: Bug where playlist controls were missing [#9639](https://github.com/grafana/grafana/issues/9639)
|
||||
* **Firefox**: Creating region annotations now work in firefox [#9638](https://github.com/grafana/grafana/issues/9638)
|
||||
|
||||
# 4.6.0-beta3 (2017-10-23)
|
||||
|
||||
## Fixes
|
||||
* **Prometheus**: Fix for browser crash for short time ranges. [#9575](https://github.com/grafana/grafana/issues/9575)
|
||||
* **Heatmap**: Fix for y-axis not showing. [#9576](https://github.com/grafana/grafana/issues/9576)
|
||||
* **Save to file**: Fix for save to file in export modal. [#9586](https://github.com/grafana/grafana/issues/9586)
|
||||
* **Postgres**: modify group by time macro so it can be used in select clause [#9527](https://github.com/grafana/grafana/pull/9527), thanks [@svenklemm](https://github.com/svenklemm)
|
||||
|
||||
# 4.6.0-beta2 (2017-10-17)
|
||||
|
||||
## Fixes
|
||||
* **ColorPicker**: Fix for color picker not showing [#9549](https://github.com/grafana/grafana/issues/9549)
|
||||
* **Alerting**: Fix for broken test rule button in alert tab [#9539](https://github.com/grafana/grafana/issues/9539)
|
||||
* **Cloudwatch**: Provide error message when failing to add cloudwatch datasource [#9534](https://github.com/grafana/grafana/pull/9534), thx [@mtanda](https://github.com/mtanda)
|
||||
* **Cloudwatch**: Fix unused period parameter [#9536](https://github.com/grafana/grafana/pull/9536), thx [@mtanda](https://github.com/mtanda)
|
||||
* **CSV Export**: Fix for broken CSV export [#9525](https://github.com/grafana/grafana/issues/9525)
|
||||
* **Text panel**: Fix for issue with break lines in Firefox [#9491](https://github.com/grafana/grafana/issues/9491)
|
||||
* **Annotations**: Fix for issue saving annotation event in MySQL DB [#9550](https://github.com/grafana/grafana/issues/9550), thanks [@krise3k](https://github.com/krise3k)
|
||||
|
||||
|
||||
# 4.6.0-beta1 (2017-10-13)
|
||||
|
||||
## New Features
|
||||
* **Annotations**: Add support for creating annotations from graph panel [#8197](https://github.com/grafana/grafana/pull/8197)
|
||||
* **GCS**: Adds support for Google Cloud Storage [#8370](https://github.com/grafana/grafana/issues/8370) thx [@chuhlomin](https://github.com/chuhlomin)
|
||||
* **Prometheus**: Adds /metrics endpoint for exposing Grafana metrics. [#9187](https://github.com/grafana/grafana/pull/9187)
|
||||
* **Graph**: Add support for local formatting in axis. [#1395](https://github.com/grafana/grafana/issues/1395), thx [@m0nhawk](https://github.com/m0nhawk)
|
||||
* **Jaeger**: Add support for open tracing using jaeger in Grafana. [#9213](https://github.com/grafana/grafana/pull/9213)
|
||||
* **Unit types**: New date & time unit types added, useful in singlestat to show dates & times. [#3678](https://github.com/grafana/grafana/issues/3678), [#6710](https://github.com/grafana/grafana/issues/6710), [#2764](https://github.com/grafana/grafana/issues/2764)
|
||||
* **CLI**: Make it possible to install plugins from any url [#5873](https://github.com/grafana/grafana/issues/5873)
|
||||
* **Prometheus**: Add support for instant queries [#5765](https://github.com/grafana/grafana/issues/5765), thx [@mtanda](https://github.com/mtanda)
|
||||
* **Cloudwatch**: Add support for alerting using the cloudwatch datasource [#8050](https://github.com/grafana/grafana/pull/8050), thx [@mtanda](https://github.com/mtanda)
|
||||
* **Pagerduty**: Include triggering series in pagerduty notification [#8479](https://github.com/grafana/grafana/issues/8479), thx [@rickymoorhouse](https://github.com/rickymoorhouse)
|
||||
* **Timezone**: Time ranges like Today & Yesterday now work correctly when timezone setting is set to UTC [#8916](https://github.com/grafana/grafana/issues/8916), thx [@ctide](https://github.com/ctide)
|
||||
* **Prometheus**: Align $__interval with the step parameters. [#9226](https://github.com/grafana/grafana/pull/9226), thx [@alin-amana](https://github.com/alin-amana)
|
||||
* **Prometheus**: Autocomplete for label name and label value [#9208](https://github.com/grafana/grafana/pull/9208), thx [@mtanda](https://github.com/mtanda)
|
||||
* **Postgres**: New Postgres data source [#9209](https://github.com/grafana/grafana/pull/9209), thx [@svenklemm](https://github.com/svenklemm)
|
||||
* **Datasources**: Make datasource HTTP requests verify TLS by default. closes [#9371](https://github.com/grafana/grafana/issues/9371), [#5334](https://github.com/grafana/grafana/issues/5334), [#8812](https://github.com/grafana/grafana/issues/8812), thx [@mattbostock](https://github.com/mattbostock)
|
||||
* **OAuth**: Verify TLS during OAuth callback [#9373](https://github.com/grafana/grafana/issues/9373), thx [@mattbostock](https://github.com/mattbostock)
|
||||
|
||||
## Minor
|
||||
* **SMTP**: Make it possible to set specific HELO for smtp client. [#9319](https://github.com/grafana/grafana/issues/9319)
|
||||
* **Dataproxy**: Allow grafana to renegotiate tls connection [#9250](https://github.com/grafana/grafana/issues/9250)
|
||||
* **HTTP**: set net.Dialer.DualStack to true for all http clients [#9367](https://github.com/grafana/grafana/pull/9367)
|
||||
* **Alerting**: Add diff and percent diff as series reducers [#9386](https://github.com/grafana/grafana/pull/9386), thx [@shanhuhai5739](https://github.com/shanhuhai5739)
|
||||
* **Slack**: Allow images to be uploaded to slack when Token is present [#7175](https://github.com/grafana/grafana/issues/7175), thx [@xginn8](https://github.com/xginn8)
|
||||
* **Opsgenie**: Use their latest API instead of old version [#9399](https://github.com/grafana/grafana/pull/9399), thx [@cglrkn](https://github.com/cglrkn)
|
||||
* **Table**: Add support for displaying the timestamp with milliseconds [#9429](https://github.com/grafana/grafana/pull/9429), thx [@s1061123](https://github.com/s1061123)
|
||||
* **Hipchat**: Add metrics, message and image to hipchat notifications [#9110](https://github.com/grafana/grafana/issues/9110), thx [@eloo](https://github.com/eloo)
|
||||
* **Kafka**: Add support for sending alert notifications to kafka [#7104](https://github.com/grafana/grafana/issues/7104), thx [@utkarshcmu](https://github.com/utkarshcmu)
|
||||
* **Alerting**: add count_non_null as series reducer [#9516](https://github.com/grafana/grafana/issues/9516)
|
||||
|
||||
## Tech
|
||||
* **Go**: Grafana is now built using golang 1.9
|
||||
* **Webpack**: Changed from systemjs to webpack (see readme or building from source guide for new build instructions). Systemjs is still used to load plugins but now plugins can only import a limited set of dependencies. See [PLUGIN_DEV.md](https://github.com/grafana/grafana/blob/master/PLUGIN_DEV.md) for more details on how this can effect some plugins.
|
||||
|
||||
# 4.5.2 (2017-09-22)
|
||||
|
||||
## Fixes
|
||||
* **Graphite**: Fix for issues with jsonData & graphiteVersion null errors [#9258](https://github.com/grafana/grafana/issues/9258)
|
||||
* **Graphite**: Fix for Grafana internal metrics to Graphite sending NaN values [#9279](https://github.com/grafana/grafana/issues/9279)
|
||||
* **HTTP API**: Fix for HEAD method requests [#9307](https://github.com/grafana/grafana/issues/9307)
|
||||
* **Templating**: Fix for duplicate template variable queries when refresh is set to time range change [#9185](https://github.com/grafana/grafana/issues/9185)
|
||||
* **Metrics**: don't write NaN values to graphite [#9279](https://github.com/grafana/grafana/issues/9279)
|
||||
|
||||
# 4.5.1 (2017-09-15)
|
||||
|
||||
## Fixes
|
||||
* **MySQL**: Fixed issue with query editor not showing [#9247](https://github.com/grafana/grafana/issues/9247)
|
||||
|
||||
## Breaking changes
|
||||
* **Metrics**: The metric structure for internal metrics about Grafana published to graphite has changed. This might break dashboards for internal metrics.
|
||||
|
||||
# 4.5.0 (2017-09-14)
|
||||
|
||||
## Fixes & Enhancements since beta1
|
||||
* **Security**: Security fix for api vulnerability (in multiple org setups).
|
||||
* **Shortcuts**: Adds shortcut for creating new dashboard [#8876](https://github.com/grafana/grafana/pull/8876) thx [@mtanda](https://github.com/mtanda)
|
||||
* **Graph**: Right Y-Axis label position fixed [#9172](https://github.com/grafana/grafana/pull/9172)
|
||||
* **General**: Improve rounding of time intervals [#9197](https://github.com/grafana/grafana/pull/9197), thx [@alin-amana](https://github.com/alin-amana)
|
||||
|
||||
# 4.5.0-beta1 (2017-09-05)
|
||||
|
||||
## New Features
|
||||
|
||||
* **Table panel**: Render cell values as links that can have an url template that uses variables from current table row. [#3754](https://github.com/grafana/grafana/issues/3754)
|
||||
* **Elasticsearch**: Add ad hoc filters directly by clicking values in table panel [#8052](https://github.com/grafana/grafana/issues/8052).
|
||||
* **MySQL**: New rich query editor with syntax highlighting
|
||||
* **Prometheus**: New rich query editor with syntax highlighting, metric & range auto complete and integrated function docs. [#5117](https://github.com/grafana/grafana/issues/5117)
|
||||
|
||||
## Enhancements
|
||||
|
||||
* **GitHub OAuth**: Support for GitHub organizations with 100+ teams. [#8846](https://github.com/grafana/grafana/issues/8846), thx [@skwashd](https://github.com/skwashd)
|
||||
* **Graphite**: Calls to Graphite api /metrics/find now include panel or dashboard time range (from & until) in most cases, [#8055](https://github.com/grafana/grafana/issues/8055)
|
||||
* **Graphite**: Added new graphite 1.0 functions, available if you set version to 1.0.x in data source settings. New Functions: mapSeries, reduceSeries, isNonNull, groupByNodes, offsetToZero, grep, weightedAverage, removeEmptySeries, aggregateLine, averageOutsidePercentile, delay, exponentialMovingAverage, fallbackSeries, integralByInterval, interpolate, invert, linearRegression, movingMin, movingMax, movingSum, multiplySeriesWithWildcards, pow, powSeries, removeBetweenPercentile, squareRoot, timeSlice, closes [#8261](https://github.com/grafana/grafana/issues/8261)
|
||||
- **Elasticsearch**: Ad-hoc filters now use query phrase match filters instead of term filters, works on non keyword/raw fields [#9095](https://github.com/grafana/grafana/issues/9095).
|
||||
|
||||
### Breaking change
|
||||
|
||||
* **InfluxDB/Elasticsearch**: The panel & data source option named "Group by time interval" is now named "Min time interval" and does now always define a lower limit for the auto group by time. Without having to use `>` prefix (that prefix still works). This should in theory have close to zero actual impact on existing dashboards. It does mean that if you used this setting to define a hard group by time interval of, say "1d", if you zoomed to a time range wide enough the time range could increase above the "1d" range as the setting is now always considered a lower limit.
|
||||
* **Elasticsearch**: Elasticsearch metric queries without date histogram now return table formated data making table panel much easier to use for this use case. Should not break/change existing dashboards with stock panels but external panel plugins can be affected.
|
||||
|
||||
## Changes
|
||||
|
||||
* **InfluxDB**: Change time range filter for absolute time ranges to be inclusive instead of exclusive [#8319](https://github.com/grafana/grafana/issues/8319), thx [@Oxydros](https://github.com/Oxydros)
|
||||
* **InfluxDB**: Added parentheses around tag filters in queries [#9131](https://github.com/grafana/grafana/pull/9131)
|
||||
|
||||
## Bug Fixes
|
||||
|
||||
* **Modals**: Maintain scroll position after opening/leaving modal [#8800](https://github.com/grafana/grafana/issues/8800)
|
||||
* **Templating**: You cannot select data source variables as data source for other template variables [#7510](https://github.com/grafana/grafana/issues/7510)
|
||||
* **MySQL/Postgres**: Fix for max_idle_conn option default which was wrongly set to zero which does not mean unlimited but means zero, which in practice kind of disables connection pooling, which is not good. Fixes [#8513](https://github.com/grafana/grafana/issues/8513)
|
||||
|
||||
# 4.4.3 (2017-08-07)
|
||||
|
||||
## Bug Fixes
|
||||
|
||||
* **Search**: Fix for issue that caused search view to hide when you clicked starred or tags filters, fixes [#8981](https://github.com/grafana/grafana/issues/8981)
|
||||
* **Modals**: ESC key now closes modal again, fixes [#8981](https://github.com/grafana/grafana/issues/8988), thx [@j-white](https://github.com/j-white)
|
||||
|
||||
# 4.4.2 (2017-08-01)
|
||||
|
||||
## Bug Fixes
|
||||
|
||||
* **GrafanaDB(mysql)**: Fix for dashboard_version.data column type, now changed to MEDIUMTEXT, fixes [#8813](https://github.com/grafana/grafana/issues/8813)
|
||||
* **Dashboard(settings)**: Closing setting views using ESC key did not update url correctly, fixes [#8869](https://github.com/grafana/grafana/issues/8869)
|
||||
* **InfluxDB**: Wrong username/password parameter name when using direct access, fixes [#8789](https://github.com/grafana/grafana/issues/8789)
|
||||
* **Forms(TextArea)**: Bug fix for no scroll in text areas [#8797](https://github.com/grafana/grafana/issues/8797)
|
||||
* **Png Render API**: Bug fix for timeout url parameter. It now works as it should. Default value was also increased from 30 to 60 seconds [#8710](https://github.com/grafana/grafana/issues/8710)
|
||||
* **Search**: Fix for not being able to close search by clicking on right side of search result container, [8848](https://github.com/grafana/grafana/issues/8848)
|
||||
* **Cloudwatch**: Fix for using variables in templating metrics() query, [8965](https://github.com/grafana/grafana/issues/8965)
|
||||
|
||||
## Changes
|
||||
|
||||
* **Settings(defaults)**: allow_sign_up default changed from true to false [#8743](https://github.com/grafana/grafana/issues/8743)
|
||||
* **Settings(defaults)**: allow_org_create default changed from true to false
|
||||
|
||||
# 4.4.1 (2017-07-05)
|
||||
|
||||
## Bug Fixes
|
||||
|
||||
* **Migrations**: migration fails where dashboard.created_by is null [#8783](https://github.com/grafana/grafana/issues/8783)
|
||||
|
||||
# 4.4.0 (2017-07-04)
|
||||
|
||||
## New Features
|
||||
**Dashboard History**: View dashboard version history, compare any two versions (summary & json diffs), restore to old version. This big feature
|
||||
was contributed by **Walmart Labs**. Big thanks to them for this massive contribution!
|
||||
Initial feature request: [#4638](https://github.com/grafana/grafana/issues/4638)
|
||||
Pull Request: [#8472](https://github.com/grafana/grafana/pull/8472)
|
||||
|
||||
## Enhancements
|
||||
* **Elasticsearch**: Added filter aggregation label [#8420](https://github.com/grafana/grafana/pull/8420), thx [@tianzk](github.com/tianzk)
|
||||
* **Sensu**: Added option for source and handler [#8405](https://github.com/grafana/grafana/pull/8405), thx [@joemiller](github.com/joemiller)
|
||||
* **CSV**: Configurable csv export datetime format [#8058](https://github.com/grafana/grafana/issues/8058), thx [@cederigo](github.com/cederigo)
|
||||
* **Table Panel**: Column style that preserves formatting/indentation (like pre tag) [#6617](https://github.com/grafana/grafana/issues/6617)
|
||||
* **DingDing**: Add DingDing Alert Notifier [#8473](https://github.com/grafana/grafana/pull/8473) thx [@jiamliang](https://github.com/jiamliang)
|
||||
|
||||
## Minor Enhancements
|
||||
|
||||
* **Elasticsearch**: Add option for result set size in raw_document [#3426](https://github.com/grafana/grafana/issues/3426) [#8527](https://github.com/grafana/grafana/pull/8527), thx [@mk-dhia](github.com/mk-dhia)
|
||||
|
||||
## Bug Fixes
|
||||
|
||||
* **Graph**: Bug fix for negative values in histogram mode [#8628](https://github.com/grafana/grafana/issues/8628)
|
||||
|
||||
# 4.3.2 (2017-05-31)
|
||||
|
||||
## Bug fixes
|
||||
|
||||
* **InfluxDB**: Fixed issue with query editor not showing ALIAS BY input field when in text editor mode [#8459](https://github.com/grafana/grafana/issues/8459)
|
||||
* **Graph Log Scale**: Fixed issue with log scale going below x-axis [#8244](https://github.com/grafana/grafana/issues/8244)
|
||||
* **Playlist**: Fixed dashboard play order issue [#7688](https://github.com/grafana/grafana/issues/7688)
|
||||
* **Elasticsearch**: Fixed table query issue with ES 2.x [#8467](https://github.com/grafana/grafana/issues/8467), thx [@goldeelox](https://github.com/goldeelox)
|
||||
|
||||
## Changes
|
||||
* **Lazy Loading Of Panels**: Panels are no longer loaded as they are scrolled into view, this was reverted due to Chrome bug, might be reintroduced when Chrome fixes it's JS blocking behavior on scroll. [#8500](https://github.com/grafana/grafana/issues/8500)
|
||||
|
||||
# 4.3.1 (2017-05-23)
|
||||
|
||||
## Bug fixes
|
||||
|
||||
* **S3 image upload**: Fixed image url issue for us-east-1 (us standard) region. If you were missing slack images for alert notifications this should fix it. [#8444](https://github.com/grafana/grafana/issues/8444)
|
||||
|
||||
# 4.3.0-stable (2017-05-23)
|
||||
|
||||
## Bug fixes
|
||||
|
||||
* **Gzip**: Fixed crash when gzip was enabled [#8380](https://github.com/grafana/grafana/issues/8380)
|
||||
* **Graphite**: Fixed issue with toggle edit mode in query editor [#8377](https://github.com/grafana/grafana/issues/8377)
|
||||
* **Alerting**: Fixed issue with state history not showing query execution errors [#8412](https://github.com/grafana/grafana/issues/8412)
|
||||
* **Alerting**: Fixed issue with missing state history events/annotations when using sqlite3 database [#7992](https://github.com/grafana/grafana/issues/7992)
|
||||
* **Sqlite**: Fixed issue with database table locked when using sqlite3 database [#7992](https://github.com/grafana/grafana/issues/7992)
|
||||
* **Alerting**: Fixed issue with annotations showing up in unsaved dashboards, new graph & alert panel. [#8361](https://github.com/grafana/grafana/issues/8361)
|
||||
* **webdav**: Fixed http proxy env variable support for webdav image upload [#7922](https://github.com/grafana/grafana/issues/79222), thx [@berghauz](https://github.com/berghauz)
|
||||
* **Prometheus**: Fixed issue with hiding query [#8413](https://github.com/grafana/grafana/issues/8413)
|
||||
|
||||
## Enhancements
|
||||
|
||||
* **VictorOps**: Now supports panel image & auto resolve [#8431](https://github.com/grafana/grafana/pull/8431), thx [@davidmscott](https://github.com/davidmscott)
|
||||
* **Alerting**: Alert annotations now provide more info [#8421](https://github.com/grafana/grafana/pull/8421)
|
||||
|
||||
# 4.3.0-beta1 (2017-05-12)
|
||||
|
||||
## Enhancements
|
||||
|
||||
* **InfluxDB**: influxdb query builder support for ORDER BY and LIMIT (allows TOPN queries) [#6065](https://github.com/grafana/grafana/issues/6065) Support influxdb's SLIMIT Feature [#7232](https://github.com/grafana/grafana/issues/7232) thx [@thuck](https://github.com/thuck)
|
||||
* **Panels**: Delay loading & Lazy load panels as they become visible (scrolled into view) [#5216](https://github.com/grafana/grafana/issues/5216) thx [@jifwin](https://github.com/jifwin)
|
||||
* **Graph**: Support auto grid min/max when using log scale [#3090](https://github.com/grafana/grafana/issues/3090), thx [@bigbenhur](https://github.com/bigbenhur)
|
||||
* **Graph**: Support for histograms [#600](https://github.com/grafana/grafana/issues/600)
|
||||
* **Prometheus**: Support table response formats (column per label) [#6140](https://github.com/grafana/grafana/issues/6140), thx [@mtanda](https://github.com/mtanda)
|
||||
* **Single Stat Panel**: support for non time series data [#6564](https://github.com/grafana/grafana/issues/6564)
|
||||
* **Server**: Monitoring Grafana (health check endpoint) [#3302](https://github.com/grafana/grafana/issues/3302)
|
||||
* **Heatmap**: Heatmap Panel [#7934](https://github.com/grafana/grafana/pull/7934)
|
||||
* **Elasticsearch**: histogram aggregation [#3164](https://github.com/grafana/grafana/issues/3164)
|
||||
|
||||
## Minor Enhancements
|
||||
|
||||
* **InfluxDB**: Small fix for the "glow" when focusing the field for LIMIT and SLIMIT [#7799](https://github.com/grafana/grafana/pull/7799) thx [@thuck](https://github.com/thuck)
|
||||
* **Prometheus**: Make Prometheus query field a textarea [#7663](https://github.com/grafana/grafana/issues/7663), thx [@hagen1778](https://github.com/hagen1778)
|
||||
* **Prometheus**: Step parameter changed semantics to min step to reduce the load on Prometheus and rendering in browser [#8073](https://github.com/grafana/grafana/pull/8073), thx [@bobrik](https://github.com/bobrik)
|
||||
* **Templating**: Should not be possible to create self-referencing (recursive) template variable definitions [#7614](https://github.com/grafana/grafana/issues/7614) thx [@thuck](https://github.com/thuck)
|
||||
* **Cloudwatch**: Correctly obtain IAM roles within ECS container tasks [#7892](https://github.com/grafana/grafana/issues/7892) thx [@gomlgs](https://github.com/gomlgs)
|
||||
* **Units**: New number format: Scientific notation [#7781](https://github.com/grafana/grafana/issues/7781) thx [@cadnce](https://github.com/cadnce)
|
||||
* **Oauth**: Add common type for oauth authorization errors [#6428](https://github.com/grafana/grafana/issues/6428) thx [@amenzhinsky](https://github.com/amenzhinsky)
|
||||
* **Templating**: Data source variable now supports multi value and panel repeats [#7030](https://github.com/grafana/grafana/issues/7030) thx [@mtanda](https://github.com/mtanda)
|
||||
* **Telegram**: Telegram alert is not sending metric and legend. [#8110](https://github.com/grafana/grafana/issues/8110), thx [@bashgeek](https://github.com/bashgeek)
|
||||
* **Graph**: Support dashed lines [#514](https://github.com/grafana/grafana/issues/514), thx [@smalik03](https://github.com/smalik03)
|
||||
* **Table**: Support to change column header text [#3551](https://github.com/grafana/grafana/issues/3551)
|
||||
* **Alerting**: Better error when SMTP is not configured [#8093](https://github.com/grafana/grafana/issues/8093)
|
||||
* **Pushover**: Add an option to attach graph image link in Pushover notification [#8043](https://github.com/grafana/grafana/issues/8043) thx [@devkid](https://github.com/devkid)
|
||||
* **WebDAV**: Allow to set different ImageBaseUrl for WebDAV upload and image link [#7914](https://github.com/grafana/grafana/issues/7914)
|
||||
* **Panels**: type-ahead mixed datasource selection [#7697](https://github.com/grafana/grafana/issues/7697) thx [@mtanda](https://github.com/mtanda)
|
||||
* **Security**: User enumeration problem [#7619](https://github.com/grafana/grafana/issues/7619)
|
||||
* **InfluxDB**: Register new queries available in InfluxDB - Holt Winters [#5619](https://github.com/grafana/grafana/issues/5619) thx [@rikkuness](https://github.com/rikkuness)
|
||||
* **Server**: Support listening on a UNIX socket [#4030](https://github.com/grafana/grafana/issues/4030), thx [@mitjaziv](https://github.com/mitjaziv)
|
||||
* **Graph**: Support log scaling for values smaller 1 [#5278](https://github.com/grafana/grafana/issues/5278)
|
||||
* **InfluxDB**: Slow 'select measurement' rendering for InfluxDB [#2524](https://github.com/grafana/grafana/issues/2524), thx [@sbhenderson](https://github.com/sbhenderson)
|
||||
* **Config**: Configurable signout menu activation [#7968](https://github.com/grafana/grafana/pull/7968), thx [@seuf](https://github.com/seuf)
|
||||
|
||||
## Fixes
|
||||
* **Table Panel**: Fixed annotation display in table panel, [#8023](https://github.com/grafana/grafana/issues/8023)
|
||||
* **Dashboard**: If refresh is blocked due to tab not visible, then refresh when it becomes visible [#8076](https://github.com/grafana/grafana/issues/8076) thanks [@SimenB](https://github.com/SimenB)
|
||||
* **Snapshots**: Fixed problem with annotations & snapshots [#7659](https://github.com/grafana/grafana/issues/7659)
|
||||
* **Graph**: MetricSegment loses type when value is an asterisk [#8277](https://github.com/grafana/grafana/issues/8277), thx [@Gordiychuk](https://github.com/Gordiychuk)
|
||||
* **Alerting**: Alert notifications do not show charts when using a non public S3 bucket [#8250](https://github.com/grafana/grafana/issues/8250) thx [@rogerswingle](https://github.com/rogerswingle)
|
||||
* **Graph**: 100% client CPU usage on red alert glow animation [#8222](https://github.com/grafana/grafana/issues/8222)
|
||||
* **InfluxDB**: Templating: "All" query does match too much [#8165](https://github.com/grafana/grafana/issues/8165)
|
||||
* **Dashboard**: Description tooltip is not fully displayed [#7970](https://github.com/grafana/grafana/issues/7970)
|
||||
* **Proxy**: Redirect after switching Org does not obey sub path in root_url (using reverse proxy) [#8089](https://github.com/grafana/grafana/issues/8089)
|
||||
* **Templating**: Restoration of ad-hoc variable from URL does not work correctly [#8056](https://github.com/grafana/grafana/issues/8056) thx [@tamayika](https://github.com/tamayika)
|
||||
* **InfluxDB**: timeFilter cannot be used twice in alerts [#7969](https://github.com/grafana/grafana/issues/7969)
|
||||
* **MySQL**: 4-byte UTF8 not supported when using MySQL database (allows Emojis) [#7958](https://github.com/grafana/grafana/issues/7958)
|
||||
* **Alerting**: api/alerts and api/alert/:id hold previous data for "message" and "Message" field when field value is changed from "some string" to empty string. [#7927](https://github.com/grafana/grafana/issues/7927)
|
||||
* **Graph**: Cannot add fill below to series override [#7916](https://github.com/grafana/grafana/issues/7916)
|
||||
* **InfluxDB**: InfluxDB datasource test passes even if the database doesn't exist [#7864](https://github.com/grafana/grafana/issues/7864)
|
||||
* **Prometheus**: Displaying Prometheus annotations is incredibly slow [#7750](https://github.com/grafana/grafana/issues/7750), thx [@mtanda](https://github.com/mtanda)
|
||||
* **Graphite**: grafana generates empty find query to graphite -> 422 Unprocessable Entity [#7740](https://github.com/grafana/grafana/issues/7740)
|
||||
* **Admin**: make organisation filter case insensitive [#8194](https://github.com/grafana/grafana/issues/8194), thx [@Alexander-N](https://github.com/Alexander-N)
|
||||
|
||||
## Changes
|
||||
* **Elasticsearch**: Changed elasticsearch Terms aggregation to default to Min Doc Count to 1, and sort order to Top [#8321](https://github.com/grafana/grafana/issues/8321)
|
||||
|
||||
## Tech
|
||||
|
||||
* **Library Upgrade**: inconshreveable/log15 outdated - no support for solaris [#8262](https://github.com/grafana/grafana/issues/8262)
|
||||
* **Library Upgrade**: Upgrade Macaron [#7600](https://github.com/grafana/grafana/issues/7600)
|
||||
|
||||
# 4.2.0 (2017-03-22)
|
||||
## Minor Enhancements
|
||||
* **Templates**: Prevent use of the prefix `__` for templates in web UI [#7678](https://github.com/grafana/grafana/issues/7678)
|
||||
* **Threema**: Add emoji to Threema alert notifications [#7676](https://github.com/grafana/grafana/pull/7676) thx [@dbrgn](https://github.com/dbrgn)
|
||||
* **Panels**: Support dm3 unit [#7695](https://github.com/grafana/grafana/issues/7695) thx [@mitjaziv](https://github.com/mitjaziv)
|
||||
* **Docs**: Added some details about Sessions in Postgres [#7694](https://github.com/grafana/grafana/pull/7694) thx [@rickard-von-essen](https://github.com/rickard-von-essen)
|
||||
* **Influxdb**: Allow commas in template variables [#7681](https://github.com/grafana/grafana/issues/7681) thx [@thuck](https://github.com/thuck)
|
||||
* **Cloudwatch**: stop using deprecated session.New() [#7736](https://github.com/grafana/grafana/issues/7736) thx [@mtanda](https://github.com/mtanda)
|
||||
* **OpenTSDB**: Pass dropcounter rate option if no max counter and no reset value or reset value as 0 is specified [#7743](https://github.com/grafana/grafana/pull/7743) thx [@r4um](https://github.com/r4um)
|
||||
* **Templating**: support full resolution for $interval variable [#7696](https://github.com/grafana/grafana/pull/7696) thx [@mtanda](https://github.com/mtanda)
|
||||
* **Elasticsearch**: Unique Count on string fields in ElasticSearch [#3536](https://github.com/grafana/grafana/issues/3536), thx [@pyro2927](https://github.com/pyro2927)
|
||||
* **Templating**: Data source template variable that refers to other variable in regex filter [#6365](https://github.com/grafana/grafana/issues/6365) thx [@rlodge](https://github.com/rlodge)
|
||||
* **Admin**: Global User List: add search and pagination [#7469](https://github.com/grafana/grafana/issues/7469)
|
||||
* **User Management**: Invite UI is now disabled when login form is disabled [#7875](https://github.com/grafana/grafana/issues/7875)
|
||||
|
||||
## Bugfixes
|
||||
* **Webhook**: Use proxy settings from environment variables [#7710](https://github.com/grafana/grafana/issues/7710)
|
||||
* **Panels**: Deleting a dashboard with unsaved changes raises an error message [#7591](https://github.com/grafana/grafana/issues/7591) thx [@thuck](https://github.com/thuck)
|
||||
* **Influxdb**: Query builder detects regex too easily for measurement [#7276](https://github.com/grafana/grafana/issues/7276) thx [@thuck](https://github.com/thuck)
|
||||
* **Docs**: router_logging not documented [#7723](https://github.com/grafana/grafana/issues/7723)
|
||||
* **Alerting**: Spelling mistake [#7739](https://github.com/grafana/grafana/pull/7739) thx [@woutersmit](https://github.com/woutersmit)
|
||||
* **Alerting**: Graph legend scrolls to top when an alias is toggled/clicked [#7680](https://github.com/grafana/grafana/issues/7680) thx [@p4ddy1](https://github.com/p4ddy1)
|
||||
* **Panels**: Fixed panel tooltip description after scrolling down [#7708](https://github.com/grafana/grafana/issues/7708) thx [@askomorokhov](https://github.com/askomorokhov)
|
||||
|
||||
# 4.2.0-beta1 (2017-02-27)
|
||||
|
||||
## Enhancements
|
||||
* **Telegram**: Added Telegram alert notifier [#7098](https://github.com/grafana/grafana/pull/7098), thx [@leonoff](https://github.com/leonoff)
|
||||
* **Templating**: Make $__interval and $__interval_ms global built in variables that can be used in by any datasource (in panel queries), closes [#7190](https://github.com/grafana/grafana/issues/7190), closes [#6582](https://github.com/grafana/grafana/issues/6582)
|
||||
* **S3 Image Store**: External s3 image store (used in alert notifications) now support AWS IAM Roles, closes [#6985](https://github.com/grafana/grafana/issues/6985), [#7058](https://github.com/grafana/grafana/issues/7058) thx [@mtanda](https://github.com/mtanda)
|
||||
* **SingleStat**: Implements diff aggregation method for singlestat [#7234](https://github.com/grafana/grafana/issues/7234), thx [@oliverpool](https://github.com/oliverpool)
|
||||
* **Dataproxy**: Added setting to enable more verbose logging in dataproxy [#7209](https://github.com/grafana/grafana/pull/7209), thx [@Ricky-N](https://github.com/Ricky-N)
|
||||
* **Alerting**: Better information about why an alert triggered [#7035](https://github.com/grafana/grafana/issues/7035)
|
||||
* **LINE**: Add LINE as alerting notification channel [#7301](https://github.com/grafana/grafana/pull/7301), thx [@huydx](https://github.com/huydx)
|
||||
* **LINE**: Adds image to notification message [#7417](https://github.com/grafana/grafana/pull/7417), thx [@Erliz](https://github.com/Erliz)
|
||||
* **Hipchat**: Adds support for sending alert notifications to hipchat [#6451](https://github.com/grafana/grafana/issues/6451), thx [@jregovic](https://github.com/jregovic)
|
||||
* **Alerting**: Uploading images for alert notifications is now optional [#7419](https://github.com/grafana/grafana/issues/7419)
|
||||
* **Dashboard**: Adds shortcut for collapsing/expanding all rows [#552](https://github.com/grafana/grafana/issues/552), thx [@mtanda](https://github.com/mtanda)
|
||||
* **Alerting**: Adds de-duping of alert notifications [#7632](https://github.com/grafana/grafana/pull/7632)
|
||||
* **Orgs**: Sharing dashboards using Grafana share feature will now redirect to correct org. [#1613](https://github.com/grafana/grafana/issues/1613)
|
||||
* **Pushover**: Add Pushover alert notifications [#7526](https://github.com/grafana/grafana/pull/7526) thx [@devkid](https://github.com/devkid)
|
||||
* **Threema**: Add Threema Gateway alert notification integration [#7482](https://github.com/grafana/grafana/pull/7482) thx [@dbrgn](https://github.com/dbrgn)
|
||||
|
||||
## Minor Enhancements
|
||||
* **Optimization**: Never issue refresh event when Grafana tab is not visible [#7218](https://github.com/grafana/grafana/issues/7218), thx [@mtanda](https://github.com/mtanda)
|
||||
* **Browser History**: Browser back/forward now works with time ranges / zoom, [#7259](https://github.com/grafana/grafana/issues/7259)
|
||||
* **Elasticsearch**: Support for Min Doc Count options in Terms aggregation [#7324](https://github.com/grafana/grafana/pull/7324), thx [@lpic10](https://github.com/lpic10)
|
||||
* **Elasticsearch**: Term aggregation limit can now be changed in template queries [#7112](https://github.com/grafana/grafana/issues/7112), thx [@FFalcon](https://github.com/FFalcon)
|
||||
* **Elasticsearch**: Ad-hoc filters now support all operators [#7612](https://github.com/grafana/grafana/issues/7612), thx [@tamayika](https://github.com/tamayika)
|
||||
* **Graph**: Add full series name as title for legends. [#7493](https://github.com/grafana/grafana/pull/7493), thx [@kolobaev](https://github.com/kolobaev)
|
||||
* **Table**: Add a message when queries returns no data. [#6109](https://github.com/grafana/grafana/issues/6109), thx [@xginn8](https://github.com/xginn8)
|
||||
* **Graph**: Set max width for series names in legend tables. [#2385](https://github.com/grafana/grafana/issues/2385), thx [@kolobaev](https://github.com/kolobaev)
|
||||
* **Database**: Allow max db connection pool configuration [#7427](https://github.com/grafana/grafana/issues/7427), thx [@huydx](https://github.com/huydx)
|
||||
* **Datasources** Delete datasource by name [#7476](https://github.com/grafana/grafana/issues/7476), thx [@huydx](https://github.com/huydx)
|
||||
* **Dataproxy**: Only allow get that begins with api/ to access Prometheus [#7459](https://github.com/grafana/grafana/pull/7459), thx [@mtanda](https://github.com/mtanda)
|
||||
* **Snapshot**: Make timeout for snapshot creation configurable [#7449](https://github.com/grafana/grafana/pull/7449) thx [@ryu1-sakai](https://github.com/ryu1-sakai)
|
||||
* **Panels**: Add more physics units [#7554](https://github.com/grafana/grafana/pull/7554) thx [@ryantxu](https://github.com/ryantxu)
|
||||
* **Email**: Add sender's name on email [#2131](https://github.com/grafana/grafana/issues/2131) thx [@jacobbednarz](https://github.com/jacobbednarz)
|
||||
* **HTTPS**: Set tls 1.2 as lowest tls version. [#7347](https://github.com/grafana/grafana/pull/7347) thx [@roman-vynar](https://github.com/roman-vynar)
|
||||
* **Table**: Added suppressing of empty results to table plugin. [#7602](https://github.com/grafana/grafana/pull/7602) thx [@LLIyRiK](https://github.com/LLIyRiK)
|
||||
|
||||
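The delete-by-name enhancement above removes the need to look up a data source's numeric id before deleting it. A minimal sketch of calling it from Go, assuming the `DELETE /api/datasources/name/:name` route from that issue and a Bearer API key (both assumptions, not verified against this exact build):

```go
// delete_datasource.go - minimal sketch: delete a Grafana data source by name
// over the HTTP API. The endpoint path, the example data source name and the
// API key are assumptions for illustration, not an excerpt from Grafana itself.
package main

import (
	"fmt"
	"log"
	"net/http"
)

func main() {
	// Hypothetical values: adjust host, data source name and API key.
	url := "http://localhost:3000/api/datasources/name/graphite-prod"
	req, err := http.NewRequest(http.MethodDelete, url, nil)
	if err != nil {
		log.Fatal(err)
	}
	req.Header.Set("Authorization", "Bearer <api-key>")

	resp, err := http.DefaultClient.Do(req)
	if err != nil {
		log.Fatal(err)
	}
	defer resp.Body.Close()
	fmt.Println("status:", resp.Status) // expect 200 when the data source existed
}
```
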
## Tech

* **Library Upgrade**: Upgraded angularjs from 1.5.8 to 1.6.1 [#7274](https://github.com/grafana/grafana/issues/7274)
* **Backend**: Grafana is now built using golang 1.8

## Bugfixes

* **Alerting**: Fixes missing support for no_data and execution error when testing alerts [#7149](https://github.com/grafana/grafana/issues/7149)
* **Dashboard**: Avoid duplicate data in dashboard json for panels with alerts [#7256](https://github.com/grafana/grafana/pull/7256)
* **Alertlist**: Only show scrollbar when required [#7269](https://github.com/grafana/grafana/issues/7269)
* **SMTP**: Set LocalName to hostname [#7223](https://github.com/grafana/grafana/issues/7223)
* **Sidemenu**: Disable sign out in sidemenu for AuthProxyEnabled [#7377](https://github.com/grafana/grafana/pull/7377), thx [@solugebefola](https://github.com/solugebefola)
* **Prometheus**: Add support for basic auth in Prometheus tsdb package [#6799](https://github.com/grafana/grafana/issues/6799), thx [@hagen1778](https://github.com/hagen1778)
* **OAuth**: Redirect to original page when logging in with OAuth [#7513](https://github.com/grafana/grafana/issues/7513)
* **Annotations**: Wrap text in annotations tooltip [#7542](https://github.com/grafana/grafana/pull/7542), thx [@xginn8](https://github.com/xginn8)
* **Templating**: Fixes error when using numeric sort on empty strings [#7382](https://github.com/grafana/grafana/issues/7382)
* **Templating**: Fixed issue detecting template variable dependency [#7354](https://github.com/grafana/grafana/issues/7354)

# 4.1.2 (2017-02-13)

### Bugfixes

* **Table**: Fixes broken annotation rendering mode in the table panel [#7268](https://github.com/grafana/grafana/issues/7268)
* **Data Sources**: Sorting for lists of data sources in the UI is now case insensitive [#7491](https://github.com/grafana/grafana/issues/7491)
* **Admin**: Support more than 1000 users in the global users list [#7469](https://github.com/grafana/grafana/issues/7469)

# 4.1.1 (2017-01-11)

### Bugfixes

* **Graph Panel**: Fixed issue with legend height in table mode [#7221](https://github.com/grafana/grafana/issues/7221)

# 4.1.0 (2017-01-11)

### Bugfixes

* **Server side PNG rendering**: Fixed issue with y-axis label rotation in phantomjs rendered images [#6924](https://github.com/grafana/grafana/issues/6924)
* **Graph**: Fixed centering of y-axis label [#7099](https://github.com/grafana/grafana/issues/7099)
* **Graph**: Fixed graph legend table mode and always visible scrollbar [#6828](https://github.com/grafana/grafana/issues/6828)
* **Templating**: Fixed template variable value groups/tags feature [#6752](https://github.com/grafana/grafana/issues/6752)
* **Webhook**: Fixed webhook username mismatch [#7195](https://github.com/grafana/grafana/pull/7195), thx [@theisenmark](https://github.com/theisenmark)
* **Influxdb**: Handles time(auto) the same way as time($interval) [#6997](https://github.com/grafana/grafana/issues/6997)

## Enhancements

* **Elasticsearch**: Added support for all moving average options [#7154](https://github.com/grafana/grafana/pull/7154), thx [@vaibhavinbayarea](https://github.com/vaibhavinbayarea)

# 4.1-beta1 (2016-12-21)

### Enhancements

* **Postgres**: Add support for certs for the Postgres database [#6655](https://github.com/grafana/grafana/issues/6655)
* **Victorops**: Add VictorOps notification integration [#6411](https://github.com/grafana/grafana/issues/6411), thx [@ichekrygin](https://github.com/ichekrygin)
* **Opsgenie**: Add OpsGenie notification integration [#6687](https://github.com/grafana/grafana/issues/6687), thx [@kylemcc](https://github.com/kylemcc)
* **Singlestat**: New aggregation on singlestat panel [#6740](https://github.com/grafana/grafana/pull/6740), thx [@dirk-leroux](https://github.com/dirk-leroux)
* **Cloudwatch**: Make it possible to specify access and secret key on the data source config page [#6697](https://github.com/grafana/grafana/issues/6697)
* **Table**: Added Hidden Column Style for Table Panel [#5677](https://github.com/grafana/grafana/pull/5677), thx [@bmundt](https://github.com/bmundt)
* **Graph**: Shared crosshair option renamed to shared tooltip; shows the tooltip on all graphs as you hover over one graph. [#1578](https://github.com/grafana/grafana/pull/1578), [#6274](https://github.com/grafana/grafana/pull/6274)
* **Elasticsearch**: Added support for the Missing option (bucket) for terms aggregation [#4244](https://github.com/grafana/grafana/pull/4244), thx [@shanielh](https://github.com/shanielh)
* **Elasticsearch**: Added support for Elasticsearch 5.x [#5740](https://github.com/grafana/grafana/issues/5740), thx [@lpic10](https://github.com/lpic10)
* **CLI**: Make it possible to reset the admin password using the grafana-cli. [#5479](https://github.com/grafana/grafana/issues/5479)
* **Influxdb**: Support multiple tags in InfluxDB annotations. [#4550](https://github.com/grafana/grafana/pull/4550), thx [@adrianlzt](https://github.com/adrianlzt)
* **LDAP**: Basic Auth now supports LDAP username and password [#6940](https://github.com/grafana/grafana/pull/6940), thx [@utkarshcmu](https://github.com/utkarshcmu)
* **LDAP**: Now works with Auth Proxy; role and organisation mapping & sync will be performed regularly. [#6895](https://github.com/grafana/grafana/pull/6895), thx [@Seuf](https://github.com/seuf)
* **Alerting**: Adds OK as a no data option. [#6866](https://github.com/grafana/grafana/issues/6866)
* **Alert list**: Order alerts based on state. [#6676](https://github.com/grafana/grafana/issues/6676)
* **Alerting**: Add API endpoint for pausing all alerts (see the sketch after this list). [#6589](https://github.com/grafana/grafana/issues/6589)
* **Panel**: Added help text for panels. [#4079](https://github.com/grafana/grafana/issues/4079), thx [@utkarshcmu](https://github.com/utkarshcmu)

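A minimal sketch of calling the pause-all-alerts endpoint mentioned above. The `/api/admin/pause-all-alerts` path, the JSON payload shape, and the basic-auth admin credentials are assumptions drawn from the issue, not verified against this release:

```go
// pause_all_alerts.go - minimal sketch: pause every alert rule via the admin API.
// Endpoint path and payload shape are assumptions based on issue #6589.
package main

import (
	"bytes"
	"fmt"
	"log"
	"net/http"
)

func main() {
	body := bytes.NewBufferString(`{"paused": true}`) // set false to resume
	req, err := http.NewRequest(http.MethodPost,
		"http://localhost:3000/api/admin/pause-all-alerts", body)
	if err != nil {
		log.Fatal(err)
	}
	req.Header.Set("Content-Type", "application/json")
	req.SetBasicAuth("admin", "admin") // hypothetical admin credentials

	resp, err := http.DefaultClient.Do(req)
	if err != nil {
		log.Fatal(err)
	}
	defer resp.Body.Close()
	fmt.Println("status:", resp.Status)
}
```
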
### Bugfixes

* **API**: HTTP API for deleting an org returned an incorrect message for a non-existing org [#6679](https://github.com/grafana/grafana/issues/6679)
* **Dashboard**: Posting an empty dashboard resulted in a corrupted dashboard [#5443](https://github.com/grafana/grafana/issues/5443)
* **Logging**: Fixed logging level config issue [#6978](https://github.com/grafana/grafana/issues/6978)
* **Notifications**: Remove HTML escaping of the email subject. [#6905](https://github.com/grafana/grafana/issues/6905)
* **Influxdb**: Fixes broken field dropdown when using template vars as measurement. [#6473](https://github.com/grafana/grafana/issues/6473)

# 4.0.2 (2016-12-08)

### Enhancements

* **Playlist**: Add support for kiosk mode [#6727](https://github.com/grafana/grafana/issues/6727)

### Bugfixes

* **Alerting**: Add alert message to webhook notifications [#6807](https://github.com/grafana/grafana/issues/6807)
* **Alerting**: Fixes a bug where the avg() reducer treated null as zero. [#6879](https://github.com/grafana/grafana/issues/6879)
* **PNG Rendering**: Fix for server side rendering when using a non-default http addr bind and domain setting [#6813](https://github.com/grafana/grafana/issues/6813)
* **PNG Rendering**: Fix for server side rendering when setting enforce_domain to true [#6769](https://github.com/grafana/grafana/issues/6769)
* **Webhooks**: Add content type json to outgoing webhooks [#6822](https://github.com/grafana/grafana/issues/6822)
* **Keyboard shortcut**: Fixed zoom out shortcut [#6837](https://github.com/grafana/grafana/issues/6837)
* **Webdav**: Adds basic auth headers to webdav uploader [#6779](https://github.com/grafana/grafana/issues/6779)

# 4.0.1 (2016-12-02)

> **Notice**
4.0.0 had a serious connection pooling issue when using a data source in proxy access mode. This bug caused lots of resource issues
due to too many connections/file handles on the data source backend. This problem is fixed in this release.

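The root cause described above is the classic Go pitfall of creating a fresh `http.Transport` (and with it a fresh connection pool) per request instead of reusing one. A short sketch of the reuse pattern, purely illustrative and not the actual Grafana patch:

```go
// transport_reuse.go - illustrative sketch of reusing one http.Transport so
// keep-alive connections are pooled instead of leaking file handles.
// Not the actual Grafana fix, just the general pattern it relies on.
package main

import (
	"fmt"
	"io"
	"log"
	"net/http"
	"time"
)

// A single shared transport: its idle-connection pool is reused across requests.
var sharedTransport = &http.Transport{
	MaxIdleConnsPerHost: 100,
	IdleConnTimeout:     90 * time.Second,
}

var client = &http.Client{Transport: sharedTransport, Timeout: 30 * time.Second}

func main() {
	for i := 0; i < 3; i++ {
		resp, err := client.Get("http://localhost:3000/api/health") // hypothetical target URL
		if err != nil {
			log.Fatal(err)
		}
		io.Copy(io.Discard, resp.Body) // drain so the keep-alive connection can be reused
		resp.Body.Close()
		fmt.Println("request", i, "->", resp.Status)
	}
}
```
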
### Bugfixes

* **Metrics**: Fixes nil pointer dereference on my arm build [#6749](https://github.com/grafana/grafana/issues/6749)
* **Data proxy**: Fixes a tcp pooling issue in the datasource reverse proxy [#6759](https://github.com/grafana/grafana/issues/6759)

# 4.0-stable (2016-11-29)

### Bugfixes

* **Server-side rendering**: Fixed address used when rendering panel via phantomjs and using non default http_addr config [#6660](https://github.com/grafana/grafana/issues/6660)
* **Graph panel**: Fixed graph panel tooltip sort order issue [#6648](https://github.com/grafana/grafana/issues/6648)
* **Unsaved changes**: You now navigate to the intended page after saving in the unsaved changes dialog [#6675](https://github.com/grafana/grafana/issues/6675)
* **TLS Client Auth**: Support for TLS client authentication for datasource proxies [#2316](https://github.com/grafana/grafana/issues/2316)
* **Alerts out of sync**: Saving dashboards with broken alerts caused a sync problem [#6576](https://github.com/grafana/grafana/issues/6576)
* **Alerting**: Saving an alert with condition "HAS NO DATA" threw an error [#6701](https://github.com/grafana/grafana/issues/6701)
* **Config**: Improve error message when parsing a broken config file [#6731](https://github.com/grafana/grafana/issues/6731)
* **Table**: Render empty dates as - instead of the current date [#6728](https://github.com/grafana/grafana/issues/6728)

# 4.0-beta2 (2016-11-21)

### Bugfixes

* **Graph Panel**: Log base scale on the right Y-axis had no effect and the max value calc was not applied [#6534](https://github.com/grafana/grafana/issues/6534)
* **Graph Panel**: Fixed bar width when bars were only enabled in a series override [#6528](https://github.com/grafana/grafana/issues/6528)
* **UI/Browser**: Fixed issue with page/view header gradient border not showing in Safari [#6530](https://github.com/grafana/grafana/issues/6530)
* **Cloudwatch**: Fixed cloudwatch datasource requesting too many datapoints [#6544](https://github.com/grafana/grafana/issues/6544)
* **UX**: Panel drop zone was visible after duplicating a panel and when entering fullscreen/edit view [#6598](https://github.com/grafana/grafana/issues/6598)
* **Templating**: Newly added variable was not visible directly, only after a dashboard reload [#6622](https://github.com/grafana/grafana/issues/6622)

### Enhancements

* **Singlestat**: Support repeated template variables in prefix/postfix [#6595](https://github.com/grafana/grafana/issues/6595)
* **Templating**: Don't persist variable options with the refresh option [#6586](https://github.com/grafana/grafana/issues/6586)
* **Alerting**: Add ability to have OR conditions (and mixing AND & OR) [#6579](https://github.com/grafana/grafana/issues/6579)
* **InfluxDB**: Fix for Ad-Hoc Filters variable & changing dashboards [#6821](https://github.com/grafana/grafana/issues/6821)

# 4.0-beta1 (2016-11-09)

### Enhancements

* **Login**: Adds option to disable username/password logins, closes [#4674](https://github.com/grafana/grafana/issues/4674)
* **SingleStat**: Add series name as an option in the singlestat panel, closes [#4740](https://github.com/grafana/grafana/issues/4740)
* **Localization**: Week start day is now dependent on the browser locale setting, closes [#3003](https://github.com/grafana/grafana/issues/3003)
* **Templating**: Update panel repeats for variables that change on time refresh, closes [#5021](https://github.com/grafana/grafana/issues/5021)
* **Templating**: Add support for numeric and alphabetical sorting of variable values, closes [#2839](https://github.com/grafana/grafana/issues/2839)
* **Elasticsearch**: Support for setting the Precision Threshold for the Unique Count metric, closes [#4689](https://github.com/grafana/grafana/issues/4689)
* **Navigation**: Add search to the org switcher, closes [#2609](https://github.com/grafana/grafana/issues/2609)
* **Database**: Allow database config using a single property, closes [#5456](https://github.com/grafana/grafana/pull/5456)
* **Graphite**: Add support for groupByNodes, closes [#5613](https://github.com/grafana/grafana/pull/5613)
* **Influxdb**: Add support for elapsed(), closes [#5827](https://github.com/grafana/grafana/pull/5827)
* **OpenTSDB**: Add support for explicitTags for OpenTSDB >= 2.3, closes [#6360](https://github.com/grafana/grafana/pull/6361)
* **OAuth**: Add support for generic OAuth, closes [#4718](https://github.com/grafana/grafana/pull/4718)
* **Cloudwatch**: Add support to expand multi select template variable, closes [#5003](https://github.com/grafana/grafana/pull/5003)
* **Background Tasks**: Now supports automatic purging of old snapshots, closes [#4087](https://github.com/grafana/grafana/issues/4087)
* **Background Tasks**: Now supports automatic purging of old rendered images, closes [#2172](https://github.com/grafana/grafana/issues/2172)
* **Dashboard**: After inactivity, nav/row actions are hidden and the view fades to a nice clean state; can be toggled with `d v`. Also added kiosk mode, toggled via `d k` [#6476](https://github.com/grafana/grafana/issues/6476)
* **Dashboard**: Improved dashboard row menu & add panel UX [#6442](https://github.com/grafana/grafana/issues/6442)
* **Graph Panel**: Support for stacking null values [#2912](https://github.com/grafana/grafana/issues/2912), [#6287](https://github.com/grafana/grafana/issues/6287), thanks @benrubson!

### Breaking changes

* **SystemD**: Change systemd description, closes [#5971](https://github.com/grafana/grafana/pull/5971)
* **lodash upgrade**: Upgraded lodash from 2.4.2 to 4.15.0; this contains a number of breaking changes that could affect plugins, closes [#6021](https://github.com/grafana/grafana/pull/6021)

### Bug fixes

* **Table Panel**: Fixed problem when switching to Mixed datasource in metrics tab, fixes [#5999](https://github.com/grafana/grafana/pull/5999)
* **Playlist**: Fixed problem with play order not matching order defined in playlist, fixes [#5467](https://github.com/grafana/grafana/pull/5467)
* **Graph panel**: Fixed problem with auto decimals on y axis when datamin=datamax, fixes [#6070](https://github.com/grafana/grafana/pull/6070)
* **Snapshot**: Can view embedded panels/png rendered panels in snapshots without login, fixes [#3769](https://github.com/grafana/grafana/pull/3769)
* **Elasticsearch**: Fix for query template variable when looking up terms without query; no longer relies on the elasticsearch default field, fixes [#3887](https://github.com/grafana/grafana/pull/3887)
* **Elasticsearch**: Fix for displaying IP addresses used in terms aggregations, fixes [#4393](https://github.com/grafana/grafana/pull/4393)
* **PNG Rendering**: Fix for server side rendering when using auth proxy, fixes [#5906](https://github.com/grafana/grafana/pull/5906)
* **OpenTSDB**: Fixed multi-value nested templating for opentsdb, fixes [#6455](https://github.com/grafana/grafana/pull/6455)
* **Playlist**: Remove playlist items when dashboard is removed, fixes [#6292](https://github.com/grafana/grafana/issues/6292)

# 3.1.2 (unreleased)

* **Templating**: Fixed issue when combining row & panel repeats, fixes [#5790](https://github.com/grafana/grafana/issues/5790)
* **Drag&Drop**: Fixed issue with drag and drop in latest Chrome (51+), fixes [#5767](https://github.com/grafana/grafana/issues/5767)
* **Internal Metrics**: Fixed issue with dots in instance_name when sending internal metrics to Graphite, fixes [#5739](https://github.com/grafana/grafana/issues/5739)
* **Grafana-CLI**: Add default plugin path for macOS, fixes [#5806](https://github.com/grafana/grafana/issues/5806)
* **Grafana-CLI**: Improve error message for the upgrade-all command, fixes [#5885](https://github.com/grafana/grafana/issues/5885)

# 3.1.1 (2016-08-01)

* **IFrame embedding**: Fixed issue with using the full iframe height, fixes [#5605](https://github.com/grafana/grafana/issues/5606)
* **Panel PNG rendering**: Fixed issue detecting render completion, fixes [#5605](https://github.com/grafana/grafana/issues/5606)
* **Elasticsearch**: Fixed issue with templating query and json parse error, fixes [#5615](https://github.com/grafana/grafana/issues/5615)
* **Tech**: Upgraded JQuery to 2.2.4 to fix a security vulnerability in 2.1.4, fixes [#5627](https://github.com/grafana/grafana/issues/5627)
* **Graphite**: Fixed issue with mixed data sources and Graphite, fixes [#5617](https://github.com/grafana/grafana/issues/5617)
* **Templating**: Fixed issue where the template variable query was issued multiple times during dashboard load, fixes [#5637](https://github.com/grafana/grafana/issues/5637)
* **Zoom**: Fixed issues with zoom in and out on embedded (iframed) panels, fixes [#4489](https://github.com/grafana/grafana/issues/4489), [#5666](https://github.com/grafana/grafana/issues/5666)

# 3.1.0 stable (2016-07-12)

### Bugfixes & Enhancements

* **User Alert Notices**: Backend error alert popups did not show properly, fixes [#5435](https://github.com/grafana/grafana/issues/5435)
* **Table**: Added sanitize HTML option to allow links in table cells, fixes [#4596](https://github.com/grafana/grafana/issues/4596)
* **Apps**: App dashboards are automatically synced to the DB at startup after a plugin update, fixes [#5529](https://github.com/grafana/grafana/issues/5529)

# 3.1.0-beta1 (2016-06-23)

### Enhancements

@@ -681,8 +11,8 @@ due to too many connections/file handles on the data source backend. This problem is fixed in this release.

* **InfluxDB**: Add spread function, closes [#5211](https://github.com/grafana/grafana/issues/5211)
* **Scripts**: Use restart instead of start for deb package script, closes [#5282](https://github.com/grafana/grafana/pull/5282)
* **Logging**: Moved to a structured logging lib, and moved to component-specific level filters via config file, closes [#4590](https://github.com/grafana/grafana/issues/4590)
* **OpenTSDB**: Support nested template variables in the tag_values function, closes [#4398](https://github.com/grafana/grafana/issues/4398)
* **Datasource**: Pending data source requests are cancelled before new ones are issued (Graphite & Prometheus), closes [#5321](https://github.com/grafana/grafana/issues/5321)

### Breaking changes

* **Logging**: Changed default logging output format (now structured into message and key value pairs, with the logger key acting as component). You can also now change the config to output JSON logs.

@@ -1211,7 +541,7 @@ Grafana 2.x is fundamentally different from 1.x; it now ships with an integrated backend server.

# 1.8.0 (2014-09-22)

Read this [blog post](https://grafana.com/blog/2014/09/11/grafana-1.8.0-rc1-released) for an overview of all improvements.

**Fixes**

- [Issue #802](https://github.com/grafana/grafana/issues/802). Annotations: Fix when using InfluxDB datasource

@@ -1,46 +0,0 @@

# Contributor Covenant Code of Conduct

## Our Pledge

In the interest of fostering an open and welcoming environment, we as contributors and maintainers pledge to making participation in our project and our community a harassment-free experience for everyone, regardless of age, body size, disability, ethnicity, gender identity and expression, level of experience, nationality, personal appearance, race, religion, or sexual identity and orientation.

## Our Standards

Examples of behavior that contributes to creating a positive environment include:

* Using welcoming and inclusive language
* Being respectful of differing viewpoints and experiences
* Gracefully accepting constructive criticism
* Focusing on what is best for the community
* Showing empathy towards other community members

Examples of unacceptable behavior by participants include:

* The use of sexualized language or imagery and unwelcome sexual attention or advances
* Trolling, insulting/derogatory comments, and personal or political attacks
* Public or private harassment
* Publishing others' private information, such as a physical or electronic address, without explicit permission
* Other conduct which could reasonably be considered inappropriate in a professional setting

## Our Responsibilities

Project maintainers are responsible for clarifying the standards of acceptable behavior and are expected to take appropriate and fair corrective action in response to any instances of unacceptable behavior.

Project maintainers have the right and responsibility to remove, edit, or reject comments, commits, code, wiki edits, issues, and other contributions that are not aligned to this Code of Conduct, or to ban temporarily or permanently any contributor for other behaviors that they deem inappropriate, threatening, offensive, or harmful.

## Scope

This Code of Conduct applies both within project spaces and in public spaces when an individual is representing the project or its community. Examples of representing a project or community include using an official project e-mail address, posting via an official social media account, or acting as an appointed representative at an online or offline event. Representation of a project may be further defined and clarified by project maintainers.

## Enforcement

Instances of abusive, harassing, or otherwise unacceptable behavior may be reported by contacting the project team at contact@grafana.com. The project team will review and investigate all complaints, and will respond in a way that it deems appropriate to the circumstances. The project team is obligated to maintain confidentiality with regard to the reporter of an incident. Further details of specific enforcement policies may be posted separately.

Project maintainers who do not follow or enforce the Code of Conduct in good faith may face temporary or permanent repercussions as determined by other members of the project's leadership.

## Attribution

This Code of Conduct is adapted from the [Contributor Covenant][homepage], version 1.4, available at [http://contributor-covenant.org/version/1/4][version]

[homepage]: http://contributor-covenant.org
[version]: http://contributor-covenant.org/version/1/4/

372 Godeps/Godeps.json generated Normal file
@@ -0,0 +1,372 @@
|
||||
{
|
||||
"ImportPath": "github.com/grafana/grafana",
|
||||
"GoVersion": "go1.5.1",
|
||||
"GodepVersion": "v60",
|
||||
"Packages": [
|
||||
"./pkg/..."
|
||||
],
|
||||
"Deps": [
|
||||
{
|
||||
"ImportPath": "github.com/BurntSushi/toml",
|
||||
"Comment": "v0.1.0-21-g056c9bc",
|
||||
"Rev": "056c9bc7be7190eaa7715723883caffa5f8fa3e4"
|
||||
},
|
||||
{
|
||||
"ImportPath": "github.com/Unknwon/com",
|
||||
"Rev": "d9bcf409c8a368d06c9b347705c381e7c12d54df"
|
||||
},
|
||||
{
|
||||
"ImportPath": "github.com/aws/aws-sdk-go/aws",
|
||||
"Comment": "v1.0.0",
|
||||
"Rev": "abb928e07c4108683d6b4d0b6ca08fe6bc0eee5f"
|
||||
},
|
||||
{
|
||||
"ImportPath": "github.com/aws/aws-sdk-go/aws/awserr",
|
||||
"Comment": "v1.0.0",
|
||||
"Rev": "abb928e07c4108683d6b4d0b6ca08fe6bc0eee5f"
|
||||
},
|
||||
{
|
||||
"ImportPath": "github.com/aws/aws-sdk-go/aws/awsutil",
|
||||
"Comment": "v1.0.0",
|
||||
"Rev": "abb928e07c4108683d6b4d0b6ca08fe6bc0eee5f"
|
||||
},
|
||||
{
|
||||
"ImportPath": "github.com/aws/aws-sdk-go/aws/client",
|
||||
"Comment": "v1.0.0",
|
||||
"Rev": "abb928e07c4108683d6b4d0b6ca08fe6bc0eee5f"
|
||||
},
|
||||
{
|
||||
"ImportPath": "github.com/aws/aws-sdk-go/aws/client/metadata",
|
||||
"Comment": "v1.0.0",
|
||||
"Rev": "abb928e07c4108683d6b4d0b6ca08fe6bc0eee5f"
|
||||
},
|
||||
{
|
||||
"ImportPath": "github.com/aws/aws-sdk-go/aws/corehandlers",
|
||||
"Comment": "v1.0.0",
|
||||
"Rev": "abb928e07c4108683d6b4d0b6ca08fe6bc0eee5f"
|
||||
},
|
||||
{
|
||||
"ImportPath": "github.com/aws/aws-sdk-go/aws/credentials",
|
||||
"Comment": "v1.0.0",
|
||||
"Rev": "abb928e07c4108683d6b4d0b6ca08fe6bc0eee5f"
|
||||
},
|
||||
{
|
||||
"ImportPath": "github.com/aws/aws-sdk-go/aws/credentials/ec2rolecreds",
|
||||
"Comment": "v1.0.0",
|
||||
"Rev": "abb928e07c4108683d6b4d0b6ca08fe6bc0eee5f"
|
||||
},
|
||||
{
|
||||
"ImportPath": "github.com/aws/aws-sdk-go/aws/defaults",
|
||||
"Comment": "v1.0.0",
|
||||
"Rev": "abb928e07c4108683d6b4d0b6ca08fe6bc0eee5f"
|
||||
},
|
||||
{
|
||||
"ImportPath": "github.com/aws/aws-sdk-go/aws/ec2metadata",
|
||||
"Comment": "v1.0.0",
|
||||
"Rev": "abb928e07c4108683d6b4d0b6ca08fe6bc0eee5f"
|
||||
},
|
||||
{
|
||||
"ImportPath": "github.com/aws/aws-sdk-go/aws/request",
|
||||
"Comment": "v1.0.0",
|
||||
"Rev": "abb928e07c4108683d6b4d0b6ca08fe6bc0eee5f"
|
||||
},
|
||||
{
|
||||
"ImportPath": "github.com/aws/aws-sdk-go/aws/session",
|
||||
"Comment": "v1.0.0",
|
||||
"Rev": "abb928e07c4108683d6b4d0b6ca08fe6bc0eee5f"
|
||||
},
|
||||
{
|
||||
"ImportPath": "github.com/aws/aws-sdk-go/private/endpoints",
|
||||
"Comment": "v1.0.0",
|
||||
"Rev": "abb928e07c4108683d6b4d0b6ca08fe6bc0eee5f"
|
||||
},
|
||||
{
|
||||
"ImportPath": "github.com/aws/aws-sdk-go/private/protocol/ec2query",
|
||||
"Comment": "v1.0.0",
|
||||
"Rev": "abb928e07c4108683d6b4d0b6ca08fe6bc0eee5f"
|
||||
},
|
||||
{
|
||||
"ImportPath": "github.com/aws/aws-sdk-go/private/protocol/query",
|
||||
"Comment": "v1.0.0",
|
||||
"Rev": "abb928e07c4108683d6b4d0b6ca08fe6bc0eee5f"
|
||||
},
|
||||
{
|
||||
"ImportPath": "github.com/aws/aws-sdk-go/private/protocol/query/queryutil",
|
||||
"Comment": "v1.0.0",
|
||||
"Rev": "abb928e07c4108683d6b4d0b6ca08fe6bc0eee5f"
|
||||
},
|
||||
{
|
||||
"ImportPath": "github.com/aws/aws-sdk-go/private/protocol/rest",
|
||||
"Comment": "v1.0.0",
|
||||
"Rev": "abb928e07c4108683d6b4d0b6ca08fe6bc0eee5f"
|
||||
},
|
||||
{
|
||||
"ImportPath": "github.com/aws/aws-sdk-go/private/protocol/xml/xmlutil",
|
||||
"Comment": "v1.0.0",
|
||||
"Rev": "abb928e07c4108683d6b4d0b6ca08fe6bc0eee5f"
|
||||
},
|
||||
{
|
||||
"ImportPath": "github.com/aws/aws-sdk-go/private/signer/v4",
|
||||
"Comment": "v1.0.0",
|
||||
"Rev": "abb928e07c4108683d6b4d0b6ca08fe6bc0eee5f"
|
||||
},
|
||||
{
|
||||
"ImportPath": "github.com/aws/aws-sdk-go/private/waiter",
|
||||
"Comment": "v1.0.0",
|
||||
"Rev": "abb928e07c4108683d6b4d0b6ca08fe6bc0eee5f"
|
||||
},
|
||||
{
|
||||
"ImportPath": "github.com/aws/aws-sdk-go/service/cloudwatch",
|
||||
"Comment": "v1.0.0",
|
||||
"Rev": "abb928e07c4108683d6b4d0b6ca08fe6bc0eee5f"
|
||||
},
|
||||
{
|
||||
"ImportPath": "github.com/aws/aws-sdk-go/service/ec2",
|
||||
"Comment": "v1.0.0",
|
||||
"Rev": "abb928e07c4108683d6b4d0b6ca08fe6bc0eee5f"
|
||||
},
|
||||
{
|
||||
"ImportPath": "github.com/aws/aws-sdk-go/service/sts",
|
||||
"Comment": "v1.0.0",
|
||||
"Rev": "abb928e07c4108683d6b4d0b6ca08fe6bc0eee5f"
|
||||
},
|
||||
{
|
||||
"ImportPath": "github.com/bmizerany/assert",
|
||||
"Comment": "release.r60-6-ge17e998",
|
||||
"Rev": "e17e99893cb6509f428e1728281c2ad60a6b31e3"
|
||||
},
|
||||
{
|
||||
"ImportPath": "github.com/bradfitz/gomemcache/memcache",
|
||||
"Comment": "release.r60-40-g72a6864",
|
||||
"Rev": "72a68649ba712ee7c4b5b4a943a626bcd7d90eb8"
|
||||
},
|
||||
{
|
||||
"ImportPath": "github.com/codegangsta/cli",
|
||||
"Comment": "1.2.0-187-gc31a797",
|
||||
"Rev": "c31a7975863e7810c92e2e288a9ab074f9a88f29"
|
||||
},
|
||||
{
|
||||
"ImportPath": "github.com/davecgh/go-spew/spew",
|
||||
"Rev": "2df174808ee097f90d259e432cc04442cf60be21"
|
||||
},
|
||||
{
|
||||
"ImportPath": "github.com/fatih/color",
|
||||
"Comment": "v0.1-16-g4f7bcef",
|
||||
"Rev": "4f7bcef27eec7925456d0c30c5e7b0408b3339be"
|
||||
},
|
||||
{
|
||||
"ImportPath": "github.com/franela/goreq",
|
||||
"Rev": "3ddeded65be21dacb5a2e2d0b95af9ff6862a2b5"
|
||||
},
|
||||
{
|
||||
"ImportPath": "github.com/go-ini/ini",
|
||||
"Comment": "v0-48-g060d7da",
|
||||
"Rev": "060d7da055ba6ec5ea7a31f116332fe5efa04ce0"
|
||||
},
|
||||
{
|
||||
"ImportPath": "github.com/go-ldap/ldap",
|
||||
"Comment": "v2.2.1",
|
||||
"Rev": "07a7330929b9ee80495c88a4439657d89c7dbd87"
|
||||
},
|
||||
{
|
||||
"ImportPath": "github.com/go-macaron/binding",
|
||||
"Rev": "2502aaf4bce3a4e6451b4610847bfb8dffdb6266"
|
||||
},
|
||||
{
|
||||
"ImportPath": "github.com/go-macaron/gzip",
|
||||
"Rev": "4938e9be6b279d8426cb1c89a6bcf7af70b0c21d"
|
||||
},
|
||||
{
|
||||
"ImportPath": "github.com/go-macaron/inject",
|
||||
"Rev": "c5ab7bf3a307593cd44cb272d1a5beea473dd072"
|
||||
},
|
||||
{
|
||||
"ImportPath": "github.com/go-macaron/session",
|
||||
"Rev": "66031fcb37a0fff002a1f028eb0b3a815c78306b"
|
||||
},
|
||||
{
|
||||
"ImportPath": "github.com/go-macaron/session/memcache",
|
||||
"Rev": "66031fcb37a0fff002a1f028eb0b3a815c78306b"
|
||||
},
|
||||
{
|
||||
"ImportPath": "github.com/go-macaron/session/mysql",
|
||||
"Rev": "66031fcb37a0fff002a1f028eb0b3a815c78306b"
|
||||
},
|
||||
{
|
||||
"ImportPath": "github.com/go-macaron/session/postgres",
|
||||
"Rev": "66031fcb37a0fff002a1f028eb0b3a815c78306b"
|
||||
},
|
||||
{
|
||||
"ImportPath": "github.com/go-macaron/session/redis",
|
||||
"Rev": "66031fcb37a0fff002a1f028eb0b3a815c78306b"
|
||||
},
|
||||
{
|
||||
"ImportPath": "github.com/go-sql-driver/mysql",
|
||||
"Comment": "v1.2-171-g267b128",
|
||||
"Rev": "267b128680c46286b9ca13475c3cca5de8f79bd7"
|
||||
},
|
||||
{
|
||||
"ImportPath": "github.com/go-stack/stack",
|
||||
"Comment": "v1.5.2",
|
||||
"Rev": "100eb0c0a9c5b306ca2fb4f165df21d80ada4b82"
|
||||
},
|
||||
{
|
||||
"ImportPath": "github.com/go-xorm/core",
|
||||
"Comment": "v0.4.4-7-g9e608f7",
|
||||
"Rev": "9e608f7330b9d16fe2818cfe731128b3f156cb9a"
|
||||
},
|
||||
{
|
||||
"ImportPath": "github.com/go-xorm/xorm",
|
||||
"Comment": "v0.4.4-44-gf561133",
|
||||
"Rev": "f56113384f2c63dfe4cd8e768e349f1c35122b58"
|
||||
},
|
||||
{
|
||||
"ImportPath": "github.com/gorilla/websocket",
|
||||
"Rev": "c45a635370221f34fea2d5163fd156fcb4e38e8a"
|
||||
},
|
||||
{
|
||||
"ImportPath": "github.com/gosimple/slug",
|
||||
"Rev": "8d258463b4459f161f51d6a357edacd3eef9d663"
|
||||
},
|
||||
{
|
||||
"ImportPath": "github.com/hashicorp/go-version",
|
||||
"Rev": "7e3c02b30806fa5779d3bdfc152ce4c6f40e7b38"
|
||||
},
|
||||
{
|
||||
"ImportPath": "github.com/inconshreveable/log15",
|
||||
"Comment": "v2.3-61-g20bca5a",
|
||||
"Rev": "20bca5a7a57282e241fac83ec9ea42538027f1c1"
|
||||
},
|
||||
{
|
||||
"ImportPath": "github.com/inconshreveable/log15/term",
|
||||
"Comment": "v2.3-61-g20bca5a",
|
||||
"Rev": "20bca5a7a57282e241fac83ec9ea42538027f1c1"
|
||||
},
|
||||
{
|
||||
"ImportPath": "github.com/jmespath/go-jmespath",
|
||||
"Comment": "0.2.2",
|
||||
"Rev": "3433f3ea46d9f8019119e7dd41274e112a2359a9"
|
||||
},
|
||||
{
|
||||
"ImportPath": "github.com/jtolds/gls",
|
||||
"Rev": "f1ac7f4f24f50328e6bc838ca4437d1612a0243c"
|
||||
},
|
||||
{
|
||||
"ImportPath": "github.com/klauspost/compress/flate",
|
||||
"Rev": "7b02889a2005228347aef0e76beeaee564d82f8c"
|
||||
},
|
||||
{
|
||||
"ImportPath": "github.com/klauspost/compress/gzip",
|
||||
"Rev": "7b02889a2005228347aef0e76beeaee564d82f8c"
|
||||
},
|
||||
{
|
||||
"ImportPath": "github.com/klauspost/cpuid",
|
||||
"Rev": "349c675778172472f5e8f3a3e0fe187e302e5a10"
|
||||
},
|
||||
{
|
||||
"ImportPath": "github.com/klauspost/crc32",
|
||||
"Rev": "6834731faf32e62a2dd809d99fb24d1e4ae5a92d"
|
||||
},
|
||||
{
|
||||
"ImportPath": "github.com/kr/pretty",
|
||||
"Comment": "go.weekly.2011-12-22-27-ge6ac2fc",
|
||||
"Rev": "e6ac2fc51e89a3249e82157fa0bb7a18ef9dd5bb"
|
||||
},
|
||||
{
|
||||
"ImportPath": "github.com/kr/text",
|
||||
"Rev": "bb797dc4fb8320488f47bf11de07a733d7233e1f"
|
||||
},
|
||||
{
|
||||
"ImportPath": "github.com/lib/pq",
|
||||
"Comment": "go1.0-cutoff-13-g19eeca3",
|
||||
"Rev": "19eeca3e30d2577b1761db471ec130810e67f532"
|
||||
},
|
||||
{
|
||||
"ImportPath": "github.com/lib/pq/oid",
|
||||
"Comment": "go1.0-cutoff-13-g19eeca3",
|
||||
"Rev": "19eeca3e30d2577b1761db471ec130810e67f532"
|
||||
},
|
||||
{
|
||||
"ImportPath": "github.com/mattn/go-colorable",
|
||||
"Rev": "9cbef7c35391cca05f15f8181dc0b18bc9736dbb"
|
||||
},
|
||||
{
|
||||
"ImportPath": "github.com/mattn/go-isatty",
|
||||
"Rev": "56b76bdf51f7708750eac80fa38b952bb9f32639"
|
||||
},
|
||||
{
|
||||
"ImportPath": "github.com/mattn/go-sqlite3",
|
||||
"Comment": "v1.1.0-67-g7204887",
|
||||
"Rev": "7204887cf3a42df1cfaa5505dc3a3427f6dded8b"
|
||||
},
|
||||
{
|
||||
"ImportPath": "github.com/rainycape/unidecode",
|
||||
"Rev": "836ef0a715aedf08a12d595ed73ec8ed5b288cac"
|
||||
},
|
||||
{
|
||||
"ImportPath": "github.com/smartystreets/goconvey/convey",
|
||||
"Comment": "1.5.0-356-gfbc0a1c",
|
||||
"Rev": "fbc0a1c888f9f96263f9a559d1769905245f1123"
|
||||
},
|
||||
{
|
||||
"ImportPath": "github.com/smartystreets/goconvey/convey/assertions",
|
||||
"Comment": "1.5.0-356-gfbc0a1c",
|
||||
"Rev": "fbc0a1c888f9f96263f9a559d1769905245f1123"
|
||||
},
|
||||
{
|
||||
"ImportPath": "github.com/smartystreets/goconvey/convey/assertions/oglematchers",
|
||||
"Comment": "1.5.0-356-gfbc0a1c",
|
||||
"Rev": "fbc0a1c888f9f96263f9a559d1769905245f1123"
|
||||
},
|
||||
{
|
||||
"ImportPath": "github.com/smartystreets/goconvey/convey/gotest",
|
||||
"Comment": "1.5.0-356-gfbc0a1c",
|
||||
"Rev": "fbc0a1c888f9f96263f9a559d1769905245f1123"
|
||||
},
|
||||
{
|
||||
"ImportPath": "github.com/smartystreets/goconvey/convey/reporting",
|
||||
"Comment": "1.5.0-356-gfbc0a1c",
|
||||
"Rev": "fbc0a1c888f9f96263f9a559d1769905245f1123"
|
||||
},
|
||||
{
|
||||
"ImportPath": "github.com/streadway/amqp",
|
||||
"Rev": "150b7f24d6ad507e6026c13d85ce1f1391ac7400"
|
||||
},
|
||||
{
|
||||
"ImportPath": "golang.org/x/net/context",
|
||||
"Rev": "972f0c5fbe4ae29e666c3f78c3ed42ae7a448b0a"
|
||||
},
|
||||
{
|
||||
"ImportPath": "golang.org/x/oauth2",
|
||||
"Rev": "c58fcf0ffc1c772aa2e1ee4894bc19f2649263b2"
|
||||
},
|
||||
{
|
||||
"ImportPath": "golang.org/x/sys/unix",
|
||||
"Rev": "7a56174f0086b32866ebd746a794417edbc678a1"
|
||||
},
|
||||
{
|
||||
"ImportPath": "gopkg.in/asn1-ber.v1",
|
||||
"Comment": "v1",
|
||||
"Rev": "9eae18c3681ae3d3c677ac2b80a8fe57de45fc09"
|
||||
},
|
||||
{
|
||||
"ImportPath": "gopkg.in/bufio.v1",
|
||||
"Comment": "v1",
|
||||
"Rev": "567b2bfa514e796916c4747494d6ff5132a1dfce"
|
||||
},
|
||||
{
|
||||
"ImportPath": "gopkg.in/ini.v1",
|
||||
"Comment": "v0-16-g1772191",
|
||||
"Rev": "177219109c97e7920c933e21c9b25f874357b237"
|
||||
},
|
||||
{
|
||||
"ImportPath": "gopkg.in/macaron.v1",
|
||||
"Rev": "1c6dd87797ae9319b4658cbd48d1d0420b279fd5"
|
||||
},
|
||||
{
|
||||
"ImportPath": "gopkg.in/redis.v2",
|
||||
"Comment": "v2.3.2",
|
||||
"Rev": "e6179049628164864e6e84e973cfb56335748dea"
|
||||
}
|
||||
]
|
||||
}
|
||||
5 Godeps/Readme generated Normal file
@@ -0,0 +1,5 @@
|
||||
This directory tree is generated automatically by godep.
|
||||
|
||||
Please do not edit.
|
||||
|
||||
See https://github.com/tools/godep for more information.
|
||||
2 Godeps/_workspace/.gitignore generated vendored Normal file
@@ -0,0 +1,2 @@
|
||||
/pkg
|
||||
/bin
|
||||
5 Godeps/_workspace/src/github.com/BurntSushi/toml/.gitignore generated vendored Normal file
@@ -0,0 +1,5 @@
|
||||
TAGS
|
||||
tags
|
||||
.*.swp
|
||||
tomlcheck/tomlcheck
|
||||
toml.test
|
||||
12 Godeps/_workspace/src/github.com/BurntSushi/toml/.travis.yml generated vendored Normal file
@@ -0,0 +1,12 @@
|
||||
language: go
|
||||
go:
|
||||
- 1.1
|
||||
- 1.2
|
||||
- tip
|
||||
install:
|
||||
- go install ./...
|
||||
- go get github.com/BurntSushi/toml-test
|
||||
script:
|
||||
- export PATH="$PATH:$HOME/gopath/bin"
|
||||
- make test
|
||||
|
||||
3 Godeps/_workspace/src/github.com/BurntSushi/toml/COMPATIBLE generated vendored Normal file
@@ -0,0 +1,3 @@
|
||||
Compatible with TOML version
|
||||
[v0.2.0](https://github.com/mojombo/toml/blob/master/versions/toml-v0.2.0.md)
|
||||
|
||||
19 Godeps/_workspace/src/github.com/BurntSushi/toml/Makefile generated vendored Normal file
@@ -0,0 +1,19 @@
|
||||
install:
|
||||
go install ./...
|
||||
|
||||
test: install
|
||||
go test -v
|
||||
toml-test toml-test-decoder
|
||||
toml-test -encoder toml-test-encoder
|
||||
|
||||
fmt:
|
||||
gofmt -w *.go */*.go
|
||||
colcheck *.go */*.go
|
||||
|
||||
tags:
|
||||
find ./ -name '*.go' -print0 | xargs -0 gotags > TAGS
|
||||
|
||||
push:
|
||||
git push origin master
|
||||
git push github master
|
||||
|
||||
220 Godeps/_workspace/src/github.com/BurntSushi/toml/README.md generated vendored Normal file
@@ -0,0 +1,220 @@
|
||||
## TOML parser and encoder for Go with reflection
|
||||
|
||||
TOML stands for Tom's Obvious, Minimal Language. This Go package provides a
|
||||
reflection interface similar to Go's standard library `json` and `xml`
|
||||
packages. This package also supports the `encoding.TextUnmarshaler` and
|
||||
`encoding.TextMarshaler` interfaces so that you can define custom data
|
||||
representations. (There is an example of this below.)
|
||||
|
||||
Spec: https://github.com/mojombo/toml
|
||||
|
||||
Compatible with TOML version
|
||||
[v0.2.0](https://github.com/toml-lang/toml/blob/master/versions/en/toml-v0.2.0.md)
|
||||
|
||||
Documentation: http://godoc.org/github.com/BurntSushi/toml
|
||||
|
||||
Installation:
|
||||
|
||||
```bash
|
||||
go get github.com/BurntSushi/toml
|
||||
```
|
||||
|
||||
Try the toml validator:
|
||||
|
||||
```bash
|
||||
go get github.com/BurntSushi/toml/cmd/tomlv
|
||||
tomlv some-toml-file.toml
|
||||
```
|
||||
|
||||
[](https://travis-ci.org/BurntSushi/toml)
|
||||
|
||||
|
||||
### Testing
|
||||
|
||||
This package passes all tests in
|
||||
[toml-test](https://github.com/BurntSushi/toml-test) for both the decoder
|
||||
and the encoder.
|
||||
|
||||
### Examples
|
||||
|
||||
This package works similarly to how the Go standard library handles `XML`
|
||||
and `JSON`. Namely, data is loaded into Go values via reflection.
|
||||
|
||||
For the simplest example, consider some TOML file as just a list of keys
|
||||
and values:
|
||||
|
||||
```toml
|
||||
Age = 25
|
||||
Cats = [ "Cauchy", "Plato" ]
|
||||
Pi = 3.14
|
||||
Perfection = [ 6, 28, 496, 8128 ]
|
||||
DOB = 1987-07-05T05:45:00Z
|
||||
```
|
||||
|
||||
Which could be defined in Go as:
|
||||
|
||||
```go
|
||||
type Config struct {
|
||||
Age int
|
||||
Cats []string
|
||||
Pi float64
|
||||
Perfection []int
|
||||
DOB time.Time // requires `import time`
|
||||
}
|
||||
```
|
||||
|
||||
And then decoded with:
|
||||
|
||||
```go
|
||||
var conf Config
|
||||
if _, err := toml.Decode(tomlData, &conf); err != nil {
|
||||
// handle error
|
||||
}
|
||||
```
|
||||
|
||||
You can also use struct tags if your struct field name doesn't map to a TOML
|
||||
key value directly:
|
||||
|
||||
```toml
|
||||
some_key_NAME = "wat"
|
||||
```
|
||||
|
||||
```go
|
||||
type TOML struct {
|
||||
ObscureKey string `toml:"some_key_NAME"`
|
||||
}
|
||||
```
|
||||
|
||||
### Using the `encoding.TextUnmarshaler` interface
|
||||
|
||||
Here's an example that automatically parses duration strings into
|
||||
`time.Duration` values:
|
||||
|
||||
```toml
|
||||
[[song]]
|
||||
name = "Thunder Road"
|
||||
duration = "4m49s"
|
||||
|
||||
[[song]]
|
||||
name = "Stairway to Heaven"
|
||||
duration = "8m03s"
|
||||
```
|
||||
|
||||
Which can be decoded with:
|
||||
|
||||
```go
|
||||
type song struct {
|
||||
Name string
|
||||
Duration duration
|
||||
}
|
||||
type songs struct {
|
||||
Song []song
|
||||
}
|
||||
var favorites songs
|
||||
if _, err := toml.Decode(blob, &favorites); err != nil {
|
||||
log.Fatal(err)
|
||||
}
|
||||
|
||||
for _, s := range favorites.Song {
|
||||
fmt.Printf("%s (%s)\n", s.Name, s.Duration)
|
||||
}
|
||||
```
|
||||
|
||||
And you'll also need a `duration` type that satisfies the
|
||||
`encoding.TextUnmarshaler` interface:
|
||||
|
||||
```go
|
||||
type duration struct {
|
||||
time.Duration
|
||||
}
|
||||
|
||||
func (d *duration) UnmarshalText(text []byte) error {
|
||||
var err error
|
||||
d.Duration, err = time.ParseDuration(string(text))
|
||||
return err
|
||||
}
|
||||
```
|
||||
|
||||
### More complex usage
|
||||
|
||||
Here's an example of how to load the example from the official spec page:
|
||||
|
||||
```toml
|
||||
# This is a TOML document. Boom.
|
||||
|
||||
title = "TOML Example"
|
||||
|
||||
[owner]
|
||||
name = "Tom Preston-Werner"
|
||||
organization = "GitHub"
|
||||
bio = "GitHub Cofounder & CEO\nLikes tater tots and beer."
|
||||
dob = 1979-05-27T07:32:00Z # First class dates? Why not?
|
||||
|
||||
[database]
|
||||
server = "192.168.1.1"
|
||||
ports = [ 8001, 8001, 8002 ]
|
||||
connection_max = 5000
|
||||
enabled = true
|
||||
|
||||
[servers]
|
||||
|
||||
# You can indent as you please. Tabs or spaces. TOML don't care.
|
||||
[servers.alpha]
|
||||
ip = "10.0.0.1"
|
||||
dc = "eqdc10"
|
||||
|
||||
[servers.beta]
|
||||
ip = "10.0.0.2"
|
||||
dc = "eqdc10"
|
||||
|
||||
[clients]
|
||||
data = [ ["gamma", "delta"], [1, 2] ] # just an update to make sure parsers support it
|
||||
|
||||
# Line breaks are OK when inside arrays
|
||||
hosts = [
|
||||
"alpha",
|
||||
"omega"
|
||||
]
|
||||
```
|
||||
|
||||
And the corresponding Go types are:
|
||||
|
||||
```go
|
||||
type tomlConfig struct {
|
||||
Title string
|
||||
Owner ownerInfo
|
||||
DB database `toml:"database"`
|
||||
Servers map[string]server
|
||||
Clients clients
|
||||
}
|
||||
|
||||
type ownerInfo struct {
|
||||
Name string
|
||||
Org string `toml:"organization"`
|
||||
Bio string
|
||||
DOB time.Time
|
||||
}
|
||||
|
||||
type database struct {
|
||||
Server string
|
||||
Ports []int
|
||||
ConnMax int `toml:"connection_max"`
|
||||
Enabled bool
|
||||
}
|
||||
|
||||
type server struct {
|
||||
IP string
|
||||
DC string
|
||||
}
|
||||
|
||||
type clients struct {
|
||||
Data [][]interface{}
|
||||
Hosts []string
|
||||
}
|
||||
```
|
||||
|
||||
Note that a case insensitive match will be tried if an exact match can't be
|
||||
found.
|
||||
|
||||
A working example of the above can be found in `_examples/example.{go,toml}`.
|
||||
|
||||
14 Godeps/_workspace/src/github.com/BurntSushi/toml/cmd/toml-test-decoder/README.md generated vendored Normal file
@@ -0,0 +1,14 @@
|
||||
# Implements the TOML test suite interface
|
||||
|
||||
This is an implementation of the interface expected by
|
||||
[toml-test](https://github.com/BurntSushi/toml-test) for my
|
||||
[toml parser written in Go](https://github.com/BurntSushi/toml).
|
||||
In particular, it maps TOML data on `stdin` to a JSON format on `stdout`.
|
||||
|
||||
|
||||
Compatible with TOML version
|
||||
[v0.2.0](https://github.com/mojombo/toml/blob/master/versions/toml-v0.2.0.md)
|
||||
|
||||
Compatible with `toml-test` version
|
||||
[v0.2.0](https://github.com/BurntSushi/toml-test/tree/v0.2.0)
|
||||
|
||||
90 Godeps/_workspace/src/github.com/BurntSushi/toml/cmd/toml-test-decoder/main.go generated vendored Normal file
@@ -0,0 +1,90 @@
|
||||
// Command toml-test-decoder satisfies the toml-test interface for testing
|
||||
// TOML decoders. Namely, it accepts TOML on stdin and outputs JSON on stdout.
|
||||
package main
|
||||
|
||||
import (
|
||||
"encoding/json"
|
||||
"flag"
|
||||
"fmt"
|
||||
"log"
|
||||
"os"
|
||||
"path"
|
||||
"time"
|
||||
|
||||
"github.com/BurntSushi/toml"
|
||||
)
|
||||
|
||||
func init() {
|
||||
log.SetFlags(0)
|
||||
|
||||
flag.Usage = usage
|
||||
flag.Parse()
|
||||
}
|
||||
|
||||
func usage() {
|
||||
log.Printf("Usage: %s < toml-file\n", path.Base(os.Args[0]))
|
||||
flag.PrintDefaults()
|
||||
|
||||
os.Exit(1)
|
||||
}
|
||||
|
||||
func main() {
|
||||
if flag.NArg() != 0 {
|
||||
flag.Usage()
|
||||
}
|
||||
|
||||
var tmp interface{}
|
||||
if _, err := toml.DecodeReader(os.Stdin, &tmp); err != nil {
|
||||
log.Fatalf("Error decoding TOML: %s", err)
|
||||
}
|
||||
|
||||
typedTmp := translate(tmp)
|
||||
if err := json.NewEncoder(os.Stdout).Encode(typedTmp); err != nil {
|
||||
log.Fatalf("Error encoding JSON: %s", err)
|
||||
}
|
||||
}
|
||||
|
||||
func translate(tomlData interface{}) interface{} {
|
||||
switch orig := tomlData.(type) {
|
||||
case map[string]interface{}:
|
||||
typed := make(map[string]interface{}, len(orig))
|
||||
for k, v := range orig {
|
||||
typed[k] = translate(v)
|
||||
}
|
||||
return typed
|
||||
case []map[string]interface{}:
|
||||
typed := make([]map[string]interface{}, len(orig))
|
||||
for i, v := range orig {
|
||||
typed[i] = translate(v).(map[string]interface{})
|
||||
}
|
||||
return typed
|
||||
case []interface{}:
|
||||
typed := make([]interface{}, len(orig))
|
||||
for i, v := range orig {
|
||||
typed[i] = translate(v)
|
||||
}
|
||||
|
||||
// We don't really need to tag arrays, but let's be future proof.
|
||||
// (If TOML ever supports tuples, we'll need this.)
|
||||
return tag("array", typed)
|
||||
case time.Time:
|
||||
return tag("datetime", orig.Format("2006-01-02T15:04:05Z"))
|
||||
case bool:
|
||||
return tag("bool", fmt.Sprintf("%v", orig))
|
||||
case int64:
|
||||
return tag("integer", fmt.Sprintf("%d", orig))
|
||||
case float64:
|
||||
return tag("float", fmt.Sprintf("%v", orig))
|
||||
case string:
|
||||
return tag("string", orig)
|
||||
}
|
||||
|
||||
panic(fmt.Sprintf("Unknown type: %T", tomlData))
|
||||
}
|
||||
|
||||
func tag(typeName string, data interface{}) map[string]interface{} {
|
||||
return map[string]interface{}{
|
||||
"type": typeName,
|
||||
"value": data,
|
||||
}
|
||||
}
|
||||
14 Godeps/_workspace/src/github.com/BurntSushi/toml/cmd/toml-test-encoder/README.md generated vendored Normal file
@@ -0,0 +1,14 @@
|
||||
# Implements the TOML test suite interface for TOML encoders
|
||||
|
||||
This is an implementation of the interface expected by
|
||||
[toml-test](https://github.com/BurntSushi/toml-test) for the
|
||||
[TOML encoder](https://github.com/BurntSushi/toml).
|
||||
In particular, it maps JSON data on `stdin` to a TOML format on `stdout`.
|
||||
|
||||
|
||||
Compatible with TOML version
|
||||
[v0.2.0](https://github.com/mojombo/toml/blob/master/versions/toml-v0.2.0.md)
|
||||
|
||||
Compatible with `toml-test` version
|
||||
[v0.2.0](https://github.com/BurntSushi/toml-test/tree/v0.2.0)
|
||||
|
||||
131 Godeps/_workspace/src/github.com/BurntSushi/toml/cmd/toml-test-encoder/main.go generated vendored Normal file
@@ -0,0 +1,131 @@
|
||||
// Command toml-test-encoder satisfies the toml-test interface for testing
|
||||
// TOML encoders. Namely, it accepts JSON on stdin and outputs TOML on stdout.
|
||||
package main
|
||||
|
||||
import (
|
||||
"encoding/json"
|
||||
"flag"
|
||||
"log"
|
||||
"os"
|
||||
"path"
|
||||
"strconv"
|
||||
"time"
|
||||
|
||||
"github.com/BurntSushi/toml"
|
||||
)
|
||||
|
||||
func init() {
|
||||
log.SetFlags(0)
|
||||
|
||||
flag.Usage = usage
|
||||
flag.Parse()
|
||||
}
|
||||
|
||||
func usage() {
|
||||
log.Printf("Usage: %s < json-file\n", path.Base(os.Args[0]))
|
||||
flag.PrintDefaults()
|
||||
|
||||
os.Exit(1)
|
||||
}
|
||||
|
||||
func main() {
|
||||
if flag.NArg() != 0 {
|
||||
flag.Usage()
|
||||
}
|
||||
|
||||
var tmp interface{}
|
||||
if err := json.NewDecoder(os.Stdin).Decode(&tmp); err != nil {
|
||||
log.Fatalf("Error decoding JSON: %s", err)
|
||||
}
|
||||
|
||||
tomlData := translate(tmp)
|
||||
if err := toml.NewEncoder(os.Stdout).Encode(tomlData); err != nil {
|
||||
log.Fatalf("Error encoding TOML: %s", err)
|
||||
}
|
||||
}
|
||||
|
||||
func translate(typedJson interface{}) interface{} {
|
||||
switch v := typedJson.(type) {
|
||||
case map[string]interface{}:
|
||||
if len(v) == 2 && in("type", v) && in("value", v) {
|
||||
return untag(v)
|
||||
}
|
||||
m := make(map[string]interface{}, len(v))
|
||||
for k, v2 := range v {
|
||||
m[k] = translate(v2)
|
||||
}
|
||||
return m
|
||||
case []interface{}:
|
||||
tabArray := make([]map[string]interface{}, len(v))
|
||||
for i := range v {
|
||||
if m, ok := translate(v[i]).(map[string]interface{}); ok {
|
||||
tabArray[i] = m
|
||||
} else {
|
||||
log.Fatalf("JSON arrays may only contain objects. This " +
|
||||
"corresponds to only tables being allowed in " +
|
||||
"TOML table arrays.")
|
||||
}
|
||||
}
|
||||
return tabArray
|
||||
}
|
||||
log.Fatalf("Unrecognized JSON format '%T'.", typedJson)
|
||||
panic("unreachable")
|
||||
}
|
||||
|
||||
func untag(typed map[string]interface{}) interface{} {
|
||||
t := typed["type"].(string)
|
||||
v := typed["value"]
|
||||
switch t {
|
||||
case "string":
|
||||
return v.(string)
|
||||
case "integer":
|
||||
v := v.(string)
|
||||
n, err := strconv.Atoi(v)
|
||||
if err != nil {
|
||||
log.Fatalf("Could not parse '%s' as integer: %s", v, err)
|
||||
}
|
||||
return n
|
||||
case "float":
|
||||
v := v.(string)
|
||||
f, err := strconv.ParseFloat(v, 64)
|
||||
if err != nil {
|
||||
log.Fatalf("Could not parse '%s' as float64: %s", v, err)
|
||||
}
|
||||
return f
|
||||
case "datetime":
|
||||
v := v.(string)
|
||||
t, err := time.Parse("2006-01-02T15:04:05Z", v)
|
||||
if err != nil {
|
||||
log.Fatalf("Could not parse '%s' as a datetime: %s", v, err)
|
||||
}
|
||||
return t
|
||||
case "bool":
|
||||
v := v.(string)
|
||||
switch v {
|
||||
case "true":
|
||||
return true
|
||||
case "false":
|
||||
return false
|
||||
}
|
||||
log.Fatalf("Could not parse '%s' as a boolean.", v)
|
||||
case "array":
|
||||
v := v.([]interface{})
|
||||
array := make([]interface{}, len(v))
|
||||
for i := range v {
|
||||
if m, ok := v[i].(map[string]interface{}); ok {
|
||||
array[i] = untag(m)
|
||||
} else {
|
||||
log.Fatalf("Arrays may only contain other arrays or "+
|
||||
"primitive values, but found a '%T'.", m)
|
||||
}
|
||||
}
|
||||
return array
|
||||
}
|
||||
log.Fatalf("Unrecognized tag type '%s'.", t)
|
||||
panic("unreachable")
|
||||
}
|
||||
|
||||
func in(key string, m map[string]interface{}) bool {
|
||||
_, ok := m[key]
|
||||
return ok
|
||||
}
|
||||
22 Godeps/_workspace/src/github.com/BurntSushi/toml/cmd/tomlv/README.md generated vendored Normal file
@@ -0,0 +1,22 @@
|
||||
# TOML Validator
|
||||
|
||||
If Go is installed, it's simple to try it out:
|
||||
|
||||
```bash
|
||||
go get github.com/BurntSushi/toml/cmd/tomlv
|
||||
tomlv some-toml-file.toml
|
||||
```
|
||||
|
||||
You can see the types of every key in a TOML file with:
|
||||
|
||||
```bash
|
||||
tomlv -types some-toml-file.toml
|
||||
```
|
||||
|
||||
At the moment, only one error message is reported at a time. Error messages
|
||||
include line numbers. No output means that the files given are valid TOML, or
|
||||
there is a bug in `tomlv`.
|
||||
|
||||
Compatible with TOML version
|
||||
[v0.1.0](https://github.com/mojombo/toml/blob/master/versions/toml-v0.1.0.md)
|
||||
|
||||
61 Godeps/_workspace/src/github.com/BurntSushi/toml/cmd/tomlv/main.go generated vendored Normal file
@@ -0,0 +1,61 @@
|
||||
// Command tomlv validates TOML documents and prints each key's type.
|
||||
package main
|
||||
|
||||
import (
|
||||
"flag"
|
||||
"fmt"
|
||||
"log"
|
||||
"os"
|
||||
"path"
|
||||
"strings"
|
||||
"text/tabwriter"
|
||||
|
||||
"github.com/BurntSushi/toml"
|
||||
)
|
||||
|
||||
var (
|
||||
flagTypes = false
|
||||
)
|
||||
|
||||
func init() {
|
||||
log.SetFlags(0)
|
||||
|
||||
flag.BoolVar(&flagTypes, "types", flagTypes,
|
||||
"When set, the types of every defined key will be shown.")
|
||||
|
||||
flag.Usage = usage
|
||||
flag.Parse()
|
||||
}
|
||||
|
||||
func usage() {
|
||||
log.Printf("Usage: %s toml-file [ toml-file ... ]\n",
|
||||
path.Base(os.Args[0]))
|
||||
flag.PrintDefaults()
|
||||
|
||||
os.Exit(1)
|
||||
}
|
||||
|
||||
func main() {
|
||||
if flag.NArg() < 1 {
|
||||
flag.Usage()
|
||||
}
|
||||
for _, f := range flag.Args() {
|
||||
var tmp interface{}
|
||||
md, err := toml.DecodeFile(f, &tmp)
|
||||
if err != nil {
|
||||
log.Fatalf("Error in '%s': %s", f, err)
|
||||
}
|
||||
if flagTypes {
|
||||
printTypes(md)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func printTypes(md toml.MetaData) {
|
||||
tabw := tabwriter.NewWriter(os.Stdout, 0, 0, 2, ' ', 0)
|
||||
for _, key := range md.Keys() {
|
||||
fmt.Fprintf(tabw, "%s%s\t%s\n",
|
||||
strings.Repeat(" ", len(key)-1), key, md.Type(key...))
|
||||
}
|
||||
tabw.Flush()
|
||||
}
|
||||
492 Godeps/_workspace/src/github.com/BurntSushi/toml/decode.go generated vendored Normal file
@@ -0,0 +1,492 @@
|
||||
package toml
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"io"
|
||||
"io/ioutil"
|
||||
"math"
|
||||
"reflect"
|
||||
"strings"
|
||||
"time"
|
||||
)
|
||||
|
||||
var e = fmt.Errorf
|
||||
|
||||
// Unmarshaler is the interface implemented by objects that can unmarshal a
|
||||
// TOML description of themselves.
|
||||
type Unmarshaler interface {
|
||||
UnmarshalTOML(interface{}) error
|
||||
}
|
||||
|
||||
// Unmarshal decodes the contents of `p` in TOML format into a pointer `v`.
|
||||
func Unmarshal(p []byte, v interface{}) error {
|
||||
_, err := Decode(string(p), v)
|
||||
return err
|
||||
}
|
||||
|
||||
// Primitive is a TOML value that hasn't been decoded into a Go value.
|
||||
// When using the various `Decode*` functions, the type `Primitive` may
|
||||
// be given to any value, and its decoding will be delayed.
|
||||
//
|
||||
// A `Primitive` value can be decoded using the `PrimitiveDecode` function.
|
||||
//
|
||||
// The underlying representation of a `Primitive` value is subject to change.
|
||||
// Do not rely on it.
|
||||
//
|
||||
// N.B. Primitive values are still parsed, so using them will only avoid
|
||||
// the overhead of reflection. They can be useful when you don't know the
|
||||
// exact type of TOML data until run time.
|
||||
type Primitive struct {
|
||||
undecoded interface{}
|
||||
context Key
|
||||
}
|
||||
|
||||
// DEPRECATED!
|
||||
//
|
||||
// Use MetaData.PrimitiveDecode instead.
|
||||
func PrimitiveDecode(primValue Primitive, v interface{}) error {
|
||||
md := MetaData{decoded: make(map[string]bool)}
|
||||
return md.unify(primValue.undecoded, rvalue(v))
|
||||
}
|
||||
|
||||
// PrimitiveDecode is just like the other `Decode*` functions, except it
|
||||
// decodes a TOML value that has already been parsed. Valid primitive values
|
||||
// can *only* be obtained from values filled by the decoder functions,
|
||||
// including this method. (i.e., `v` may contain more `Primitive`
|
||||
// values.)
|
||||
//
|
||||
// Meta data for primitive values is included in the meta data returned by
|
||||
// the `Decode*` functions with one exception: keys returned by the Undecoded
|
||||
// method will only reflect keys that were decoded. Namely, any keys hidden
|
||||
// behind a Primitive will be considered undecoded. Executing this method will
|
||||
// update the undecoded keys in the meta data. (See the example.)
|
||||
func (md *MetaData) PrimitiveDecode(primValue Primitive, v interface{}) error {
|
||||
md.context = primValue.context
|
||||
defer func() { md.context = nil }()
|
||||
return md.unify(primValue.undecoded, rvalue(v))
|
||||
}
|
||||
|
||||
// Decode will decode the contents of `data` in TOML format into a pointer
|
||||
// `v`.
|
||||
//
|
||||
// TOML hashes correspond to Go structs or maps. (Dealer's choice. They can be
|
||||
// used interchangeably.)
|
||||
//
|
||||
// TOML arrays of tables correspond to either a slice of structs or a slice
|
||||
// of maps.
|
||||
//
|
||||
// TOML datetimes correspond to Go `time.Time` values.
|
||||
//
|
||||
// All other TOML types (float, string, int, bool and array) correspond
|
||||
// to the obvious Go types.
|
||||
//
|
||||
// An exception to the above rules is if a type implements the
|
||||
// encoding.TextUnmarshaler interface. In this case, any primitive TOML value
|
||||
// (floats, strings, integers, booleans and datetimes) will be converted to
|
||||
// a byte string and given to the value's UnmarshalText method. See the
|
||||
// Unmarshaler example for a demonstration with time duration strings.
|
||||
//
|
||||
// Key mapping
|
||||
//
|
||||
// TOML keys can map to either keys in a Go map or field names in a Go
|
||||
// struct. The special `toml` struct tag may be used to map TOML keys to
|
||||
// struct fields that don't match the key name exactly. (See the example.)
|
||||
// A case insensitive match to struct names will be tried if an exact match
|
||||
// can't be found.
|
||||
//
|
||||
// The mapping between TOML values and Go values is loose. That is, there
|
||||
// may exist TOML values that cannot be placed into your representation, and
|
||||
// there may be parts of your representation that do not correspond to
|
||||
// TOML values. This loose mapping can be made stricter by using the IsDefined
|
||||
// and/or Undecoded methods on the MetaData returned.
|
||||
//
|
||||
// This decoder will not handle cyclic types. If a cyclic type is passed,
|
||||
// `Decode` will not terminate.
|
||||
func Decode(data string, v interface{}) (MetaData, error) {
|
||||
p, err := parse(data)
|
||||
if err != nil {
|
||||
return MetaData{}, err
|
||||
}
|
||||
md := MetaData{
|
||||
p.mapping, p.types, p.ordered,
|
||||
make(map[string]bool, len(p.ordered)), nil,
|
||||
}
|
||||
return md, md.unify(p.mapping, rvalue(v))
|
||||
}
|
||||
|
||||
// DecodeFile is just like Decode, except it will automatically read the
|
||||
// contents of the file at `fpath` and decode it for you.
|
||||
func DecodeFile(fpath string, v interface{}) (MetaData, error) {
|
||||
bs, err := ioutil.ReadFile(fpath)
|
||||
if err != nil {
|
||||
return MetaData{}, err
|
||||
}
|
||||
return Decode(string(bs), v)
|
||||
}
|
||||
|
||||
// DecodeReader is just like Decode, except it will consume all bytes
|
||||
// from the reader and decode it for you.
|
||||
func DecodeReader(r io.Reader, v interface{}) (MetaData, error) {
|
||||
bs, err := ioutil.ReadAll(r)
|
||||
if err != nil {
|
||||
return MetaData{}, err
|
||||
}
|
||||
return Decode(string(bs), v)
|
||||
}
|
||||
|
||||
// unify performs a sort of type unification based on the structure of `rv`,
|
||||
// which is the client representation.
|
||||
//
|
||||
// Any type mismatch produces an error. Finding a type that we don't know
|
||||
// how to handle produces an unsupported type error.
|
||||
func (md *MetaData) unify(data interface{}, rv reflect.Value) error {
|
||||
|
||||
// Special case. Look for a `Primitive` value.
|
||||
if rv.Type() == reflect.TypeOf((*Primitive)(nil)).Elem() {
|
||||
// Save the undecoded data and the key context into the primitive
|
||||
// value.
|
||||
context := make(Key, len(md.context))
|
||||
copy(context, md.context)
|
||||
rv.Set(reflect.ValueOf(Primitive{
|
||||
undecoded: data,
|
||||
context: context,
|
||||
}))
|
||||
return nil
|
||||
}
|
||||
|
||||
// Special case. Unmarshaler Interface support.
|
||||
if rv.CanAddr() {
|
||||
if v, ok := rv.Addr().Interface().(Unmarshaler); ok {
|
||||
return v.UnmarshalTOML(data)
|
||||
}
|
||||
}
|
||||
|
||||
// Special case. Handle time.Time values specifically.
|
||||
// TODO: Remove this code when we decide to drop support for Go 1.1.
|
||||
// This isn't necessary in Go 1.2 because time.Time satisfies the encoding
|
||||
// interfaces.
|
||||
if rv.Type().AssignableTo(rvalue(time.Time{}).Type()) {
|
||||
return md.unifyDatetime(data, rv)
|
||||
}
|
||||
|
||||
// Special case. Look for a value satisfying the TextUnmarshaler interface.
|
||||
if v, ok := rv.Interface().(TextUnmarshaler); ok {
|
||||
return md.unifyText(data, v)
|
||||
}
|
||||
// BUG(burntsushi)
|
||||
// The behavior here is incorrect whenever a Go type satisfies the
|
||||
// encoding.TextUnmarshaler interface but also corresponds to a TOML
|
||||
// hash or array. In particular, the unmarshaler should only be applied
|
||||
// to primitive TOML values. But at this point, it will be applied to
|
||||
// all kinds of values and produce an incorrect error whenever those values
|
||||
// are hashes or arrays (including arrays of tables).
|
||||
|
||||
k := rv.Kind()
|
||||
|
||||
// laziness
|
||||
if k >= reflect.Int && k <= reflect.Uint64 {
|
||||
return md.unifyInt(data, rv)
|
||||
}
|
||||
switch k {
|
||||
case reflect.Ptr:
|
||||
elem := reflect.New(rv.Type().Elem())
|
||||
err := md.unify(data, reflect.Indirect(elem))
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
rv.Set(elem)
|
||||
return nil
|
||||
case reflect.Struct:
|
||||
return md.unifyStruct(data, rv)
|
||||
case reflect.Map:
|
||||
return md.unifyMap(data, rv)
|
||||
case reflect.Array:
|
||||
return md.unifyArray(data, rv)
|
||||
case reflect.Slice:
|
||||
return md.unifySlice(data, rv)
|
||||
case reflect.String:
|
||||
return md.unifyString(data, rv)
|
||||
case reflect.Bool:
|
||||
return md.unifyBool(data, rv)
|
||||
case reflect.Interface:
|
||||
// we only support empty interfaces.
|
||||
if rv.NumMethod() > 0 {
|
||||
return e("Unsupported type '%s'.", rv.Kind())
|
||||
}
|
||||
return md.unifyAnything(data, rv)
|
||||
case reflect.Float32:
|
||||
fallthrough
|
||||
case reflect.Float64:
|
||||
return md.unifyFloat64(data, rv)
|
||||
}
|
||||
return e("Unsupported type '%s'.", rv.Kind())
|
||||
}
|
||||
|
||||
func (md *MetaData) unifyStruct(mapping interface{}, rv reflect.Value) error {
|
||||
tmap, ok := mapping.(map[string]interface{})
|
||||
if !ok {
|
||||
return mismatch(rv, "map", mapping)
|
||||
}
|
||||
|
||||
for key, datum := range tmap {
|
||||
var f *field
|
||||
fields := cachedTypeFields(rv.Type())
|
||||
for i := range fields {
|
||||
ff := &fields[i]
|
||||
if ff.name == key {
|
||||
f = ff
|
||||
break
|
||||
}
|
||||
if f == nil && strings.EqualFold(ff.name, key) {
|
||||
f = ff
|
||||
}
|
||||
}
|
||||
if f != nil {
|
||||
subv := rv
|
||||
for _, i := range f.index {
|
||||
subv = indirect(subv.Field(i))
|
||||
}
|
||||
if isUnifiable(subv) {
|
||||
md.decoded[md.context.add(key).String()] = true
|
||||
md.context = append(md.context, key)
|
||||
if err := md.unify(datum, subv); err != nil {
|
||||
return e("Type mismatch for '%s.%s': %s",
|
||||
rv.Type().String(), f.name, err)
|
||||
}
|
||||
md.context = md.context[0 : len(md.context)-1]
|
||||
} else if f.name != "" {
|
||||
// Bad user! No soup for you!
|
||||
return e("Field '%s.%s' is unexported, and therefore cannot "+
|
||||
"be loaded with reflection.", rv.Type().String(), f.name)
|
||||
}
|
||||
}
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
func (md *MetaData) unifyMap(mapping interface{}, rv reflect.Value) error {
|
||||
tmap, ok := mapping.(map[string]interface{})
|
||||
if !ok {
|
||||
return badtype("map", mapping)
|
||||
}
|
||||
if rv.IsNil() {
|
||||
rv.Set(reflect.MakeMap(rv.Type()))
|
||||
}
|
||||
for k, v := range tmap {
|
||||
md.decoded[md.context.add(k).String()] = true
|
||||
md.context = append(md.context, k)
|
||||
|
||||
rvkey := indirect(reflect.New(rv.Type().Key()))
|
||||
rvval := reflect.Indirect(reflect.New(rv.Type().Elem()))
|
||||
if err := md.unify(v, rvval); err != nil {
|
||||
return err
|
||||
}
|
||||
md.context = md.context[0 : len(md.context)-1]
|
||||
|
||||
rvkey.SetString(k)
|
||||
rv.SetMapIndex(rvkey, rvval)
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
func (md *MetaData) unifyArray(data interface{}, rv reflect.Value) error {
|
||||
datav := reflect.ValueOf(data)
|
||||
if datav.Kind() != reflect.Slice {
|
||||
return badtype("slice", data)
|
||||
}
|
||||
sliceLen := datav.Len()
|
||||
if sliceLen != rv.Len() {
|
||||
return e("expected array length %d; got TOML array of length %d",
|
||||
rv.Len(), sliceLen)
|
||||
}
|
||||
return md.unifySliceArray(datav, rv)
|
||||
}
|
||||
|
||||
func (md *MetaData) unifySlice(data interface{}, rv reflect.Value) error {
|
||||
datav := reflect.ValueOf(data)
|
||||
if datav.Kind() != reflect.Slice {
|
||||
return badtype("slice", data)
|
||||
}
|
||||
sliceLen := datav.Len()
|
||||
if rv.IsNil() {
|
||||
rv.Set(reflect.MakeSlice(rv.Type(), sliceLen, sliceLen))
|
||||
}
|
||||
return md.unifySliceArray(datav, rv)
|
||||
}
|
||||
|
||||
func (md *MetaData) unifySliceArray(data, rv reflect.Value) error {
|
||||
sliceLen := data.Len()
|
||||
for i := 0; i < sliceLen; i++ {
|
||||
v := data.Index(i).Interface()
|
||||
sliceval := indirect(rv.Index(i))
|
||||
if err := md.unify(v, sliceval); err != nil {
|
||||
return err
|
||||
}
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
func (md *MetaData) unifyDatetime(data interface{}, rv reflect.Value) error {
|
||||
if _, ok := data.(time.Time); ok {
|
||||
rv.Set(reflect.ValueOf(data))
|
||||
return nil
|
||||
}
|
||||
return badtype("time.Time", data)
|
||||
}
|
||||
|
||||
func (md *MetaData) unifyString(data interface{}, rv reflect.Value) error {
|
||||
if s, ok := data.(string); ok {
|
||||
rv.SetString(s)
|
||||
return nil
|
||||
}
|
||||
return badtype("string", data)
|
||||
}
|
||||
|
||||
func (md *MetaData) unifyFloat64(data interface{}, rv reflect.Value) error {
|
||||
if num, ok := data.(float64); ok {
|
||||
switch rv.Kind() {
|
||||
case reflect.Float32:
|
||||
fallthrough
|
||||
case reflect.Float64:
|
||||
rv.SetFloat(num)
|
||||
default:
|
||||
panic("bug")
|
||||
}
|
||||
return nil
|
||||
}
|
||||
return badtype("float", data)
|
||||
}
|
||||
|
||||
func (md *MetaData) unifyInt(data interface{}, rv reflect.Value) error {
|
||||
if num, ok := data.(int64); ok {
|
||||
if rv.Kind() >= reflect.Int && rv.Kind() <= reflect.Int64 {
|
||||
switch rv.Kind() {
|
||||
case reflect.Int, reflect.Int64:
|
||||
// No bounds checking necessary.
|
||||
case reflect.Int8:
|
||||
if num < math.MinInt8 || num > math.MaxInt8 {
|
||||
return e("Value '%d' is out of range for int8.", num)
|
||||
}
|
||||
case reflect.Int16:
|
||||
if num < math.MinInt16 || num > math.MaxInt16 {
|
||||
return e("Value '%d' is out of range for int16.", num)
|
||||
}
|
||||
case reflect.Int32:
|
||||
if num < math.MinInt32 || num > math.MaxInt32 {
|
||||
return e("Value '%d' is out of range for int32.", num)
|
||||
}
|
||||
}
|
||||
rv.SetInt(num)
|
||||
} else if rv.Kind() >= reflect.Uint && rv.Kind() <= reflect.Uint64 {
|
||||
unum := uint64(num)
|
||||
switch rv.Kind() {
|
||||
case reflect.Uint, reflect.Uint64:
|
||||
// No bounds checking necessary.
|
||||
case reflect.Uint8:
|
||||
if num < 0 || unum > math.MaxUint8 {
|
||||
return e("Value '%d' is out of range for uint8.", num)
|
||||
}
|
||||
case reflect.Uint16:
|
||||
if num < 0 || unum > math.MaxUint16 {
|
||||
return e("Value '%d' is out of range for uint16.", num)
|
||||
}
|
||||
case reflect.Uint32:
|
||||
if num < 0 || unum > math.MaxUint32 {
|
||||
return e("Value '%d' is out of range for uint32.", num)
|
||||
}
|
||||
}
|
||||
rv.SetUint(unum)
|
||||
} else {
|
||||
panic("unreachable")
|
||||
}
|
||||
return nil
|
||||
}
|
||||
return badtype("integer", data)
|
||||
}
|
||||
|
||||
func (md *MetaData) unifyBool(data interface{}, rv reflect.Value) error {
|
||||
if b, ok := data.(bool); ok {
|
||||
rv.SetBool(b)
|
||||
return nil
|
||||
}
|
||||
return badtype("boolean", data)
|
||||
}
|
||||
|
||||
func (md *MetaData) unifyAnything(data interface{}, rv reflect.Value) error {
|
||||
rv.Set(reflect.ValueOf(data))
|
||||
return nil
|
||||
}
|
||||
|
||||
func (md *MetaData) unifyText(data interface{}, v TextUnmarshaler) error {
|
||||
var s string
|
||||
switch sdata := data.(type) {
|
||||
case TextMarshaler:
|
||||
text, err := sdata.MarshalText()
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
s = string(text)
|
||||
case fmt.Stringer:
|
||||
s = sdata.String()
|
||||
case string:
|
||||
s = sdata
|
||||
case bool:
|
||||
s = fmt.Sprintf("%v", sdata)
|
||||
case int64:
|
||||
s = fmt.Sprintf("%d", sdata)
|
||||
case float64:
|
||||
s = fmt.Sprintf("%f", sdata)
|
||||
default:
|
||||
return badtype("primitive (string-like)", data)
|
||||
}
|
||||
if err := v.UnmarshalText([]byte(s)); err != nil {
|
||||
return err
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
// rvalue returns a reflect.Value of `v`. All pointers are resolved.
|
||||
func rvalue(v interface{}) reflect.Value {
|
||||
return indirect(reflect.ValueOf(v))
|
||||
}
|
||||
|
||||
// indirect returns the value pointed to by a pointer.
|
||||
// Pointers are followed until the value is not a pointer.
|
||||
// New values are allocated for each nil pointer.
|
||||
//
|
||||
// An exception to this rule is if the value satisfies an interface of
|
||||
// interest to us (like encoding.TextUnmarshaler).
|
||||
func indirect(v reflect.Value) reflect.Value {
|
||||
if v.Kind() != reflect.Ptr {
|
||||
if v.CanAddr() {
|
||||
pv := v.Addr()
|
||||
if _, ok := pv.Interface().(TextUnmarshaler); ok {
|
||||
return pv
|
||||
}
|
||||
}
|
||||
return v
|
||||
}
|
||||
if v.IsNil() {
|
||||
v.Set(reflect.New(v.Type().Elem()))
|
||||
}
|
||||
return indirect(reflect.Indirect(v))
|
||||
}
|
||||
|
||||
func isUnifiable(rv reflect.Value) bool {
|
||||
if rv.CanSet() {
|
||||
return true
|
||||
}
|
||||
if _, ok := rv.Interface().(TextUnmarshaler); ok {
|
||||
return true
|
||||
}
|
||||
return false
|
||||
}
|
||||
|
||||
func badtype(expected string, data interface{}) error {
|
||||
return e("Expected %s but found '%T'.", expected, data)
|
||||
}
|
||||
|
||||
func mismatch(user reflect.Value, expected string, data interface{}) error {
|
||||
return e("Type mismatch for %s. Expected %s but found '%T'.",
|
||||
user.Type().String(), expected, data)
|
||||
}
|
||||
@@ -77,8 +77,9 @@ func (k Key) maybeQuoted(i int) string {
|
||||
}
|
||||
if quote {
|
||||
return "\"" + strings.Replace(k[i], "\"", "\\\"", -1) + "\""
|
||||
} else {
|
||||
return k[i]
|
||||
}
|
||||
return k[i]
|
||||
}
|
||||
|
||||
func (k Key) add(piece string) Key {
|
||||
950 Godeps/_workspace/src/github.com/BurntSushi/toml/decode_test.go (generated, vendored, Normal file)
@@ -0,0 +1,950 @@
|
||||
package toml
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"log"
|
||||
"reflect"
|
||||
"testing"
|
||||
"time"
|
||||
)
|
||||
|
||||
func init() {
|
||||
log.SetFlags(0)
|
||||
}
|
||||
|
||||
func TestDecodeSimple(t *testing.T) {
|
||||
var testSimple = `
|
||||
age = 250
|
||||
andrew = "gallant"
|
||||
kait = "brady"
|
||||
now = 1987-07-05T05:45:00Z
|
||||
yesOrNo = true
|
||||
pi = 3.14
|
||||
colors = [
|
||||
["red", "green", "blue"],
|
||||
["cyan", "magenta", "yellow", "black"],
|
||||
]
|
||||
|
||||
[My.Cats]
|
||||
plato = "cat 1"
|
||||
cauchy = "cat 2"
|
||||
`
|
||||
|
||||
type cats struct {
|
||||
Plato string
|
||||
Cauchy string
|
||||
}
|
||||
type simple struct {
|
||||
Age int
|
||||
Colors [][]string
|
||||
Pi float64
|
||||
YesOrNo bool
|
||||
Now time.Time
|
||||
Andrew string
|
||||
Kait string
|
||||
My map[string]cats
|
||||
}
|
||||
|
||||
var val simple
|
||||
_, err := Decode(testSimple, &val)
|
||||
if err != nil {
|
||||
t.Fatal(err)
|
||||
}
|
||||
|
||||
now, err := time.Parse("2006-01-02T15:04:05", "1987-07-05T05:45:00")
|
||||
if err != nil {
|
||||
panic(err)
|
||||
}
|
||||
var answer = simple{
|
||||
Age: 250,
|
||||
Andrew: "gallant",
|
||||
Kait: "brady",
|
||||
Now: now,
|
||||
YesOrNo: true,
|
||||
Pi: 3.14,
|
||||
Colors: [][]string{
|
||||
{"red", "green", "blue"},
|
||||
{"cyan", "magenta", "yellow", "black"},
|
||||
},
|
||||
My: map[string]cats{
|
||||
"Cats": cats{Plato: "cat 1", Cauchy: "cat 2"},
|
||||
},
|
||||
}
|
||||
if !reflect.DeepEqual(val, answer) {
|
||||
t.Fatalf("Expected\n-----\n%#v\n-----\nbut got\n-----\n%#v\n",
|
||||
answer, val)
|
||||
}
|
||||
}
|
||||
|
||||
func TestDecodeEmbedded(t *testing.T) {
|
||||
type Dog struct{ Name string }
|
||||
type Age int
|
||||
|
||||
tests := map[string]struct {
|
||||
input string
|
||||
decodeInto interface{}
|
||||
wantDecoded interface{}
|
||||
}{
|
||||
"embedded struct": {
|
||||
input: `Name = "milton"`,
|
||||
decodeInto: &struct{ Dog }{},
|
||||
wantDecoded: &struct{ Dog }{Dog{"milton"}},
|
||||
},
|
||||
"embedded non-nil pointer to struct": {
|
||||
input: `Name = "milton"`,
|
||||
decodeInto: &struct{ *Dog }{},
|
||||
wantDecoded: &struct{ *Dog }{&Dog{"milton"}},
|
||||
},
|
||||
"embedded nil pointer to struct": {
|
||||
input: ``,
|
||||
decodeInto: &struct{ *Dog }{},
|
||||
wantDecoded: &struct{ *Dog }{nil},
|
||||
},
|
||||
"embedded int": {
|
||||
input: `Age = -5`,
|
||||
decodeInto: &struct{ Age }{},
|
||||
wantDecoded: &struct{ Age }{-5},
|
||||
},
|
||||
}
|
||||
|
||||
for label, test := range tests {
|
||||
_, err := Decode(test.input, test.decodeInto)
|
||||
if err != nil {
|
||||
t.Fatal(err)
|
||||
}
|
||||
if !reflect.DeepEqual(test.wantDecoded, test.decodeInto) {
|
||||
t.Errorf("%s: want decoded == %+v, got %+v",
|
||||
label, test.wantDecoded, test.decodeInto)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func TestTableArrays(t *testing.T) {
|
||||
var tomlTableArrays = `
|
||||
[[albums]]
|
||||
name = "Born to Run"
|
||||
|
||||
[[albums.songs]]
|
||||
name = "Jungleland"
|
||||
|
||||
[[albums.songs]]
|
||||
name = "Meeting Across the River"
|
||||
|
||||
[[albums]]
|
||||
name = "Born in the USA"
|
||||
|
||||
[[albums.songs]]
|
||||
name = "Glory Days"
|
||||
|
||||
[[albums.songs]]
|
||||
name = "Dancing in the Dark"
|
||||
`
|
||||
|
||||
type Song struct {
|
||||
Name string
|
||||
}
|
||||
|
||||
type Album struct {
|
||||
Name string
|
||||
Songs []Song
|
||||
}
|
||||
|
||||
type Music struct {
|
||||
Albums []Album
|
||||
}
|
||||
|
||||
expected := Music{[]Album{
|
||||
{"Born to Run", []Song{{"Jungleland"}, {"Meeting Across the River"}}},
|
||||
{"Born in the USA", []Song{{"Glory Days"}, {"Dancing in the Dark"}}},
|
||||
}}
|
||||
var got Music
|
||||
if _, err := Decode(tomlTableArrays, &got); err != nil {
|
||||
t.Fatal(err)
|
||||
}
|
||||
if !reflect.DeepEqual(expected, got) {
|
||||
t.Fatalf("\n%#v\n!=\n%#v\n", expected, got)
|
||||
}
|
||||
}
|
||||
|
||||
// Case insensitive matching tests.
|
||||
// A bit more comprehensive than needed given the current implementation,
|
||||
// but implementations change.
|
||||
// Probably still missing demonstrations of some ugly corner cases regarding
|
||||
// case insensitive matching and multiple fields.
|
||||
func TestCase(t *testing.T) {
|
||||
var caseToml = `
|
||||
tOpString = "string"
|
||||
tOpInt = 1
|
||||
tOpFloat = 1.1
|
||||
tOpBool = true
|
||||
tOpdate = 2006-01-02T15:04:05Z
|
||||
tOparray = [ "array" ]
|
||||
Match = "i should be in Match only"
|
||||
MatcH = "i should be in MatcH only"
|
||||
once = "just once"
|
||||
[nEst.eD]
|
||||
nEstedString = "another string"
|
||||
`
|
||||
|
||||
type InsensitiveEd struct {
|
||||
NestedString string
|
||||
}
|
||||
|
||||
type InsensitiveNest struct {
|
||||
Ed InsensitiveEd
|
||||
}
|
||||
|
||||
type Insensitive struct {
|
||||
TopString string
|
||||
TopInt int
|
||||
TopFloat float64
|
||||
TopBool bool
|
||||
TopDate time.Time
|
||||
TopArray []string
|
||||
Match string
|
||||
MatcH string
|
||||
Once string
|
||||
OncE string
|
||||
Nest InsensitiveNest
|
||||
}
|
||||
|
||||
tme, err := time.Parse(time.RFC3339, time.RFC3339[:len(time.RFC3339)-5])
|
||||
if err != nil {
|
||||
panic(err)
|
||||
}
|
||||
expected := Insensitive{
|
||||
TopString: "string",
|
||||
TopInt: 1,
|
||||
TopFloat: 1.1,
|
||||
TopBool: true,
|
||||
TopDate: tme,
|
||||
TopArray: []string{"array"},
|
||||
MatcH: "i should be in MatcH only",
|
||||
Match: "i should be in Match only",
|
||||
Once: "just once",
|
||||
OncE: "",
|
||||
Nest: InsensitiveNest{
|
||||
Ed: InsensitiveEd{NestedString: "another string"},
|
||||
},
|
||||
}
|
||||
var got Insensitive
|
||||
if _, err := Decode(caseToml, &got); err != nil {
|
||||
t.Fatal(err)
|
||||
}
|
||||
if !reflect.DeepEqual(expected, got) {
|
||||
t.Fatalf("\n%#v\n!=\n%#v\n", expected, got)
|
||||
}
|
||||
}
|
||||
|
||||
func TestPointers(t *testing.T) {
|
||||
type Object struct {
|
||||
Type string
|
||||
Description string
|
||||
}
|
||||
|
||||
type Dict struct {
|
||||
NamedObject map[string]*Object
|
||||
BaseObject *Object
|
||||
Strptr *string
|
||||
Strptrs []*string
|
||||
}
|
||||
s1, s2, s3 := "blah", "abc", "def"
|
||||
expected := &Dict{
|
||||
Strptr: &s1,
|
||||
Strptrs: []*string{&s2, &s3},
|
||||
NamedObject: map[string]*Object{
|
||||
"foo": {"FOO", "fooooo!!!"},
|
||||
"bar": {"BAR", "ba-ba-ba-ba-barrrr!!!"},
|
||||
},
|
||||
BaseObject: &Object{"BASE", "da base"},
|
||||
}
|
||||
|
||||
ex1 := `
|
||||
Strptr = "blah"
|
||||
Strptrs = ["abc", "def"]
|
||||
|
||||
[NamedObject.foo]
|
||||
Type = "FOO"
|
||||
Description = "fooooo!!!"
|
||||
|
||||
[NamedObject.bar]
|
||||
Type = "BAR"
|
||||
Description = "ba-ba-ba-ba-barrrr!!!"
|
||||
|
||||
[BaseObject]
|
||||
Type = "BASE"
|
||||
Description = "da base"
|
||||
`
|
||||
dict := new(Dict)
|
||||
_, err := Decode(ex1, dict)
|
||||
if err != nil {
|
||||
t.Errorf("Decode error: %v", err)
|
||||
}
|
||||
if !reflect.DeepEqual(expected, dict) {
|
||||
t.Fatalf("\n%#v\n!=\n%#v\n", expected, dict)
|
||||
}
|
||||
}
|
||||
|
||||
type sphere struct {
|
||||
Center [3]float64
|
||||
Radius float64
|
||||
}
|
||||
|
||||
func TestDecodeSimpleArray(t *testing.T) {
|
||||
var s1 sphere
|
||||
if _, err := Decode(`center = [0.0, 1.5, 0.0]`, &s1); err != nil {
|
||||
t.Fatal(err)
|
||||
}
|
||||
}
|
||||
|
||||
func TestDecodeArrayWrongSize(t *testing.T) {
|
||||
var s1 sphere
|
||||
if _, err := Decode(`center = [0.1, 2.3]`, &s1); err == nil {
|
||||
t.Fatal("Expected array type mismatch error")
|
||||
}
|
||||
}
|
||||
|
||||
func TestDecodeLargeIntoSmallInt(t *testing.T) {
|
||||
type table struct {
|
||||
Value int8
|
||||
}
|
||||
var tab table
|
||||
if _, err := Decode(`value = 500`, &tab); err == nil {
|
||||
t.Fatal("Expected integer out-of-bounds error.")
|
||||
}
|
||||
}
|
||||
|
||||
func TestDecodeSizedInts(t *testing.T) {
|
||||
type table struct {
|
||||
U8 uint8
|
||||
U16 uint16
|
||||
U32 uint32
|
||||
U64 uint64
|
||||
U uint
|
||||
I8 int8
|
||||
I16 int16
|
||||
I32 int32
|
||||
I64 int64
|
||||
I int
|
||||
}
|
||||
answer := table{1, 1, 1, 1, 1, -1, -1, -1, -1, -1}
|
||||
toml := `
|
||||
u8 = 1
|
||||
u16 = 1
|
||||
u32 = 1
|
||||
u64 = 1
|
||||
u = 1
|
||||
i8 = -1
|
||||
i16 = -1
|
||||
i32 = -1
|
||||
i64 = -1
|
||||
i = -1
|
||||
`
|
||||
var tab table
|
||||
if _, err := Decode(toml, &tab); err != nil {
|
||||
t.Fatal(err.Error())
|
||||
}
|
||||
if answer != tab {
|
||||
t.Fatalf("Expected %#v but got %#v", answer, tab)
|
||||
}
|
||||
}
|
||||
|
||||
func TestUnmarshaler(t *testing.T) {
|
||||
|
||||
var tomlBlob = `
|
||||
[dishes.hamboogie]
|
||||
name = "Hamboogie with fries"
|
||||
price = 10.99
|
||||
|
||||
[[dishes.hamboogie.ingredients]]
|
||||
name = "Bread Bun"
|
||||
|
||||
[[dishes.hamboogie.ingredients]]
|
||||
name = "Lettuce"
|
||||
|
||||
[[dishes.hamboogie.ingredients]]
|
||||
name = "Real Beef Patty"
|
||||
|
||||
[[dishes.hamboogie.ingredients]]
|
||||
name = "Tomato"
|
||||
|
||||
[dishes.eggsalad]
|
||||
name = "Egg Salad with rice"
|
||||
price = 3.99
|
||||
|
||||
[[dishes.eggsalad.ingredients]]
|
||||
name = "Egg"
|
||||
|
||||
[[dishes.eggsalad.ingredients]]
|
||||
name = "Mayo"
|
||||
|
||||
[[dishes.eggsalad.ingredients]]
|
||||
name = "Rice"
|
||||
`
|
||||
m := &menu{}
|
||||
if _, err := Decode(tomlBlob, m); err != nil {
|
||||
log.Fatal(err)
|
||||
}
|
||||
|
||||
if len(m.Dishes) != 2 {
|
||||
t.Log("two dishes should be loaded with UnmarshalTOML()")
|
||||
t.Errorf("expected %d but got %d", 2, len(m.Dishes))
|
||||
}
|
||||
|
||||
eggSalad := m.Dishes["eggsalad"]
|
||||
if _, ok := interface{}(eggSalad).(dish); !ok {
|
||||
t.Errorf("expected a dish")
|
||||
}
|
||||
|
||||
if eggSalad.Name != "Egg Salad with rice" {
|
||||
t.Errorf("expected the dish to be named 'Egg Salad with rice'")
|
||||
}
|
||||
|
||||
if len(eggSalad.Ingredients) != 3 {
|
||||
t.Log("dish should be loaded with UnmarshalTOML()")
|
||||
t.Errorf("expected %d but got %d", 3, len(eggSalad.Ingredients))
|
||||
}
|
||||
|
||||
found := false
|
||||
for _, i := range eggSalad.Ingredients {
|
||||
if i.Name == "Rice" {
|
||||
found = true
|
||||
break
|
||||
}
|
||||
}
|
||||
if !found {
|
||||
t.Error("Rice was not loaded in UnmarshalTOML()")
|
||||
}
|
||||
|
||||
// test on a value - must be passed as *
|
||||
o := menu{}
|
||||
if _, err := Decode(tomlBlob, &o); err != nil {
|
||||
log.Fatal(err)
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
type menu struct {
|
||||
Dishes map[string]dish
|
||||
}
|
||||
|
||||
func (m *menu) UnmarshalTOML(p interface{}) error {
|
||||
m.Dishes = make(map[string]dish)
|
||||
data, _ := p.(map[string]interface{})
|
||||
dishes := data["dishes"].(map[string]interface{})
|
||||
for n, v := range dishes {
|
||||
if d, ok := v.(map[string]interface{}); ok {
|
||||
nd := dish{}
|
||||
nd.UnmarshalTOML(d)
|
||||
m.Dishes[n] = nd
|
||||
} else {
|
||||
return fmt.Errorf("not a dish")
|
||||
}
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
type dish struct {
|
||||
Name string
|
||||
Price float32
|
||||
Ingredients []ingredient
|
||||
}
|
||||
|
||||
func (d *dish) UnmarshalTOML(p interface{}) error {
|
||||
data, _ := p.(map[string]interface{})
|
||||
d.Name, _ = data["name"].(string)
|
||||
d.Price, _ = data["price"].(float32)
|
||||
ingredients, _ := data["ingredients"].([]map[string]interface{})
|
||||
for _, e := range ingredients {
|
||||
n, _ := interface{}(e).(map[string]interface{})
|
||||
name, _ := n["name"].(string)
|
||||
i := ingredient{name}
|
||||
d.Ingredients = append(d.Ingredients, i)
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
type ingredient struct {
|
||||
Name string
|
||||
}
|
||||
|
||||
func ExampleMetaData_PrimitiveDecode() {
|
||||
var md MetaData
|
||||
var err error
|
||||
|
||||
var tomlBlob = `
|
||||
ranking = ["Springsteen", "J Geils"]
|
||||
|
||||
[bands.Springsteen]
|
||||
started = 1973
|
||||
albums = ["Greetings", "WIESS", "Born to Run", "Darkness"]
|
||||
|
||||
[bands."J Geils"]
|
||||
started = 1970
|
||||
albums = ["The J. Geils Band", "Full House", "Blow Your Face Out"]
|
||||
`
|
||||
|
||||
type band struct {
|
||||
Started int
|
||||
Albums []string
|
||||
}
|
||||
type classics struct {
|
||||
Ranking []string
|
||||
Bands map[string]Primitive
|
||||
}
|
||||
|
||||
// Do the initial decode. Reflection is delayed on Primitive values.
|
||||
var music classics
|
||||
if md, err = Decode(tomlBlob, &music); err != nil {
|
||||
log.Fatal(err)
|
||||
}
|
||||
|
||||
// MetaData still includes information on Primitive values.
|
||||
fmt.Printf("Is `bands.Springsteen` defined? %v\n",
|
||||
md.IsDefined("bands", "Springsteen"))
|
||||
|
||||
// Decode primitive data into Go values.
|
||||
for _, artist := range music.Ranking {
|
||||
// A band is a primitive value, so we need to decode it to get a
|
||||
// real `band` value.
|
||||
primValue := music.Bands[artist]
|
||||
|
||||
var aBand band
|
||||
if err = md.PrimitiveDecode(primValue, &aBand); err != nil {
|
||||
log.Fatal(err)
|
||||
}
|
||||
fmt.Printf("%s started in %d.\n", artist, aBand.Started)
|
||||
}
|
||||
// Check to see if there were any fields left undecoded.
|
||||
// Note that this won't be empty before decoding the Primitive value!
|
||||
fmt.Printf("Undecoded: %q\n", md.Undecoded())
|
||||
|
||||
// Output:
|
||||
// Is `bands.Springsteen` defined? true
|
||||
// Springsteen started in 1973.
|
||||
// J Geils started in 1970.
|
||||
// Undecoded: []
|
||||
}
|
||||
|
||||
func ExampleDecode() {
|
||||
var tomlBlob = `
|
||||
# Some comments.
|
||||
[alpha]
|
||||
ip = "10.0.0.1"
|
||||
|
||||
[alpha.config]
|
||||
Ports = [ 8001, 8002 ]
|
||||
Location = "Toronto"
|
||||
Created = 1987-07-05T05:45:00Z
|
||||
|
||||
[beta]
|
||||
ip = "10.0.0.2"
|
||||
|
||||
[beta.config]
|
||||
Ports = [ 9001, 9002 ]
|
||||
Location = "New Jersey"
|
||||
Created = 1887-01-05T05:55:00Z
|
||||
`
|
||||
|
||||
type serverConfig struct {
|
||||
Ports []int
|
||||
Location string
|
||||
Created time.Time
|
||||
}
|
||||
|
||||
type server struct {
|
||||
IP string `toml:"ip"`
|
||||
Config serverConfig `toml:"config"`
|
||||
}
|
||||
|
||||
type servers map[string]server
|
||||
|
||||
var config servers
|
||||
if _, err := Decode(tomlBlob, &config); err != nil {
|
||||
log.Fatal(err)
|
||||
}
|
||||
|
||||
for _, name := range []string{"alpha", "beta"} {
|
||||
s := config[name]
|
||||
fmt.Printf("Server: %s (ip: %s) in %s created on %s\n",
|
||||
name, s.IP, s.Config.Location,
|
||||
s.Config.Created.Format("2006-01-02"))
|
||||
fmt.Printf("Ports: %v\n", s.Config.Ports)
|
||||
}
|
||||
|
||||
// Output:
|
||||
// Server: alpha (ip: 10.0.0.1) in Toronto created on 1987-07-05
|
||||
// Ports: [8001 8002]
|
||||
// Server: beta (ip: 10.0.0.2) in New Jersey created on 1887-01-05
|
||||
// Ports: [9001 9002]
|
||||
}
|
||||
|
||||
type duration struct {
|
||||
time.Duration
|
||||
}
|
||||
|
||||
func (d *duration) UnmarshalText(text []byte) error {
|
||||
var err error
|
||||
d.Duration, err = time.ParseDuration(string(text))
|
||||
return err
|
||||
}
|
||||
|
||||
// Example Unmarshaler shows how to decode TOML strings into your own
|
||||
// custom data type.
|
||||
func Example_unmarshaler() {
|
||||
blob := `
|
||||
[[song]]
|
||||
name = "Thunder Road"
|
||||
duration = "4m49s"
|
||||
|
||||
[[song]]
|
||||
name = "Stairway to Heaven"
|
||||
duration = "8m03s"
|
||||
`
|
||||
type song struct {
|
||||
Name string
|
||||
Duration duration
|
||||
}
|
||||
type songs struct {
|
||||
Song []song
|
||||
}
|
||||
var favorites songs
|
||||
if _, err := Decode(blob, &favorites); err != nil {
|
||||
log.Fatal(err)
|
||||
}
|
||||
|
||||
// Code to implement the TextUnmarshaler interface for `duration`:
|
||||
//
|
||||
// type duration struct {
|
||||
// time.Duration
|
||||
// }
|
||||
//
|
||||
// func (d *duration) UnmarshalText(text []byte) error {
|
||||
// var err error
|
||||
// d.Duration, err = time.ParseDuration(string(text))
|
||||
// return err
|
||||
// }
|
||||
|
||||
for _, s := range favorites.Song {
|
||||
fmt.Printf("%s (%s)\n", s.Name, s.Duration)
|
||||
}
|
||||
// Output:
|
||||
// Thunder Road (4m49s)
|
||||
// Stairway to Heaven (8m3s)
|
||||
}
|
||||
|
||||
// Example StrictDecoding shows how to detect whether there are keys in the
|
||||
// TOML document that weren't decoded into the value given. This is useful
|
||||
// for returning an error to the user if they've included extraneous fields
|
||||
// in their configuration.
|
||||
func Example_strictDecoding() {
|
||||
var blob = `
|
||||
key1 = "value1"
|
||||
key2 = "value2"
|
||||
key3 = "value3"
|
||||
`
|
||||
type config struct {
|
||||
Key1 string
|
||||
Key3 string
|
||||
}
|
||||
|
||||
var conf config
|
||||
md, err := Decode(blob, &conf)
|
||||
if err != nil {
|
||||
log.Fatal(err)
|
||||
}
|
||||
fmt.Printf("Undecoded keys: %q\n", md.Undecoded())
|
||||
// Output:
|
||||
// Undecoded keys: ["key2"]
|
||||
}
|
||||
|
||||
// Example UnmarshalTOML shows how to implement a struct type that knows how to
|
||||
// unmarshal itself. The struct must take full responsibility for mapping the
|
||||
// values passed into the struct. The method may be used with interfaces in a
|
||||
// struct in cases where the actual type is not known until the data is
|
||||
// examined.
|
||||
func Example_unmarshalTOML() {
|
||||
|
||||
var blob = `
|
||||
[[parts]]
|
||||
type = "valve"
|
||||
id = "valve-1"
|
||||
size = 1.2
|
||||
rating = 4
|
||||
|
||||
[[parts]]
|
||||
type = "valve"
|
||||
id = "valve-2"
|
||||
size = 2.1
|
||||
rating = 5
|
||||
|
||||
[[parts]]
|
||||
type = "pipe"
|
||||
id = "pipe-1"
|
||||
length = 2.1
|
||||
diameter = 12
|
||||
|
||||
[[parts]]
|
||||
type = "cable"
|
||||
id = "cable-1"
|
||||
length = 12
|
||||
rating = 3.1
|
||||
`
|
||||
o := &order{}
|
||||
err := Unmarshal([]byte(blob), o)
|
||||
if err != nil {
|
||||
log.Fatal(err)
|
||||
}
|
||||
|
||||
fmt.Println(len(o.parts))
|
||||
|
||||
for _, part := range o.parts {
|
||||
fmt.Println(part.Name())
|
||||
}
|
||||
|
||||
// Code to implement UnmarshalTOML.
|
||||
|
||||
// type order struct {
|
||||
// // NOTE `order.parts` is a private slice of type `part` which is an
|
||||
// // interface and may only be loaded from toml using the
|
||||
// // UnmarshalTOML() method of the Unmarshaler interface.
|
||||
// parts parts
|
||||
// }
|
||||
|
||||
// func (o *order) UnmarshalTOML(data interface{}) error {
|
||||
|
||||
// // NOTE the example below contains detailed type casting to show how
|
||||
// // the 'data' is retrieved. In operational use, a type cast wrapper
|
||||
// // may be preferred, e.g.
|
||||
// //
|
||||
// // func AsMap(v interface{}) (map[string]interface{}, error) {
|
||||
// // return v.(map[string]interface{})
|
||||
// // }
|
||||
// //
|
||||
// // resulting in:
|
||||
// // d, _ := AsMap(data)
|
||||
// //
|
||||
|
||||
// d, _ := data.(map[string]interface{})
|
||||
// parts, _ := d["parts"].([]map[string]interface{})
|
||||
|
||||
// for _, p := range parts {
|
||||
|
||||
// typ, _ := p["type"].(string)
|
||||
// id, _ := p["id"].(string)
|
||||
|
||||
// // detect the type of part and handle each case
|
||||
// switch p["type"] {
|
||||
// case "valve":
|
||||
|
||||
// size := float32(p["size"].(float64))
|
||||
// rating := int(p["rating"].(int64))
|
||||
|
||||
// valve := &valve{
|
||||
// Type: typ,
|
||||
// ID: id,
|
||||
// Size: size,
|
||||
// Rating: rating,
|
||||
// }
|
||||
|
||||
// o.parts = append(o.parts, valve)
|
||||
|
||||
// case "pipe":
|
||||
|
||||
// length := float32(p["length"].(float64))
|
||||
// diameter := int(p["diameter"].(int64))
|
||||
|
||||
// pipe := &pipe{
|
||||
// Type: typ,
|
||||
// ID: id,
|
||||
// Length: length,
|
||||
// Diameter: diameter,
|
||||
// }
|
||||
|
||||
// o.parts = append(o.parts, pipe)
|
||||
|
||||
// case "cable":
|
||||
|
||||
// length := int(p["length"].(int64))
|
||||
// rating := float32(p["rating"].(float64))
|
||||
|
||||
// cable := &cable{
|
||||
// Type: typ,
|
||||
// ID: id,
|
||||
// Length: length,
|
||||
// Rating: rating,
|
||||
// }
|
||||
|
||||
// o.parts = append(o.parts, cable)
|
||||
|
||||
// }
|
||||
// }
|
||||
|
||||
// return nil
|
||||
// }
|
||||
|
||||
// type parts []part
|
||||
|
||||
// type part interface {
|
||||
// Name() string
|
||||
// }
|
||||
|
||||
// type valve struct {
|
||||
// Type string
|
||||
// ID string
|
||||
// Size float32
|
||||
// Rating int
|
||||
// }
|
||||
|
||||
// func (v *valve) Name() string {
|
||||
// return fmt.Sprintf("VALVE: %s", v.ID)
|
||||
// }
|
||||
|
||||
// type pipe struct {
|
||||
// Type string
|
||||
// ID string
|
||||
// Length float32
|
||||
// Diameter int
|
||||
// }
|
||||
|
||||
// func (p *pipe) Name() string {
|
||||
// return fmt.Sprintf("PIPE: %s", p.ID)
|
||||
// }
|
||||
|
||||
// type cable struct {
|
||||
// Type string
|
||||
// ID string
|
||||
// Length int
|
||||
// Rating float32
|
||||
// }
|
||||
|
||||
// func (c *cable) Name() string {
|
||||
// return fmt.Sprintf("CABLE: %s", c.ID)
|
||||
// }
|
||||
|
||||
// Output:
|
||||
// 4
|
||||
// VALVE: valve-1
|
||||
// VALVE: valve-2
|
||||
// PIPE: pipe-1
|
||||
// CABLE: cable-1
|
||||
|
||||
}
|
||||
|
||||
type order struct {
|
||||
// NOTE `order.parts` is a private slice of type `part` which is an
|
||||
// interface and may only be loaded from toml using the UnmarshalTOML()
|
||||
// method of the Unmarshaler interface.
|
||||
parts parts
|
||||
}
|
||||
|
||||
func (o *order) UnmarshalTOML(data interface{}) error {
|
||||
|
||||
// NOTE the example below contains detailed type casting to show how
|
||||
// the 'data' is retrieved. In operational use, a type cast wrapper
|
||||
// may be preferred, e.g.
|
||||
//
|
||||
// func AsMap(v interface{}) (map[string]interface{}, error) {
|
||||
// return v.(map[string]interface{})
|
||||
// }
|
||||
//
|
||||
// resulting in:
|
||||
// d, _ := AsMap(data)
|
||||
//
|
||||
|
||||
d, _ := data.(map[string]interface{})
|
||||
parts, _ := d["parts"].([]map[string]interface{})
|
||||
|
||||
for _, p := range parts {
|
||||
|
||||
typ, _ := p["type"].(string)
|
||||
id, _ := p["id"].(string)
|
||||
|
||||
// detect the type of part and handle each case
|
||||
switch p["type"] {
|
||||
case "valve":
|
||||
|
||||
size := float32(p["size"].(float64))
|
||||
rating := int(p["rating"].(int64))
|
||||
|
||||
valve := &valve{
|
||||
Type: typ,
|
||||
ID: id,
|
||||
Size: size,
|
||||
Rating: rating,
|
||||
}
|
||||
|
||||
o.parts = append(o.parts, valve)
|
||||
|
||||
case "pipe":
|
||||
|
||||
length := float32(p["length"].(float64))
|
||||
diameter := int(p["diameter"].(int64))
|
||||
|
||||
pipe := &pipe{
|
||||
Type: typ,
|
||||
ID: id,
|
||||
Length: length,
|
||||
Diameter: diameter,
|
||||
}
|
||||
|
||||
o.parts = append(o.parts, pipe)
|
||||
|
||||
case "cable":
|
||||
|
||||
length := int(p["length"].(int64))
|
||||
rating := float32(p["rating"].(float64))
|
||||
|
||||
cable := &cable{
|
||||
Type: typ,
|
||||
ID: id,
|
||||
Length: length,
|
||||
Rating: rating,
|
||||
}
|
||||
|
||||
o.parts = append(o.parts, cable)
|
||||
|
||||
}
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
type parts []part
|
||||
|
||||
type part interface {
|
||||
Name() string
|
||||
}
|
||||
|
||||
type valve struct {
|
||||
Type string
|
||||
ID string
|
||||
Size float32
|
||||
Rating int
|
||||
}
|
||||
|
||||
func (v *valve) Name() string {
|
||||
return fmt.Sprintf("VALVE: %s", v.ID)
|
||||
}
|
||||
|
||||
type pipe struct {
|
||||
Type string
|
||||
ID string
|
||||
Length float32
|
||||
Diameter int
|
||||
}
|
||||
|
||||
func (p *pipe) Name() string {
|
||||
return fmt.Sprintf("PIPE: %s", p.ID)
|
||||
}
|
||||
|
||||
type cable struct {
|
||||
Type string
|
||||
ID string
|
||||
Length int
|
||||
Rating float32
|
||||
}
|
||||
|
||||
func (c *cable) Name() string {
|
||||
return fmt.Sprintf("CABLE: %s", c.ID)
|
||||
}
|
||||
27 Godeps/_workspace/src/github.com/BurntSushi/toml/doc.go (generated, vendored, Normal file)
@@ -0,0 +1,27 @@
/*
Package toml provides facilities for decoding and encoding TOML configuration
files via reflection. There is also support for delaying decoding with
the Primitive type, and querying the set of keys in a TOML document with the
MetaData type.

The specification implemented: https://github.com/mojombo/toml

The sub-command github.com/BurntSushi/toml/cmd/tomlv can be used to verify
whether a file is a valid TOML document. It can also be used to print the
type of each key in a TOML document.

Testing

There are two important types of tests used for this package. The first is
contained inside '*_test.go' files and uses the standard Go unit testing
framework. These tests are primarily devoted to holistically testing the
decoder and encoder.

The second type of testing is used to verify the implementation's adherence
to the TOML specification. These tests have been factored into their own
project: https://github.com/BurntSushi/toml-test

The reason the tests are in a separate project is so that they can be used by
any implementation of TOML. Namely, it is language agnostic.
*/
package toml
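As a rough illustration of the API documented above, the sketch below decodes a small document into a struct and then inspects the returned MetaData. The config type and its keys (title, port) are made up for the example; toml.Decode, MetaData.IsDefined and MetaData.Undecoded are the functions defined in decode.go.

package main

import (
	"fmt"
	"log"

	"github.com/BurntSushi/toml"
)

// config is a hypothetical destination type; TOML keys are matched to
// field names case-insensitively, as described in the Decode documentation.
type config struct {
	Title string
	Port  int
}

func main() {
	blob := `
title = "example"
port = 8080
`
	var conf config
	md, err := toml.Decode(blob, &conf)
	if err != nil {
		log.Fatal(err)
	}
	fmt.Println(conf.Title, conf.Port) // example 8080
	fmt.Println(md.IsDefined("title")) // true
	fmt.Println(md.Undecoded())        // [] (every key was decoded)
}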
551 Godeps/_workspace/src/github.com/BurntSushi/toml/encode.go (generated, vendored, Normal file)
@@ -0,0 +1,551 @@
|
||||
package toml
|
||||
|
||||
import (
|
||||
"bufio"
|
||||
"errors"
|
||||
"fmt"
|
||||
"io"
|
||||
"reflect"
|
||||
"sort"
|
||||
"strconv"
|
||||
"strings"
|
||||
"time"
|
||||
)
|
||||
|
||||
type tomlEncodeError struct{ error }
|
||||
|
||||
var (
|
||||
errArrayMixedElementTypes = errors.New(
|
||||
"can't encode array with mixed element types")
|
||||
errArrayNilElement = errors.New(
|
||||
"can't encode array with nil element")
|
||||
errNonString = errors.New(
|
||||
"can't encode a map with non-string key type")
|
||||
errAnonNonStruct = errors.New(
|
||||
"can't encode an anonymous field that is not a struct")
|
||||
errArrayNoTable = errors.New(
|
||||
"TOML array element can't contain a table")
|
||||
errNoKey = errors.New(
|
||||
"top-level values must be a Go map or struct")
|
||||
errAnything = errors.New("") // used in testing
|
||||
)
|
||||
|
||||
var quotedReplacer = strings.NewReplacer(
|
||||
"\t", "\\t",
|
||||
"\n", "\\n",
|
||||
"\r", "\\r",
|
||||
"\"", "\\\"",
|
||||
"\\", "\\\\",
|
||||
)
|
||||
|
||||
// Encoder controls the encoding of Go values to a TOML document to some
|
||||
// io.Writer.
|
||||
//
|
||||
// The indentation level can be controlled with the Indent field.
|
||||
type Encoder struct {
|
||||
// A single indentation level. By default it is two spaces.
|
||||
Indent string
|
||||
|
||||
// hasWritten is whether we have written any output to w yet.
|
||||
hasWritten bool
|
||||
w *bufio.Writer
|
||||
}
|
||||
|
||||
// NewEncoder returns a TOML encoder that encodes Go values to the io.Writer
|
||||
// given. By default, a single indentation level is 2 spaces.
|
||||
func NewEncoder(w io.Writer) *Encoder {
|
||||
return &Encoder{
|
||||
w: bufio.NewWriter(w),
|
||||
Indent: " ",
|
||||
}
|
||||
}
|
||||
|
||||
// Encode writes a TOML representation of the Go value to the underlying
|
||||
// io.Writer. If the value given cannot be encoded to a valid TOML document,
|
||||
// then an error is returned.
|
||||
//
|
||||
// The mapping between Go values and TOML values should be precisely the same
|
||||
// as for the Decode* functions. Similarly, the TextMarshaler interface is
|
||||
// supported by encoding the resulting bytes as strings. (If you want to write
|
||||
// arbitrary binary data then you will need to use something like base64 since
|
||||
// TOML does not have any binary types.)
|
||||
//
|
||||
// When encoding TOML hashes (i.e., Go maps or structs), keys without any
|
||||
// sub-hashes are encoded first.
|
||||
//
|
||||
// If a Go map is encoded, then its keys are sorted alphabetically for
|
||||
// deterministic output. More control over this behavior may be provided if
|
||||
// there is demand for it.
|
||||
//
|
||||
// Encoding Go values without a corresponding TOML representation---like map
|
||||
// types with non-string keys---will cause an error to be returned. Similarly
|
||||
// for mixed arrays/slices, arrays/slices with nil elements, embedded
|
||||
// non-struct types and nested slices containing maps or structs.
|
||||
// (e.g., [][]map[string]string is not allowed but []map[string]string is OK
|
||||
// and so is []map[string][]string.)
|
||||
func (enc *Encoder) Encode(v interface{}) error {
|
||||
rv := eindirect(reflect.ValueOf(v))
|
||||
if err := enc.safeEncode(Key([]string{}), rv); err != nil {
|
||||
return err
|
||||
}
|
||||
return enc.w.Flush()
|
||||
}
|
||||
|
||||
func (enc *Encoder) safeEncode(key Key, rv reflect.Value) (err error) {
|
||||
defer func() {
|
||||
if r := recover(); r != nil {
|
||||
if terr, ok := r.(tomlEncodeError); ok {
|
||||
err = terr.error
|
||||
return
|
||||
}
|
||||
panic(r)
|
||||
}
|
||||
}()
|
||||
enc.encode(key, rv)
|
||||
return nil
|
||||
}
|
||||
|
||||
func (enc *Encoder) encode(key Key, rv reflect.Value) {
|
||||
// Special case. Time needs to be in ISO8601 format.
|
||||
// Special case. If we can marshal the type to text, then we use that.
|
||||
// Basically, this prevents the encoder from handling these types as
|
||||
// generic structs (or whatever the underlying type of a TextMarshaler is).
|
||||
switch rv.Interface().(type) {
|
||||
case time.Time, TextMarshaler:
|
||||
enc.keyEqElement(key, rv)
|
||||
return
|
||||
}
|
||||
|
||||
k := rv.Kind()
|
||||
switch k {
|
||||
case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32,
|
||||
reflect.Int64,
|
||||
reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32,
|
||||
reflect.Uint64,
|
||||
reflect.Float32, reflect.Float64, reflect.String, reflect.Bool:
|
||||
enc.keyEqElement(key, rv)
|
||||
case reflect.Array, reflect.Slice:
|
||||
if typeEqual(tomlArrayHash, tomlTypeOfGo(rv)) {
|
||||
enc.eArrayOfTables(key, rv)
|
||||
} else {
|
||||
enc.keyEqElement(key, rv)
|
||||
}
|
||||
case reflect.Interface:
|
||||
if rv.IsNil() {
|
||||
return
|
||||
}
|
||||
enc.encode(key, rv.Elem())
|
||||
case reflect.Map:
|
||||
if rv.IsNil() {
|
||||
return
|
||||
}
|
||||
enc.eTable(key, rv)
|
||||
case reflect.Ptr:
|
||||
if rv.IsNil() {
|
||||
return
|
||||
}
|
||||
enc.encode(key, rv.Elem())
|
||||
case reflect.Struct:
|
||||
enc.eTable(key, rv)
|
||||
default:
|
||||
panic(e("Unsupported type for key '%s': %s", key, k))
|
||||
}
|
||||
}
|
||||
|
||||
// eElement encodes any value that can be an array element (primitives and
|
||||
// arrays).
|
||||
func (enc *Encoder) eElement(rv reflect.Value) {
|
||||
switch v := rv.Interface().(type) {
|
||||
case time.Time:
|
||||
// Special case time.Time as a primitive. Has to come before
|
||||
// TextMarshaler below because time.Time implements
|
||||
// encoding.TextMarshaler, but we need to always use UTC.
|
||||
enc.wf(v.In(time.FixedZone("UTC", 0)).Format("2006-01-02T15:04:05Z"))
|
||||
return
|
||||
case TextMarshaler:
|
||||
// Special case. Use text marshaler if it's available for this value.
|
||||
if s, err := v.MarshalText(); err != nil {
|
||||
encPanic(err)
|
||||
} else {
|
||||
enc.writeQuoted(string(s))
|
||||
}
|
||||
return
|
||||
}
|
||||
switch rv.Kind() {
|
||||
case reflect.Bool:
|
||||
enc.wf(strconv.FormatBool(rv.Bool()))
|
||||
case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32,
|
||||
reflect.Int64:
|
||||
enc.wf(strconv.FormatInt(rv.Int(), 10))
|
||||
case reflect.Uint, reflect.Uint8, reflect.Uint16,
|
||||
reflect.Uint32, reflect.Uint64:
|
||||
enc.wf(strconv.FormatUint(rv.Uint(), 10))
|
||||
case reflect.Float32:
|
||||
enc.wf(floatAddDecimal(strconv.FormatFloat(rv.Float(), 'f', -1, 32)))
|
||||
case reflect.Float64:
|
||||
enc.wf(floatAddDecimal(strconv.FormatFloat(rv.Float(), 'f', -1, 64)))
|
||||
case reflect.Array, reflect.Slice:
|
||||
enc.eArrayOrSliceElement(rv)
|
||||
case reflect.Interface:
|
||||
enc.eElement(rv.Elem())
|
||||
case reflect.String:
|
||||
enc.writeQuoted(rv.String())
|
||||
default:
|
||||
panic(e("Unexpected primitive type: %s", rv.Kind()))
|
||||
}
|
||||
}
|
||||
|
||||
// By the TOML spec, all floats must have a decimal with at least one
|
||||
// number on either side.
|
||||
func floatAddDecimal(fstr string) string {
|
||||
if !strings.Contains(fstr, ".") {
|
||||
return fstr + ".0"
|
||||
}
|
||||
return fstr
|
||||
}
|
||||
|
||||
func (enc *Encoder) writeQuoted(s string) {
|
||||
enc.wf("\"%s\"", quotedReplacer.Replace(s))
|
||||
}
|
||||
|
||||
func (enc *Encoder) eArrayOrSliceElement(rv reflect.Value) {
|
||||
length := rv.Len()
|
||||
enc.wf("[")
|
||||
for i := 0; i < length; i++ {
|
||||
elem := rv.Index(i)
|
||||
enc.eElement(elem)
|
||||
if i != length-1 {
|
||||
enc.wf(", ")
|
||||
}
|
||||
}
|
||||
enc.wf("]")
|
||||
}
|
||||
|
||||
func (enc *Encoder) eArrayOfTables(key Key, rv reflect.Value) {
|
||||
if len(key) == 0 {
|
||||
encPanic(errNoKey)
|
||||
}
|
||||
for i := 0; i < rv.Len(); i++ {
|
||||
trv := rv.Index(i)
|
||||
if isNil(trv) {
|
||||
continue
|
||||
}
|
||||
panicIfInvalidKey(key)
|
||||
enc.newline()
|
||||
enc.wf("%s[[%s]]", enc.indentStr(key), key.maybeQuotedAll())
|
||||
enc.newline()
|
||||
enc.eMapOrStruct(key, trv)
|
||||
}
|
||||
}
|
||||
|
||||
func (enc *Encoder) eTable(key Key, rv reflect.Value) {
|
||||
panicIfInvalidKey(key)
|
||||
if len(key) == 1 {
|
||||
// Output an extra new line between top-level tables.
|
||||
// (The newline isn't written if nothing else has been written though.)
|
||||
enc.newline()
|
||||
}
|
||||
if len(key) > 0 {
|
||||
enc.wf("%s[%s]", enc.indentStr(key), key.maybeQuotedAll())
|
||||
enc.newline()
|
||||
}
|
||||
enc.eMapOrStruct(key, rv)
|
||||
}
|
||||
|
||||
func (enc *Encoder) eMapOrStruct(key Key, rv reflect.Value) {
|
||||
switch rv := eindirect(rv); rv.Kind() {
|
||||
case reflect.Map:
|
||||
enc.eMap(key, rv)
|
||||
case reflect.Struct:
|
||||
enc.eStruct(key, rv)
|
||||
default:
|
||||
panic("eTable: unhandled reflect.Value Kind: " + rv.Kind().String())
|
||||
}
|
||||
}
|
||||
|
||||
func (enc *Encoder) eMap(key Key, rv reflect.Value) {
|
||||
rt := rv.Type()
|
||||
if rt.Key().Kind() != reflect.String {
|
||||
encPanic(errNonString)
|
||||
}
|
||||
|
||||
// Sort keys so that we have deterministic output. And write keys directly
|
||||
// underneath this key first, before writing sub-structs or sub-maps.
|
||||
var mapKeysDirect, mapKeysSub []string
|
||||
for _, mapKey := range rv.MapKeys() {
|
||||
k := mapKey.String()
|
||||
if typeIsHash(tomlTypeOfGo(rv.MapIndex(mapKey))) {
|
||||
mapKeysSub = append(mapKeysSub, k)
|
||||
} else {
|
||||
mapKeysDirect = append(mapKeysDirect, k)
|
||||
}
|
||||
}
|
||||
|
||||
var writeMapKeys = func(mapKeys []string) {
|
||||
sort.Strings(mapKeys)
|
||||
for _, mapKey := range mapKeys {
|
||||
mrv := rv.MapIndex(reflect.ValueOf(mapKey))
|
||||
if isNil(mrv) {
|
||||
// Don't write anything for nil fields.
|
||||
continue
|
||||
}
|
||||
enc.encode(key.add(mapKey), mrv)
|
||||
}
|
||||
}
|
||||
writeMapKeys(mapKeysDirect)
|
||||
writeMapKeys(mapKeysSub)
|
||||
}
|
||||
|
||||
func (enc *Encoder) eStruct(key Key, rv reflect.Value) {
|
||||
// Write keys for fields directly under this key first, because if we write
|
||||
// a field that creates a new table, then all keys under it will be in that
|
||||
// table (not the one we're writing here).
|
||||
rt := rv.Type()
|
||||
var fieldsDirect, fieldsSub [][]int
|
||||
var addFields func(rt reflect.Type, rv reflect.Value, start []int)
|
||||
addFields = func(rt reflect.Type, rv reflect.Value, start []int) {
|
||||
for i := 0; i < rt.NumField(); i++ {
|
||||
f := rt.Field(i)
|
||||
// skip unexported fields
|
||||
if f.PkgPath != "" {
|
||||
continue
|
||||
}
|
||||
frv := rv.Field(i)
|
||||
if f.Anonymous {
|
||||
frv := eindirect(frv)
|
||||
t := frv.Type()
|
||||
if t.Kind() != reflect.Struct {
|
||||
encPanic(errAnonNonStruct)
|
||||
}
|
||||
addFields(t, frv, f.Index)
|
||||
} else if typeIsHash(tomlTypeOfGo(frv)) {
|
||||
fieldsSub = append(fieldsSub, append(start, f.Index...))
|
||||
} else {
|
||||
fieldsDirect = append(fieldsDirect, append(start, f.Index...))
|
||||
}
|
||||
}
|
||||
}
|
||||
addFields(rt, rv, nil)
|
||||
|
||||
var writeFields = func(fields [][]int) {
|
||||
for _, fieldIndex := range fields {
|
||||
sft := rt.FieldByIndex(fieldIndex)
|
||||
sf := rv.FieldByIndex(fieldIndex)
|
||||
if isNil(sf) {
|
||||
// Don't write anything for nil fields.
|
||||
continue
|
||||
}
|
||||
|
||||
keyName := sft.Tag.Get("toml")
|
||||
if keyName == "-" {
|
||||
continue
|
||||
}
|
||||
if keyName == "" {
|
||||
keyName = sft.Name
|
||||
}
|
||||
|
||||
keyName, opts := getOptions(keyName)
|
||||
if _, ok := opts["omitempty"]; ok && isEmpty(sf) {
|
||||
continue
|
||||
} else if _, ok := opts["omitzero"]; ok && isZero(sf) {
|
||||
continue
|
||||
}
|
||||
|
||||
enc.encode(key.add(keyName), sf)
|
||||
}
|
||||
}
|
||||
writeFields(fieldsDirect)
|
||||
writeFields(fieldsSub)
|
||||
}
|
||||
|
||||
// tomlTypeName returns the TOML type name of the Go value's type. It is
|
||||
// used to determine whether the types of array elements are mixed (which is
|
||||
// forbidden). If the Go value is nil, then it is illegal for it to be an array
|
||||
// element, and valueIsNil is returned as true.
|
||||
|
||||
// Returns the TOML type of a Go value. The type may be `nil`, which means
|
||||
// no concrete TOML type could be found.
|
||||
func tomlTypeOfGo(rv reflect.Value) tomlType {
|
||||
if isNil(rv) || !rv.IsValid() {
|
||||
return nil
|
||||
}
|
||||
switch rv.Kind() {
|
||||
case reflect.Bool:
|
||||
return tomlBool
|
||||
case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32,
|
||||
reflect.Int64,
|
||||
reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32,
|
||||
reflect.Uint64:
|
||||
return tomlInteger
|
||||
case reflect.Float32, reflect.Float64:
|
||||
return tomlFloat
|
||||
case reflect.Array, reflect.Slice:
|
||||
if typeEqual(tomlHash, tomlArrayType(rv)) {
|
||||
return tomlArrayHash
|
||||
} else {
|
||||
return tomlArray
|
||||
}
|
||||
case reflect.Ptr, reflect.Interface:
|
||||
return tomlTypeOfGo(rv.Elem())
|
||||
case reflect.String:
|
||||
return tomlString
|
||||
case reflect.Map:
|
||||
return tomlHash
|
||||
case reflect.Struct:
|
||||
switch rv.Interface().(type) {
|
||||
case time.Time:
|
||||
return tomlDatetime
|
||||
case TextMarshaler:
|
||||
return tomlString
|
||||
default:
|
||||
return tomlHash
|
||||
}
|
||||
default:
|
||||
panic("unexpected reflect.Kind: " + rv.Kind().String())
|
||||
}
|
||||
}
|
||||
|
||||
// tomlArrayType returns the element type of a TOML array. The type returned
|
||||
// may be nil if it cannot be determined (e.g., a nil slice or a zero length
|
||||
slice). This function may also panic if it finds a type that cannot be
|
||||
// expressed in TOML (such as nil elements, heterogeneous arrays or directly
|
||||
// nested arrays of tables).
|
||||
func tomlArrayType(rv reflect.Value) tomlType {
|
||||
if isNil(rv) || !rv.IsValid() || rv.Len() == 0 {
|
||||
return nil
|
||||
}
|
||||
firstType := tomlTypeOfGo(rv.Index(0))
|
||||
if firstType == nil {
|
||||
encPanic(errArrayNilElement)
|
||||
}
|
||||
|
||||
rvlen := rv.Len()
|
||||
for i := 1; i < rvlen; i++ {
|
||||
elem := rv.Index(i)
|
||||
switch elemType := tomlTypeOfGo(elem); {
|
||||
case elemType == nil:
|
||||
encPanic(errArrayNilElement)
|
||||
case !typeEqual(firstType, elemType):
|
||||
encPanic(errArrayMixedElementTypes)
|
||||
}
|
||||
}
|
||||
// If we have a nested array, then we must make sure that the nested
|
||||
// array contains ONLY primitives.
|
||||
// This checks arbitrarily nested arrays.
|
||||
if typeEqual(firstType, tomlArray) || typeEqual(firstType, tomlArrayHash) {
|
||||
nest := tomlArrayType(eindirect(rv.Index(0)))
|
||||
if typeEqual(nest, tomlHash) || typeEqual(nest, tomlArrayHash) {
|
||||
encPanic(errArrayNoTable)
|
||||
}
|
||||
}
|
||||
return firstType
|
||||
}
|
||||
|
||||
func getOptions(keyName string) (string, map[string]struct{}) {
|
||||
opts := make(map[string]struct{})
|
||||
ss := strings.Split(keyName, ",")
|
||||
name := ss[0]
|
||||
if len(ss) > 1 {
|
||||
for _, opt := range ss {
|
||||
opts[opt] = struct{}{}
|
||||
}
|
||||
}
|
||||
|
||||
return name, opts
|
||||
}
|
||||
|
||||
func isZero(rv reflect.Value) bool {
|
||||
switch rv.Kind() {
|
||||
case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64:
|
||||
if rv.Int() == 0 {
|
||||
return true
|
||||
}
|
||||
case reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64:
|
||||
if rv.Uint() == 0 {
|
||||
return true
|
||||
}
|
||||
case reflect.Float32, reflect.Float64:
|
||||
if rv.Float() == 0.0 {
|
||||
return true
|
||||
}
|
||||
}
|
||||
|
||||
return false
|
||||
}
|
||||
|
||||
func isEmpty(rv reflect.Value) bool {
|
||||
switch rv.Kind() {
|
||||
case reflect.String:
|
||||
if len(strings.TrimSpace(rv.String())) == 0 {
|
||||
return true
|
||||
}
|
||||
case reflect.Array, reflect.Slice, reflect.Map:
|
||||
if rv.Len() == 0 {
|
||||
return true
|
||||
}
|
||||
}
|
||||
|
||||
return false
|
||||
}
|
||||
|
||||
func (enc *Encoder) newline() {
|
||||
if enc.hasWritten {
|
||||
enc.wf("\n")
|
||||
}
|
||||
}
|
||||
|
||||
func (enc *Encoder) keyEqElement(key Key, val reflect.Value) {
|
||||
if len(key) == 0 {
|
||||
encPanic(errNoKey)
|
||||
}
|
||||
panicIfInvalidKey(key)
|
||||
enc.wf("%s%s = ", enc.indentStr(key), key.maybeQuoted(len(key)-1))
|
||||
enc.eElement(val)
|
||||
enc.newline()
|
||||
}
|
||||
|
||||
func (enc *Encoder) wf(format string, v ...interface{}) {
|
||||
if _, err := fmt.Fprintf(enc.w, format, v...); err != nil {
|
||||
encPanic(err)
|
||||
}
|
||||
enc.hasWritten = true
|
||||
}
|
||||
|
||||
func (enc *Encoder) indentStr(key Key) string {
|
||||
return strings.Repeat(enc.Indent, len(key)-1)
|
||||
}
|
||||
|
||||
func encPanic(err error) {
|
||||
panic(tomlEncodeError{err})
|
||||
}
|
||||
|
||||
func eindirect(v reflect.Value) reflect.Value {
|
||||
switch v.Kind() {
|
||||
case reflect.Ptr, reflect.Interface:
|
||||
return eindirect(v.Elem())
|
||||
default:
|
||||
return v
|
||||
}
|
||||
}
|
||||
|
||||
func isNil(rv reflect.Value) bool {
|
||||
switch rv.Kind() {
|
||||
case reflect.Interface, reflect.Map, reflect.Ptr, reflect.Slice:
|
||||
return rv.IsNil()
|
||||
default:
|
||||
return false
|
||||
}
|
||||
}
|
||||
|
||||
func panicIfInvalidKey(key Key) {
|
||||
for _, k := range key {
|
||||
if len(k) == 0 {
|
||||
encPanic(e("Key '%s' is not a valid table name. Key names "+
|
||||
"cannot be empty.", key.maybeQuotedAll()))
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func isValidKeyName(s string) bool {
|
||||
return len(s) != 0
|
||||
}
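Taken together, getOptions, isZero and isEmpty drive the omitempty/omitzero struct-tag handling exercised by the tests in the next file. A minimal usage sketch against the public encoder API follows; the import path and the account type are assumptions for illustration.

```go
package main

import (
	"bytes"
	"fmt"
	"log"

	"github.com/BurntSushi/toml"
)

type account struct {
	User string `toml:"user"`
	Pass string `toml:"password,omitempty"` // dropped when empty (isEmpty)
	Hits int    `toml:"hits,omitzero"`      // dropped when zero (isZero)
}

func main() {
	var buf bytes.Buffer
	if err := toml.NewEncoder(&buf).Encode(account{User: "grafana"}); err != nil {
		log.Fatal(err)
	}
	fmt.Print(buf.String()) // only `user = "grafana"` is emitted
}
```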
|
||||
542
Godeps/_workspace/src/github.com/BurntSushi/toml/encode_test.go
generated
vendored
Normal file
@@ -0,0 +1,542 @@
|
||||
package toml
|
||||
|
||||
import (
|
||||
"bytes"
|
||||
"fmt"
|
||||
"log"
|
||||
"net"
|
||||
"testing"
|
||||
"time"
|
||||
)
|
||||
|
||||
func TestEncodeRoundTrip(t *testing.T) {
|
||||
type Config struct {
|
||||
Age int
|
||||
Cats []string
|
||||
Pi float64
|
||||
Perfection []int
|
||||
DOB time.Time
|
||||
Ipaddress net.IP
|
||||
}
|
||||
|
||||
var inputs = Config{
|
||||
13,
|
||||
[]string{"one", "two", "three"},
|
||||
3.145,
|
||||
[]int{11, 2, 3, 4},
|
||||
time.Now(),
|
||||
net.ParseIP("192.168.59.254"),
|
||||
}
|
||||
|
||||
var firstBuffer bytes.Buffer
|
||||
e := NewEncoder(&firstBuffer)
|
||||
err := e.Encode(inputs)
|
||||
if err != nil {
|
||||
t.Fatal(err)
|
||||
}
|
||||
var outputs Config
|
||||
if _, err := Decode(firstBuffer.String(), &outputs); err != nil {
|
||||
log.Printf("Could not decode:\n-----\n%s\n-----\n",
|
||||
firstBuffer.String())
|
||||
t.Fatal(err)
|
||||
}
|
||||
|
||||
// could test each value individually, but I'm lazy
|
||||
var secondBuffer bytes.Buffer
|
||||
e2 := NewEncoder(&secondBuffer)
|
||||
err = e2.Encode(outputs)
|
||||
if err != nil {
|
||||
t.Fatal(err)
|
||||
}
|
||||
if firstBuffer.String() != secondBuffer.String() {
|
||||
t.Error(
|
||||
firstBuffer.String(),
|
||||
"\n\n is not identical to\n\n",
|
||||
secondBuffer.String())
|
||||
}
|
||||
}
|
||||
|
||||
// XXX(burntsushi)
|
||||
// I think these tests probably should be removed. They are good, but they
|
||||
// ought to be obsolete by toml-test.
|
||||
func TestEncode(t *testing.T) {
|
||||
type Embedded struct {
|
||||
Int int `toml:"_int"`
|
||||
}
|
||||
type NonStruct int
|
||||
|
||||
date := time.Date(2014, 5, 11, 20, 30, 40, 0, time.FixedZone("IST", 3600))
|
||||
dateStr := "2014-05-11T19:30:40Z"
|
||||
|
||||
tests := map[string]struct {
|
||||
input interface{}
|
||||
wantOutput string
|
||||
wantError error
|
||||
}{
|
||||
"bool field": {
|
||||
input: struct {
|
||||
BoolTrue bool
|
||||
BoolFalse bool
|
||||
}{true, false},
|
||||
wantOutput: "BoolTrue = true\nBoolFalse = false\n",
|
||||
},
|
||||
"int fields": {
|
||||
input: struct {
|
||||
Int int
|
||||
Int8 int8
|
||||
Int16 int16
|
||||
Int32 int32
|
||||
Int64 int64
|
||||
}{1, 2, 3, 4, 5},
|
||||
wantOutput: "Int = 1\nInt8 = 2\nInt16 = 3\nInt32 = 4\nInt64 = 5\n",
|
||||
},
|
||||
"uint fields": {
|
||||
input: struct {
|
||||
Uint uint
|
||||
Uint8 uint8
|
||||
Uint16 uint16
|
||||
Uint32 uint32
|
||||
Uint64 uint64
|
||||
}{1, 2, 3, 4, 5},
|
||||
wantOutput: "Uint = 1\nUint8 = 2\nUint16 = 3\nUint32 = 4" +
|
||||
"\nUint64 = 5\n",
|
||||
},
|
||||
"float fields": {
|
||||
input: struct {
|
||||
Float32 float32
|
||||
Float64 float64
|
||||
}{1.5, 2.5},
|
||||
wantOutput: "Float32 = 1.5\nFloat64 = 2.5\n",
|
||||
},
|
||||
"string field": {
|
||||
input: struct{ String string }{"foo"},
|
||||
wantOutput: "String = \"foo\"\n",
|
||||
},
|
||||
"string field and unexported field": {
|
||||
input: struct {
|
||||
String string
|
||||
unexported int
|
||||
}{"foo", 0},
|
||||
wantOutput: "String = \"foo\"\n",
|
||||
},
|
||||
"datetime field in UTC": {
|
||||
input: struct{ Date time.Time }{date},
|
||||
wantOutput: fmt.Sprintf("Date = %s\n", dateStr),
|
||||
},
|
||||
"datetime field as primitive": {
|
||||
// Using a map here to fail if isStructOrMap() returns true for
|
||||
// time.Time.
|
||||
input: map[string]interface{}{
|
||||
"Date": date,
|
||||
"Int": 1,
|
||||
},
|
||||
wantOutput: fmt.Sprintf("Date = %s\nInt = 1\n", dateStr),
|
||||
},
|
||||
"array fields": {
|
||||
input: struct {
|
||||
IntArray0 [0]int
|
||||
IntArray3 [3]int
|
||||
}{[0]int{}, [3]int{1, 2, 3}},
|
||||
wantOutput: "IntArray0 = []\nIntArray3 = [1, 2, 3]\n",
|
||||
},
|
||||
"slice fields": {
|
||||
input: struct{ IntSliceNil, IntSlice0, IntSlice3 []int }{
|
||||
nil, []int{}, []int{1, 2, 3},
|
||||
},
|
||||
wantOutput: "IntSlice0 = []\nIntSlice3 = [1, 2, 3]\n",
|
||||
},
|
||||
"datetime slices": {
|
||||
input: struct{ DatetimeSlice []time.Time }{
|
||||
[]time.Time{date, date},
|
||||
},
|
||||
wantOutput: fmt.Sprintf("DatetimeSlice = [%s, %s]\n",
|
||||
dateStr, dateStr),
|
||||
},
|
||||
"nested arrays and slices": {
|
||||
input: struct {
|
||||
SliceOfArrays [][2]int
|
||||
ArrayOfSlices [2][]int
|
||||
SliceOfArraysOfSlices [][2][]int
|
||||
ArrayOfSlicesOfArrays [2][][2]int
|
||||
SliceOfMixedArrays [][2]interface{}
|
||||
ArrayOfMixedSlices [2][]interface{}
|
||||
}{
|
||||
[][2]int{{1, 2}, {3, 4}},
|
||||
[2][]int{{1, 2}, {3, 4}},
|
||||
[][2][]int{
|
||||
{
|
||||
{1, 2}, {3, 4},
|
||||
},
|
||||
{
|
||||
{5, 6}, {7, 8},
|
||||
},
|
||||
},
|
||||
[2][][2]int{
|
||||
{
|
||||
{1, 2}, {3, 4},
|
||||
},
|
||||
{
|
||||
{5, 6}, {7, 8},
|
||||
},
|
||||
},
|
||||
[][2]interface{}{
|
||||
{1, 2}, {"a", "b"},
|
||||
},
|
||||
[2][]interface{}{
|
||||
{1, 2}, {"a", "b"},
|
||||
},
|
||||
},
|
||||
wantOutput: `SliceOfArrays = [[1, 2], [3, 4]]
|
||||
ArrayOfSlices = [[1, 2], [3, 4]]
|
||||
SliceOfArraysOfSlices = [[[1, 2], [3, 4]], [[5, 6], [7, 8]]]
|
||||
ArrayOfSlicesOfArrays = [[[1, 2], [3, 4]], [[5, 6], [7, 8]]]
|
||||
SliceOfMixedArrays = [[1, 2], ["a", "b"]]
|
||||
ArrayOfMixedSlices = [[1, 2], ["a", "b"]]
|
||||
`,
|
||||
},
|
||||
"empty slice": {
|
||||
input: struct{ Empty []interface{} }{[]interface{}{}},
|
||||
wantOutput: "Empty = []\n",
|
||||
},
|
||||
"(error) slice with element type mismatch (string and integer)": {
|
||||
input: struct{ Mixed []interface{} }{[]interface{}{1, "a"}},
|
||||
wantError: errArrayMixedElementTypes,
|
||||
},
|
||||
"(error) slice with element type mismatch (integer and float)": {
|
||||
input: struct{ Mixed []interface{} }{[]interface{}{1, 2.5}},
|
||||
wantError: errArrayMixedElementTypes,
|
||||
},
|
||||
"slice with elems of differing Go types, same TOML types": {
|
||||
input: struct {
|
||||
MixedInts []interface{}
|
||||
MixedFloats []interface{}
|
||||
}{
|
||||
[]interface{}{
|
||||
int(1), int8(2), int16(3), int32(4), int64(5),
|
||||
uint(1), uint8(2), uint16(3), uint32(4), uint64(5),
|
||||
},
|
||||
[]interface{}{float32(1.5), float64(2.5)},
|
||||
},
|
||||
wantOutput: "MixedInts = [1, 2, 3, 4, 5, 1, 2, 3, 4, 5]\n" +
|
||||
"MixedFloats = [1.5, 2.5]\n",
|
||||
},
|
||||
"(error) slice w/ element type mismatch (one is nested array)": {
|
||||
input: struct{ Mixed []interface{} }{
|
||||
[]interface{}{1, []interface{}{2}},
|
||||
},
|
||||
wantError: errArrayMixedElementTypes,
|
||||
},
|
||||
"(error) slice with 1 nil element": {
|
||||
input: struct{ NilElement1 []interface{} }{[]interface{}{nil}},
|
||||
wantError: errArrayNilElement,
|
||||
},
|
||||
"(error) slice with 1 nil element (and other non-nil elements)": {
|
||||
input: struct{ NilElement []interface{} }{
|
||||
[]interface{}{1, nil},
|
||||
},
|
||||
wantError: errArrayNilElement,
|
||||
},
|
||||
"simple map": {
|
||||
input: map[string]int{"a": 1, "b": 2},
|
||||
wantOutput: "a = 1\nb = 2\n",
|
||||
},
|
||||
"map with interface{} value type": {
|
||||
input: map[string]interface{}{"a": 1, "b": "c"},
|
||||
wantOutput: "a = 1\nb = \"c\"\n",
|
||||
},
|
||||
"map with interface{} value type, some of which are structs": {
|
||||
input: map[string]interface{}{
|
||||
"a": struct{ Int int }{2},
|
||||
"b": 1,
|
||||
},
|
||||
wantOutput: "b = 1\n\n[a]\n Int = 2\n",
|
||||
},
|
||||
"nested map": {
|
||||
input: map[string]map[string]int{
|
||||
"a": {"b": 1},
|
||||
"c": {"d": 2},
|
||||
},
|
||||
wantOutput: "[a]\n b = 1\n\n[c]\n d = 2\n",
|
||||
},
|
||||
"nested struct": {
|
||||
input: struct{ Struct struct{ Int int } }{
|
||||
struct{ Int int }{1},
|
||||
},
|
||||
wantOutput: "[Struct]\n Int = 1\n",
|
||||
},
|
||||
"nested struct and non-struct field": {
|
||||
input: struct {
|
||||
Struct struct{ Int int }
|
||||
Bool bool
|
||||
}{struct{ Int int }{1}, true},
|
||||
wantOutput: "Bool = true\n\n[Struct]\n Int = 1\n",
|
||||
},
|
||||
"2 nested structs": {
|
||||
input: struct{ Struct1, Struct2 struct{ Int int } }{
|
||||
struct{ Int int }{1}, struct{ Int int }{2},
|
||||
},
|
||||
wantOutput: "[Struct1]\n Int = 1\n\n[Struct2]\n Int = 2\n",
|
||||
},
|
||||
"deeply nested structs": {
|
||||
input: struct {
|
||||
Struct1, Struct2 struct{ Struct3 *struct{ Int int } }
|
||||
}{
|
||||
struct{ Struct3 *struct{ Int int } }{&struct{ Int int }{1}},
|
||||
struct{ Struct3 *struct{ Int int } }{nil},
|
||||
},
|
||||
wantOutput: "[Struct1]\n [Struct1.Struct3]\n Int = 1" +
|
||||
"\n\n[Struct2]\n",
|
||||
},
|
||||
"nested struct with nil struct elem": {
|
||||
input: struct {
|
||||
Struct struct{ Inner *struct{ Int int } }
|
||||
}{
|
||||
struct{ Inner *struct{ Int int } }{nil},
|
||||
},
|
||||
wantOutput: "[Struct]\n",
|
||||
},
|
||||
"nested struct with no fields": {
|
||||
input: struct {
|
||||
Struct struct{ Inner struct{} }
|
||||
}{
|
||||
struct{ Inner struct{} }{struct{}{}},
|
||||
},
|
||||
wantOutput: "[Struct]\n [Struct.Inner]\n",
|
||||
},
|
||||
"struct with tags": {
|
||||
input: struct {
|
||||
Struct struct {
|
||||
Int int `toml:"_int"`
|
||||
} `toml:"_struct"`
|
||||
Bool bool `toml:"_bool"`
|
||||
}{
|
||||
struct {
|
||||
Int int `toml:"_int"`
|
||||
}{1}, true,
|
||||
},
|
||||
wantOutput: "_bool = true\n\n[_struct]\n _int = 1\n",
|
||||
},
|
||||
"embedded struct": {
|
||||
input: struct{ Embedded }{Embedded{1}},
|
||||
wantOutput: "_int = 1\n",
|
||||
},
|
||||
"embedded *struct": {
|
||||
input: struct{ *Embedded }{&Embedded{1}},
|
||||
wantOutput: "_int = 1\n",
|
||||
},
|
||||
"nested embedded struct": {
|
||||
input: struct {
|
||||
Struct struct{ Embedded } `toml:"_struct"`
|
||||
}{struct{ Embedded }{Embedded{1}}},
|
||||
wantOutput: "[_struct]\n _int = 1\n",
|
||||
},
|
||||
"nested embedded *struct": {
|
||||
input: struct {
|
||||
Struct struct{ *Embedded } `toml:"_struct"`
|
||||
}{struct{ *Embedded }{&Embedded{1}}},
|
||||
wantOutput: "[_struct]\n _int = 1\n",
|
||||
},
|
||||
"array of tables": {
|
||||
input: struct {
|
||||
Structs []*struct{ Int int } `toml:"struct"`
|
||||
}{
|
||||
[]*struct{ Int int }{{1}, {3}},
|
||||
},
|
||||
wantOutput: "[[struct]]\n Int = 1\n\n[[struct]]\n Int = 3\n",
|
||||
},
|
||||
"array of tables order": {
|
||||
input: map[string]interface{}{
|
||||
"map": map[string]interface{}{
|
||||
"zero": 5,
|
||||
"arr": []map[string]int{
|
||||
map[string]int{
|
||||
"friend": 5,
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
wantOutput: "[map]\n zero = 5\n\n [[map.arr]]\n friend = 5\n",
|
||||
},
|
||||
"(error) top-level slice": {
|
||||
input: []struct{ Int int }{{1}, {2}, {3}},
|
||||
wantError: errNoKey,
|
||||
},
|
||||
"(error) slice of slice": {
|
||||
input: struct {
|
||||
Slices [][]struct{ Int int }
|
||||
}{
|
||||
[][]struct{ Int int }{{{1}}, {{2}}, {{3}}},
|
||||
},
|
||||
wantError: errArrayNoTable,
|
||||
},
|
||||
"(error) map no string key": {
|
||||
input: map[int]string{1: ""},
|
||||
wantError: errNonString,
|
||||
},
|
||||
"(error) anonymous non-struct": {
|
||||
input: struct{ NonStruct }{5},
|
||||
wantError: errAnonNonStruct,
|
||||
},
|
||||
"(error) empty key name": {
|
||||
input: map[string]int{"": 1},
|
||||
wantError: errAnything,
|
||||
},
|
||||
"(error) empty map name": {
|
||||
input: map[string]interface{}{
|
||||
"": map[string]int{"v": 1},
|
||||
},
|
||||
wantError: errAnything,
|
||||
},
|
||||
}
|
||||
for label, test := range tests {
|
||||
encodeExpected(t, label, test.input, test.wantOutput, test.wantError)
|
||||
}
|
||||
}
|
||||
|
||||
func TestEncodeNestedTableArrays(t *testing.T) {
|
||||
type song struct {
|
||||
Name string `toml:"name"`
|
||||
}
|
||||
type album struct {
|
||||
Name string `toml:"name"`
|
||||
Songs []song `toml:"songs"`
|
||||
}
|
||||
type springsteen struct {
|
||||
Albums []album `toml:"albums"`
|
||||
}
|
||||
value := springsteen{
|
||||
[]album{
|
||||
{"Born to Run",
|
||||
[]song{{"Jungleland"}, {"Meeting Across the River"}}},
|
||||
{"Born in the USA",
|
||||
[]song{{"Glory Days"}, {"Dancing in the Dark"}}},
|
||||
},
|
||||
}
|
||||
expected := `[[albums]]
|
||||
name = "Born to Run"
|
||||
|
||||
[[albums.songs]]
|
||||
name = "Jungleland"
|
||||
|
||||
[[albums.songs]]
|
||||
name = "Meeting Across the River"
|
||||
|
||||
[[albums]]
|
||||
name = "Born in the USA"
|
||||
|
||||
[[albums.songs]]
|
||||
name = "Glory Days"
|
||||
|
||||
[[albums.songs]]
|
||||
name = "Dancing in the Dark"
|
||||
`
|
||||
encodeExpected(t, "nested table arrays", value, expected, nil)
|
||||
}
|
||||
|
||||
func TestEncodeArrayHashWithNormalHashOrder(t *testing.T) {
|
||||
type Alpha struct {
|
||||
V int
|
||||
}
|
||||
type Beta struct {
|
||||
V int
|
||||
}
|
||||
type Conf struct {
|
||||
V int
|
||||
A Alpha
|
||||
B []Beta
|
||||
}
|
||||
|
||||
val := Conf{
|
||||
V: 1,
|
||||
A: Alpha{2},
|
||||
B: []Beta{{3}},
|
||||
}
|
||||
expected := "V = 1\n\n[A]\n V = 2\n\n[[B]]\n V = 3\n"
|
||||
encodeExpected(t, "array hash with normal hash order", val, expected, nil)
|
||||
}
|
||||
|
||||
func TestEncodeWithOmitEmpty(t *testing.T) {
|
||||
type simple struct {
|
||||
User string `toml:"user"`
|
||||
Pass string `toml:"password,omitempty"`
|
||||
}
|
||||
|
||||
value := simple{"Testing", ""}
|
||||
expected := fmt.Sprintf("user = %q\n", value.User)
|
||||
encodeExpected(t, "simple with omitempty, is empty", value, expected, nil)
|
||||
value.Pass = "some password"
|
||||
expected = fmt.Sprintf("user = %q\npassword = %q\n", value.User, value.Pass)
|
||||
encodeExpected(t, "simple with omitempty, not empty", value, expected, nil)
|
||||
}
|
||||
|
||||
func TestEncodeWithOmitZero(t *testing.T) {
|
||||
type simple struct {
|
||||
Number int `toml:"number,omitzero"`
|
||||
Real float64 `toml:"real,omitzero"`
|
||||
Unsigned uint `toml:"unsigned,omitzero"`
|
||||
}
|
||||
|
||||
value := simple{0, 0.0, uint(0)}
|
||||
expected := ""
|
||||
|
||||
encodeExpected(t, "simple with omitzero, all zero", value, expected, nil)
|
||||
|
||||
value.Number = 10
|
||||
value.Real = 20
|
||||
value.Unsigned = 5
|
||||
expected = `number = 10
|
||||
real = 20.0
|
||||
unsigned = 5
|
||||
`
|
||||
encodeExpected(t, "simple with omitzero, non-zero", value, expected, nil)
|
||||
}
|
||||
|
||||
func encodeExpected(
|
||||
t *testing.T, label string, val interface{}, wantStr string, wantErr error,
|
||||
) {
|
||||
var buf bytes.Buffer
|
||||
enc := NewEncoder(&buf)
|
||||
err := enc.Encode(val)
|
||||
if err != wantErr {
|
||||
if wantErr != nil {
|
||||
if wantErr == errAnything && err != nil {
|
||||
return
|
||||
}
|
||||
t.Errorf("%s: want Encode error %v, got %v", label, wantErr, err)
|
||||
} else {
|
||||
t.Errorf("%s: Encode failed: %s", label, err)
|
||||
}
|
||||
}
|
||||
if err != nil {
|
||||
return
|
||||
}
|
||||
if got := buf.String(); wantStr != got {
|
||||
t.Errorf("%s: want\n-----\n%q\n-----\nbut got\n-----\n%q\n-----\n",
|
||||
label, wantStr, got)
|
||||
}
|
||||
}
|
||||
|
||||
func ExampleEncoder_Encode() {
|
||||
date, _ := time.Parse(time.RFC822, "14 Mar 10 18:00 UTC")
|
||||
var config = map[string]interface{}{
|
||||
"date": date,
|
||||
"counts": []int{1, 1, 2, 3, 5, 8},
|
||||
"hash": map[string]string{
|
||||
"key1": "val1",
|
||||
"key2": "val2",
|
||||
},
|
||||
}
|
||||
buf := new(bytes.Buffer)
|
||||
if err := NewEncoder(buf).Encode(config); err != nil {
|
||||
log.Fatal(err)
|
||||
}
|
||||
fmt.Println(buf.String())
|
||||
|
||||
// Output:
|
||||
// counts = [1, 1, 2, 3, 5, 8]
|
||||
// date = 2010-03-14T18:00:00Z
|
||||
//
|
||||
// [hash]
|
||||
// key1 = "val1"
|
||||
// key2 = "val2"
|
||||
}
|
||||
874
Godeps/_workspace/src/github.com/BurntSushi/toml/lex.go
generated
vendored
Normal file
@@ -0,0 +1,874 @@
|
||||
package toml
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"strings"
|
||||
"unicode/utf8"
|
||||
)
|
||||
|
||||
type itemType int
|
||||
|
||||
const (
|
||||
itemError itemType = iota
|
||||
itemNIL // used in the parser to indicate no type
|
||||
itemEOF
|
||||
itemText
|
||||
itemString
|
||||
itemRawString
|
||||
itemMultilineString
|
||||
itemRawMultilineString
|
||||
itemBool
|
||||
itemInteger
|
||||
itemFloat
|
||||
itemDatetime
|
||||
itemArray // the start of an array
|
||||
itemArrayEnd
|
||||
itemTableStart
|
||||
itemTableEnd
|
||||
itemArrayTableStart
|
||||
itemArrayTableEnd
|
||||
itemKeyStart
|
||||
itemCommentStart
|
||||
)
|
||||
|
||||
const (
|
||||
eof = 0
|
||||
tableStart = '['
|
||||
tableEnd = ']'
|
||||
arrayTableStart = '['
|
||||
arrayTableEnd = ']'
|
||||
tableSep = '.'
|
||||
keySep = '='
|
||||
arrayStart = '['
|
||||
arrayEnd = ']'
|
||||
arrayValTerm = ','
|
||||
commentStart = '#'
|
||||
stringStart = '"'
|
||||
stringEnd = '"'
|
||||
rawStringStart = '\''
|
||||
rawStringEnd = '\''
|
||||
)
|
||||
|
||||
type stateFn func(lx *lexer) stateFn
|
||||
|
||||
type lexer struct {
|
||||
input string
|
||||
start int
|
||||
pos int
|
||||
width int
|
||||
line int
|
||||
state stateFn
|
||||
items chan item
|
||||
|
||||
// A stack of state functions used to maintain context.
|
||||
// The idea is to reuse parts of the state machine in various places.
|
||||
// For example, values can appear at the top level or within arbitrarily
|
||||
// nested arrays. The last state on the stack is used after a value has
|
||||
// been lexed. Similarly for comments.
|
||||
stack []stateFn
|
||||
}
|
||||
|
||||
type item struct {
|
||||
typ itemType
|
||||
val string
|
||||
line int
|
||||
}
|
||||
|
||||
func (lx *lexer) nextItem() item {
|
||||
for {
|
||||
select {
|
||||
case item := <-lx.items:
|
||||
return item
|
||||
default:
|
||||
lx.state = lx.state(lx)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func lex(input string) *lexer {
|
||||
lx := &lexer{
|
||||
input: input + "\n",
|
||||
state: lexTop,
|
||||
line: 1,
|
||||
items: make(chan item, 10),
|
||||
stack: make([]stateFn, 0, 10),
|
||||
}
|
||||
return lx
|
||||
}
|
||||
|
||||
func (lx *lexer) push(state stateFn) {
|
||||
lx.stack = append(lx.stack, state)
|
||||
}
|
||||
|
||||
func (lx *lexer) pop() stateFn {
|
||||
if len(lx.stack) == 0 {
|
||||
return lx.errorf("BUG in lexer: no states to pop.")
|
||||
}
|
||||
last := lx.stack[len(lx.stack)-1]
|
||||
lx.stack = lx.stack[0 : len(lx.stack)-1]
|
||||
return last
|
||||
}
|
||||
|
||||
func (lx *lexer) current() string {
|
||||
return lx.input[lx.start:lx.pos]
|
||||
}
|
||||
|
||||
func (lx *lexer) emit(typ itemType) {
|
||||
lx.items <- item{typ, lx.current(), lx.line}
|
||||
lx.start = lx.pos
|
||||
}
|
||||
|
||||
func (lx *lexer) emitTrim(typ itemType) {
|
||||
lx.items <- item{typ, strings.TrimSpace(lx.current()), lx.line}
|
||||
lx.start = lx.pos
|
||||
}
|
||||
|
||||
func (lx *lexer) next() (r rune) {
|
||||
if lx.pos >= len(lx.input) {
|
||||
lx.width = 0
|
||||
return eof
|
||||
}
|
||||
|
||||
if lx.input[lx.pos] == '\n' {
|
||||
lx.line++
|
||||
}
|
||||
r, lx.width = utf8.DecodeRuneInString(lx.input[lx.pos:])
|
||||
lx.pos += lx.width
|
||||
return r
|
||||
}
|
||||
|
||||
// ignore skips over the pending input before this point.
|
||||
func (lx *lexer) ignore() {
|
||||
lx.start = lx.pos
|
||||
}
|
||||
|
||||
// backup steps back one rune. Can be called only once per call of next.
|
||||
func (lx *lexer) backup() {
|
||||
lx.pos -= lx.width
|
||||
if lx.pos < len(lx.input) && lx.input[lx.pos] == '\n' {
|
||||
lx.line--
|
||||
}
|
||||
}
|
||||
|
||||
// accept consumes the next rune if it's equal to `valid`.
|
||||
func (lx *lexer) accept(valid rune) bool {
|
||||
if lx.next() == valid {
|
||||
return true
|
||||
}
|
||||
lx.backup()
|
||||
return false
|
||||
}
|
||||
|
||||
// peek returns but does not consume the next rune in the input.
|
||||
func (lx *lexer) peek() rune {
|
||||
r := lx.next()
|
||||
lx.backup()
|
||||
return r
|
||||
}
|
||||
|
||||
// errorf stops all lexing by emitting an error and returning `nil`.
|
||||
// Note that any value that is a character is escaped if it's a special
|
||||
// character (new lines, tabs, etc.).
|
||||
func (lx *lexer) errorf(format string, values ...interface{}) stateFn {
|
||||
lx.items <- item{
|
||||
itemError,
|
||||
fmt.Sprintf(format, values...),
|
||||
lx.line,
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
// lexTop consumes elements at the top level of TOML data.
|
||||
func lexTop(lx *lexer) stateFn {
|
||||
r := lx.next()
|
||||
if isWhitespace(r) || isNL(r) {
|
||||
return lexSkip(lx, lexTop)
|
||||
}
|
||||
|
||||
switch r {
|
||||
case commentStart:
|
||||
lx.push(lexTop)
|
||||
return lexCommentStart
|
||||
case tableStart:
|
||||
return lexTableStart
|
||||
case eof:
|
||||
if lx.pos > lx.start {
|
||||
return lx.errorf("Unexpected EOF.")
|
||||
}
|
||||
lx.emit(itemEOF)
|
||||
return nil
|
||||
}
|
||||
|
||||
// At this point, the only valid item can be a key, so we back up
|
||||
// and let the key lexer do the rest.
|
||||
lx.backup()
|
||||
lx.push(lexTopEnd)
|
||||
return lexKeyStart
|
||||
}
|
||||
|
||||
// lexTopEnd is entered whenever a top-level item has been consumed. (A value
|
||||
// or a table.) It must see only whitespace, and will turn back to lexTop
|
||||
// upon a new line. If it sees EOF, it will quit the lexer successfully.
|
||||
func lexTopEnd(lx *lexer) stateFn {
|
||||
r := lx.next()
|
||||
switch {
|
||||
case r == commentStart:
|
||||
// a comment will read to a new line for us.
|
||||
lx.push(lexTop)
|
||||
return lexCommentStart
|
||||
case isWhitespace(r):
|
||||
return lexTopEnd
|
||||
case isNL(r):
|
||||
lx.ignore()
|
||||
return lexTop
|
||||
case r == eof:
|
||||
lx.ignore()
|
||||
return lexTop
|
||||
}
|
||||
return lx.errorf("Expected a top-level item to end with a new line, "+
|
||||
"comment or EOF, but got %q instead.", r)
|
||||
}
|
||||
|
||||
// lexTable lexes the beginning of a table. Namely, it makes sure that
|
||||
// it starts with a character other than '.' and ']'.
|
||||
// It assumes that '[' has already been consumed.
|
||||
// It also handles the case that this is an item in an array of tables.
|
||||
// e.g., '[[name]]'.
|
||||
func lexTableStart(lx *lexer) stateFn {
|
||||
if lx.peek() == arrayTableStart {
|
||||
lx.next()
|
||||
lx.emit(itemArrayTableStart)
|
||||
lx.push(lexArrayTableEnd)
|
||||
} else {
|
||||
lx.emit(itemTableStart)
|
||||
lx.push(lexTableEnd)
|
||||
}
|
||||
return lexTableNameStart
|
||||
}
|
||||
|
||||
func lexTableEnd(lx *lexer) stateFn {
|
||||
lx.emit(itemTableEnd)
|
||||
return lexTopEnd
|
||||
}
|
||||
|
||||
func lexArrayTableEnd(lx *lexer) stateFn {
|
||||
if r := lx.next(); r != arrayTableEnd {
|
||||
return lx.errorf("Expected end of table array name delimiter %q, "+
|
||||
"but got %q instead.", arrayTableEnd, r)
|
||||
}
|
||||
lx.emit(itemArrayTableEnd)
|
||||
return lexTopEnd
|
||||
}
|
||||
|
||||
func lexTableNameStart(lx *lexer) stateFn {
|
||||
switch r := lx.peek(); {
|
||||
case r == tableEnd || r == eof:
|
||||
return lx.errorf("Unexpected end of table name. (Table names cannot " +
|
||||
"be empty.)")
|
||||
case r == tableSep:
|
||||
return lx.errorf("Unexpected table separator. (Table names cannot " +
|
||||
"be empty.)")
|
||||
case r == stringStart || r == rawStringStart:
|
||||
lx.ignore()
|
||||
lx.push(lexTableNameEnd)
|
||||
return lexValue // reuse string lexing
|
||||
case isWhitespace(r):
|
||||
return lexTableNameStart
|
||||
default:
|
||||
return lexBareTableName
|
||||
}
|
||||
}
|
||||
|
||||
// lexBareTableName lexes the name of a bare table. It assumes that the first
// character of the table name has not yet been consumed.
|
||||
func lexBareTableName(lx *lexer) stateFn {
|
||||
switch r := lx.next(); {
|
||||
case isBareKeyChar(r):
|
||||
return lexBareTableName
|
||||
case r == tableSep || r == tableEnd:
|
||||
lx.backup()
|
||||
lx.emitTrim(itemText)
|
||||
return lexTableNameEnd
|
||||
default:
|
||||
return lx.errorf("Bare keys cannot contain %q.", r)
|
||||
}
|
||||
}
|
||||
|
||||
// lexTableNameEnd reads the end of a piece of a table name, optionally
|
||||
// consuming whitespace.
|
||||
func lexTableNameEnd(lx *lexer) stateFn {
|
||||
switch r := lx.next(); {
|
||||
case isWhitespace(r):
|
||||
return lexTableNameEnd
|
||||
case r == tableSep:
|
||||
lx.ignore()
|
||||
return lexTableNameStart
|
||||
case r == tableEnd:
|
||||
return lx.pop()
|
||||
default:
|
||||
return lx.errorf("Expected '.' or ']' to end table name, but got %q "+
|
||||
"instead.", r)
|
||||
}
|
||||
}
|
||||
|
||||
// lexKeyStart consumes a key name up until the first non-whitespace character.
|
||||
// lexKeyStart will ignore whitespace.
|
||||
func lexKeyStart(lx *lexer) stateFn {
|
||||
r := lx.peek()
|
||||
switch {
|
||||
case r == keySep:
|
||||
return lx.errorf("Unexpected key separator %q.", keySep)
|
||||
case isWhitespace(r) || isNL(r):
|
||||
lx.next()
|
||||
return lexSkip(lx, lexKeyStart)
|
||||
case r == stringStart || r == rawStringStart:
|
||||
lx.ignore()
|
||||
lx.emit(itemKeyStart)
|
||||
lx.push(lexKeyEnd)
|
||||
return lexValue // reuse string lexing
|
||||
default:
|
||||
lx.ignore()
|
||||
lx.emit(itemKeyStart)
|
||||
return lexBareKey
|
||||
}
|
||||
}
|
||||
|
||||
// lexBareKey consumes the text of a bare key. Assumes that the first character
|
||||
// (which is not whitespace) has not yet been consumed.
|
||||
func lexBareKey(lx *lexer) stateFn {
|
||||
switch r := lx.next(); {
|
||||
case isBareKeyChar(r):
|
||||
return lexBareKey
|
||||
case isWhitespace(r):
|
||||
lx.emitTrim(itemText)
|
||||
return lexKeyEnd
|
||||
case r == keySep:
|
||||
lx.backup()
|
||||
lx.emitTrim(itemText)
|
||||
return lexKeyEnd
|
||||
default:
|
||||
return lx.errorf("Bare keys cannot contain %q.", r)
|
||||
}
|
||||
}
|
||||
|
||||
// lexKeyEnd consumes the end of a key and trims whitespace (up to the key
|
||||
// separator).
|
||||
func lexKeyEnd(lx *lexer) stateFn {
|
||||
switch r := lx.next(); {
|
||||
case r == keySep:
|
||||
return lexSkip(lx, lexValue)
|
||||
case isWhitespace(r):
|
||||
return lexSkip(lx, lexKeyEnd)
|
||||
default:
|
||||
return lx.errorf("Expected key separator %q, but got %q instead.",
|
||||
keySep, r)
|
||||
}
|
||||
}
|
||||
|
||||
// lexValue starts the consumption of a value anywhere a value is expected.
|
||||
// lexValue will ignore whitespace.
|
||||
// After a value is lexed, the last state on the stack is popped and returned.
|
||||
func lexValue(lx *lexer) stateFn {
|
||||
// We allow whitespace to precede a value, but NOT new lines.
|
||||
// In array syntax, the array states are responsible for ignoring new
|
||||
// lines.
|
||||
r := lx.next()
|
||||
if isWhitespace(r) {
|
||||
return lexSkip(lx, lexValue)
|
||||
}
|
||||
|
||||
switch {
|
||||
case r == arrayStart:
|
||||
lx.ignore()
|
||||
lx.emit(itemArray)
|
||||
return lexArrayValue
|
||||
case r == stringStart:
|
||||
if lx.accept(stringStart) {
|
||||
if lx.accept(stringStart) {
|
||||
lx.ignore() // Ignore """
|
||||
return lexMultilineString
|
||||
}
|
||||
lx.backup()
|
||||
}
|
||||
lx.ignore() // ignore the '"'
|
||||
return lexString
|
||||
case r == rawStringStart:
|
||||
if lx.accept(rawStringStart) {
|
||||
if lx.accept(rawStringStart) {
|
||||
lx.ignore() // Ignore """
|
||||
return lexMultilineRawString
|
||||
}
|
||||
lx.backup()
|
||||
}
|
||||
lx.ignore() // ignore the "'"
|
||||
return lexRawString
|
||||
case r == 't':
|
||||
return lexTrue
|
||||
case r == 'f':
|
||||
return lexFalse
|
||||
case r == '-':
|
||||
return lexNumberStart
|
||||
case isDigit(r):
|
||||
lx.backup() // avoid an extra state and use the same as above
|
||||
return lexNumberOrDateStart
|
||||
case r == '.': // special error case, be kind to users
|
||||
return lx.errorf("Floats must start with a digit, not '.'.")
|
||||
}
|
||||
return lx.errorf("Expected value but found %q instead.", r)
|
||||
}
|
||||
|
||||
// lexArrayValue consumes one value in an array. It assumes that '[' or ','
|
||||
// have already been consumed. All whitespace and new lines are ignored.
|
||||
func lexArrayValue(lx *lexer) stateFn {
|
||||
r := lx.next()
|
||||
switch {
|
||||
case isWhitespace(r) || isNL(r):
|
||||
return lexSkip(lx, lexArrayValue)
|
||||
case r == commentStart:
|
||||
lx.push(lexArrayValue)
|
||||
return lexCommentStart
|
||||
case r == arrayValTerm:
|
||||
return lx.errorf("Unexpected array value terminator %q.",
|
||||
arrayValTerm)
|
||||
case r == arrayEnd:
|
||||
return lexArrayEnd
|
||||
}
|
||||
|
||||
lx.backup()
|
||||
lx.push(lexArrayValueEnd)
|
||||
return lexValue
|
||||
}
|
||||
|
||||
// lexArrayValueEnd consumes the cruft between values of an array. Namely,
|
||||
// it ignores whitespace and expects either a ',' or a ']'.
|
||||
func lexArrayValueEnd(lx *lexer) stateFn {
|
||||
r := lx.next()
|
||||
switch {
|
||||
case isWhitespace(r) || isNL(r):
|
||||
return lexSkip(lx, lexArrayValueEnd)
|
||||
case r == commentStart:
|
||||
lx.push(lexArrayValueEnd)
|
||||
return lexCommentStart
|
||||
case r == arrayValTerm:
|
||||
lx.ignore()
|
||||
return lexArrayValue // move on to the next value
|
||||
case r == arrayEnd:
|
||||
return lexArrayEnd
|
||||
}
|
||||
return lx.errorf("Expected an array value terminator %q or an array "+
|
||||
"terminator %q, but got %q instead.", arrayValTerm, arrayEnd, r)
|
||||
}
|
||||
|
||||
// lexArrayEnd finishes the lexing of an array. It assumes that a ']' has
|
||||
// just been consumed.
|
||||
func lexArrayEnd(lx *lexer) stateFn {
|
||||
lx.ignore()
|
||||
lx.emit(itemArrayEnd)
|
||||
return lx.pop()
|
||||
}
|
||||
|
||||
// lexString consumes the inner contents of a string. It assumes that the
|
||||
// beginning '"' has already been consumed and ignored.
|
||||
func lexString(lx *lexer) stateFn {
|
||||
r := lx.next()
|
||||
switch {
|
||||
case isNL(r):
|
||||
return lx.errorf("Strings cannot contain new lines.")
|
||||
case r == '\\':
|
||||
lx.push(lexString)
|
||||
return lexStringEscape
|
||||
case r == stringEnd:
|
||||
lx.backup()
|
||||
lx.emit(itemString)
|
||||
lx.next()
|
||||
lx.ignore()
|
||||
return lx.pop()
|
||||
}
|
||||
return lexString
|
||||
}
|
||||
|
||||
// lexMultilineString consumes the inner contents of a string. It assumes that
|
||||
// the beginning '"""' has already been consumed and ignored.
|
||||
func lexMultilineString(lx *lexer) stateFn {
|
||||
r := lx.next()
|
||||
switch {
|
||||
case r == '\\':
|
||||
return lexMultilineStringEscape
|
||||
case r == stringEnd:
|
||||
if lx.accept(stringEnd) {
|
||||
if lx.accept(stringEnd) {
|
||||
lx.backup()
|
||||
lx.backup()
|
||||
lx.backup()
|
||||
lx.emit(itemMultilineString)
|
||||
lx.next()
|
||||
lx.next()
|
||||
lx.next()
|
||||
lx.ignore()
|
||||
return lx.pop()
|
||||
}
|
||||
lx.backup()
|
||||
}
|
||||
}
|
||||
return lexMultilineString
|
||||
}
|
||||
|
||||
// lexRawString consumes a raw string. Nothing can be escaped in such a string.
|
||||
// It assumes that the beginning "'" has already been consumed and ignored.
|
||||
func lexRawString(lx *lexer) stateFn {
|
||||
r := lx.next()
|
||||
switch {
|
||||
case isNL(r):
|
||||
return lx.errorf("Strings cannot contain new lines.")
|
||||
case r == rawStringEnd:
|
||||
lx.backup()
|
||||
lx.emit(itemRawString)
|
||||
lx.next()
|
||||
lx.ignore()
|
||||
return lx.pop()
|
||||
}
|
||||
return lexRawString
|
||||
}
|
||||
|
||||
// lexMultilineRawString consumes a raw string. Nothing can be escaped in such
|
||||
// a string. It assumes that the beginning "'" has already been consumed and
|
||||
// ignored.
|
||||
func lexMultilineRawString(lx *lexer) stateFn {
|
||||
r := lx.next()
|
||||
switch {
|
||||
case r == rawStringEnd:
|
||||
if lx.accept(rawStringEnd) {
|
||||
if lx.accept(rawStringEnd) {
|
||||
lx.backup()
|
||||
lx.backup()
|
||||
lx.backup()
|
||||
lx.emit(itemRawMultilineString)
|
||||
lx.next()
|
||||
lx.next()
|
||||
lx.next()
|
||||
lx.ignore()
|
||||
return lx.pop()
|
||||
}
|
||||
lx.backup()
|
||||
}
|
||||
}
|
||||
return lexMultilineRawString
|
||||
}
|
||||
|
||||
// lexMultilineStringEscape consumes an escaped character. It assumes that the
|
||||
// preceding '\\' has already been consumed.
|
||||
func lexMultilineStringEscape(lx *lexer) stateFn {
|
||||
// Handle the special case first:
|
||||
if isNL(lx.next()) {
|
||||
lx.next()
|
||||
return lexMultilineString
|
||||
} else {
|
||||
lx.backup()
|
||||
lx.push(lexMultilineString)
|
||||
return lexStringEscape(lx)
|
||||
}
|
||||
}
|
||||
|
||||
func lexStringEscape(lx *lexer) stateFn {
|
||||
r := lx.next()
|
||||
switch r {
|
||||
case 'b':
|
||||
fallthrough
|
||||
case 't':
|
||||
fallthrough
|
||||
case 'n':
|
||||
fallthrough
|
||||
case 'f':
|
||||
fallthrough
|
||||
case 'r':
|
||||
fallthrough
|
||||
case '"':
|
||||
fallthrough
|
||||
case '\\':
|
||||
return lx.pop()
|
||||
case 'u':
|
||||
return lexShortUnicodeEscape
|
||||
case 'U':
|
||||
return lexLongUnicodeEscape
|
||||
}
|
||||
return lx.errorf("Invalid escape character %q. Only the following "+
|
||||
"escape characters are allowed: "+
|
||||
"\\b, \\t, \\n, \\f, \\r, \\\", \\/, \\\\, "+
|
||||
"\\uXXXX and \\UXXXXXXXX.", r)
|
||||
}
|
||||
|
||||
func lexShortUnicodeEscape(lx *lexer) stateFn {
|
||||
var r rune
|
||||
for i := 0; i < 4; i++ {
|
||||
r = lx.next()
|
||||
if !isHexadecimal(r) {
|
||||
return lx.errorf("Expected four hexadecimal digits after '\\u', "+
|
||||
"but got '%s' instead.", lx.current())
|
||||
}
|
||||
}
|
||||
return lx.pop()
|
||||
}
|
||||
|
||||
func lexLongUnicodeEscape(lx *lexer) stateFn {
|
||||
var r rune
|
||||
for i := 0; i < 8; i++ {
|
||||
r = lx.next()
|
||||
if !isHexadecimal(r) {
|
||||
return lx.errorf("Expected eight hexadecimal digits after '\\U', "+
|
||||
"but got '%s' instead.", lx.current())
|
||||
}
|
||||
}
|
||||
return lx.pop()
|
||||
}
|
||||
|
||||
// lexNumberOrDateStart consumes either a (positive) integer, float or
|
||||
// datetime. It assumes that NO negative sign has been consumed.
|
||||
func lexNumberOrDateStart(lx *lexer) stateFn {
|
||||
r := lx.next()
|
||||
if !isDigit(r) {
|
||||
if r == '.' {
|
||||
return lx.errorf("Floats must start with a digit, not '.'.")
|
||||
} else {
|
||||
return lx.errorf("Expected a digit but got %q.", r)
|
||||
}
|
||||
}
|
||||
return lexNumberOrDate
|
||||
}
|
||||
|
||||
// lexNumberOrDate consumes either a (positive) integer, float or datetime.
|
||||
func lexNumberOrDate(lx *lexer) stateFn {
|
||||
r := lx.next()
|
||||
switch {
|
||||
case r == '-':
|
||||
if lx.pos-lx.start != 5 {
|
||||
return lx.errorf("All ISO8601 dates must be in full Zulu form.")
|
||||
}
|
||||
return lexDateAfterYear
|
||||
case isDigit(r):
|
||||
return lexNumberOrDate
|
||||
case r == '.':
|
||||
return lexFloatStart
|
||||
}
|
||||
|
||||
lx.backup()
|
||||
lx.emit(itemInteger)
|
||||
return lx.pop()
|
||||
}
|
||||
|
||||
// lexDateAfterYear consumes a full Zulu Datetime in ISO8601 format.
|
||||
// It assumes that "YYYY-" has already been consumed.
|
||||
func lexDateAfterYear(lx *lexer) stateFn {
|
||||
formats := []rune{
|
||||
// digits are '0'.
|
||||
// everything else is direct equality.
|
||||
'0', '0', '-', '0', '0',
|
||||
'T',
|
||||
'0', '0', ':', '0', '0', ':', '0', '0',
|
||||
'Z',
|
||||
}
|
||||
for _, f := range formats {
|
||||
r := lx.next()
|
||||
if f == '0' {
|
||||
if !isDigit(r) {
|
||||
return lx.errorf("Expected digit in ISO8601 datetime, "+
|
||||
"but found %q instead.", r)
|
||||
}
|
||||
} else if f != r {
|
||||
return lx.errorf("Expected %q in ISO8601 datetime, "+
|
||||
"but found %q instead.", f, r)
|
||||
}
|
||||
}
|
||||
lx.emit(itemDatetime)
|
||||
return lx.pop()
|
||||
}
|
||||
|
||||
// lexNumberStart consumes either an integer or a float. It assumes that
|
||||
// a negative sign has already been read, but that *no* digits have been
|
||||
// consumed. lexNumberStart will move to the appropriate integer or float
|
||||
// states.
|
||||
func lexNumberStart(lx *lexer) stateFn {
|
||||
// we MUST see a digit. Even floats have to start with a digit.
|
||||
r := lx.next()
|
||||
if !isDigit(r) {
|
||||
if r == '.' {
|
||||
return lx.errorf("Floats must start with a digit, not '.'.")
|
||||
} else {
|
||||
return lx.errorf("Expected a digit but got %q.", r)
|
||||
}
|
||||
}
|
||||
return lexNumber
|
||||
}
|
||||
|
||||
// lexNumber consumes an integer or a float after seeing the first digit.
|
||||
func lexNumber(lx *lexer) stateFn {
|
||||
r := lx.next()
|
||||
switch {
|
||||
case isDigit(r):
|
||||
return lexNumber
|
||||
case r == '.':
|
||||
return lexFloatStart
|
||||
}
|
||||
|
||||
lx.backup()
|
||||
lx.emit(itemInteger)
|
||||
return lx.pop()
|
||||
}
|
||||
|
||||
// lexFloatStart starts the consumption of digits of a float after a '.'.
|
||||
// Namely, at least one digit is required.
|
||||
func lexFloatStart(lx *lexer) stateFn {
|
||||
r := lx.next()
|
||||
if !isDigit(r) {
|
||||
return lx.errorf("Floats must have a digit after the '.', but got "+
|
||||
"%q instead.", r)
|
||||
}
|
||||
return lexFloat
|
||||
}
|
||||
|
||||
// lexFloat consumes the digits of a float after a '.'.
|
||||
// Assumes that one digit has been consumed after a '.' already.
|
||||
func lexFloat(lx *lexer) stateFn {
|
||||
r := lx.next()
|
||||
if isDigit(r) {
|
||||
return lexFloat
|
||||
}
|
||||
|
||||
lx.backup()
|
||||
lx.emit(itemFloat)
|
||||
return lx.pop()
|
||||
}
|
||||
|
||||
// lexConst consumes the remainder of s (that is, s[1:]). It assumes that s[0]
// has already been consumed.
|
||||
func lexConst(lx *lexer, s string) stateFn {
|
||||
for i := range s[1:] {
|
||||
if r := lx.next(); r != rune(s[i+1]) {
|
||||
return lx.errorf("Expected %q, but found %q instead.", s[:i+1],
|
||||
s[:i]+string(r))
|
||||
}
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
// lexTrue consumes the "rue" in "true". It assumes that 't' has already
|
||||
// been consumed.
|
||||
func lexTrue(lx *lexer) stateFn {
|
||||
if fn := lexConst(lx, "true"); fn != nil {
|
||||
return fn
|
||||
}
|
||||
lx.emit(itemBool)
|
||||
return lx.pop()
|
||||
}
|
||||
|
||||
// lexFalse consumes the "alse" in "false". It assumes that 'f' has already
|
||||
// been consumed.
|
||||
func lexFalse(lx *lexer) stateFn {
|
||||
if fn := lexConst(lx, "false"); fn != nil {
|
||||
return fn
|
||||
}
|
||||
lx.emit(itemBool)
|
||||
return lx.pop()
|
||||
}
|
||||
|
||||
// lexCommentStart begins the lexing of a comment. It will emit
|
||||
// itemCommentStart and consume no characters, passing control to lexComment.
|
||||
func lexCommentStart(lx *lexer) stateFn {
|
||||
lx.ignore()
|
||||
lx.emit(itemCommentStart)
|
||||
return lexComment
|
||||
}
|
||||
|
||||
// lexComment lexes an entire comment. It assumes that '#' has been consumed.
|
||||
// It will consume *up to* the first new line character, and pass control
|
||||
// back to the last state on the stack.
|
||||
func lexComment(lx *lexer) stateFn {
|
||||
r := lx.peek()
|
||||
if isNL(r) || r == eof {
|
||||
lx.emit(itemText)
|
||||
return lx.pop()
|
||||
}
|
||||
lx.next()
|
||||
return lexComment
|
||||
}
|
||||
|
||||
// lexSkip ignores all slurped input and moves on to the next state.
|
||||
func lexSkip(lx *lexer, nextState stateFn) stateFn {
|
||||
return func(lx *lexer) stateFn {
|
||||
lx.ignore()
|
||||
return nextState
|
||||
}
|
||||
}
|
||||
|
||||
// isWhitespace returns true if `r` is a whitespace character according
|
||||
// to the spec.
|
||||
func isWhitespace(r rune) bool {
|
||||
return r == '\t' || r == ' '
|
||||
}
|
||||
|
||||
func isNL(r rune) bool {
|
||||
return r == '\n' || r == '\r'
|
||||
}
|
||||
|
||||
func isDigit(r rune) bool {
|
||||
return r >= '0' && r <= '9'
|
||||
}
|
||||
|
||||
func isHexadecimal(r rune) bool {
|
||||
return (r >= '0' && r <= '9') ||
|
||||
(r >= 'a' && r <= 'f') ||
|
||||
(r >= 'A' && r <= 'F')
|
||||
}
|
||||
|
||||
func isBareKeyChar(r rune) bool {
|
||||
return (r >= 'A' && r <= 'Z') ||
|
||||
(r >= 'a' && r <= 'z') ||
|
||||
(r >= '0' && r <= '9') ||
|
||||
r == '_' ||
|
||||
r == '-'
|
||||
}
|
||||
|
||||
func (itype itemType) String() string {
|
||||
switch itype {
|
||||
case itemError:
|
||||
return "Error"
|
||||
case itemNIL:
|
||||
return "NIL"
|
||||
case itemEOF:
|
||||
return "EOF"
|
||||
case itemText:
|
||||
return "Text"
|
||||
case itemString:
|
||||
return "String"
|
||||
case itemRawString:
|
||||
return "String"
|
||||
case itemMultilineString:
|
||||
return "String"
|
||||
case itemRawMultilineString:
|
||||
return "String"
|
||||
case itemBool:
|
||||
return "Bool"
|
||||
case itemInteger:
|
||||
return "Integer"
|
||||
case itemFloat:
|
||||
return "Float"
|
||||
case itemDatetime:
|
||||
return "DateTime"
|
||||
case itemTableStart:
|
||||
return "TableStart"
|
||||
case itemTableEnd:
|
||||
return "TableEnd"
|
||||
case itemKeyStart:
|
||||
return "KeyStart"
|
||||
case itemArray:
|
||||
return "Array"
|
||||
case itemArrayEnd:
|
||||
return "ArrayEnd"
|
||||
case itemCommentStart:
|
||||
return "CommentStart"
|
||||
}
|
||||
panic(fmt.Sprintf("BUG: Unknown type '%d'.", int(itype)))
|
||||
}
|
||||
|
||||
func (item item) String() string {
|
||||
return fmt.Sprintf("(%s, %s)", item.typ.String(), item.val)
|
||||
}
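The parser in the following file consumes this lexer one item at a time via nextItem. A rough package-internal sketch of that loop (it would only compile inside package toml, e.g. in a test file; the input string is illustrative):

```go
// dumpItems walks the item stream for a small document, mirroring how
// parse() pulls items via p.next() until it sees itemEOF.
// dumpItems("answer = 42\n") prints roughly:
//   (KeyStart, ), (Text, answer), (Integer, 42), (EOF, )
func dumpItems(input string) {
	lx := lex(input)
	for {
		it := lx.nextItem()
		fmt.Println(it)
		if it.typ == itemEOF || it.typ == itemError {
			return
		}
	}
}
```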
|
||||
498
Godeps/_workspace/src/github.com/BurntSushi/toml/parse.go
generated
vendored
Normal file
@@ -0,0 +1,498 @@
|
||||
package toml
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"log"
|
||||
"strconv"
|
||||
"strings"
|
||||
"time"
|
||||
"unicode"
|
||||
"unicode/utf8"
|
||||
)
|
||||
|
||||
type parser struct {
|
||||
mapping map[string]interface{}
|
||||
types map[string]tomlType
|
||||
lx *lexer
|
||||
|
||||
// A list of keys in the order that they appear in the TOML data.
|
||||
ordered []Key
|
||||
|
||||
// the full key for the current hash in scope
|
||||
context Key
|
||||
|
||||
// the base key name for everything except hashes
|
||||
currentKey string
|
||||
|
||||
// rough approximation of line number
|
||||
approxLine int
|
||||
|
||||
// A map of 'key.group.names' to whether they were created implicitly.
|
||||
implicits map[string]bool
|
||||
}
|
||||
|
||||
type parseError string
|
||||
|
||||
func (pe parseError) Error() string {
|
||||
return string(pe)
|
||||
}
|
||||
|
||||
func parse(data string) (p *parser, err error) {
|
||||
defer func() {
|
||||
if r := recover(); r != nil {
|
||||
var ok bool
|
||||
if err, ok = r.(parseError); ok {
|
||||
return
|
||||
}
|
||||
panic(r)
|
||||
}
|
||||
}()
|
||||
|
||||
p = &parser{
|
||||
mapping: make(map[string]interface{}),
|
||||
types: make(map[string]tomlType),
|
||||
lx: lex(data),
|
||||
ordered: make([]Key, 0),
|
||||
implicits: make(map[string]bool),
|
||||
}
|
||||
for {
|
||||
item := p.next()
|
||||
if item.typ == itemEOF {
|
||||
break
|
||||
}
|
||||
p.topLevel(item)
|
||||
}
|
||||
|
||||
return p, nil
|
||||
}
|
||||
|
||||
func (p *parser) panicf(format string, v ...interface{}) {
|
||||
msg := fmt.Sprintf("Near line %d (last key parsed '%s'): %s",
|
||||
p.approxLine, p.current(), fmt.Sprintf(format, v...))
|
||||
panic(parseError(msg))
|
||||
}
|
||||
|
||||
func (p *parser) next() item {
|
||||
it := p.lx.nextItem()
|
||||
if it.typ == itemError {
|
||||
p.panicf("%s", it.val)
|
||||
}
|
||||
return it
|
||||
}
|
||||
|
||||
func (p *parser) bug(format string, v ...interface{}) {
|
||||
log.Fatalf("BUG: %s\n\n", fmt.Sprintf(format, v...))
|
||||
}
|
||||
|
||||
func (p *parser) expect(typ itemType) item {
|
||||
it := p.next()
|
||||
p.assertEqual(typ, it.typ)
|
||||
return it
|
||||
}
|
||||
|
||||
func (p *parser) assertEqual(expected, got itemType) {
|
||||
if expected != got {
|
||||
p.bug("Expected '%s' but got '%s'.", expected, got)
|
||||
}
|
||||
}
|
||||
|
||||
func (p *parser) topLevel(item item) {
|
||||
switch item.typ {
|
||||
case itemCommentStart:
|
||||
p.approxLine = item.line
|
||||
p.expect(itemText)
|
||||
case itemTableStart:
|
||||
kg := p.next()
|
||||
p.approxLine = kg.line
|
||||
|
||||
var key Key
|
||||
for ; kg.typ != itemTableEnd && kg.typ != itemEOF; kg = p.next() {
|
||||
key = append(key, p.keyString(kg))
|
||||
}
|
||||
p.assertEqual(itemTableEnd, kg.typ)
|
||||
|
||||
p.establishContext(key, false)
|
||||
p.setType("", tomlHash)
|
||||
p.ordered = append(p.ordered, key)
|
||||
case itemArrayTableStart:
|
||||
kg := p.next()
|
||||
p.approxLine = kg.line
|
||||
|
||||
var key Key
|
||||
for ; kg.typ != itemArrayTableEnd && kg.typ != itemEOF; kg = p.next() {
|
||||
key = append(key, p.keyString(kg))
|
||||
}
|
||||
p.assertEqual(itemArrayTableEnd, kg.typ)
|
||||
|
||||
p.establishContext(key, true)
|
||||
p.setType("", tomlArrayHash)
|
||||
p.ordered = append(p.ordered, key)
|
||||
case itemKeyStart:
|
||||
kname := p.next()
|
||||
p.approxLine = kname.line
|
||||
p.currentKey = p.keyString(kname)
|
||||
|
||||
val, typ := p.value(p.next())
|
||||
p.setValue(p.currentKey, val)
|
||||
p.setType(p.currentKey, typ)
|
||||
p.ordered = append(p.ordered, p.context.add(p.currentKey))
|
||||
p.currentKey = ""
|
||||
default:
|
||||
p.bug("Unexpected type at top level: %s", item.typ)
|
||||
}
|
||||
}
|
||||
|
||||
// Gets a string for a key (or part of a key in a table name).
|
||||
func (p *parser) keyString(it item) string {
|
||||
switch it.typ {
|
||||
case itemText:
|
||||
return it.val
|
||||
case itemString, itemMultilineString,
|
||||
itemRawString, itemRawMultilineString:
|
||||
s, _ := p.value(it)
|
||||
return s.(string)
|
||||
default:
|
||||
p.bug("Unexpected key type: %s", it.typ)
|
||||
panic("unreachable")
|
||||
}
|
||||
}
|
||||
|
||||
// value translates an expected value from the lexer into a Go value wrapped
|
||||
// as an empty interface.
|
||||
func (p *parser) value(it item) (interface{}, tomlType) {
|
||||
switch it.typ {
|
||||
case itemString:
|
||||
return p.replaceEscapes(it.val), p.typeOfPrimitive(it)
|
||||
case itemMultilineString:
|
||||
trimmed := stripFirstNewline(stripEscapedWhitespace(it.val))
|
||||
return p.replaceEscapes(trimmed), p.typeOfPrimitive(it)
|
||||
case itemRawString:
|
||||
return it.val, p.typeOfPrimitive(it)
|
||||
case itemRawMultilineString:
|
||||
return stripFirstNewline(it.val), p.typeOfPrimitive(it)
|
||||
case itemBool:
|
||||
switch it.val {
|
||||
case "true":
|
||||
return true, p.typeOfPrimitive(it)
|
||||
case "false":
|
||||
return false, p.typeOfPrimitive(it)
|
||||
}
|
||||
p.bug("Expected boolean value, but got '%s'.", it.val)
|
||||
case itemInteger:
|
||||
num, err := strconv.ParseInt(it.val, 10, 64)
|
||||
if err != nil {
|
||||
// See comment below for floats describing why we make a
|
||||
// distinction between a bug and a user error.
|
||||
if e, ok := err.(*strconv.NumError); ok &&
|
||||
e.Err == strconv.ErrRange {
|
||||
|
||||
p.panicf("Integer '%s' is out of the range of 64-bit "+
|
||||
"signed integers.", it.val)
|
||||
} else {
|
||||
p.bug("Expected integer value, but got '%s'.", it.val)
|
||||
}
|
||||
}
|
||||
return num, p.typeOfPrimitive(it)
|
||||
case itemFloat:
|
||||
num, err := strconv.ParseFloat(it.val, 64)
|
||||
if err != nil {
|
||||
// Distinguish float values. Normally, it'd be a bug if the lexer
|
||||
// provides an invalid float, but it's possible that the float is
|
||||
// out of range of valid values (which the lexer cannot determine).
|
||||
// So mark the former as a bug but the latter as a legitimate user
|
||||
// error.
|
||||
//
|
||||
// This is also true for integers.
|
||||
if e, ok := err.(*strconv.NumError); ok &&
|
||||
e.Err == strconv.ErrRange {
|
||||
|
||||
p.panicf("Float '%s' is out of the range of 64-bit "+
|
||||
"IEEE-754 floating-point numbers.", it.val)
|
||||
} else {
|
||||
p.bug("Expected float value, but got '%s'.", it.val)
|
||||
}
|
||||
}
|
||||
return num, p.typeOfPrimitive(it)
|
||||
case itemDatetime:
|
||||
t, err := time.Parse("2006-01-02T15:04:05Z", it.val)
|
||||
if err != nil {
|
||||
p.bug("Expected Zulu formatted DateTime, but got '%s'.", it.val)
|
||||
}
|
||||
return t, p.typeOfPrimitive(it)
|
||||
case itemArray:
|
||||
array := make([]interface{}, 0)
|
||||
types := make([]tomlType, 0)
|
||||
|
||||
for it = p.next(); it.typ != itemArrayEnd; it = p.next() {
|
||||
if it.typ == itemCommentStart {
|
||||
p.expect(itemText)
|
||||
continue
|
||||
}
|
||||
|
||||
val, typ := p.value(it)
|
||||
array = append(array, val)
|
||||
types = append(types, typ)
|
||||
}
|
||||
return array, p.typeOfArray(types)
|
||||
}
|
||||
p.bug("Unexpected value type: %s", it.typ)
|
||||
panic("unreachable")
|
||||
}
|
||||
|
||||
// establishContext sets the current context of the parser,
|
||||
// where the context is either a hash or an array of hashes. Which one is
|
||||
// set depends on the value of the `array` parameter.
|
||||
//
|
||||
// Establishing the context also makes sure that the key isn't a duplicate, and
|
||||
// will create implicit hashes automatically.
|
||||
func (p *parser) establishContext(key Key, array bool) {
|
||||
var ok bool
|
||||
|
||||
// Always start at the top level and drill down for our context.
|
||||
hashContext := p.mapping
|
||||
keyContext := make(Key, 0)
|
||||
|
||||
// We only need implicit hashes for key[0:-1]
|
||||
for _, k := range key[0 : len(key)-1] {
|
||||
_, ok = hashContext[k]
|
||||
keyContext = append(keyContext, k)
|
||||
|
||||
// No key? Make an implicit hash and move on.
|
||||
if !ok {
|
||||
p.addImplicit(keyContext)
|
||||
hashContext[k] = make(map[string]interface{})
|
||||
}
|
||||
|
||||
// If the hash context is actually an array of tables, then set
|
||||
// the hash context to the last element in that array.
|
||||
//
|
||||
// Otherwise, it better be a table, since this MUST be a key group (by
|
||||
// virtue of it not being the last element in a key).
|
||||
switch t := hashContext[k].(type) {
|
||||
case []map[string]interface{}:
|
||||
hashContext = t[len(t)-1]
|
||||
case map[string]interface{}:
|
||||
hashContext = t
|
||||
default:
|
||||
p.panicf("Key '%s' was already created as a hash.", keyContext)
|
||||
}
|
||||
}
|
||||
|
||||
p.context = keyContext
|
||||
if array {
|
||||
// If this is the first element for this array, then allocate a new
|
||||
// list of tables for it.
|
||||
k := key[len(key)-1]
|
||||
if _, ok := hashContext[k]; !ok {
|
||||
hashContext[k] = make([]map[string]interface{}, 0, 5)
|
||||
}
|
||||
|
||||
// Add a new table. But make sure the key hasn't already been used
|
||||
// for something else.
|
||||
if hash, ok := hashContext[k].([]map[string]interface{}); ok {
|
||||
hashContext[k] = append(hash, make(map[string]interface{}))
|
||||
} else {
|
||||
p.panicf("Key '%s' was already created and cannot be used as "+
|
||||
"an array.", keyContext)
|
||||
}
|
||||
} else {
|
||||
p.setValue(key[len(key)-1], make(map[string]interface{}))
|
||||
}
|
||||
p.context = append(p.context, key[len(key)-1])
|
||||
}
|
||||
|
||||
// setValue sets the given key to the given value in the current context.
|
||||
// It will make sure that the key hasn't already been defined, accounting for
|
||||
// implicit key groups.
|
||||
func (p *parser) setValue(key string, value interface{}) {
|
||||
var tmpHash interface{}
|
||||
var ok bool
|
||||
|
||||
hash := p.mapping
|
||||
keyContext := make(Key, 0)
|
||||
for _, k := range p.context {
|
||||
keyContext = append(keyContext, k)
|
||||
if tmpHash, ok = hash[k]; !ok {
|
||||
p.bug("Context for key '%s' has not been established.", keyContext)
|
||||
}
|
||||
switch t := tmpHash.(type) {
|
||||
case []map[string]interface{}:
|
||||
// The context is a table of hashes. Pick the most recent table
|
||||
// defined as the current hash.
|
||||
hash = t[len(t)-1]
|
||||
case map[string]interface{}:
|
||||
hash = t
|
||||
default:
|
||||
p.bug("Expected hash to have type 'map[string]interface{}', but "+
|
||||
"it has '%T' instead.", tmpHash)
|
||||
}
|
||||
}
|
||||
keyContext = append(keyContext, key)
|
||||
|
||||
if _, ok := hash[key]; ok {
|
||||
// Typically, if the given key has already been set, then we have
|
||||
// to raise an error since duplicate keys are disallowed. However,
|
||||
// it's possible that a key was previously defined implicitly. In this
|
||||
// case, it is allowed to be redefined concretely. (See the
|
||||
// `tests/valid/implicit-and-explicit-after.toml` test in `toml-test`.)
|
||||
//
|
||||
// But we have to make sure to stop marking it as an implicit. (So that
|
||||
// another redefinition provokes an error.)
|
||||
//
|
||||
// Note that since it has already been defined (as a hash), we don't
|
||||
// want to overwrite it. So our business is done.
|
||||
if p.isImplicit(keyContext) {
|
||||
p.removeImplicit(keyContext)
|
||||
return
|
||||
}
|
||||
|
||||
// Otherwise, we have a concrete key trying to override a previous
|
||||
// key, which is *always* wrong.
|
||||
p.panicf("Key '%s' has already been defined.", keyContext)
|
||||
}
|
||||
hash[key] = value
|
||||
}
|
||||
|
||||
// setType sets the type of a particular value at a given key.
|
||||
// It should be called immediately AFTER setValue.
|
||||
//
|
||||
// Note that if `key` is empty, then the type given will be applied to the
|
||||
// current context (which is either a table or an array of tables).
|
||||
func (p *parser) setType(key string, typ tomlType) {
|
||||
keyContext := make(Key, 0, len(p.context)+1)
|
||||
for _, k := range p.context {
|
||||
keyContext = append(keyContext, k)
|
||||
}
|
||||
if len(key) > 0 { // allow type setting for hashes
|
||||
keyContext = append(keyContext, key)
|
||||
}
|
||||
p.types[keyContext.String()] = typ
|
||||
}
|
||||
|
||||
// addImplicit sets the given Key as having been created implicitly.
|
||||
func (p *parser) addImplicit(key Key) {
|
||||
p.implicits[key.String()] = true
|
||||
}
|
||||
|
||||
// removeImplicit stops tagging the given key as having been implicitly
|
||||
// created.
|
||||
func (p *parser) removeImplicit(key Key) {
|
||||
p.implicits[key.String()] = false
|
||||
}
|
||||
|
||||
// isImplicit returns true if the key group pointed to by the key was created
|
||||
// implicitly.
|
||||
func (p *parser) isImplicit(key Key) bool {
|
||||
return p.implicits[key.String()]
|
||||
}
|
||||
|
||||
// current returns the full key name of the current context.
func (p *parser) current() string {
	if len(p.currentKey) == 0 {
		return p.context.String()
	}
	if len(p.context) == 0 {
		return p.currentKey
	}
	return fmt.Sprintf("%s.%s", p.context, p.currentKey)
}

func stripFirstNewline(s string) string {
	if len(s) == 0 || s[0] != '\n' {
		return s
	}
	return s[1:len(s)]
}

func stripEscapedWhitespace(s string) string {
	esc := strings.Split(s, "\\\n")
	if len(esc) > 1 {
		for i := 1; i < len(esc); i++ {
			esc[i] = strings.TrimLeftFunc(esc[i], unicode.IsSpace)
		}
	}
	return strings.Join(esc, "")
}
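For reference, the line-ending-backslash behaviour that stripEscapedWhitespace implements for multiline strings can be exercised with a standalone sketch that mirrors the same logic (the helper above is unexported, so this copy is purely illustrative):

```go
package main

import (
	"fmt"
	"strings"
	"unicode"
)

// stripEscapedWhitespace mirrors the unexported helper above: after each
// backslash-newline pair, the following run of whitespace is dropped.
func stripEscapedWhitespace(s string) string {
	esc := strings.Split(s, "\\\n")
	for i := 1; i < len(esc); i++ {
		esc[i] = strings.TrimLeftFunc(esc[i], unicode.IsSpace)
	}
	return strings.Join(esc, "")
}

func main() {
	in := "The quick brown \\\n    fox jumps over"
	fmt.Println(stripEscapedWhitespace(in)) // The quick brown fox jumps over
}
```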
func (p *parser) replaceEscapes(str string) string {
	var replaced []rune
	s := []byte(str)
	r := 0
	for r < len(s) {
		if s[r] != '\\' {
			c, size := utf8.DecodeRune(s[r:])
			r += size
			replaced = append(replaced, c)
			continue
		}
		r += 1
		if r >= len(s) {
			p.bug("Escape sequence at end of string.")
			return ""
		}
		switch s[r] {
		default:
			p.bug("Expected valid escape code after \\, but got %q.", s[r])
			return ""
		case 'b':
			replaced = append(replaced, rune(0x0008))
			r += 1
		case 't':
			replaced = append(replaced, rune(0x0009))
			r += 1
		case 'n':
			replaced = append(replaced, rune(0x000A))
			r += 1
		case 'f':
			replaced = append(replaced, rune(0x000C))
			r += 1
		case 'r':
			replaced = append(replaced, rune(0x000D))
			r += 1
		case '"':
			replaced = append(replaced, rune(0x0022))
			r += 1
		case '\\':
			replaced = append(replaced, rune(0x005C))
			r += 1
		case 'u':
			// At this point, we know we have a Unicode escape of the form
			// `uXXXX` at [r, r+5). (Because the lexer guarantees this
			// for us.)
			escaped := p.asciiEscapeToUnicode(s[r+1 : r+5])
			replaced = append(replaced, escaped)
			r += 5
		case 'U':
			// At this point, we know we have a Unicode escape of the form
			// `uXXXX` at [r, r+9). (Because the lexer guarantees this
			// for us.)
			escaped := p.asciiEscapeToUnicode(s[r+1 : r+9])
			replaced = append(replaced, escaped)
			r += 9
		}
	}
	return string(replaced)
}
func (p *parser) asciiEscapeToUnicode(bs []byte) rune {
	s := string(bs)
	hex, err := strconv.ParseUint(strings.ToLower(s), 16, 32)
	if err != nil {
		p.bug("Could not parse '%s' as a hexadecimal number, but the "+
			"lexer claims it's OK: %s", s, err)
	}

	// BUG(burntsushi)
	// I honestly don't understand how this works. I can't seem
	// to find a way to make this fail. I figured this would fail on invalid
	// UTF-8 characters like U+DCFF, but it doesn't.
	if !utf8.ValidString(string(rune(hex))) {
		p.panicf("Escaped character '\\u%s' is not valid UTF-8.", s)
	}
	return rune(hex)
}
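The hex-to-rune step above is just strconv.ParseUint on the digits that follow `\u` or `\U`. A standalone sketch of the same conversion (the function name here is illustrative, not part of the vendored package):

```go
package main

import (
	"fmt"
	"strconv"
)

// hexEscapeToRune parses the 4 (or 8) hex digits of a \uXXXX / \UXXXXXXXX
// escape into a code point, the same way asciiEscapeToUnicode does above.
func hexEscapeToRune(hexDigits string) (rune, error) {
	n, err := strconv.ParseUint(hexDigits, 16, 32)
	if err != nil {
		return 0, err
	}
	return rune(n), nil
}

func main() {
	r, err := hexEscapeToRune("00e9")
	fmt.Println(string(r), err) // é <nil>
}
```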
func isStringType(ty itemType) bool {
	return ty == itemString || ty == itemMultilineString ||
		ty == itemRawString || ty == itemRawMultilineString
}
1 Godeps/_workspace/src/github.com/BurntSushi/toml/session.vim (generated, vendored, new file)
@@ -0,0 +1 @@
au BufWritePost *.go silent!make tags > /dev/null 2>&1
@@ -92,11 +92,11 @@ func typeFields(t reflect.Type) []field {
			// Scan f.typ for fields to include.
			for i := 0; i < f.typ.NumField(); i++ {
				sf := f.typ.Field(i)
				if sf.PkgPath != "" && !sf.Anonymous { // unexported
				if sf.PkgPath != "" { // unexported
					continue
				}
				opts := getOptions(sf.Tag)
				if opts.skip {
				name := sf.Tag.Get("toml")
				if name == "-" {
					continue
				}
				index := make([]int, len(f.index)+1)
@@ -110,9 +110,8 @@ func typeFields(t reflect.Type) []field {
			}

			// Record found field and index sequence.
			if opts.name != "" || !sf.Anonymous || ft.Kind() != reflect.Struct {
				tagged := opts.name != ""
				name := opts.name
			if name != "" || !sf.Anonymous || ft.Kind() != reflect.Struct {
				tagged := name != ""
				if name == "" {
					name = sf.Name
				}
24 Godeps/_workspace/src/github.com/Unknwon/com/.gitignore (generated, vendored, new file)
@@ -0,0 +1,24 @@
|
||||
# Compiled Object files, Static and Dynamic libs (Shared Objects)
|
||||
*.o
|
||||
*.a
|
||||
*.so
|
||||
|
||||
# Folders
|
||||
_obj
|
||||
_test
|
||||
.idea
|
||||
|
||||
# Architecture specific extensions/prefixes
|
||||
*.[568vq]
|
||||
[568vq].out
|
||||
|
||||
*.cgo1.go
|
||||
*.cgo2.c
|
||||
_cgo_defun.c
|
||||
_cgo_gotypes.go
|
||||
_cgo_export.*
|
||||
|
||||
_testmain.go
|
||||
|
||||
*.exe
|
||||
*.iml
|
||||
24 Godeps/_workspace/src/github.com/Unknwon/com/README.md (generated, vendored, new file)
@@ -0,0 +1,24 @@
|
||||
Common functions
|
||||
===
|
||||
|
||||
[](https://drone.io/github.com/Unknwon/com/latest) [](http://gowalker.org/github.com/Unknwon/com)
|
||||
|
||||
This is an open source project for commonly used functions for the Go programming language.
|
||||
|
||||
This package need >= **go 1.2**
|
||||
|
||||
Code Convention: based on [Go Code Convention](https://github.com/Unknwon/go-code-convention).
|
||||
|
||||
## Contribute
|
||||
|
||||
Your contribute is welcome, but you have to check following steps after you added some functions and commit them:
|
||||
|
||||
1. Make sure you wrote user-friendly comments for **all functions** .
|
||||
2. Make sure you wrote test cases with any possible condition for **all functions** in file `*_test.go`.
|
||||
3. Make sure you wrote benchmarks for **all functions** in file `*_test.go`.
|
||||
4. Make sure you wrote useful examples for **all functions** in file `example_test.go`.
|
||||
5. Make sure you ran `go test -bench="."` and got **PASS** .
|
||||
|
||||
## Performance
|
||||
|
||||
See results on [drone.io](https://drone.io/github.com/Unknwon/com/latest) by `go test -bench="."`.
|
||||
140 Godeps/_workspace/src/github.com/Unknwon/com/cmd_test.go (generated, vendored, new file)
@@ -0,0 +1,140 @@
|
||||
// Copyright 2013 com authors
|
||||
//
|
||||
// Licensed under the Apache License, Version 2.0 (the "License"): you may
|
||||
// not use this file except in compliance with the License. You may obtain
|
||||
// a copy of the License at
|
||||
//
|
||||
// http://www.apache.org/licenses/LICENSE-2.0
|
||||
//
|
||||
// Unless required by applicable law or agreed to in writing, software
|
||||
// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
|
||||
// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
|
||||
// License for the specific language governing permissions and limitations
|
||||
// under the License.
|
||||
|
||||
package com
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"runtime"
|
||||
"strings"
|
||||
"testing"
|
||||
)
|
||||
|
||||
func TestColorLogS(t *testing.T) {
|
||||
if runtime.GOOS != "windows" {
|
||||
// Trace + path.
|
||||
cls := ColorLogS("[TRAC] Trace level test with path( %s )", "/path/to/somethere")
|
||||
clsR := fmt.Sprintf(
|
||||
"[\033[%dmTRAC%s] Trace level test with path(\033[%dm%s%s)",
|
||||
Blue, EndColor, Yellow, "/path/to/somethere", EndColor)
|
||||
if cls != clsR {
|
||||
t.Errorf("ColorLogS:\n Expect => %s\n Got => %s\n", clsR, cls)
|
||||
}
|
||||
|
||||
// Error + error.
|
||||
cls = ColorLogS("[ERRO] Error level test with error[ %s ]", "test error")
|
||||
clsR = fmt.Sprintf(
|
||||
"[\033[%dmERRO%s] Error level test with error[\033[%dm%s%s]",
|
||||
Red, EndColor, Red, "test error", EndColor)
|
||||
if cls != clsR {
|
||||
t.Errorf("ColorLogS:\n Expect => %s\n Got => %s\n", clsR, cls)
|
||||
}
|
||||
|
||||
// Warning + highlight.
|
||||
cls = ColorLogS("[WARN] Warnning level test with highlight # %s #", "special offer!")
|
||||
clsR = fmt.Sprintf(
|
||||
"[\033[%dmWARN%s] Warnning level test with highlight \033[%dm%s%s",
|
||||
Magenta, EndColor, Gray, "special offer!", EndColor)
|
||||
if cls != clsR {
|
||||
t.Errorf("ColorLogS:\n Expect => %s\n Got => %s\n", clsR, cls)
|
||||
}
|
||||
|
||||
// Success.
|
||||
cls = ColorLogS("[SUCC] Success level test")
|
||||
clsR = fmt.Sprintf(
|
||||
"[\033[%dmSUCC%s] Success level test",
|
||||
Green, EndColor)
|
||||
if cls != clsR {
|
||||
t.Errorf("ColorLogS:\n Expect => %s\n Got => %s\n", clsR, cls)
|
||||
}
|
||||
|
||||
// Default.
|
||||
cls = ColorLogS("[INFO] Default level test")
|
||||
clsR = fmt.Sprintf(
|
||||
"[INFO] Default level test")
|
||||
if cls != clsR {
|
||||
t.Errorf("ColorLogS:\n Expect => %s\n Got => %s\n", clsR, cls)
|
||||
}
|
||||
} else {
|
||||
// Trace + path.
|
||||
cls := ColorLogS("[TRAC] Trace level test with path( %s )", "/path/to/somethere")
|
||||
clsR := fmt.Sprintf(
|
||||
"[TRAC] Trace level test with path(%s)",
|
||||
"/path/to/somethere")
|
||||
if cls != clsR {
|
||||
t.Errorf("ColorLogS:\n Expect => %s\n Got => %s\n", clsR, cls)
|
||||
}
|
||||
|
||||
// Error + error.
|
||||
cls = ColorLogS("[ERRO] Error level test with error[ %s ]", "test error")
|
||||
clsR = fmt.Sprintf(
|
||||
"[ERRO] Error level test with error[%s]",
|
||||
"test error")
|
||||
if cls != clsR {
|
||||
t.Errorf("ColorLogS:\n Expect => %s\n Got => %s\n", clsR, cls)
|
||||
}
|
||||
|
||||
// Warning + highlight.
|
||||
cls = ColorLogS("[WARN] Warnning level test with highlight # %s #", "special offer!")
|
||||
clsR = fmt.Sprintf(
|
||||
"[WARN] Warnning level test with highlight %s",
|
||||
"special offer!")
|
||||
if cls != clsR {
|
||||
t.Errorf("ColorLogS:\n Expect => %s\n Got => %s\n", clsR, cls)
|
||||
}
|
||||
|
||||
// Success.
|
||||
cls = ColorLogS("[SUCC] Success level test")
|
||||
clsR = fmt.Sprintf(
|
||||
"[SUCC] Success level test")
|
||||
if cls != clsR {
|
||||
t.Errorf("ColorLogS:\n Expect => %s\n Got => %s\n", clsR, cls)
|
||||
}
|
||||
|
||||
// Default.
|
||||
cls = ColorLogS("[INFO] Default level test")
|
||||
clsR = fmt.Sprintf(
|
||||
"[INFO] Default level test")
|
||||
if cls != clsR {
|
||||
t.Errorf("ColorLogS:\n Expect => %s\n Got => %s\n", clsR, cls)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func TestExecCmd(t *testing.T) {
|
||||
stdout, stderr, err := ExecCmd("go", "help", "get")
|
||||
if err != nil {
|
||||
t.Errorf("ExecCmd:\n Expect => %v\n Got => %v\n", nil, err)
|
||||
} else if len(stderr) != 0 {
|
||||
t.Errorf("ExecCmd:\n Expect => %s\n Got => %s\n", "", stderr)
|
||||
} else if !strings.HasPrefix(stdout, "usage: go get") {
|
||||
t.Errorf("ExecCmd:\n Expect => %s\n Got => %s\n", "usage: go get", stdout)
|
||||
}
|
||||
}
|
||||
|
||||
func BenchmarkColorLogS(b *testing.B) {
|
||||
log := fmt.Sprintf(
|
||||
"[WARN] This is a tesing log that should be colored, path( %s ),"+
|
||||
" highlight # %s #, error [ %s ].",
|
||||
"path to somewhere", "highlighted content", "tesing error")
|
||||
for i := 0; i < b.N; i++ {
|
||||
ColorLogS(log)
|
||||
}
|
||||
}
|
||||
|
||||
func BenchmarkExecCmd(b *testing.B) {
|
||||
for i := 0; i < b.N; i++ {
|
||||
ExecCmd("go", "help", "get")
|
||||
}
|
||||
}
|
||||
157 Godeps/_workspace/src/github.com/Unknwon/com/convert.go (generated, vendored, new file)
@@ -0,0 +1,157 @@
|
||||
// Copyright 2014 com authors
|
||||
//
|
||||
// Licensed under the Apache License, Version 2.0 (the "License"): you may
|
||||
// not use this file except in compliance with the License. You may obtain
|
||||
// a copy of the License at
|
||||
//
|
||||
// http://www.apache.org/licenses/LICENSE-2.0
|
||||
//
|
||||
// Unless required by applicable law or agreed to in writing, software
|
||||
// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
|
||||
// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
|
||||
// License for the specific language governing permissions and limitations
|
||||
// under the License.
|
||||
|
||||
package com
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"strconv"
|
||||
)
|
||||
|
||||
// Convert string to specify type.
|
||||
type StrTo string
|
||||
|
||||
func (f StrTo) Exist() bool {
|
||||
return string(f) != string(0x1E)
|
||||
}
|
||||
|
||||
func (f StrTo) Uint8() (uint8, error) {
|
||||
v, err := strconv.ParseUint(f.String(), 10, 8)
|
||||
return uint8(v), err
|
||||
}
|
||||
|
||||
func (f StrTo) Int() (int, error) {
|
||||
v, err := strconv.ParseInt(f.String(), 10, 32)
|
||||
return int(v), err
|
||||
}
|
||||
|
||||
func (f StrTo) Int64() (int64, error) {
|
||||
v, err := strconv.ParseInt(f.String(), 10, 64)
|
||||
return int64(v), err
|
||||
}
|
||||
|
||||
func (f StrTo) MustUint8() uint8 {
|
||||
v, _ := f.Uint8()
|
||||
return v
|
||||
}
|
||||
|
||||
func (f StrTo) MustInt() int {
|
||||
v, _ := f.Int()
|
||||
return v
|
||||
}
|
||||
|
||||
func (f StrTo) MustInt64() int64 {
|
||||
v, _ := f.Int64()
|
||||
return v
|
||||
}
|
||||
|
||||
func (f StrTo) String() string {
|
||||
if f.Exist() {
|
||||
return string(f)
|
||||
}
|
||||
return ""
|
||||
}
|
||||
|
||||
// Convert any type to string.
|
||||
func ToStr(value interface{}, args ...int) (s string) {
|
||||
switch v := value.(type) {
|
||||
case bool:
|
||||
s = strconv.FormatBool(v)
|
||||
case float32:
|
||||
s = strconv.FormatFloat(float64(v), 'f', argInt(args).Get(0, -1), argInt(args).Get(1, 32))
|
||||
case float64:
|
||||
s = strconv.FormatFloat(v, 'f', argInt(args).Get(0, -1), argInt(args).Get(1, 64))
|
||||
case int:
|
||||
s = strconv.FormatInt(int64(v), argInt(args).Get(0, 10))
|
||||
case int8:
|
||||
s = strconv.FormatInt(int64(v), argInt(args).Get(0, 10))
|
||||
case int16:
|
||||
s = strconv.FormatInt(int64(v), argInt(args).Get(0, 10))
|
||||
case int32:
|
||||
s = strconv.FormatInt(int64(v), argInt(args).Get(0, 10))
|
||||
case int64:
|
||||
s = strconv.FormatInt(v, argInt(args).Get(0, 10))
|
||||
case uint:
|
||||
s = strconv.FormatUint(uint64(v), argInt(args).Get(0, 10))
|
||||
case uint8:
|
||||
s = strconv.FormatUint(uint64(v), argInt(args).Get(0, 10))
|
||||
case uint16:
|
||||
s = strconv.FormatUint(uint64(v), argInt(args).Get(0, 10))
|
||||
case uint32:
|
||||
s = strconv.FormatUint(uint64(v), argInt(args).Get(0, 10))
|
||||
case uint64:
|
||||
s = strconv.FormatUint(v, argInt(args).Get(0, 10))
|
||||
case string:
|
||||
s = v
|
||||
case []byte:
|
||||
s = string(v)
|
||||
default:
|
||||
s = fmt.Sprintf("%v", v)
|
||||
}
|
||||
return s
|
||||
}
|
||||
|
||||
type argInt []int
|
||||
|
||||
func (a argInt) Get(i int, args ...int) (r int) {
|
||||
if i >= 0 && i < len(a) {
|
||||
r = a[i]
|
||||
} else if len(args) > 0 {
|
||||
r = args[0]
|
||||
}
|
||||
return
|
||||
}
|
||||
|
||||
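The optional int arguments of ToStr follow the strconv conventions: for floats the first is the precision and the second the bit size, for integers the first is the base. A hedged usage sketch, assuming the package is importable by its upstream path `github.com/Unknwon/com`:

```go
package main

import (
	"fmt"

	"github.com/Unknwon/com"
)

func main() {
	// StrTo: parse a string into a numeric type, ignoring the error.
	fmt.Println(com.StrTo("42").MustInt()) // 42

	// ToStr: format any value; extra args select precision/base.
	fmt.Println(com.ToStr(3.14159, 2)) // "3.14" (precision 2)
	fmt.Println(com.ToStr(255, 16))    // "ff"   (base 16)
}
```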
// HexStr2int converts hex format string to decimal number.
|
||||
func HexStr2int(hexStr string) (int, error) {
|
||||
num := 0
|
||||
length := len(hexStr)
|
||||
for i := 0; i < length; i++ {
|
||||
char := hexStr[length-i-1]
|
||||
factor := -1
|
||||
|
||||
switch {
|
||||
case char >= '0' && char <= '9':
|
||||
factor = int(char) - '0'
|
||||
case char >= 'a' && char <= 'f':
|
||||
factor = int(char) - 'a' + 10
|
||||
default:
|
||||
return -1, fmt.Errorf("invalid hex: %s", string(char))
|
||||
}
|
||||
|
||||
num += factor * PowInt(16, i)
|
||||
}
|
||||
return num, nil
|
||||
}
|
||||
|
||||
// Int2HexStr converts decimal number to hex format string.
|
||||
func Int2HexStr(num int) (hex string) {
|
||||
if num == 0 {
|
||||
return "0"
|
||||
}
|
||||
|
||||
for num > 0 {
|
||||
r := num % 16
|
||||
|
||||
c := "?"
|
||||
if r >= 0 && r <= 9 {
|
||||
c = string(r + '0')
|
||||
} else {
|
||||
c = string(r + 'a' - 10)
|
||||
}
|
||||
hex = c + hex
|
||||
num = num / 16
|
||||
}
|
||||
return hex
|
||||
}
|
||||
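The two hex helpers round-trip each other for non-negative values; a short usage sketch under the same import-path assumption:

```go
package main

import (
	"fmt"

	"github.com/Unknwon/com"
)

func main() {
	n, err := com.HexStr2int("35e")
	fmt.Println(n, err) // 862 <nil>

	fmt.Println(com.Int2HexStr(862)) // "35e"
}
```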
56 Godeps/_workspace/src/github.com/Unknwon/com/convert_test.go (generated, vendored, new file)
@@ -0,0 +1,56 @@
|
||||
// Copyright 2014 com authors
|
||||
//
|
||||
// Licensed under the Apache License, Version 2.0 (the "License"): you may
|
||||
// not use this file except in compliance with the License. You may obtain
|
||||
// a copy of the License at
|
||||
//
|
||||
// http://www.apache.org/licenses/LICENSE-2.0
|
||||
//
|
||||
// Unless required by applicable law or agreed to in writing, software
|
||||
// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
|
||||
// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
|
||||
// License for the specific language governing permissions and limitations
|
||||
// under the License.
|
||||
|
||||
package com
|
||||
|
||||
import (
|
||||
"testing"
|
||||
|
||||
. "github.com/smartystreets/goconvey/convey"
|
||||
)
|
||||
|
||||
func TestHexStr2int(t *testing.T) {
|
||||
Convey("Convert hex format string to decimal", t, func() {
|
||||
hexDecs := map[string]int{
|
||||
"1": 1,
|
||||
"002": 2,
|
||||
"011": 17,
|
||||
"0a1": 161,
|
||||
"35e": 862,
|
||||
}
|
||||
|
||||
for hex, dec := range hexDecs {
|
||||
val, err := HexStr2int(hex)
|
||||
So(err, ShouldBeNil)
|
||||
So(val, ShouldEqual, dec)
|
||||
}
|
||||
})
|
||||
}
|
||||
|
||||
func TestInt2HexStr(t *testing.T) {
|
||||
Convey("Convert decimal to hex format string", t, func() {
|
||||
decHexs := map[int]string{
|
||||
1: "1",
|
||||
2: "2",
|
||||
17: "11",
|
||||
161: "a1",
|
||||
862: "35e",
|
||||
}
|
||||
|
||||
for dec, hex := range decHexs {
|
||||
val := Int2HexStr(dec)
|
||||
So(val, ShouldEqual, hex)
|
||||
}
|
||||
})
|
||||
}
|
||||
58 Godeps/_workspace/src/github.com/Unknwon/com/dir_test.go (generated, vendored, new file)
@@ -0,0 +1,58 @@
|
||||
// Copyright 2013 com authors
|
||||
//
|
||||
// Licensed under the Apache License, Version 2.0 (the "License"): you may
|
||||
// not use this file except in compliance with the License. You may obtain
|
||||
// a copy of the License at
|
||||
//
|
||||
// http://www.apache.org/licenses/LICENSE-2.0
|
||||
//
|
||||
// Unless required by applicable law or agreed to in writing, software
|
||||
// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
|
||||
// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
|
||||
// License for the specific language governing permissions and limitations
|
||||
// under the License.
|
||||
|
||||
package com
|
||||
|
||||
import (
|
||||
"os"
|
||||
"testing"
|
||||
|
||||
. "github.com/smartystreets/goconvey/convey"
|
||||
)
|
||||
|
||||
func TestIsDir(t *testing.T) {
|
||||
Convey("Check if given path is a directory", t, func() {
|
||||
Convey("Pass a file name", func() {
|
||||
So(IsDir("file.go"), ShouldEqual, false)
|
||||
})
|
||||
Convey("Pass a directory name", func() {
|
||||
So(IsDir("testdata"), ShouldEqual, true)
|
||||
})
|
||||
Convey("Pass a invalid path", func() {
|
||||
So(IsDir("foo"), ShouldEqual, false)
|
||||
})
|
||||
})
|
||||
}
|
||||
|
||||
func TestCopyDir(t *testing.T) {
|
||||
Convey("Items of two slices should be same", t, func() {
|
||||
s1, err := StatDir("testdata", true)
|
||||
So(err, ShouldEqual, nil)
|
||||
|
||||
err = CopyDir("testdata", "testdata2")
|
||||
So(err, ShouldEqual, nil)
|
||||
|
||||
s2, err := StatDir("testdata2", true)
|
||||
os.RemoveAll("testdata2")
|
||||
So(err, ShouldEqual, nil)
|
||||
|
||||
So(CompareSliceStr(s1, s2), ShouldEqual, true)
|
||||
})
|
||||
}
|
||||
|
||||
func BenchmarkIsDir(b *testing.B) {
|
||||
for i := 0; i < b.N; i++ {
|
||||
IsDir("file.go")
|
||||
}
|
||||
}
|
||||
299 Godeps/_workspace/src/github.com/Unknwon/com/example_test.go (generated, vendored, new file)
@@ -0,0 +1,299 @@
|
||||
// Copyright 2013 com authors
|
||||
//
|
||||
// Licensed under the Apache License, Version 2.0 (the "License"): you may
|
||||
// not use this file except in compliance with the License. You may obtain
|
||||
// a copy of the License at
|
||||
//
|
||||
// http://www.apache.org/licenses/LICENSE-2.0
|
||||
//
|
||||
// Unless required by applicable law or agreed to in writing, software
|
||||
// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
|
||||
// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
|
||||
// License for the specific language governing permissions and limitations
|
||||
// under the License.
|
||||
|
||||
package com_test
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"io/ioutil"
|
||||
"net/http"
|
||||
|
||||
"github.com/Unknwon/com"
|
||||
)
|
||||
|
||||
// ------------------------------
|
||||
// cmd.go
|
||||
// ------------------------------
|
||||
|
||||
func ExampleColorLogS() {
|
||||
coloredLog := com.ColorLogS(fmt.Sprintf(
|
||||
"[WARN] This is a tesing log that should be colored, path( %s ),"+
|
||||
" highlight # %s #, error [ %s ].",
|
||||
"path to somewhere", "highlighted content", "tesing error"))
|
||||
fmt.Println(coloredLog)
|
||||
}
|
||||
|
||||
func ExampleColorLog() {
|
||||
com.ColorLog(fmt.Sprintf(
|
||||
"[WARN] This is a tesing log that should be colored, path( %s ),"+
|
||||
" highlight # %s #, error [ %s ].",
|
||||
"path to somewhere", "highlighted content", "tesing error"))
|
||||
}
|
||||
|
||||
func ExampleExecCmd() {
|
||||
stdout, stderr, err := com.ExecCmd("go", "help", "get")
|
||||
fmt.Println(stdout, stderr, err)
|
||||
}
|
||||
|
||||
// ------------- END ------------
|
||||
|
||||
// ------------------------------
|
||||
// html.go
|
||||
// ------------------------------
|
||||
|
||||
func ExampleHtml2JS() {
|
||||
htm := "<div id=\"button\" class=\"btn\">Click me</div>\n\r"
|
||||
js := string(com.Html2JS([]byte(htm)))
|
||||
fmt.Println(js)
|
||||
// Output: <div id=\"button\" class=\"btn\">Click me</div>\n
|
||||
}
|
||||
|
||||
// ------------- END ------------
|
||||
|
||||
// ------------------------------
|
||||
// path.go
|
||||
// ------------------------------
|
||||
|
||||
func ExampleGetGOPATHs() {
|
||||
gps := com.GetGOPATHs()
|
||||
fmt.Println(gps)
|
||||
}
|
||||
|
||||
func ExampleGetSrcPath() {
|
||||
srcPath, err := com.GetSrcPath("github.com/Unknwon/com")
|
||||
if err != nil {
|
||||
fmt.Println(err)
|
||||
return
|
||||
}
|
||||
fmt.Println(srcPath)
|
||||
}
|
||||
|
||||
func ExampleHomeDir() {
|
||||
hd, err := com.HomeDir()
|
||||
fmt.Println(hd, err)
|
||||
}
|
||||
|
||||
// ------------- END ------------
|
||||
|
||||
// ------------------------------
|
||||
// file.go
|
||||
// ------------------------------
|
||||
|
||||
func ExampleIsFile() {
|
||||
if com.IsFile("file.go") {
|
||||
fmt.Println("file.go exists")
|
||||
return
|
||||
}
|
||||
fmt.Println("file.go is not a file or does not exist")
|
||||
}
|
||||
|
||||
func ExampleIsExist() {
|
||||
if com.IsExist("file.go") {
|
||||
fmt.Println("file.go exists")
|
||||
return
|
||||
}
|
||||
fmt.Println("file.go does not exist")
|
||||
}
|
||||
|
||||
// ------------- END ------------
|
||||
|
||||
// ------------------------------
|
||||
// dir.go
|
||||
// ------------------------------
|
||||
|
||||
func ExampleIsDir() {
|
||||
if com.IsDir("files") {
|
||||
fmt.Println("directory 'files' exists")
|
||||
return
|
||||
}
|
||||
fmt.Println("'files' is not a directory or does not exist")
|
||||
}
|
||||
|
||||
// ------------- END ------------
|
||||
|
||||
// ------------------------------
|
||||
// string.go
|
||||
// ------------------------------
|
||||
|
||||
func ExampleIsLetter() {
|
||||
fmt.Println(com.IsLetter('1'))
|
||||
fmt.Println(com.IsLetter('['))
|
||||
fmt.Println(com.IsLetter('a'))
|
||||
fmt.Println(com.IsLetter('Z'))
|
||||
// Output:
|
||||
// false
|
||||
// false
|
||||
// true
|
||||
// true
|
||||
}
|
||||
|
||||
func ExampleExpand() {
|
||||
match := map[string]string{
|
||||
"domain": "gowalker.org",
|
||||
"subdomain": "github.com",
|
||||
}
|
||||
s := "http://{domain}/{subdomain}/{0}/{1}"
|
||||
fmt.Println(com.Expand(s, match, "Unknwon", "gowalker"))
|
||||
// Output: http://gowalker.org/github.com/Unknwon/gowalker
|
||||
}
|
||||
|
||||
// ------------- END ------------
|
||||
|
||||
// ------------------------------
|
||||
// http.go
|
||||
// ------------------------------
|
||||
|
||||
func ExampleHttpGet() ([]byte, error) {
|
||||
rc, err := com.HttpGet(&http.Client{}, "http://gowalker.org", nil)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
p, err := ioutil.ReadAll(rc)
|
||||
rc.Close()
|
||||
return p, err
|
||||
}
|
||||
|
||||
func ExampleHttpGetBytes() ([]byte, error) {
|
||||
p, err := com.HttpGetBytes(&http.Client{}, "http://gowalker.org", nil)
|
||||
return p, err
|
||||
}
|
||||
|
||||
func ExampleHttpGetJSON() interface{} {
|
||||
j := com.HttpGetJSON(&http.Client{}, "http://gowalker.org", nil)
|
||||
return j
|
||||
}
|
||||
|
||||
type rawFile struct {
|
||||
name string
|
||||
rawURL string
|
||||
data []byte
|
||||
}
|
||||
|
||||
func (rf *rawFile) Name() string {
|
||||
return rf.name
|
||||
}
|
||||
|
||||
func (rf *rawFile) RawUrl() string {
|
||||
return rf.rawURL
|
||||
}
|
||||
|
||||
func (rf *rawFile) Data() []byte {
|
||||
return rf.data
|
||||
}
|
||||
|
||||
func (rf *rawFile) SetData(p []byte) {
|
||||
rf.data = p
|
||||
}
|
||||
|
||||
func ExampleFetchFiles() {
|
||||
// Code that should be outside of your function body.
|
||||
// type rawFile struct {
|
||||
// name string
|
||||
// rawURL string
|
||||
// data []byte
|
||||
// }
|
||||
|
||||
// func (rf *rawFile) Name() string {
|
||||
// return rf.name
|
||||
// }
|
||||
|
||||
// func (rf *rawFile) RawUrl() string {
|
||||
// return rf.rawURL
|
||||
// }
|
||||
|
||||
// func (rf *rawFile) Data() []byte {
|
||||
// return rf.data
|
||||
// }
|
||||
|
||||
// func (rf *rawFile) SetData(p []byte) {
|
||||
// rf.data = p
|
||||
// }
|
||||
|
||||
files := []com.RawFile{
|
||||
&rawFile{rawURL: "http://example.com"},
|
||||
&rawFile{rawURL: "http://example.com/foo"},
|
||||
}
|
||||
err := com.FetchFiles(&http.Client{}, files, nil)
|
||||
fmt.Println(err, len(files[0].Data()), len(files[1].Data()))
|
||||
}
|
||||
|
||||
func ExampleFetchFilesCurl() {
|
||||
// Code that should be outside of your function body.
|
||||
// type rawFile struct {
|
||||
// name string
|
||||
// rawURL string
|
||||
// data []byte
|
||||
// }
|
||||
|
||||
// func (rf *rawFile) Name() string {
|
||||
// return rf.name
|
||||
// }
|
||||
|
||||
// func (rf *rawFile) RawUrl() string {
|
||||
// return rf.rawURL
|
||||
// }
|
||||
|
||||
// func (rf *rawFile) Data() []byte {
|
||||
// return rf.data
|
||||
// }
|
||||
|
||||
// func (rf *rawFile) SetData(p []byte) {
|
||||
// rf.data = p
|
||||
// }
|
||||
|
||||
files := []com.RawFile{
|
||||
&rawFile{rawURL: "http://example.com"},
|
||||
&rawFile{rawURL: "http://example.com/foo"},
|
||||
}
|
||||
err := com.FetchFilesCurl(files)
|
||||
fmt.Println(err, len(files[0].Data()), len(files[1].Data()))
|
||||
}
|
||||
|
||||
// ------------- END ------------
|
||||
|
||||
// ------------------------------
|
||||
// regex.go
|
||||
// ------------------------------
|
||||
|
||||
func ExampleIsEmail() {
|
||||
fmt.Println(com.IsEmail("test@example.com"))
|
||||
fmt.Println(com.IsEmail("@example.com"))
|
||||
// Output:
|
||||
// true
|
||||
// false
|
||||
}
|
||||
|
||||
func ExampleIsUrl() {
|
||||
fmt.Println(com.IsUrl("http://example.com"))
|
||||
fmt.Println(com.IsUrl("http//example.com"))
|
||||
// Output:
|
||||
// true
|
||||
// false
|
||||
}
|
||||
|
||||
// ------------- END ------------
|
||||
|
||||
// ------------------------------
|
||||
// slice.go
|
||||
// ------------------------------
|
||||
|
||||
func ExampleAppendStr() {
|
||||
s := []string{"a"}
|
||||
s = com.AppendStr(s, "a")
|
||||
s = com.AppendStr(s, "b")
|
||||
fmt.Println(s)
|
||||
// Output: [a b]
|
||||
}
|
||||
|
||||
// ------------- END ------------
|
||||
61 Godeps/_workspace/src/github.com/Unknwon/com/file_test.go (generated, vendored, new file)
@@ -0,0 +1,61 @@
|
||||
// Copyright 2013 com authors
|
||||
//
|
||||
// Licensed under the Apache License, Version 2.0 (the "License"): you may
|
||||
// not use this file except in compliance with the License. You may obtain
|
||||
// a copy of the License at
|
||||
//
|
||||
// http://www.apache.org/licenses/LICENSE-2.0
|
||||
//
|
||||
// Unless required by applicable law or agreed to in writing, software
|
||||
// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
|
||||
// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
|
||||
// License for the specific language governing permissions and limitations
|
||||
// under the License.
|
||||
|
||||
package com
|
||||
|
||||
import (
|
||||
"testing"
|
||||
|
||||
. "github.com/smartystreets/goconvey/convey"
|
||||
)
|
||||
|
||||
func TestIsFile(t *testing.T) {
|
||||
if !IsFile("file.go") {
|
||||
t.Errorf("IsExist:\n Expect => %v\n Got => %v\n", true, false)
|
||||
}
|
||||
|
||||
if IsFile("testdata") {
|
||||
t.Errorf("IsExist:\n Expect => %v\n Got => %v\n", false, true)
|
||||
}
|
||||
|
||||
if IsFile("files.go") {
|
||||
t.Errorf("IsExist:\n Expect => %v\n Got => %v\n", false, true)
|
||||
}
|
||||
}
|
||||
|
||||
func TestIsExist(t *testing.T) {
|
||||
Convey("Check if file or directory exists", t, func() {
|
||||
Convey("Pass a file name that exists", func() {
|
||||
So(IsExist("file.go"), ShouldEqual, true)
|
||||
})
|
||||
Convey("Pass a directory name that exists", func() {
|
||||
So(IsExist("testdata"), ShouldEqual, true)
|
||||
})
|
||||
Convey("Pass a directory name that does not exist", func() {
|
||||
So(IsExist(".hg"), ShouldEqual, false)
|
||||
})
|
||||
})
|
||||
}
|
||||
|
||||
func BenchmarkIsFile(b *testing.B) {
|
||||
for i := 0; i < b.N; i++ {
|
||||
IsFile("file.go")
|
||||
}
|
||||
}
|
||||
|
||||
func BenchmarkIsExist(b *testing.B) {
|
||||
for i := 0; i < b.N; i++ {
|
||||
IsExist("file.go")
|
||||
}
|
||||
}
|
||||
35 Godeps/_workspace/src/github.com/Unknwon/com/html_test.go (generated, vendored, new file)
@@ -0,0 +1,35 @@
|
||||
// Copyright 2013 com authors
|
||||
//
|
||||
// Licensed under the Apache License, Version 2.0 (the "License"): you may
|
||||
// not use this file except in compliance with the License. You may obtain
|
||||
// a copy of the License at
|
||||
//
|
||||
// http://www.apache.org/licenses/LICENSE-2.0
|
||||
//
|
||||
// Unless required by applicable law or agreed to in writing, software
|
||||
// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
|
||||
// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
|
||||
// License for the specific language governing permissions and limitations
|
||||
// under the License.
|
||||
|
||||
package com
|
||||
|
||||
import (
|
||||
"testing"
|
||||
)
|
||||
|
||||
func TestHtml2JS(t *testing.T) {
|
||||
htm := "<div id=\"button\" class=\"btn\">Click me</div>\n\r"
|
||||
js := string(Html2JS([]byte(htm)))
|
||||
jsR := `<div id=\"button\" class=\"btn\">Click me</div>\n`
|
||||
if js != jsR {
|
||||
t.Errorf("Html2JS:\n Expect => %s\n Got => %s\n", jsR, js)
|
||||
}
|
||||
}
|
||||
|
||||
func BenchmarkHtml2JS(b *testing.B) {
|
||||
htm := "<div id=\"button\" class=\"btn\">Click me</div>\n\r"
|
||||
for i := 0; i < b.N; i++ {
|
||||
Html2JS([]byte(htm))
|
||||
}
|
||||
}
|
||||
111 Godeps/_workspace/src/github.com/Unknwon/com/http_test.go (generated, vendored, new file)
@@ -0,0 +1,111 @@
|
||||
// Copyright 2013 com authors
|
||||
//
|
||||
// Licensed under the Apache License, Version 2.0 (the "License"): you may
|
||||
// not use this file except in compliance with the License. You may obtain
|
||||
// a copy of the License at
|
||||
//
|
||||
// http://www.apache.org/licenses/LICENSE-2.0
|
||||
//
|
||||
// Unless required by applicable law or agreed to in writing, software
|
||||
// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
|
||||
// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
|
||||
// License for the specific language governing permissions and limitations
|
||||
// under the License.
|
||||
|
||||
package com
|
||||
|
||||
import (
|
||||
"io/ioutil"
|
||||
"net/http"
|
||||
"strings"
|
||||
"testing"
|
||||
)
|
||||
|
||||
var examplePrefix = `<!doctype html>
|
||||
<html>
|
||||
<head>
|
||||
<title>Example Domain</title>
|
||||
`
|
||||
|
||||
func TestHttpGet(t *testing.T) {
|
||||
// 200.
|
||||
rc, err := HttpGet(&http.Client{}, "http://example.com", nil)
|
||||
if err != nil {
|
||||
t.Fatalf("HttpGet:\n Expect => %v\n Got => %s\n", nil, err)
|
||||
}
|
||||
p, err := ioutil.ReadAll(rc)
|
||||
if err != nil {
|
||||
t.Errorf("HttpGet:\n Expect => %v\n Got => %s\n", nil, err)
|
||||
}
|
||||
s := string(p)
|
||||
if !strings.HasPrefix(s, examplePrefix) {
|
||||
t.Errorf("HttpGet:\n Expect => %s\n Got => %s\n", examplePrefix, s)
|
||||
}
|
||||
}
|
||||
|
||||
func TestHttpGetBytes(t *testing.T) {
|
||||
p, err := HttpGetBytes(&http.Client{}, "http://example.com", nil)
|
||||
if err != nil {
|
||||
t.Errorf("HttpGetBytes:\n Expect => %v\n Got => %s\n", nil, err)
|
||||
}
|
||||
s := string(p)
|
||||
if !strings.HasPrefix(s, examplePrefix) {
|
||||
t.Errorf("HttpGet:\n Expect => %s\n Got => %s\n", examplePrefix, s)
|
||||
}
|
||||
}
|
||||
|
||||
func TestHttpGetJSON(t *testing.T) {
|
||||
|
||||
}
|
||||
|
||||
type rawFile struct {
|
||||
name string
|
||||
rawURL string
|
||||
data []byte
|
||||
}
|
||||
|
||||
func (rf *rawFile) Name() string {
|
||||
return rf.name
|
||||
}
|
||||
|
||||
func (rf *rawFile) RawUrl() string {
|
||||
return rf.rawURL
|
||||
}
|
||||
|
||||
func (rf *rawFile) Data() []byte {
|
||||
return rf.data
|
||||
}
|
||||
|
||||
func (rf *rawFile) SetData(p []byte) {
|
||||
rf.data = p
|
||||
}
|
||||
|
||||
func TestFetchFiles(t *testing.T) {
|
||||
files := []RawFile{
|
||||
&rawFile{rawURL: "http://example.com"},
|
||||
&rawFile{rawURL: "http://example.com"},
|
||||
}
|
||||
err := FetchFiles(&http.Client{}, files, nil)
|
||||
if err != nil {
|
||||
t.Errorf("FetchFiles:\n Expect => %v\n Got => %s\n", nil, err)
|
||||
} else if len(files[0].Data()) != 1270 {
|
||||
t.Errorf("FetchFiles:\n Expect => %d\n Got => %d\n", 1270, len(files[0].Data()))
|
||||
} else if len(files[1].Data()) != 1270 {
|
||||
t.Errorf("FetchFiles:\n Expect => %d\n Got => %d\n", 1270, len(files[1].Data()))
|
||||
}
|
||||
}
|
||||
|
||||
func TestFetchFilesCurl(t *testing.T) {
|
||||
files := []RawFile{
|
||||
&rawFile{rawURL: "http://example.com"},
|
||||
&rawFile{rawURL: "http://example.com"},
|
||||
}
|
||||
err := FetchFilesCurl(files)
|
||||
if err != nil {
|
||||
t.Errorf("FetchFilesCurl:\n Expect => %v\n Got => %s\n", nil, err)
|
||||
} else if len(files[0].Data()) != 1270 {
|
||||
t.Errorf("FetchFilesCurl:\n Expect => %d\n Got => %d\n", 1270, len(files[0].Data()))
|
||||
} else if len(files[1].Data()) != 1270 {
|
||||
t.Errorf("FetchFilesCurl:\n Expect => %d\n Got => %d\n", 1270, len(files[1].Data()))
|
||||
}
|
||||
}
|
||||
@@ -14,16 +14,11 @@

 package com

 // PowInt is int type of math.Pow function.
 func PowInt(x int, y int) int {
	if y <= 0 {
		return 1
	} else {
		if y % 2 == 0 {
			sqrt := PowInt(x, y/2)
			return sqrt * sqrt
		} else {
			return PowInt(x, y-1) * x
		}
	num := 1
	for i := 0; i < y; i++ {
		num *= x
	}
	return num
}
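The hunk above appears to trade the recursive square-and-multiply form of PowInt for a plain loop; a quick, hedged equivalence sketch of the two forms (names local to this example, not part of the vendored package):

```go
package main

import "fmt"

// powIter is the loop form from one side of the hunk above.
func powIter(x, y int) int {
	num := 1
	for i := 0; i < y; i++ {
		num *= x
	}
	return num
}

// powRec is the recursive square-and-multiply form from the other side.
func powRec(x, y int) int {
	if y <= 0 {
		return 1
	}
	if y%2 == 0 {
		sqrt := powRec(x, y/2)
		return sqrt * sqrt
	}
	return powRec(x, y-1) * x
}

func main() {
	for y := 0; y < 8; y++ {
		fmt.Println(powIter(3, y) == powRec(3, y)) // all true
	}
}
```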
@@ -64,9 +64,9 @@ func GetSrcPath(importPath string) (appPath string, err error) {
 // it returns error when the variable does not exist.
 func HomeDir() (home string, err error) {
	if runtime.GOOS == "windows" {
		home = os.Getenv("USERPROFILE")
		if len(home) == 0 {
			home = os.Getenv("HOMEDRIVE") + os.Getenv("HOMEPATH")
		home = os.Getenv("HOMEDRIVE") + os.Getenv("HOMEPATH")
		if home == "" {
			home = os.Getenv("USERPROFILE")
		}
	} else {
		home = os.Getenv("HOME")
67 Godeps/_workspace/src/github.com/Unknwon/com/path_test.go (generated, vendored, new file)
@@ -0,0 +1,67 @@
|
||||
// Copyright 2013 com authors
|
||||
//
|
||||
// Licensed under the Apache License, Version 2.0 (the "License"): you may
|
||||
// not use this file except in compliance with the License. You may obtain
|
||||
// a copy of the License at
|
||||
//
|
||||
// http://www.apache.org/licenses/LICENSE-2.0
|
||||
//
|
||||
// Unless required by applicable law or agreed to in writing, software
|
||||
// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
|
||||
// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
|
||||
// License for the specific language governing permissions and limitations
|
||||
// under the License.
|
||||
|
||||
package com
|
||||
|
||||
import (
|
||||
"os"
|
||||
"runtime"
|
||||
"testing"
|
||||
)
|
||||
|
||||
func TestGetGOPATHs(t *testing.T) {
|
||||
var gpsR []string
|
||||
|
||||
if runtime.GOOS != "windows" {
|
||||
gpsR = []string{"path/to/gopath1", "path/to/gopath2", "path/to/gopath3"}
|
||||
os.Setenv("GOPATH", "path/to/gopath1:path/to/gopath2:path/to/gopath3")
|
||||
} else {
|
||||
gpsR = []string{"path/to/gopath1", "path/to/gopath2", "path/to/gopath3"}
|
||||
os.Setenv("GOPATH", "path\\to\\gopath1;path\\to\\gopath2;path\\to\\gopath3")
|
||||
}
|
||||
|
||||
gps := GetGOPATHs()
|
||||
if !CompareSliceStr(gps, gpsR) {
|
||||
t.Errorf("GetGOPATHs:\n Expect => %s\n Got => %s\n", gpsR, gps)
|
||||
}
|
||||
}
|
||||
|
||||
func TestGetSrcPath(t *testing.T) {
|
||||
|
||||
}
|
||||
|
||||
func TestHomeDir(t *testing.T) {
|
||||
_, err := HomeDir()
|
||||
if err != nil {
|
||||
t.Errorf("HomeDir:\n Expect => %v\n Got => %s\n", nil, err)
|
||||
}
|
||||
}
|
||||
|
||||
func BenchmarkGetGOPATHs(b *testing.B) {
|
||||
for i := 0; i < b.N; i++ {
|
||||
GetGOPATHs()
|
||||
}
|
||||
}
|
||||
|
||||
func BenchmarkGetSrcPath(b *testing.B) {
|
||||
for i := 0; i < b.N; i++ {
|
||||
GetSrcPath("github.com/Unknwon/com")
|
||||
}
|
||||
}
|
||||
|
||||
func BenchmarkHomeDir(b *testing.B) {
|
||||
for i := 0; i < b.N; i++ {
|
||||
HomeDir()
|
||||
}
|
||||
}
|
||||
70 Godeps/_workspace/src/github.com/Unknwon/com/regex_test.go (generated, vendored, new file)
@@ -0,0 +1,70 @@
|
||||
// Copyright 2013 com authors
|
||||
//
|
||||
// Licensed under the Apache License, Version 2.0 (the "License"): you may
|
||||
// not use this file except in compliance with the License. You may obtain
|
||||
// a copy of the License at
|
||||
//
|
||||
// http://www.apache.org/licenses/LICENSE-2.0
|
||||
//
|
||||
// Unless required by applicable law or agreed to in writing, software
|
||||
// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
|
||||
// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
|
||||
// License for the specific language governing permissions and limitations
|
||||
// under the License.
|
||||
|
||||
package com
|
||||
|
||||
import (
|
||||
"testing"
|
||||
)
|
||||
|
||||
func TestIsEmail(t *testing.T) {
|
||||
emails := map[string]bool{
|
||||
`test@example.com`: true,
|
||||
`single-character@b.org`: true,
|
||||
`uncommon_address@test.museum`: true,
|
||||
`local@sld.UPPER`: true,
|
||||
`@missing.org`: false,
|
||||
`missing@.com`: false,
|
||||
`missing@qq.`: false,
|
||||
`wrong-ip@127.1.1.1.26`: false,
|
||||
}
|
||||
for e, r := range emails {
|
||||
b := IsEmail(e)
|
||||
if b != r {
|
||||
t.Errorf("IsEmail:\n Expect => %v\n Got => %v\n", r, b)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func TestIsUrl(t *testing.T) {
|
||||
urls := map[string]bool{
|
||||
"http://www.example.com": true,
|
||||
"http://example.com": true,
|
||||
"http://example.com?user=test&password=test": true,
|
||||
"http://example.com?user=test#login": true,
|
||||
"ftp://example.com": true,
|
||||
"https://example.com": true,
|
||||
"htp://example.com": false,
|
||||
"http//example.com": false,
|
||||
"http://example": true,
|
||||
}
|
||||
for u, r := range urls {
|
||||
b := IsUrl(u)
|
||||
if b != r {
|
||||
t.Errorf("IsUrl:\n Expect => %v\n Got => %v\n", r, b)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func BenchmarkIsEmail(b *testing.B) {
|
||||
for i := 0; i < b.N; i++ {
|
||||
IsEmail("test@example.com")
|
||||
}
|
||||
}
|
||||
|
||||
func BenchmarkIsUrl(b *testing.B) {
|
||||
for i := 0; i < b.N; i++ {
|
||||
IsEmail("http://example.com")
|
||||
}
|
||||
}
|
||||
99 Godeps/_workspace/src/github.com/Unknwon/com/slice_test.go (generated, vendored, new file)
@@ -0,0 +1,99 @@
|
||||
// Copyright 2013 com authors
|
||||
//
|
||||
// Licensed under the Apache License, Version 2.0 (the "License"): you may
|
||||
// not use this file except in compliance with the License. You may obtain
|
||||
// a copy of the License at
|
||||
//
|
||||
// http://www.apache.org/licenses/LICENSE-2.0
|
||||
//
|
||||
// Unless required by applicable law or agreed to in writing, software
|
||||
// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
|
||||
// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
|
||||
// License for the specific language governing permissions and limitations
|
||||
// under the License.
|
||||
|
||||
package com
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"testing"
|
||||
|
||||
. "github.com/smartystreets/goconvey/convey"
|
||||
)
|
||||
|
||||
func TestAppendStr(t *testing.T) {
|
||||
Convey("Append a string to a slice with no duplicates", t, func() {
|
||||
s := []string{"a"}
|
||||
|
||||
Convey("Append a string that does not exist in slice", func() {
|
||||
s = AppendStr(s, "b")
|
||||
So(len(s), ShouldEqual, 2)
|
||||
})
|
||||
|
||||
Convey("Append a string that does exist in slice", func() {
|
||||
s = AppendStr(s, "b")
|
||||
So(len(s), ShouldEqual, 2)
|
||||
})
|
||||
})
|
||||
}
|
||||
|
||||
func TestCompareSliceStr(t *testing.T) {
|
||||
Convey("Compares two 'string' type slices with elements and order", t, func() {
|
||||
Convey("Compare two slices that do have same elements and order", func() {
|
||||
So(CompareSliceStr(
|
||||
[]string{"1", "2", "3"}, []string{"1", "2", "3"}), ShouldBeTrue)
|
||||
})
|
||||
|
||||
Convey("Compare two slices that do have same elements but does not have same order", func() {
|
||||
So(!CompareSliceStr(
|
||||
[]string{"2", "1", "3"}, []string{"1", "2", "3"}), ShouldBeTrue)
|
||||
})
|
||||
|
||||
Convey("Compare two slices that have different number of elements", func() {
|
||||
So(!CompareSliceStr(
|
||||
[]string{"2", "1"}, []string{"1", "2", "3"}), ShouldBeTrue)
|
||||
})
|
||||
})
|
||||
}
|
||||
|
||||
func TestCompareSliceStrU(t *testing.T) {
|
||||
Convey("Compare two 'string' type slices with elements and ignore the order", t, func() {
|
||||
Convey("Compare two slices that do have same elements and order", func() {
|
||||
So(CompareSliceStrU(
|
||||
[]string{"1", "2", "3"}, []string{"1", "2", "3"}), ShouldBeTrue)
|
||||
})
|
||||
|
||||
Convey("Compare two slices that do have same elements but does not have same order", func() {
|
||||
So(CompareSliceStrU(
|
||||
[]string{"2", "1", "3"}, []string{"1", "2", "3"}), ShouldBeTrue)
|
||||
})
|
||||
|
||||
Convey("Compare two slices that have different number of elements", func() {
|
||||
So(!CompareSliceStrU(
|
||||
[]string{"2", "1"}, []string{"1", "2", "3"}), ShouldBeTrue)
|
||||
})
|
||||
})
|
||||
}
|
||||
|
||||
func BenchmarkAppendStr(b *testing.B) {
|
||||
s := []string{"a"}
|
||||
for i := 0; i < b.N; i++ {
|
||||
s = AppendStr(s, fmt.Sprint(b.N%3))
|
||||
}
|
||||
}
|
||||
|
||||
func BenchmarkCompareSliceStr(b *testing.B) {
|
||||
s1 := []string{"1", "2", "3"}
|
||||
s2 := []string{"1", "2", "3"}
|
||||
for i := 0; i < b.N; i++ {
|
||||
CompareSliceStr(s1, s2)
|
||||
}
|
||||
}
|
||||
|
||||
func BenchmarkCompareSliceStrU(b *testing.B) {
|
||||
s1 := []string{"1", "4", "2", "3"}
|
||||
s2 := []string{"1", "2", "3", "4"}
|
||||
for i := 0; i < b.N; i++ {
|
||||
CompareSliceStrU(s1, s2)
|
||||
}
|
||||
}
|
||||
140 Godeps/_workspace/src/github.com/Unknwon/com/string.go (generated, vendored, new file)
@@ -0,0 +1,140 @@
|
||||
// Copyright 2013 com authors
|
||||
//
|
||||
// Licensed under the Apache License, Version 2.0 (the "License"): you may
|
||||
// not use this file except in compliance with the License. You may obtain
|
||||
// a copy of the License at
|
||||
//
|
||||
// http://www.apache.org/licenses/LICENSE-2.0
|
||||
//
|
||||
// Unless required by applicable law or agreed to in writing, software
|
||||
// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
|
||||
// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
|
||||
// License for the specific language governing permissions and limitations
|
||||
// under the License.
|
||||
|
||||
package com
|
||||
|
||||
import (
|
||||
"crypto/aes"
|
||||
"crypto/cipher"
|
||||
"crypto/rand"
|
||||
"encoding/base64"
|
||||
"errors"
|
||||
"io"
|
||||
r "math/rand"
|
||||
"strconv"
|
||||
"strings"
|
||||
"time"
|
||||
)
|
||||
|
||||
// AESEncrypt encrypts text and given key with AES.
|
||||
func AESEncrypt(key, text []byte) ([]byte, error) {
|
||||
block, err := aes.NewCipher(key)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
b := base64.StdEncoding.EncodeToString(text)
|
||||
ciphertext := make([]byte, aes.BlockSize+len(b))
|
||||
iv := ciphertext[:aes.BlockSize]
|
||||
if _, err := io.ReadFull(rand.Reader, iv); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
cfb := cipher.NewCFBEncrypter(block, iv)
|
||||
cfb.XORKeyStream(ciphertext[aes.BlockSize:], []byte(b))
|
||||
return ciphertext, nil
|
||||
}
|
||||
|
||||
// AESDecrypt decrypts text and given key with AES.
|
||||
func AESDecrypt(key, text []byte) ([]byte, error) {
|
||||
block, err := aes.NewCipher(key)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
if len(text) < aes.BlockSize {
|
||||
return nil, errors.New("ciphertext too short")
|
||||
}
|
||||
iv := text[:aes.BlockSize]
|
||||
text = text[aes.BlockSize:]
|
||||
cfb := cipher.NewCFBDecrypter(block, iv)
|
||||
cfb.XORKeyStream(text, text)
|
||||
data, err := base64.StdEncoding.DecodeString(string(text))
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
return data, nil
|
||||
}
|
||||
|
||||
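AESEncrypt/AESDecrypt are a CFB round trip with the IV stored in the first block of the ciphertext; a hedged usage sketch (the key must be 16, 24 or 32 bytes for aes.NewCipher), again assuming the `github.com/Unknwon/com` import path:

```go
package main

import (
	"fmt"

	"github.com/Unknwon/com"
)

func main() {
	key := []byte("example key 1234") // 16 bytes -> AES-128

	ciphertext, err := com.AESEncrypt(key, []byte("hello grafana"))
	if err != nil {
		panic(err)
	}

	plaintext, err := com.AESDecrypt(key, ciphertext)
	fmt.Println(string(plaintext), err) // hello grafana <nil>
}
```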
// IsLetter returns true if the 'l' is an English letter.
|
||||
func IsLetter(l uint8) bool {
|
||||
n := (l | 0x20) - 'a'
|
||||
if n >= 0 && n < 26 {
|
||||
return true
|
||||
}
|
||||
return false
|
||||
}
|
||||
|
||||
// Expand replaces {k} in template with match[k] or subs[atoi(k)] if k is not in match.
|
||||
func Expand(template string, match map[string]string, subs ...string) string {
|
||||
var p []byte
|
||||
var i int
|
||||
for {
|
||||
i = strings.Index(template, "{")
|
||||
if i < 0 {
|
||||
break
|
||||
}
|
||||
p = append(p, template[:i]...)
|
||||
template = template[i+1:]
|
||||
i = strings.Index(template, "}")
|
||||
if s, ok := match[template[:i]]; ok {
|
||||
p = append(p, s...)
|
||||
} else {
|
||||
j, _ := strconv.Atoi(template[:i])
|
||||
if j >= len(subs) {
|
||||
p = append(p, []byte("Missing")...)
|
||||
} else {
|
||||
p = append(p, subs[j]...)
|
||||
}
|
||||
}
|
||||
template = template[i+1:]
|
||||
}
|
||||
p = append(p, template...)
|
||||
return string(p)
|
||||
}
|
||||
|
||||
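Expand substitutes named placeholders from the map and numeric placeholders from the trailing arguments, as the string_test.go diff further down also demonstrates; a small usage sketch under the same import-path assumption:

```go
package main

import (
	"fmt"

	"github.com/Unknwon/com"
)

func main() {
	s := com.Expand("http://{domain}/{subdomain}/{0}/{1}",
		map[string]string{"domain": "gowalker.org", "subdomain": "github.com"},
		"Unknwon", "gowalker")
	fmt.Println(s) // http://gowalker.org/github.com/Unknwon/gowalker
}
```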
// Reverse s string, support unicode
|
||||
func Reverse(s string) string {
|
||||
n := len(s)
|
||||
runes := make([]rune, n)
|
||||
for _, rune := range s {
|
||||
n--
|
||||
runes[n] = rune
|
||||
}
|
||||
return string(runes[n:])
|
||||
}
|
||||
|
||||
// RandomCreateBytes generate random []byte by specify chars.
|
||||
func RandomCreateBytes(n int, alphabets ...byte) []byte {
|
||||
const alphanum = "0123456789ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz"
|
||||
var bytes = make([]byte, n)
|
||||
var randby bool
|
||||
if num, err := rand.Read(bytes); num != n || err != nil {
|
||||
r.Seed(time.Now().UnixNano())
|
||||
randby = true
|
||||
}
|
||||
for i, b := range bytes {
|
||||
if len(alphabets) == 0 {
|
||||
if randby {
|
||||
bytes[i] = alphanum[r.Intn(len(alphanum))]
|
||||
} else {
|
||||
bytes[i] = alphanum[b%byte(len(alphanum))]
|
||||
}
|
||||
} else {
|
||||
if randby {
|
||||
bytes[i] = alphabets[r.Intn(len(alphabets))]
|
||||
} else {
|
||||
bytes[i] = alphabets[b%byte(len(alphabets))]
|
||||
}
|
||||
}
|
||||
}
|
||||
return bytes
|
||||
}
|
||||
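RandomCreateBytes draws from crypto/rand and falls back to math/rand if that read fails; by default it uses the alphanumeric alphabet, and a custom alphabet can be passed as trailing bytes. A hedged usage sketch:

```go
package main

import (
	"fmt"

	"github.com/Unknwon/com"
)

func main() {
	token := com.RandomCreateBytes(16)                       // alphanumeric
	pin := com.RandomCreateBytes(6, []byte("0123456789")...) // digits only
	fmt.Println(string(token), string(pin))
}
```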
82 Godeps/_workspace/src/github.com/Unknwon/com/string_test.go (generated, vendored, new file)
@@ -0,0 +1,82 @@
|
||||
// Copyright 2013 com authors
|
||||
//
|
||||
// Licensed under the Apache License, Version 2.0 (the "License"): you may
|
||||
// not use this file except in compliance with the License. You may obtain
|
||||
// a copy of the License at
|
||||
//
|
||||
// http://www.apache.org/licenses/LICENSE-2.0
|
||||
//
|
||||
// Unless required by applicable law or agreed to in writing, software
|
||||
// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
|
||||
// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
|
||||
// License for the specific language governing permissions and limitations
|
||||
// under the License.
|
||||
|
||||
package com
|
||||
|
||||
import (
|
||||
"testing"
|
||||
)
|
||||
|
||||
func TestIsLetter(t *testing.T) {
|
||||
if IsLetter('1') {
|
||||
t.Errorf("IsLetter:\n Expect => %v\n Got => %v\n", false, true)
|
||||
}
|
||||
|
||||
if IsLetter('[') {
|
||||
t.Errorf("IsLetter:\n Expect => %v\n Got => %v\n", false, true)
|
||||
}
|
||||
|
||||
if !IsLetter('a') {
|
||||
t.Errorf("IsLetter:\n Expect => %v\n Got => %v\n", true, false)
|
||||
}
|
||||
|
||||
if !IsLetter('Z') {
|
||||
t.Errorf("IsLetter:\n Expect => %v\n Got => %v\n", true, false)
|
||||
}
|
||||
}
|
||||
|
||||
func TestExpand(t *testing.T) {
|
||||
match := map[string]string{
|
||||
"domain": "gowalker.org",
|
||||
"subdomain": "github.com",
|
||||
}
|
||||
s := "http://{domain}/{subdomain}/{0}/{1}"
|
||||
sR := "http://gowalker.org/github.com/Unknwon/gowalker"
|
||||
if Expand(s, match, "Unknwon", "gowalker") != sR {
|
||||
t.Errorf("Expand:\n Expect => %s\n Got => %s\n", sR, s)
|
||||
}
|
||||
}
|
||||
|
||||
func TestReverse(t *testing.T) {
|
||||
if Reverse("abcdefg") != "gfedcba" {
|
||||
t.Errorf("Reverse:\n Except => %s\n Got =>%s\n", "gfedcba", Reverse("abcdefg"))
|
||||
}
|
||||
if Reverse("上善若水厚德载物") != "物载德厚水若善上" {
|
||||
t.Errorf("Reverse:\n Except => %s\n Got =>%s\n", "物载德厚水若善上", Reverse("上善若水厚德载物"))
|
||||
}
|
||||
}
|
||||
|
||||
func BenchmarkIsLetter(b *testing.B) {
|
||||
for i := 0; i < b.N; i++ {
|
||||
IsLetter('a')
|
||||
}
|
||||
}
|
||||
|
||||
func BenchmarkExpand(b *testing.B) {
|
||||
match := map[string]string{
|
||||
"domain": "gowalker.org",
|
||||
"subdomain": "github.com",
|
||||
}
|
||||
s := "http://{domain}/{subdomain}/{0}/{1}"
|
||||
for i := 0; i < b.N; i++ {
|
||||
Expand(s, match, "Unknwon", "gowalker")
|
||||
}
|
||||
}
|
||||
|
||||
func BenchmarkReverse(b *testing.B) {
|
||||
s := "abscef中文"
|
||||
for i := 0; i < b.N; i++ {
|
||||
Reverse(s)
|
||||
}
|
||||
}
|
||||
1 Godeps/_workspace/src/github.com/Unknwon/com/testdata/SaveFile.txt (generated, vendored, new file)
@@ -0,0 +1 @@
TestSaveFile
1 Godeps/_workspace/src/github.com/Unknwon/com/testdata/SaveFileS.txt (generated, vendored, new file)
@@ -0,0 +1 @@
TestSaveFileS
1 Godeps/_workspace/src/github.com/Unknwon/com/testdata/statDir/SaveFile.txt (generated, vendored, new file)
@@ -0,0 +1 @@
TestSaveFile
1 Godeps/_workspace/src/github.com/Unknwon/com/testdata/statDir/SaveFileS.txt (generated, vendored, new file)
@@ -0,0 +1 @@
TestSaveFileS
1 Godeps/_workspace/src/github.com/Unknwon/com/testdata/statDir/secondLevel/SaveFile.txt (generated, vendored, new file)
@@ -0,0 +1 @@
TestSaveFile
1 Godeps/_workspace/src/github.com/Unknwon/com/testdata/statDir/secondLevel/SaveFileS.txt (generated, vendored, new file)
@@ -0,0 +1 @@
TestSaveFileS
105 Godeps/_workspace/src/github.com/aws/aws-sdk-go/aws/awserr/error.go (generated, vendored, new file)
@@ -0,0 +1,105 @@
|
||||
// Package awserr represents API error interface accessors for the SDK.
|
||||
package awserr
|
||||
|
||||
// An Error wraps lower level errors with code, message and an original error.
|
||||
// The underlying concrete error type may also satisfy other interfaces which
|
||||
// can be to used to obtain more specific information about the error.
|
||||
//
|
||||
// Calling Error() or String() will always include the full information about
|
||||
// an error based on its underlying type.
|
||||
//
|
||||
// Example:
|
||||
//
|
||||
// output, err := s3manage.Upload(svc, input, opts)
|
||||
// if err != nil {
|
||||
// if awsErr, ok := err.(awserr.Error); ok {
|
||||
// // Get error details
|
||||
// log.Println("Error:", err.Code(), err.Message())
|
||||
//
|
||||
// // Prints out full error message, including original error if there was one.
|
||||
// log.Println("Error:", err.Error())
|
||||
//
|
||||
// // Get original error
|
||||
// if origErr := err.Err(); origErr != nil {
|
||||
// // operate on original error.
|
||||
// }
|
||||
// } else {
|
||||
// fmt.Println(err.Error())
|
||||
// }
|
||||
// }
|
||||
//
|
||||
type Error interface {
|
||||
// Satisfy the generic error interface.
|
||||
error
|
||||
|
||||
// Returns the short phrase depicting the classification of the error.
|
||||
Code() string
|
||||
|
||||
// Returns the error details message.
|
||||
Message() string
|
||||
|
||||
// Returns the original error if one was set. Nil is returned if not set.
|
||||
OrigErr() error
|
||||
}
|
||||
|
||||
// New returns an Error object described by the code, message, and origErr.
|
||||
//
|
||||
// If origErr satisfies the Error interface it will not be wrapped within a new
|
||||
// Error object and will instead be returned.
|
||||
func New(code, message string, origErr error) Error {
|
||||
if e, ok := origErr.(Error); ok && e != nil {
|
||||
return e
|
||||
}
|
||||
return newBaseError(code, message, origErr)
|
||||
}
|
||||
|
||||
// A RequestFailure is an interface to extract request failure information from
|
||||
// an Error such as the request ID of the failed request returned by a service.
|
||||
// RequestFailures may not always have a requestID value if the request failed
|
||||
// prior to reaching the service such as a connection error.
|
||||
//
|
||||
// Example:
|
||||
//
|
||||
// output, err := s3manage.Upload(svc, input, opts)
|
||||
// if err != nil {
|
||||
// if reqerr, ok := err.(RequestFailure); ok {
|
||||
// log.Printf("Request failed", reqerr.Code(), reqerr.Message(), reqerr.RequestID())
|
||||
// } else {
|
||||
// log.Printf("Error:", err.Error()
|
||||
// }
|
||||
// }
|
||||
//
|
||||
// Combined with awserr.Error:
|
||||
//
|
||||
// output, err := s3manage.Upload(svc, input, opts)
|
||||
// if err != nil {
|
||||
// if awsErr, ok := err.(awserr.Error); ok {
|
||||
// // Generic AWS Error with Code, Message, and original error (if any)
|
||||
// fmt.Println(awsErr.Code(), awsErr.Message(), awsErr.OrigErr())
|
||||
//
|
||||
// if reqErr, ok := err.(awserr.RequestFailure); ok {
|
||||
// // A service error occurred
|
||||
// fmt.Println(reqErr.StatusCode(), reqErr.RequestID())
|
||||
// }
|
||||
// } else {
|
||||
// fmt.Println(err.Error())
|
||||
// }
|
||||
// }
|
||||
//
|
||||
type RequestFailure interface {
|
||||
Error
|
||||
|
||||
// The status code of the HTTP response.
|
||||
StatusCode() int
|
||||
|
||||
// The request ID returned by the service for a request failure. This will
|
||||
// be empty if no request ID is available such as the request failed due
|
||||
// to a connection error.
|
||||
RequestID() string
|
||||
}
|
||||
|
||||
// NewRequestFailure returns a new request error wrapper for the given Error
|
||||
// provided.
|
||||
func NewRequestFailure(err Error, statusCode int, reqID string) RequestFailure {
|
||||
return newRequestError(err, statusCode, reqID)
|
||||
}
|
||||
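For orientation, a minimal sketch (not part of the diff) of how calling code consumes the interfaces added above, using only awserr.New, NewRequestFailure, and the accessor methods defined in this file:

```
package main

import (
    "errors"
    "fmt"

    "github.com/aws/aws-sdk-go/aws/awserr"
)

func main() {
    // Wrap a lower-level error with a classification code and message.
    base := awserr.New("NoSuchBucket", "the requested bucket was not found", errors.New("original failure"))

    // Attach request-level details the way a response handler would.
    var err error = awserr.NewRequestFailure(base, 404, "EXAMPLE-REQUEST-ID")

    // Callers type-assert to recover the richer interfaces.
    if awsErr, ok := err.(awserr.Error); ok {
        fmt.Println("code:", awsErr.Code(), "message:", awsErr.Message())
    }
    if reqErr, ok := err.(awserr.RequestFailure); ok {
        fmt.Println("status:", reqErr.StatusCode(), "request id:", reqErr.RequestID())
    }
}
```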
135 Godeps/_workspace/src/github.com/aws/aws-sdk-go/aws/awserr/types.go generated vendored Normal file
@@ -0,0 +1,135 @@
|
||||
package awserr
|
||||
|
||||
import "fmt"
|
||||
|
||||
// SprintError returns a string of the formatted error code.
|
||||
//
|
||||
// Both extra and origErr are optional. If they are included their lines
|
||||
// will be added, but if they are not included their lines will be ignored.
|
||||
func SprintError(code, message, extra string, origErr error) string {
|
||||
msg := fmt.Sprintf("%s: %s", code, message)
|
||||
if extra != "" {
|
||||
msg = fmt.Sprintf("%s\n\t%s", msg, extra)
|
||||
}
|
||||
if origErr != nil {
|
||||
msg = fmt.Sprintf("%s\ncaused by: %s", msg, origErr.Error())
|
||||
}
|
||||
return msg
|
||||
}
|
||||
|
||||
// A baseError wraps the code and message which defines an error. It also
|
||||
// can be used to wrap an original error object.
|
||||
//
|
||||
// Should be used as the root for errors satisfying the awserr.Error. Also
|
||||
// for any error which does not fit into a specific error wrapper type.
|
||||
type baseError struct {
|
||||
// Classification of error
|
||||
code string
|
||||
|
||||
// Detailed information about error
|
||||
message string
|
||||
|
||||
// Optional original error this error is based off of. Allows building
|
||||
// chained errors.
|
||||
origErr error
|
||||
}
|
||||
|
||||
// newBaseError returns an error object for the code, message, and err.
|
||||
//
|
||||
// code is a short no whitespace phrase depicting the classification of
|
||||
// the error that is being created.
|
||||
//
|
||||
// message is the free flow string containing detailed information about the error.
|
||||
//
|
||||
// origErr is the error object which will be nested under the new error to be returned.
|
||||
func newBaseError(code, message string, origErr error) *baseError {
|
||||
return &baseError{
|
||||
code: code,
|
||||
message: message,
|
||||
origErr: origErr,
|
||||
}
|
||||
}
|
||||
|
||||
// Error returns the string representation of the error.
|
||||
//
|
||||
// See ErrorWithExtra for formatting.
|
||||
//
|
||||
// Satisfies the error interface.
|
||||
func (b baseError) Error() string {
|
||||
return SprintError(b.code, b.message, "", b.origErr)
|
||||
}
|
||||
|
||||
// String returns the string representation of the error.
|
||||
// Alias for Error to satisfy the stringer interface.
|
||||
func (b baseError) String() string {
|
||||
return b.Error()
|
||||
}
|
||||
|
||||
// Code returns the short phrase depicting the classification of the error.
|
||||
func (b baseError) Code() string {
|
||||
return b.code
|
||||
}
|
||||
|
||||
// Message returns the error details message.
|
||||
func (b baseError) Message() string {
|
||||
return b.message
|
||||
}
|
||||
|
||||
// OrigErr returns the original error if one was set. Nil is returned if no error
|
||||
// was set.
|
||||
func (b baseError) OrigErr() error {
|
||||
return b.origErr
|
||||
}
|
||||
|
||||
// So that the Error interface type can be included as an anonymous field
|
||||
// in the requestError struct and not conflict with the error.Error() method.
|
||||
type awsError Error
|
||||
|
||||
// A requestError wraps a request or service error.
|
||||
//
|
||||
// Composed of baseError for code, message, and original error.
|
||||
type requestError struct {
|
||||
awsError
|
||||
statusCode int
|
||||
requestID string
|
||||
}
|
||||
|
||||
// newRequestError returns a wrapped error with additional information for request
|
||||
// status code, and service requestID.
|
||||
//
|
||||
// Should be used to wrap all request which involve service requests. Even if
|
||||
// the request failed without a service response, but had an HTTP status code
|
||||
// that may be meaningful.
|
||||
//
|
||||
// Also wraps original errors via the baseError.
|
||||
func newRequestError(err Error, statusCode int, requestID string) *requestError {
|
||||
return &requestError{
|
||||
awsError: err,
|
||||
statusCode: statusCode,
|
||||
requestID: requestID,
|
||||
}
|
||||
}
|
||||
|
||||
// Error returns the string representation of the error.
|
||||
// Satisfies the error interface.
|
||||
func (r requestError) Error() string {
|
||||
extra := fmt.Sprintf("status code: %d, request id: %s",
|
||||
r.statusCode, r.requestID)
|
||||
return SprintError(r.Code(), r.Message(), extra, r.OrigErr())
|
||||
}
|
||||
|
||||
// String returns the string representation of the error.
|
||||
// Alias for Error to satisfy the stringer interface.
|
||||
func (r requestError) String() string {
|
||||
return r.Error()
|
||||
}
|
||||
|
||||
// StatusCode returns the wrapped status code for the error
|
||||
func (r requestError) StatusCode() int {
|
||||
return r.statusCode
|
||||
}
|
||||
|
||||
// RequestID returns the wrapped requestID
|
||||
func (r requestError) RequestID() string {
|
||||
return r.requestID
|
||||
}
|
||||
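The baseError and requestError types above build their messages through SprintError; a small illustrative sketch (not part of the diff) of the resulting format:

```
package main

import (
    "errors"
    "fmt"

    "github.com/aws/aws-sdk-go/aws/awserr"
)

func main() {
    err := awserr.New("SerializationError", "failed decoding response", errors.New("unexpected EOF"))

    // SprintError emits "<code>: <message>" followed by a "caused by:" line
    // when an original error is attached, so this prints:
    //
    //   SerializationError: failed decoding response
    //   caused by: unexpected EOF
    fmt.Println(err.Error())
}
```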
100 Godeps/_workspace/src/github.com/aws/aws-sdk-go/aws/awsutil/copy.go generated vendored Normal file
@@ -0,0 +1,100 @@
package awsutil

import (
    "io"
    "reflect"
)

// Copy deeply copies a src structure to dst. Useful for copying request and
// response structures.
//
// Can copy between structs of different type, but will only copy fields which
// are assignable, and exist in both structs. Fields which are not assignable,
// or do not exist in both structs are ignored.
func Copy(dst, src interface{}) {
    dstval := reflect.ValueOf(dst)
    if !dstval.IsValid() {
        panic("Copy dst cannot be nil")
    }

    rcopy(dstval, reflect.ValueOf(src), true)
}

// CopyOf returns a copy of src while also allocating the memory for dst.
// src must be a pointer type or this operation will fail.
func CopyOf(src interface{}) (dst interface{}) {
    dsti := reflect.New(reflect.TypeOf(src).Elem())
    dst = dsti.Interface()
    rcopy(dsti, reflect.ValueOf(src), true)
    return
}

// rcopy performs a recursive copy of values from the source to destination.
//
// root is used to skip certain aspects of the copy which are not valid
// for the root node of a object.
func rcopy(dst, src reflect.Value, root bool) {
    if !src.IsValid() {
        return
    }

    switch src.Kind() {
    case reflect.Ptr:
        if _, ok := src.Interface().(io.Reader); ok {
            if dst.Kind() == reflect.Ptr && dst.Elem().CanSet() {
                dst.Elem().Set(src)
            } else if dst.CanSet() {
                dst.Set(src)
            }
        } else {
            e := src.Type().Elem()
            if dst.CanSet() && !src.IsNil() {
                dst.Set(reflect.New(e))
            }
            if src.Elem().IsValid() {
                // Keep the current root state since the depth hasn't changed
                rcopy(dst.Elem(), src.Elem(), root)
            }
        }
    case reflect.Struct:
        t := dst.Type()
        for i := 0; i < t.NumField(); i++ {
            name := t.Field(i).Name
            srcVal := src.FieldByName(name)
            dstVal := dst.FieldByName(name)
            if srcVal.IsValid() && dstVal.CanSet() {
                rcopy(dstVal, srcVal, false)
            }
        }
    case reflect.Slice:
        if src.IsNil() {
            break
        }

        s := reflect.MakeSlice(src.Type(), src.Len(), src.Cap())
        dst.Set(s)
        for i := 0; i < src.Len(); i++ {
            rcopy(dst.Index(i), src.Index(i), false)
        }
    case reflect.Map:
        if src.IsNil() {
            break
        }

        s := reflect.MakeMap(src.Type())
        dst.Set(s)
        for _, k := range src.MapKeys() {
            v := src.MapIndex(k)
            v2 := reflect.New(v.Type()).Elem()
            rcopy(v2, v, false)
            dst.SetMapIndex(k, v2)
        }
    default:
        // Assign the value if possible. If its not assignable, the value would
        // need to be converted and the impact of that may be unexpected, or is
        // not compatible with the dst type.
        if src.Type().AssignableTo(dst.Type()) {
            dst.Set(src)
        }
    }
}
233 Godeps/_workspace/src/github.com/aws/aws-sdk-go/aws/awsutil/copy_test.go generated vendored Normal file
@@ -0,0 +1,233 @@
|
||||
package awsutil_test
|
||||
|
||||
import (
|
||||
"bytes"
|
||||
"fmt"
|
||||
"io"
|
||||
"io/ioutil"
|
||||
"testing"
|
||||
|
||||
"github.com/aws/aws-sdk-go/aws/awsutil"
|
||||
"github.com/stretchr/testify/assert"
|
||||
)
|
||||
|
||||
func ExampleCopy() {
|
||||
type Foo struct {
|
||||
A int
|
||||
B []*string
|
||||
}
|
||||
|
||||
// Create the initial value
|
||||
str1 := "hello"
|
||||
str2 := "bye bye"
|
||||
f1 := &Foo{A: 1, B: []*string{&str1, &str2}}
|
||||
|
||||
// Do the copy
|
||||
var f2 Foo
|
||||
awsutil.Copy(&f2, f1)
|
||||
|
||||
// Print the result
|
||||
fmt.Println(awsutil.Prettify(f2))
|
||||
|
||||
// Output:
|
||||
// {
|
||||
// A: 1,
|
||||
// B: ["hello","bye bye"]
|
||||
// }
|
||||
}
|
||||
|
||||
func TestCopy(t *testing.T) {
|
||||
type Foo struct {
|
||||
A int
|
||||
B []*string
|
||||
C map[string]*int
|
||||
}
|
||||
|
||||
// Create the initial value
|
||||
str1 := "hello"
|
||||
str2 := "bye bye"
|
||||
int1 := 1
|
||||
int2 := 2
|
||||
f1 := &Foo{
|
||||
A: 1,
|
||||
B: []*string{&str1, &str2},
|
||||
C: map[string]*int{
|
||||
"A": &int1,
|
||||
"B": &int2,
|
||||
},
|
||||
}
|
||||
|
||||
// Do the copy
|
||||
var f2 Foo
|
||||
awsutil.Copy(&f2, f1)
|
||||
|
||||
// Values are equal
|
||||
assert.Equal(t, f2.A, f1.A)
|
||||
assert.Equal(t, f2.B, f1.B)
|
||||
assert.Equal(t, f2.C, f1.C)
|
||||
|
||||
// But pointers are not!
|
||||
str3 := "nothello"
|
||||
int3 := 57
|
||||
f2.A = 100
|
||||
f2.B[0] = &str3
|
||||
f2.C["B"] = &int3
|
||||
assert.NotEqual(t, f2.A, f1.A)
|
||||
assert.NotEqual(t, f2.B, f1.B)
|
||||
assert.NotEqual(t, f2.C, f1.C)
|
||||
}
|
||||
|
||||
func TestCopyNestedWithUnexported(t *testing.T) {
|
||||
type Bar struct {
|
||||
a int
|
||||
B int
|
||||
}
|
||||
type Foo struct {
|
||||
A string
|
||||
B Bar
|
||||
}
|
||||
|
||||
f1 := &Foo{A: "string", B: Bar{a: 1, B: 2}}
|
||||
|
||||
var f2 Foo
|
||||
awsutil.Copy(&f2, f1)
|
||||
|
||||
// Values match
|
||||
assert.Equal(t, f2.A, f1.A)
|
||||
assert.NotEqual(t, f2.B, f1.B)
|
||||
assert.NotEqual(t, f2.B.a, f1.B.a)
|
||||
assert.Equal(t, f2.B.B, f2.B.B)
|
||||
}
|
||||
|
||||
func TestCopyIgnoreNilMembers(t *testing.T) {
|
||||
type Foo struct {
|
||||
A *string
|
||||
B []string
|
||||
C map[string]string
|
||||
}
|
||||
|
||||
f := &Foo{}
|
||||
assert.Nil(t, f.A)
|
||||
assert.Nil(t, f.B)
|
||||
assert.Nil(t, f.C)
|
||||
|
||||
var f2 Foo
|
||||
awsutil.Copy(&f2, f)
|
||||
assert.Nil(t, f2.A)
|
||||
assert.Nil(t, f2.B)
|
||||
assert.Nil(t, f2.C)
|
||||
|
||||
fcopy := awsutil.CopyOf(f)
|
||||
f3 := fcopy.(*Foo)
|
||||
assert.Nil(t, f3.A)
|
||||
assert.Nil(t, f3.B)
|
||||
assert.Nil(t, f3.C)
|
||||
}
|
||||
|
||||
func TestCopyPrimitive(t *testing.T) {
|
||||
str := "hello"
|
||||
var s string
|
||||
awsutil.Copy(&s, &str)
|
||||
assert.Equal(t, "hello", s)
|
||||
}
|
||||
|
||||
func TestCopyNil(t *testing.T) {
|
||||
var s string
|
||||
awsutil.Copy(&s, nil)
|
||||
assert.Equal(t, "", s)
|
||||
}
|
||||
|
||||
func TestCopyReader(t *testing.T) {
|
||||
var buf io.Reader = bytes.NewReader([]byte("hello world"))
|
||||
var r io.Reader
|
||||
awsutil.Copy(&r, buf)
|
||||
b, err := ioutil.ReadAll(r)
|
||||
assert.NoError(t, err)
|
||||
assert.Equal(t, []byte("hello world"), b)
|
||||
|
||||
// empty bytes because this is not a deep copy
|
||||
b, err = ioutil.ReadAll(buf)
|
||||
assert.NoError(t, err)
|
||||
assert.Equal(t, []byte(""), b)
|
||||
}
|
||||
|
||||
func TestCopyDifferentStructs(t *testing.T) {
|
||||
type SrcFoo struct {
|
||||
A int
|
||||
B []*string
|
||||
C map[string]*int
|
||||
SrcUnique string
|
||||
SameNameDiffType int
|
||||
unexportedPtr *int
|
||||
ExportedPtr *int
|
||||
}
|
||||
type DstFoo struct {
|
||||
A int
|
||||
B []*string
|
||||
C map[string]*int
|
||||
DstUnique int
|
||||
SameNameDiffType string
|
||||
unexportedPtr *int
|
||||
ExportedPtr *int
|
||||
}
|
||||
|
||||
// Create the initial value
|
||||
str1 := "hello"
|
||||
str2 := "bye bye"
|
||||
int1 := 1
|
||||
int2 := 2
|
||||
f1 := &SrcFoo{
|
||||
A: 1,
|
||||
B: []*string{&str1, &str2},
|
||||
C: map[string]*int{
|
||||
"A": &int1,
|
||||
"B": &int2,
|
||||
},
|
||||
SrcUnique: "unique",
|
||||
SameNameDiffType: 1,
|
||||
unexportedPtr: &int1,
|
||||
ExportedPtr: &int2,
|
||||
}
|
||||
|
||||
// Do the copy
|
||||
var f2 DstFoo
|
||||
awsutil.Copy(&f2, f1)
|
||||
|
||||
// Values are equal
|
||||
assert.Equal(t, f2.A, f1.A)
|
||||
assert.Equal(t, f2.B, f1.B)
|
||||
assert.Equal(t, f2.C, f1.C)
|
||||
assert.Equal(t, "unique", f1.SrcUnique)
|
||||
assert.Equal(t, 1, f1.SameNameDiffType)
|
||||
assert.Equal(t, 0, f2.DstUnique)
|
||||
assert.Equal(t, "", f2.SameNameDiffType)
|
||||
assert.Equal(t, int1, *f1.unexportedPtr)
|
||||
assert.Nil(t, f2.unexportedPtr)
|
||||
assert.Equal(t, int2, *f1.ExportedPtr)
|
||||
assert.Equal(t, int2, *f2.ExportedPtr)
|
||||
}
|
||||
|
||||
func ExampleCopyOf() {
|
||||
type Foo struct {
|
||||
A int
|
||||
B []*string
|
||||
}
|
||||
|
||||
// Create the initial value
|
||||
str1 := "hello"
|
||||
str2 := "bye bye"
|
||||
f1 := &Foo{A: 1, B: []*string{&str1, &str2}}
|
||||
|
||||
// Do the copy
|
||||
v := awsutil.CopyOf(f1)
|
||||
var f2 *Foo = v.(*Foo)
|
||||
|
||||
// Print the result
|
||||
fmt.Println(awsutil.Prettify(f2))
|
||||
|
||||
// Output:
|
||||
// {
|
||||
// A: 1,
|
||||
// B: ["hello","bye bye"]
|
||||
// }
|
||||
}
|
||||
29 Godeps/_workspace/src/github.com/aws/aws-sdk-go/aws/awsutil/equal_test.go generated vendored Normal file
@@ -0,0 +1,29 @@
package awsutil_test

import (
    "testing"

    "github.com/aws/aws-sdk-go/aws"
    "github.com/aws/aws-sdk-go/aws/awsutil"
    "github.com/stretchr/testify/assert"
)

func TestDeepEqual(t *testing.T) {
    cases := []struct {
        a, b  interface{}
        equal bool
    }{
        {"a", "a", true},
        {"a", "b", false},
        {"a", aws.String(""), false},
        {"a", nil, false},
        {"a", aws.String("a"), true},
        {(*bool)(nil), (*bool)(nil), true},
        {(*bool)(nil), (*string)(nil), false},
        {nil, nil, true},
    }

    for i, c := range cases {
        assert.Equal(t, c.equal, awsutil.DeepEqual(c.a, c.b), "%d, a:%v b:%v, %t", i, c.a, c.b, c.equal)
    }
}
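The table-driven test above exercises awsutil.DeepEqual, which dereferences pointers before comparing; a minimal illustrative sketch (not part of the diff):

```
package main

import (
    "fmt"

    "github.com/aws/aws-sdk-go/aws"
    "github.com/aws/aws-sdk-go/aws/awsutil"
)

func main() {
    // A plain value and a pointer to an equal value compare as equal,
    // mirroring the {"a", aws.String("a"), true} case in the table above.
    fmt.Println(awsutil.DeepEqual("a", aws.String("a")))       // true
    fmt.Println(awsutil.DeepEqual("a", "b"))                    // false
    fmt.Println(awsutil.DeepEqual((*bool)(nil), (*bool)(nil))) // true
}
```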
@@ -106,8 +106,8 @@ func rValuesAtPath(v interface{}, path string, createPath, caseSensitive, nilTer

    if indexStar || index != nil {
        nextvals = []reflect.Value{}
        for _, valItem := range values {
            value := reflect.Indirect(valItem)
        for _, value := range values {
            value := reflect.Indirect(value)
            if value.Kind() != reflect.Slice {
                continue
            }
142 Godeps/_workspace/src/github.com/aws/aws-sdk-go/aws/awsutil/path_value_test.go generated vendored Normal file
@@ -0,0 +1,142 @@
|
||||
package awsutil_test
|
||||
|
||||
import (
|
||||
"testing"
|
||||
|
||||
"github.com/aws/aws-sdk-go/aws/awsutil"
|
||||
"github.com/stretchr/testify/assert"
|
||||
)
|
||||
|
||||
type Struct struct {
|
||||
A []Struct
|
||||
z []Struct
|
||||
B *Struct
|
||||
D *Struct
|
||||
C string
|
||||
E map[string]string
|
||||
}
|
||||
|
||||
var data = Struct{
|
||||
A: []Struct{{C: "value1"}, {C: "value2"}, {C: "value3"}},
|
||||
z: []Struct{{C: "value1"}, {C: "value2"}, {C: "value3"}},
|
||||
B: &Struct{B: &Struct{C: "terminal"}, D: &Struct{C: "terminal2"}},
|
||||
C: "initial",
|
||||
}
|
||||
var data2 = Struct{A: []Struct{
|
||||
{A: []Struct{{C: "1"}, {C: "1"}, {C: "1"}, {C: "1"}, {C: "1"}}},
|
||||
{A: []Struct{{C: "2"}, {C: "2"}, {C: "2"}, {C: "2"}, {C: "2"}}},
|
||||
}}
|
||||
|
||||
func TestValueAtPathSuccess(t *testing.T) {
|
||||
var testCases = []struct {
|
||||
expect []interface{}
|
||||
data interface{}
|
||||
path string
|
||||
}{
|
||||
{[]interface{}{"initial"}, data, "C"},
|
||||
{[]interface{}{"value1"}, data, "A[0].C"},
|
||||
{[]interface{}{"value2"}, data, "A[1].C"},
|
||||
{[]interface{}{"value3"}, data, "A[2].C"},
|
||||
{[]interface{}{"value3"}, data, "a[2].c"},
|
||||
{[]interface{}{"value3"}, data, "A[-1].C"},
|
||||
{[]interface{}{"value1", "value2", "value3"}, data, "A[].C"},
|
||||
{[]interface{}{"terminal"}, data, "B . B . C"},
|
||||
{[]interface{}{"initial"}, data, "A.D.X || C"},
|
||||
{[]interface{}{"initial"}, data, "A[0].B || C"},
|
||||
{[]interface{}{
|
||||
Struct{A: []Struct{{C: "1"}, {C: "1"}, {C: "1"}, {C: "1"}, {C: "1"}}},
|
||||
Struct{A: []Struct{{C: "2"}, {C: "2"}, {C: "2"}, {C: "2"}, {C: "2"}}},
|
||||
}, data2, "A"},
|
||||
}
|
||||
for i, c := range testCases {
|
||||
v, err := awsutil.ValuesAtPath(c.data, c.path)
|
||||
assert.NoError(t, err, "case %d, expected no error, %s", i, c.path)
|
||||
assert.Equal(t, c.expect, v, "case %d, %s", i, c.path)
|
||||
}
|
||||
}
|
||||
|
||||
func TestValueAtPathFailure(t *testing.T) {
|
||||
var testCases = []struct {
|
||||
expect []interface{}
|
||||
errContains string
|
||||
data interface{}
|
||||
path string
|
||||
}{
|
||||
{nil, "", data, "C.x"},
|
||||
{nil, "SyntaxError: Invalid token: tDot", data, ".x"},
|
||||
{nil, "", data, "X.Y.Z"},
|
||||
{nil, "", data, "A[100].C"},
|
||||
{nil, "", data, "A[3].C"},
|
||||
{nil, "", data, "B.B.C.Z"},
|
||||
{nil, "", data, "z[-1].C"},
|
||||
{nil, "", nil, "A.B.C"},
|
||||
{[]interface{}{}, "", Struct{}, "A"},
|
||||
{nil, "", data, "A[0].B.C"},
|
||||
{nil, "", data, "D"},
|
||||
}
|
||||
|
||||
for i, c := range testCases {
|
||||
v, err := awsutil.ValuesAtPath(c.data, c.path)
|
||||
if c.errContains != "" {
|
||||
assert.Contains(t, err.Error(), c.errContains, "case %d, expected error, %s", i, c.path)
|
||||
continue
|
||||
} else {
|
||||
assert.NoError(t, err, "case %d, expected no error, %s", i, c.path)
|
||||
}
|
||||
assert.Equal(t, c.expect, v, "case %d, %s", i, c.path)
|
||||
}
|
||||
}
|
||||
|
||||
func TestSetValueAtPathSuccess(t *testing.T) {
|
||||
var s Struct
|
||||
awsutil.SetValueAtPath(&s, "C", "test1")
|
||||
awsutil.SetValueAtPath(&s, "B.B.C", "test2")
|
||||
awsutil.SetValueAtPath(&s, "B.D.C", "test3")
|
||||
assert.Equal(t, "test1", s.C)
|
||||
assert.Equal(t, "test2", s.B.B.C)
|
||||
assert.Equal(t, "test3", s.B.D.C)
|
||||
|
||||
awsutil.SetValueAtPath(&s, "B.*.C", "test0")
|
||||
assert.Equal(t, "test0", s.B.B.C)
|
||||
assert.Equal(t, "test0", s.B.D.C)
|
||||
|
||||
var s2 Struct
|
||||
awsutil.SetValueAtPath(&s2, "b.b.c", "test0")
|
||||
assert.Equal(t, "test0", s2.B.B.C)
|
||||
awsutil.SetValueAtPath(&s2, "A", []Struct{{}})
|
||||
assert.Equal(t, []Struct{{}}, s2.A)
|
||||
|
||||
str := "foo"
|
||||
|
||||
s3 := Struct{}
|
||||
awsutil.SetValueAtPath(&s3, "b.b.c", str)
|
||||
assert.Equal(t, "foo", s3.B.B.C)
|
||||
|
||||
s3 = Struct{B: &Struct{B: &Struct{C: str}}}
|
||||
awsutil.SetValueAtPath(&s3, "b.b.c", nil)
|
||||
assert.Equal(t, "", s3.B.B.C)
|
||||
|
||||
s3 = Struct{}
|
||||
awsutil.SetValueAtPath(&s3, "b.b.c", nil)
|
||||
assert.Equal(t, "", s3.B.B.C)
|
||||
|
||||
s3 = Struct{}
|
||||
awsutil.SetValueAtPath(&s3, "b.b.c", &str)
|
||||
assert.Equal(t, "foo", s3.B.B.C)
|
||||
|
||||
var s4 struct{ Name *string }
|
||||
awsutil.SetValueAtPath(&s4, "Name", str)
|
||||
assert.Equal(t, str, *s4.Name)
|
||||
|
||||
s4 = struct{ Name *string }{}
|
||||
awsutil.SetValueAtPath(&s4, "Name", nil)
|
||||
assert.Equal(t, (*string)(nil), s4.Name)
|
||||
|
||||
s4 = struct{ Name *string }{Name: &str}
|
||||
awsutil.SetValueAtPath(&s4, "Name", nil)
|
||||
assert.Equal(t, (*string)(nil), s4.Name)
|
||||
|
||||
s4 = struct{ Name *string }{}
|
||||
awsutil.SetValueAtPath(&s4, "Name", &str)
|
||||
assert.Equal(t, str, *s4.Name)
|
||||
}
|
||||
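path_value_test.go above drives awsutil.ValuesAtPath and awsutil.SetValueAtPath; the sketch below (not part of the diff) shows the path syntax against hypothetical Order/Item structs defined only for illustration:

```
package main

import (
    "fmt"

    "github.com/aws/aws-sdk-go/aws/awsutil"
)

// Item and Order are hypothetical types used only for this illustration.
type Item struct{ Name string }
type Order struct {
    ID    string
    Items []Item
}

func main() {
    order := Order{ID: "o-1", Items: []Item{{Name: "first"}, {Name: "second"}}}

    // "Items[].Name" collects the Name field of every slice element,
    // analogous to the "A[].C" case in the test table above.
    names, err := awsutil.ValuesAtPath(order, "Items[].Name")
    fmt.Println(names, err) // [first second] <nil>

    // SetValueAtPath walks (and allocates) the path before assigning.
    var updated Order
    awsutil.SetValueAtPath(&updated, "ID", "o-2")
    fmt.Println(updated.ID) // o-2
}
```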
@@ -61,12 +61,6 @@ func prettify(v reflect.Value, indent int, buf *bytes.Buffer) {

        buf.WriteString("\n" + strings.Repeat(" ", indent) + "}")
    case reflect.Slice:
        strtype := v.Type().String()
        if strtype == "[]uint8" {
            fmt.Fprintf(buf, "<binary> len %d", v.Len())
            break
        }

        nl, id, id2 := "", "", ""
        if v.Len() > 3 {
            nl, id, id2 = "\n", strings.Repeat(" ", indent), strings.Repeat(" ", indent+2)
@@ -97,10 +91,6 @@ func prettify(v reflect.Value, indent int, buf *bytes.Buffer) {

        buf.WriteString("\n" + strings.Repeat(" ", indent) + "}")
    default:
        if !v.IsValid() {
            fmt.Fprint(buf, "<invalid value>")
            return
        }
        format := "%v"
        switch v.Interface().(type) {
        case string:
120 Godeps/_workspace/src/github.com/aws/aws-sdk-go/aws/client/client.go generated vendored Normal file
@@ -0,0 +1,120 @@
|
||||
package client
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"io/ioutil"
|
||||
"net/http/httputil"
|
||||
|
||||
"github.com/aws/aws-sdk-go/aws"
|
||||
"github.com/aws/aws-sdk-go/aws/client/metadata"
|
||||
"github.com/aws/aws-sdk-go/aws/request"
|
||||
)
|
||||
|
||||
// A Config provides configuration to a service client instance.
|
||||
type Config struct {
|
||||
Config *aws.Config
|
||||
Handlers request.Handlers
|
||||
Endpoint, SigningRegion string
|
||||
}
|
||||
|
||||
// ConfigProvider provides a generic way for a service client to receive
|
||||
// the ClientConfig without circular dependencies.
|
||||
type ConfigProvider interface {
|
||||
ClientConfig(serviceName string, cfgs ...*aws.Config) Config
|
||||
}
|
||||
|
||||
// A Client implements the base client request and response handling
|
||||
// used by all service clients.
|
||||
type Client struct {
|
||||
request.Retryer
|
||||
metadata.ClientInfo
|
||||
|
||||
Config aws.Config
|
||||
Handlers request.Handlers
|
||||
}
|
||||
|
||||
// New will return a pointer to a new initialized service client.
|
||||
func New(cfg aws.Config, info metadata.ClientInfo, handlers request.Handlers, options ...func(*Client)) *Client {
|
||||
svc := &Client{
|
||||
Config: cfg,
|
||||
ClientInfo: info,
|
||||
Handlers: handlers,
|
||||
}
|
||||
|
||||
switch retryer, ok := cfg.Retryer.(request.Retryer); {
|
||||
case ok:
|
||||
svc.Retryer = retryer
|
||||
case cfg.Retryer != nil && cfg.Logger != nil:
|
||||
s := fmt.Sprintf("WARNING: %T does not implement request.Retryer; using DefaultRetryer instead", cfg.Retryer)
|
||||
cfg.Logger.Log(s)
|
||||
fallthrough
|
||||
default:
|
||||
maxRetries := aws.IntValue(cfg.MaxRetries)
|
||||
if cfg.MaxRetries == nil || maxRetries == aws.UseServiceDefaultRetries {
|
||||
maxRetries = 3
|
||||
}
|
||||
svc.Retryer = DefaultRetryer{NumMaxRetries: maxRetries}
|
||||
}
|
||||
|
||||
svc.AddDebugHandlers()
|
||||
|
||||
for _, option := range options {
|
||||
option(svc)
|
||||
}
|
||||
|
||||
return svc
|
||||
}
|
||||
|
||||
// NewRequest returns a new Request pointer for the service API
|
||||
// operation and parameters.
|
||||
func (c *Client) NewRequest(operation *request.Operation, params interface{}, data interface{}) *request.Request {
|
||||
return request.New(c.Config, c.ClientInfo, c.Handlers, c.Retryer, operation, params, data)
|
||||
}
|
||||
|
||||
// AddDebugHandlers injects debug logging handlers into the service to log request
|
||||
// debug information.
|
||||
func (c *Client) AddDebugHandlers() {
|
||||
if !c.Config.LogLevel.AtLeast(aws.LogDebug) {
|
||||
return
|
||||
}
|
||||
|
||||
c.Handlers.Send.PushFront(logRequest)
|
||||
c.Handlers.Send.PushBack(logResponse)
|
||||
}
|
||||
|
||||
const logReqMsg = `DEBUG: Request %s/%s Details:
|
||||
---[ REQUEST POST-SIGN ]-----------------------------
|
||||
%s
|
||||
-----------------------------------------------------`
|
||||
|
||||
func logRequest(r *request.Request) {
|
||||
logBody := r.Config.LogLevel.Matches(aws.LogDebugWithHTTPBody)
|
||||
dumpedBody, _ := httputil.DumpRequestOut(r.HTTPRequest, logBody)
|
||||
|
||||
if logBody {
|
||||
// Reset the request body because dumpRequest will re-wrap the r.HTTPRequest's
|
||||
// Body as a NoOpCloser and will not be reset after read by the HTTP
|
||||
// client reader.
|
||||
r.Body.Seek(r.BodyStart, 0)
|
||||
r.HTTPRequest.Body = ioutil.NopCloser(r.Body)
|
||||
}
|
||||
|
||||
r.Config.Logger.Log(fmt.Sprintf(logReqMsg, r.ClientInfo.ServiceName, r.Operation.Name, string(dumpedBody)))
|
||||
}
|
||||
|
||||
const logRespMsg = `DEBUG: Response %s/%s Details:
|
||||
---[ RESPONSE ]--------------------------------------
|
||||
%s
|
||||
-----------------------------------------------------`
|
||||
|
||||
func logResponse(r *request.Request) {
|
||||
var msg = "no reponse data"
|
||||
if r.HTTPResponse != nil {
|
||||
logBody := r.Config.LogLevel.Matches(aws.LogDebugWithHTTPBody)
|
||||
dumpedBody, _ := httputil.DumpResponse(r.HTTPResponse, logBody)
|
||||
msg = string(dumpedBody)
|
||||
} else if r.Error != nil {
|
||||
msg = r.Error.Error()
|
||||
}
|
||||
r.Config.Logger.Log(fmt.Sprintf(logRespMsg, r.ClientInfo.ServiceName, r.Operation.Name, msg))
|
||||
}
|
||||
45 Godeps/_workspace/src/github.com/aws/aws-sdk-go/aws/client/default_retryer.go generated vendored Normal file
@@ -0,0 +1,45 @@
package client

import (
    "math"
    "math/rand"
    "time"

    "github.com/aws/aws-sdk-go/aws/request"
)

// DefaultRetryer implements basic retry logic using exponential backoff for
// most services. If you want to implement custom retry logic, implement the
// request.Retryer interface or create a structure type that composes this
// struct and override the specific methods. For example, to override only
// the MaxRetries method:
//
//    type retryer struct {
//        service.DefaultRetryer
//    }
//
//    // This implementation always has 100 max retries
//    func (d retryer) MaxRetries() uint { return 100 }
type DefaultRetryer struct {
    NumMaxRetries int
}

// MaxRetries returns the number of maximum returns the service will use to make
// an individual API request.
func (d DefaultRetryer) MaxRetries() int {
    return d.NumMaxRetries
}

// RetryRules returns the delay duration before retrying this request again
func (d DefaultRetryer) RetryRules(r *request.Request) time.Duration {
    delay := int(math.Pow(2, float64(r.RetryCount))) * (rand.Intn(30) + 30)
    return time.Duration(delay) * time.Millisecond
}

// ShouldRetry returns if the request should be retried.
func (d DefaultRetryer) ShouldRetry(r *request.Request) bool {
    if r.HTTPResponse.StatusCode >= 500 {
        return true
    }
    return r.IsErrorRetryable()
}
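The DefaultRetryer doc comment above suggests composing the struct to override individual methods; below is a compilable version of that sketch (not part of the diff; the comment's `service.DefaultRetryer` and `uint` return type reflect an older package name and do not match the `int` signature in this file):

```
package main

import (
    "fmt"

    "github.com/aws/aws-sdk-go/aws/client"
)

// retryer composes client.DefaultRetryer and overrides only MaxRetries;
// RetryRules and ShouldRetry are inherited from the embedded type.
type retryer struct {
    client.DefaultRetryer
}

// MaxRetries always allows 100 attempts, regardless of NumMaxRetries.
func (d retryer) MaxRetries() int { return 100 }

func main() {
    r := retryer{DefaultRetryer: client.DefaultRetryer{NumMaxRetries: 3}}
    fmt.Println(r.MaxRetries()) // 100, the override wins over NumMaxRetries
}
```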
270 Godeps/_workspace/src/github.com/aws/aws-sdk-go/aws/config.go generated vendored Normal file
@@ -0,0 +1,270 @@
|
||||
package aws
|
||||
|
||||
import (
|
||||
"net/http"
|
||||
"time"
|
||||
|
||||
"github.com/aws/aws-sdk-go/aws/credentials"
|
||||
)
|
||||
|
||||
// UseServiceDefaultRetries instructs the config to use the service's own default
|
||||
// number of retries. This will be the default action if Config.MaxRetries
|
||||
// is nil also.
|
||||
const UseServiceDefaultRetries = -1
|
||||
|
||||
// RequestRetryer is an alias for a type that implements the request.Retryer interface.
|
||||
type RequestRetryer interface{}
|
||||
|
||||
// A Config provides service configuration for service clients. By default,
|
||||
// all clients will use the {defaults.DefaultConfig} structure.
|
||||
type Config struct {
|
||||
// The credentials object to use when signing requests. Defaults to
|
||||
// a chain of credential providers to search for credentials in environment
|
||||
// variables, shared credential file, and EC2 Instance Roles.
|
||||
Credentials *credentials.Credentials
|
||||
|
||||
// An optional endpoint URL (hostname only or fully qualified URI)
|
||||
// that overrides the default generated endpoint for a client. Set this
|
||||
// to `""` to use the default generated endpoint.
|
||||
//
|
||||
// @note You must still provide a `Region` value when specifying an
|
||||
// endpoint for a client.
|
||||
Endpoint *string
|
||||
|
||||
// The region to send requests to. This parameter is required and must
|
||||
// be configured globally or on a per-client basis unless otherwise
|
||||
// noted. A full list of regions is found in the "Regions and Endpoints"
|
||||
// document.
|
||||
//
|
||||
// @see http://docs.aws.amazon.com/general/latest/gr/rande.html
|
||||
// AWS Regions and Endpoints
|
||||
Region *string
|
||||
|
||||
// Set this to `true` to disable SSL when sending requests. Defaults
|
||||
// to `false`.
|
||||
DisableSSL *bool
|
||||
|
||||
// The HTTP client to use when sending requests. Defaults to
|
||||
// `http.DefaultClient`.
|
||||
HTTPClient *http.Client
|
||||
|
||||
// An integer value representing the logging level. The default log level
|
||||
// is zero (LogOff), which represents no logging. To enable logging set
|
||||
// to a LogLevel Value.
|
||||
LogLevel *LogLevelType
|
||||
|
||||
// The logger writer interface to write logging messages to. Defaults to
|
||||
// standard out.
|
||||
Logger Logger
|
||||
|
||||
// The maximum number of times that a request will be retried for failures.
|
||||
// Defaults to -1, which defers the max retry setting to the service specific
|
||||
// configuration.
|
||||
MaxRetries *int
|
||||
|
||||
// Retryer guides how HTTP requests should be retried in case of recoverable failures.
|
||||
//
|
||||
// When nil or the value does not implement the request.Retryer interface,
|
||||
// the request.DefaultRetryer will be used.
|
||||
//
|
||||
// When both Retryer and MaxRetries are non-nil, the former is used and
|
||||
// the latter ignored.
|
||||
//
|
||||
// To set the Retryer field in a type-safe manner and with chaining, use
|
||||
// the request.WithRetryer helper function:
|
||||
//
|
||||
// cfg := request.WithRetryer(aws.NewConfig(), myRetryer)
|
||||
//
|
||||
Retryer RequestRetryer
|
||||
|
||||
// Disables semantic parameter validation, which validates input for missing
|
||||
// required fields and/or other semantic request input errors.
|
||||
DisableParamValidation *bool
|
||||
|
||||
// Disables the computation of request and response checksums, e.g.,
|
||||
// CRC32 checksums in Amazon DynamoDB.
|
||||
DisableComputeChecksums *bool
|
||||
|
||||
// Set this to `true` to force the request to use path-style addressing,
|
||||
// i.e., `http://s3.amazonaws.com/BUCKET/KEY`. By default, the S3 client will
|
||||
// use virtual hosted bucket addressing when possible
|
||||
// (`http://BUCKET.s3.amazonaws.com/KEY`).
|
||||
//
|
||||
// @note This configuration option is specific to the Amazon S3 service.
|
||||
// @see http://docs.aws.amazon.com/AmazonS3/latest/dev/VirtualHosting.html
|
||||
// Amazon S3: Virtual Hosting of Buckets
|
||||
S3ForcePathStyle *bool
|
||||
|
||||
SleepDelay func(time.Duration)
|
||||
}
|
||||
|
||||
// NewConfig returns a new Config pointer that can be chained with builder methods to
|
||||
// set multiple configuration values inline without using pointers.
|
||||
//
|
||||
// svc := s3.New(aws.NewConfig().WithRegion("us-west-2").WithMaxRetries(10))
|
||||
//
|
||||
func NewConfig() *Config {
|
||||
return &Config{}
|
||||
}
|
||||
|
||||
// WithCredentials sets a config Credentials value returning a Config pointer
|
||||
// for chaining.
|
||||
func (c *Config) WithCredentials(creds *credentials.Credentials) *Config {
|
||||
c.Credentials = creds
|
||||
return c
|
||||
}
|
||||
|
||||
// WithEndpoint sets a config Endpoint value returning a Config pointer for
|
||||
// chaining.
|
||||
func (c *Config) WithEndpoint(endpoint string) *Config {
|
||||
c.Endpoint = &endpoint
|
||||
return c
|
||||
}
|
||||
|
||||
// WithRegion sets a config Region value returning a Config pointer for
|
||||
// chaining.
|
||||
func (c *Config) WithRegion(region string) *Config {
|
||||
c.Region = ®ion
|
||||
return c
|
||||
}
|
||||
|
||||
// WithDisableSSL sets a config DisableSSL value returning a Config pointer
|
||||
// for chaining.
|
||||
func (c *Config) WithDisableSSL(disable bool) *Config {
|
||||
c.DisableSSL = &disable
|
||||
return c
|
||||
}
|
||||
|
||||
// WithHTTPClient sets a config HTTPClient value returning a Config pointer
|
||||
// for chaining.
|
||||
func (c *Config) WithHTTPClient(client *http.Client) *Config {
|
||||
c.HTTPClient = client
|
||||
return c
|
||||
}
|
||||
|
||||
// WithMaxRetries sets a config MaxRetries value returning a Config pointer
|
||||
// for chaining.
|
||||
func (c *Config) WithMaxRetries(max int) *Config {
|
||||
c.MaxRetries = &max
|
||||
return c
|
||||
}
|
||||
|
||||
// WithDisableParamValidation sets a config DisableParamValidation value
|
||||
// returning a Config pointer for chaining.
|
||||
func (c *Config) WithDisableParamValidation(disable bool) *Config {
|
||||
c.DisableParamValidation = &disable
|
||||
return c
|
||||
}
|
||||
|
||||
// WithDisableComputeChecksums sets a config DisableComputeChecksums value
|
||||
// returning a Config pointer for chaining.
|
||||
func (c *Config) WithDisableComputeChecksums(disable bool) *Config {
|
||||
c.DisableComputeChecksums = &disable
|
||||
return c
|
||||
}
|
||||
|
||||
// WithLogLevel sets a config LogLevel value returning a Config pointer for
|
||||
// chaining.
|
||||
func (c *Config) WithLogLevel(level LogLevelType) *Config {
|
||||
c.LogLevel = &level
|
||||
return c
|
||||
}
|
||||
|
||||
// WithLogger sets a config Logger value returning a Config pointer for
|
||||
// chaining.
|
||||
func (c *Config) WithLogger(logger Logger) *Config {
|
||||
c.Logger = logger
|
||||
return c
|
||||
}
|
||||
|
||||
// WithS3ForcePathStyle sets a config S3ForcePathStyle value returning a Config
|
||||
// pointer for chaining.
|
||||
func (c *Config) WithS3ForcePathStyle(force bool) *Config {
|
||||
c.S3ForcePathStyle = &force
|
||||
return c
|
||||
}
|
||||
|
||||
// WithSleepDelay overrides the function used to sleep while waiting for the
|
||||
// next retry. Defaults to time.Sleep.
|
||||
func (c *Config) WithSleepDelay(fn func(time.Duration)) *Config {
|
||||
c.SleepDelay = fn
|
||||
return c
|
||||
}
|
||||
|
||||
// MergeIn merges the passed in configs into the existing config object.
|
||||
func (c *Config) MergeIn(cfgs ...*Config) {
|
||||
for _, other := range cfgs {
|
||||
mergeInConfig(c, other)
|
||||
}
|
||||
}
|
||||
|
||||
func mergeInConfig(dst *Config, other *Config) {
|
||||
if other == nil {
|
||||
return
|
||||
}
|
||||
|
||||
if other.Credentials != nil {
|
||||
dst.Credentials = other.Credentials
|
||||
}
|
||||
|
||||
if other.Endpoint != nil {
|
||||
dst.Endpoint = other.Endpoint
|
||||
}
|
||||
|
||||
if other.Region != nil {
|
||||
dst.Region = other.Region
|
||||
}
|
||||
|
||||
if other.DisableSSL != nil {
|
||||
dst.DisableSSL = other.DisableSSL
|
||||
}
|
||||
|
||||
if other.HTTPClient != nil {
|
||||
dst.HTTPClient = other.HTTPClient
|
||||
}
|
||||
|
||||
if other.LogLevel != nil {
|
||||
dst.LogLevel = other.LogLevel
|
||||
}
|
||||
|
||||
if other.Logger != nil {
|
||||
dst.Logger = other.Logger
|
||||
}
|
||||
|
||||
if other.MaxRetries != nil {
|
||||
dst.MaxRetries = other.MaxRetries
|
||||
}
|
||||
|
||||
if other.Retryer != nil {
|
||||
dst.Retryer = other.Retryer
|
||||
}
|
||||
|
||||
if other.DisableParamValidation != nil {
|
||||
dst.DisableParamValidation = other.DisableParamValidation
|
||||
}
|
||||
|
||||
if other.DisableComputeChecksums != nil {
|
||||
dst.DisableComputeChecksums = other.DisableComputeChecksums
|
||||
}
|
||||
|
||||
if other.S3ForcePathStyle != nil {
|
||||
dst.S3ForcePathStyle = other.S3ForcePathStyle
|
||||
}
|
||||
|
||||
if other.SleepDelay != nil {
|
||||
dst.SleepDelay = other.SleepDelay
|
||||
}
|
||||
}
|
||||
|
||||
// Copy will return a shallow copy of the Config object. If any additional
|
||||
// configurations are provided they will be merged into the new config returned.
|
||||
func (c *Config) Copy(cfgs ...*Config) *Config {
|
||||
dst := &Config{}
|
||||
dst.MergeIn(c)
|
||||
|
||||
for _, cfg := range cfgs {
|
||||
dst.MergeIn(cfg)
|
||||
}
|
||||
|
||||
return dst
|
||||
}
|
||||
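config.go above adds the builder-style With* helpers plus MergeIn and Copy; a minimal sketch (not part of the diff) of chaining and merge precedence:

```
package main

import (
    "fmt"

    "github.com/aws/aws-sdk-go/aws"
)

func main() {
    // Builder-style chaining avoids taking pointers to literals by hand.
    base := aws.NewConfig().WithRegion("us-east-1").WithMaxRetries(3)

    // Copy returns a new Config; MergeIn overlays only the non-nil fields,
    // so the region below wins while MaxRetries stays at 3.
    override := aws.NewConfig().WithRegion("us-west-2")
    merged := base.Copy(override)

    fmt.Println(*merged.Region, *merged.MaxRetries) // us-west-2 3
}
```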
86 Godeps/_workspace/src/github.com/aws/aws-sdk-go/aws/config_test.go generated vendored Normal file
@@ -0,0 +1,86 @@
|
||||
package aws
|
||||
|
||||
import (
|
||||
"net/http"
|
||||
"reflect"
|
||||
"testing"
|
||||
|
||||
"github.com/aws/aws-sdk-go/aws/credentials"
|
||||
)
|
||||
|
||||
var testCredentials = credentials.NewStaticCredentials("AKID", "SECRET", "SESSION")
|
||||
|
||||
var copyTestConfig = Config{
|
||||
Credentials: testCredentials,
|
||||
Endpoint: String("CopyTestEndpoint"),
|
||||
Region: String("COPY_TEST_AWS_REGION"),
|
||||
DisableSSL: Bool(true),
|
||||
HTTPClient: http.DefaultClient,
|
||||
LogLevel: LogLevel(LogDebug),
|
||||
Logger: NewDefaultLogger(),
|
||||
MaxRetries: Int(3),
|
||||
DisableParamValidation: Bool(true),
|
||||
DisableComputeChecksums: Bool(true),
|
||||
S3ForcePathStyle: Bool(true),
|
||||
}
|
||||
|
||||
func TestCopy(t *testing.T) {
|
||||
want := copyTestConfig
|
||||
got := copyTestConfig.Copy()
|
||||
if !reflect.DeepEqual(*got, want) {
|
||||
t.Errorf("Copy() = %+v", got)
|
||||
t.Errorf(" want %+v", want)
|
||||
}
|
||||
|
||||
got.Region = String("other")
|
||||
if got.Region == want.Region {
|
||||
t.Errorf("Expect setting copy values not not reflect in source")
|
||||
}
|
||||
}
|
||||
|
||||
func TestCopyReturnsNewInstance(t *testing.T) {
|
||||
want := copyTestConfig
|
||||
got := copyTestConfig.Copy()
|
||||
if got == &want {
|
||||
t.Errorf("Copy() = %p; want different instance as source %p", got, &want)
|
||||
}
|
||||
}
|
||||
|
||||
var mergeTestZeroValueConfig = Config{}
|
||||
|
||||
var mergeTestConfig = Config{
|
||||
Credentials: testCredentials,
|
||||
Endpoint: String("MergeTestEndpoint"),
|
||||
Region: String("MERGE_TEST_AWS_REGION"),
|
||||
DisableSSL: Bool(true),
|
||||
HTTPClient: http.DefaultClient,
|
||||
LogLevel: LogLevel(LogDebug),
|
||||
Logger: NewDefaultLogger(),
|
||||
MaxRetries: Int(10),
|
||||
DisableParamValidation: Bool(true),
|
||||
DisableComputeChecksums: Bool(true),
|
||||
S3ForcePathStyle: Bool(true),
|
||||
}
|
||||
|
||||
var mergeTests = []struct {
|
||||
cfg *Config
|
||||
in *Config
|
||||
want *Config
|
||||
}{
|
||||
{&Config{}, nil, &Config{}},
|
||||
{&Config{}, &mergeTestZeroValueConfig, &Config{}},
|
||||
{&Config{}, &mergeTestConfig, &mergeTestConfig},
|
||||
}
|
||||
|
||||
func TestMerge(t *testing.T) {
|
||||
for i, tt := range mergeTests {
|
||||
got := tt.cfg.Copy()
|
||||
got.MergeIn(tt.in)
|
||||
if !reflect.DeepEqual(got, tt.want) {
|
||||
t.Errorf("Config %d %+v", i, tt.cfg)
|
||||
t.Errorf(" Merge(%+v)", tt.in)
|
||||
t.Errorf(" got %+v", got)
|
||||
t.Errorf(" want %+v", tt.want)
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -2,7 +2,7 @@ package aws

import "time"

// String returns a pointer to the string value passed in.
// String returns a pointer to of the string value passed in.
func String(v string) *string {
    return &v
}
@@ -61,7 +61,7 @@ func StringValueMap(src map[string]*string) map[string]string {
    return dst
}

// Bool returns a pointer to the bool value passed in.
// Bool returns a pointer to of the bool value passed in.
func Bool(v bool) *bool {
    return &v
}
@@ -120,7 +120,7 @@ func BoolValueMap(src map[string]*bool) map[string]bool {
    return dst
}

// Int returns a pointer to the int value passed in.
// Int returns a pointer to of the int value passed in.
func Int(v int) *int {
    return &v
}
@@ -179,7 +179,7 @@ func IntValueMap(src map[string]*int) map[string]int {
    return dst
}

// Int64 returns a pointer to the int64 value passed in.
// Int64 returns a pointer to of the int64 value passed in.
func Int64(v int64) *int64 {
    return &v
}
@@ -238,7 +238,7 @@ func Int64ValueMap(src map[string]*int64) map[string]int64 {
    return dst
}

// Float64 returns a pointer to the float64 value passed in.
// Float64 returns a pointer to of the float64 value passed in.
func Float64(v float64) *float64 {
    return &v
}
@@ -297,7 +297,7 @@ func Float64ValueMap(src map[string]*float64) map[string]float64 {
    return dst
}

// Time returns a pointer to the time.Time value passed in.
// Time returns a pointer to of the time.Time value passed in.
func Time(v time.Time) *time.Time {
    return &v
}
@@ -311,36 +311,6 @@ func TimeValue(v *time.Time) time.Time {
    return time.Time{}
}

// SecondsTimeValue converts an int64 pointer to a time.Time value
// representing seconds since Epoch or time.Time{} if the pointer is nil.
func SecondsTimeValue(v *int64) time.Time {
    if v != nil {
        return time.Unix((*v / 1000), 0)
    }
    return time.Time{}
}

// MillisecondsTimeValue converts an int64 pointer to a time.Time value
// representing milliseconds sinch Epoch or time.Time{} if the pointer is nil.
func MillisecondsTimeValue(v *int64) time.Time {
    if v != nil {
        return time.Unix(0, (*v * 1000000))
    }
    return time.Time{}
}

// TimeUnixMilli returns a Unix timestamp in milliseconds from "January 1, 1970 UTC".
// The result is undefined if the Unix time cannot be represented by an int64.
// Which includes calling TimeUnixMilli on a zero Time is undefined.
//
// This utility is useful for service API's such as CloudWatch Logs which require
// their unix time values to be in milliseconds.
//
// See Go stdlib https://golang.org/pkg/time/#Time.UnixNano for more information.
func TimeUnixMilli(t time.Time) int64 {
    return t.UnixNano() / int64(time.Millisecond/time.Nanosecond)
}

// TimeSlice converts a slice of time.Time values into a slice of
// time.Time pointers
func TimeSlice(src []time.Time) []*time.Time {
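The hunks above touch the pointer-conversion helpers (String, Bool, Int, Int64, Float64, Time) and the millisecond time helpers; a short illustrative sketch (not part of the diff) of why they exist:

```
package main

import (
    "fmt"
    "time"

    "github.com/aws/aws-sdk-go/aws"
)

func main() {
    // Most SDK request structs take pointers, so these helpers turn
    // literals into pointers without temporary variables.
    name := aws.String("my-bucket")
    max := aws.Int64(100)
    fmt.Println(*name, *max)

    // TimeUnixMilli converts a time.Time into the millisecond Unix
    // timestamps expected by services such as CloudWatch Logs.
    ts := time.Date(2015, time.October, 21, 0, 0, 0, 0, time.UTC)
    fmt.Println(aws.TimeUnixMilli(ts))
}
```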
437 Godeps/_workspace/src/github.com/aws/aws-sdk-go/aws/convert_types_test.go generated vendored Normal file
@@ -0,0 +1,437 @@
|
||||
package aws
|
||||
|
||||
import (
|
||||
"testing"
|
||||
"time"
|
||||
|
||||
"github.com/stretchr/testify/assert"
|
||||
)
|
||||
|
||||
var testCasesStringSlice = [][]string{
|
||||
{"a", "b", "c", "d", "e"},
|
||||
{"a", "b", "", "", "e"},
|
||||
}
|
||||
|
||||
func TestStringSlice(t *testing.T) {
|
||||
for idx, in := range testCasesStringSlice {
|
||||
if in == nil {
|
||||
continue
|
||||
}
|
||||
out := StringSlice(in)
|
||||
assert.Len(t, out, len(in), "Unexpected len at idx %d", idx)
|
||||
for i := range out {
|
||||
assert.Equal(t, in[i], *(out[i]), "Unexpected value at idx %d", idx)
|
||||
}
|
||||
|
||||
out2 := StringValueSlice(out)
|
||||
assert.Len(t, out2, len(in), "Unexpected len at idx %d", idx)
|
||||
assert.Equal(t, in, out2, "Unexpected value at idx %d", idx)
|
||||
}
|
||||
}
|
||||
|
||||
var testCasesStringValueSlice = [][]*string{
|
||||
{String("a"), String("b"), nil, String("c")},
|
||||
}
|
||||
|
||||
func TestStringValueSlice(t *testing.T) {
|
||||
for idx, in := range testCasesStringValueSlice {
|
||||
if in == nil {
|
||||
continue
|
||||
}
|
||||
out := StringValueSlice(in)
|
||||
assert.Len(t, out, len(in), "Unexpected len at idx %d", idx)
|
||||
for i := range out {
|
||||
if in[i] == nil {
|
||||
assert.Empty(t, out[i], "Unexpected value at idx %d", idx)
|
||||
} else {
|
||||
assert.Equal(t, *(in[i]), out[i], "Unexpected value at idx %d", idx)
|
||||
}
|
||||
}
|
||||
|
||||
out2 := StringSlice(out)
|
||||
assert.Len(t, out2, len(in), "Unexpected len at idx %d", idx)
|
||||
for i := range out2 {
|
||||
if in[i] == nil {
|
||||
assert.Empty(t, *(out2[i]), "Unexpected value at idx %d", idx)
|
||||
} else {
|
||||
assert.Equal(t, in[i], out2[i], "Unexpected value at idx %d", idx)
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
var testCasesStringMap = []map[string]string{
|
||||
{"a": "1", "b": "2", "c": "3"},
|
||||
}
|
||||
|
||||
func TestStringMap(t *testing.T) {
|
||||
for idx, in := range testCasesStringMap {
|
||||
if in == nil {
|
||||
continue
|
||||
}
|
||||
out := StringMap(in)
|
||||
assert.Len(t, out, len(in), "Unexpected len at idx %d", idx)
|
||||
for i := range out {
|
||||
assert.Equal(t, in[i], *(out[i]), "Unexpected value at idx %d", idx)
|
||||
}
|
||||
|
||||
out2 := StringValueMap(out)
|
||||
assert.Len(t, out2, len(in), "Unexpected len at idx %d", idx)
|
||||
assert.Equal(t, in, out2, "Unexpected value at idx %d", idx)
|
||||
}
|
||||
}
|
||||
|
||||
var testCasesBoolSlice = [][]bool{
|
||||
{true, true, false, false},
|
||||
}
|
||||
|
||||
func TestBoolSlice(t *testing.T) {
|
||||
for idx, in := range testCasesBoolSlice {
|
||||
if in == nil {
|
||||
continue
|
||||
}
|
||||
out := BoolSlice(in)
|
||||
assert.Len(t, out, len(in), "Unexpected len at idx %d", idx)
|
||||
for i := range out {
|
||||
assert.Equal(t, in[i], *(out[i]), "Unexpected value at idx %d", idx)
|
||||
}
|
||||
|
||||
out2 := BoolValueSlice(out)
|
||||
assert.Len(t, out2, len(in), "Unexpected len at idx %d", idx)
|
||||
assert.Equal(t, in, out2, "Unexpected value at idx %d", idx)
|
||||
}
|
||||
}
|
||||
|
||||
var testCasesBoolValueSlice = [][]*bool{}
|
||||
|
||||
func TestBoolValueSlice(t *testing.T) {
|
||||
for idx, in := range testCasesBoolValueSlice {
|
||||
if in == nil {
|
||||
continue
|
||||
}
|
||||
out := BoolValueSlice(in)
|
||||
assert.Len(t, out, len(in), "Unexpected len at idx %d", idx)
|
||||
for i := range out {
|
||||
if in[i] == nil {
|
||||
assert.Empty(t, out[i], "Unexpected value at idx %d", idx)
|
||||
} else {
|
||||
assert.Equal(t, *(in[i]), out[i], "Unexpected value at idx %d", idx)
|
||||
}
|
||||
}
|
||||
|
||||
out2 := BoolSlice(out)
|
||||
assert.Len(t, out2, len(in), "Unexpected len at idx %d", idx)
|
||||
for i := range out2 {
|
||||
if in[i] == nil {
|
||||
assert.Empty(t, *(out2[i]), "Unexpected value at idx %d", idx)
|
||||
} else {
|
||||
assert.Equal(t, in[i], out2[i], "Unexpected value at idx %d", idx)
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
var testCasesBoolMap = []map[string]bool{
|
||||
{"a": true, "b": false, "c": true},
|
||||
}
|
||||
|
||||
func TestBoolMap(t *testing.T) {
|
||||
for idx, in := range testCasesBoolMap {
|
||||
if in == nil {
|
||||
continue
|
||||
}
|
||||
out := BoolMap(in)
|
||||
assert.Len(t, out, len(in), "Unexpected len at idx %d", idx)
|
||||
for i := range out {
|
||||
assert.Equal(t, in[i], *(out[i]), "Unexpected value at idx %d", idx)
|
||||
}
|
||||
|
||||
out2 := BoolValueMap(out)
|
||||
assert.Len(t, out2, len(in), "Unexpected len at idx %d", idx)
|
||||
assert.Equal(t, in, out2, "Unexpected value at idx %d", idx)
|
||||
}
|
||||
}
|
||||
|
||||
var testCasesIntSlice = [][]int{
|
||||
{1, 2, 3, 4},
|
||||
}
|
||||
|
||||
func TestIntSlice(t *testing.T) {
|
||||
for idx, in := range testCasesIntSlice {
|
||||
if in == nil {
|
||||
continue
|
||||
}
|
||||
out := IntSlice(in)
|
||||
assert.Len(t, out, len(in), "Unexpected len at idx %d", idx)
|
||||
for i := range out {
|
||||
assert.Equal(t, in[i], *(out[i]), "Unexpected value at idx %d", idx)
|
||||
}
|
||||
|
||||
out2 := IntValueSlice(out)
|
||||
assert.Len(t, out2, len(in), "Unexpected len at idx %d", idx)
|
||||
assert.Equal(t, in, out2, "Unexpected value at idx %d", idx)
|
||||
}
|
||||
}
|
||||
|
||||
var testCasesIntValueSlice = [][]*int{}
|
||||
|
||||
func TestIntValueSlice(t *testing.T) {
|
||||
for idx, in := range testCasesIntValueSlice {
|
||||
if in == nil {
|
||||
continue
|
||||
}
|
||||
out := IntValueSlice(in)
|
||||
assert.Len(t, out, len(in), "Unexpected len at idx %d", idx)
|
||||
for i := range out {
|
||||
if in[i] == nil {
|
||||
assert.Empty(t, out[i], "Unexpected value at idx %d", idx)
|
||||
} else {
|
||||
assert.Equal(t, *(in[i]), out[i], "Unexpected value at idx %d", idx)
|
||||
}
|
||||
}
|
||||
|
||||
out2 := IntSlice(out)
|
||||
assert.Len(t, out2, len(in), "Unexpected len at idx %d", idx)
|
||||
for i := range out2 {
|
||||
if in[i] == nil {
|
||||
assert.Empty(t, *(out2[i]), "Unexpected value at idx %d", idx)
|
||||
} else {
|
||||
assert.Equal(t, in[i], out2[i], "Unexpected value at idx %d", idx)
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
var testCasesIntMap = []map[string]int{
|
||||
{"a": 3, "b": 2, "c": 1},
|
||||
}
|
||||
|
||||
func TestIntMap(t *testing.T) {
|
||||
for idx, in := range testCasesIntMap {
|
||||
if in == nil {
|
||||
continue
|
||||
}
|
||||
out := IntMap(in)
|
||||
assert.Len(t, out, len(in), "Unexpected len at idx %d", idx)
|
||||
for i := range out {
|
||||
assert.Equal(t, in[i], *(out[i]), "Unexpected value at idx %d", idx)
|
||||
}
|
||||
|
||||
out2 := IntValueMap(out)
|
||||
assert.Len(t, out2, len(in), "Unexpected len at idx %d", idx)
|
||||
assert.Equal(t, in, out2, "Unexpected value at idx %d", idx)
|
||||
}
|
||||
}
|
||||
|
||||
var testCasesInt64Slice = [][]int64{
|
||||
{1, 2, 3, 4},
|
||||
}
|
||||
|
||||
func TestInt64Slice(t *testing.T) {
|
||||
for idx, in := range testCasesInt64Slice {
|
||||
if in == nil {
|
||||
continue
|
||||
}
|
||||
out := Int64Slice(in)
|
||||
assert.Len(t, out, len(in), "Unexpected len at idx %d", idx)
|
||||
for i := range out {
|
||||
assert.Equal(t, in[i], *(out[i]), "Unexpected value at idx %d", idx)
|
||||
}
|
||||
|
||||
out2 := Int64ValueSlice(out)
|
||||
assert.Len(t, out2, len(in), "Unexpected len at idx %d", idx)
|
||||
assert.Equal(t, in, out2, "Unexpected value at idx %d", idx)
|
||||
}
|
||||
}
|
||||
|
||||
var testCasesInt64ValueSlice = [][]*int64{}
|
||||
|
||||
func TestInt64ValueSlice(t *testing.T) {
|
||||
for idx, in := range testCasesInt64ValueSlice {
|
||||
if in == nil {
|
||||
continue
|
||||
}
|
||||
out := Int64ValueSlice(in)
|
||||
assert.Len(t, out, len(in), "Unexpected len at idx %d", idx)
|
||||
for i := range out {
|
||||
if in[i] == nil {
|
||||
assert.Empty(t, out[i], "Unexpected value at idx %d", idx)
|
||||
} else {
|
||||
assert.Equal(t, *(in[i]), out[i], "Unexpected value at idx %d", idx)
|
||||
}
|
||||
}
|
||||
|
||||
out2 := Int64Slice(out)
|
||||
assert.Len(t, out2, len(in), "Unexpected len at idx %d", idx)
|
||||
for i := range out2 {
|
||||
if in[i] == nil {
|
||||
assert.Empty(t, *(out2[i]), "Unexpected value at idx %d", idx)
|
||||
} else {
|
||||
assert.Equal(t, in[i], out2[i], "Unexpected value at idx %d", idx)
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
var testCasesInt64Map = []map[string]int64{
|
||||
{"a": 3, "b": 2, "c": 1},
|
||||
}
|
||||
|
||||
func TestInt64Map(t *testing.T) {
|
||||
for idx, in := range testCasesInt64Map {
|
||||
if in == nil {
|
||||
continue
|
||||
}
|
||||
out := Int64Map(in)
|
||||
assert.Len(t, out, len(in), "Unexpected len at idx %d", idx)
|
||||
for i := range out {
|
||||
assert.Equal(t, in[i], *(out[i]), "Unexpected value at idx %d", idx)
|
||||
}
|
||||
|
||||
out2 := Int64ValueMap(out)
|
||||
assert.Len(t, out2, len(in), "Unexpected len at idx %d", idx)
|
||||
assert.Equal(t, in, out2, "Unexpected value at idx %d", idx)
|
||||
}
|
||||
}
|
||||
|
||||
var testCasesFloat64Slice = [][]float64{
|
||||
{1, 2, 3, 4},
|
||||
}
|
||||
|
||||
func TestFloat64Slice(t *testing.T) {
|
||||
for idx, in := range testCasesFloat64Slice {
|
||||
if in == nil {
|
||||
continue
|
||||
}
|
||||
out := Float64Slice(in)
|
||||
assert.Len(t, out, len(in), "Unexpected len at idx %d", idx)
|
||||
for i := range out {
|
||||
assert.Equal(t, in[i], *(out[i]), "Unexpected value at idx %d", idx)
|
||||
}
|
||||
|
||||
out2 := Float64ValueSlice(out)
|
||||
assert.Len(t, out2, len(in), "Unexpected len at idx %d", idx)
|
||||
assert.Equal(t, in, out2, "Unexpected value at idx %d", idx)
|
||||
}
|
||||
}
|
||||
|
||||
var testCasesFloat64ValueSlice = [][]*float64{}
|
||||
|
||||
func TestFloat64ValueSlice(t *testing.T) {
|
||||
for idx, in := range testCasesFloat64ValueSlice {
|
||||
if in == nil {
|
||||
continue
|
||||
}
|
||||
out := Float64ValueSlice(in)
|
||||
assert.Len(t, out, len(in), "Unexpected len at idx %d", idx)
|
||||
for i := range out {
|
||||
if in[i] == nil {
|
||||
assert.Empty(t, out[i], "Unexpected value at idx %d", idx)
|
||||
} else {
|
||||
assert.Equal(t, *(in[i]), out[i], "Unexpected value at idx %d", idx)
|
||||
}
|
||||
}
|
||||
|
||||
out2 := Float64Slice(out)
|
||||
assert.Len(t, out2, len(in), "Unexpected len at idx %d", idx)
|
||||
for i := range out2 {
|
||||
if in[i] == nil {
|
||||
assert.Empty(t, *(out2[i]), "Unexpected value at idx %d", idx)
|
||||
} else {
|
||||
assert.Equal(t, in[i], out2[i], "Unexpected value at idx %d", idx)
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
var testCasesFloat64Map = []map[string]float64{
|
||||
{"a": 3, "b": 2, "c": 1},
|
||||
}
|
||||
|
||||
func TestFloat64Map(t *testing.T) {
|
||||
for idx, in := range testCasesFloat64Map {
|
||||
if in == nil {
|
||||
continue
|
||||
}
|
||||
out := Float64Map(in)
|
||||
assert.Len(t, out, len(in), "Unexpected len at idx %d", idx)
|
||||
for i := range out {
|
||||
assert.Equal(t, in[i], *(out[i]), "Unexpected value at idx %d", idx)
|
||||
}
|
||||
|
||||
out2 := Float64ValueMap(out)
|
||||
assert.Len(t, out2, len(in), "Unexpected len at idx %d", idx)
|
||||
assert.Equal(t, in, out2, "Unexpected value at idx %d", idx)
|
||||
}
|
||||
}
|
||||
|
||||
var testCasesTimeSlice = [][]time.Time{
|
||||
{time.Now(), time.Now().AddDate(100, 0, 0)},
|
||||
}
|
||||
|
||||
func TestTimeSlice(t *testing.T) {
|
||||
for idx, in := range testCasesTimeSlice {
|
||||
if in == nil {
|
||||
continue
|
||||
}
|
||||
out := TimeSlice(in)
|
||||
assert.Len(t, out, len(in), "Unexpected len at idx %d", idx)
|
||||
for i := range out {
|
||||
assert.Equal(t, in[i], *(out[i]), "Unexpected value at idx %d", idx)
|
||||
}
|
||||
|
||||
out2 := TimeValueSlice(out)
|
||||
assert.Len(t, out2, len(in), "Unexpected len at idx %d", idx)
|
||||
assert.Equal(t, in, out2, "Unexpected value at idx %d", idx)
|
||||
}
|
||||
}
|
||||
|
||||
var testCasesTimeValueSlice = [][]*time.Time{}
|
||||
|
||||
func TestTimeValueSlice(t *testing.T) {
|
||||
for idx, in := range testCasesTimeValueSlice {
|
||||
if in == nil {
|
||||
continue
|
||||
}
|
||||
out := TimeValueSlice(in)
|
||||
assert.Len(t, out, len(in), "Unexpected len at idx %d", idx)
|
||||
for i := range out {
|
||||
if in[i] == nil {
|
||||
assert.Empty(t, out[i], "Unexpected value at idx %d", idx)
|
||||
} else {
|
||||
assert.Equal(t, *(in[i]), out[i], "Unexpected value at idx %d", idx)
|
||||
}
|
||||
}
|
||||
|
||||
out2 := TimeSlice(out)
|
||||
assert.Len(t, out2, len(in), "Unexpected len at idx %d", idx)
|
||||
for i := range out2 {
|
||||
if in[i] == nil {
|
||||
assert.Empty(t, *(out2[i]), "Unexpected value at idx %d", idx)
|
||||
} else {
|
||||
assert.Equal(t, in[i], out2[i], "Unexpected value at idx %d", idx)
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
var testCasesTimeMap = []map[string]time.Time{
|
||||
{"a": time.Now().AddDate(-100, 0, 0), "b": time.Now()},
|
||||
}
|
||||
|
||||
func TestTimeMap(t *testing.T) {
|
||||
for idx, in := range testCasesTimeMap {
|
||||
if in == nil {
|
||||
continue
|
||||
}
|
||||
out := TimeMap(in)
|
||||
assert.Len(t, out, len(in), "Unexpected len at idx %d", idx)
|
||||
for i := range out {
|
||||
assert.Equal(t, in[i], *(out[i]), "Unexpected value at idx %d", idx)
|
||||
}
|
||||
|
||||
out2 := TimeValueMap(out)
|
||||
assert.Len(t, out2, len(in), "Unexpected len at idx %d", idx)
|
||||
assert.Equal(t, in, out2, "Unexpected value at idx %d", idx)
|
||||
}
|
||||
}
|
||||
Some files were not shown because too many files have changed in this diff.