Merge branch 'master' into filter_reject_require_v1

ImreSamu 2017-06-03 02:49:45 +02:00
commit 5ed8331937
73 changed files with 4248 additions and 1976 deletions

View File

@ -152,7 +152,7 @@ Then you need to enable GO15VENDOREXPERIMENT, if you are using Go 1.5. You can s
Get the code and install Imposm 3:
go get github.com/omniscale/imposm3
go install github.com/omniscale/imposm3
go install github.com/omniscale/imposm3/cmd/imposm3
Done. You should now have an imposm3 binary in `$GOPATH/bin`.

View File

@ -23,6 +23,7 @@ type Config struct {
ExpireTilesZoom int `json:"expiretiles_zoom"`
ReplicationUrl string `json:"replication_url"`
ReplicationInterval MinutesInterval `json:"replication_interval"`
DiffStateBefore MinutesInterval `json:"diff_state_before"`
}
type Schemas struct {
@ -57,6 +58,7 @@ type _BaseOptions struct {
ExpireTilesZoom int
ReplicationUrl string
ReplicationInterval time.Duration
DiffStateBefore time.Duration
}
func (o *_BaseOptions) updateFromConfig() error {
@ -140,6 +142,10 @@ func (o *_BaseOptions) updateFromConfig() error {
o.DiffDir = conf.DiffDir
}
}
if conf.DiffStateBefore.Duration != 0 && o.DiffStateBefore == 2*time.Hour {
o.DiffStateBefore = conf.DiffStateBefore.Duration
}
return nil
}
@ -164,7 +170,6 @@ type _ImportOptions struct {
DeployProduction bool
RevertDeploy bool
RemoveBackup bool
DiffStateBefore time.Duration
}
var BaseOptions = _BaseOptions{}
@ -221,7 +226,7 @@ func init() {
ImportFlags.BoolVar(&ImportOptions.DeployProduction, "deployproduction", false, "deploy production")
ImportFlags.BoolVar(&ImportOptions.RevertDeploy, "revertdeploy", false, "revert deploy to production")
ImportFlags.BoolVar(&ImportOptions.RemoveBackup, "removebackup", false, "remove backups from deploy")
ImportFlags.DurationVar(&ImportOptions.DiffStateBefore, "diff-state-before", 2*time.Hour, "set initial diff sequence before")
ImportFlags.DurationVar(&BaseOptions.DiffStateBefore, "diff-state-before", 2*time.Hour, "set initial diff sequence before")
DiffFlags.StringVar(&BaseOptions.ExpireTilesDir, "expiretiles-dir", "", "write expire tiles into dir")
DiffFlags.IntVar(&BaseOptions.ExpireTilesZoom, "expiretiles-zoom", 14, "write expire tiles in this zoom level")
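The -diff-state-before option has moved from the import options into the shared base options, so both the flag (default 2h) and the JSON config's diff_state_before can set it; updateFromConfig only applies the config value while the flag is still at its default. A minimal, self-contained sketch of that precedence pattern (the names are illustrative, not the imposm3 API):

    package main

    import (
    	"flag"
    	"fmt"
    	"time"
    )

    const defaultStateBefore = 2 * time.Hour

    func main() {
    	stateBefore := flag.Duration("diff-state-before", defaultStateBefore, "set initial diff sequence before")
    	flag.Parse()

    	// value that would come from the JSON config file (hypothetical)
    	confStateBefore := 4 * time.Hour

    	// only apply the config value if the flag was left at its default,
    	// mirroring the updateFromConfig check above
    	if confStateBefore != 0 && *stateBefore == defaultStateBefore {
    		*stateBefore = confStateBefore
    	}
    	fmt.Println("using diff-state-before:", *stateBefore)
    }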

View File

@ -7,6 +7,7 @@ import (
"github.com/omniscale/imposm3/element"
"github.com/omniscale/imposm3/geom"
"github.com/omniscale/imposm3/mapping"
"github.com/omniscale/imposm3/mapping/config"
)
type Config struct {
@ -57,27 +58,24 @@ type Finisher interface {
type Deleter interface {
Inserter
// Delete deletes ID from tables that matched ProbeXxx
Delete(int64, interface{}) error
// DeleteElem deletes element from all tables
DeleteElem(element.OSMElem) error
Delete(int64, []mapping.Match) error
}
type Optimizer interface {
Optimize() error
}
var databases map[string]func(Config, *mapping.Mapping) (DB, error)
var databases map[string]func(Config, *config.Mapping) (DB, error)
func init() {
databases = make(map[string]func(Config, *mapping.Mapping) (DB, error))
databases = make(map[string]func(Config, *config.Mapping) (DB, error))
}
func Register(name string, f func(Config, *mapping.Mapping) (DB, error)) {
func Register(name string, f func(Config, *config.Mapping) (DB, error)) {
databases[name] = f
}
func Open(conf Config, m *mapping.Mapping) (DB, error) {
func Open(conf Config, m *config.Mapping) (DB, error) {
parts := strings.SplitN(conf.ConnectionParams, ":", 2)
connectionType := parts[0]
@ -108,7 +106,7 @@ func (n *nullDb) InsertRelationMember(element.Relation, element.Member, geom.Geo
return nil
}
func newNullDb(conf Config, m *mapping.Mapping) (DB, error) {
func newNullDb(conf Config, m *config.Mapping) (DB, error) {
return &nullDb{}, nil
}
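The database package keeps its driver-registry pattern: each backend calls Register with a constructor, and Open picks one based on the prefix of the connection string; the constructors now take *config.Mapping instead of *mapping.Mapping. A stripped-down sketch of that registry, with placeholder types standing in for the real imposm3 interfaces:

    package main

    import (
    	"errors"
    	"fmt"
    	"strings"
    )

    // placeholder stand-ins for database.Config, config.Mapping and database.DB
    type Config struct{ ConnectionParams string }
    type Mapping struct{}
    type DB interface{ Init() error }

    var databases = map[string]func(Config, *Mapping) (DB, error){}

    func Register(name string, f func(Config, *Mapping) (DB, error)) {
    	databases[name] = f
    }

    func Open(conf Config, m *Mapping) (DB, error) {
    	// "postgis://user@host/db" -> connection type "postgis"
    	parts := strings.SplitN(conf.ConnectionParams, ":", 2)
    	newFunc, ok := databases[parts[0]]
    	if !ok {
    		return nil, errors.New("unsupported database type: " + parts[0])
    	}
    	return newFunc(conf, m)
    }

    type nullDb struct{}

    func (n *nullDb) Init() error { return nil }

    func main() {
    	Register("null", func(Config, *Mapping) (DB, error) { return &nullDb{}, nil })
    	db, err := Open(Config{ConnectionParams: "null://"}, &Mapping{})
    	fmt.Println(db, err)
    }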

View File

@ -14,6 +14,7 @@ import (
"github.com/omniscale/imposm3/geom"
"github.com/omniscale/imposm3/logging"
"github.com/omniscale/imposm3/mapping"
"github.com/omniscale/imposm3/mapping/config"
)
var log = logging.NewLogger("PostGIS")
@ -509,36 +510,13 @@ func (pg *PostGIS) InsertRelationMember(rel element.Relation, m element.Member,
return nil
}
func (pg *PostGIS) Delete(id int64, matches interface{}) error {
if matches, ok := matches.([]mapping.Match); ok {
for _, match := range matches {
pg.txRouter.Delete(match.Table.Name, id)
}
if pg.updateGeneralizedTables {
for _, generalizedTable := range pg.generalizedFromMatches(matches) {
pg.txRouter.Delete(generalizedTable.Name, id)
}
}
func (pg *PostGIS) Delete(id int64, matches []mapping.Match) error {
for _, match := range matches {
pg.txRouter.Delete(match.Table.Name, id)
}
return nil
}
func (pg *PostGIS) DeleteElem(elem element.OSMElem) error {
// handle deletes of geometries that did not match in ProbeXxx.
// we have to handle multipolygon relations that took the tags of the
// main-member. those tags are not avail. during delete. just try to
// delete from each polygon/relation table.
if _, ok := elem.Tags["type"]; ok {
for _, tableSpec := range pg.Tables {
if tableSpec.GeometryType != "polygon" && tableSpec.GeometryType != "geometry" && tableSpec.GeometryType != "relation" {
continue
}
pg.txRouter.Delete(tableSpec.Name, elem.Id)
if pg.updateGeneralizedTables {
for _, genTable := range tableSpec.Generalizations {
pg.txRouter.Delete(genTable.Name, elem.Id)
}
}
if pg.updateGeneralizedTables {
for _, generalizedTable := range pg.generalizedFromMatches(matches) {
pg.txRouter.Delete(generalizedTable.Name, id)
}
}
return nil
@ -599,7 +577,7 @@ func (pg *PostGIS) Close() error {
return pg.Db.Close()
}
func New(conf database.Config, m *mapping.Mapping) (database.DB, error) {
func New(conf database.Config, m *config.Mapping) (database.DB, error) {
db := &PostGIS{}
db.Tables = make(map[string]*TableSpec)
@ -607,22 +585,37 @@ func New(conf database.Config, m *mapping.Mapping) (database.DB, error) {
db.Config = conf
if strings.HasPrefix(db.Config.ConnectionParams, "postgis://") {
db.Config.ConnectionParams = strings.Replace(
db.Config.ConnectionParams,
connStr := db.Config.ConnectionParams
// we accept postgis as an alias, replace for pq.ParseURL
if strings.HasPrefix(connStr, "postgis:") {
connStr = strings.Replace(
connStr,
"postgis", "postgres", 1,
)
}
params, err := pq.ParseURL(db.Config.ConnectionParams)
if err != nil {
return nil, err
var err error
var params string
if strings.HasPrefix(connStr, "postgres://") {
// connStr is a URL
params, err = pq.ParseURL(connStr)
if err != nil {
return nil, err
}
} else {
// connStr is already a params list (postgres: host=localhost ...)
params = strings.TrimSpace(strings.TrimPrefix(connStr, "postgres:"))
}
params = disableDefaultSsl(params)
params, db.Prefix = stripPrefixFromConnectionParams(params)
for name, table := range m.Tables {
db.Tables[name] = NewTableSpec(db, table)
db.Tables[name], err = NewTableSpec(db, table)
if err != nil {
return nil, err
}
}
for name, table := range m.GeneralizedTables {
db.GeneralizedTables[name] = NewGeneralizedTableSpec(db, table)
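New() now accepts both a URL ("postgis://user:pass@host/db") and a parameter list ("postgis: host=/tmp dbname=osm"): the "postgis" prefix is only an alias that is rewritten to "postgres", URLs are expanded with pq.ParseURL, and parameter lists are passed through after trimming the prefix. A small standalone sketch mirroring that normalization (uses the real github.com/lib/pq helper):

    package main

    import (
    	"fmt"
    	"log"
    	"strings"

    	"github.com/lib/pq"
    )

    // normalize turns an imposm3-style connection string into a libpq
    // parameter list, mirroring the logic in New() above.
    func normalize(connStr string) (string, error) {
    	// "postgis" is accepted as an alias for "postgres"
    	if strings.HasPrefix(connStr, "postgis:") {
    		connStr = strings.Replace(connStr, "postgis", "postgres", 1)
    	}
    	if strings.HasPrefix(connStr, "postgres://") {
    		// URL form: let pq expand it into a parameter list
    		return pq.ParseURL(connStr)
    	}
    	// already a parameter list: "postgres: host=/tmp dbname=osm"
    	return strings.TrimSpace(strings.TrimPrefix(connStr, "postgres:")), nil
    }

    func main() {
    	for _, c := range []string{
    		"postgis://osm:osm@localhost:5432/osm",
    		"postgis: host=/tmp dbname=osm",
    	} {
    		params, err := normalize(c)
    		if err != nil {
    			log.Fatal(err)
    		}
    		fmt.Println(params)
    	}
    }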

View File

@ -5,11 +5,13 @@ import (
"strings"
"github.com/omniscale/imposm3/mapping"
"github.com/omniscale/imposm3/mapping/config"
"github.com/pkg/errors"
)
type ColumnSpec struct {
Name string
FieldType mapping.FieldType
FieldType mapping.ColumnType
Type ColumnType
}
type TableSpec struct {
@ -124,12 +126,11 @@ func (spec *TableSpec) DeleteSQL() string {
)
}
func NewTableSpec(pg *PostGIS, t *mapping.Table) *TableSpec {
func NewTableSpec(pg *PostGIS, t *config.Table) (*TableSpec, error) {
var geomType string
switch t.Type {
case mapping.RelationMemberTable:
if mapping.TableType(t.Type) == mapping.RelationMemberTable {
geomType = "geometry"
default:
} else {
geomType = string(t.Type)
}
@ -140,23 +141,23 @@ func NewTableSpec(pg *PostGIS, t *mapping.Table) *TableSpec {
GeometryType: geomType,
Srid: pg.Config.Srid,
}
for _, field := range t.Fields {
fieldType := field.FieldType()
if fieldType == nil {
continue
for _, column := range t.Columns {
columnType, err := mapping.MakeColumnType(column)
if err != nil {
return nil, err
}
pgType, ok := pgTypes[fieldType.GoType]
pgType, ok := pgTypes[columnType.GoType]
if !ok {
log.Errorf("unhandled field type %v, using string type", fieldType)
return nil, errors.Errorf("unhandled column type %v, using string type", columnType)
pgType = pgTypes["string"]
}
col := ColumnSpec{field.Name, *fieldType, pgType}
col := ColumnSpec{column.Name, *columnType, pgType}
spec.Columns = append(spec.Columns, col)
}
return &spec
return &spec, nil
}
func NewGeneralizedTableSpec(pg *PostGIS, t *mapping.GeneralizedTable) *GeneralizedTableSpec {
func NewGeneralizedTableSpec(pg *PostGIS, t *config.GeneralizedTable) *GeneralizedTableSpec {
spec := GeneralizedTableSpec{
Name: t.Name,
FullName: pg.Prefix + t.Name,

View File

@ -38,10 +38,29 @@ To import all polygons with `tourism=zoo`, `natural=wood` or `natural=land` into
``relation_types``
~~~~~~~~~~~~~~~~~~
``relation_types`` restricts which relation types should be imported. It is a list with `type` values, e.g. ``[route, master_route]``.
For tables of type ``relation`` and ``relation_member``: only relations with a matching ``type`` value are imported. You still need to define a ``mapping``.
For tables of type ``polygon``: multi-polygons are only built for relations with a matching ``type`` value. You still need to define a ``mapping``. Defaults to ``[multipolygon, boundary, land_area]``.
.. code-block:: yaml
tables:
routes:
type: relation
relation_types: [route]
mapping:
route: [bus]
``columns``
~~~~~~~~~~~
``columns`` is a list of columns that Imposm should create for this table. Each column is a YAML object with a ``type`` and a ``name`` and optionaly ``key``, ``args`` and ``from_member``.
``columns`` is a list of columns that Imposm should create for this table. Each column is a YAML object with a ``type`` and a ``name`` and optionally ``key``, ``args`` and ``from_member``.
``name``
^^^^^^^^^
@ -349,6 +368,16 @@ To load all tags except ``created_by``, ``source``, and ``tiger:county``, ``tige
To load specific data about amenities for inclusion into an `hstore_tags` column:
.. code-block:: yaml
tags:
include: [operator, opening_hours, wheelchair, website, phone, cuisine]
.. _Areas:
Areas

View File

@ -42,21 +42,7 @@ It will also insert relations of the type ``multipolygon`` with a ``building`` t
The roles are ignored by Imposm as not all holes are correctly tagged as ``inner``. Imposm uses geometry operations to verify whether a member of a multipolygon is a hole or a separate polygon.
For compatibility, multipolygon relations without tags will use the tags from the (longest) outer way. Imposm will insert the following relation as well::
<way id="18101" version="1" timestamp="2011-11-11T00:11:11Z">
<nd ref="1001"/>
...
<nd ref="1001"/>
<tag k="building" v="yes"/>
</way>
<relation id="18901" version="1" timestamp="2011-11-11T00:11:11Z">
<member type="way" ref="18101" role="outer"/>
<member type="way" ref="18102" role="outer"/>
<tag k="type" v="multipolygon"/>
</relation>
Old-style multipolygon relations with tags on the outer way, instead of on the relation, are no longer supported.
Other relations
@ -72,6 +58,7 @@ These relations can not be mapped to `simple` linestrings or polygons as they ca
The Imposm table types ``relation`` and ``relation_member`` allow you to import all relevant data for these relations.
``relation_member``
^^^^^^^^^^^^^^^^^^^
@ -107,6 +94,7 @@ You can use the following mapping::
- key: ref
name: ref
type: string
relation_type: [route]
mapping:
route: [bus]
@ -169,6 +157,7 @@ The following mapping imports the bus route relation from above::
- name: network
key: network
type: string
relation_type: [route]
mapping:
route: [bus]

View File

@ -75,7 +75,7 @@ The second step is the writing of OpenStreetMap features into the database. It r
After the import, it creates the generalized tables and indices.
You need to tell Imposm the connection parameters of your database. The ``-connection`` option takes a URL in the format ``postgis://username:password@host:port/databasename``.
You need to tell Imposm the connection parameters of your database. The ``-connection`` option takes a URL in the format ``postgis://username:password@host:port/databasename`` or a list of parameters like ``postgis: host=/tmp dbname=osm``.
In our example:
::

View File

@ -23,3 +23,14 @@ func ExpireProjectedNodes(expireor Expireor, nodes []element.Node, srid int, clo
panic("unsupported srid")
}
}
func ExpireProjectedNode(expireor Expireor, node element.Node, srid int) {
if srid == 4326 {
expireor.Expire(node.Long, node.Lat)
} else if srid == 3857 {
long, lat := proj.MercToWgs(node.Long, node.Lat)
expireor.Expire(long, lat)
} else {
panic("unsupported srid")
}
}

View File

@ -159,7 +159,7 @@ func (tl *TileList) Flush() error {
now := time.Now().UTC()
dir := filepath.Join(tl.out, now.Format("20060102"))
err := os.MkdirAll(dir, 0755)
err := os.MkdirAll(dir, 0775)
if err != nil {
return err
}

View File

@ -322,6 +322,7 @@ func (this *Geom) Bounds() Bounds {
if geom == nil {
return NilBounds
}
defer C.GEOSGeom_destroy(geom)
extRing := C.GEOSGetExteriorRing(geom)
if extRing == nil {
return NilBounds

View File

@ -16,15 +16,12 @@ type PreparedRelation struct {
// PrepareRelation is the first step in building a (multi-)polygon of a Relation.
// It builds rings from all ways and returns an error if there are unclosed rings.
// It also merges the Relation.Tags with the Tags of the outer way.
func PrepareRelation(rel *element.Relation, srid int, maxRingGap float64) (PreparedRelation, error) {
rings, err := buildRings(rel, maxRingGap)
if err != nil {
return PreparedRelation{}, err
}
rel.Tags = relationTags(rel.Tags, rings[0].ways[0].Tags)
return PreparedRelation{rings, rel, srid}, nil
}
@ -224,29 +221,6 @@ func buildRelGeometry(g *geos.Geos, rel *element.Relation, rings []*ring) (*geos
return result, nil
}
func relationTags(relTags, wayTags element.Tags) element.Tags {
result := make(element.Tags)
for k, v := range relTags {
if k == "name" || k == "type" {
continue
}
result[k] = v
}
if len(result) == 0 {
// relation does not have tags? use way tags
for k, v := range wayTags {
result[k] = v
}
} else {
// add back name (if present)
if name, ok := relTags["name"]; ok {
result["name"] = name
}
}
return result
}
// ringIsHole returns true if rings[idx] is a hole, False if it is a
// shell (also if hole in a hole, etc)
func ringIsHole(rings []*ring, idx int) bool {

View File

@ -94,7 +94,7 @@ func TestMultiPolygonWithHoleAndRelName(t *testing.T) {
})
rel := element.Relation{
OSMElem: element.OSMElem{Id: 1, Tags: element.Tags{"name": "rel"}}}
OSMElem: element.OSMElem{Id: 1, Tags: element.Tags{"name": "Relation", "natural": "forest", "type": "multipolygon"}}}
rel.Members = []element.Member{
{Id: 1, Type: element.WAY, Role: "outer", Way: &w1},
{Id: 2, Type: element.WAY, Role: "inner", Way: &w2},
@ -107,10 +107,11 @@ func TestMultiPolygonWithHoleAndRelName(t *testing.T) {
g := geos.NewGeos()
defer g.Finish()
if len(rel.Tags) != 2 {
if len(rel.Tags) != 3 {
t.Fatal("wrong rel tags", rel.Tags)
}
if rel.Tags["natural"] != "forest" || rel.Tags["name"] != "Blackwood" {
// name from way is ignored
if rel.Tags["natural"] != "forest" || rel.Tags["name"] != "Relation" {
t.Fatal("wrong rel tags", rel.Tags)
}
@ -147,7 +148,7 @@ func TestMultiPolygonWithMultipleHoles(t *testing.T) {
})
rel := element.Relation{
OSMElem: element.OSMElem{Id: 1, Tags: element.Tags{"landusage": "forest"}}}
OSMElem: element.OSMElem{Id: 1, Tags: element.Tags{"landusage": "forest", "type": "multipolygon"}}}
rel.Members = []element.Member{
{Id: 1, Type: element.WAY, Role: "outer", Way: &w1},
{Id: 2, Type: element.WAY, Role: "inner", Way: &w2},
@ -161,7 +162,7 @@ func TestMultiPolygonWithMultipleHoles(t *testing.T) {
g := geos.NewGeos()
defer g.Finish()
if len(rel.Tags) != 1 {
if len(rel.Tags) != 2 {
t.Fatal("wrong rel tags", rel.Tags)
}
if rel.Tags["landusage"] != "forest" {
@ -214,7 +215,7 @@ func TestMultiPolygonWithNeastedHoles(t *testing.T) {
{1, 4, 4},
})
rel := element.Relation{OSMElem: element.OSMElem{Id: 1}}
rel := element.Relation{OSMElem: element.OSMElem{Id: 1, Tags: element.Tags{"landusage": "forest", "type": "multipolygon"}}}
rel.Members = []element.Member{
{Id: 1, Type: element.WAY, Role: "outer", Way: &w1},
{Id: 2, Type: element.WAY, Role: "inner", Way: &w2},
@ -230,7 +231,7 @@ func TestMultiPolygonWithNeastedHoles(t *testing.T) {
g := geos.NewGeos()
defer g.Finish()
if len(rel.Tags) != 1 {
if len(rel.Tags) != 2 {
t.Fatal("wrong rel tags", rel.Tags)
}
if rel.Tags["landusage"] != "forest" {
@ -261,7 +262,7 @@ func TestPolygonFromThreeWays(t *testing.T) {
{1, 0, 0},
})
rel := element.Relation{OSMElem: element.OSMElem{Id: 1}}
rel := element.Relation{OSMElem: element.OSMElem{Id: 1, Tags: element.Tags{"landusage": "forest", "type": "multipolygon"}}}
rel.Members = []element.Member{
{Id: 1, Type: element.WAY, Role: "outer", Way: &w1},
{Id: 2, Type: element.WAY, Role: "inner", Way: &w2},
@ -275,7 +276,7 @@ func TestPolygonFromThreeWays(t *testing.T) {
g := geos.NewGeos()
defer g.Finish()
if len(rel.Tags) != 1 {
if len(rel.Tags) != 2 {
t.Fatal("wrong rel tags", rel.Tags)
}
if rel.Tags["landusage"] != "forest" {

View File

@ -51,7 +51,7 @@ func Import() {
tagmapping, err := mapping.NewMapping(config.BaseOptions.MappingFile)
if err != nil {
log.Fatal("mapping file: ", err)
log.Fatal("error in mapping file: ", err)
}
var db database.DB
@ -67,7 +67,7 @@ func Import() {
ProductionSchema: config.BaseOptions.Schemas.Production,
BackupSchema: config.BaseOptions.Schemas.Backup,
}
db, err = database.Open(conf, tagmapping)
db, err = database.Open(conf, &tagmapping.Conf)
if err != nil {
log.Fatal(err)
}
@ -125,7 +125,7 @@ func Import() {
osmCache.Close()
log.StopStep(step)
if config.ImportOptions.Diff {
diffstate, err := state.FromPbf(config.ImportOptions.Read, config.ImportOptions.DiffStateBefore)
diffstate, err := state.FromPbf(config.ImportOptions.Read, config.BaseOptions.DiffStateBefore)
if err != nil {
log.Print("error parsing diff state form PBF", err)
} else if diffstate != nil {
@ -181,12 +181,12 @@ func Import() {
relations := osmCache.Relations.Iter()
relWriter := writer.NewRelationWriter(osmCache, diffCache,
tagmapping.SingleIdSpace,
tagmapping.Conf.SingleIdSpace,
relations,
db, progress,
tagmapping.PolygonMatcher(),
tagmapping.RelationMatcher(),
tagmapping.RelationMemberMatcher(),
tagmapping.PolygonMatcher,
tagmapping.RelationMatcher,
tagmapping.RelationMemberMatcher,
config.BaseOptions.Srid)
relWriter.SetLimiter(geometryLimiter)
relWriter.EnableConcurrent()
@ -196,10 +196,10 @@ func Import() {
ways := osmCache.Ways.Iter()
wayWriter := writer.NewWayWriter(osmCache, diffCache,
tagmapping.SingleIdSpace,
tagmapping.Conf.SingleIdSpace,
ways, db,
progress,
tagmapping.PolygonMatcher(), tagmapping.LineStringMatcher(),
tagmapping.PolygonMatcher, tagmapping.LineStringMatcher,
config.BaseOptions.Srid)
wayWriter.SetLimiter(geometryLimiter)
wayWriter.EnableConcurrent()
@ -210,7 +210,7 @@ func Import() {
nodes := osmCache.Nodes.Iter()
nodeWriter := writer.NewNodeWriter(osmCache, nodes, db,
progress,
tagmapping.PointMatcher(),
tagmapping.PointMatcher,
config.BaseOptions.Srid)
nodeWriter.SetLimiter(geometryLimiter)
nodeWriter.EnableConcurrent()

View File

@ -1,8 +1,6 @@
package mapping
import (
"errors"
"fmt"
"math"
"regexp"
"strconv"
@ -11,14 +9,16 @@ import (
"github.com/omniscale/imposm3/element"
"github.com/omniscale/imposm3/geom"
"github.com/omniscale/imposm3/logging"
"github.com/omniscale/imposm3/mapping/config"
"github.com/pkg/errors"
)
var log = logging.NewLogger("mapping")
var AvailableFieldTypes map[string]FieldType
var AvailableColumnTypes map[string]ColumnType
func init() {
AvailableFieldTypes = map[string]FieldType{
AvailableColumnTypes = map[string]ColumnType{
"bool": {"bool", "bool", Bool, nil, nil, false},
"boolint": {"boolint", "int8", BoolInt, nil, nil, false},
"id": {"id", "int64", Id, nil, nil, false},
@ -47,94 +47,12 @@ func init() {
type MakeValue func(string, *element.OSMElem, *geom.Geometry, Match) interface{}
type MakeMemberValue func(*element.Relation, *element.Member, Match) interface{}
type MakeMakeValue func(string, FieldType, Field) (MakeValue, error)
type MakeMakeValue func(string, ColumnType, config.Column) (MakeValue, error)
type Key string
type Value string
type FieldSpec struct {
Key Key
Type FieldType
}
func (f *FieldSpec) Value(elem *element.OSMElem, geom *geom.Geometry, match Match) interface{} {
if f.Type.Func != nil {
return f.Type.Func(elem.Tags[string(f.Key)], elem, geom, match)
}
return nil
}
func (f *FieldSpec) MemberValue(rel *element.Relation, member *element.Member, geom *geom.Geometry, match Match) interface{} {
if f.Type.Func != nil {
if f.Type.FromMember {
if member.Elem == nil {
return nil
}
return f.Type.Func(member.Elem.Tags[string(f.Key)], member.Elem, geom, match)
}
return f.Type.Func(rel.Tags[string(f.Key)], &rel.OSMElem, geom, match)
}
if f.Type.MemberFunc != nil {
return f.Type.MemberFunc(rel, member, match)
}
return nil
}
type TableFields struct {
fields []FieldSpec
}
func (t *TableFields) MakeRow(elem *element.OSMElem, geom *geom.Geometry, match Match) []interface{} {
var row []interface{}
for _, field := range t.fields {
row = append(row, field.Value(elem, geom, match))
}
return row
}
func (t *TableFields) MakeMemberRow(rel *element.Relation, member *element.Member, geom *geom.Geometry, match Match) []interface{} {
var row []interface{}
for _, field := range t.fields {
row = append(row, field.MemberValue(rel, member, geom, match))
}
return row
}
func (field *Field) FieldType() *FieldType {
if fieldType, ok := AvailableFieldTypes[field.Type]; ok {
if fieldType.MakeFunc != nil {
makeValue, err := fieldType.MakeFunc(field.Name, fieldType, *field)
if err != nil {
log.Print(err)
return nil
}
fieldType = FieldType{fieldType.Name, fieldType.GoType, makeValue, nil, nil, fieldType.FromMember}
}
fieldType.FromMember = field.FromMember
return &fieldType
}
return nil
}
func (t *Table) TableFields() *TableFields {
result := TableFields{}
for _, mappingField := range t.Fields {
field := FieldSpec{}
field.Key = mappingField.Key
fieldType := mappingField.FieldType()
if fieldType != nil {
field.Type = *fieldType
} else {
log.Warn("unhandled type: ", mappingField.Type)
}
result.fields = append(result.fields, field)
}
return &result
}
type FieldType struct {
type ColumnType struct {
Name string
GoType string
Func MakeValue
@ -216,7 +134,7 @@ func Geometry(val string, elem *element.OSMElem, geom *geom.Geometry, match Matc
return string(geom.Wkb)
}
func MakePseudoArea(fieldName string, fieldType FieldType, field Field) (MakeValue, error) {
func MakePseudoArea(columnName string, columnType ColumnType, column config.Column) (MakeValue, error) {
log.Print("warn: pseudoarea type is deprecated and will be removed. See area and webmercarea type.")
return Area, nil
}
@ -248,14 +166,14 @@ func WebmercArea(val string, elem *element.OSMElem, geom *geom.Geometry, match M
var hstoreReplacer = strings.NewReplacer("\\", "\\\\", "\"", "\\\"")
func MakeHStoreString(fieldName string, fieldType FieldType, field Field) (MakeValue, error) {
func MakeHStoreString(columnName string, columnType ColumnType, column config.Column) (MakeValue, error) {
var includeAll bool
var err error
var include map[string]int
if _, ok := field.Args["include"]; !ok {
if _, ok := column.Args["include"]; !ok {
includeAll = true
} else {
include, err = decodeEnumArg(field, "include")
include, err = decodeEnumArg(column, "include")
if err != nil {
return nil, err
}
@ -273,18 +191,18 @@ func MakeHStoreString(fieldName string, fieldType FieldType, field Field) (MakeV
return hstoreString, nil
}
func MakeWayZOrder(fieldName string, fieldType FieldType, field Field) (MakeValue, error) {
if _, ok := field.Args["ranks"]; !ok {
func MakeWayZOrder(columnName string, columnType ColumnType, column config.Column) (MakeValue, error) {
if _, ok := column.Args["ranks"]; !ok {
return DefaultWayZOrder, nil
}
ranks, err := decodeEnumArg(field, "ranks")
ranks, err := decodeEnumArg(column, "ranks")
if err != nil {
return nil, err
}
levelOffset := len(ranks)
defaultRank := 0
if val, ok := field.Args["default"].(float64); ok {
if val, ok := column.Args["default"].(float64); ok {
defaultRank = int(val)
}
@ -361,9 +279,9 @@ func DefaultWayZOrder(val string, elem *element.OSMElem, geom *geom.Geometry, ma
return z
}
func MakeZOrder(fieldName string, fieldType FieldType, field Field) (MakeValue, error) {
func MakeZOrder(columnName string, columnType ColumnType, column config.Column) (MakeValue, error) {
log.Print("warn: zorder type is deprecated and will be removed. See enumerate type.")
_rankList, ok := field.Args["ranks"]
_rankList, ok := column.Args["ranks"]
if !ok {
return nil, errors.New("missing ranks in args for zorder")
}
@ -374,7 +292,7 @@ func MakeZOrder(fieldName string, fieldType FieldType, field Field) (MakeValue,
}
var key string
_key, ok := field.Args["key"]
_key, ok := column.Args["key"]
if ok {
key, ok = _key.(string)
if !ok {
@ -408,13 +326,13 @@ func MakeZOrder(fieldName string, fieldType FieldType, field Field) (MakeValue,
return zOrder, nil
}
func MakeEnumerate(fieldName string, fieldType FieldType, field Field) (MakeValue, error) {
values, err := decodeEnumArg(field, "values")
func MakeEnumerate(columnName string, columnType ColumnType, column config.Column) (MakeValue, error) {
values, err := decodeEnumArg(column, "values")
if err != nil {
return nil, err
}
enumerate := func(val string, elem *element.OSMElem, geom *geom.Geometry, match Match) interface{} {
if field.Key != "" {
if column.Key != "" {
if r, ok := values[val]; ok {
return r
}
@ -429,22 +347,22 @@ func MakeEnumerate(fieldName string, fieldType FieldType, field Field) (MakeValu
return enumerate, nil
}
func decodeEnumArg(field Field, key string) (map[string]int, error) {
_valuesList, ok := field.Args[key]
func decodeEnumArg(column config.Column, key string) (map[string]int, error) {
_valuesList, ok := column.Args[key]
if !ok {
return nil, fmt.Errorf("missing '%v' in args for %s", key, field.Type)
return nil, errors.Errorf("missing '%v' in args for %s", key, column.Type)
}
valuesList, ok := _valuesList.([]interface{})
if !ok {
return nil, fmt.Errorf("'%v' in args for %s not a list", key, field.Type)
return nil, errors.Errorf("'%v' in args for %s not a list", key, column.Type)
}
values := make(map[string]int)
for i, value := range valuesList {
valueName, ok := value.(string)
if !ok {
return nil, fmt.Errorf("value in '%v' not a string", key)
return nil, errors.Errorf("value in '%v' not a string", key)
}
values[valueName] = i + 1
@ -452,8 +370,8 @@ func decodeEnumArg(field Field, key string) (map[string]int, error) {
return values, nil
}
func MakeSuffixReplace(fieldName string, fieldType FieldType, field Field) (MakeValue, error) {
_changes, ok := field.Args["suffixes"]
func MakeSuffixReplace(columnName string, columnType ColumnType, column config.Column) (MakeValue, error) {
_changes, ok := column.Args["suffixes"]
if !ok {
return nil, errors.New("missing suffixes in args for string_suffixreplace")
}

View File

@ -6,6 +6,7 @@ import (
"github.com/omniscale/imposm3/element"
"github.com/omniscale/imposm3/geom"
"github.com/omniscale/imposm3/geom/geos"
"github.com/omniscale/imposm3/mapping/config"
)
func TestBool(t *testing.T) {
@ -74,8 +75,8 @@ func TestZOrder(t *testing.T) {
match := Match{}
zOrder, err := MakeZOrder("z_order",
AvailableFieldTypes["z_order"],
Field{
AvailableColumnTypes["z_order"],
config.Column{
Name: "z_order",
Key: "",
Type: "z_order",
@ -113,8 +114,8 @@ func TestEnumerate_Match(t *testing.T) {
// test enumerate by matched mapping key
zOrder, err := MakeEnumerate("enumerate",
AvailableFieldTypes["enumerate"],
Field{
AvailableColumnTypes["enumerate"],
config.Column{
Name: "enumerate",
Key: "",
Type: "enumerate",
@ -148,8 +149,8 @@ func TestEnumerate_Key(t *testing.T) {
// test enumerate by key
zOrder, err := MakeEnumerate("enumerate",
AvailableFieldTypes["enumerate"],
Field{
AvailableColumnTypes["enumerate"],
config.Column{
Name: "enumerate",
Key: "fips",
Type: "enumerate",
@ -182,8 +183,8 @@ func TestEnumerate_Key(t *testing.T) {
func TestWayZOrder(t *testing.T) {
zOrder, err := MakeWayZOrder("z_order",
AvailableFieldTypes["wayzorder"],
Field{
AvailableColumnTypes["wayzorder"],
config.Column{
Name: "zorder",
Type: "wayzorder",
Args: map[string]interface{}{
@ -233,7 +234,7 @@ func TestWayZOrder(t *testing.T) {
}
}
func TestAreaFields(t *testing.T) {
func TestAreaColumn(t *testing.T) {
tests := []struct {
wkt string
expected float32
@ -277,10 +278,10 @@ func TestAreaFields(t *testing.T) {
}
func TestMakeSuffixReplace(t *testing.T) {
field := Field{
column := config.Column{
Name: "name", Key: "name", Type: "string_suffixreplace",
Args: map[string]interface{}{"suffixes": map[interface{}]interface{}{"Straße": "Str.", "straße": "str."}}}
suffixReplace, err := MakeSuffixReplace("name", FieldType{}, field)
suffixReplace, err := MakeSuffixReplace("name", ColumnType{}, column)
if err != nil {
t.Fatal(err)
@ -298,27 +299,27 @@ func TestMakeSuffixReplace(t *testing.T) {
}
func TestHstoreString(t *testing.T) {
field := Field{
column := config.Column{
Name: "tags",
Type: "hstore_tags",
}
hstoreAll, err := MakeHStoreString("tags", FieldType{}, field)
hstoreAll, err := MakeHStoreString("tags", ColumnType{}, column)
if err != nil {
t.Fatal(err)
}
field = Field{
column = config.Column{
Name: "tags",
Type: "hstore_tags",
Args: map[string]interface{}{"include": []interface{}{"key1", "key2"}},
}
hstoreInclude, err := MakeHStoreString("tags", FieldType{}, field)
hstoreInclude, err := MakeHStoreString("tags", ColumnType{}, column)
if err != nil {
t.Fatal(err)
}
for _, test := range []struct {
field MakeValue
column MakeValue
tags element.Tags
expected interface{}
}{
@ -331,7 +332,7 @@ func TestHstoreString(t *testing.T) {
{hstoreInclude, element.Tags{"key1": "value"}, `"key1"=>"value"`},
{hstoreInclude, element.Tags{"key": "value", "key2": "value"}, `"key2"=>"value"`},
} {
actual := test.field("", &element.OSMElem{Tags: test.tags}, nil, Match{})
actual := test.column("", &element.OSMElem{Tags: test.tags}, nil, Match{})
if actual.(string) != test.expected {
t.Errorf("%#v != %#v for %#v", actual, test.expected, test.tags)
}

View File

@ -1,439 +0,0 @@
package mapping
import (
"errors"
"fmt"
"io/ioutil"
"regexp"
"github.com/omniscale/imposm3/element"
"gopkg.in/yaml.v2"
)
type Field struct {
Name string `yaml:"name"`
Key Key `yaml:"key"`
Keys []Key `yaml:"keys"`
Type string `yaml:"type"`
Args map[string]interface{} `yaml:"args"`
FromMember bool `yaml:"from_member"`
}
type Table struct {
Name string
Type TableType `yaml:"type"`
Mapping KeyValues `yaml:"mapping"`
Mappings map[string]SubMapping `yaml:"mappings"`
TypeMappings TypeMappings `yaml:"type_mappings"`
Fields []*Field `yaml:"columns"` // TODO rename Fields internaly to Columns
OldFields []*Field `yaml:"fields"`
Filters *Filters `yaml:"filters"`
}
type GeneralizedTable struct {
Name string
SourceTableName string `yaml:"source"`
Tolerance float64 `yaml:"tolerance"`
SqlFilter string `yaml:"sql_filter"`
}
type Filters struct {
ExcludeTags *[][]string `yaml:"exclude_tags"`
Reject KeyValues `yaml:"reject"`
Require KeyValues `yaml:"require"`
RejectRegexp KeyRegexpValue `yaml:"reject_regexp"`
RequireRegexp KeyRegexpValue `yaml:"require_regexp"`
}
type Tables map[string]*Table
type GeneralizedTables map[string]*GeneralizedTable
type Mapping struct {
Tables Tables `yaml:"tables"`
GeneralizedTables GeneralizedTables `yaml:"generalized_tables"`
Tags Tags `yaml:"tags"`
Areas Areas `yaml:"areas"`
// SingleIdSpace mangles the overlapping node/way/relation IDs
// to be unique (nodes positive, ways negative, relations negative -1e17)
SingleIdSpace bool `yaml:"use_single_id_space"`
}
type Areas struct {
AreaTags []Key `yaml:"area_tags"`
LinearTags []Key `yaml:"linear_tags"`
}
type Tags struct {
LoadAll bool `yaml:"load_all"`
Exclude []Key `yaml:"exclude"`
Include []Key `yaml:"include"`
}
type orderedValue struct {
value Value
order int
}
type KeyValues map[Key][]orderedValue
type KeyRegexpValue map[Key]string
func (kv *KeyValues) UnmarshalYAML(unmarshal func(interface{}) error) error {
if *kv == nil {
*kv = make(map[Key][]orderedValue)
}
slice := yaml.MapSlice{}
err := unmarshal(&slice)
if err != nil {
return err
}
order := 0
for _, item := range slice {
k, ok := item.Key.(string)
if !ok {
return fmt.Errorf("mapping key '%s' not a string", k)
}
values, ok := item.Value.([]interface{})
if !ok {
return fmt.Errorf("mapping key '%s' not a string", k)
}
for _, v := range values {
if v, ok := v.(string); ok {
(*kv)[Key(k)] = append((*kv)[Key(k)], orderedValue{value: Value(v), order: order})
} else {
return fmt.Errorf("mapping value '%s' not a string", v)
}
order += 1
}
}
return nil
}
type SubMapping struct {
Mapping KeyValues
}
type TypeMappings struct {
Points KeyValues `yaml:"points"`
LineStrings KeyValues `yaml:"linestrings"`
Polygons KeyValues `yaml:"polygons"`
}
type ElementFilter func(tags element.Tags, key Key, closed bool) bool
type TagTables map[Key]map[Value][]OrderedDestTable
type DestTable struct {
Name string
SubMapping string
}
type OrderedDestTable struct {
DestTable
order int
}
type TableType string
func (tt *TableType) UnmarshalJSON(data []byte) error {
switch string(data) {
case "":
return errors.New("missing table type")
case `"point"`:
*tt = PointTable
case `"linestring"`:
*tt = LineStringTable
case `"polygon"`:
*tt = PolygonTable
case `"geometry"`:
*tt = GeometryTable
case `"relation"`:
*tt = RelationTable
case `"relation_member"`:
*tt = RelationMemberTable
default:
return errors.New("unknown type " + string(data))
}
return nil
}
const (
PolygonTable TableType = "polygon"
LineStringTable TableType = "linestring"
PointTable TableType = "point"
GeometryTable TableType = "geometry"
RelationTable TableType = "relation"
RelationMemberTable TableType = "relation_member"
)
func NewMapping(filename string) (*Mapping, error) {
f, err := ioutil.ReadFile(filename)
if err != nil {
return nil, err
}
mapping := Mapping{}
err = yaml.Unmarshal(f, &mapping)
if err != nil {
return nil, err
}
err = mapping.prepare()
if err != nil {
return nil, err
}
return &mapping, nil
}
func (t *Table) ExtraTags() map[Key]bool {
tags := make(map[Key]bool)
for _, field := range t.Fields {
if field.Key != "" {
tags[field.Key] = true
}
for _, k := range field.Keys {
tags[k] = true
}
}
return tags
}
func (m *Mapping) prepare() error {
for name, t := range m.Tables {
t.Name = name
if t.OldFields != nil {
// todo deprecate 'fields'
t.Fields = t.OldFields
}
}
for name, t := range m.GeneralizedTables {
t.Name = name
}
return nil
}
func (tt TagTables) addFromMapping(mapping KeyValues, table DestTable) {
for key, vals := range mapping {
for _, v := range vals {
vals, ok := tt[key]
tbl := OrderedDestTable{DestTable: table, order: v.order}
if ok {
vals[v.value] = append(vals[v.value], tbl)
} else {
tt[key] = make(map[Value][]OrderedDestTable)
tt[key][v.value] = append(tt[key][v.value], tbl)
}
}
}
}
func (m *Mapping) mappings(tableType TableType, mappings TagTables) {
for name, t := range m.Tables {
if t.Type != GeometryTable && t.Type != tableType {
continue
}
mappings.addFromMapping(t.Mapping, DestTable{Name: name})
for subMappingName, subMapping := range t.Mappings {
mappings.addFromMapping(subMapping.Mapping, DestTable{Name: name, SubMapping: subMappingName})
}
switch tableType {
case PointTable:
mappings.addFromMapping(t.TypeMappings.Points, DestTable{Name: name})
case LineStringTable:
mappings.addFromMapping(t.TypeMappings.LineStrings, DestTable{Name: name})
case PolygonTable:
mappings.addFromMapping(t.TypeMappings.Polygons, DestTable{Name: name})
}
}
}
func (m *Mapping) tables(tableType TableType) map[string]*TableFields {
result := make(map[string]*TableFields)
for name, t := range m.Tables {
if t.Type == tableType || t.Type == "geometry" {
result[name] = t.TableFields()
}
}
return result
}
func (m *Mapping) extraTags(tableType TableType, tags map[Key]bool) {
for _, t := range m.Tables {
if t.Type != tableType && t.Type != "geometry" {
continue
}
for key, _ := range t.ExtraTags() {
tags[key] = true
}
if t.Filters != nil && t.Filters.ExcludeTags != nil {
for _, keyVal := range *t.Filters.ExcludeTags {
tags[Key(keyVal[0])] = true
}
}
}
for _, k := range m.Tags.Include {
tags[k] = true
}
// always include area tag for closed-way handling
tags["area"] = true
}
func (m *Mapping) ElementFilters() map[string][]ElementFilter {
result := make(map[string][]ElementFilter)
var areaTags map[Key]struct{}
var linearTags map[Key]struct{}
if m.Areas.AreaTags != nil {
areaTags = make(map[Key]struct{})
for _, tag := range m.Areas.AreaTags {
areaTags[tag] = struct{}{}
}
}
if m.Areas.LinearTags != nil {
linearTags = make(map[Key]struct{})
for _, tag := range m.Areas.LinearTags {
linearTags[tag] = struct{}{}
}
}
for name, t := range m.Tables {
if t.Type == LineStringTable && areaTags != nil {
f := func(tags element.Tags, key Key, closed bool) bool {
if closed {
if tags["area"] == "yes" {
return false
}
if tags["area"] != "no" {
if _, ok := areaTags[key]; ok {
return false
}
}
}
return true
}
result[name] = append(result[name], f)
}
if t.Type == PolygonTable && linearTags != nil {
f := func(tags element.Tags, key Key, closed bool) bool {
if closed && tags["area"] == "no" {
return false
}
if tags["area"] != "yes" {
if _, ok := linearTags[key]; ok {
return false
}
}
return true
}
result[name] = append(result[name], f)
}
if t.Filters == nil {
continue
}
if t.Filters.ExcludeTags != nil {
log.Print("warn: exclude_tags filter is deprecated and will be removed. See require and reject filter.")
for _, filterKeyVal := range *t.Filters.ExcludeTags {
// Convert `exclude_tags`` filter to `reject` filter !
keyname := string(filterKeyVal[0])
vararr := []orderedValue{
{
value: Value(filterKeyVal[1]),
order: 1,
},
}
result[name] = append(result[name], makeFiltersFunction(name, false, true, string(keyname), vararr))
}
}
if t.Filters.Require != nil {
for keyname, vararr := range t.Filters.Require {
result[name] = append(result[name], makeFiltersFunction(name, true, false, string(keyname), vararr))
}
}
if t.Filters.Reject != nil {
for keyname, vararr := range t.Filters.Reject {
result[name] = append(result[name], makeFiltersFunction(name, false, true, string(keyname), vararr))
}
}
if t.Filters.RequireRegexp != nil {
for keyname, regexp := range t.Filters.RequireRegexp {
result[name] = append(result[name], makeRegexpFiltersFunction(name, true, false, string(keyname), regexp))
}
}
if t.Filters.RejectRegexp != nil {
for keyname, regexp := range t.Filters.RejectRegexp {
result[name] = append(result[name], makeRegexpFiltersFunction(name, false, true, string(keyname), regexp))
}
}
}
return result
}
func findValueInOrderedValue(v Value, list []orderedValue) bool {
for _, item := range list {
if item.value == v {
return true
}
}
return false
}
func makeRegexpFiltersFunction(tablename string, virtualTrue bool, virtualFalse bool, v_keyname string, v_regexp string) func(tags element.Tags, key Key, closed bool) bool {
// Compile regular expression, if not valid regexp --> panic !
r := regexp.MustCompile(v_regexp)
return func(tags element.Tags, key Key, closed bool) bool {
if v, ok := tags[v_keyname]; ok {
if r.MatchString(v) {
return virtualTrue
}
}
return virtualFalse
}
}
func makeFiltersFunction(tablename string, virtualTrue bool, virtualFalse bool, v_keyname string, v_vararr []orderedValue) func(tags element.Tags, key Key, closed bool) bool {
if findValueInOrderedValue("__nil__", v_vararr) { // check __nil__
log.Print("warn: Filter value '__nil__' is not supported ! (tablename:" + tablename + ")")
}
if findValueInOrderedValue("__any__", v_vararr) { // check __any__
if len(v_vararr) > 1 {
log.Print("warn: Multiple filter value with '__any__' keywords is not valid! (tablename:" + tablename + ")")
}
return func(tags element.Tags, key Key, closed bool) bool {
if _, ok := tags[v_keyname]; ok {
return virtualTrue
}
return virtualFalse
}
} else if len(v_vararr) == 1 { // IF 1 parameter THEN we can generate optimal code
return func(tags element.Tags, key Key, closed bool) bool {
if v, ok := tags[v_keyname]; ok {
if Value(v) == v_vararr[0].value {
return virtualTrue
}
}
return virtualFalse
}
} else { // > 1 parameter - less optimal code
return func(tags element.Tags, key Key, closed bool) bool {
if v, ok := tags[v_keyname]; ok {
if findValueInOrderedValue(Value(v), v_vararr) {
return virtualTrue
}
}
return virtualFalse
}
}
}
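To illustrate what the generated require/reject closures above do, here is a minimal standalone sketch: it recreates the single-value and "__any__" cases against a plain tag map (Tags here is a local stand-in, not the imposm3 element.Tags type):

    package main

    import "fmt"

    // Tags is a local stand-in for element.Tags.
    type Tags map[string]string

    // makeFilter mimics makeFiltersFunction: onMatch is returned when the
    // key/value matches, onMiss otherwise. A single "__any__" value matches
    // any value of the key.
    func makeFilter(onMatch, onMiss bool, key string, values []string) func(Tags) bool {
    	if len(values) == 1 && values[0] == "__any__" {
    		return func(tags Tags) bool {
    			if _, ok := tags[key]; ok {
    				return onMatch
    			}
    			return onMiss
    		}
    	}
    	return func(tags Tags) bool {
    		if v, ok := tags[key]; ok {
    			for _, want := range values {
    				if v == want {
    					return onMatch
    				}
    			}
    		}
    		return onMiss
    	}
    }

    func main() {
    	// require: keep only elements with access=public
    	require := makeFilter(true, false, "access", []string{"public"})
    	// reject: drop elements that have any "building" tag
    	reject := makeFilter(false, true, "building", []string{"__any__"})

    	tags := Tags{"access": "public", "building": "yes"}
    	fmt.Println(require(tags), reject(tags)) // true false
    }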

mapping/config/config.go (new file, 118 lines)
View File

@ -0,0 +1,118 @@
package config
import (
"fmt"
"gopkg.in/yaml.v2"
)
type Mapping struct {
Tables Tables `yaml:"tables"`
GeneralizedTables GeneralizedTables `yaml:"generalized_tables"`
Tags Tags `yaml:"tags"`
Areas Areas `yaml:"areas"`
// SingleIdSpace mangles the overlapping node/way/relation IDs
// to be unique (nodes positive, ways negative, relations negative -1e17)
SingleIdSpace bool `yaml:"use_single_id_space"`
}
type Column struct {
Name string `yaml:"name"`
Key Key `yaml:"key"`
Keys []Key `yaml:"keys"`
Type string `yaml:"type"`
Args map[string]interface{} `yaml:"args"`
FromMember bool `yaml:"from_member"`
}
type Tables map[string]*Table
type Table struct {
Name string
Type string `yaml:"type"`
Mapping KeyValues `yaml:"mapping"`
Mappings map[string]SubMapping `yaml:"mappings"`
TypeMappings TypeMappings `yaml:"type_mappings"`
Columns []*Column `yaml:"columns"`
OldFields []*Column `yaml:"fields"`
Filters *Filters `yaml:"filters"`
RelationTypes []string `yaml:"relation_types"`
}
type GeneralizedTables map[string]*GeneralizedTable
type GeneralizedTable struct {
Name string
SourceTableName string `yaml:"source"`
Tolerance float64 `yaml:"tolerance"`
SqlFilter string `yaml:"sql_filter"`
}
type Filters struct {
ExcludeTags *[][]string `yaml:"exclude_tags"`
Reject KeyValues `yaml:"reject"`
Require KeyValues `yaml:"require"`
RejectRegexp KeyRegexpValue `yaml:"reject_regexp"`
RequireRegexp KeyRegexpValue `yaml:"require_regexp"`
}
type Areas struct {
AreaTags []Key `yaml:"area_tags"`
LinearTags []Key `yaml:"linear_tags"`
}
type Tags struct {
LoadAll bool `yaml:"load_all"`
Exclude []Key `yaml:"exclude"`
Include []Key `yaml:"include"`
}
type Key string
type Value string
type OrderedValue struct {
Value
Order int
}
type KeyValues map[Key][]OrderedValue
type KeyRegexpValue map[Key]string
func (kv *KeyValues) UnmarshalYAML(unmarshal func(interface{}) error) error {
if *kv == nil {
*kv = make(map[Key][]OrderedValue)
}
slice := yaml.MapSlice{}
err := unmarshal(&slice)
if err != nil {
return err
}
order := 0
for _, item := range slice {
k, ok := item.Key.(string)
if !ok {
return fmt.Errorf("mapping key '%s' not a string", k)
}
values, ok := item.Value.([]interface{})
if !ok {
return fmt.Errorf("mapping key '%s' not a string", k)
}
for _, v := range values {
if v, ok := v.(string); ok {
(*kv)[Key(k)] = append((*kv)[Key(k)], OrderedValue{Value: Value(v), Order: order})
} else {
return fmt.Errorf("mapping value '%s' not a string", v)
}
order += 1
}
}
return nil
}
type SubMapping struct {
Mapping KeyValues
}
type TypeMappings struct {
Points KeyValues `yaml:"points"`
LineStrings KeyValues `yaml:"linestrings"`
Polygons KeyValues `yaml:"polygons"`
}
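The new mapping/config package only holds the plain YAML structs, so a mapping file can be decoded into them directly with gopkg.in/yaml.v2. A small sketch, assuming the structs shown above (the mapping content itself is illustrative):

    package main

    import (
    	"fmt"
    	"log"

    	"github.com/omniscale/imposm3/mapping/config"
    	"gopkg.in/yaml.v2"
    )

    const data = `
    tables:
      places:
        type: point
        columns:
          - name: osm_id
            type: id
          - name: name
            key: name
            type: string
        mapping:
          place: [city, town, village]
    `

    func main() {
    	conf := config.Mapping{}
    	if err := yaml.Unmarshal([]byte(data), &conf); err != nil {
    		log.Fatal(err)
    	}
    	for name, table := range conf.Tables {
    		fmt.Println(name, table.Type, len(table.Columns), "columns")
    	}
    }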

View File

@ -1,4 +1,14 @@
/*
Package mapping provides functions for defining and executing the database schema.
Package mapping implements the mapping and conversion between OSM elements and database tables, rows and columns.
The core logic of Imposm is accessible through the Mapping struct.
A Mapping creates filters and matchers based on the mapping configuration (.yaml or .json file).
Filters are used for the initial filtering (during -read). They remove all tags that are not needed.
Matchers map OSM elements to zero or more destination tables. Each Match result can convert an OSM element
to a row with all mapped column values.
The matching depends on the element type (node, way, relation), the element tags and the destination
table type (point, linestring, polygon, relation, relation_member).
*/
package mapping
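A rough usage sketch of this package ("mapping.yaml" is a placeholder path, the tags are illustrative): load a mapping file with NewMapping and run one of the tag filters over an element's tags; keys that are neither mapped nor listed as extra tags are removed in place, matching the behavior exercised in the filter tests further down.

    package main

    import (
    	"fmt"
    	"log"

    	"github.com/omniscale/imposm3/element"
    	"github.com/omniscale/imposm3/mapping"
    )

    func main() {
    	tagmapping, err := mapping.NewMapping("mapping.yaml")
    	if err != nil {
    		log.Fatal("error in mapping file: ", err)
    	}

    	// filter node tags in place
    	filter := tagmapping.NodeTagFilter()
    	tags := element.Tags{"name": "foo", "place": "village", "unrelated": "x"}
    	filter.Filter(&tags)
    	fmt.Println(tags) // "unrelated" is dropped
    }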

View File

@ -5,67 +5,96 @@ import (
"strings"
"github.com/omniscale/imposm3/element"
"github.com/omniscale/imposm3/mapping/config"
)
type TagFilterer interface {
Filter(tags *element.Tags)
}
func (m *Mapping) NodeTagFilter() TagFilterer {
if m.Tags.LoadAll {
return newExcludeFilter(m.Tags.Exclude)
if m.Conf.Tags.LoadAll {
return newExcludeFilter(m.Conf.Tags.Exclude)
}
mappings := make(map[Key]map[Value][]OrderedDestTable)
m.mappings("point", mappings)
mappings := make(TagTableMapping)
m.mappings(PointTable, mappings)
tags := make(map[Key]bool)
m.extraTags("point", tags)
return &TagFilter{mappings, tags}
m.extraTags(PointTable, tags)
m.extraTags(RelationMemberTable, tags)
return &tagFilter{mappings.asTagMap(), tags}
}
func (m *Mapping) WayTagFilter() TagFilterer {
if m.Tags.LoadAll {
return newExcludeFilter(m.Tags.Exclude)
if m.Conf.Tags.LoadAll {
return newExcludeFilter(m.Conf.Tags.Exclude)
}
mappings := make(map[Key]map[Value][]OrderedDestTable)
m.mappings("linestring", mappings)
m.mappings("polygon", mappings)
mappings := make(TagTableMapping)
m.mappings(LineStringTable, mappings)
m.mappings(PolygonTable, mappings)
tags := make(map[Key]bool)
m.extraTags("linestring", tags)
m.extraTags("polygon", tags)
return &TagFilter{mappings, tags}
m.extraTags(LineStringTable, tags)
m.extraTags(PolygonTable, tags)
m.extraTags(RelationMemberTable, tags)
return &tagFilter{mappings.asTagMap(), tags}
}
func (m *Mapping) RelationTagFilter() TagFilterer {
if m.Tags.LoadAll {
return newExcludeFilter(m.Tags.Exclude)
if m.Conf.Tags.LoadAll {
return newExcludeFilter(m.Conf.Tags.Exclude)
}
mappings := make(map[Key]map[Value][]OrderedDestTable)
m.mappings("linestring", mappings)
m.mappings("polygon", mappings)
mappings := make(TagTableMapping)
// do not filter out type tag for common relations
mappings["type"] = map[Value][]orderedDestTable{
"multipolygon": []orderedDestTable{},
"boundary": []orderedDestTable{},
"land_area": []orderedDestTable{},
}
m.mappings(LineStringTable, mappings)
m.mappings(PolygonTable, mappings)
m.mappings(RelationTable, mappings)
m.mappings(RelationMemberTable, mappings)
tags := make(map[Key]bool)
m.extraTags("linestring", tags)
m.extraTags("polygon", tags)
// do not filter out type tag
mappings["type"] = map[Value][]OrderedDestTable{
"multipolygon": []OrderedDestTable{},
"boundary": []OrderedDestTable{},
"land_area": []OrderedDestTable{},
}
return &RelationTagFilter{TagFilter{mappings, tags}}
m.extraTags(LineStringTable, tags)
m.extraTags(PolygonTable, tags)
m.extraTags(RelationTable, tags)
m.extraTags(RelationMemberTable, tags)
return &tagFilter{mappings.asTagMap(), tags}
}
type TagFilter struct {
mappings map[Key]map[Value][]OrderedDestTable
type tagMap map[Key]map[Value]struct{}
type tagFilter struct {
mappings tagMap
extraTags map[Key]bool
}
type RelationTagFilter struct {
TagFilter
func (f *tagFilter) Filter(tags *element.Tags) {
if tags == nil {
return
}
for k, v := range *tags {
values, ok := f.mappings[Key(k)]
if ok {
if _, ok := values["__any__"]; ok {
continue
} else if _, ok := values[Value(v)]; ok {
continue
} else if _, ok := f.extraTags[Key(k)]; !ok {
delete(*tags, k)
}
} else if _, ok := f.extraTags[Key(k)]; !ok {
delete(*tags, k)
}
}
}
type ExcludeFilter struct {
type excludeFilter struct {
keys map[Key]struct{}
matches []string
}
func newExcludeFilter(tags []Key) *ExcludeFilter {
f := ExcludeFilter{
func newExcludeFilter(tags []config.Key) *excludeFilter {
f := excludeFilter{
keys: make(map[Key]struct{}),
matches: make([]string, 0),
}
@ -73,14 +102,14 @@ func newExcludeFilter(tags []Key) *ExcludeFilter {
if strings.ContainsAny(string(t), "?*[") {
f.matches = append(f.matches, string(t))
} else {
f.keys[t] = struct{}{}
f.keys[Key(t)] = struct{}{}
}
}
return &f
}
func (f *ExcludeFilter) Filter(tags *element.Tags) bool {
for k, _ := range *tags {
func (f *excludeFilter) Filter(tags *element.Tags) {
for k := range *tags {
if _, ok := f.keys[Key(k)]; ok {
delete(*tags, k)
} else if f.matches != nil {
@ -92,83 +121,4 @@ func (f *ExcludeFilter) Filter(tags *element.Tags) bool {
}
}
}
return true
}
type TagFilterer interface {
Filter(tags *element.Tags) bool
}
func (f *TagFilter) Filter(tags *element.Tags) bool {
if tags == nil {
return false
}
foundMapping := false
for k, v := range *tags {
values, ok := f.mappings[Key(k)]
if ok {
if _, ok := values["__any__"]; ok {
foundMapping = true
continue
} else if _, ok := values[Value(v)]; ok {
foundMapping = true
continue
} else if _, ok := f.extraTags[Key(k)]; !ok {
delete(*tags, k)
}
} else if _, ok := f.extraTags[Key(k)]; !ok {
delete(*tags, k)
}
}
if foundMapping {
return true
} else {
*tags = nil
return false
}
}
func (f *RelationTagFilter) Filter(tags *element.Tags) bool {
if tags == nil {
return false
}
if t, ok := (*tags)["type"]; ok {
if t != "multipolygon" && t != "boundary" && t != "land_area" {
*tags = nil
return false
}
if t == "boundary" {
if _, ok := (*tags)["boundary"]; !ok {
// a lot of the boundary relations are not multipolygon
// only import with boundary tags (e.g. boundary=administrative)
*tags = nil
return false
}
}
} else {
*tags = nil
return false
}
tagCount := len(*tags)
f.TagFilter.Filter(tags)
// we removed tags...
if len(*tags) < tagCount {
expectedTags := 0
if _, ok := (*tags)["name"]; ok {
expectedTags += 1
}
if _, ok := (*tags)["type"]; ok {
expectedTags += 1
}
if len(*tags) == expectedTags {
// but no tags except name and type are left
// remove all, otherwise tags from longest
// way/ring would be used during MP building
*tags = nil
return false
}
}
// always return true here since we found a matching type
return true
}

View File

@ -5,6 +5,7 @@ import (
"testing"
"github.com/omniscale/imposm3/element"
"github.com/omniscale/imposm3/mapping/config"
)
var mapping *Mapping
@ -17,28 +18,29 @@ func init() {
}
}
func stringMapEquals(t *testing.T, expected, actual map[string]string) {
func stringMapEqual(expected, actual map[string]string) bool {
if len(expected) != len(actual) {
t.Fatalf("different length in %v and %v\n", expected, actual)
return false
}
for k, v := range expected {
if actualV, ok := actual[k]; ok {
if actualV != v {
t.Fatalf("%s != %s in %v and %v\n", v, actualV, expected, actual)
return false
}
} else {
t.Fatalf("%s not in %v\n", k, actual)
return false
}
}
return true
}
func matchesEqual(t *testing.T, expected []Match, actual []Match) {
func matchesEqual(expected []Match, actual []Match) bool {
expectedMatches := make(map[DestTable]Match)
actualMatches := make(map[DestTable]Match)
if len(expected) != len(actual) {
t.Fatalf("different length in %v and %v\n", expected, actual)
return false
}
for _, match := range expected {
@ -53,405 +55,293 @@ func matchesEqual(t *testing.T, expected []Match, actual []Match) {
if expectedMatch.Table != actualMatch.Table ||
expectedMatch.Key != actualMatch.Key ||
expectedMatch.Value != actualMatch.Value {
t.Fatalf("match differ %v != %v", expectedMatch, actualMatch)
return false
}
} else {
t.Fatalf("%s not in %v", name, actualMatches)
return false
}
}
return true
}
func TestTagFilterNodes(t *testing.T) {
tests := []struct {
tags element.Tags
expected element.Tags
}{
{tags: element.Tags{}, expected: element.Tags{}},
{tags: element.Tags{"name": "foo"}, expected: element.Tags{"name": "foo"}},
{tags: element.Tags{"name": "foo", "unknown": "foo"}, expected: element.Tags{"name": "foo"}},
{tags: element.Tags{"name": "foo", "place": "unknown"}, expected: element.Tags{"name": "foo"}},
{tags: element.Tags{"name": "foo", "place": "unknown", "population": "1000"}, expected: element.Tags{"name": "foo", "population": "1000"}},
{tags: element.Tags{"name": "foo", "place": "village"}, expected: element.Tags{"name": "foo", "place": "village"}},
{tags: element.Tags{"name": "foo", "place": "village", "population": "1000"}, expected: element.Tags{"name": "foo", "place": "village", "population": "1000"}},
{tags: element.Tags{"name": "foo", "place": "village", "unknown": "foo"}, expected: element.Tags{"name": "foo", "place": "village"}},
{tags: element.Tags{"name": "foo", "place": "village", "highway": "bus_stop"}, expected: element.Tags{"name": "foo", "place": "village", "highway": "bus_stop"}},
}
nodes := mapping.NodeTagFilter()
for i, test := range tests {
nodes.Filter(&test.tags)
if !stringMapEqual(test.tags, test.expected) {
t.Errorf("unexpected result for case %d: %v != %v", i+1, test.tags, test.expected)
}
}
}
func TestTagFilterNodes(t *testing.T) {
var tags element.Tags
nodes := mapping.NodeTagFilter()
tags = element.Tags{"name": "foo"}
if nodes.Filter(&tags) != false {
t.Fatal("unexpected filter response for", tags)
}
stringMapEquals(t, element.Tags{}, tags)
tags = element.Tags{"name": "foo", "unknown": "baz"}
if nodes.Filter(&tags) != false {
t.Fatal("unexpected filter response for", tags)
}
stringMapEquals(t, element.Tags{}, tags)
tags = element.Tags{"name": "foo", "place": "unknown"}
if nodes.Filter(&tags) != false {
t.Fatal("unexpected filter response for", tags)
}
stringMapEquals(t, element.Tags{}, tags)
tags = element.Tags{"name": "foo", "place": "village"}
if nodes.Filter(&tags) != true {
t.Fatal("unexpected filter response for", tags)
}
stringMapEquals(t, element.Tags{"name": "foo", "place": "village"}, tags)
tags = element.Tags{"name": "foo", "place": "village", "population": "1000"}
if nodes.Filter(&tags) != true {
t.Fatal("unexpected filter response for", tags)
}
stringMapEquals(t, element.Tags{"name": "foo", "place": "village", "population": "1000"}, tags)
tags = element.Tags{"name": "foo", "place": "village", "highway": "unknown"}
if nodes.Filter(&tags) != true {
t.Fatal("unexpected filter response for", tags)
}
stringMapEquals(t, element.Tags{"name": "foo", "place": "village"}, tags)
tags = element.Tags{"name": "foo", "place": "village", "highway": "bus_stop"}
if nodes.Filter(&tags) != true {
t.Fatal("unexpected filter response for", tags)
}
stringMapEquals(t, element.Tags{"name": "foo", "place": "village", "highway": "bus_stop"}, tags)
}
func TestTagFilterWays(t *testing.T) {
tests := []struct {
tags element.Tags
expected element.Tags
}{
{tags: element.Tags{}, expected: element.Tags{}},
{tags: element.Tags{"name": "foo"}, expected: element.Tags{"name": "foo"}},
{tags: element.Tags{"name": "foo", "unknown": "foo"}, expected: element.Tags{"name": "foo"}},
{tags: element.Tags{"name": "foo", "highway": "unknown"}, expected: element.Tags{"name": "foo"}},
{tags: element.Tags{"name": "foo", "highway": "track"}, expected: element.Tags{"name": "foo", "highway": "track"}},
// building is mapped with __any__ value
{tags: element.Tags{"name": "foo", "building": "whatever"}, expected: element.Tags{"name": "foo", "building": "whatever"}},
{tags: element.Tags{"name": "foo", "highway": "track", "unknown": "foo"}, expected: element.Tags{"name": "foo", "highway": "track"}},
{tags: element.Tags{"name": "foo", "place": "village", "highway": "track"}, expected: element.Tags{"name": "foo", "highway": "track"}},
{tags: element.Tags{"name": "foo", "highway": "track", "oneway": "yes", "tunnel": "1"}, expected: element.Tags{"name": "foo", "highway": "track", "oneway": "yes", "tunnel": "1"}},
}
ways := mapping.WayTagFilter()
for i, test := range tests {
ways.Filter(&test.tags)
if !stringMapEqual(test.tags, test.expected) {
t.Errorf("unexpected result for case %d: %v != %v", i+1, test.tags, test.expected)
}
}
}
func TestTagFilterRelations(t *testing.T) {
tests := []struct {
tags element.Tags
expected element.Tags
}{
{tags: element.Tags{}, expected: element.Tags{}},
{tags: element.Tags{"name": "foo"}, expected: element.Tags{"name": "foo"}},
{tags: element.Tags{"name": "foo", "unknown": "foo"}, expected: element.Tags{"name": "foo"}},
{tags: element.Tags{"name": "foo", "landuse": "unknown"}, expected: element.Tags{"name": "foo"}},
{tags: element.Tags{"name": "foo", "landuse": "farm"}, expected: element.Tags{"name": "foo", "landuse": "farm"}},
{tags: element.Tags{"name": "foo", "landuse": "farm", "type": "multipolygon"}, expected: element.Tags{"name": "foo", "landuse": "farm", "type": "multipolygon"}},
{tags: element.Tags{"name": "foo", "type": "multipolygon"}, expected: element.Tags{"name": "foo", "type": "multipolygon"}},
{tags: element.Tags{"name": "foo", "type": "boundary"}, expected: element.Tags{"name": "foo", "type": "boundary"}},
{tags: element.Tags{"name": "foo", "landuse": "farm", "type": "boundary"}, expected: element.Tags{"name": "foo", "landuse": "farm", "type": "boundary"}},
}
relations := mapping.RelationTagFilter()
for i, test := range tests {
relations.Filter(&test.tags)
if !stringMapEqual(test.tags, test.expected) {
t.Errorf("unexpected result for case %d: %v != %v", i+1, test.tags, test.expected)
}
}
}
func TestPointMatcher(t *testing.T) {
tests := []struct {
tags element.Tags
matches []Match
}{
{element.Tags{"unknown": "baz"}, []Match{}},
{element.Tags{"place": "unknown"}, []Match{}},
{element.Tags{"place": "city"}, []Match{{"place", "city", DestTable{Name: "places"}, nil}}},
{element.Tags{"place": "city", "highway": "unknown"}, []Match{{"place", "city", DestTable{Name: "places"}, nil}}},
{element.Tags{"place": "city", "highway": "bus_stop"}, []Match{
{"place", "city", DestTable{Name: "places"}, nil},
{"highway", "bus_stop", DestTable{Name: "transport_points"}, nil}},
},
}
elem := element.Node{}
m := mapping.PointMatcher
for i, test := range tests {
elem.Tags = test.tags
actual := m.MatchNode(&elem)
if !matchesEqual(actual, test.matches) {
t.Errorf("unexpected result for case %d: %v != %v", i+1, actual, test.matches)
}
}
}
func TestLineStringMatcher(t *testing.T) {
tests := []struct {
tags element.Tags
matches []Match
}{
{element.Tags{"unknown": "baz"}, []Match{}},
{element.Tags{"highway": "unknown"}, []Match{}},
{element.Tags{"highway": "pedestrian"},
[]Match{{"highway", "pedestrian", DestTable{Name: "roads", SubMapping: "roads"}, nil}}},
// exclude_tags area=yes
{element.Tags{"highway": "pedestrian", "area": "yes"}, []Match{}},
{element.Tags{"barrier": "hedge"},
[]Match{{"barrier", "hedge", DestTable{Name: "barrierways"}, nil}}},
{element.Tags{"barrier": "hedge", "area": "yes"}, []Match{}},
{element.Tags{"aeroway": "runway"}, []Match{}},
{element.Tags{"aeroway": "runway", "area": "no"},
[]Match{{"aeroway", "runway", DestTable{Name: "aeroways"}, nil}}},
{element.Tags{"highway": "secondary", "railway": "tram"},
[]Match{
{"highway", "secondary", DestTable{Name: "roads", SubMapping: "roads"}, nil},
{"railway", "tram", DestTable{Name: "roads", SubMapping: "railway"}, nil}},
},
{element.Tags{"highway": "footway", "landuse": "park", "barrier": "hedge"},
// landusages not a linestring table
[]Match{
{"highway", "footway", DestTable{Name: "roads", SubMapping: "roads"}, nil},
{"barrier", "hedge", DestTable{Name: "barrierways"}, nil}},
},
}
elem := element.Way{}
// fake closed way for area matching
elem.Refs = []int64{1, 2, 3, 4, 1}
if !elem.IsClosed() {
t.Fatal("way not closed")
}
m := mapping.LineStringMatcher
for i, test := range tests {
elem.Tags = test.tags
actual := m.MatchWay(&elem)
if !matchesEqual(actual, test.matches) {
t.Errorf("unexpected result for case %d: %v != %v", i+1, actual, test.matches)
}
}
}
func TestPolygonMatcher_MatchWay(t *testing.T) {
tests := []struct {
tags element.Tags
matches []Match
}{
{element.Tags{}, []Match{}},
{element.Tags{"unknown": "baz"}, []Match{}},
{element.Tags{"landuse": "unknown"}, []Match{}},
{element.Tags{"landuse": "unknown", "type": "multipolygon"}, []Match{}},
{element.Tags{"building": "yes"}, []Match{{"building", "yes", DestTable{Name: "buildings"}, nil}}},
{element.Tags{"building": "residential"}, []Match{{"building", "residential", DestTable{Name: "buildings"}, nil}}},
// line type requires area=yes
{element.Tags{"barrier": "hedge"}, []Match{}},
{element.Tags{"barrier": "hedge", "area": "yes"}, []Match{{"barrier", "hedge", DestTable{Name: "landusages"}, nil}}},
{element.Tags{"building": "shop"}, []Match{
{"building", "shop", DestTable{Name: "buildings"}, nil},
{"building", "shop", DestTable{Name: "amenity_areas"}, nil},
}},
{element.Tags{"aeroway": "apron", "landuse": "farm"}, []Match{
{"aeroway", "apron", DestTable{Name: "transport_areas"}, nil},
{"landuse", "farm", DestTable{Name: "landusages"}, nil},
}},
{element.Tags{"landuse": "farm", "highway": "secondary"}, []Match{
{"landuse", "farm", DestTable{Name: "landusages"}, nil},
}},
// highway is linear by default
{element.Tags{"highway": "footway"}, []Match{}},
{element.Tags{"highway": "footway", "area": "yes"}, []Match{
{"highway", "footway", DestTable{Name: "landusages"}, nil},
}},
{element.Tags{"boundary": "administrative", "admin_level": "8"}, []Match{{"boundary", "administrative", DestTable{Name: "admin"}, nil}}},
/*
landusages mapping has the following order,
check that XxxMatcher always uses the first
amenity:
- university
landuse:
- forest
leisure:
- park
landuse:
- park
*/
{element.Tags{"landuse": "forest", "leisure": "park"}, []Match{{"landuse", "forest", DestTable{Name: "landusages"}, nil}}},
{element.Tags{"landuse": "park", "leisure": "park"}, []Match{{"leisure", "park", DestTable{Name: "landusages"}, nil}}},
{element.Tags{"landuse": "park", "leisure": "park", "amenity": "university"}, []Match{{"amenity", "university", DestTable{Name: "landusages"}, nil}}},
}
elem := element.Way{}
// fake closed way for area matching
elem.Refs = []int64{1, 2, 3, 4, 1}
if !elem.IsClosed() {
t.Fatal("way not closed")
}
m := mapping.PolygonMatcher
for i, test := range tests {
elem.Tags = test.tags
actual := m.MatchWay(&elem)
if !matchesEqual(actual, test.matches) {
t.Errorf("unexpected result for case %d: %v != %v", i+1, actual, test.matches)
}
}
elem.Refs = nil
elem.Tags = element.Tags{"building": "yes"}
actual := m.MatchWay(&elem)
if !matchesEqual([]Match{}, actual) {
t.Error("open way matched as polygon")
}
}
func TestPolygonMatcher_MatchRelation(t *testing.T) {
// check that only relations with type=multipolygon/boundary are matched as polygon
tests := []struct {
tags element.Tags
matches []Match
}{
{element.Tags{}, []Match{}},
{element.Tags{"unknown": "baz"}, []Match{}},
{element.Tags{"landuse": "unknown"}, []Match{}},
{element.Tags{"landuse": "unknown", "type": "multipolygon"}, []Match{}},
{element.Tags{"building": "yes"}, []Match{}},
{element.Tags{"building": "yes", "type": "multipolygon"}, []Match{{"building", "yes", DestTable{Name: "buildings"}, nil}}},
{element.Tags{"building": "residential", "type": "multipolygon"}, []Match{{"building", "residential", DestTable{Name: "buildings"}, nil}}},
// line type requires area=yes
{element.Tags{"barrier": "hedge", "type": "multipolygon"}, []Match{}},
{element.Tags{"barrier": "hedge", "area": "yes", "type": "multipolygon"}, []Match{{"barrier", "hedge", DestTable{Name: "landusages"}, nil}}},
{element.Tags{"building": "shop", "type": "multipolygon"}, []Match{
{"building", "shop", DestTable{Name: "buildings"}, nil},
{"building", "shop", DestTable{Name: "amenity_areas"}, nil},
}},
{element.Tags{"aeroway": "apron", "landuse": "farm", "type": "multipolygon"}, []Match{
{"aeroway", "apron", DestTable{Name: "transport_areas"}, nil},
{"landuse", "farm", DestTable{Name: "landusages"}, nil},
}},
{element.Tags{"landuse": "farm", "highway": "secondary", "type": "multipolygon"}, []Match{
{"landuse", "farm", DestTable{Name: "landusages"}, nil},
}},
{element.Tags{"highway": "footway", "type": "multipolygon"}, []Match{}},
{element.Tags{"highway": "footway", "area": "yes", "type": "multipolygon"}, []Match{
{"highway", "footway", DestTable{Name: "landusages"}, nil},
}},
{element.Tags{"boundary": "administrative", "admin_level": "8"}, []Match{}},
{element.Tags{"boundary": "administrative", "admin_level": "8", "type": "boundary"}, []Match{{"boundary", "administrative", DestTable{Name: "admin"}, nil}}},
}
elem := element.Relation{}
m := mapping.PolygonMatcher
for i, test := range tests {
elem.Tags = test.tags
actual := m.MatchRelation(&elem)
if !matchesEqual(actual, test.matches) {
t.Errorf("unexpected result for case %d: %v != %v", i+1, actual, test.matches)
}
}
}
func TestFilterNodes(t *testing.T) {
var tags element.Tags
// test name only
tags = make(element.Tags)
tags["name"] = "foo"
points := mapping.NodeTagFilter()
if points.Filter(&tags) != false {
t.Fatal("Filter result not false")
}
if len(tags) != 0 {
t.Fatal("Filter result not empty")
}
// test name + unmapped tags
tags = make(element.Tags)
tags["name"] = "foo"
tags["boring"] = "true"
if points.Filter(&tags) != false {
t.Fatal("Filter result not false")
}
if len(tags) != 0 {
t.Fatal("Filter result not empty")
}
// test fields only, but no mapping
tags = make(element.Tags)
tags["population"] = "0"
tags["name"] = "foo"
tags["boring"] = "true"
if points.Filter(&tags) != false {
t.Fatal("Filter result true", tags)
}
if len(tags) != 0 {
t.Fatal("Filter result not empty", tags)
}
// ... not with mapped tag (place)
tags = make(element.Tags)
tags["population"] = "0"
tags["name"] = "foo"
tags["boring"] = "true"
tags["place"] = "village"
if points.Filter(&tags) != true {
t.Fatal("Filter result true", tags)
}
if len(tags) != 3 || tags["population"] != "0" || tags["name"] != "foo" || tags["place"] != "village" {
t.Fatal("Filter result not expected", tags)
}
}
@ -460,7 +350,7 @@ func TestExcludeFilter(t *testing.T) {
var tags element.Tags
// no matches
f = newExcludeFilter([]config.Key{})
tags = element.Tags{"source": "1", "tiger:foo": "1", "source:foo": "1"}
f.Filter(&tags)
if !reflect.DeepEqual(tags, element.Tags{"source": "1", "tiger:foo": "1", "source:foo": "1"}) {
@ -468,7 +358,7 @@ func TestExcludeFilter(t *testing.T) {
}
// match all
f = newExcludeFilter([]Key{"*"})
f = newExcludeFilter([]config.Key{"*"})
tags = element.Tags{"source": "1", "tiger:foo": "1", "source:foo": "1"}
f.Filter(&tags)
if !reflect.DeepEqual(tags, element.Tags{}) {
@ -476,7 +366,7 @@ func TestExcludeFilter(t *testing.T) {
}
// fixed string and wildcard match
f = newExcludeFilter([]Key{"source", "tiger:*"})
f = newExcludeFilter([]config.Key{"source", "tiger:*"})
tags = element.Tags{"source": "1", "tiger:foo": "1", "source:foo": "1"}
f.Filter(&tags)
if !reflect.DeepEqual(tags, element.Tags{"source:foo": "1"}) {
@ -495,9 +385,7 @@ func BenchmarkFilterNodes(b *testing.B) {
tags["boring"] = "true"
points := mapping.NodeTagFilter()
points.Filter(&tags)
if len(tags) != 2 || tags["population"] != "0" || tags["name"] != "foo" {
b.Fatal("Filter result not expected", tags)
}

477
mapping/mapping.go Normal file
View File

@ -0,0 +1,477 @@
package mapping
import (
"io/ioutil"
"regexp"
"github.com/omniscale/imposm3/element"
"github.com/omniscale/imposm3/mapping/config"
"github.com/pkg/errors"
"gopkg.in/yaml.v2"
)
type orderedDestTable struct {
DestTable
order int
}
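// TagTableMapping maps tag keys and values to the destination tables they are
// inserted into, keeping the order defined in the mapping file.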
type TagTableMapping map[Key]map[Value][]orderedDestTable
func (tt TagTableMapping) addFromMapping(mapping config.KeyValues, table DestTable) {
for key, vals := range mapping {
for _, v := range vals {
vals, ok := tt[Key(key)]
tbl := orderedDestTable{DestTable: table, order: v.Order}
if ok {
vals[Value(v.Value)] = append(vals[Value(v.Value)], tbl)
} else {
tt[Key(key)] = make(map[Value][]orderedDestTable)
tt[Key(key)][Value(v.Value)] = append(tt[Key(key)][Value(v.Value)], tbl)
}
}
}
}
func (tt TagTableMapping) asTagMap() tagMap {
result := make(tagMap)
for k, vals := range tt {
result[k] = make(map[Value]struct{})
for v := range vals {
result[k][v] = struct{}{}
}
}
return result
}
type DestTable struct {
Name string
SubMapping string
}
type TableType string
func (tt *TableType) UnmarshalJSON(data []byte) error {
switch string(data) {
case "":
return errors.New("missing table type")
case `"point"`:
*tt = PointTable
case `"linestring"`:
*tt = LineStringTable
case `"polygon"`:
*tt = PolygonTable
case `"geometry"`:
*tt = GeometryTable
case `"relation"`:
*tt = RelationTable
case `"relation_member"`:
*tt = RelationMemberTable
default:
return errors.New("unknown type " + string(data))
}
return nil
}
const (
PolygonTable TableType = "polygon"
LineStringTable TableType = "linestring"
PointTable TableType = "point"
GeometryTable TableType = "geometry"
RelationTable TableType = "relation"
RelationMemberTable TableType = "relation_member"
)
type Mapping struct {
Conf config.Mapping
PointMatcher NodeMatcher
LineStringMatcher WayMatcher
PolygonMatcher RelWayMatcher
RelationMatcher RelationMatcher
RelationMemberMatcher RelationMatcher
}
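// NewMapping reads the mapping file, parses it with the YAML decoder and
// builds all matchers up front; the matcher fields above replace the former
// PointMatcher()/LineStringMatcher()/... constructor methods.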
func NewMapping(filename string) (*Mapping, error) {
f, err := ioutil.ReadFile(filename)
if err != nil {
return nil, err
}
mapping := Mapping{}
err = yaml.Unmarshal(f, &mapping.Conf)
if err != nil {
return nil, err
}
err = mapping.prepare()
if err != nil {
return nil, err
}
err = mapping.createMatcher()
if err != nil {
return nil, err
}
return &mapping, nil
}
func (m *Mapping) prepare() error {
for name, t := range m.Conf.Tables {
t.Name = name
if t.OldFields != nil {
// todo deprecate 'fields'
t.Columns = t.OldFields
}
if t.Type == "" {
return errors.Errorf("missing type for table %s", name)
}
if TableType(t.Type) == GeometryTable {
if t.Mapping != nil || t.Mappings != nil {
return errors.Errorf("table with type:geometry requires type_mapping for table %s", name)
}
}
}
for name, t := range m.Conf.GeneralizedTables {
t.Name = name
}
return nil
}
func (m *Mapping) createMatcher() error {
var err error
m.PointMatcher, err = m.pointMatcher()
if err != nil {
return err
}
m.LineStringMatcher, err = m.lineStringMatcher()
if err != nil {
return err
}
m.PolygonMatcher, err = m.polygonMatcher()
if err != nil {
return err
}
m.RelationMatcher, err = m.relationMatcher()
if err != nil {
return err
}
m.RelationMemberMatcher, err = m.relationMemberMatcher()
if err != nil {
return err
}
return nil
}
func (m *Mapping) mappings(tableType TableType, mappings TagTableMapping) {
for name, t := range m.Conf.Tables {
if TableType(t.Type) != GeometryTable && TableType(t.Type) != tableType {
continue
}
mappings.addFromMapping(t.Mapping, DestTable{Name: name})
for subMappingName, subMapping := range t.Mappings {
mappings.addFromMapping(subMapping.Mapping, DestTable{Name: name, SubMapping: subMappingName})
}
switch tableType {
case PointTable:
mappings.addFromMapping(t.TypeMappings.Points, DestTable{Name: name})
case LineStringTable:
mappings.addFromMapping(t.TypeMappings.LineStrings, DestTable{Name: name})
case PolygonTable:
mappings.addFromMapping(t.TypeMappings.Polygons, DestTable{Name: name})
}
}
}
func (m *Mapping) tables(tableType TableType) (map[string]*rowBuilder, error) {
var err error
result := make(map[string]*rowBuilder)
for name, t := range m.Conf.Tables {
if TableType(t.Type) == tableType || TableType(t.Type) == GeometryTable {
result[name], err = makeRowBuilder(t)
if err != nil {
return nil, errors.Wrapf(err, "creating row builder for %s", name)
}
}
}
return result, nil
}
func makeRowBuilder(tbl *config.Table) (*rowBuilder, error) {
result := rowBuilder{}
for _, mappingColumn := range tbl.Columns {
column := valueBuilder{}
column.key = Key(mappingColumn.Key)
columnType, err := MakeColumnType(mappingColumn)
if err != nil {
return nil, errors.Wrapf(err, "creating column %s", mappingColumn.Name)
}
column.colType = *columnType
result.columns = append(result.columns, column)
}
return &result, nil
}
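// MakeColumnType looks up the configured column type; types that define a
// MakeFunc are specialized with this column's settings before use.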
func MakeColumnType(c *config.Column) (*ColumnType, error) {
columnType, ok := AvailableColumnTypes[c.Type]
if !ok {
return nil, errors.Errorf("unhandled type %s", c.Type)
}
if columnType.MakeFunc != nil {
makeValue, err := columnType.MakeFunc(c.Name, columnType, *c)
if err != nil {
return nil, err
}
columnType = ColumnType{columnType.Name, columnType.GoType, makeValue, nil, nil, columnType.FromMember}
}
columnType.FromMember = c.FromMember
return &columnType, nil
}
func (m *Mapping) extraTags(tableType TableType, tags map[Key]bool) {
for _, t := range m.Conf.Tables {
if TableType(t.Type) != tableType && TableType(t.Type) != GeometryTable {
continue
}
for _, col := range t.Columns {
if col.Key != "" {
tags[Key(col.Key)] = true
}
for _, k := range col.Keys {
tags[Key(k)] = true
}
}
if t.Filters != nil && t.Filters.ExcludeTags != nil {
for _, keyVal := range *t.Filters.ExcludeTags {
tags[Key(keyVal[0])] = true
}
}
if tableType == PolygonTable || tableType == RelationTable || tableType == RelationMemberTable {
if t.RelationTypes != nil {
tags["type"] = true
}
}
}
for _, k := range m.Conf.Tags.Include {
tags[Key(k)] = true
}
// always include area tag for closed-way handling
tags["area"] = true
}
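// elementFilter reports whether an element with the given tags may be written
// to a table; key is the matched tag key and closed tells if the way is closed.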
type elementFilter func(tags element.Tags, key Key, closed bool) bool
type tableElementFilters map[string][]elementFilter
func (m *Mapping) addTypedFilters(tableType TableType, filters tableElementFilters) {
var areaTags map[Key]struct{}
var linearTags map[Key]struct{}
if m.Conf.Areas.AreaTags != nil {
areaTags = make(map[Key]struct{})
for _, tag := range m.Conf.Areas.AreaTags {
areaTags[Key(tag)] = struct{}{}
}
}
if m.Conf.Areas.LinearTags != nil {
linearTags = make(map[Key]struct{})
for _, tag := range m.Conf.Areas.LinearTags {
linearTags[Key(tag)] = struct{}{}
}
}
for name, t := range m.Conf.Tables {
if TableType(t.Type) != GeometryTable && TableType(t.Type) != tableType {
continue
}
if TableType(t.Type) == LineStringTable && areaTags != nil {
f := func(tags element.Tags, key Key, closed bool) bool {
if closed {
if tags["area"] == "yes" {
return false
}
if tags["area"] != "no" {
if _, ok := areaTags[key]; ok {
return false
}
}
}
return true
}
filters[name] = append(filters[name], f)
}
if TableType(t.Type) == PolygonTable && linearTags != nil {
f := func(tags element.Tags, key Key, closed bool) bool {
if closed && tags["area"] == "no" {
return false
}
if tags["area"] != "yes" {
if _, ok := linearTags[key]; ok {
return false
}
}
return true
}
filters[name] = append(filters[name], f)
}
}
}
func (m *Mapping) addRelationFilters(tableType TableType, filters tableElementFilters) {
for name, t := range m.Conf.Tables {
if t.RelationTypes != nil {
relTypes := t.RelationTypes // copy loop var for closure
f := func(tags element.Tags, key Key, closed bool) bool {
if v, ok := tags["type"]; ok {
for _, rtype := range relTypes {
if v == rtype {
return true
}
}
}
return false
}
filters[name] = append(filters[name], f)
} else {
if TableType(t.Type) == PolygonTable {
// standard multipolygon handling (boundary and land_area are for backwards compatibility)
f := func(tags element.Tags, key Key, closed bool) bool {
if v, ok := tags["type"]; ok {
if v == "multipolygon" || v == "boundary" || v == "land_area" {
return true
}
}
return false
}
filters[name] = append(filters[name], f)
}
}
}
}
func (m *Mapping) addFilters(filters tableElementFilters) {
for name, t := range m.Conf.Tables {
if t.Filters == nil {
continue
}
if t.Filters.ExcludeTags != nil {
log.Print("warn: exclude_tags filter is deprecated and will be removed. See require and reject filter.")
for _, filterKeyVal := range *t.Filters.ExcludeTags {
/*
f := func(tags element.Tags, key Key, closed bool) bool {
if v, ok := tags[filterKeyVal[0]]; ok {
if filterKeyVal[1] == "__any__" || v == filterKeyVal[1] {
return false
}
}
return true
}
filters[name] = append(filters[name], f)
*/
// Convert the deprecated `exclude_tags` filter into a `reject` filter.
keyname := string(filterKeyVal[0])
vararr := []config.OrderedValue{
{
Value: config.Value(filterKeyVal[1]),
Order: 1,
},
}
filters[name] = append(filters[name], makeFiltersFunction(name, false, true, string(keyname), vararr))
}
}
if t.Filters.Require != nil {
for keyname, vararr := range t.Filters.Require {
filters[name] = append(filters[name], makeFiltersFunction(name, true, false, string(keyname), vararr))
}
}
if t.Filters.Reject != nil {
for keyname, vararr := range t.Filters.Reject {
filters[name] = append(filters[name], makeFiltersFunction(name, false, true, string(keyname), vararr))
}
}
if t.Filters.RequireRegexp != nil {
for keyname, regexp := range t.Filters.RequireRegexp {
filters[name] = append(filters[name], makeRegexpFiltersFunction(name, true, false, string(keyname), regexp))
}
}
if t.Filters.RejectRegexp != nil {
for keyname, regexp := range t.Filters.RejectRegexp {
filters[name] = append(filters[name], makeRegexpFiltersFunction(name, false, true, string(keyname), regexp))
}
}
}
}
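// Illustrative filter block for a mapping file, covering the options handled
// above (a sketch: the require/reject keys are taken from the test mappings,
// the regexp key spelling is assumed from the config struct fields):
//
//   filters:
//     require:
//       admin_level: ["2", "4"]
//     reject:
//       building: ["no", "none"]
//     require_regexp:
//       name: '^[A-Z]'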
func findValueInOrderedValue(v config.Value, list []config.OrderedValue) bool {
for _, item := range list {
if item.Value == v {
return true
}
}
return false
}
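// makeRegexpFiltersFunction and makeFiltersFunction return closures that decide
// whether an element is kept: require filters are built with virtualTrue=true
// and virtualFalse=false, reject filters with the inverse, so one implementation
// serves both directions.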
func makeRegexpFiltersFunction(tablename string, virtualTrue bool, virtualFalse bool, v_keyname string, v_regexp string) func(tags element.Tags, key Key, closed bool) bool {
// Compile the regular expression; an invalid pattern panics while the mapping is loaded.
r := regexp.MustCompile(v_regexp)
return func(tags element.Tags, key Key, closed bool) bool {
if v, ok := tags[v_keyname]; ok {
if r.MatchString(v) {
return virtualTrue
}
}
return virtualFalse
}
}
func makeFiltersFunction(tablename string, virtualTrue bool, virtualFalse bool, v_keyname string, v_vararr []config.OrderedValue) func(tags element.Tags, key Key, closed bool) bool {
if findValueInOrderedValue("__nil__", v_vararr) { // check __nil__
log.Print("warn: Filter value '__nil__' is not supported ! (tablename:" + tablename + ")")
}
if findValueInOrderedValue("__any__", v_vararr) { // check __any__
if len(v_vararr) > 1 {
log.Print("warn: Multiple filter value with '__any__' keywords is not valid! (tablename:" + tablename + ")")
}
return func(tags element.Tags, key Key, closed bool) bool {
if _, ok := tags[v_keyname]; ok {
return virtualTrue
}
return virtualFalse
}
} else if len(v_vararr) == 1 { // IF 1 parameter THEN we can generate optimal code
return func(tags element.Tags, key Key, closed bool) bool {
if v, ok := tags[v_keyname]; ok {
if config.Value(v) == v_vararr[0].Value {
return virtualTrue
}
}
return virtualFalse
}
} else { // > 1 parameter - less optimal code
return func(tags element.Tags, key Key, closed bool) bool {
if v, ok := tags[v_keyname]; ok {
if findValueInOrderedValue(config.Value(v), v_vararr) {
return virtualTrue
}
}
return virtualFalse
}
}
}

View File

@ -5,71 +5,89 @@ import (
"github.com/omniscale/imposm3/geom"
)
func (m *Mapping) pointMatcher() (NodeMatcher, error) {
mappings := make(TagTableMapping)
m.mappings(PointTable, mappings)
filters := make(tableElementFilters)
m.addFilters(filters)
m.addTypedFilters(PointTable, filters)
tables, err := m.tables(PointTable)
return &tagMatcher{
mappings: mappings,
filters: filters,
tables: tables,
matchAreas: false,
}, err
}
func (m *Mapping) lineStringMatcher() (WayMatcher, error) {
mappings := make(TagTableMapping)
m.mappings(LineStringTable, mappings)
filters := make(tableElementFilters)
m.addFilters(filters)
m.addTypedFilters(LineStringTable, filters)
tables, err := m.tables(LineStringTable)
return &tagMatcher{
mappings: mappings,
filters: filters,
tables: tables,
matchAreas: false,
}, err
}
func (m *Mapping) polygonMatcher() (RelWayMatcher, error) {
mappings := make(TagTableMapping)
m.mappings(PolygonTable, mappings)
filters := make(tableElementFilters)
m.addFilters(filters)
m.addTypedFilters(PolygonTable, filters)
relFilters := make(tableElementFilters)
m.addRelationFilters(PolygonTable, relFilters)
tables, err := m.tables(PolygonTable)
return &tagMatcher{
mappings: mappings,
filters: filters,
tables: tables,
relFilters: relFilters,
matchAreas: true,
}, err
}
func (m *Mapping) relationMatcher() (RelationMatcher, error) {
mappings := make(TagTableMapping)
m.mappings(RelationTable, mappings)
filters := make(tableElementFilters)
m.addFilters(filters)
m.addTypedFilters(RelationTable, filters)
relFilters := make(tableElementFilters)
m.addRelationFilters(RelationTable, relFilters)
tables, err := m.tables(RelationTable)
return &tagMatcher{
mappings: mappings,
filters: filters,
tables: tables,
relFilters: relFilters,
matchAreas: true,
}, err
}
func (m *Mapping) relationMemberMatcher() (RelationMatcher, error) {
mappings := make(TagTableMapping)
m.mappings(RelationMemberTable, mappings)
filters := make(tableElementFilters)
m.addFilters(filters)
m.addTypedFilters(RelationMemberTable, filters)
relFilters := make(tableElementFilters)
m.addRelationFilters(RelationMemberTable, relFilters)
tables, err := m.tables(RelationMemberTable)
return &tagMatcher{
mappings: mappings,
filters: filters,
tables: tables,
relFilters: relFilters,
matchAreas: true,
}, err
}
type NodeMatcher interface {
@ -89,23 +107,31 @@ type RelWayMatcher interface {
RelationMatcher
}
type Match struct {
Key string
Value string
Table DestTable
builder *rowBuilder
}
func (m *Match) Row(elem *element.OSMElem, geom *geom.Geometry) []interface{} {
return m.builder.MakeRow(elem, geom, *m)
}
func (m *Match) MemberRow(rel *element.Relation, member *element.Member, geom *geom.Geometry) []interface{} {
return m.builder.MakeMemberRow(rel, member, geom, *m)
}
type tagMatcher struct {
mappings TagTableMapping
tables map[string]*rowBuilder
filters tableElementFilters
relFilters tableElementFilters
matchAreas bool
}
func (tm *tagMatcher) MatchNode(node *element.Node) []Match {
return tm.match(node.Tags, false, false)
}
func (tm *tagMatcher) MatchWay(way *element.Way) []Match {
@ -114,22 +140,22 @@ func (tm *tagMatcher) MatchWay(way *element.Way) []Match {
if way.Tags["area"] == "no" {
return nil
}
return tm.match(way.Tags, true, false)
}
} else { // match way as linestring
if way.IsClosed() {
if way.Tags["area"] == "yes" {
return nil
}
return tm.match(way.Tags, true, false)
}
return tm.match(way.Tags, false, false)
}
return nil
}
func (tm *tagMatcher) MatchRelation(rel *element.Relation) []Match {
return tm.match(rel.Tags, true, true)
}
type orderedMatch struct {
@ -137,17 +163,17 @@ type orderedMatch struct {
order int
}
func (tm *tagMatcher) match(tags element.Tags, closed bool, relation bool) []Match {
tables := make(map[DestTable]orderedMatch)
addTables := func(k, v string, tbls []orderedDestTable) {
for _, t := range tbls {
this := orderedMatch{
Match: Match{
Key: k,
Value: v,
Table: t.DestTable,
builder: tm.tables[t.Name],
},
order: t.order,
}
@ -187,6 +213,18 @@ func (tm *tagMatcher) match(tags element.Tags, closed bool) []Match {
}
}
}
if relation && !filteredOut {
filters, ok := tm.relFilters[t.Name]
if ok {
for _, filter := range filters {
if !filter(tags, Key(match.Key), closed) {
filteredOut = true
break
}
}
}
}
if !filteredOut {
matches = append(matches, match.Match)
}
@ -194,6 +232,54 @@ func (tm *tagMatcher) match(tags element.Tags, closed bool) []Match {
return matches
}
type valueBuilder struct {
key Key
colType ColumnType
}
func (v *valueBuilder) Value(elem *element.OSMElem, geom *geom.Geometry, match Match) interface{} {
if v.colType.Func != nil {
return v.colType.Func(elem.Tags[string(v.key)], elem, geom, match)
}
return nil
}
func (v *valueBuilder) MemberValue(rel *element.Relation, member *element.Member, geom *geom.Geometry, match Match) interface{} {
if v.colType.Func != nil {
if v.colType.FromMember {
if member.Elem == nil {
return nil
}
return v.colType.Func(member.Elem.Tags[string(v.key)], member.Elem, geom, match)
}
return v.colType.Func(rel.Tags[string(v.key)], &rel.OSMElem, geom, match)
}
if v.colType.MemberFunc != nil {
return v.colType.MemberFunc(rel, member, match)
}
return nil
}
type rowBuilder struct {
columns []valueBuilder
}
func (r *rowBuilder) MakeRow(elem *element.OSMElem, geom *geom.Geometry, match Match) []interface{} {
var row []interface{}
for _, column := range r.columns {
row = append(row, column.Value(elem, geom, match))
}
return row
}
func (r *rowBuilder) MakeMemberRow(rel *element.Relation, member *element.Member, geom *geom.Geometry, match Match) []interface{} {
var row []interface{}
for _, column := range r.columns {
row = append(row, column.MemberValue(rel, member, geom, match))
}
return row
}
// SelectRelationPolygons returns a slice of all members that are already
// imported as part of the relation.
// Outer members are "imported" if they share the same destination table. Inner members

View File

@ -11,7 +11,7 @@ func BenchmarkTagMatch(b *testing.B) {
if err != nil {
b.Fatal(err)
}
matcher := m.PolygonMatcher
for i := 0; i < b.N; i++ {
e := element.Relation{}
e.Tags = element.Tags{"landuse": "forest", "name": "Forest", "source": "bling", "tourism": "zoo"}
@ -43,7 +43,7 @@ func TestSelectRelationPolygonsSimple(t *testing.T) {
t.Fatal(err)
}
r := element.Relation{}
r.Tags = element.Tags{"landuse": "park"}
r.Tags = element.Tags{"landuse": "park", "type": "multipolygon"}
r.Members = []element.Member{
makeMember(0, element.Tags{"landuse": "forest"}),
makeMember(1, element.Tags{"landuse": "park"}),
@ -51,7 +51,7 @@ func TestSelectRelationPolygonsSimple(t *testing.T) {
makeMember(4, element.Tags{"foo": "bar"}),
}
filtered := SelectRelationPolygons(
mapping.PolygonMatcher,
&r,
)
if len(filtered) != 1 {
@ -68,13 +68,13 @@ func TestSelectRelationPolygonsUnrelatedTags(t *testing.T) {
t.Fatal(err)
}
r := element.Relation{}
r.Tags = element.Tags{"landuse": "park"}
r.Tags = element.Tags{"landuse": "park", "type": "multipolygon"}
r.Members = []element.Member{
makeMember(0, element.Tags{"landuse": "park", "layer": "2", "name": "foo"}),
makeMember(1, element.Tags{"landuse": "forest"}),
}
filtered := SelectRelationPolygons(
mapping.PolygonMatcher,
&r,
)
if len(filtered) != 1 {
@ -91,7 +91,7 @@ func TestSelectRelationPolygonsMultiple(t *testing.T) {
t.Fatal(err)
}
r := element.Relation{}
r.Tags = element.Tags{"landuse": "park"}
r.Tags = element.Tags{"landuse": "park", "type": "multipolygon"}
r.Members = []element.Member{
makeMember(0, element.Tags{"landuse": "park"}),
makeMember(1, element.Tags{"natural": "forest"}),
@ -100,7 +100,7 @@ func TestSelectRelationPolygonsMultiple(t *testing.T) {
makeMember(4, element.Tags{"landuse": "park", "layer": "2", "name": "foo"}),
}
filtered := SelectRelationPolygons(
mapping.PolygonMatcher,
&r,
)
if len(filtered) != 3 {
@ -117,13 +117,13 @@ func TestSelectRelationPolygonsMultipleTags(t *testing.T) {
t.Fatal(err)
}
r := element.Relation{}
r.Tags = element.Tags{"landuse": "forest", "natural": "scrub"}
r.Tags = element.Tags{"landuse": "forest", "natural": "scrub", "type": "multipolygon"}
r.Members = []element.Member{
makeMember(0, element.Tags{"natural": "scrub"}),
makeMember(1, element.Tags{"landuse": "forest"}),
}
filtered := SelectRelationPolygons(
mapping.PolygonMatcher,
&r,
)
// TODO both should be filtered out, but we only get one,
@ -139,14 +139,14 @@ func TestSelectRelationPolygonsMultipleTagsOnWay(t *testing.T) {
t.Fatal(err)
}
r := element.Relation{}
r.Tags = element.Tags{"waterway": "riverbank"}
r.Tags = element.Tags{"waterway": "riverbank", "type": "multipolygon"}
r.Members = []element.Member{
makeMemberRole(0, element.Tags{"waterway": "riverbank", "natural": "water"}, "outer"),
makeMemberRole(1, element.Tags{"natural": "water"}, "inner"),
makeMemberRole(2, element.Tags{"place": "islet"}, "inner"),
}
filtered := SelectRelationPolygons(
mapping.PolygonMatcher,
&r,
)

View File

@ -43,7 +43,7 @@
},
"tables": {
"landusages": {
"fields": [
"columns": [
{
"type": "id",
"name": "osm_id",
@ -209,7 +209,7 @@
}
},
"buildings": {
"fields": [
"columns": [
{
"type": "id",
"name": "osm_id",
@ -240,7 +240,7 @@
},
"amenity_areas": {
"_comment": "for testing duplicate inserts with __any__ and exact match",
"fields": [
"columns": [
{
"type": "id",
"name": "osm_id",
@ -270,7 +270,7 @@
}
},
"places": {
"fields": [
"columns": [
{
"type": "id",
"name": "osm_id",
@ -333,7 +333,7 @@
}
},
"transport_areas": {
"fields": [
"columns": [
{
"type": "id",
"name": "osm_id",
@ -370,7 +370,7 @@
}
},
"admin": {
"fields": [
"columns": [
{
"type": "id",
"name": "osm_id",
@ -405,7 +405,7 @@
}
},
"aeroways": {
"fields": [
"columns": [
{
"type": "id",
"name": "osm_id",
@ -436,7 +436,7 @@
}
},
"waterways": {
"fields": [
"columns": [
{
"type": "id",
"name": "osm_id",
@ -473,7 +473,7 @@
}
},
"barrierways": {
"fields": [
"columns": [
{
"type": "id",
"name": "osm_id",
@ -515,7 +515,7 @@
}
},
"transport_points": {
"fields": [
"columns": [
{
"type": "id",
"name": "osm_id",
@ -566,7 +566,7 @@
}
},
"amenities": {
"fields": [
"columns": [
{
"type": "id",
"name": "osm_id",
@ -603,7 +603,7 @@
}
},
"barrierpoints": {
"fields": [
"columns": [
{
"type": "id",
"name": "osm_id",
@ -648,7 +648,7 @@
}
},
"housenumbers_interpolated": {
"fields": [
"columns": [
{
"type": "id",
"name": "osm_id",
@ -698,7 +698,7 @@
}
},
"roads": {
"fields": [
"columns": [
{
"type": "id",
"name": "osm_id",
@ -818,7 +818,7 @@
}
},
"housenumbers": {
"fields": [
"columns": [
{
"type": "id",
"name": "osm_id",

View File

@ -35,7 +35,7 @@ generalized_tables:
tolerance: 50.0
tables:
admin:
columns:
- name: osm_id
type: id
- name: geometry
@ -53,7 +53,7 @@ tables:
- administrative
type: polygon
aeroways:
columns:
- name: osm_id
type: id
- name: geometry
@ -69,7 +69,7 @@ tables:
- taxiway
type: linestring
amenities:
columns:
- name: osm_id
type: id
- name: geometry
@ -92,7 +92,7 @@ tables:
type: point
amenity_areas:
_comment: for testing duplicate inserts with __any__ and exact match
columns:
- name: osm_id
type: id
- name: geometry
@ -107,7 +107,7 @@ tables:
- shop
type: polygon
barrierpoints:
columns:
- name: osm_id
type: id
- name: geometry
@ -137,7 +137,7 @@ tables:
- stile
type: point
barrierways:
columns:
- name: osm_id
type: id
- name: geometry
@ -164,7 +164,7 @@ tables:
- wire_fence
type: linestring
buildings:
columns:
- name: osm_id
type: id
- name: geometry
@ -179,7 +179,7 @@ tables:
- __any__
type: polygon
housenumbers:
columns:
- name: osm_id
type: id
- name: geometry
@ -203,7 +203,7 @@ tables:
- __any__
type: point
housenumbers_interpolated:
columns:
- name: osm_id
type: id
- name: geometry
@ -230,7 +230,7 @@ tables:
- __any__
type: linestring
landusages:
columns:
- name: osm_id
type: id
- name: geometry
@ -363,7 +363,7 @@ tables:
- riverbank
type: polygon
places:
columns:
- name: osm_id
type: id
- name: geometry
@ -404,7 +404,7 @@ tables:
- locality
type: point
roads:
columns:
- name: osm_id
type: id
- name: geometry
@ -524,7 +524,7 @@ tables:
- groyne
type: linestring
transport_areas:
columns:
- name: osm_id
type: id
- name: geometry
@ -545,7 +545,7 @@ tables:
- platform
type: polygon
transport_points:
columns:
- name: osm_id
type: id
- name: geometry
@ -577,7 +577,7 @@ tables:
- subway_entrance
type: point
waterways:
columns:
- name: osm_id
type: id
- name: geometry

View File

@ -1,4 +1,4 @@
# test mappings
#
# only `type: linestring` implemented!
#
@ -7,7 +7,7 @@
tables:
testfilters_test_t0:
fields:
- name: id
type: id
@ -21,7 +21,7 @@ tables:
admin_level: ['2','4']
type: linestring
testfilters_test_t1:
fields:
- name: id
type: id
@ -38,7 +38,7 @@ tables:
type: linestring
testfilters_test_t2_building:
fields:
- name: id
type: id
@ -57,7 +57,7 @@ tables:
type: linestring
testfilters_test_t3_highway_with_name:
fields:
- name: id
type: id
@ -78,7 +78,7 @@ tables:
type: linestring
testfilters_test_t4_waterway_with_name:
fields:
- name: id
type: id
@ -111,7 +111,7 @@ tables:
type: linestring
testfilters_test_t5_depricated_exclude_tags:
_comment: Allways Empty !
fields:
- name: id

View File

@ -6,10 +6,10 @@ import (
"github.com/omniscale/imposm3/element"
)
// go test ./mapping -run TestFilters_test_t0 -v
func TestFilters_test_t0(t *testing.T) {
/* ./testfilters_test_mapping.yml ..
filters:
require:
@ -23,7 +23,7 @@ func TestFilter_t0(t *testing.T) {
// *testing.T
t,
// tablename
"config_test_t0",
"testfilters_test_t0",
// Accept
[]element.Tags{
element.Tags{"admin_level": "2", "boundary": "administrative"},
@ -51,10 +51,10 @@ func TestFilter_t0(t *testing.T) {
)
}
// go test ./mapping -run TestFilters_test_t1 -v
func TestFilters_test_t1(t *testing.T) {
/* ./testfilters_test_mapping.yml ..
filters:
require:
@ -71,7 +71,7 @@ func TestFilter_t1(t *testing.T) {
// *testing.T
t,
// tablename
"config_test_t1",
"testfilters_test_t1",
// Accept
[]element.Tags{
element.Tags{"admin_level": "2", "boundary": "administrative"},
@ -99,10 +99,10 @@ func TestFilter_t1(t *testing.T) {
)
}
// go test ./mapping -run TestFilters_test_t2_building -v
func TestFilters_test_t2_building(t *testing.T) {
/* ./testfilters_test_mapping.yml ..
filters:
reject:
building: ["no","none"]
@ -120,7 +120,7 @@ func TestFilter_t2_building(t *testing.T) {
// *testing.T
t,
// tablename
"config_test_t2_building",
"testfilters_test_t2_building",
// Accept
[]element.Tags{
element.Tags{"building": "yes", "addr:housenumber": "1a"},
@ -175,10 +175,10 @@ func TestFilter_t2_building(t *testing.T) {
)
}
// go test ./mapping -run TestFilters_test_t3_highway_with_name -v
func TestFilters_test_t3_highway_with_name(t *testing.T) {
/* ./testfilters_test_mapping.yml ..
filters:
require:
name: ["__any__"]
@ -194,7 +194,7 @@ func TestFilter_t3_highway_with_name(t *testing.T) {
// *testing.T
t,
// tablename
"config_test_t3_highway_with_name",
"testfilters_test_t3_highway_with_name",
// Accept
[]element.Tags{
element.Tags{"highway": "residential", "name": "N1"},
@ -240,10 +240,10 @@ func TestFilter_t3_highway_with_name(t *testing.T) {
)
}
// go test ./mapping -run TestFilters_test_t4_waterway_with_name -v
func TestFilters_test_t4_waterway_with_name(t *testing.T) {
/* ./testfilters_test_mapping.yml ..
filters:
require:
@ -273,7 +273,7 @@ func TestFilter_t4_waterway_with_name(t *testing.T) {
// *testing.T
t,
// tablename
"config_test_t4_waterway_with_name",
"testfilters_test_t4_waterway_with_name",
// Accept
[]element.Tags{
element.Tags{"waterway": "stream", "name": "N1"},
@ -346,10 +346,10 @@ func TestFilter_t4_waterway_with_name(t *testing.T) {
)
}
// go test ./mapping -run TestFilters_test_t5_depricated_exclude_tags -v
func TestFilters_test_t5_depricated_exclude_tags(t *testing.T) {
/* ./testfilters_test_mapping.yml ..
filters:
require:
@ -379,7 +379,7 @@ func TestFilter_t5_depricated_exclude_tags(t *testing.T) {
// *testing.T
t,
// tablename
"config_test_t5_depricated_exclude_tags",
"testfilters_test_t5_depricated_exclude_tags",
// Accept - in this case Must be EMPTY !
[]element.Tags{},
// Reject
@ -439,7 +439,7 @@ func filterTest(t *testing.T, tablename string, accept []element.Tags, reject []
var configTestMapping *Mapping
var err error
configTestMapping, err = NewMapping("./config_test_mapping.yml")
configTestMapping, err = NewMapping("./testfilters_mapping.yml")
if err != nil {
panic(err)
}
@ -447,7 +447,7 @@ func filterTest(t *testing.T, tablename string, accept []element.Tags, reject []
var actualMatch []Match
elem := element.Way{}
ls := configTestMapping.LineStringMatcher
for _, et := range accept {
elem.Tags = et

View File

@ -90,8 +90,9 @@ fi
if [ ! -e $BUILD_BASE/go/bin/go ]; then
echo "-> installing go"
pushd $SRC
rm -rf $BUILD_BASE/go
$CURL https://storage.googleapis.com/golang/go1.8.linux-amd64.tar.gz -O
tar xzf go1.8.linux-amd64.tar.gz -C $BUILD_BASE/
popd
fi

View File

@ -195,6 +195,9 @@ func ReadPbf(
coordsSync.Wait()
continue
}
if skipCoords {
continue
}
if withLimiter {
for i, _ := range nds {
if !limiter.IntersectsBuffer(g, nds[i].Long, nds[i].Lat) {
@ -225,6 +228,9 @@ func ReadPbf(
coordsSync.Wait()
continue
}
if skipNodes {
continue
}
numWithTags := 0
for i, _ := range nds {
m.Filter(&nds[i].Tags)

View File

@ -3,14 +3,16 @@ package replication
import (
"errors"
"fmt"
"gopkg.in/fsnotify.v1"
"io"
"net"
"net/http"
"os"
"path"
"path/filepath"
"time"
"gopkg.in/fsnotify.v1"
"github.com/omniscale/imposm3"
"github.com/omniscale/imposm3/logging"
)
@ -212,38 +214,18 @@ func (d *reader) Sequences() <-chan Sequence {
func (d *reader) waitTillPresent(seq int, ext string) error {
filename := path.Join(d.dest, seqPath(seq)+ext)
return waitTillPresent(filename)
}
func (d *reader) fetchNextLoop() {
for {
nextSeq := d.lastSequence + 1
if err := d.waitTillPresent(nextSeq, d.stateExt); err != nil {
log.Error(err)
}
if err := d.waitTillPresent(nextSeq, d.fileExt); err != nil {
log.Error(err)
}
d.lastSequence = nextSeq
base := path.Join(d.dest, seqPath(d.lastSequence))
lastTime, _ := d.stateTime(base + d.stateExt)
@ -255,3 +237,39 @@ func (d *reader) fetchNextLoop() {
}
}
}
// waitTillPresent blocks till file is present.
func waitTillPresent(filename string) error {
if _, err := os.Stat(filename); err == nil {
return nil
}
// fsnotify does not work recursive. wait for parent dirs first (e.g. 002/134)
parent := filepath.Dir(filename)
if err := waitTillPresent(parent); err != nil {
return err
}
w, err := fsnotify.NewWatcher()
if err != nil {
return err
}
defer w.Close()
// need to watch on parent if we want to get events for new file
w.Add(parent)
// check again, in case file was created before we added the file
if _, err := os.Stat(filename); err == nil {
return nil
}
for {
select {
case evt := <-w.Events:
if evt.Op&fsnotify.Create == fsnotify.Create && evt.Name == filename {
return nil
}
}
}
return nil
}

View File

@ -1,6 +1,13 @@
package replication
import "testing"
import (
"io/ioutil"
"os"
"path/filepath"
"time"
"testing"
)
func TestSeqPath(t *testing.T) {
if path := seqPath(0); path != "000/000/000" {
@ -13,3 +20,49 @@ func TestSeqPath(t *testing.T) {
t.Fatal(path)
}
}
func TestWaitTillPresent(t *testing.T) {
tmpdir, err := ioutil.TempDir("", "imposm3tests")
if err != nil {
t.Fatal(err)
}
defer os.RemoveAll(tmpdir)
exists := filepath.Join(tmpdir, "exists")
f, err := os.Create(exists)
if err != nil {
t.Fatal(err)
}
f.Close()
waitTillPresent(exists)
create := filepath.Join(tmpdir, "create")
go func() {
time.Sleep(200 * time.Millisecond)
f, err := os.Create(create)
if err != nil {
t.Fatal(err)
}
f.Close()
}()
waitTillPresent(create)
sub := filepath.Join(tmpdir, "sub", "dir", "create")
go func() {
time.Sleep(200 * time.Millisecond)
if err := os.Mkdir(filepath.Join(tmpdir, "sub"), 0755); err != nil {
t.Fatal(err)
}
time.Sleep(200 * time.Millisecond)
if err := os.Mkdir(filepath.Join(tmpdir, "sub", "dir"), 0755); err != nil {
t.Fatal(err)
}
time.Sleep(200 * time.Millisecond)
f, err := os.Create(sub)
if err != nil {
t.Fatal(err)
}
f.Close()
}()
waitTillPresent(sub)
}

View File

@ -8,7 +8,7 @@
},
"tables": {
"all": {
"fields": [
"columns": [
{
"type": "id",
"name": "osm_id"
@ -28,7 +28,7 @@
}
},
"amenities": {
"fields": [
"columns": [
{
"type": "id",
"name": "osm_id"

View File

@ -35,15 +35,6 @@
<tag k="name" v="way 11001"/>
<tag k="natural" v="water"/>
</way>
<way id="12001" version="2" timestamp="2011-11-11T00:11:11Z">
<nd ref="12001"/>
<nd ref="12002"/>
<nd ref="12003"/>
<nd ref="12004"/>
<nd ref="12001"/>
<tag k="name" v="way 12001"/>
<tag k="natural" v="water"/>
</way>
<relation id="13001" version="2" timestamp="2011-11-11T00:11:11Z">
<member type="way" ref="13001" role="outer"/>
<tag k="natural" v="water"/>
@ -71,6 +62,7 @@
<tag k="natural" v="water"/>
</way>
<relation id="14001" version="2" timestamp="2011-11-11T00:11:11Z">
<!-- now a new style relation -->
<member type="way" ref="14001" role="outer"/>
<member type="way" ref="14011" role="inner"/>
<tag k="type" v="multipolygon"/>

View File

@ -267,8 +267,19 @@
<relation id="1001" version="1" timestamp="2011-11-11T00:11:11Z">
<member type="way" ref="1001" role="outer"/>
<member type="way" ref="1002" role="inner"/>
<tag k="landuse" v="wood"/>
<tag k="type" v="multipolygon"/>
</relation>
<relation id="1011" version="1" timestamp="2011-11-11T00:11:11Z">
<member type="way" ref="1001" role="outer"/>
<member type="way" ref="1002" role="inner"/>
<tag k="type" v="multipolygon"/>
</relation>
<relation id="1021" version="1" timestamp="2011-11-11T00:11:11Z">
<member type="way" ref="1001" role="outer"/>
<member type="way" ref="1002" role="inner"/>
<tag k="landuse" v="wood"/>
</relation>
<relation id="2001" version="1" timestamp="2011-11-11T00:11:11Z">
<member type="way" ref="2001" role="outer"/>
<member type="way" ref="2002" role="inner"/>
@ -524,7 +535,6 @@
<nd ref="9201"/>
<nd ref="9202"/>
<nd ref="9203"/>
<tag k="landuse" v="park"/>
<tag k="highway" v="secondary"/>
<tag k="name" v="9209"/>
</way>
@ -540,37 +550,10 @@
<member type="way" ref="9209" role="outer"/>
<member type="way" ref="9210" role="outer"/>
<tag k="type" v="multipolygon"/>
</relation>
<!-- test multipolygon ways were inserted (same as 92xx, but different tagging) -->
<node id="9301" version="1" timestamp="2011-11-11T00:11:11Z" lat="47" lon="80"/>
<node id="9302" version="1" timestamp="2011-11-11T00:11:11Z" lat="47" lon="82"/>
<node id="9303" version="1" timestamp="2011-11-11T00:11:11Z" lat="49" lon="82"/>
<node id="9304" version="1" timestamp="2011-11-11T00:11:11Z" lat="49" lon="80"/>
<way id="9309" version="1" timestamp="2011-11-11T00:11:11Z">
<nd ref="9301"/>
<nd ref="9302"/>
<nd ref="9303"/>
<tag k="landuse" v="park"/>
<tag k="highway" v="secondary"/>
<tag k="name" v="9309"/>
</way>
<way id="9310" version="1" timestamp="2011-11-11T00:11:11Z">
<nd ref="9303"/>
<nd ref="9304"/>
<nd ref="9301"/>
<tag k="highway" v="residential"/>
<tag k="name" v="9310"/>
</way>
<relation id="9301" version="1" timestamp="2011-11-11T00:11:11Z">
<member type="way" ref="9309" role="outer"/>
<member type="way" ref="9310" role="outer"/>
<tag k="type" v="multipolygon"/>
<tag k="landuse" v="park"/>
</relation>
<!-- test multipolygon way was inserted -->
<node id="8001" version="1" timestamp="2011-11-11T00:11:11Z" lat="47" lon="80"/>
<node id="8002" version="1" timestamp="2011-11-11T00:11:11Z" lat="47" lon="82"/>
@ -606,25 +589,6 @@
<tag k="landuse" v="park"/>
</way>
<!-- test for changed tags in way belonging to relation -->
<node id="12001" version="1" timestamp="2011-11-11T00:11:11Z" lat="47" lon="85"/>
<node id="12002" version="1" timestamp="2011-11-11T00:11:11Z" lat="47" lon="86"/>
<node id="12003" version="1" timestamp="2011-11-11T00:11:11Z" lat="49" lon="86"/>
<node id="12004" version="1" timestamp="2011-11-11T00:11:11Z" lat="49" lon="85"/>
<way id="12001" version="1" timestamp="2011-11-11T00:11:11Z">
<nd ref="12001"/>
<nd ref="12002"/>
<nd ref="12003"/>
<nd ref="12004"/>
<nd ref="12001"/>
<tag k="name" v="way 12001"/>
<tag k="landuse" v="park"/>
</way>
<relation id="12001" version="1" timestamp="2011-11-11T00:11:11Z">
<member type="way" ref="12001" role="outer"/>
<tag k="type" v="multipolygon"/>
</relation>
<!-- test for changed tags in relation -->
<node id="13001" version="1" timestamp="2011-11-11T00:11:11Z" lat="47" lon="87"/>
<node id="13002" version="1" timestamp="2011-11-11T00:11:11Z" lat="47" lon="88"/>
@ -672,6 +636,7 @@
<tag k="name" v="way 14011"/>
</way>
<relation id="14001" version="1" timestamp="2011-11-11T00:11:11Z">
<!-- old style relation not inserted -->
<member type="way" ref="14001" role="outer"/>
<member type="way" ref="14011" role="inner"/>
<tag k="type" v="multipolygon"/>
@ -694,7 +659,6 @@
<nd ref="15004"/>
<nd ref="15001"/>
<tag k="name" v="way 15001"/>
<tag k="landuse" v="park"/>
</way>
<way id="15011" version="1" timestamp="2011-11-11T00:11:11Z">
<nd ref="15011"/>
@ -708,6 +672,7 @@
<member type="way" ref="15001" role="outer"/>
<member type="way" ref="15011" role="inner"/>
<tag k="type" v="multipolygon"/>
<tag k="landuse" v="park"/>
</relation>
@ -947,7 +912,7 @@
<tag k="landuse" v="park"/>
</relation>
<!-- test removing of relation (r:50121) without tags -->
<!-- test old-style relation does not affect way -->
<node id="50101" version="1" timestamp="2011-11-11T00:11:11Z" lat="42" lon="10"/>
<node id="50102" version="1" timestamp="2011-11-11T00:11:11Z" lat="42" lon="11"/>
<node id="50103" version="1" timestamp="2011-11-11T00:11:11Z" lat="44" lon="10"/>
@ -1031,10 +996,10 @@
<nd ref="52103"/>
<nd ref="52104"/>
<nd ref="52101"/>
<tag k="building" v="yes"/>
</way>
<relation id="52121" version="1" timestamp="2011-11-11T00:11:11Z">
<member type="way" ref="52111" role="outer"/>
<tag k="building" v="yes"/>
<tag k="type" v="multipolygon"/>
</relation>

View File

@ -54,7 +54,7 @@
},
"tables": {
"landusages": {
"fields": [
"columns": [
{
"type": "id",
"name": "osm_id",
@ -191,7 +191,7 @@
}
},
"buildings": {
"fields": [
"columns": [
{
"type": "id",
"name": "osm_id",
@ -228,7 +228,7 @@
}
},
"places": {
"fields": [
"columns": [
{
"type": "id",
"name": "osm_id",
@ -290,7 +290,7 @@
}
},
"transport_areas": {
"fields": [
"columns": [
{
"type": "id",
"name": "osm_id",
@ -327,7 +327,7 @@
}
},
"admin": {
"fields": [
"columns": [
{
"type": "id",
"name": "osm_id",
@ -362,7 +362,7 @@
}
},
"aeroways": {
"fields": [
"columns": [
{
"type": "id",
"name": "osm_id",
@ -393,7 +393,7 @@
}
},
"waterways": {
"fields": [
"columns": [
{
"type": "id",
"name": "osm_id",
@ -430,7 +430,7 @@
}
},
"barrierways": {
"fields": [
"columns": [
{
"type": "id",
"name": "osm_id",
@ -472,7 +472,7 @@
}
},
"transport_points": {
"fields": [
"columns": [
{
"type": "id",
"name": "osm_id",
@ -523,7 +523,7 @@
}
},
"amenities": {
"fields": [
"columns": [
{
"type": "id",
"name": "osm_id",
@ -560,7 +560,7 @@
}
},
"barrierpoints": {
"fields": [
"columns": [
{
"type": "id",
"name": "osm_id",
@ -605,7 +605,7 @@
}
},
"housenumbers_interpolated": {
"fields": [
"columns": [
{
"type": "id",
"name": "osm_id",
@ -655,7 +655,7 @@
}
},
"roads": {
"fields": [
"columns": [
{
"type": "id",
"name": "osm_id",
@ -784,7 +784,7 @@
}
},
"housenumbers": {
"fields": [
"columns": [
{
"type": "id",
"name": "osm_id",
@ -829,7 +829,7 @@
}
},
"waterareas": {
"fields": [
"columns": [
{
"type": "id",
"name": "osm_id",

View File

@ -61,37 +61,38 @@ func TestComplete_Deploy(t *testing.T) {
}
}
func TestComplete_OnlyNewStyleMultipolygon(t *testing.T) {
assertRecords(t, []checkElem{
{"osm_landusages", -1001, "wood", nil},
{"osm_landusages", -1011, Missing, nil},
{"osm_landusages", -1021, Missing, nil},
})
}
func TestComplete_LandusageToWaterarea1(t *testing.T) {
// Parks inserted into landusages
cache := ts.cache(t)
defer cache.Close()
assertCachedWay(t, cache, 11001)
assertCachedWay(t, cache, 12001)
assertCachedWay(t, cache, 13001)
assertRecords(t, []checkElem{
{"osm_waterareas", 11001, Missing, nil},
{"osm_waterareas", -12001, Missing, nil},
{"osm_waterareas", -13001, Missing, nil},
{"osm_waterareas_gen0", 11001, Missing, nil},
{"osm_waterareas_gen0", -12001, Missing, nil},
{"osm_waterareas_gen0", -13001, Missing, nil},
{"osm_waterareas_gen1", 11001, Missing, nil},
{"osm_waterareas_gen1", -12001, Missing, nil},
{"osm_waterareas_gen1", -13001, Missing, nil},
{"osm_landusages", 11001, "park", nil},
{"osm_landusages", -12001, "park", nil},
{"osm_landusages", -13001, "park", nil},
{"osm_landusages_gen0", 11001, "park", nil},
{"osm_landusages_gen0", -12001, "park", nil},
{"osm_landusages_gen0", -13001, "park", nil},
{"osm_landusages_gen1", 11001, "park", nil},
{"osm_landusages_gen1", -12001, "park", nil},
{"osm_landusages_gen1", -13001, "park", nil},
})
}
@ -106,7 +107,8 @@ func TestComplete_ChangedHoleTags1(t *testing.T) {
assertRecords(t, []checkElem{
{"osm_waterareas", 14011, Missing, nil},
{"osm_waterareas", -14011, Missing, nil},
{"osm_landusages", -14001, "park", nil},
{"osm_landusages", 14001, "park", nil},
{"osm_landusages", -14001, Missing, nil},
})
}
@ -218,19 +220,15 @@ func TestComplete_RelationWayNotInserted(t *testing.T) {
func TestComplete_RelationWaysInserted(t *testing.T) {
// Outer ways of multipolygon are inserted.
assertRecords(t, []checkElem{
{"osm_landusages", -9201, "park", map[string]string{"name": "9209"}},
// no name on relation
{"osm_landusages", -9201, "park", map[string]string{"name": ""}},
{"osm_landusages", 9201, Missing, nil},
{"osm_landusages", 9209, Missing, nil},
{"osm_landusages", 9210, Missing, nil},
// outer ways of multipolygon stand on their own
{"osm_roads", 9209, "secondary", map[string]string{"name": "9209"}},
{"osm_roads", 9210, "residential", map[string]string{"name": "9210"}},
// no name on relation
{"osm_landusages", -9301, "park", map[string]string{"name": ""}},
// outer ways of multipolygon stand for their own
{"osm_roads", 9309, "secondary", map[string]string{"name": "9309"}},
{"osm_roads", 9310, "residential", map[string]string{"name": "9310"}},
})
}
func TestComplete_RelationWayInserted(t *testing.T) {
@ -283,12 +281,12 @@ func TestComplete_RelationBeforeRemove(t *testing.T) {
})
}
func TestComplete_RelationWithoutTags(t *testing.T) {
// Relation without tags is inserted.
func TestComplete_OldStyleRelationIsIgnored(t *testing.T) {
// Relation without tags is not inserted.
assertRecords(t, []checkElem{
{"osm_buildings", 50111, Missing, nil},
{"osm_buildings", -50121, "yes", nil},
{"osm_buildings", 50111, "yes", nil},
{"osm_buildings", -50121, Missing, nil},
})
}
@ -332,9 +330,12 @@ func TestComplete_GeneralizedLinestringIsValid(t *testing.T) {
}
func TestComplete_RingWithGap(t *testing.T) {
// Multipolygon and way with gap (overlapping but different endpoints) gets closed
// Multipolygon with gap (overlapping but different endpoints) gets closed
assertGeomValid(t, checkElem{"osm_landusages", -7301, Missing, nil})
assertGeomValid(t, checkElem{"osm_landusages", 7311, Missing, nil})
// but not way
assertRecords(t, []checkElem{
checkElem{"osm_landusages", 7311, Missing, nil},
})
}
func TestComplete_MultipolygonWithOpenRing(t *testing.T) {
@ -480,27 +481,21 @@ func TestComplete_LandusageToWaterarea2(t *testing.T) {
assertRecords(t, []checkElem{
{"osm_waterareas", 11001, "water", nil},
{"osm_waterareas", -12001, "water", nil},
{"osm_waterareas", -13001, "water", nil},
{"osm_waterareas_gen0", 11001, "water", nil},
{"osm_waterareas_gen0", -12001, "water", nil},
{"osm_waterareas_gen0", -13001, "water", nil},
{"osm_waterareas_gen1", 11001, "water", nil},
{"osm_waterareas_gen1", -12001, "water", nil},
{"osm_waterareas_gen1", -13001, "water", nil},
{"osm_landusages", 11001, Missing, nil},
{"osm_landusages", -12001, Missing, nil},
{"osm_landusages", -13001, Missing, nil},
{"osm_landusages_gen0", 11001, Missing, nil},
{"osm_landusages_gen0", -12001, Missing, nil},
{"osm_landusages_gen0", -13001, Missing, nil},
{"osm_landusages_gen1", 11001, Missing, nil},
{"osm_landusages_gen1", -12001, Missing, nil},
{"osm_landusages_gen1", -13001, Missing, nil},
})
}
@ -515,6 +510,11 @@ func TestComplete_ChangedHoleTags2(t *testing.T) {
assertGeomArea(t, checkElem{"osm_waterareas", 14011, "water", nil}, 26672019779)
assertGeomArea(t, checkElem{"osm_landusages", -14001, "park", nil}, 10373697182)
assertRecords(t, []checkElem{
{"osm_waterareas", -14011, Missing, nil},
{"osm_landusages", -14001, "park", nil},
})
}
func TestComplete_SplitOuterMultipolygonWay2(t *testing.T) {

View File

@ -1,7 +1,7 @@
tables:
roads:
type: linestring
fields:
columns:
- name: osm_id
type: id
- name: type
@ -16,7 +16,7 @@ tables:
pois:
type: point
fields:
columns:
- name: osm_id
type: id
- name: type
@ -31,7 +31,7 @@ tables:
buildings:
type: polygon
fields:
columns:
- name: osm_id
type: id
- name: type

View File

@ -153,5 +153,15 @@
<tag k="type" v="route"/>
</relation>
<!-- non-route type is not imported -->
<node id="130101" version="1" timestamp="2015-12-31T23:59:99Z" lat="53.0" lon="8.200">
<tag k="name" v="Stop"/>
</node>
<relation id="130901" version="23" timestamp="2015-06-02T04:13:19Z">
<member type="node" ref="130101" role="stop"/>
<tag k="route" v="bus"/>
<tag k="type" v="bus_route"/> <!-- invalid type -->
</relation>
</osm>

View File

@ -1,13 +1,7 @@
tags:
load_all: true
exclude:
- created_by
- source
tables:
master_routes:
type: relation_member
fields:
columns:
- name: osm_id
type: id
- name: member
@ -27,11 +21,12 @@ tables:
- key: name
name: name
type: string
relation_types: [route_master]
mapping:
route_master: [bus]
route_members:
type: relation_member
fields:
columns:
- name: osm_id
type: id
- key: ref
@ -54,11 +49,12 @@ tables:
key: name
type: string
from_member: true
relation_types: [route]
mapping:
route: [bus, tram, rail]
routes:
type: relation
fields:
columns:
- name: osm_id
type: id
- key: ref
@ -66,5 +62,7 @@ tables:
type: string
- name: tags
type: hstore_tags
relation_types: [route, route_master]
mapping:
route_master: [bus, tram, rail]
route: [bus, tram, rail]

View File

@ -58,6 +58,23 @@ func TestRouteRelation_RelationData(t *testing.T) {
if r.tags["name"] != "Bus 301: A => B" {
t.Error(r)
}
// check tags of master relation
r = ts.queryTags(t, "osm_routes", -100911)
if r.tags["name"] != "Bus 301" {
t.Error(r)
}
}
func TestRouteRelation_MemberUpdatedByNode1(t *testing.T) {
// check that member is updated after node was modified
rows := ts.queryDynamic(t, "osm_route_members", "osm_id = -110901 AND member = 110101")
if len(rows) != 1 {
t.Fatal(rows)
}
if rows[0]["name"] != "Stop" {
t.Error(rows[0])
}
}
func TestRouteRelation_MemberGeomUpdated1(t *testing.T) {
@ -131,7 +148,7 @@ func TestRouteRelation_MemberGeomUpdated2(t *testing.T) {
}
func TestRouteRelation_MemberUpdatedByNode(t *testing.T) {
func TestRouteRelation_MemberUpdatedByNode2(t *testing.T) {
// check that member is updated after node was modified
rows := ts.queryDynamic(t, "osm_route_members", "osm_id = -110901 AND member = 110101")
if len(rows) != 1 {

View File

@ -9,7 +9,7 @@
"use_single_id_space": true,
"tables": {
"all": {
"fields": [
"columns": [
{
"type": "id",
"name": "osm_id",

View File

@ -159,7 +159,7 @@ func TestSingleTable_DuplicateIds1(t *testing.T) {
}
assertHstore(t, []checkElem{
{"osm_all", RelOffset - 31101, "*", map[string]string{"building": "yes"}},
{"osm_all", RelOffset - 31101, "*", map[string]string{"building": "yes", "type": "multipolygon"}},
})
assertGeomType(t, checkElem{"osm_all", RelOffset - 31101, "*", nil}, "Polygon")
}
@ -185,7 +185,7 @@ func TestSingleTable_DuplicateIds2(t *testing.T) {
}
assertHstore(t, []checkElem{
{"osm_all", RelOffset - 31101, "*", map[string]string{"building": "yes"}},
{"osm_all", RelOffset - 31101, "*", map[string]string{"building": "yes", "type": "multipolygon"}},
})
assertGeomType(t, checkElem{"osm_all", RelOffset - 31101, "*", nil}, "Polygon")
}

View File

@ -16,6 +16,8 @@ type Deleter struct {
tmPoints mapping.NodeMatcher
tmLineStrings mapping.WayMatcher
tmPolygons mapping.RelWayMatcher
tmRelation mapping.RelationMatcher
tmRelationMember mapping.RelationMatcher
expireor expire.Expireor
singleIdSpace bool
deletedRelations map[int64]struct{}
@ -28,6 +30,8 @@ func NewDeleter(db database.Deleter, osmCache *cache.OSMCache, diffCache *cache.
tmPoints mapping.NodeMatcher,
tmLineStrings mapping.WayMatcher,
tmPolygons mapping.RelWayMatcher,
tmRelation mapping.RelationMatcher,
tmRelationMember mapping.RelationMatcher,
) *Deleter {
return &Deleter{
delDb: db,
@ -36,6 +40,8 @@ func NewDeleter(db database.Deleter, osmCache *cache.OSMCache, diffCache *cache.
tmPoints: tmPoints,
tmLineStrings: tmLineStrings,
tmPolygons: tmPolygons,
tmRelation: tmRelation,
tmRelationMember: tmRelationMember,
singleIdSpace: singleIdSpace,
deletedRelations: make(map[int64]struct{}),
deletedWays: make(map[int64]struct{}),
@ -82,12 +88,27 @@ func (d *Deleter) deleteRelation(id int64, deleteRefs bool, deleteMembers bool)
if elem.Tags == nil {
return nil
}
// delete from all tables to handle relations with tags from members
// and relation_members
e := element.OSMElem(elem.OSMElem)
e.Id = d.RelId(e.Id)
if err := d.delDb.DeleteElem(e); err != nil {
return err
deleted := false
deletedPolygon := false
if matches := d.tmPolygons.MatchRelation(elem); len(matches) > 0 {
if err := d.delDb.Delete(d.RelId(elem.Id), matches); err != nil {
return err
}
deleted = true
deletedPolygon = true
}
if matches := d.tmRelation.MatchRelation(elem); len(matches) > 0 {
if err := d.delDb.Delete(d.RelId(elem.Id), matches); err != nil {
return err
}
deleted = true
}
if matches := d.tmRelationMember.MatchRelation(elem); len(matches) > 0 {
if err := d.delDb.Delete(d.RelId(elem.Id), matches); err != nil {
return err
}
deleted = true
}
if deleteRefs {
@ -96,13 +117,19 @@ func (d *Deleter) deleteRelation(id int64, deleteRefs bool, deleteMembers bool)
if err := d.diffCache.Ways.DeleteRef(m.Id, id); err != nil {
return err
}
} else if m.Type == element.NODE {
if err := d.diffCache.CoordsRel.DeleteRef(m.Id, id); err != nil {
return err
}
}
}
}
if deleteMembers {
if deleteMembers && deletedPolygon {
// delete members from db and force reinsert of members
// use case: relation is deleted and member now stands on its own
// TODO: still needed after old-style mp removal, remove when #148 is closed
for _, member := range elem.Members {
if member.Type == element.WAY {
d.deletedMembers[member.Id] = struct{}{}
@ -124,7 +151,7 @@ func (d *Deleter) deleteRelation(id int64, deleteRefs bool, deleteMembers bool)
if err := d.osmCache.InsertedWays.DeleteMembers(elem.Members); err != nil {
return err
}
if d.expireor != nil {
if deleted && d.expireor != nil {
if err := d.osmCache.Ways.FillMembers(elem.Members); err != nil {
return err
}
@ -136,7 +163,7 @@ func (d *Deleter) deleteRelation(id int64, deleteRefs bool, deleteMembers bool)
if err != nil {
continue
}
expire.ExpireProjectedNodes(d.expireor, m.Way.Nodes, 4326, true)
expire.ExpireProjectedNodes(d.expireor, m.Way.Nodes, 4326, deletedPolygon)
}
}
return nil
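The rewritten deleteRelation above no longer deletes the element from every table; it deletes only from tables whose matcher returned matches, and remembers whether a polygon table was among them to decide on member reinsertion and tile expiry. A self-contained sketch of that match-then-delete pattern, using toy stand-ins for the matcher and database interfaces (the real ones are the interfaces changed elsewhere in this diff):

package main

import "fmt"

// Illustrative stand-ins only.
type Match struct{ Table string }

type matcherFunc func(id int64) []Match

type memDB map[string][]int64

func (db memDB) Delete(id int64, matches []Match) error {
	for _, m := range matches {
		db[m.Table] = append(db[m.Table], id)
	}
	return nil
}

func main() {
	db := memDB{}
	// Only the polygon matcher matches this relation; the relation and
	// relation_member matchers return nothing, so their tables are untouched.
	polygons := matcherFunc(func(int64) []Match { return []Match{{Table: "osm_landusages"}} })
	relations := matcherFunc(func(int64) []Match { return nil })

	deleted, deletedPolygon := false, false
	for i, match := range []matcherFunc{polygons, relations} {
		if ms := match(-14001); len(ms) > 0 {
			if err := db.Delete(-14001, ms); err != nil {
				fmt.Println(err)
				return
			}
			deleted = true
			if i == 0 {
				deletedPolygon = true
			}
		}
	}
	fmt.Println(deleted, deletedPolygon, db) // true true map[osm_landusages:[-14001]]
}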

View File

@ -119,7 +119,7 @@ func Update(oscFile string, geometryLimiter *limit.Limiter, expireor expire.Expi
ProductionSchema: config.BaseOptions.Schemas.Production,
BackupSchema: config.BaseOptions.Schemas.Backup,
}
db, err := database.Open(dbConf, tagmapping)
db, err := database.Open(dbConf, &tagmapping.Conf)
if err != nil {
return errors.New("database open: " + err.Error())
}
@ -144,10 +144,12 @@ func Update(oscFile string, geometryLimiter *limit.Limiter, expireor expire.Expi
delDb,
osmCache,
diffCache,
tagmapping.SingleIdSpace,
tagmapping.PointMatcher(),
tagmapping.LineStringMatcher(),
tagmapping.PolygonMatcher(),
tagmapping.Conf.SingleIdSpace,
tagmapping.PointMatcher,
tagmapping.LineStringMatcher,
tagmapping.PolygonMatcher,
tagmapping.RelationMatcher,
tagmapping.RelationMemberMatcher,
)
deleter.SetExpireor(expireor)
@ -162,23 +164,23 @@ func Update(oscFile string, geometryLimiter *limit.Limiter, expireor expire.Expi
nodes := make(chan *element.Node)
relWriter := writer.NewRelationWriter(osmCache, diffCache,
tagmapping.SingleIdSpace,
tagmapping.Conf.SingleIdSpace,
relations,
db, progress,
tagmapping.PolygonMatcher(),
tagmapping.RelationMatcher(),
tagmapping.RelationMemberMatcher(),
tagmapping.PolygonMatcher,
tagmapping.RelationMatcher,
tagmapping.RelationMemberMatcher,
config.BaseOptions.Srid)
relWriter.SetLimiter(geometryLimiter)
relWriter.SetExpireor(expireor)
relWriter.Start()
wayWriter := writer.NewWayWriter(osmCache, diffCache,
tagmapping.SingleIdSpace,
tagmapping.Conf.SingleIdSpace,
ways, db,
progress,
tagmapping.PolygonMatcher(),
tagmapping.LineStringMatcher(),
tagmapping.PolygonMatcher,
tagmapping.LineStringMatcher,
config.BaseOptions.Srid)
wayWriter.SetLimiter(geometryLimiter)
wayWriter.SetExpireor(expireor)
@ -186,7 +188,7 @@ func Update(oscFile string, geometryLimiter *limit.Limiter, expireor expire.Expi
nodeWriter := writer.NewNodeWriter(osmCache, nodes, db,
progress,
tagmapping.PointMatcher(),
tagmapping.PointMatcher,
config.BaseOptions.Srid)
nodeWriter.SetLimiter(geometryLimiter)
nodeWriter.SetExpireor(expireor)
@ -350,11 +352,7 @@ func Update(oscFile string, geometryLimiter *limit.Limiter, expireor expire.Expi
}
// insert new relation
progress.AddRelations(1)
// filter out unsupported relation types, otherwise they might
// get inserted with the tags from an outer way
if relTagFilter.Filter(&rel.Tags) {
relations <- rel
}
relations <- rel
}
for wayId, _ := range wayIds {

11
vendor/github.com/lib/pq/README.md generated vendored
View File

@ -1,6 +1,6 @@
# pq - A pure Go postgres driver for Go's database/sql package
[![Build Status](https://travis-ci.org/lib/pq.png?branch=master)](https://travis-ci.org/lib/pq)
[![Build Status](https://travis-ci.org/lib/pq.svg?branch=master)](https://travis-ci.org/lib/pq)
## Install
@ -20,11 +20,11 @@ variables.
Example:
PGHOST=/var/run/postgresql go test github.com/lib/pq
PGHOST=/run/postgresql go test github.com/lib/pq
Optionally, a benchmark suite can be run as part of the tests:
PGHOST=/var/run/postgresql go test -bench .
PGHOST=/run/postgresql go test -bench .
## Features
@ -38,6 +38,7 @@ Optionally, a benchmark suite can be run as part of the tests:
* Many libpq compatible environment variables
* Unix socket support
* Notifications: `LISTEN`/`NOTIFY`
* pgpass support
## Future / Things you can help with
@ -57,6 +58,7 @@ code still exists in here.
* Brad Fitzpatrick (bradfitz)
* Charlie Melbye (cmelbye)
* Chris Bandy (cbandy)
* Chris Gilling (cgilling)
* Chris Walsh (cwds)
* Dan Sosedoff (sosedoff)
* Daniel Farina (fdr)
@ -66,6 +68,7 @@ code still exists in here.
* Everyone at The Go Team
* Evan Shaw (edsrzf)
* Ewan Chou (coocood)
* Fazal Majid (fazalmajid)
* Federico Romero (federomero)
* Fumin (fumin)
* Gary Burd (garyburd)
@ -82,7 +85,7 @@ code still exists in here.
* Keith Rarick (kr)
* Kir Shatrov (kirs)
* Lann Martin (lann)
* Maciek Sakrejda (deafbybeheading)
* Maciek Sakrejda (uhoh-itsmaciek)
* Marc Brinkmann (mbr)
* Marko Tiikkaja (johto)
* Matt Newberry (MattNewberry)

756
vendor/github.com/lib/pq/array.go generated vendored Normal file
View File

@ -0,0 +1,756 @@
package pq
import (
"bytes"
"database/sql"
"database/sql/driver"
"encoding/hex"
"fmt"
"reflect"
"strconv"
"strings"
)
var typeByteSlice = reflect.TypeOf([]byte{})
var typeDriverValuer = reflect.TypeOf((*driver.Valuer)(nil)).Elem()
var typeSqlScanner = reflect.TypeOf((*sql.Scanner)(nil)).Elem()
// Array returns the optimal driver.Valuer and sql.Scanner for an array or
// slice of any dimension.
//
// For example:
// db.Query(`SELECT * FROM t WHERE id = ANY($1)`, pq.Array([]int{235, 401}))
//
// var x []sql.NullInt64
// db.QueryRow('SELECT ARRAY[235, 401]').Scan(pq.Array(&x))
//
// Scanning multi-dimensional arrays is not supported. Arrays where the lower
// bound is not one (such as `[0:0]={1}') are not supported.
func Array(a interface{}) interface {
driver.Valuer
sql.Scanner
} {
switch a := a.(type) {
case []bool:
return (*BoolArray)(&a)
case []float64:
return (*Float64Array)(&a)
case []int64:
return (*Int64Array)(&a)
case []string:
return (*StringArray)(&a)
case *[]bool:
return (*BoolArray)(a)
case *[]float64:
return (*Float64Array)(a)
case *[]int64:
return (*Int64Array)(a)
case *[]string:
return (*StringArray)(a)
}
return GenericArray{a}
}
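Expanded into a compilable form, the usage from the comment above looks roughly like this; the connection string and table name are placeholders, and a reachable PostgreSQL server is assumed:

package main

import (
	"database/sql"
	"fmt"
	"log"

	"github.com/lib/pq"
)

func main() {
	db, err := sql.Open("postgres", "dbname=test sslmode=disable") // placeholder DSN
	if err != nil {
		log.Fatal(err)
	}
	defer db.Close()

	// Pass a Go slice as a Postgres array parameter.
	rows, err := db.Query(`SELECT id FROM t WHERE id = ANY($1)`, pq.Array([]int64{235, 401}))
	if err != nil {
		log.Fatal(err)
	}
	defer rows.Close()

	// Scan a Postgres array back into a Go slice.
	var ids []int64
	if err := db.QueryRow(`SELECT ARRAY[235, 401]`).Scan(pq.Array(&ids)); err != nil {
		log.Fatal(err)
	}
	fmt.Println(ids) // [235 401]
}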
// ArrayDelimiter may be optionally implemented by driver.Valuer or sql.Scanner
// to override the array delimiter used by GenericArray.
type ArrayDelimiter interface {
// ArrayDelimiter returns the delimiter character(s) for this element's type.
ArrayDelimiter() string
}
// BoolArray represents a one-dimensional array of the PostgreSQL boolean type.
type BoolArray []bool
// Scan implements the sql.Scanner interface.
func (a *BoolArray) Scan(src interface{}) error {
switch src := src.(type) {
case []byte:
return a.scanBytes(src)
case string:
return a.scanBytes([]byte(src))
case nil:
*a = nil
return nil
}
return fmt.Errorf("pq: cannot convert %T to BoolArray", src)
}
func (a *BoolArray) scanBytes(src []byte) error {
elems, err := scanLinearArray(src, []byte{','}, "BoolArray")
if err != nil {
return err
}
if *a != nil && len(elems) == 0 {
*a = (*a)[:0]
} else {
b := make(BoolArray, len(elems))
for i, v := range elems {
if len(v) != 1 {
return fmt.Errorf("pq: could not parse boolean array index %d: invalid boolean %q", i, v)
}
switch v[0] {
case 't':
b[i] = true
case 'f':
b[i] = false
default:
return fmt.Errorf("pq: could not parse boolean array index %d: invalid boolean %q", i, v)
}
}
*a = b
}
return nil
}
// Value implements the driver.Valuer interface.
func (a BoolArray) Value() (driver.Value, error) {
if a == nil {
return nil, nil
}
if n := len(a); n > 0 {
// There will be exactly two curly brackets, N bytes of values,
// and N-1 bytes of delimiters.
b := make([]byte, 1+2*n)
for i := 0; i < n; i++ {
b[2*i] = ','
if a[i] {
b[1+2*i] = 't'
} else {
b[1+2*i] = 'f'
}
}
b[0] = '{'
b[2*n] = '}'
return string(b), nil
}
return "{}", nil
}
// ByteaArray represents a one-dimensional array of the PostgreSQL bytea type.
type ByteaArray [][]byte
// Scan implements the sql.Scanner interface.
func (a *ByteaArray) Scan(src interface{}) error {
switch src := src.(type) {
case []byte:
return a.scanBytes(src)
case string:
return a.scanBytes([]byte(src))
case nil:
*a = nil
return nil
}
return fmt.Errorf("pq: cannot convert %T to ByteaArray", src)
}
func (a *ByteaArray) scanBytes(src []byte) error {
elems, err := scanLinearArray(src, []byte{','}, "ByteaArray")
if err != nil {
return err
}
if *a != nil && len(elems) == 0 {
*a = (*a)[:0]
} else {
b := make(ByteaArray, len(elems))
for i, v := range elems {
b[i], err = parseBytea(v)
if err != nil {
return fmt.Errorf("could not parse bytea array index %d: %s", i, err.Error())
}
}
*a = b
}
return nil
}
// Value implements the driver.Valuer interface. It uses the "hex" format which
// is only supported on PostgreSQL 9.0 or newer.
func (a ByteaArray) Value() (driver.Value, error) {
if a == nil {
return nil, nil
}
if n := len(a); n > 0 {
// There will be at least two curly brackets, 2*N bytes of quotes,
// 3*N bytes of hex formatting, and N-1 bytes of delimiters.
size := 1 + 6*n
for _, x := range a {
size += hex.EncodedLen(len(x))
}
b := make([]byte, size)
for i, s := 0, b; i < n; i++ {
o := copy(s, `,"\\x`)
o += hex.Encode(s[o:], a[i])
s[o] = '"'
s = s[o+1:]
}
b[0] = '{'
b[size-1] = '}'
return string(b), nil
}
return "{}", nil
}
// Float64Array represents a one-dimensional array of the PostgreSQL double
// precision type.
type Float64Array []float64
// Scan implements the sql.Scanner interface.
func (a *Float64Array) Scan(src interface{}) error {
switch src := src.(type) {
case []byte:
return a.scanBytes(src)
case string:
return a.scanBytes([]byte(src))
case nil:
*a = nil
return nil
}
return fmt.Errorf("pq: cannot convert %T to Float64Array", src)
}
func (a *Float64Array) scanBytes(src []byte) error {
elems, err := scanLinearArray(src, []byte{','}, "Float64Array")
if err != nil {
return err
}
if *a != nil && len(elems) == 0 {
*a = (*a)[:0]
} else {
b := make(Float64Array, len(elems))
for i, v := range elems {
if b[i], err = strconv.ParseFloat(string(v), 64); err != nil {
return fmt.Errorf("pq: parsing array element index %d: %v", i, err)
}
}
*a = b
}
return nil
}
// Value implements the driver.Valuer interface.
func (a Float64Array) Value() (driver.Value, error) {
if a == nil {
return nil, nil
}
if n := len(a); n > 0 {
// There will be at least two curly brackets, N bytes of values,
// and N-1 bytes of delimiters.
b := make([]byte, 1, 1+2*n)
b[0] = '{'
b = strconv.AppendFloat(b, a[0], 'f', -1, 64)
for i := 1; i < n; i++ {
b = append(b, ',')
b = strconv.AppendFloat(b, a[i], 'f', -1, 64)
}
return string(append(b, '}')), nil
}
return "{}", nil
}
// GenericArray implements the driver.Valuer and sql.Scanner interfaces for
// an array or slice of any dimension.
type GenericArray struct{ A interface{} }
func (GenericArray) evaluateDestination(rt reflect.Type) (reflect.Type, func([]byte, reflect.Value) error, string) {
var assign func([]byte, reflect.Value) error
var del = ","
// TODO calculate the assign function for other types
// TODO repeat this section on the element type of arrays or slices (multidimensional)
{
if reflect.PtrTo(rt).Implements(typeSqlScanner) {
// dest is always addressable because it is an element of a slice.
assign = func(src []byte, dest reflect.Value) (err error) {
ss := dest.Addr().Interface().(sql.Scanner)
if src == nil {
err = ss.Scan(nil)
} else {
err = ss.Scan(src)
}
return
}
goto FoundType
}
assign = func([]byte, reflect.Value) error {
return fmt.Errorf("pq: scanning to %s is not implemented; only sql.Scanner", rt)
}
}
FoundType:
if ad, ok := reflect.Zero(rt).Interface().(ArrayDelimiter); ok {
del = ad.ArrayDelimiter()
}
return rt, assign, del
}
// Scan implements the sql.Scanner interface.
func (a GenericArray) Scan(src interface{}) error {
dpv := reflect.ValueOf(a.A)
switch {
case dpv.Kind() != reflect.Ptr:
return fmt.Errorf("pq: destination %T is not a pointer to array or slice", a.A)
case dpv.IsNil():
return fmt.Errorf("pq: destination %T is nil", a.A)
}
dv := dpv.Elem()
switch dv.Kind() {
case reflect.Slice:
case reflect.Array:
default:
return fmt.Errorf("pq: destination %T is not a pointer to array or slice", a.A)
}
switch src := src.(type) {
case []byte:
return a.scanBytes(src, dv)
case string:
return a.scanBytes([]byte(src), dv)
case nil:
if dv.Kind() == reflect.Slice {
dv.Set(reflect.Zero(dv.Type()))
return nil
}
}
return fmt.Errorf("pq: cannot convert %T to %s", src, dv.Type())
}
func (a GenericArray) scanBytes(src []byte, dv reflect.Value) error {
dtype, assign, del := a.evaluateDestination(dv.Type().Elem())
dims, elems, err := parseArray(src, []byte(del))
if err != nil {
return err
}
// TODO allow multidimensional
if len(dims) > 1 {
return fmt.Errorf("pq: scanning from multidimensional ARRAY%s is not implemented",
strings.Replace(fmt.Sprint(dims), " ", "][", -1))
}
// Treat a zero-dimensional array like an array with a single dimension of zero.
if len(dims) == 0 {
dims = append(dims, 0)
}
for i, rt := 0, dv.Type(); i < len(dims); i, rt = i+1, rt.Elem() {
switch rt.Kind() {
case reflect.Slice:
case reflect.Array:
if rt.Len() != dims[i] {
return fmt.Errorf("pq: cannot convert ARRAY%s to %s",
strings.Replace(fmt.Sprint(dims), " ", "][", -1), dv.Type())
}
default:
// TODO handle multidimensional
}
}
values := reflect.MakeSlice(reflect.SliceOf(dtype), len(elems), len(elems))
for i, e := range elems {
if err := assign(e, values.Index(i)); err != nil {
return fmt.Errorf("pq: parsing array element index %d: %v", i, err)
}
}
// TODO handle multidimensional
switch dv.Kind() {
case reflect.Slice:
dv.Set(values.Slice(0, dims[0]))
case reflect.Array:
for i := 0; i < dims[0]; i++ {
dv.Index(i).Set(values.Index(i))
}
}
return nil
}
// Value implements the driver.Valuer interface.
func (a GenericArray) Value() (driver.Value, error) {
if a.A == nil {
return nil, nil
}
rv := reflect.ValueOf(a.A)
switch rv.Kind() {
case reflect.Slice:
if rv.IsNil() {
return nil, nil
}
case reflect.Array:
default:
return nil, fmt.Errorf("pq: Unable to convert %T to array", a.A)
}
if n := rv.Len(); n > 0 {
// There will be at least two curly brackets, N bytes of values,
// and N-1 bytes of delimiters.
b := make([]byte, 0, 1+2*n)
b, _, err := appendArray(b, rv, n)
return string(b), err
}
return "{}", nil
}
// Int64Array represents a one-dimensional array of the PostgreSQL integer types.
type Int64Array []int64
// Scan implements the sql.Scanner interface.
func (a *Int64Array) Scan(src interface{}) error {
switch src := src.(type) {
case []byte:
return a.scanBytes(src)
case string:
return a.scanBytes([]byte(src))
case nil:
*a = nil
return nil
}
return fmt.Errorf("pq: cannot convert %T to Int64Array", src)
}
func (a *Int64Array) scanBytes(src []byte) error {
elems, err := scanLinearArray(src, []byte{','}, "Int64Array")
if err != nil {
return err
}
if *a != nil && len(elems) == 0 {
*a = (*a)[:0]
} else {
b := make(Int64Array, len(elems))
for i, v := range elems {
if b[i], err = strconv.ParseInt(string(v), 10, 64); err != nil {
return fmt.Errorf("pq: parsing array element index %d: %v", i, err)
}
}
*a = b
}
return nil
}
// Value implements the driver.Valuer interface.
func (a Int64Array) Value() (driver.Value, error) {
if a == nil {
return nil, nil
}
if n := len(a); n > 0 {
// There will be at least two curly brackets, N bytes of values,
// and N-1 bytes of delimiters.
b := make([]byte, 1, 1+2*n)
b[0] = '{'
b = strconv.AppendInt(b, a[0], 10)
for i := 1; i < n; i++ {
b = append(b, ',')
b = strconv.AppendInt(b, a[i], 10)
}
return string(append(b, '}')), nil
}
return "{}", nil
}
// StringArray represents a one-dimensional array of the PostgreSQL character types.
type StringArray []string
// Scan implements the sql.Scanner interface.
func (a *StringArray) Scan(src interface{}) error {
switch src := src.(type) {
case []byte:
return a.scanBytes(src)
case string:
return a.scanBytes([]byte(src))
case nil:
*a = nil
return nil
}
return fmt.Errorf("pq: cannot convert %T to StringArray", src)
}
func (a *StringArray) scanBytes(src []byte) error {
elems, err := scanLinearArray(src, []byte{','}, "StringArray")
if err != nil {
return err
}
if *a != nil && len(elems) == 0 {
*a = (*a)[:0]
} else {
b := make(StringArray, len(elems))
for i, v := range elems {
if b[i] = string(v); v == nil {
return fmt.Errorf("pq: parsing array element index %d: cannot convert nil to string", i)
}
}
*a = b
}
return nil
}
// Value implements the driver.Valuer interface.
func (a StringArray) Value() (driver.Value, error) {
if a == nil {
return nil, nil
}
if n := len(a); n > 0 {
// There will be at least two curly brackets, 2*N bytes of quotes,
// and N-1 bytes of delimiters.
b := make([]byte, 1, 1+3*n)
b[0] = '{'
b = appendArrayQuotedBytes(b, []byte(a[0]))
for i := 1; i < n; i++ {
b = append(b, ',')
b = appendArrayQuotedBytes(b, []byte(a[i]))
}
return string(append(b, '}')), nil
}
return "{}", nil
}
// appendArray appends rv to the buffer, returning the extended buffer and
// the delimiter used between elements.
//
// It panics when n <= 0 or rv's Kind is not reflect.Array nor reflect.Slice.
func appendArray(b []byte, rv reflect.Value, n int) ([]byte, string, error) {
var del string
var err error
b = append(b, '{')
if b, del, err = appendArrayElement(b, rv.Index(0)); err != nil {
return b, del, err
}
for i := 1; i < n; i++ {
b = append(b, del...)
if b, del, err = appendArrayElement(b, rv.Index(i)); err != nil {
return b, del, err
}
}
return append(b, '}'), del, nil
}
// appendArrayElement appends rv to the buffer, returning the extended buffer
// and the delimiter to use before the next element.
//
// When rv's Kind is neither reflect.Array nor reflect.Slice, it is converted
// using driver.DefaultParameterConverter and the resulting []byte or string
// is double-quoted.
//
// See http://www.postgresql.org/docs/current/static/arrays.html#ARRAYS-IO
func appendArrayElement(b []byte, rv reflect.Value) ([]byte, string, error) {
if k := rv.Kind(); k == reflect.Array || k == reflect.Slice {
if t := rv.Type(); t != typeByteSlice && !t.Implements(typeDriverValuer) {
if n := rv.Len(); n > 0 {
return appendArray(b, rv, n)
}
return b, "", nil
}
}
var del string = ","
var err error
var iv interface{} = rv.Interface()
if ad, ok := iv.(ArrayDelimiter); ok {
del = ad.ArrayDelimiter()
}
if iv, err = driver.DefaultParameterConverter.ConvertValue(iv); err != nil {
return b, del, err
}
switch v := iv.(type) {
case nil:
return append(b, "NULL"...), del, nil
case []byte:
return appendArrayQuotedBytes(b, v), del, nil
case string:
return appendArrayQuotedBytes(b, []byte(v)), del, nil
}
b, err = appendValue(b, iv)
return b, del, err
}
func appendArrayQuotedBytes(b, v []byte) []byte {
b = append(b, '"')
for {
i := bytes.IndexAny(v, `"\`)
if i < 0 {
b = append(b, v...)
break
}
if i > 0 {
b = append(b, v[:i]...)
}
b = append(b, '\\', v[i])
v = v[i+1:]
}
return append(b, '"')
}
func appendValue(b []byte, v driver.Value) ([]byte, error) {
return append(b, encode(nil, v, 0)...), nil
}
// parseArray extracts the dimensions and elements of an array represented in
// text format. Only representations emitted by the backend are supported.
// Notably, whitespace around brackets and delimiters is significant, and NULL
// is case-sensitive.
//
// See http://www.postgresql.org/docs/current/static/arrays.html#ARRAYS-IO
func parseArray(src, del []byte) (dims []int, elems [][]byte, err error) {
var depth, i int
if len(src) < 1 || src[0] != '{' {
return nil, nil, fmt.Errorf("pq: unable to parse array; expected %q at offset %d", '{', 0)
}
Open:
for i < len(src) {
switch src[i] {
case '{':
depth++
i++
case '}':
elems = make([][]byte, 0)
goto Close
default:
break Open
}
}
dims = make([]int, i)
Element:
for i < len(src) {
switch src[i] {
case '{':
if depth == len(dims) {
break Element
}
depth++
dims[depth-1] = 0
i++
case '"':
var elem = []byte{}
var escape bool
for i++; i < len(src); i++ {
if escape {
elem = append(elem, src[i])
escape = false
} else {
switch src[i] {
default:
elem = append(elem, src[i])
case '\\':
escape = true
case '"':
elems = append(elems, elem)
i++
break Element
}
}
}
default:
for start := i; i < len(src); i++ {
if bytes.HasPrefix(src[i:], del) || src[i] == '}' {
elem := src[start:i]
if len(elem) == 0 {
return nil, nil, fmt.Errorf("pq: unable to parse array; unexpected %q at offset %d", src[i], i)
}
if bytes.Equal(elem, []byte("NULL")) {
elem = nil
}
elems = append(elems, elem)
break Element
}
}
}
}
for i < len(src) {
if bytes.HasPrefix(src[i:], del) && depth > 0 {
dims[depth-1]++
i += len(del)
goto Element
} else if src[i] == '}' && depth > 0 {
dims[depth-1]++
depth--
i++
} else {
return nil, nil, fmt.Errorf("pq: unable to parse array; unexpected %q at offset %d", src[i], i)
}
}
Close:
for i < len(src) {
if src[i] == '}' && depth > 0 {
depth--
i++
} else {
return nil, nil, fmt.Errorf("pq: unable to parse array; unexpected %q at offset %d", src[i], i)
}
}
if depth > 0 {
err = fmt.Errorf("pq: unable to parse array; expected %q at offset %d", '}', i)
}
if err == nil {
for _, d := range dims {
if (len(elems) % d) != 0 {
err = fmt.Errorf("pq: multidimensional arrays must have elements with matching dimensions")
}
}
}
return
}
func scanLinearArray(src, del []byte, typ string) (elems [][]byte, err error) {
dims, elems, err := parseArray(src, del)
if err != nil {
return nil, err
}
if len(dims) > 1 {
return nil, fmt.Errorf("pq: cannot convert ARRAY%s to %s", strings.Replace(fmt.Sprint(dims), " ", "][", -1), typ)
}
return elems, err
}

29
vendor/github.com/lib/pq/buf.go generated vendored
View File

@ -21,6 +21,7 @@ func (b *readBuf) oid() (n oid.Oid) {
return
}
// N.B: this is actually an unsigned 16-bit integer, unlike int32
func (b *readBuf) int16() (n int) {
n = int(binary.BigEndian.Uint16(*b))
*b = (*b)[2:]
@ -47,28 +48,44 @@ func (b *readBuf) byte() byte {
return b.next(1)[0]
}
type writeBuf []byte
type writeBuf struct {
buf []byte
pos int
}
func (b *writeBuf) int32(n int) {
x := make([]byte, 4)
binary.BigEndian.PutUint32(x, uint32(n))
*b = append(*b, x...)
b.buf = append(b.buf, x...)
}
func (b *writeBuf) int16(n int) {
x := make([]byte, 2)
binary.BigEndian.PutUint16(x, uint16(n))
*b = append(*b, x...)
b.buf = append(b.buf, x...)
}
func (b *writeBuf) string(s string) {
*b = append(*b, (s + "\000")...)
b.buf = append(b.buf, (s + "\000")...)
}
func (b *writeBuf) byte(c byte) {
*b = append(*b, c)
b.buf = append(b.buf, c)
}
func (b *writeBuf) bytes(v []byte) {
*b = append(*b, v...)
b.buf = append(b.buf, v...)
}
func (b *writeBuf) wrap() []byte {
p := b.buf[b.pos:]
binary.BigEndian.PutUint32(p, uint32(len(p)))
return b.buf
}
func (b *writeBuf) next(c byte) {
p := b.buf[b.pos:]
binary.BigEndian.PutUint32(p, uint32(len(p)))
b.pos = len(b.buf) + 1
b.buf = append(b.buf, c, 0, 0, 0, 0)
}

1045
vendor/github.com/lib/pq/conn.go generated vendored

File diff suppressed because it is too large.

107
vendor/github.com/lib/pq/conn_go18.go generated vendored Normal file
View File

@ -0,0 +1,107 @@
// +build go1.8
package pq
import (
"context"
"database/sql/driver"
"errors"
"io"
"io/ioutil"
)
// Implement the "QueryerContext" interface
func (cn *conn) QueryContext(ctx context.Context, query string, args []driver.NamedValue) (driver.Rows, error) {
list := make([]driver.Value, len(args))
for i, nv := range args {
list[i] = nv.Value
}
finish := cn.watchCancel(ctx)
r, err := cn.query(query, list)
if err != nil {
return nil, err
}
r.finish = finish
return r, nil
}
// Implement the "ExecerContext" interface
func (cn *conn) ExecContext(ctx context.Context, query string, args []driver.NamedValue) (driver.Result, error) {
list := make([]driver.Value, len(args))
for i, nv := range args {
list[i] = nv.Value
}
if finish := cn.watchCancel(ctx); finish != nil {
defer finish()
}
return cn.Exec(query, list)
}
// Implement the "ConnBeginTx" interface
func (cn *conn) BeginTx(ctx context.Context, opts driver.TxOptions) (driver.Tx, error) {
if opts.Isolation != 0 {
return nil, errors.New("isolation levels not supported")
}
if opts.ReadOnly {
return nil, errors.New("read-only transactions not supported")
}
tx, err := cn.Begin()
if err != nil {
return nil, err
}
cn.txnFinish = cn.watchCancel(ctx)
return tx, nil
}
func (cn *conn) watchCancel(ctx context.Context) func() {
if done := ctx.Done(); done != nil {
finished := make(chan struct{})
go func() {
select {
case <-done:
_ = cn.cancel()
finished <- struct{}{}
case <-finished:
}
}()
return func() {
select {
case <-finished:
case finished <- struct{}{}:
}
}
}
return nil
}
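watchCancel above captures a reusable shape: a watcher goroutine waits on either ctx.Done() or a finished channel, and the returned closure lets the caller either acknowledge a cancellation that already fired or tell the watcher that the work completed normally. A self-contained sketch of the same idea, detached from pq's conn type:

package main

import (
	"context"
	"fmt"
	"time"
)

// watch starts a goroutine that calls onCancel if ctx is cancelled before the
// returned finish func runs. finish must be called exactly once when the
// guarded work is done; it either consumes the watcher's "already cancelled"
// signal or tells the watcher to stop.
func watch(ctx context.Context, onCancel func()) (finish func()) {
	finished := make(chan struct{})
	go func() {
		select {
		case <-ctx.Done():
			onCancel()
			finished <- struct{}{}
		case <-finished:
		}
	}()
	return func() {
		select {
		case <-finished:
		case finished <- struct{}{}:
		}
	}
}

func main() {
	ctx, cancel := context.WithTimeout(context.Background(), 50*time.Millisecond)
	defer cancel()

	finish := watch(ctx, func() { fmt.Println("cancel requested") })
	time.Sleep(100 * time.Millisecond) // pretend to do slow work
	finish()
}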
func (cn *conn) cancel() error {
c, err := dial(cn.dialer, cn.opts)
if err != nil {
return err
}
defer c.Close()
{
can := conn{
c: c,
}
can.ssl(cn.opts)
w := can.writeBuf(0)
w.int32(80877102) // cancel request code
w.int32(cn.processID)
w.int32(cn.secretKey)
if err := can.sendStartupPacket(w); err != nil {
return err
}
}
// Read until EOF to ensure that the server received the cancel.
{
_, err := io.Copy(ioutil.Discard, c)
return err
}
}
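With QueryerContext, ExecerContext and ConnBeginTx implemented, the standard database/sql context methods propagate cancellation down to pq, which issues a CancelRequest (code 80877102) on a second connection as shown in cancel() above. Usage is just the stock API; the DSN below is a placeholder and a reachable server is assumed:

package main

import (
	"context"
	"database/sql"
	"fmt"
	"log"
	"time"

	_ "github.com/lib/pq"
)

func main() {
	db, err := sql.Open("postgres", "dbname=test sslmode=disable")
	if err != nil {
		log.Fatal(err)
	}
	defer db.Close()

	// The statement is cancelled server-side when the context deadline fires.
	ctx, cancel := context.WithTimeout(context.Background(), 1*time.Second)
	defer cancel()

	_, err = db.ExecContext(ctx, "SELECT pg_sleep(10)")
	fmt.Println(err) // expect a cancellation error rather than a 10s wait
}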

38
vendor/github.com/lib/pq/copy.go generated vendored
View File

@ -13,6 +13,7 @@ var (
errBinaryCopyNotSupported = errors.New("pq: only text format supported for COPY")
errCopyToNotSupported = errors.New("pq: COPY TO is not supported")
errCopyNotSupportedOutsideTxn = errors.New("pq: COPY is only allowed inside a transaction")
errCopyInProgress = errors.New("pq: COPY in progress")
)
// CopyIn creates a COPY FROM statement which can be prepared with
@ -96,13 +97,13 @@ awaitCopyInResponse:
err = parseError(r)
case 'Z':
if err == nil {
cn.bad = true
ci.setBad()
errorf("unexpected ReadyForQuery in response to COPY")
}
cn.processReadyForQuery(r)
return nil, err
default:
cn.bad = true
ci.setBad()
errorf("unknown response for copy query: %q", t)
}
}
@ -121,7 +122,7 @@ awaitCopyInResponse:
cn.processReadyForQuery(r)
return nil, err
default:
cn.bad = true
ci.setBad()
errorf("unknown response for CopyFail: %q", t)
}
}
@ -142,7 +143,7 @@ func (ci *copyin) resploop() {
var r readBuf
t, err := ci.cn.recvMessage(&r)
if err != nil {
ci.cn.bad = true
ci.setBad()
ci.setError(err)
ci.done <- true
return
@ -160,7 +161,7 @@ func (ci *copyin) resploop() {
err := parseError(&r)
ci.setError(err)
default:
ci.cn.bad = true
ci.setBad()
ci.setError(fmt.Errorf("unknown response during CopyIn: %q", t))
ci.done <- true
return
@ -168,6 +169,19 @@ func (ci *copyin) resploop() {
}
}
func (ci *copyin) setBad() {
ci.Lock()
ci.cn.bad = true
ci.Unlock()
}
func (ci *copyin) isBad() bool {
ci.Lock()
b := ci.cn.bad
ci.Unlock()
return b
}
func (ci *copyin) isErrorSet() bool {
ci.Lock()
isSet := (ci.err != nil)
@ -205,7 +219,7 @@ func (ci *copyin) Exec(v []driver.Value) (r driver.Result, err error) {
return nil, errCopyInClosed
}
if ci.cn.bad {
if ci.isBad() {
return nil, driver.ErrBadConn
}
defer ci.cn.errRecover(&err)
@ -215,9 +229,7 @@ func (ci *copyin) Exec(v []driver.Value) (r driver.Result, err error) {
}
if len(v) == 0 {
err = ci.Close()
ci.closed = true
return nil, err
return nil, ci.Close()
}
numValues := len(v)
@ -240,11 +252,12 @@ func (ci *copyin) Exec(v []driver.Value) (r driver.Result, err error) {
}
func (ci *copyin) Close() (err error) {
if ci.closed {
return errCopyInClosed
if ci.closed { // Don't do anything, we're already closed
return nil
}
ci.closed = true
if ci.cn.bad {
if ci.isBad() {
return driver.ErrBadConn
}
defer ci.cn.errRecover(&err)
@ -259,6 +272,7 @@ func (ci *copyin) Close() (err error) {
}
<-ci.done
ci.cn.inCopy = false
if ci.isErrorSet() {
err = ci.err

25
vendor/github.com/lib/pq/doc.go generated vendored
View File

@ -86,9 +86,13 @@ variables not supported by pq are set, pq will panic during connection
establishment. Environment variables have a lower precedence than explicitly
provided connection parameters.
The pgpass mechanism as described in http://www.postgresql.org/docs/current/static/libpq-pgpass.html
is supported, but on Windows PGPASSFILE must be specified explicitly.
Queries
database/sql does not dictate any specific format for parameter
markers in query strings, and pq uses the Postgres-native ordinal markers,
as shown above. The same marker can be reused for the same parameter:
@ -112,8 +116,29 @@ For more details on RETURNING, see the Postgres documentation:
For additional instructions on querying see the documentation for the database/sql package.
Data Types
Parameters pass through driver.DefaultParameterConverter before they are handled
by this package. When the binary_parameters connection option is enabled,
[]byte values are sent directly to the backend as data in binary format.
This package returns the following types for values from the PostgreSQL backend:
- integer types smallint, integer, and bigint are returned as int64
- floating-point types real and double precision are returned as float64
- character types char, varchar, and text are returned as string
- temporal types date, time, timetz, timestamp, and timestamptz are returned as time.Time
- the boolean type is returned as bool
- the bytea type is returned as []byte
All other types are returned directly from the backend as []byte values in text format.
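Translated into Scan destinations, that mapping looks roughly like the sketch below; the query is a hypothetical one chosen to cover the listed types, and the DSN is a placeholder:

package main

import (
	"database/sql"
	"fmt"
	"log"
	"time"

	_ "github.com/lib/pq"
)

func main() {
	db, err := sql.Open("postgres", "dbname=test sslmode=disable")
	if err != nil {
		log.Fatal(err)
	}
	defer db.Close()

	var (
		n  int64     // smallint/integer/bigint
		f  float64   // real/double precision
		s  string    // char/varchar/text
		ts time.Time // date/time/timestamp/timestamptz
		b  bool      // boolean
		by []byte    // bytea
	)
	err = db.QueryRow(
		`SELECT 1::bigint, 2.5::float8, 'hi'::text, now(), true, '\x00ff'::bytea`,
	).Scan(&n, &f, &s, &ts, &b, &by)
	if err != nil {
		log.Fatal(err)
	}
	fmt.Println(n, f, s, ts, b, by)
}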
Errors
pq may return errors of type *pq.Error which can be interrogated for error details:
if err, ok := err.(*pq.Error); ok {

218
vendor/github.com/lib/pq/encode.go generated vendored
View File

@ -3,7 +3,9 @@ package pq
import (
"bytes"
"database/sql/driver"
"encoding/binary"
"encoding/hex"
"errors"
"fmt"
"math"
"strconv"
@ -14,6 +16,15 @@ import (
"github.com/lib/pq/oid"
)
func binaryEncode(parameterStatus *parameterStatus, x interface{}) []byte {
switch v := x.(type) {
case []byte:
return v
default:
return encode(parameterStatus, x, oid.T_unknown)
}
}
func encode(parameterStatus *parameterStatus, x interface{}, pgtypOid oid.Oid) []byte {
switch v := x.(type) {
case int64:
@ -44,10 +55,51 @@ func encode(parameterStatus *parameterStatus, x interface{}, pgtypOid oid.Oid) [
panic("not reached")
}
func decode(parameterStatus *parameterStatus, s []byte, typ oid.Oid) interface{} {
func decode(parameterStatus *parameterStatus, s []byte, typ oid.Oid, f format) interface{} {
switch f {
case formatBinary:
return binaryDecode(parameterStatus, s, typ)
case formatText:
return textDecode(parameterStatus, s, typ)
default:
panic("not reached")
}
}
func binaryDecode(parameterStatus *parameterStatus, s []byte, typ oid.Oid) interface{} {
switch typ {
case oid.T_bytea:
return parseBytea(s)
return s
case oid.T_int8:
return int64(binary.BigEndian.Uint64(s))
case oid.T_int4:
return int64(int32(binary.BigEndian.Uint32(s)))
case oid.T_int2:
return int64(int16(binary.BigEndian.Uint16(s)))
case oid.T_uuid:
b, err := decodeUUIDBinary(s)
if err != nil {
panic(err)
}
return b
default:
errorf("don't know how to decode binary parameter of type %d", uint32(typ))
}
panic("not reached")
}
func textDecode(parameterStatus *parameterStatus, s []byte, typ oid.Oid) interface{} {
switch typ {
case oid.T_char, oid.T_varchar, oid.T_text:
return string(s)
case oid.T_bytea:
b, err := parseBytea(s)
if err != nil {
errorf("%s", err)
}
return b
case oid.T_timestamptz:
return parseTs(parameterStatus.currentLocation, string(s))
case oid.T_timestamp, oid.T_date:
@ -58,7 +110,7 @@ func decode(parameterStatus *parameterStatus, s []byte, typ oid.Oid) interface{}
return mustParse("15:04:05-07", typ, s)
case oid.T_bool:
return s[0] == 't'
case oid.T_int8, oid.T_int2, oid.T_int4:
case oid.T_int8, oid.T_int4, oid.T_int2:
i, err := strconv.ParseInt(string(s), 10, 64)
if err != nil {
errorf("%s", err)
@ -158,16 +210,39 @@ func mustParse(f string, typ oid.Oid, s []byte) time.Time {
return t
}
func expect(str, char string, pos int) {
if c := str[pos : pos+1]; c != char {
errorf("expected '%v' at position %v; got '%v'", char, pos, c)
var errInvalidTimestamp = errors.New("invalid timestamp")
type timestampParser struct {
err error
}
func (p *timestampParser) expect(str string, char byte, pos int) {
if p.err != nil {
return
}
if pos+1 > len(str) {
p.err = errInvalidTimestamp
return
}
if c := str[pos]; c != char && p.err == nil {
p.err = fmt.Errorf("expected '%v' at position %v; got '%v'", char, pos, c)
}
}
func mustAtoi(str string) int {
result, err := strconv.Atoi(str)
func (p *timestampParser) mustAtoi(str string, begin int, end int) int {
if p.err != nil {
return 0
}
if begin < 0 || end < 0 || begin > end || end > len(str) {
p.err = errInvalidTimestamp
return 0
}
result, err := strconv.Atoi(str[begin:end])
if err != nil {
errorf("expected number; got '%v'", str)
if p.err == nil {
p.err = fmt.Errorf("expected number; got '%v'", str)
}
return 0
}
return result
}
@ -182,7 +257,7 @@ type locationCache struct {
// about 5% speed could be gained by putting the cache in the connection and
// losing the mutex, at the cost of a small amount of memory and a somewhat
// significant increase in code complexity.
var globalLocationCache *locationCache = newLocationCache()
var globalLocationCache = newLocationCache()
func newLocationCache() *locationCache {
return &locationCache{cache: make(map[int]*time.Location)}
@ -212,26 +287,26 @@ const (
infinityTsNegativeMustBeSmaller = "pq: infinity timestamp: negative value must be smaller (before) than positive"
)
/*
* If EnableInfinityTs is not called, "-infinity" and "infinity" will return
* []byte("-infinity") and []byte("infinity") respectively, and potentially
* cause error "sql: Scan error on column index 0: unsupported driver -> Scan pair: []uint8 -> *time.Time",
* when scanning into a time.Time value.
*
* Once EnableInfinityTs has been called, all connections created using this
* driver will decode Postgres' "-infinity" and "infinity" for "timestamp",
* "timestamp with time zone" and "date" types to the predefined minimum and
* maximum times, respectively. When encoding time.Time values, any time which
* equals or preceeds the predefined minimum time will be encoded to
* "-infinity". Any values at or past the maximum time will similarly be
* encoded to "infinity".
*
*
* If EnableInfinityTs is called with negative >= positive, it will panic.
* Calling EnableInfinityTs after a connection has been established results in
* undefined behavior. If EnableInfinityTs is called more than once, it will
* panic.
*/
// EnableInfinityTs controls the handling of Postgres' "-infinity" and
// "infinity" "timestamp"s.
//
// If EnableInfinityTs is not called, "-infinity" and "infinity" will return
// []byte("-infinity") and []byte("infinity") respectively, and potentially
// cause error "sql: Scan error on column index 0: unsupported driver -> Scan
// pair: []uint8 -> *time.Time", when scanning into a time.Time value.
//
// Once EnableInfinityTs has been called, all connections created using this
// driver will decode Postgres' "-infinity" and "infinity" for "timestamp",
// "timestamp with time zone" and "date" types to the predefined minimum and
// maximum times, respectively. When encoding time.Time values, any time which
// equals or precedes the predefined minimum time will be encoded to
// "-infinity". Any values at or past the maximum time will similarly be
// encoded to "infinity".
//
// If EnableInfinityTs is called with negative >= positive, it will panic.
// Calling EnableInfinityTs after a connection has been established results in
// undefined behavior. If EnableInfinityTs is called more than once, it will
// panic.
func EnableInfinityTs(negative time.Time, positive time.Time) {
if infinityTsEnabled {
panic(infinityTsEnabledAlready)
@ -268,28 +343,41 @@ func parseTs(currentLocation *time.Location, str string) interface{} {
}
return []byte(str)
}
t, err := ParseTimestamp(currentLocation, str)
if err != nil {
panic(err)
}
return t
}
// ParseTimestamp parses Postgres' text format. It returns a time.Time in
// currentLocation iff that time's offset agrees with the offset sent from the
// Postgres server. Otherwise, ParseTimestamp returns a time.Time with the
// fixed offset provided by the Postgres server.
func ParseTimestamp(currentLocation *time.Location, str string) (time.Time, error) {
p := timestampParser{}
monSep := strings.IndexRune(str, '-')
// this is Gregorian year, not ISO Year
// In Gregorian system, the year 1 BC is followed by AD 1
year := mustAtoi(str[:monSep])
year := p.mustAtoi(str, 0, monSep)
daySep := monSep + 3
month := mustAtoi(str[monSep+1 : daySep])
expect(str, "-", daySep)
month := p.mustAtoi(str, monSep+1, daySep)
p.expect(str, '-', daySep)
timeSep := daySep + 3
day := mustAtoi(str[daySep+1 : timeSep])
day := p.mustAtoi(str, daySep+1, timeSep)
var hour, minute, second int
if len(str) > monSep+len("01-01")+1 {
expect(str, " ", timeSep)
p.expect(str, ' ', timeSep)
minSep := timeSep + 3
expect(str, ":", minSep)
hour = mustAtoi(str[timeSep+1 : minSep])
p.expect(str, ':', minSep)
hour = p.mustAtoi(str, timeSep+1, minSep)
secSep := minSep + 3
expect(str, ":", secSep)
minute = mustAtoi(str[minSep+1 : secSep])
p.expect(str, ':', secSep)
minute = p.mustAtoi(str, minSep+1, secSep)
secEnd := secSep + 3
second = mustAtoi(str[secSep+1 : secEnd])
second = p.mustAtoi(str, secSep+1, secEnd)
}
remainderIdx := monSep + len("01-01 00:00:00") + 1
// Three optional (but ordered) sections follow: the
@ -300,49 +388,50 @@ func parseTs(currentLocation *time.Location, str string) interface{} {
nanoSec := 0
tzOff := 0
if remainderIdx < len(str) && str[remainderIdx:remainderIdx+1] == "." {
if remainderIdx < len(str) && str[remainderIdx] == '.' {
fracStart := remainderIdx + 1
fracOff := strings.IndexAny(str[fracStart:], "-+ ")
if fracOff < 0 {
fracOff = len(str) - fracStart
}
fracSec := mustAtoi(str[fracStart : fracStart+fracOff])
fracSec := p.mustAtoi(str, fracStart, fracStart+fracOff)
nanoSec = fracSec * (1000000000 / int(math.Pow(10, float64(fracOff))))
remainderIdx += fracOff + 1
}
if tzStart := remainderIdx; tzStart < len(str) && (str[tzStart:tzStart+1] == "-" || str[tzStart:tzStart+1] == "+") {
if tzStart := remainderIdx; tzStart < len(str) && (str[tzStart] == '-' || str[tzStart] == '+') {
// time zone separator is always '-' or '+' (UTC is +00)
var tzSign int
if c := str[tzStart : tzStart+1]; c == "-" {
switch c := str[tzStart]; c {
case '-':
tzSign = -1
} else if c == "+" {
case '+':
tzSign = +1
} else {
errorf("expected '-' or '+' at position %v; got %v", tzStart, c)
default:
return time.Time{}, fmt.Errorf("expected '-' or '+' at position %v; got %v", tzStart, c)
}
tzHours := mustAtoi(str[tzStart+1 : tzStart+3])
tzHours := p.mustAtoi(str, tzStart+1, tzStart+3)
remainderIdx += 3
var tzMin, tzSec int
if tzStart+3 < len(str) && str[tzStart+3:tzStart+4] == ":" {
tzMin = mustAtoi(str[tzStart+4 : tzStart+6])
if remainderIdx < len(str) && str[remainderIdx] == ':' {
tzMin = p.mustAtoi(str, remainderIdx+1, remainderIdx+3)
remainderIdx += 3
}
if tzStart+6 < len(str) && str[tzStart+6:tzStart+7] == ":" {
tzSec = mustAtoi(str[tzStart+7 : tzStart+9])
if remainderIdx < len(str) && str[remainderIdx] == ':' {
tzSec = p.mustAtoi(str, remainderIdx+1, remainderIdx+3)
remainderIdx += 3
}
tzOff = tzSign * ((tzHours * 60 * 60) + (tzMin * 60) + tzSec)
}
var isoYear int
if remainderIdx < len(str) && str[remainderIdx:remainderIdx+3] == " BC" {
if remainderIdx+3 <= len(str) && str[remainderIdx:remainderIdx+3] == " BC" {
isoYear = 1 - year
remainderIdx += 3
} else {
isoYear = year
}
if remainderIdx < len(str) {
errorf("expected end of input, got %v", str[remainderIdx:])
return time.Time{}, fmt.Errorf("expected end of input, got %v", str[remainderIdx:])
}
t := time.Date(isoYear, time.Month(month), day,
hour, minute, second, nanoSec,
@ -359,11 +448,11 @@ func parseTs(currentLocation *time.Location, str string) interface{} {
}
}
return t
return t, p.err
}
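ParseTimestamp (and its counterpart FormatTimestamp further down) are exported by this change, so callers can reuse pq's timestamp text format outside of a query. A quick sketch; the timestamp literal is just an example value:

package main

import (
	"fmt"
	"log"

	"github.com/lib/pq"
)

func main() {
	// nil currentLocation keeps the fixed offset parsed from the string.
	t, err := pq.ParseTimestamp(nil, "2017-06-03 02:49:45+02")
	if err != nil {
		log.Fatal(err)
	}
	fmt.Println(t) // 2017-06-03 02:49:45 +0200 ...

	// FormatTimestamp produces the same text format for sending values back.
	fmt.Printf("%s\n", pq.FormatTimestamp(t))
}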
// formatTs formats t into a format postgres understands.
func formatTs(t time.Time) (b []byte) {
func formatTs(t time.Time) []byte {
if infinityTsEnabled {
// t <= -infinity : ! (t > -infinity)
if !t.After(infinityTsNegative) {
@ -374,6 +463,11 @@ func formatTs(t time.Time) (b []byte) {
return []byte("infinity")
}
}
return FormatTimestamp(t)
}
// FormatTimestamp formats t into Postgres' text format for timestamps.
func FormatTimestamp(t time.Time) []byte {
// Need to send dates before 0001 A.D. with " BC" suffix, instead of the
// minus sign preferred by Go.
// Beware, "0000" in ISO is "1 BC", "-0001" is "2 BC" and so on
@ -383,7 +477,7 @@ func formatTs(t time.Time) (b []byte) {
t = t.AddDate((-t.Year())*2+1, 0, 0)
bc = true
}
b = []byte(t.Format(time.RFC3339Nano))
b := []byte(t.Format("2006-01-02 15:04:05.999999999Z07:00"))
_, offset := t.Zone()
offset = offset % 60
@ -408,14 +502,14 @@ func formatTs(t time.Time) (b []byte) {
// Parse a bytea value received from the server. Both "hex" and the legacy
// "escape" format are supported.
func parseBytea(s []byte) (result []byte) {
func parseBytea(s []byte) (result []byte, err error) {
if len(s) >= 2 && bytes.Equal(s[:2], []byte("\\x")) {
// bytea_output = hex
s = s[2:] // trim off leading "\\x"
result = make([]byte, hex.DecodedLen(len(s)))
_, err := hex.Decode(result, s)
if err != nil {
errorf("%s", err)
return nil, err
}
} else {
// bytea_output = escape
@ -430,11 +524,11 @@ func parseBytea(s []byte) (result []byte) {
// '\\' followed by an octal number
if len(s) < 4 {
errorf("invalid bytea sequence %v", s)
return nil, fmt.Errorf("invalid bytea sequence %v", s)
}
r, err := strconv.ParseInt(string(s[1:4]), 8, 9)
if err != nil {
errorf("could not parse bytea value: %s", err.Error())
return nil, fmt.Errorf("could not parse bytea value: %s", err.Error())
}
result = append(result, byte(r))
s = s[4:]
@ -452,7 +546,7 @@ func parseBytea(s []byte) (result []byte) {
}
}
return result
return result, nil
}
func encodeBytea(serverVersion int, v []byte) (result []byte) {

28
vendor/github.com/lib/pq/notify.go generated vendored
View File

@ -62,14 +62,18 @@ type ListenerConn struct {
// Creates a new ListenerConn. Use NewListener instead.
func NewListenerConn(name string, notificationChan chan<- *Notification) (*ListenerConn, error) {
cn, err := Open(name)
return newDialListenerConn(defaultDialer{}, name, notificationChan)
}
func newDialListenerConn(d Dialer, name string, c chan<- *Notification) (*ListenerConn, error) {
cn, err := DialOpen(d, name)
if err != nil {
return nil, err
}
l := &ListenerConn{
cn: cn.(*conn),
notificationChan: notificationChan,
notificationChan: c,
connState: connStateIdle,
replyChan: make(chan message, 2),
}
@ -253,8 +257,10 @@ func (l *ListenerConn) sendSimpleQuery(q string) (err error) {
// Can't use l.cn.writeBuf here because it uses the scratch buffer which
// might get overwritten by listenerConnLoop.
data := writeBuf([]byte("Q\x00\x00\x00\x00"))
b := &data
b := &writeBuf{
buf: []byte("Q\x00\x00\x00\x00"),
pos: 1,
}
b.string(q)
l.cn.send(b)
@ -389,6 +395,7 @@ type Listener struct {
name string
minReconnectInterval time.Duration
maxReconnectInterval time.Duration
dialer Dialer
eventCallback EventCallbackType
lock sync.Mutex
@ -419,10 +426,21 @@ func NewListener(name string,
minReconnectInterval time.Duration,
maxReconnectInterval time.Duration,
eventCallback EventCallbackType) *Listener {
return NewDialListener(defaultDialer{}, name, minReconnectInterval, maxReconnectInterval, eventCallback)
}
// NewDialListener is like NewListener but it takes a Dialer.
func NewDialListener(d Dialer,
name string,
minReconnectInterval time.Duration,
maxReconnectInterval time.Duration,
eventCallback EventCallbackType) *Listener {
l := &Listener{
name: name,
minReconnectInterval: minReconnectInterval,
maxReconnectInterval: maxReconnectInterval,
dialer: d,
eventCallback: eventCallback,
channels: make(map[string]struct{}),
@ -658,7 +676,7 @@ func (l *Listener) closed() bool {
func (l *Listener) connect() error {
notificationChan := make(chan *Notification, 32)
cn, err := NewListenerConn(l.name, notificationChan)
cn, err := newDialListenerConn(l.dialer, l.name, notificationChan)
if err != nil {
return err
}
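For orientation, the exported listener entry points still compose as before; a minimal sketch where the connection string and channel name are placeholders, and NewDialListener is the new variant for callers that supply their own Dialer:

```go
package main

import (
	"fmt"
	"time"

	"github.com/lib/pq"
)

func main() {
	// A nil event callback is accepted; the reconnect intervals are arbitrary.
	l := pq.NewListener("dbname=osm sslmode=disable",
		10*time.Second, time.Minute, nil)
	if err := l.Listen("imposm_updates"); err != nil {
		fmt.Println(err)
		return
	}
	for n := range l.Notify {
		if n == nil {
			continue // a nil notification is delivered after a reconnect
		}
		fmt.Println("notification on", n.Channel, n.Extra)
	}
}
```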
158
vendor/github.com/lib/pq/ssl.go generated vendored Normal file
View File
@ -0,0 +1,158 @@
package pq
import (
"crypto/tls"
"crypto/x509"
"io/ioutil"
"net"
"os"
"os/user"
"path/filepath"
)
// ssl generates a function to upgrade a net.Conn based on the "sslmode" and
// related settings. The function is nil when no upgrade should take place.
func ssl(o values) func(net.Conn) net.Conn {
verifyCaOnly := false
tlsConf := tls.Config{}
switch mode := o["sslmode"]; mode {
// "require" is the default.
case "", "require":
// We must skip TLS's own verification since it requires full
// verification since Go 1.3.
tlsConf.InsecureSkipVerify = true
// From http://www.postgresql.org/docs/current/static/libpq-ssl.html:
//
// Note: For backwards compatibility with earlier versions of
// PostgreSQL, if a root CA file exists, the behavior of
// sslmode=require will be the same as that of verify-ca, meaning the
// server certificate is validated against the CA. Relying on this
// behavior is discouraged, and applications that need certificate
// validation should always use verify-ca or verify-full.
if sslrootcert, ok := o["sslrootcert"]; ok {
if _, err := os.Stat(sslrootcert); err == nil {
verifyCaOnly = true
} else {
delete(o, "sslrootcert")
}
}
case "verify-ca":
// We must skip TLS's own verification since it requires full
// verification since Go 1.3.
tlsConf.InsecureSkipVerify = true
verifyCaOnly = true
case "verify-full":
tlsConf.ServerName = o["host"]
case "disable":
return nil
default:
errorf(`unsupported sslmode %q; only "require" (default), "verify-full", "verify-ca", and "disable" supported`, mode)
}
sslClientCertificates(&tlsConf, o)
sslCertificateAuthority(&tlsConf, o)
sslRenegotiation(&tlsConf)
return func(conn net.Conn) net.Conn {
client := tls.Client(conn, &tlsConf)
if verifyCaOnly {
sslVerifyCertificateAuthority(client, &tlsConf)
}
return client
}
}
// sslClientCertificates adds the certificate specified in the "sslcert" and
// "sslkey" settings, or if they aren't set, from the .postgresql directory
// in the user's home directory. The configured files must exist and have
// the correct permissions.
func sslClientCertificates(tlsConf *tls.Config, o values) {
// user.Current() might fail when cross-compiling. We have to ignore the
// error and continue without home directory defaults, since we wouldn't
// know from where to load them.
user, _ := user.Current()
// In libpq, the client certificate is only loaded if the setting is not blank.
//
// https://github.com/postgres/postgres/blob/REL9_6_2/src/interfaces/libpq/fe-secure-openssl.c#L1036-L1037
sslcert := o["sslcert"]
if len(sslcert) == 0 && user != nil {
sslcert = filepath.Join(user.HomeDir, ".postgresql", "postgresql.crt")
}
// https://github.com/postgres/postgres/blob/REL9_6_2/src/interfaces/libpq/fe-secure-openssl.c#L1045
if len(sslcert) == 0 {
return
}
// https://github.com/postgres/postgres/blob/REL9_6_2/src/interfaces/libpq/fe-secure-openssl.c#L1050:L1054
if _, err := os.Stat(sslcert); os.IsNotExist(err) {
return
} else if err != nil {
panic(err)
}
// In libpq, the ssl key is only loaded if the setting is not blank.
//
// https://github.com/postgres/postgres/blob/REL9_6_2/src/interfaces/libpq/fe-secure-openssl.c#L1123-L1222
sslkey := o["sslkey"]
if len(sslkey) == 0 && user != nil {
sslkey = filepath.Join(user.HomeDir, ".postgresql", "postgresql.key")
}
if len(sslkey) > 0 {
if err := sslKeyPermissions(sslkey); err != nil {
panic(err)
}
}
cert, err := tls.LoadX509KeyPair(sslcert, sslkey)
if err != nil {
panic(err)
}
tlsConf.Certificates = []tls.Certificate{cert}
}
// sslCertificateAuthority adds the RootCA specified in the "sslrootcert" setting.
func sslCertificateAuthority(tlsConf *tls.Config, o values) {
// In libpq, the root certificate is only loaded if the setting is not blank.
//
// https://github.com/postgres/postgres/blob/REL9_6_2/src/interfaces/libpq/fe-secure-openssl.c#L950-L951
if sslrootcert := o["sslrootcert"]; len(sslrootcert) > 0 {
tlsConf.RootCAs = x509.NewCertPool()
cert, err := ioutil.ReadFile(sslrootcert)
if err != nil {
panic(err)
}
if !tlsConf.RootCAs.AppendCertsFromPEM(cert) {
errorf("couldn't parse pem in sslrootcert")
}
}
}
// sslVerifyCertificateAuthority carries out a TLS handshake to the server and
// verifies the presented certificate against the CA, i.e. the one specified in
// sslrootcert or the system CA if sslrootcert was not specified.
func sslVerifyCertificateAuthority(client *tls.Conn, tlsConf *tls.Config) {
err := client.Handshake()
if err != nil {
panic(err)
}
certs := client.ConnectionState().PeerCertificates
opts := x509.VerifyOptions{
DNSName: client.ConnectionState().ServerName,
Intermediates: x509.NewCertPool(),
Roots: tlsConf.RootCAs,
}
for i, cert := range certs {
if i == 0 {
continue
}
opts.Intermediates.AddCert(cert)
}
_, err = certs[0].Verify(opts)
if err != nil {
panic(err)
}
}
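The sslmode values handled above come straight from the connection parameters; a hypothetical DSN that exercises the verify-full branch (host, database, and certificate path are made up):

```go
package main

import (
	"database/sql"
	"fmt"

	_ "github.com/lib/pq"
)

func main() {
	// verify-full validates the CA chain and the server host name;
	// sslrootcert points at the CA bundle loaded by sslCertificateAuthority.
	dsn := "host=db.example.com port=5432 user=osm dbname=osm " +
		"sslmode=verify-full sslrootcert=/etc/ssl/certs/db-ca.pem"
	db, err := sql.Open("postgres", dsn)
	if err != nil {
		fmt.Println(err)
		return
	}
	defer db.Close()
	// Ping only succeeds if such a server and certificate actually exist.
	fmt.Println(db.Ping())
}
```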
14
vendor/github.com/lib/pq/ssl_go1.7.go generated vendored Normal file
View File
@ -0,0 +1,14 @@
// +build go1.7
package pq
import "crypto/tls"
// Accept renegotiation requests initiated by the backend.
//
// Renegotiation was deprecated then removed from PostgreSQL 9.5, but
// the default configuration of older versions has it enabled. Redshift
// also initiates renegotiations and cannot be reconfigured.
func sslRenegotiation(conf *tls.Config) {
conf.Renegotiation = tls.RenegotiateFreelyAsClient
}
20
vendor/github.com/lib/pq/ssl_permissions.go generated vendored Normal file
View File
@ -0,0 +1,20 @@
// +build !windows
package pq
import "os"
// sslKeyPermissions checks the permissions on user-supplied ssl key files.
// The key file should have very little access.
//
// libpq does not check key file permissions on Windows.
func sslKeyPermissions(sslkey string) error {
info, err := os.Stat(sslkey)
if err != nil {
return err
}
if info.Mode().Perm()&0077 != 0 {
return ErrSSLKeyHasWorldPermissions
}
return nil
}
8
vendor/github.com/lib/pq/ssl_renegotiation.go generated vendored Normal file
View File
@ -0,0 +1,8 @@
// +build !go1.7
package pq
import "crypto/tls"
// Renegotiation is not supported by crypto/tls until Go 1.7.
func sslRenegotiation(*tls.Config) {}
9
vendor/github.com/lib/pq/ssl_windows.go generated vendored Normal file
View File
@ -0,0 +1,9 @@
// +build windows
package pq
// sslKeyPermissions checks the permissions on user-supplied ssl key files.
// The key file should have very little access.
//
// libpq does not check key file permissions on Windows.
func sslKeyPermissions(string) error { return nil }
8
vendor/github.com/lib/pq/url.go generated vendored
View File
@ -2,6 +2,7 @@ package pq
import (
"fmt"
"net"
nurl "net/url"
"sort"
"strings"
@ -54,12 +55,11 @@ func ParseURL(url string) (string, error) {
accrue("password", v)
}
i := strings.Index(u.Host, ":")
if i < 0 {
if host, port, err := net.SplitHostPort(u.Host); err != nil {
accrue("host", u.Host)
} else {
accrue("host", u.Host[:i])
accrue("port", u.Host[i+1:])
accrue("host", host)
accrue("port", port)
}
if u.Path != "" {
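The switch to net.SplitHostPort means bracketed IPv6 hosts now split into host and port instead of being cut at the first colon; a small sketch (the URL is made up, and the key order of the returned string is not asserted here):

```go
package main

import (
	"fmt"

	"github.com/lib/pq"
)

func main() {
	// With SplitHostPort the bracketed IPv6 literal becomes host "::1"
	// and port "5432" rather than being split at the first ':'.
	dsn, err := pq.ParseURL("postgres://bob:secret@[::1]:5432/mydb?sslmode=verify-full")
	fmt.Println(dsn, err)
}
```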
View File
@ -1,6 +1,6 @@
// Package pq is a pure Go Postgres driver for the database/sql package.
// +build darwin dragonfly freebsd linux nacl netbsd openbsd solaris
// +build darwin dragonfly freebsd linux nacl netbsd openbsd solaris rumprun
package pq
23
vendor/github.com/lib/pq/uuid.go generated vendored Normal file
View File
@ -0,0 +1,23 @@
package pq
import (
"encoding/hex"
"fmt"
)
// decodeUUIDBinary interprets the binary format of a uuid, returning it in text format.
func decodeUUIDBinary(src []byte) ([]byte, error) {
if len(src) != 16 {
return nil, fmt.Errorf("pq: unable to decode uuid; bad length: %d", len(src))
}
dst := make([]byte, 36)
dst[8], dst[13], dst[18], dst[23] = '-', '-', '-', '-'
hex.Encode(dst[0:], src[0:4])
hex.Encode(dst[9:], src[4:6])
hex.Encode(dst[14:], src[6:8])
hex.Encode(dst[19:], src[8:10])
hex.Encode(dst[24:], src[10:16])
return dst, nil
}
23
vendor/github.com/pkg/errors/LICENSE generated vendored Normal file
View File
@ -0,0 +1,23 @@
Copyright (c) 2015, Dave Cheney <dave@cheney.net>
All rights reserved.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are met:
* Redistributions of source code must retain the above copyright notice, this
list of conditions and the following disclaimer.
* Redistributions in binary form must reproduce the above copyright notice,
this list of conditions and the following disclaimer in the documentation
and/or other materials provided with the distribution.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
52
vendor/github.com/pkg/errors/README.md generated vendored Normal file
View File
@ -0,0 +1,52 @@
# errors [![Travis-CI](https://travis-ci.org/pkg/errors.svg)](https://travis-ci.org/pkg/errors) [![AppVeyor](https://ci.appveyor.com/api/projects/status/b98mptawhudj53ep/branch/master?svg=true)](https://ci.appveyor.com/project/davecheney/errors/branch/master) [![GoDoc](https://godoc.org/github.com/pkg/errors?status.svg)](http://godoc.org/github.com/pkg/errors) [![Report card](https://goreportcard.com/badge/github.com/pkg/errors)](https://goreportcard.com/report/github.com/pkg/errors)
Package errors provides simple error handling primitives.
`go get github.com/pkg/errors`
The traditional error handling idiom in Go is roughly akin to
```go
if err != nil {
return err
}
```
which applied recursively up the call stack results in error reports without context or debugging information. The errors package allows programmers to add context to the failure path in their code in a way that does not destroy the original value of the error.
## Adding context to an error
The errors.Wrap function returns a new error that adds context to the original error. For example
```go
_, err := ioutil.ReadAll(r)
if err != nil {
return errors.Wrap(err, "read failed")
}
```
## Retrieving the cause of an error
Using `errors.Wrap` constructs a stack of errors, adding context to the preceding error. Depending on the nature of the error it may be necessary to reverse the operation of errors.Wrap to retrieve the original error for inspection. Any error value which implements this interface can be inspected by `errors.Cause`.
```go
type causer interface {
Cause() error
}
```
`errors.Cause` will recursively retrieve the topmost error which does not implement `causer`, which is assumed to be the original cause. For example:
```go
switch err := errors.Cause(err).(type) {
case *MyError:
// handle specifically
default:
// unknown error
}
```
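Putting the two together, a minimal example might look like this (the file name is only an illustration):

```go
package main

import (
	"fmt"
	"os"

	"github.com/pkg/errors"
)

func loadConfig(path string) error {
	f, err := os.Open(path)
	if err != nil {
		return errors.Wrap(err, "loading config failed")
	}
	f.Close()
	return nil
}

func main() {
	err := loadConfig("does-not-exist.json")
	// Prints the wrapped message followed by the original cause.
	fmt.Println(err)
	// Cause unwraps back to the original *os.PathError,
	// so this prints true when the file is absent.
	fmt.Println(os.IsNotExist(errors.Cause(err)))
}
```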
[Read the package documentation for more information](https://godoc.org/github.com/pkg/errors).
## Contributing
We welcome pull requests, bug fixes and issue reports. With that said, the bar for adding new symbols to this package is intentionally set high.
Before proposing a change, please discuss your change by raising an issue.
## Licence
BSD-2-Clause
32
vendor/github.com/pkg/errors/appveyor.yml generated vendored Normal file
View File
@ -0,0 +1,32 @@
version: build-{build}.{branch}
clone_folder: C:\gopath\src\github.com\pkg\errors
shallow_clone: true # for startup speed
environment:
GOPATH: C:\gopath
platform:
- x64
# http://www.appveyor.com/docs/installed-software
install:
# some helpful output for debugging builds
- go version
- go env
# pre-installed MinGW at C:\MinGW is 32bit only
# but MSYS2 at C:\msys64 has mingw64
- set PATH=C:\msys64\mingw64\bin;%PATH%
- gcc --version
- g++ --version
build_script:
- go install -v ./...
test_script:
- set PATH=C:\gopath\bin;%PATH%
- go test -v ./...
#artifacts:
# - path: '%GOPATH%\bin\*.exe'
deploy: off
269
vendor/github.com/pkg/errors/errors.go generated vendored Normal file
View File
@ -0,0 +1,269 @@
// Package errors provides simple error handling primitives.
//
// The traditional error handling idiom in Go is roughly akin to
//
// if err != nil {
// return err
// }
//
// which applied recursively up the call stack results in error reports
// without context or debugging information. The errors package allows
// programmers to add context to the failure path in their code in a way
// that does not destroy the original value of the error.
//
// Adding context to an error
//
// The errors.Wrap function returns a new error that adds context to the
// original error by recording a stack trace at the point Wrap is called,
// and the supplied message. For example
//
// _, err := ioutil.ReadAll(r)
// if err != nil {
// return errors.Wrap(err, "read failed")
// }
//
// If additional control is required the errors.WithStack and errors.WithMessage
// functions destructure errors.Wrap into its component operations of annotating
// an error with a stack trace and a message, respectively.
//
// Retrieving the cause of an error
//
// Using errors.Wrap constructs a stack of errors, adding context to the
// preceding error. Depending on the nature of the error it may be necessary
// to reverse the operation of errors.Wrap to retrieve the original error
// for inspection. Any error value which implements this interface
//
// type causer interface {
// Cause() error
// }
//
// can be inspected by errors.Cause. errors.Cause will recursively retrieve
// the topmost error which does not implement causer, which is assumed to be
// the original cause. For example:
//
// switch err := errors.Cause(err).(type) {
// case *MyError:
// // handle specifically
// default:
// // unknown error
// }
//
// causer interface is not exported by this package, but is considered a part
// of stable public API.
//
// Formatted printing of errors
//
// All error values returned from this package implement fmt.Formatter and can
// be formatted by the fmt package. The following verbs are supported
//
// %s print the error. If the error has a Cause it will be
// printed recursively
// %v see %s
// %+v extended format. Each Frame of the error's StackTrace will
// be printed in detail.
//
// Retrieving the stack trace of an error or wrapper
//
// New, Errorf, Wrap, and Wrapf record a stack trace at the point they are
// invoked. This information can be retrieved with the following interface.
//
// type stackTracer interface {
// StackTrace() errors.StackTrace
// }
//
// Where errors.StackTrace is defined as
//
// type StackTrace []Frame
//
// The Frame type represents a call site in the stack trace. Frame supports
// the fmt.Formatter interface that can be used for printing information about
// the stack trace of this error. For example:
//
// if err, ok := err.(stackTracer); ok {
// for _, f := range err.StackTrace() {
// fmt.Printf("%+s:%d", f)
// }
// }
//
// stackTracer interface is not exported by this package, but is considered a part
// of stable public API.
//
// See the documentation for Frame.Format for more details.
package errors
import (
"fmt"
"io"
)
// New returns an error with the supplied message.
// New also records the stack trace at the point it was called.
func New(message string) error {
return &fundamental{
msg: message,
stack: callers(),
}
}
// Errorf formats according to a format specifier and returns the string
// as a value that satisfies error.
// Errorf also records the stack trace at the point it was called.
func Errorf(format string, args ...interface{}) error {
return &fundamental{
msg: fmt.Sprintf(format, args...),
stack: callers(),
}
}
// fundamental is an error that has a message and a stack, but no caller.
type fundamental struct {
msg string
*stack
}
func (f *fundamental) Error() string { return f.msg }
func (f *fundamental) Format(s fmt.State, verb rune) {
switch verb {
case 'v':
if s.Flag('+') {
io.WriteString(s, f.msg)
f.stack.Format(s, verb)
return
}
fallthrough
case 's':
io.WriteString(s, f.msg)
case 'q':
fmt.Fprintf(s, "%q", f.msg)
}
}
// WithStack annotates err with a stack trace at the point WithStack was called.
// If err is nil, WithStack returns nil.
func WithStack(err error) error {
if err == nil {
return nil
}
return &withStack{
err,
callers(),
}
}
type withStack struct {
error
*stack
}
func (w *withStack) Cause() error { return w.error }
func (w *withStack) Format(s fmt.State, verb rune) {
switch verb {
case 'v':
if s.Flag('+') {
fmt.Fprintf(s, "%+v", w.Cause())
w.stack.Format(s, verb)
return
}
fallthrough
case 's':
io.WriteString(s, w.Error())
case 'q':
fmt.Fprintf(s, "%q", w.Error())
}
}
// Wrap returns an error annotating err with a stack trace
// at the point Wrap is called, and the supplied message.
// If err is nil, Wrap returns nil.
func Wrap(err error, message string) error {
if err == nil {
return nil
}
err = &withMessage{
cause: err,
msg: message,
}
return &withStack{
err,
callers(),
}
}
// Wrapf returns an error annotating err with a stack trace
// at the point Wrapf is called, and the format specifier.
// If err is nil, Wrapf returns nil.
func Wrapf(err error, format string, args ...interface{}) error {
if err == nil {
return nil
}
err = &withMessage{
cause: err,
msg: fmt.Sprintf(format, args...),
}
return &withStack{
err,
callers(),
}
}
// WithMessage annotates err with a new message.
// If err is nil, WithMessage returns nil.
func WithMessage(err error, message string) error {
if err == nil {
return nil
}
return &withMessage{
cause: err,
msg: message,
}
}
type withMessage struct {
cause error
msg string
}
func (w *withMessage) Error() string { return w.msg + ": " + w.cause.Error() }
func (w *withMessage) Cause() error { return w.cause }
func (w *withMessage) Format(s fmt.State, verb rune) {
switch verb {
case 'v':
if s.Flag('+') {
fmt.Fprintf(s, "%+v\n", w.Cause())
io.WriteString(s, w.msg)
return
}
fallthrough
case 's', 'q':
io.WriteString(s, w.Error())
}
}
// Cause returns the underlying cause of the error, if possible.
// An error value has a cause if it implements the following
// interface:
//
// type causer interface {
// Cause() error
// }
//
// If the error does not implement Cause, the original error will
// be returned. If the error is nil, nil will be returned without further
// investigation.
func Cause(err error) error {
type causer interface {
Cause() error
}
for err != nil {
cause, ok := err.(causer)
if !ok {
break
}
err = cause.Cause()
}
return err
}
178
vendor/github.com/pkg/errors/stack.go generated vendored Normal file
View File
@ -0,0 +1,178 @@
package errors
import (
"fmt"
"io"
"path"
"runtime"
"strings"
)
// Frame represents a program counter inside a stack frame.
type Frame uintptr
// pc returns the program counter for this frame;
// multiple frames may have the same PC value.
func (f Frame) pc() uintptr { return uintptr(f) - 1 }
// file returns the full path to the file that contains the
// function for this Frame's pc.
func (f Frame) file() string {
fn := runtime.FuncForPC(f.pc())
if fn == nil {
return "unknown"
}
file, _ := fn.FileLine(f.pc())
return file
}
// line returns the line number of source code of the
// function for this Frame's pc.
func (f Frame) line() int {
fn := runtime.FuncForPC(f.pc())
if fn == nil {
return 0
}
_, line := fn.FileLine(f.pc())
return line
}
// Format formats the frame according to the fmt.Formatter interface.
//
// %s source file
// %d source line
// %n function name
// %v equivalent to %s:%d
//
// Format accepts flags that alter the printing of some verbs, as follows:
//
// %+s path of source file relative to the compile time GOPATH
// %+v equivalent to %+s:%d
func (f Frame) Format(s fmt.State, verb rune) {
switch verb {
case 's':
switch {
case s.Flag('+'):
pc := f.pc()
fn := runtime.FuncForPC(pc)
if fn == nil {
io.WriteString(s, "unknown")
} else {
file, _ := fn.FileLine(pc)
fmt.Fprintf(s, "%s\n\t%s", fn.Name(), file)
}
default:
io.WriteString(s, path.Base(f.file()))
}
case 'd':
fmt.Fprintf(s, "%d", f.line())
case 'n':
name := runtime.FuncForPC(f.pc()).Name()
io.WriteString(s, funcname(name))
case 'v':
f.Format(s, 's')
io.WriteString(s, ":")
f.Format(s, 'd')
}
}
// StackTrace is a stack of Frames from innermost (newest) to outermost (oldest).
type StackTrace []Frame
func (st StackTrace) Format(s fmt.State, verb rune) {
switch verb {
case 'v':
switch {
case s.Flag('+'):
for _, f := range st {
fmt.Fprintf(s, "\n%+v", f)
}
case s.Flag('#'):
fmt.Fprintf(s, "%#v", []Frame(st))
default:
fmt.Fprintf(s, "%v", []Frame(st))
}
case 's':
fmt.Fprintf(s, "%s", []Frame(st))
}
}
// stack represents a stack of program counters.
type stack []uintptr
func (s *stack) Format(st fmt.State, verb rune) {
switch verb {
case 'v':
switch {
case st.Flag('+'):
for _, pc := range *s {
f := Frame(pc)
fmt.Fprintf(st, "\n%+v", f)
}
}
}
}
func (s *stack) StackTrace() StackTrace {
f := make([]Frame, len(*s))
for i := 0; i < len(f); i++ {
f[i] = Frame((*s)[i])
}
return f
}
func callers() *stack {
const depth = 32
var pcs [depth]uintptr
n := runtime.Callers(3, pcs[:])
var st stack = pcs[0:n]
return &st
}
// funcname removes the path prefix component of a function's name reported by func.Name().
func funcname(name string) string {
i := strings.LastIndex(name, "/")
name = name[i+1:]
i = strings.Index(name, ".")
return name[i+1:]
}
func trimGOPATH(name, file string) string {
// Here we want to get the source file path relative to the compile time
// GOPATH. As of Go 1.6.x there is no direct way to know the compiled
// GOPATH at runtime, but we can infer the number of path segments in the
// GOPATH. We note that fn.Name() returns the function name qualified by
// the import path, which does not include the GOPATH. Thus we can trim
// segments from the beginning of the file path until the number of path
// separators remaining is one more than the number of path separators in
// the function name. For example, given:
//
// GOPATH /home/user
// file /home/user/src/pkg/sub/file.go
// fn.Name() pkg/sub.Type.Method
//
// We want to produce:
//
// pkg/sub/file.go
//
// From this we can easily see that fn.Name() has one less path separator
// than our desired output. We count separators from the end of the file
// path until it finds two more than in the function name and then move
// one character forward to preserve the initial path segment without a
// leading separator.
const sep = "/"
goal := strings.Count(name, sep) + 2
i := len(file)
for n := 0; n < goal; n++ {
i = strings.LastIndex(file[:i], sep)
if i == -1 {
// not enough separators found, set i so that the slice expression
// below leaves file unmodified
i = -len(sep)
break
}
}
// get back to 0 or trim the leading separator
file = file[i+len(sep):]
return file
}
11
vendor/vendor.json vendored
View File
@ -13,9 +13,10 @@
"revisionTime": "2015-02-15T19:43:19-08:00"
},
{
"checksumSHA1": "uTUsjF7bymOuKvXbW2BpkK/w4Vg=",
"path": "github.com/lib/pq",
"revision": "93e9980741c9e593411b94e07d5bad8cfb4809db",
"revisionTime": "2015-05-02T14:36:36+03:00"
"revision": "2704adc878c21e1329f46f6e56a1c387d788ff94",
"revisionTime": "2017-03-24T20:46:54Z"
},
{
"path": "github.com/lib/pq/hstore",
@ -27,6 +28,12 @@
"revision": "93e9980741c9e593411b94e07d5bad8cfb4809db",
"revisionTime": "2015-05-02T14:36:36+03:00"
},
{
"checksumSHA1": "ynJSWoF6v+3zMnh9R0QmmG6iGV8=",
"path": "github.com/pkg/errors",
"revision": "248dadf4e9068a0b3e79f02ed0a610d935de5302",
"revisionTime": "2016-10-29T09:36:37Z"
},
{
"checksumSHA1": "8SH0adTcQlA+W5dzqiQ3Hft2VXg=",
"path": "golang.org/x/sys/unix",
View File
@ -7,6 +7,6 @@ var Version string
var buildVersion string
func init() {
Version = "0.3.0"
Version = "0.4.0"
Version += buildVersion
}
View File
@ -6,6 +6,7 @@ import (
"github.com/omniscale/imposm3/cache"
"github.com/omniscale/imposm3/database"
"github.com/omniscale/imposm3/element"
"github.com/omniscale/imposm3/expire"
geomp "github.com/omniscale/imposm3/geom"
"github.com/omniscale/imposm3/geom/geos"
"github.com/omniscale/imposm3/mapping"
@ -49,9 +50,6 @@ func (nw *NodeWriter) loop() {
for n := range nw.nodes {
nw.progress.AddNodes(1)
if matches := nw.pointMatcher.MatchNode(n); len(matches) > 0 {
if nw.expireor != nil {
nw.expireor.Expire(n.Long, n.Lat)
}
nw.NodeToSrid(n)
point, err := geomp.Point(geos, *n)
if err != nil {
@ -67,6 +65,7 @@ func (nw *NodeWriter) loop() {
continue
}
inserted := false
if nw.limiter != nil {
parts, err := nw.limiter.Clip(geom.Geom)
if err != nil {
@ -78,14 +77,19 @@ func (nw *NodeWriter) loop() {
log.Warn(err)
continue
}
inserted = true
}
} else {
if err := nw.inserter.InsertPoint(n.OSMElem, geom, matches); err != nil {
log.Warn(err)
continue
}
inserted = true
}
if inserted && nw.expireor != nil {
expire.ExpireProjectedNode(nw.expireor, *n, nw.srid)
}
}
}
nw.wg.Done()
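The reordering above ties tile expiry to a successful insert; reduced to a self-contained sketch with made-up types (not imposm3's actual API), the pattern is roughly:

```go
package main

import "fmt"

// Expireor is a stand-in for imposm3's expire interface; the type and
// the insert function below are illustrative placeholders.
type Expireor interface {
	Expire(long, lat float64)
}

type printExpireor struct{}

func (printExpireor) Expire(long, lat float64) {
	fmt.Printf("expire tile containing %.4f, %.4f\n", long, lat)
}

func insertPoint(long, lat float64) error { return nil } // placeholder insert

func main() {
	var expireor Expireor = printExpireor{}
	long, lat := 8.8017, 53.0793

	// Expire only after the element was actually written, so unmatched,
	// clipped-away, or failed inserts no longer mark tiles as dirty.
	inserted := false
	if err := insertPoint(long, lat); err == nil {
		inserted = true
	}
	if inserted && expireor != nil {
		expireor.Expire(long, lat)
	}
}
```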
View File
@ -134,8 +134,12 @@ NextRel:
}
func handleMultiPolygon(rw *RelationWriter, r *element.Relation, geos *geosp.Geos) bool {
// prepare relation first (build rings and compute actual
// relation tags)
matches := rw.polygonMatcher.MatchRelation(r)
if matches == nil {
return false
}
// prepare relation (build rings)
prepedRel, err := geomp.PrepareRelation(r, rw.srid, rw.maxGap)
if err != nil {
if errl, ok := err.(ErrorLevel); !ok || errl.Level() > 0 {
@ -144,12 +148,6 @@ func handleMultiPolygon(rw *RelationWriter, r *element.Relation, geos *geosp.Geo
return false
}
// check for matches before building the geometry
matches := rw.polygonMatcher.MatchRelation(r)
if matches == nil {
return false
}
// build the multipolygon
geom, err := prepedRel.Build()
if geom.Geom != nil {
@ -172,6 +170,9 @@ func handleMultiPolygon(rw *RelationWriter, r *element.Relation, geos *geosp.Geo
if duration := time.Now().Sub(start); duration > time.Minute {
log.Warnf("clipping relation %d to -limitto took %s", r.Id, duration)
}
if len(parts) == 0 {
return false
}
for _, g := range parts {
rel := element.Relation(*r)
rel.Id = rw.relId(r.Id)
View File
@ -78,52 +78,66 @@ func (ww *WayWriter) loop() {
continue
}
err = ww.osmCache.Coords.FillWay(w)
if err != nil {
continue
filled := false
// fill loads all coords; call it only if we have a match
fill := func(w *element.Way) bool {
if filled {
return true
}
err := ww.osmCache.Coords.FillWay(w)
if err != nil {
return false
}
ww.NodesToSrid(w.Nodes)
filled = true
return true
}
ww.NodesToSrid(w.Nodes)
w.Id = ww.wayId(w.Id)
inserted := false
insertedPolygon := false
if matches := ww.lineMatcher.MatchWay(w); len(matches) > 0 {
err := ww.buildAndInsert(geos, w, matches, false)
if !fill(w) {
continue
}
err, inserted = ww.buildAndInsert(geos, w, matches, false)
if err != nil {
if errl, ok := err.(ErrorLevel); !ok || errl.Level() > 0 {
log.Warn(err)
}
continue
}
inserted = true
}
if !insertedAsRelation && (w.IsClosed() || w.TryClose(ww.maxGap)) {
if !insertedAsRelation {
// only add polygons that were not inserted as a MultiPolygon relation
if matches := ww.polygonMatcher.MatchWay(w); len(matches) > 0 {
err := ww.buildAndInsert(geos, w, matches, true)
if err != nil {
if errl, ok := err.(ErrorLevel); !ok || errl.Level() > 0 {
log.Warn(err)
}
if !fill(w) {
continue
}
inserted = true
insertedPolygon = true
if w.IsClosed() {
err, insertedPolygon = ww.buildAndInsert(geos, w, matches, true)
if err != nil {
if errl, ok := err.(ErrorLevel); !ok || errl.Level() > 0 {
log.Warn(err)
}
continue
}
}
}
}
if inserted && ww.expireor != nil {
if (inserted || insertedPolygon) && ww.expireor != nil {
expire.ExpireProjectedNodes(ww.expireor, w.Nodes, ww.srid, insertedPolygon)
}
if ww.diffCache != nil {
if (inserted || insertedPolygon) && ww.diffCache != nil {
ww.diffCache.Coords.AddFromWay(w)
}
}
ww.wg.Done()
}
func (ww *WayWriter) buildAndInsert(g *geos.Geos, w *element.Way, matches []mapping.Match, isPolygon bool) error {
func (ww *WayWriter) buildAndInsert(g *geos.Geos, w *element.Way, matches []mapping.Match, isPolygon bool) (error, bool) {
var err error
var geosgeom *geos.Geom
// make copy to avoid interference with polygon/linestring matches
@ -138,42 +152,47 @@ func (ww *WayWriter) buildAndInsert(g *geos.Geos, w *element.Way, matches []mapp
geosgeom, err = geomp.LineString(g, way.Nodes)
}
if err != nil {
return err
return err, false
}
geom, err := geomp.AsGeomElement(g, geosgeom)
if err != nil {
return err
return err, false
}
inserted := true
if ww.limiter != nil {
parts, err := ww.limiter.Clip(geom.Geom)
if err != nil {
return err
return err, false
}
if len(parts) == 0 {
// outside of limitto
inserted = false
}
for _, p := range parts {
way := element.Way(*w)
geom = geomp.Geometry{Geom: p, Wkb: g.AsEwkbHex(p)}
if isPolygon {
if err := ww.inserter.InsertPolygon(way.OSMElem, geom, matches); err != nil {
return err
return err, false
}
} else {
if err := ww.inserter.InsertLineString(way.OSMElem, geom, matches); err != nil {
return err
return err, false
}
}
}
} else {
if isPolygon {
if err := ww.inserter.InsertPolygon(way.OSMElem, geom, matches); err != nil {
return err
return err, false
}
} else {
if err := ww.inserter.InsertLineString(way.OSMElem, geom, matches); err != nil {
return err
return err, false
}
}
}
return nil
return nil, inserted
}