add support for single table imports

- support type=geometry
- allow loading all tags
Branch: master
Oliver Tonnhofer, 2014-06-19 11:51:15 +02:00
parent 93b7f99291
commit a9ccec143c
20 changed files with 1216 additions and 799 deletions
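
The changes below add geometry-typed tables, per-geometry type_mappings, and a tags section with load_all/exclude. As a rough illustration, here is a minimal standalone Go sketch of how the new JSON keys decode; only the JSON tag names mirror the structs in this commit, while the table name "all", the tag keys, and the stand-in struct types are made up (the commit's actual single_table mapping file is not part of this excerpt):

package main

import (
    "encoding/json"
    "fmt"
)

// Local stand-ins; the JSON tags mirror the mapping structs added in this commit.
type Tags struct {
    LoadAll bool     `json:"load_all"`
    Exclude []string `json:"exclude"`
}

type TypeMappings struct {
    Points      map[string][]string `json:"points"`
    LineStrings map[string][]string `json:"linestrings"`
    Polygons    map[string][]string `json:"polygons"`
}

type Table struct {
    Type         string              `json:"type"`
    Mapping      map[string][]string `json:"mapping"`
    TypeMappings TypeMappings        `json:"type_mappings"`
}

type Mapping struct {
    Tables map[string]*Table `json:"tables"`
    Tags   Tags              `json:"tags"`
}

func main() {
    // Hypothetical single-table mapping: one "geometry" table, all tags loaded
    // except a few noisy ones. Table name and tag keys are invented.
    doc := []byte(`{
        "tags": {"load_all": true, "exclude": ["created_by", "source"]},
        "tables": {
            "all": {
                "type": "geometry",
                "mapping": {"building": ["__any__"], "highway": ["__any__"]},
                "type_mappings": {
                    "points":      {"amenity": ["__any__"]},
                    "linestrings": {"highway": ["__any__"]},
                    "polygons":    {"building": ["__any__"]}
                }
            }
        }
    }`)

    var m Mapping
    if err := json.Unmarshal(doc, &m); err != nil {
        panic(err)
    }
    fmt.Println(m.Tables["all"].Type, m.Tags.LoadAll, m.Tags.Exclude)
}

The mappings()/tables() changes in this commit treat a geometry table as a member of every geometry-specific mapping, so a single table like this one receives points, linestrings and polygons.
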

.gitignore (vendored), 4 changed lines

@ -2,5 +2,5 @@
mapping.json
config.json
test/build
test/.lasttestrun
test/*.pyc
test/.lasttestrun*
test/*.pyc


@ -14,9 +14,9 @@ type Deleter struct {
delDb database.Deleter
osmCache *cache.OSMCache
diffCache *cache.DiffCache
tmPoints *mapping.TagMatcher
tmLineStrings *mapping.TagMatcher
tmPolygons *mapping.TagMatcher
tmPoints mapping.NodeMatcher
tmLineStrings mapping.WayMatcher
tmPolygons mapping.RelWayMatcher
expireor expire.Expireor
deletedRelations map[int64]struct{}
deletedWays map[int64]struct{}
@ -24,9 +24,9 @@ type Deleter struct {
}
func NewDeleter(db database.Deleter, osmCache *cache.OSMCache, diffCache *cache.DiffCache,
tmPoints *mapping.TagMatcher,
tmLineStrings *mapping.TagMatcher,
tmPolygons *mapping.TagMatcher,
tmPoints mapping.NodeMatcher,
tmLineStrings mapping.WayMatcher,
tmPolygons mapping.RelWayMatcher,
) *Deleter {
return &Deleter{
db,
@ -63,7 +63,7 @@ func (d *Deleter) deleteRelation(id int64, deleteRefs bool, deleteMembers bool)
if elem.Tags == nil {
return nil
}
if matches := d.tmPolygons.Match(&elem.Tags); len(matches) > 0 {
if matches := d.tmPolygons.MatchRelation(elem); len(matches) > 0 {
if err := d.delDb.Delete(-elem.Id, matches); err != nil {
return err
}
@ -136,13 +136,13 @@ func (d *Deleter) deleteWay(id int64, deleteRefs bool) error {
return nil
}
deleted := false
if matches := d.tmPolygons.Match(&elem.Tags); len(matches) > 0 {
if matches := d.tmPolygons.MatchWay(elem); len(matches) > 0 {
if err := d.delDb.Delete(elem.Id, matches); err != nil {
return err
}
deleted = true
}
if matches := d.tmLineStrings.Match(&elem.Tags); len(matches) > 0 {
if matches := d.tmLineStrings.MatchWay(elem); len(matches) > 0 {
if err := d.delDb.Delete(elem.Id, matches); err != nil {
return err
}
@ -178,7 +178,7 @@ func (d *Deleter) deleteNode(id int64) error {
}
deleted := false
if matches := d.tmPoints.Match(&elem.Tags); len(matches) > 0 {
if matches := d.tmPoints.MatchNode(elem); len(matches) > 0 {
if err := d.delDb.Delete(elem.Id, matches); err != nil {
return err
}
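
The Deleter now takes per-element matcher interfaces instead of a concrete *mapping.TagMatcher (the interfaces are defined further down in this commit). A small self-contained sketch of the idea with local stand-in types; only the interface shape follows the diff, the rest is illustrative:

package main

import "fmt"

// Local stand-ins mirroring the matcher interfaces introduced later in this commit.
type Node struct{ Tags map[string]string }
type Way struct{ Tags map[string]string }

type Match struct{ Key, Value string }

type NodeMatcher interface {
    MatchNode(*Node) []Match
}

type WayMatcher interface {
    MatchWay(*Way) []Match
}

// pointMatcher only knows how to match nodes.
type pointMatcher struct{}

func (pointMatcher) MatchNode(n *Node) []Match {
    if v, ok := n.Tags["place"]; ok {
        return []Match{{"place", v}}
    }
    return nil
}

// deleteNode accepts any NodeMatcher; handing it a value that only implements
// WayMatcher is now a compile-time error instead of a runtime surprise.
func deleteNode(m NodeMatcher, n *Node) {
    for _, match := range m.MatchNode(n) {
        fmt.Println("delete", match.Key, "=", match.Value)
    }
}

func main() {
    deleteNode(pointMatcher{}, &Node{Tags: map[string]string{"place": "city"}})
}
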


@ -15,16 +15,13 @@ type Field struct {
}
type Table struct {
Name string
Type TableType `json:"type"`
Mapping map[Key][]Value `json:"mapping"`
Mappings map[string]SubMapping `json:"mappings"`
Fields []*Field `json:"fields"`
Filters *Filters `json:"filters"`
}
type SubMapping struct {
Mapping map[Key][]Value
Name string
Type TableType `json:"type"`
Mapping map[Key][]Value `json:"mapping"`
Mappings map[string]SubMapping `json:"mappings"`
TypeMappings TypeMappings `json:"type_mappings"`
Fields []*Field `json:"fields"`
Filters *Filters `json:"filters"`
}
type GeneralizedTable struct {
@ -45,7 +42,22 @@ type GeneralizedTables map[string]*GeneralizedTable
type Mapping struct {
Tables Tables `json:"tables"`
GeneralizedTables GeneralizedTables `json:"generalized_tables"`
LoadAllTags bool `json:"load_all_tags"`
Tags Tags `json:"tags"`
}
type Tags struct {
LoadAll bool `json:"load_all"`
Exclude []Key `json:"exclude"`
}
type SubMapping struct {
Mapping map[Key][]Value
}
type TypeMappings struct {
Points map[Key][]Value `json:"points"`
LineStrings map[Key][]Value `json:"linestrings"`
Polygons map[Key][]Value `json:"polygons"`
}
type ElementFilter func(tags *element.Tags) bool
@ -59,10 +71,29 @@ type DestTable struct {
type TableType string
func (tt *TableType) UnmarshalJSON(data []byte) error {
switch string(data) {
case "":
return errors.New("missing table type")
case `"point"`:
*tt = PointTable
case `"linestring"`:
*tt = LineStringTable
case `"polygon"`:
*tt = PolygonTable
case `"geometry"`:
*tt = GeometryTable
default:
return errors.New("unknown type " + string(data))
}
return nil
}
const (
PolygonTable TableType = "polygon"
LineStringTable TableType = "linestring"
PointTable TableType = "point"
GeometryTable TableType = "geometry"
)
func NewMapping(filename string) (*Mapping, error) {
@ -100,17 +131,7 @@ func (m *Mapping) prepare() error {
for name, t := range m.Tables {
t.Name = name
}
for name, t := range m.Tables {
switch t.Type {
case "":
return errors.New("missing table type for table " + name)
case "point":
case "linestring":
case "polygon":
default:
return errors.New("unknown type " + string(t.Type) + " for table " + name)
}
}
for name, t := range m.GeneralizedTables {
t.Name = name
}
@ -129,12 +150,11 @@ func (tt TagTables) addFromMapping(mapping map[Key][]Value, table DestTable) {
}
}
}
}
func (m *Mapping) mappings(tableType TableType, mappings TagTables) {
for name, t := range m.Tables {
if t.Type != tableType {
if t.Type != GeometryTable && t.Type != tableType {
continue
}
mappings.addFromMapping(t.Mapping, DestTable{name, ""})
@ -142,13 +162,22 @@ func (m *Mapping) mappings(tableType TableType, mappings TagTables) {
for subMappingName, subMapping := range t.Mappings {
mappings.addFromMapping(subMapping.Mapping, DestTable{name, subMappingName})
}
switch tableType {
case PointTable:
mappings.addFromMapping(t.TypeMappings.Points, DestTable{name, ""})
case LineStringTable:
mappings.addFromMapping(t.TypeMappings.LineStrings, DestTable{name, ""})
case PolygonTable:
mappings.addFromMapping(t.TypeMappings.Polygons, DestTable{name, ""})
}
}
}
func (m *Mapping) tables(tableType TableType) map[string]*TableFields {
result := make(map[string]*TableFields)
for name, t := range m.Tables {
if t.Type == tableType {
if t.Type == tableType || t.Type == "geometry" {
result[name] = t.TableFields()
}
}


@ -5,8 +5,8 @@ import (
)
func (m *Mapping) NodeTagFilter() TagFilterer {
if m.LoadAllTags {
return &NullFilter{}
if m.Tags.LoadAll {
return newExcludeFilter(m.Tags.Exclude)
}
mappings := make(map[Key]map[Value][]DestTable)
m.mappings("point", mappings)
@ -16,8 +16,8 @@ func (m *Mapping) NodeTagFilter() TagFilterer {
}
func (m *Mapping) WayTagFilter() TagFilterer {
if m.LoadAllTags {
return &NullFilter{}
if m.Tags.LoadAll {
return newExcludeFilter(m.Tags.Exclude)
}
mappings := make(map[Key]map[Value][]DestTable)
m.mappings("linestring", mappings)
@ -29,8 +29,8 @@ func (m *Mapping) WayTagFilter() TagFilterer {
}
func (m *Mapping) RelationTagFilter() TagFilterer {
if m.LoadAllTags {
return &NullFilter{}
if m.Tags.LoadAll {
return newExcludeFilter(m.Tags.Exclude)
}
mappings := make(map[Key]map[Value][]DestTable)
m.mappings("linestring", mappings)
@ -56,9 +56,24 @@ type RelationTagFilter struct {
TagFilter
}
type NullFilter struct{}
type ExcludeFilter struct {
exclude map[Key]struct{}
}
func (t *NullFilter) Filter(tags *element.Tags) bool {
func newExcludeFilter(tags []Key) *ExcludeFilter {
f := ExcludeFilter{make(map[Key]struct{}, len(tags))}
for _, tag := range tags {
f.exclude[tag] = struct{}{}
}
return &f
}
func (f *ExcludeFilter) Filter(tags *element.Tags) bool {
for k, _ := range *tags {
if _, ok := f.exclude[Key(k)]; ok {
delete(*tags, k)
}
}
return true
}
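
For illustration, a standalone sketch of the exclude behaviour added above. Key and Tags are local stand-ins for mapping.Key and element.Tags; the filter logic itself follows the diff:

package main

import "fmt"

// Key and Tags are local stand-ins for mapping.Key and element.Tags.
type Key string
type Tags map[string]string

type ExcludeFilter struct {
    exclude map[Key]struct{}
}

func newExcludeFilter(keys []Key) *ExcludeFilter {
    f := ExcludeFilter{make(map[Key]struct{}, len(keys))}
    for _, k := range keys {
        f.exclude[k] = struct{}{}
    }
    return &f
}

// Filter drops excluded keys in place and always keeps the element.
func (f *ExcludeFilter) Filter(tags *Tags) bool {
    for k := range *tags {
        if _, ok := f.exclude[Key(k)]; ok {
            delete(*tags, k)
        }
    }
    return true
}

func main() {
    f := newExcludeFilter([]Key{"created_by", "source"})
    tags := Tags{"name": "Foo", "created_by": "JOSM", "source": "survey"}
    f.Filter(&tags)
    fmt.Println(tags) // map[name:Foo]
}

Like the old NullFilter, the exclude filter keeps every element (Filter returns true); it only strips the configured keys from the tag map.
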


@ -251,97 +251,97 @@ func TestTagFilterRelations(t *testing.T) {
}
func TestPointMatcher(t *testing.T) {
var tags element.Tags
elem := element.Node{}
points := mapping.PointMatcher()
tags = element.Tags{"unknown": "baz"}
matchesEqual(t, []Match{}, points.Match(&tags))
elem.Tags = element.Tags{"unknown": "baz"}
matchesEqual(t, []Match{}, points.MatchNode(&elem))
tags = element.Tags{"place": "unknown"}
matchesEqual(t, []Match{}, points.Match(&tags))
elem.Tags = element.Tags{"place": "unknown"}
matchesEqual(t, []Match{}, points.MatchNode(&elem))
tags = element.Tags{"place": "city"}
matchesEqual(t, []Match{{"place", "city", DestTable{"places", ""}, nil}}, points.Match(&tags))
elem.Tags = element.Tags{"place": "city"}
matchesEqual(t, []Match{{"place", "city", DestTable{"places", ""}, nil}}, points.MatchNode(&elem))
tags = element.Tags{"place": "city", "highway": "unknown"}
matchesEqual(t, []Match{{"place", "city", DestTable{"places", ""}, nil}}, points.Match(&tags))
elem.Tags = element.Tags{"place": "city", "highway": "unknown"}
matchesEqual(t, []Match{{"place", "city", DestTable{"places", ""}, nil}}, points.MatchNode(&elem))
tags = element.Tags{"place": "city", "highway": "bus_stop"}
elem.Tags = element.Tags{"place": "city", "highway": "bus_stop"}
matchesEqual(t,
[]Match{
{"place", "city", DestTable{"places", ""}, nil},
{"highway", "bus_stop", DestTable{"transport_points", ""}, nil}},
points.Match(&tags))
points.MatchNode(&elem))
}
func TestLineStringMatcher(t *testing.T) {
var tags element.Tags
elem := element.Way{}
ls := mapping.LineStringMatcher()
tags = element.Tags{"unknown": "baz"}
matchesEqual(t, []Match{}, ls.Match(&tags))
elem.Tags = element.Tags{"unknown": "baz"}
matchesEqual(t, []Match{}, ls.MatchWay(&elem))
tags = element.Tags{"highway": "unknown"}
matchesEqual(t, []Match{}, ls.Match(&tags))
elem.Tags = element.Tags{"highway": "unknown"}
matchesEqual(t, []Match{}, ls.MatchWay(&elem))
tags = element.Tags{"highway": "pedestrian"}
matchesEqual(t, []Match{{"highway", "pedestrian", DestTable{"roads", "roads"}, nil}}, ls.Match(&tags))
elem.Tags = element.Tags{"highway": "pedestrian"}
matchesEqual(t, []Match{{"highway", "pedestrian", DestTable{"roads", "roads"}, nil}}, ls.MatchWay(&elem))
// exclude_tags area=yes
tags = element.Tags{"highway": "pedestrian", "area": "yes"}
matchesEqual(t, []Match{}, ls.Match(&tags))
elem.Tags = element.Tags{"highway": "pedestrian", "area": "yes"}
matchesEqual(t, []Match{}, ls.MatchWay(&elem))
tags = element.Tags{"highway": "secondary", "railway": "tram"}
elem.Tags = element.Tags{"highway": "secondary", "railway": "tram"}
matchesEqual(t,
[]Match{
{"highway", "secondary", DestTable{"roads", "roads"}, nil},
{"railway", "tram", DestTable{"roads", "railway"}, nil}},
ls.Match(&tags))
ls.MatchWay(&elem))
tags = element.Tags{"highway": "footway", "landuse": "park"}
elem.Tags = element.Tags{"highway": "footway", "landuse": "park"}
// landusages not a linestring table
matchesEqual(t, []Match{{"highway", "footway", DestTable{"roads", "roads"}, nil}}, ls.Match(&tags))
matchesEqual(t, []Match{{"highway", "footway", DestTable{"roads", "roads"}, nil}}, ls.MatchWay(&elem))
}
func TestPolygonMatcher(t *testing.T) {
var tags element.Tags
elem := element.Relation{}
polys := mapping.PolygonMatcher()
tags = element.Tags{"unknown": "baz"}
matchesEqual(t, []Match{}, polys.Match(&tags))
elem.Tags = element.Tags{"unknown": "baz"}
matchesEqual(t, []Match{}, polys.MatchRelation(&elem))
tags = element.Tags{"landuse": "unknowns"}
matchesEqual(t, []Match{}, polys.Match(&tags))
elem.Tags = element.Tags{"landuse": "unknowns"}
matchesEqual(t, []Match{}, polys.MatchRelation(&elem))
tags = element.Tags{"building": "yes"}
matchesEqual(t, []Match{{"building", "yes", DestTable{"buildings", ""}, nil}}, polys.Match(&tags))
tags = element.Tags{"building": "residential"}
matchesEqual(t, []Match{{"building", "residential", DestTable{"buildings", ""}, nil}}, polys.Match(&tags))
elem.Tags = element.Tags{"building": "yes"}
matchesEqual(t, []Match{{"building", "yes", DestTable{"buildings", ""}, nil}}, polys.MatchRelation(&elem))
elem.Tags = element.Tags{"building": "residential"}
matchesEqual(t, []Match{{"building", "residential", DestTable{"buildings", ""}, nil}}, polys.MatchRelation(&elem))
tags = element.Tags{"building": "shop"}
elem.Tags = element.Tags{"building": "shop"}
matchesEqual(t, []Match{
{"building", "shop", DestTable{"buildings", ""}, nil},
{"building", "shop", DestTable{"amenity_areas", ""}, nil}},
polys.Match(&tags))
polys.MatchRelation(&elem))
tags = element.Tags{"landuse": "farm"}
matchesEqual(t, []Match{{"landuse", "farm", DestTable{"landusages", ""}, nil}}, polys.Match(&tags))
elem.Tags = element.Tags{"landuse": "farm"}
matchesEqual(t, []Match{{"landuse", "farm", DestTable{"landusages", ""}, nil}}, polys.MatchRelation(&elem))
tags = element.Tags{"landuse": "farm", "highway": "secondary"}
matchesEqual(t, []Match{{"landuse", "farm", DestTable{"landusages", ""}, nil}}, polys.Match(&tags))
elem.Tags = element.Tags{"landuse": "farm", "highway": "secondary"}
matchesEqual(t, []Match{{"landuse", "farm", DestTable{"landusages", ""}, nil}}, polys.MatchRelation(&elem))
tags = element.Tags{"landuse": "farm", "aeroway": "apron"}
elem.Tags = element.Tags{"landuse": "farm", "aeroway": "apron"}
matchesEqual(t,
[]Match{
{"aeroway", "apron", DestTable{"transport_areas", ""}, nil},
{"landuse", "farm", DestTable{"landusages", ""}, nil}},
polys.Match(&tags))
polys.MatchRelation(&elem))
tags = element.Tags{"highway": "footway"}
matchesEqual(t, []Match{{"highway", "footway", DestTable{"landusages", ""}, nil}}, polys.Match(&tags))
elem.Tags = element.Tags{"highway": "footway"}
matchesEqual(t, []Match{{"highway", "footway", DestTable{"landusages", ""}, nil}}, polys.MatchRelation(&elem))
tags = element.Tags{"boundary": "administrative", "admin_level": "8"}
matchesEqual(t, []Match{{"boundary", "administrative", DestTable{"admin", ""}, nil}}, polys.Match(&tags))
elem.Tags = element.Tags{"boundary": "administrative", "admin_level": "8"}
matchesEqual(t, []Match{{"boundary", "administrative", DestTable{"admin", ""}, nil}}, polys.MatchRelation(&elem))
}
func TestFilterNodes(t *testing.T) {


@ -4,31 +4,25 @@ import (
"imposm3/element"
)
func (m *Mapping) PointMatcher() *TagMatcher {
func (m *Mapping) PointMatcher() NodeMatcher {
mappings := make(TagTables)
m.mappings("point", mappings)
filters := m.ElementFilters()
return &TagMatcher{mappings, m.tables("point"), filters}
return &tagMatcher{mappings, m.tables("point"), filters, false}
}
func (m *Mapping) LineStringMatcher() *TagMatcher {
func (m *Mapping) LineStringMatcher() WayMatcher {
mappings := make(TagTables)
m.mappings("linestring", mappings)
filters := m.ElementFilters()
return &TagMatcher{mappings, m.tables("linestring"), filters}
return &tagMatcher{mappings, m.tables("linestring"), filters, false}
}
func (m *Mapping) PolygonMatcher() *TagMatcher {
func (m *Mapping) PolygonMatcher() RelWayMatcher {
mappings := make(TagTables)
m.mappings("polygon", mappings)
filters := m.ElementFilters()
return &TagMatcher{mappings, m.tables("polygon"), filters}
}
type TagMatcher struct {
mappings TagTables
tables map[string]*TableFields
filters map[string][]ElementFilter
return &tagMatcher{mappings, m.tables("polygon"), filters, true}
}
type Match struct {
@ -37,32 +31,82 @@ type Match struct {
Table DestTable
tableFields *TableFields
}
type NodeMatcher interface {
MatchNode(node *element.Node) []Match
}
type WayMatcher interface {
MatchWay(way *element.Way) []Match
}
type RelationMatcher interface {
MatchRelation(rel *element.Relation) []Match
}
type RelWayMatcher interface {
WayMatcher
RelationMatcher
}
type tagMatcher struct {
mappings TagTables
tables map[string]*TableFields
filters map[string][]ElementFilter
matchAreas bool
}
func (m *Match) Row(elem *element.OSMElem) []interface{} {
return m.tableFields.MakeRow(elem, *m)
}
func (tagMatcher *TagMatcher) Match(tags *element.Tags) []Match {
func (tm *tagMatcher) MatchNode(node *element.Node) []Match {
return tm.match(&node.Tags)
}
func (tm *tagMatcher) MatchWay(way *element.Way) []Match {
if tm.matchAreas { // match way as polygon
if way.IsClosed() {
if way.Tags["area"] == "no" {
return nil
}
return tm.match(&way.Tags)
}
} else { // match way as linestring
if way.IsClosed() {
if way.Tags["area"] == "yes" {
return nil
}
}
return tm.match(&way.Tags)
}
return nil
}
func (tm *tagMatcher) MatchRelation(rel *element.Relation) []Match {
return tm.match(&rel.Tags)
}
func (tm *tagMatcher) match(tags *element.Tags) []Match {
tables := make(map[DestTable]Match)
for k, v := range *tags {
values, ok := tagMatcher.mappings[Key(k)]
values, ok := tm.mappings[Key(k)]
if ok {
if tbls, ok := values["__any__"]; ok {
for _, t := range tbls {
tables[t] = Match{k, v, t, tagMatcher.tables[t.Name]}
tables[t] = Match{k, v, t, tm.tables[t.Name]}
}
}
if tbls, ok := values[Value(v)]; ok {
for _, t := range tbls {
tables[t] = Match{k, v, t, tagMatcher.tables[t.Name]}
tables[t] = Match{k, v, t, tm.tables[t.Name]}
}
}
}
}
var matches []Match
for t, match := range tables {
filters, ok := tagMatcher.filters[t.Name]
filters, ok := tm.filters[t.Name]
filteredOut := false
if ok {
for _, filter := range filters {
@ -81,14 +125,14 @@ func (tagMatcher *TagMatcher) Match(tags *element.Tags) []Match {
// SelectRelationPolygons returns a slice of all members that are already
// imported with a relation with tags.
func SelectRelationPolygons(polygonTagMatcher *TagMatcher, tags element.Tags, members []element.Member) []element.Member {
relMatches := polygonTagMatcher.Match(&tags)
func SelectRelationPolygons(polygonTagMatcher RelWayMatcher, rel *element.Relation) []element.Member {
relMatches := polygonTagMatcher.MatchRelation(rel)
result := []element.Member{}
for _, m := range members {
for _, m := range rel.Members {
if m.Type != element.WAY {
continue
}
memberMatches := polygonTagMatcher.Match(&m.Way.Tags)
memberMatches := polygonTagMatcher.MatchWay(m.Way)
if matchEquals(relMatches, memberMatches) {
result = append(result, m)
}
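
The closed-way handling in MatchWay above is easy to misread, so here is the area rule reduced to a single standalone predicate; the area tag values and the closed-way check follow the diff, everything else is illustrative:

package main

import "fmt"

// wayMatchesAs mirrors the area handling in MatchWay above: a closed way is
// matched as a polygon unless it carries area=no, and as a linestring only
// if it is open or not explicitly tagged area=yes.
func wayMatchesAs(closed bool, tags map[string]string, matchAreas bool) bool {
    if matchAreas { // polygon matcher
        return closed && tags["area"] != "no"
    }
    // linestring matcher
    if closed && tags["area"] == "yes" {
        return false
    }
    return true
}

func main() {
    closedPedestrian := map[string]string{"highway": "pedestrian", "area": "yes"}
    fmt.Println(wayMatchesAs(true, closedPedestrian, true))  // true: matched as polygon
    fmt.Println(wayMatchesAs(true, closedPedestrian, false)) // false: not a linestring
}
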


@ -12,8 +12,9 @@ func BenchmarkTagMatch(b *testing.B) {
}
matcher := m.PolygonMatcher()
for i := 0; i < b.N; i++ {
t := element.Tags{"landuse": "forest", "name": "Forest", "source": "bling", "tourism": "zoo"}
if m := matcher.Match(&t); len(m) != 1 {
e := element.Relation{}
e.Tags = element.Tags{"landuse": "forest", "name": "Forest", "source": "bling", "tourism": "zoo"}
if m := matcher.MatchRelation(&e); len(m) != 1 {
b.Fatal(m)
}
}
@ -30,15 +31,18 @@ func TestSelectRelationPolygonsSimple(t *testing.T) {
if err != nil {
t.Fatal(err)
}
r := element.Relation{}
r.Tags = element.Tags{"landuse": "park"}
r.Members = []element.Member{
makeMember(0, element.Tags{"landuse": "forest"}),
makeMember(1, element.Tags{"landuse": "park"}),
makeMember(2, element.Tags{"waterway": "riverbank"}),
makeMember(4, element.Tags{"foo": "bar"}),
}
filtered := SelectRelationPolygons(
mapping.PolygonMatcher(),
element.Tags{"landuse": "park"},
[]element.Member{
makeMember(0, element.Tags{"landuse": "forest"}),
makeMember(1, element.Tags{"landuse": "park"}),
makeMember(2, element.Tags{"waterway": "riverbank"}),
makeMember(4, element.Tags{"foo": "bar"}),
})
&r,
)
if len(filtered) != 1 {
t.Fatal(filtered)
}
@ -52,13 +56,16 @@ func TestSelectRelationPolygonsUnrelatedTags(t *testing.T) {
if err != nil {
t.Fatal(err)
}
r := element.Relation{}
r.Tags = element.Tags{"landuse": "park"}
r.Members = []element.Member{
makeMember(0, element.Tags{"landuse": "park", "layer": "2", "name": "foo"}),
makeMember(1, element.Tags{"landuse": "forest"}),
}
filtered := SelectRelationPolygons(
mapping.PolygonMatcher(),
element.Tags{"landuse": "park"},
[]element.Member{
makeMember(0, element.Tags{"landuse": "park", "layer": "2", "name": "foo"}),
makeMember(1, element.Tags{"landuse": "forest"}),
})
&r,
)
if len(filtered) != 1 {
t.Fatal(filtered)
}
@ -72,16 +79,19 @@ func TestSelectRelationPolygonsMultiple(t *testing.T) {
if err != nil {
t.Fatal(err)
}
r := element.Relation{}
r.Tags = element.Tags{"landuse": "park"}
r.Members = []element.Member{
makeMember(0, element.Tags{"landuse": "park"}),
makeMember(1, element.Tags{"natural": "forest"}),
makeMember(2, element.Tags{"landuse": "park"}),
makeMember(3, element.Tags{"highway": "pedestrian"}),
makeMember(4, element.Tags{"landuse": "park", "layer": "2", "name": "foo"}),
}
filtered := SelectRelationPolygons(
mapping.PolygonMatcher(),
element.Tags{"landuse": "park"},
[]element.Member{
makeMember(0, element.Tags{"landuse": "park"}),
makeMember(1, element.Tags{"natural": "forest"}),
makeMember(2, element.Tags{"landuse": "park"}),
makeMember(3, element.Tags{"highway": "pedestrian"}),
makeMember(4, element.Tags{"landuse": "park", "layer": "2", "name": "foo"}),
})
&r,
)
if len(filtered) != 3 {
t.Fatal(filtered)
}
@ -95,13 +105,16 @@ func TestSelectRelationPolygonsMultipleTags(t *testing.T) {
if err != nil {
t.Fatal(err)
}
r := element.Relation{}
r.Tags = element.Tags{"landuse": "forest", "natural": "scrub"}
r.Members = []element.Member{
makeMember(0, element.Tags{"natural": "scrub"}),
makeMember(1, element.Tags{"landuse": "forest"}),
}
filtered := SelectRelationPolygons(
mapping.PolygonMatcher(),
element.Tags{"landuse": "forest", "natural": "scrub"},
[]element.Member{
makeMember(0, element.Tags{"natural": "scrub"}),
makeMember(1, element.Tags{"landuse": "forest"}),
})
&r,
)
// TODO both should be filtered out, but we only get the first one,
// because we match only one tag per table
if len(filtered) != 1 {


@ -18,16 +18,20 @@ $(IMPOSM_BIN): build
clean:
rm -rf build
build/test.pbf: test.osm
build/%.pbf: %.osm
@mkdir -p build
osmosis --read-xml ./test.osm --write-pbf ./build/test.pbf omitmetadata=true
osmosis --read-xml $< --write-pbf $@ omitmetadata=true
build/test.osc.gz: test.osc
build/%.osc.gz: %.osc
@mkdir -p build
gzip --stdout ./test.osc > ./build/test.osc.gz
gzip --stdout $< > $@
test: .lasttestrun
test: .lasttestrun_complete_db .lasttestrun_single_table
.lasttestrun: $(IMPOSM_BIN) imposm_system_test.py build/test.osc.gz build/test.pbf
nosetests imposm_system_test.py $(NOSEOPTS)
@touch .lasttestrun
.lasttestrun_complete_db: $(IMPOSM_BIN) complete_db_test.py build/complete_db.osc.gz build/complete_db.pbf
nosetests complete_db_test.py $(NOSEOPTS)
@touch .lasttestrun_complete_db
.lasttestrun_single_table: $(IMPOSM_BIN) single_table_test.py build/single_table.pbf
nosetests single_table_test.py $(NOSEOPTS)
@touch .lasttestrun_single_table

test/complete_db_test.py (new file, 440 lines)

@ -0,0 +1,440 @@
import unittest
import helper as t
mapping_file = 'complete_db_mapping.json'
#######################################################################
def test_import():
"""Import succeeds"""
t.drop_schemas()
assert not t.table_exists('osm_roads', schema=t.TEST_SCHEMA_IMPORT)
t.imposm3_import(t.db_conf, './build/complete_db.pbf', mapping_file)
assert t.table_exists('osm_roads', schema=t.TEST_SCHEMA_IMPORT)
def test_deploy():
"""Deploy succeeds"""
assert not t.table_exists('osm_roads', schema=t.TEST_SCHEMA_PRODUCTION)
t.imposm3_deploy(t.db_conf, mapping_file)
assert t.table_exists('osm_roads', schema=t.TEST_SCHEMA_PRODUCTION)
assert not t.table_exists('osm_roads', schema=t.TEST_SCHEMA_IMPORT)
#######################################################################
def test_imported_landusage():
"""Multipolygon relation is inserted"""
t.assert_cached_node(1001, (13, 47.5))
landusage_1001 = t.query_row(t.db_conf, 'osm_landusages', -1001)
# point in polygon
assert landusage_1001['geometry'].intersects(t.merc_point(13.4, 47.5))
# hole in multipolygon relation
assert not landusage_1001['geometry'].intersects(t.merc_point(14.75, 47.75))
def test_missing_nodes():
"""Cache does not contain nodes from previous imports"""
t.assert_missing_node(10001)
t.assert_missing_node(10002)
place_10000 = t.query_row(t.db_conf, 'osm_places', 10000)
assert place_10000['name'] == 'Foo', place_10000
def test_name_tags():
"""Road contains multiple names"""
road = t.query_row(t.db_conf, 'osm_roads', 1101)
assert road['name'] == 'name', road
assert road['name:de'] == 'name:de', road
assert road['name_en'] == 'name:en', road
def test_landusage_to_waterarea_1():
"""Parks inserted into landusages"""
t.assert_cached_way(11001)
t.assert_cached_way(12001)
t.assert_cached_way(13001)
assert not t.query_row(t.db_conf, 'osm_waterareas', 11001)
assert not t.query_row(t.db_conf, 'osm_waterareas', -12001)
assert not t.query_row(t.db_conf, 'osm_waterareas', -13001)
assert not t.query_row(t.db_conf, 'osm_waterareas_gen0', 11001)
assert not t.query_row(t.db_conf, 'osm_waterareas_gen0', -12001)
assert not t.query_row(t.db_conf, 'osm_waterareas_gen0', -13001)
assert not t.query_row(t.db_conf, 'osm_waterareas_gen1', 11001)
assert not t.query_row(t.db_conf, 'osm_waterareas_gen1', -12001)
assert not t.query_row(t.db_conf, 'osm_waterareas_gen1', -13001)
assert t.query_row(t.db_conf, 'osm_landusages', 11001)['type'] == 'park'
assert t.query_row(t.db_conf, 'osm_landusages', -12001)['type'] == 'park'
assert t.query_row(t.db_conf, 'osm_landusages', -13001)['type'] == 'park'
assert t.query_row(t.db_conf, 'osm_landusages_gen0', 11001)['type'] == 'park'
assert t.query_row(t.db_conf, 'osm_landusages_gen0', -12001)['type'] == 'park'
assert t.query_row(t.db_conf, 'osm_landusages_gen0', -13001)['type'] == 'park'
assert t.query_row(t.db_conf, 'osm_landusages_gen1', 11001)['type'] == 'park'
assert t.query_row(t.db_conf, 'osm_landusages_gen1', -12001)['type'] == 'park'
assert t.query_row(t.db_conf, 'osm_landusages_gen1', -13001)['type'] == 'park'
def test_changed_hole_tags_1():
"""Multipolygon relation with untagged hole"""
t.assert_cached_way(14001)
t.assert_cached_way(14011)
assert not t.query_row(t.db_conf, 'osm_waterareas', 14011)
assert not t.query_row(t.db_conf, 'osm_waterareas', -14011)
assert t.query_row(t.db_conf, 'osm_landusages', -14001)['type'] == 'park'
def test_split_outer_multipolygon_way_1():
"""Single outer way of multipolygon was inserted."""
park_15001 = t.query_row(t.db_conf, 'osm_landusages', -15001)
assert park_15001['type'] == 'park'
t.assert_almost_equal(park_15001['geometry'].area, 9816216452, -1)
assert t.query_row(t.db_conf, 'osm_roads', 15002) == None
def test_merge_outer_multipolygon_way_1():
"""Splitted outer way of multipolygon was inserted."""
park_16001 = t.query_row(t.db_conf, 'osm_landusages', -16001)
assert park_16001['type'] == 'park'
t.assert_almost_equal(park_16001['geometry'].area, 12779350582, -1)
assert t.query_row(t.db_conf, 'osm_roads', 16002)['type'] == 'residential'
def test_broken_multipolygon_ways():
"""MultiPolygons with broken outer ways are handled."""
# outer way does not merge (17002 has one node)
assert t.query_row(t.db_conf, 'osm_landusages', -17001) == None
assert t.query_row(t.db_conf, 'osm_roads', 17001)['type'] == 'residential'
assert t.query_row(t.db_conf, 'osm_roads', 17002) == None
# outer way does not merge (17102 has no nodes)
assert t.query_row(t.db_conf, 'osm_landusages', -17101) == None
assert t.query_row(t.db_conf, 'osm_roads', 17101)['type'] == 'residential'
assert t.query_row(t.db_conf, 'osm_roads', 17102) == None
def test_node_way_ref_after_delete_1():
"""Nodes refereces way"""
data = t.cache_query(nodes=[20001, 20002], deps=True)
assert '20001' in data['nodes']['20001']['ways']
assert '20001' in data['nodes']['20002']['ways']
assert t.query_row(t.db_conf, 'osm_roads', 20001)['type'] == 'residential'
assert t.query_row(t.db_conf, 'osm_barrierpoints', 20001)['type'] == 'block'
def test_way_rel_ref_after_delete_1():
"""Ways references relation"""
data = t.cache_query(ways=[21001], deps=True)
assert data['ways']['21001']['relations'].keys() == ['21001']
assert t.query_row(t.db_conf, 'osm_roads', 21001)['type'] == 'residential'
assert t.query_row(t.db_conf, 'osm_landusages', -21001)['type'] == 'park'
def test_relation_way_not_inserted():
"""Part of relation was inserted only once."""
park = t.query_row(t.db_conf, 'osm_landusages', -9001)
assert park['type'] == 'park'
assert park['name'] == 'rel 9001'
assert t.query_row(t.db_conf, 'osm_landusages', 9009) == None
park = t.query_row(t.db_conf, 'osm_landusages', -9101)
assert park['type'] == 'park'
assert park['name'] == 'rel 9101'
assert t.query_row(t.db_conf, 'osm_landusages', 9109) == None
scrub = t.query_row(t.db_conf, 'osm_landusages', 9110)
assert scrub['type'] == 'scrub'
def test_relation_ways_inserted():
"""Outer ways of multipolygon are inserted. """
park = t.query_row(t.db_conf, 'osm_landusages', -9201)
assert park['type'] == 'park'
assert park['name'] == '9209'
# outer ways of multipolygon stand on their own
road = t.query_row(t.db_conf, 'osm_roads', 9209)
assert road['type'] == 'secondary'
assert road['name'] == '9209'
road = t.query_row(t.db_conf, 'osm_roads', 9210)
assert road['type'] == 'residential'
assert road['name'] == '9210'
park = t.query_row(t.db_conf, 'osm_landusages', -9301)
assert park['type'] == 'park'
assert park['name'] == '' # no name on relation
# outer ways of multipolygon stand on their own
road = t.query_row(t.db_conf, 'osm_roads', 9309)
assert road['type'] == 'secondary'
assert road['name'] == '9309'
road = t.query_row(t.db_conf, 'osm_roads', 9310)
assert road['type'] == 'residential'
assert road['name'] == '9310'
def test_relation_way_inserted():
"""Part of relation was inserted twice."""
park = t.query_row(t.db_conf, 'osm_landusages', -8001)
assert park['type'] == 'park'
assert park['name'] == 'rel 8001'
assert t.query_row(t.db_conf, 'osm_roads', 8009)["type"] == 'residential'
def test_single_node_ways_not_inserted():
"""Ways with single/duplicate nodes are not inserted."""
assert not t.query_row(t.db_conf, 'osm_roads', 30001)
assert not t.query_row(t.db_conf, 'osm_roads', 30002)
assert not t.query_row(t.db_conf, 'osm_roads', 30003)
def test_polygon_with_duplicate_nodes_is_valid():
"""Polygon with duplicate nodes is valid."""
geom = t.query_row(t.db_conf, 'osm_landusages', 30005)['geometry']
assert geom.is_valid
assert len(geom.exterior.coords) == 4
def test_incomplete_polygons():
"""Non-closed/incomplete polygons are not inserted."""
assert not t.query_row(t.db_conf, 'osm_landusages', 30004)
assert not t.query_row(t.db_conf, 'osm_landusages', 30006)
def test_residential_to_secondary():
"""Residential road is not in roads_gen0/1."""
assert t.query_row(t.db_conf, 'osm_roads', 40001)['type'] == 'residential'
assert not t.query_row(t.db_conf, 'osm_roads_gen0', 40001)
assert not t.query_row(t.db_conf, 'osm_roads_gen1', 40001)
def test_relation_before_remove():
"""Relation and way is inserted."""
assert t.query_row(t.db_conf, 'osm_buildings', 50011)['type'] == 'yes'
assert t.query_row(t.db_conf, 'osm_landusages', -50021)['type'] == 'park'
def test_relation_without_tags():
"""Relation without tags is inserted."""
assert t.query_row(t.db_conf, 'osm_buildings', 50111) == None
assert t.query_row(t.db_conf, 'osm_buildings', -50121)['type'] == 'yes'
def test_duplicate_ids():
"""Relation/way with same ID is inserted."""
assert t.query_row(t.db_conf, 'osm_buildings', 51001)['type'] == 'way'
assert t.query_row(t.db_conf, 'osm_buildings', -51001)['type'] == 'mp'
assert t.query_row(t.db_conf, 'osm_buildings', 51011)['type'] == 'way'
assert t.query_row(t.db_conf, 'osm_buildings', -51011)['type'] == 'mp'
def test_generalized_banana_polygon_is_valid():
"""Generalized polygons are valid."""
park = t.query_row(t.db_conf, 'osm_landusages', 7101)
# geometry is not valid
assert not park['geometry'].is_valid, park
park = t.query_row(t.db_conf, 'osm_landusages_gen0', 7101)
# but simplified geometries are valid
assert park['geometry'].is_valid, park
park = t.query_row(t.db_conf, 'osm_landusages_gen1', 7101)
assert park['geometry'].is_valid, park
def test_generalized_linestring_is_valid():
"""Generalized linestring is valid."""
road = t.query_row(t.db_conf, 'osm_roads', 7201)
# geometry is not simple, but valid
# check that geometry 'survives' simplification
assert not road['geometry'].is_simple, road['geometry'].wkt
assert road['geometry'].is_valid, road['geometry'].wkt
assert road['geometry'].length > 1000000
road = t.query_row(t.db_conf, 'osm_roads_gen0', 7201)
# but simplified geometries are simple
assert road['geometry'].is_valid, road['geometry'].wkt
assert road['geometry'].length > 1000000
road = t.query_row(t.db_conf, 'osm_roads_gen1', 7201)
assert road['geometry'].is_valid, road['geometry'].wkt
assert road['geometry'].length > 1000000
@unittest.skip("not implemented")
def test_relation_with_gap():
"""Multipolygon with gap (overlapping but different endpoints) gets closed"""
park = t.query_row(t.db_conf, 'osm_landusages', -7301)
assert park['geometry'].is_valid, park
def test_updated_nodes1():
"""Zig-Zag line is inserted."""
road = t.query_row(t.db_conf, 'osm_roads', 60000)
t.assert_almost_equal(road['geometry'].length, 14035.61150207768)
#######################################################################
def test_update():
"""Diff import applies"""
t.imposm3_update(t.db_conf, './build/complete_db.osc.gz', mapping_file)
#######################################################################
def test_updated_landusage():
"""Multipolygon relation was modified"""
t.assert_cached_node(1001, (13.5, 47.5))
landusage_1001 = t.query_row(t.db_conf, 'osm_landusages', -1001)
# point not in polygon after update
assert not landusage_1001['geometry'].intersects(t.merc_point(13.4, 47.5))
def test_partial_delete():
"""Deleted relation but nodes are still cached"""
t.assert_cached_node(2001)
t.assert_cached_way(2001)
t.assert_cached_way(2002)
assert not t.query_row(t.db_conf, 'osm_landusages', -2001)
assert not t.query_row(t.db_conf, 'osm_landusages', 2001)
def test_updated_nodes():
"""Nodes were added, modified or deleted"""
t.assert_missing_node(10000)
t.assert_cached_node(10001, (10.0, 40.0))
t.assert_cached_node(10002, (10.1, 40.0))
place_10001 = t.query_row(t.db_conf, 'osm_places', 10001)
assert place_10001['name'] == 'Bar', place_10001
place_10002 = t.query_row(t.db_conf, 'osm_places', 10002)
assert place_10002['name'] == 'Baz', place_10002
def test_landusage_to_waterarea_2():
"""Parks converted to water moved from landusages to waterareas"""
t.assert_cached_way(11001)
t.assert_cached_way(12001)
t.assert_cached_way(13001)
assert not t.query_row(t.db_conf, 'osm_landusages', 11001)
assert not t.query_row(t.db_conf, 'osm_landusages', -12001)
assert not t.query_row(t.db_conf, 'osm_landusages', -13001)
assert not t.query_row(t.db_conf, 'osm_landusages_gen0', 11001)
assert not t.query_row(t.db_conf, 'osm_landusages_gen0', -12001)
assert not t.query_row(t.db_conf, 'osm_landusages_gen0', -13001)
assert not t.query_row(t.db_conf, 'osm_landusages_gen1', 11001)
assert not t.query_row(t.db_conf, 'osm_landusages_gen1', -12001)
assert not t.query_row(t.db_conf, 'osm_landusages_gen1', -13001)
assert t.query_row(t.db_conf, 'osm_waterareas', 11001)['type'] == 'water'
assert t.query_row(t.db_conf, 'osm_waterareas', -12001)['type'] == 'water'
assert t.query_row(t.db_conf, 'osm_waterareas', -13001)['type'] == 'water'
assert t.query_row(t.db_conf, 'osm_waterareas_gen0', 11001)['type'] == 'water'
assert t.query_row(t.db_conf, 'osm_waterareas_gen0', -12001)['type'] == 'water'
assert t.query_row(t.db_conf, 'osm_waterareas_gen0', -13001)['type'] == 'water'
assert t.query_row(t.db_conf, 'osm_waterareas_gen1', 11001)['type'] == 'water'
assert t.query_row(t.db_conf, 'osm_waterareas_gen1', -12001)['type'] == 'water'
assert t.query_row(t.db_conf, 'osm_waterareas_gen1', -13001)['type'] == 'water'
def test_changed_hole_tags_2():
"""Newly tagged hole is inserted"""
t.assert_cached_way(14001)
t.assert_cached_way(14011)
assert t.query_row(t.db_conf, 'osm_waterareas', 14011)['type'] == 'water'
assert t.query_row(t.db_conf, 'osm_landusages', -14001)['type'] == 'park'
t.assert_almost_equal(t.query_row(t.db_conf, 'osm_waterareas', 14011)['geometry'].area, 26672000000, -6)
t.assert_almost_equal(t.query_row(t.db_conf, 'osm_landusages', -14001)['geometry'].area, 10373600000, -6)
def test_split_outer_multipolygon_way_2():
"""Splitted outer way of multipolygon was inserted"""
data = t.cache_query(ways=[15001, 15002], deps=True)
assert data['ways']['15001']['relations'].keys() == ['15001']
assert data['ways']['15002']['relations'].keys() == ['15001']
assert t.query_row(t.db_conf, 'osm_landusages', 15001) == None
park_15001 = t.query_row(t.db_conf, 'osm_landusages', -15001)
assert park_15001['type'] == 'park'
t.assert_almost_equal(park_15001['geometry'].area, 9816216452, -1)
assert t.query_row(t.db_conf, 'osm_roads', 15002)['type'] == 'residential'
def test_merge_outer_multipolygon_way_2():
"""Merged outer way of multipolygon was inserted"""
data = t.cache_query(ways=[16001, 16002], deps=True)
assert data['ways']['16001']['relations'].keys() == ['16001']
assert data['ways']['16002'] == None
data = t.cache_query(relations=[16001], full=True)
assert sorted(data['relations']['16001']['ways'].keys()) == ['16001', '16011']
assert t.query_row(t.db_conf, 'osm_landusages', 16001) == None
park_16001 = t.query_row(t.db_conf, 'osm_landusages', -16001)
assert park_16001['type'] == 'park'
t.assert_almost_equal(park_16001['geometry'].area, 12779350582, -1)
assert t.query_row(t.db_conf, 'osm_roads', 16002) == None
def test_node_way_ref_after_delete_2():
"""Node does not referece deleted way"""
data = t.cache_query(nodes=[20001, 20002], deps=True)
assert 'ways' not in data['nodes']['20001']
assert data['nodes']['20002'] == None
assert t.query_row(t.db_conf, 'osm_roads', 20001) == None
assert t.query_row(t.db_conf, 'osm_barrierpoints', 20001)['type'] == 'block'
def test_way_rel_ref_after_delete_2():
"""Way does not referece deleted relation"""
data = t.cache_query(ways=[21001], deps=True)
assert 'relations' not in data['ways']['21001']
assert t.query_row(t.db_conf, 'osm_roads', 21001)['type'] == 'residential'
assert t.query_row(t.db_conf, 'osm_landusages', 21001) == None
assert t.query_row(t.db_conf, 'osm_landusages', -21001) == None
def test_residential_to_secondary2():
"""New secondary (from residential) is now in roads_gen0/1."""
assert t.query_row(t.db_conf, 'osm_roads', 40001)['type'] == 'secondary'
assert t.query_row(t.db_conf, 'osm_roads_gen0', 40001)['type'] == 'secondary'
assert t.query_row(t.db_conf, 'osm_roads_gen1', 40001)['type'] == 'secondary'
def test_relation_after_remove():
"""Relation is deleted and way is still present."""
assert t.query_row(t.db_conf, 'osm_buildings', 50011)['type'] == 'yes'
assert t.query_row(t.db_conf, 'osm_landusages', 50021) == None
assert t.query_row(t.db_conf, 'osm_landusages', -50021) == None
def test_relation_without_tags2():
"""Relation without tags is removed."""
t.cache_query(ways=[50111], deps=True)
assert t.cache_query(relations=[50121], deps=True)['relations']["50121"] == None
assert t.query_row(t.db_conf, 'osm_buildings', 50111)['type'] == 'yes'
assert t.query_row(t.db_conf, 'osm_buildings', 50121) == None
assert t.query_row(t.db_conf, 'osm_buildings', -50121) == None
def test_duplicate_ids2():
"""Only relation/way with same ID was deleted."""
assert t.query_row(t.db_conf, 'osm_buildings', 51001)['type'] == 'way'
assert t.query_row(t.db_conf, 'osm_buildings', -51001) == None
assert t.query_row(t.db_conf, 'osm_buildings', -51011)['type'] == 'mp'
assert t.query_row(t.db_conf, 'osm_buildings', 51011) == None
def test_updated_way2():
"""All nodes of straightened way are updated."""
road = t.query_row(t.db_conf, 'osm_roads', 60000)
# new length 0.1 degree
t.assert_almost_equal(road['geometry'].length, 20037508.342789244/180.0/10.0)
#######################################################################
def test_deploy_and_revert_deploy():
"""Revert deploy succeeds"""
assert not t.table_exists('osm_roads', schema=t.TEST_SCHEMA_IMPORT)
assert t.table_exists('osm_roads', schema=t.TEST_SCHEMA_PRODUCTION)
assert not t.table_exists('osm_roads', schema=t.TEST_SCHEMA_BACKUP)
# import again to have a new import schema
t.imposm3_import(t.db_conf, './build/complete_db.pbf', mapping_file)
assert t.table_exists('osm_roads', schema=t.TEST_SCHEMA_IMPORT)
t.imposm3_deploy(t.db_conf, mapping_file)
assert not t.table_exists('osm_roads', schema=t.TEST_SCHEMA_IMPORT)
assert t.table_exists('osm_roads', schema=t.TEST_SCHEMA_PRODUCTION)
assert t.table_exists('osm_roads', schema=t.TEST_SCHEMA_BACKUP)
t.imposm3_revert_deploy(t.db_conf, mapping_file)
assert t.table_exists('osm_roads', schema=t.TEST_SCHEMA_IMPORT)
assert t.table_exists('osm_roads', schema=t.TEST_SCHEMA_PRODUCTION)
assert not t.table_exists('osm_roads', schema=t.TEST_SCHEMA_BACKUP)
def test_remove_backup():
"""Remove backup succeeds"""
assert t.table_exists('osm_roads', schema=t.TEST_SCHEMA_IMPORT)
assert t.table_exists('osm_roads', schema=t.TEST_SCHEMA_PRODUCTION)
assert not t.table_exists('osm_roads', schema=t.TEST_SCHEMA_BACKUP)
t.imposm3_deploy(t.db_conf, mapping_file)
assert not t.table_exists('osm_roads', schema=t.TEST_SCHEMA_IMPORT)
assert t.table_exists('osm_roads', schema=t.TEST_SCHEMA_PRODUCTION)
assert t.table_exists('osm_roads', schema=t.TEST_SCHEMA_BACKUP)
t.imposm3_remove_backups(t.db_conf, mapping_file)
assert not t.table_exists('osm_roads', schema=t.TEST_SCHEMA_IMPORT)
assert t.table_exists('osm_roads', schema=t.TEST_SCHEMA_PRODUCTION)
assert not t.table_exists('osm_roads', schema=t.TEST_SCHEMA_BACKUP)

test/helper.py (new file, 227 lines)

@ -0,0 +1,227 @@
import math
import tempfile
import shutil
import subprocess
import psycopg2
import psycopg2.extras
import json
from shapely.wkb import loads as wkb_loads
from shapely.geometry import Point
import binascii
import unittest
__all__ = [
"assert_almost_equal",
"query_row",
"cache_query",
"merc_point",
"imposm3_import",
"imposm3_deploy",
"imposm3_update",
"imposm3_revert_deploy",
"imposm3_remove_backups",
"table_exists",
"drop_schemas",
"TEST_SCHEMA_IMPORT",
"TEST_SCHEMA_PRODUCTION",
"TEST_SCHEMA_BACKUP",
"db_conf",
"assert_missing_node",
"assert_cached_node",
"assert_cached_way",
]
class Dummy(unittest.TestCase):
def nop():
pass
_t = Dummy('nop')
assert_almost_equal = _t.assertAlmostEqual
tmpdir = None
def setup():
global tmpdir
tmpdir = tempfile.mkdtemp()
def teardown():
shutil.rmtree(tmpdir)
drop_schemas()
db_conf = {
'host': 'localhost',
}
TEST_SCHEMA_IMPORT = "imposm3testimport"
TEST_SCHEMA_PRODUCTION = "imposm3testpublic"
TEST_SCHEMA_BACKUP = "imposm3testbackup"
def merc_point(lon, lat):
pole = 6378137 * math.pi # 20037508.342789244
x = lon * pole / 180.0
y = math.log(math.tan((90.0+lat)*math.pi/360.0)) / math.pi * pole
return Point(x, y)
def pg_db_url(db_conf):
return 'postgis://%(host)s' % db_conf
def create_geom_in_row(rowdict):
if rowdict:
rowdict['geometry'] = wkb_loads(binascii.unhexlify(rowdict['geometry']))
return rowdict
def query_row(db_conf, table, osmid):
conn = psycopg2.connect(**db_conf)
cur = conn.cursor(cursor_factory=psycopg2.extras.DictCursor)
cur.execute('select * from %s.%s where osm_id = %%s' % (TEST_SCHEMA_PRODUCTION, table), [osmid])
results = []
for row in cur.fetchall():
create_geom_in_row(row)
results.append(row)
if not results:
return None
if len(results) == 1:
return results[0]
return results
def imposm3_import(db_conf, pbf, mapping_file):
conn = pg_db_url(db_conf)
try:
print subprocess.check_output((
"../imposm3 import -connection %s -read %s"
" -write"
" -cachedir %s"
" -diff"
" -overwritecache"
" -dbschema-import " + TEST_SCHEMA_IMPORT +
" -optimize"
" -mapping %s ") % (
conn, pbf, tmpdir, mapping_file
), shell=True)
except subprocess.CalledProcessError, ex:
print ex.output
raise
def imposm3_deploy(db_conf, mapping_file):
conn = pg_db_url(db_conf)
try:
print subprocess.check_output((
"../imposm3 import -connection %s"
" -dbschema-import " + TEST_SCHEMA_IMPORT +
" -dbschema-production " + TEST_SCHEMA_PRODUCTION +
" -dbschema-backup " + TEST_SCHEMA_BACKUP +
" -deployproduction"
" -mapping %s ") % (
conn, mapping_file,
), shell=True)
except subprocess.CalledProcessError, ex:
print ex.output
raise
def imposm3_revert_deploy(db_conf, mapping_file):
conn = pg_db_url(db_conf)
try:
print subprocess.check_output((
"../imposm3 import -connection %s"
" -dbschema-import " + TEST_SCHEMA_IMPORT +
" -dbschema-production " + TEST_SCHEMA_PRODUCTION +
" -dbschema-backup " + TEST_SCHEMA_BACKUP +
" -revertdeploy"
" -mapping %s ") % (
conn, mapping_file,
), shell=True)
except subprocess.CalledProcessError, ex:
print ex.output
raise
def imposm3_remove_backups(db_conf, mapping_file):
conn = pg_db_url(db_conf)
try:
print subprocess.check_output((
"../imposm3 import -connection %s"
" -dbschema-backup " + TEST_SCHEMA_BACKUP +
" -removebackup"
" -mapping %s ") % (
conn, mapping_file,
), shell=True)
except subprocess.CalledProcessError, ex:
print ex.output
raise
def imposm3_update(db_conf, osc, mapping_file):
conn = pg_db_url(db_conf)
try:
print subprocess.check_output((
"../imposm3 diff -connection %s"
" -cachedir %s"
" -limitto clipping-3857.geojson"
" -dbschema-production " + TEST_SCHEMA_PRODUCTION +
" -mapping %s %s") % (
conn, tmpdir, mapping_file, osc,
), shell=True)
except subprocess.CalledProcessError, ex:
print ex.output
raise
def cache_query(nodes='', ways='', relations='', deps='', full=''):
if nodes:
nodes = '-node ' + ','.join(map(str, nodes))
if ways:
ways = '-way ' + ','.join(map(str, ways))
if relations:
relations = '-rel ' + ','.join(map(str, relations))
if deps:
deps = '-deps'
if full:
full = '-full'
out = subprocess.check_output(
"../imposm3 query-cache -cachedir %s %s %s %s %s %s" % (
tmpdir, nodes, ways, relations, deps, full),
shell=True)
print out
return json.loads(out)
def table_exists(table, schema=TEST_SCHEMA_IMPORT):
conn = psycopg2.connect(**db_conf)
cur = conn.cursor()
cur.execute("SELECT EXISTS(SELECT * FROM information_schema.tables WHERE table_name='%s' AND table_schema='%s')"
% (table, schema))
return cur.fetchone()[0]
def assert_missing_node(id):
data = cache_query(nodes=[id])
if data['nodes'][str(id)]:
raise AssertionError('node %d found' % id)
def assert_cached_node(id, (lon, lat)=(None, None)):
data = cache_query(nodes=[id])
node = data['nodes'][str(id)]
if not node:
raise AssertionError('node %d not found' % id)
if lon and lat:
assert_almost_equal(lon, node['lon'], 6)
assert_almost_equal(lat, node['lat'], 6)
def assert_cached_way(id):
data = cache_query(ways=[id])
if not data['ways'][str(id)]:
raise AssertionError('way %d not found' % id)
def drop_schemas():
conn = psycopg2.connect(**db_conf)
cur = conn.cursor()
cur.execute("DROP SCHEMA IF EXISTS %s CASCADE" % TEST_SCHEMA_IMPORT)
cur.execute("DROP SCHEMA IF EXISTS %s CASCADE" % TEST_SCHEMA_PRODUCTION)
cur.execute("DROP SCHEMA IF EXISTS %s CASCADE" % TEST_SCHEMA_BACKUP)
conn.commit()


@ -1,640 +0,0 @@
import math
import tempfile
import shutil
import subprocess
import psycopg2
import psycopg2.extras
import json
from shapely.wkb import loads as wkb_loads
from shapely.geometry import Point
import binascii
import unittest
class Dummy(unittest.TestCase):
def nop():
pass
_t = Dummy('nop')
assert_almost_equal = _t.assertAlmostEqual
tmpdir = None
def setup():
global tmpdir
tmpdir = tempfile.mkdtemp()
def teardown():
shutil.rmtree(tmpdir)
drop_test_schemas()
db_conf = {
'host': 'localhost',
}
TEST_SCHEMA_IMPORT = "imposm3testimport"
TEST_SCHEMA_PRODUCTION = "imposm3testpublic"
TEST_SCHEMA_BACKUP = "imposm3testbackup"
def merc_point(lon, lat):
pole = 6378137 * math.pi # 20037508.342789244
x = lon * pole / 180.0
y = math.log(math.tan((90.0+lat)*math.pi/360.0)) / math.pi * pole
return Point(x, y)
def pg_db_url(db_conf):
return 'postgis://%(host)s' % db_conf
def create_geom_in_row(rowdict):
if rowdict:
rowdict['geometry'] = wkb_loads(binascii.unhexlify(rowdict['geometry']))
return rowdict
def query_row(db_conf, table, osmid):
conn = psycopg2.connect(**db_conf)
cur = conn.cursor(cursor_factory=psycopg2.extras.DictCursor)
cur.execute('select * from %s.%s where osm_id = %%s' % (TEST_SCHEMA_PRODUCTION, table), [osmid])
results = []
for row in cur.fetchall():
create_geom_in_row(row)
results.append(row)
if not results:
return None
if len(results) == 1:
return results[0]
return results
def imposm3_import(db_conf, pbf):
conn = pg_db_url(db_conf)
try:
print subprocess.check_output((
"../imposm3 import -connection %s -read %s"
" -write"
" -cachedir %s"
" -diff"
" -overwritecache"
" -dbschema-import " + TEST_SCHEMA_IMPORT +
" -optimize"
" -mapping test_mapping.json ") % (
conn, pbf, tmpdir,
), shell=True)
except subprocess.CalledProcessError, ex:
print ex.output
raise
def imposm3_deploy(db_conf):
conn = pg_db_url(db_conf)
try:
print subprocess.check_output((
"../imposm3 import -connection %s"
" -dbschema-import " + TEST_SCHEMA_IMPORT +
" -dbschema-production " + TEST_SCHEMA_PRODUCTION +
" -dbschema-backup " + TEST_SCHEMA_BACKUP +
" -deployproduction"
" -mapping test_mapping.json ") % (
conn,
), shell=True)
except subprocess.CalledProcessError, ex:
print ex.output
raise
def imposm3_revert_deploy(db_conf):
conn = pg_db_url(db_conf)
try:
print subprocess.check_output((
"../imposm3 import -connection %s"
" -dbschema-import " + TEST_SCHEMA_IMPORT +
" -dbschema-production " + TEST_SCHEMA_PRODUCTION +
" -dbschema-backup " + TEST_SCHEMA_BACKUP +
" -revertdeploy"
" -mapping test_mapping.json ") % (
conn,
), shell=True)
except subprocess.CalledProcessError, ex:
print ex.output
raise
def imposm3_remove_backups(db_conf):
conn = pg_db_url(db_conf)
try:
print subprocess.check_output((
"../imposm3 import -connection %s"
" -dbschema-backup " + TEST_SCHEMA_BACKUP +
" -removebackup"
" -mapping test_mapping.json ") % (
conn,
), shell=True)
except subprocess.CalledProcessError, ex:
print ex.output
raise
def imposm3_update(db_conf, osc):
conn = pg_db_url(db_conf)
try:
print subprocess.check_output((
"../imposm3 diff -connection %s"
" -cachedir %s"
" -limitto clipping-3857.geojson"
" -dbschema-production " + TEST_SCHEMA_PRODUCTION +
" -mapping test_mapping.json %s") % (
conn, tmpdir, osc,
), shell=True)
except subprocess.CalledProcessError, ex:
print ex.output
raise
def cache_query(nodes='', ways='', relations='', deps='', full=''):
if nodes:
nodes = '-node ' + ','.join(map(str, nodes))
if ways:
ways = '-way ' + ','.join(map(str, ways))
if relations:
relations = '-rel ' + ','.join(map(str, relations))
if deps:
deps = '-deps'
if full:
full = '-full'
out = subprocess.check_output(
"../imposm3 query-cache -cachedir %s %s %s %s %s %s" % (
tmpdir, nodes, ways, relations, deps, full),
shell=True)
print out
return json.loads(out)
def table_exists(table, schema=TEST_SCHEMA_IMPORT):
conn = psycopg2.connect(**db_conf)
cur = conn.cursor()
cur.execute("SELECT EXISTS(SELECT * FROM information_schema.tables WHERE table_name='%s' AND table_schema='%s')"
% (table, schema))
return cur.fetchone()[0]
def assert_missing_node(id):
data = cache_query(nodes=[id])
if data['nodes'][str(id)]:
raise AssertionError('node %d found' % id)
def assert_cached_node(id, (lon, lat)=(None, None)):
data = cache_query(nodes=[id])
node = data['nodes'][str(id)]
if not node:
raise AssertionError('node %d not found' % id)
if lon and lat:
assert_almost_equal(lon, node['lon'], 6)
assert_almost_equal(lat, node['lat'], 6)
def assert_cached_way(id):
data = cache_query(ways=[id])
if not data['ways'][str(id)]:
raise AssertionError('way %d not found' % id)
def drop_test_schemas():
conn = psycopg2.connect(**db_conf)
cur = conn.cursor()
cur.execute("DROP SCHEMA IF EXISTS %s CASCADE" % TEST_SCHEMA_IMPORT)
cur.execute("DROP SCHEMA IF EXISTS %s CASCADE" % TEST_SCHEMA_PRODUCTION)
cur.execute("DROP SCHEMA IF EXISTS %s CASCADE" % TEST_SCHEMA_BACKUP)
conn.commit()
#######################################################################
def test_import():
"""Import succeeds"""
drop_test_schemas()
assert not table_exists('osm_roads', schema=TEST_SCHEMA_IMPORT)
imposm3_import(db_conf, './build/test.pbf')
assert table_exists('osm_roads', schema=TEST_SCHEMA_IMPORT)
def test_deploy():
"""Deploy succeeds"""
assert not table_exists('osm_roads', schema=TEST_SCHEMA_PRODUCTION)
imposm3_deploy(db_conf)
assert table_exists('osm_roads', schema=TEST_SCHEMA_PRODUCTION)
assert not table_exists('osm_roads', schema=TEST_SCHEMA_IMPORT)
#######################################################################
def test_imported_landusage():
"""Multipolygon relation is inserted"""
assert_cached_node(1001, (13, 47.5))
landusage_1001 = query_row(db_conf, 'osm_landusages', -1001)
# point in polygon
assert landusage_1001['geometry'].intersects(merc_point(13.4, 47.5))
# hole in multipolygon relation
assert not landusage_1001['geometry'].intersects(merc_point(14.75, 47.75))
def test_missing_nodes():
"""Cache does not contain nodes from previous imports"""
assert_missing_node(10001)
assert_missing_node(10002)
place_10000 = query_row(db_conf, 'osm_places', 10000)
assert place_10000['name'] == 'Foo', place_10000
def test_name_tags():
"""Road contains multiple names"""
road = query_row(db_conf, 'osm_roads', 1101)
assert road['name'] == 'name', road
assert road['name:de'] == 'name:de', road
assert road['name_en'] == 'name:en', road
def test_landusage_to_waterarea_1():
"""Parks inserted into landusages"""
assert_cached_way(11001)
assert_cached_way(12001)
assert_cached_way(13001)
assert not query_row(db_conf, 'osm_waterareas', 11001)
assert not query_row(db_conf, 'osm_waterareas', -12001)
assert not query_row(db_conf, 'osm_waterareas', -13001)
assert not query_row(db_conf, 'osm_waterareas_gen0', 11001)
assert not query_row(db_conf, 'osm_waterareas_gen0', -12001)
assert not query_row(db_conf, 'osm_waterareas_gen0', -13001)
assert not query_row(db_conf, 'osm_waterareas_gen1', 11001)
assert not query_row(db_conf, 'osm_waterareas_gen1', -12001)
assert not query_row(db_conf, 'osm_waterareas_gen1', -13001)
assert query_row(db_conf, 'osm_landusages', 11001)['type'] == 'park'
assert query_row(db_conf, 'osm_landusages', -12001)['type'] == 'park'
assert query_row(db_conf, 'osm_landusages', -13001)['type'] == 'park'
assert query_row(db_conf, 'osm_landusages_gen0', 11001)['type'] == 'park'
assert query_row(db_conf, 'osm_landusages_gen0', -12001)['type'] == 'park'
assert query_row(db_conf, 'osm_landusages_gen0', -13001)['type'] == 'park'
assert query_row(db_conf, 'osm_landusages_gen1', 11001)['type'] == 'park'
assert query_row(db_conf, 'osm_landusages_gen1', -12001)['type'] == 'park'
assert query_row(db_conf, 'osm_landusages_gen1', -13001)['type'] == 'park'
def test_changed_hole_tags_1():
"""Multipolygon relation with untagged hole"""
assert_cached_way(14001)
assert_cached_way(14011)
assert not query_row(db_conf, 'osm_waterareas', 14011)
assert not query_row(db_conf, 'osm_waterareas', -14011)
assert query_row(db_conf, 'osm_landusages', -14001)['type'] == 'park'
def test_split_outer_multipolygon_way_1():
"""Single outer way of multipolygon was inserted."""
park_15001 = query_row(db_conf, 'osm_landusages', -15001)
assert park_15001['type'] == 'park'
assert_almost_equal(park_15001['geometry'].area, 9816216452, -1)
assert query_row(db_conf, 'osm_roads', 15002) == None
def test_merge_outer_multipolygon_way_1():
"""Splitted outer way of multipolygon was inserted."""
park_16001 = query_row(db_conf, 'osm_landusages', -16001)
assert park_16001['type'] == 'park'
assert_almost_equal(park_16001['geometry'].area, 12779350582, -1)
assert query_row(db_conf, 'osm_roads', 16002)['type'] == 'residential'
def test_broken_multipolygon_ways():
"""MultiPolygons with broken outer ways are handled."""
# outer way does not merge (17002 has one node)
assert query_row(db_conf, 'osm_landusages', -17001) == None
assert query_row(db_conf, 'osm_roads', 17001)['type'] == 'residential'
assert query_row(db_conf, 'osm_roads', 17002) == None
# outer way does not merge (17102 has no nodes)
assert query_row(db_conf, 'osm_landusages', -17101) == None
assert query_row(db_conf, 'osm_roads', 17101)['type'] == 'residential'
assert query_row(db_conf, 'osm_roads', 17102) == None
def test_node_way_ref_after_delete_1():
"""Nodes refereces way"""
data = cache_query(nodes=[20001, 20002], deps=True)
assert '20001' in data['nodes']['20001']['ways']
assert '20001' in data['nodes']['20002']['ways']
assert query_row(db_conf, 'osm_roads', 20001)['type'] == 'residential'
assert query_row(db_conf, 'osm_barrierpoints', 20001)['type'] == 'block'
def test_way_rel_ref_after_delete_1():
"""Ways references relation"""
data = cache_query(ways=[21001], deps=True)
assert data['ways']['21001']['relations'].keys() == ['21001']
assert query_row(db_conf, 'osm_roads', 21001)['type'] == 'residential'
assert query_row(db_conf, 'osm_landusages', -21001)['type'] == 'park'
def test_relation_way_not_inserted():
"""Part of relation was inserted only once."""
park = query_row(db_conf, 'osm_landusages', -9001)
assert park['type'] == 'park'
assert park['name'] == 'rel 9001'
assert query_row(db_conf, 'osm_landusages', 9009) == None
park = query_row(db_conf, 'osm_landusages', -9101)
assert park['type'] == 'park'
assert park['name'] == 'rel 9101'
assert query_row(db_conf, 'osm_landusages', 9109) == None
scrub = query_row(db_conf, 'osm_landusages', 9110)
assert scrub['type'] == 'scrub'
def test_relation_ways_inserted():
"""Outer ways of multipolygon are inserted. """
park = query_row(db_conf, 'osm_landusages', -9201)
assert park['type'] == 'park'
assert park['name'] == '9209'
# outer ways of multipolygon stand for their own
road = query_row(db_conf, 'osm_roads', 9209)
assert road['type'] == 'secondary'
assert road['name'] == '9209'
road = query_row(db_conf, 'osm_roads', 9210)
assert road['type'] == 'residential'
assert road['name'] == '9210'
park = query_row(db_conf, 'osm_landusages', -9301)
assert park['type'] == 'park'
assert park['name'] == '' # no name on relation
# outer ways of multipolygon stand for their own
road = query_row(db_conf, 'osm_roads', 9309)
assert road['type'] == 'secondary'
assert road['name'] == '9309'
road = query_row(db_conf, 'osm_roads', 9310)
assert road['type'] == 'residential'
assert road['name'] == '9310'
def test_relation_way_inserted():
"""Part of relation was inserted twice."""
park = query_row(db_conf, 'osm_landusages', -8001)
assert park['type'] == 'park'
assert park['name'] == 'rel 8001'
assert query_row(db_conf, 'osm_roads', 8009)["type"] == 'residential'
def test_single_node_ways_not_inserted():
"""Ways with single/duplicate nodes are not inserted."""
assert not query_row(db_conf, 'osm_roads', 30001)
assert not query_row(db_conf, 'osm_roads', 30002)
assert not query_row(db_conf, 'osm_roads', 30003)
def test_polygon_with_duplicate_nodes_is_valid():
"""Polygon with duplicate nodes is valid."""
geom = query_row(db_conf, 'osm_landusages', 30005)['geometry']
assert geom.is_valid
assert len(geom.exterior.coords) == 4
def test_incomplete_polygons():
"""Non-closed/incomplete polygons are not inserted."""
assert not query_row(db_conf, 'osm_landusages', 30004)
assert not query_row(db_conf, 'osm_landusages', 30006)
def test_residential_to_secondary():
"""Residential road is not in roads_gen0/1."""
assert query_row(db_conf, 'osm_roads', 40001)['type'] == 'residential'
assert not query_row(db_conf, 'osm_roads_gen0', 40001)
assert not query_row(db_conf, 'osm_roads_gen1', 40001)
def test_relation_before_remove():
"""Relation and way is inserted."""
assert query_row(db_conf, 'osm_buildings', 50011)['type'] == 'yes'
assert query_row(db_conf, 'osm_landusages', -50021)['type'] == 'park'
def test_relation_without_tags():
"""Relation without tags is inserted."""
assert query_row(db_conf, 'osm_buildings', 50111) == None
assert query_row(db_conf, 'osm_buildings', -50121)['type'] == 'yes'
def test_duplicate_ids():
"""Relation/way with same ID is inserted."""
assert query_row(db_conf, 'osm_buildings', 51001)['type'] == 'way'
assert query_row(db_conf, 'osm_buildings', -51001)['type'] == 'mp'
assert query_row(db_conf, 'osm_buildings', 51011)['type'] == 'way'
assert query_row(db_conf, 'osm_buildings', -51011)['type'] == 'mp'
def test_generalized_banana_polygon_is_valid():
"""Generalized polygons are valid."""
park = query_row(db_conf, 'osm_landusages', 7101)
# geometry is not valid
assert not park['geometry'].is_valid, park
park = query_row(db_conf, 'osm_landusages_gen0', 7101)
    # but simplified geometries are valid
assert park['geometry'].is_valid, park
park = query_row(db_conf, 'osm_landusages_gen1', 7101)
assert park['geometry'].is_valid, park
def test_generalized_linestring_is_valid():
"""Generalized linestring is valid."""
road = query_row(db_conf, 'osm_roads', 7201)
# geometry is not simple, but valid
# check that geometry 'survives' simplification
assert not road['geometry'].is_simple, road['geometry'].wkt
assert road['geometry'].is_valid, road['geometry'].wkt
assert road['geometry'].length > 1000000
road = query_row(db_conf, 'osm_roads_gen0', 7201)
    # simplified geometries remain valid
assert road['geometry'].is_valid, road['geometry'].wkt
assert road['geometry'].length > 1000000
road = query_row(db_conf, 'osm_roads_gen1', 7201)
assert road['geometry'].is_valid, road['geometry'].wkt
assert road['geometry'].length > 1000000
@unittest.skip("not implemented")
def test_relation_with_gap():
"""Multipolygon with gap (overlapping but different endpoints) gets closed"""
park = query_row(db_conf, 'osm_landusages', -7301)
assert park['geometry'].is_valid, park
def test_updated_nodes1():
"""Zig-Zag line is inserted."""
road = query_row(db_conf, 'osm_roads', 60000)
assert_almost_equal(road['geometry'].length, 14035.61150207768)
#######################################################################
def test_update():
"""Diff import applies"""
imposm3_update(db_conf, './build/test.osc.gz')
#######################################################################
def test_updated_landusage():
"""Multipolygon relation was modified"""
assert_cached_node(1001, (13.5, 47.5))
landusage_1001 = query_row(db_conf, 'osm_landusages', -1001)
# point not in polygon after update
assert not landusage_1001['geometry'].intersects(merc_point(13.4, 47.5))
def test_partial_delete():
"""Deleted relation but nodes are still cached"""
assert_cached_node(2001)
assert_cached_way(2001)
assert_cached_way(2002)
assert not query_row(db_conf, 'osm_landusages', -2001)
assert not query_row(db_conf, 'osm_landusages', 2001)
def test_updated_nodes():
"""Nodes were added, modified or deleted"""
assert_missing_node(10000)
assert_cached_node(10001, (10.0, 40.0))
assert_cached_node(10002, (10.1, 40.0))
place_10001 = query_row(db_conf, 'osm_places', 10001)
assert place_10001['name'] == 'Bar', place_10001
place_10002 = query_row(db_conf, 'osm_places', 10002)
assert place_10002['name'] == 'Baz', place_10002
def test_landusage_to_waterarea_2():
"""Parks converted to water moved from landusages to waterareas"""
assert_cached_way(11001)
assert_cached_way(12001)
assert_cached_way(13001)
assert not query_row(db_conf, 'osm_landusages', 11001)
assert not query_row(db_conf, 'osm_landusages', -12001)
assert not query_row(db_conf, 'osm_landusages', -13001)
assert not query_row(db_conf, 'osm_landusages_gen0', 11001)
assert not query_row(db_conf, 'osm_landusages_gen0', -12001)
assert not query_row(db_conf, 'osm_landusages_gen0', -13001)
assert not query_row(db_conf, 'osm_landusages_gen1', 11001)
assert not query_row(db_conf, 'osm_landusages_gen1', -12001)
assert not query_row(db_conf, 'osm_landusages_gen1', -13001)
assert query_row(db_conf, 'osm_waterareas', 11001)['type'] == 'water'
assert query_row(db_conf, 'osm_waterareas', -12001)['type'] == 'water'
assert query_row(db_conf, 'osm_waterareas', -13001)['type'] == 'water'
assert query_row(db_conf, 'osm_waterareas_gen0', 11001)['type'] == 'water'
assert query_row(db_conf, 'osm_waterareas_gen0', -12001)['type'] == 'water'
assert query_row(db_conf, 'osm_waterareas_gen0', -13001)['type'] == 'water'
assert query_row(db_conf, 'osm_waterareas_gen1', 11001)['type'] == 'water'
assert query_row(db_conf, 'osm_waterareas_gen1', -12001)['type'] == 'water'
assert query_row(db_conf, 'osm_waterareas_gen1', -13001)['type'] == 'water'
def test_changed_hole_tags_2():
"""Newly tagged hole is inserted"""
assert_cached_way(14001)
assert_cached_way(14011)
assert query_row(db_conf, 'osm_waterareas', 14011)['type'] == 'water'
assert query_row(db_conf, 'osm_landusages', -14001)['type'] == 'park'
assert_almost_equal(query_row(db_conf, 'osm_waterareas', 14011)['geometry'].area, 26672000000, -6)
assert_almost_equal(query_row(db_conf, 'osm_landusages', -14001)['geometry'].area, 10373600000, -6)
def test_split_outer_multipolygon_way_2():
"""Splitted outer way of multipolygon was inserted"""
data = cache_query(ways=[15001, 15002], deps=True)
assert data['ways']['15001']['relations'].keys() == ['15001']
assert data['ways']['15002']['relations'].keys() == ['15001']
assert query_row(db_conf, 'osm_landusages', 15001) == None
park_15001 = query_row(db_conf, 'osm_landusages', -15001)
assert park_15001['type'] == 'park'
assert_almost_equal(park_15001['geometry'].area, 9816216452, -1)
assert query_row(db_conf, 'osm_roads', 15002)['type'] == 'residential'
def test_merge_outer_multipolygon_way_2():
"""Merged outer way of multipolygon was inserted"""
data = cache_query(ways=[16001, 16002], deps=True)
assert data['ways']['16001']['relations'].keys() == ['16001']
assert data['ways']['16002'] == None
data = cache_query(relations=[16001], full=True)
assert sorted(data['relations']['16001']['ways'].keys()) == ['16001', '16011']
assert query_row(db_conf, 'osm_landusages', 16001) == None
park_16001 = query_row(db_conf, 'osm_landusages', -16001)
assert park_16001['type'] == 'park'
assert_almost_equal(park_16001['geometry'].area, 12779350582, -1)
assert query_row(db_conf, 'osm_roads', 16002) == None
def test_node_way_ref_after_delete_2():
"""Node does not referece deleted way"""
data = cache_query(nodes=[20001, 20002], deps=True)
assert 'ways' not in data['nodes']['20001']
assert data['nodes']['20002'] == None
assert query_row(db_conf, 'osm_roads', 20001) == None
assert query_row(db_conf, 'osm_barrierpoints', 20001)['type'] == 'block'
def test_way_rel_ref_after_delete_2():
"""Way does not referece deleted relation"""
data = cache_query(ways=[21001], deps=True)
assert 'relations' not in data['ways']['21001']
assert query_row(db_conf, 'osm_roads', 21001)['type'] == 'residential'
assert query_row(db_conf, 'osm_landusages', 21001) == None
assert query_row(db_conf, 'osm_landusages', -21001) == None
def test_residential_to_secondary2():
"""New secondary (from residential) is now in roads_gen0/1."""
assert query_row(db_conf, 'osm_roads', 40001)['type'] == 'secondary'
assert query_row(db_conf, 'osm_roads_gen0', 40001)['type'] == 'secondary'
assert query_row(db_conf, 'osm_roads_gen1', 40001)['type'] == 'secondary'
def test_relation_after_remove():
"""Relation is deleted and way is still present."""
assert query_row(db_conf, 'osm_buildings', 50011)['type'] == 'yes'
assert query_row(db_conf, 'osm_landusages', 50021) == None
assert query_row(db_conf, 'osm_landusages', -50021) == None
def test_relation_without_tags2():
"""Relation without tags is removed."""
cache_query(ways=[50111], deps=True)
assert cache_query(relations=[50121], deps=True)['relations']["50121"] == None
assert query_row(db_conf, 'osm_buildings', 50111)['type'] == 'yes'
assert query_row(db_conf, 'osm_buildings', 50121) == None
assert query_row(db_conf, 'osm_buildings', -50121) == None
def test_duplicate_ids2():
"""Only relation/way with same ID was deleted."""
assert query_row(db_conf, 'osm_buildings', 51001)['type'] == 'way'
assert query_row(db_conf, 'osm_buildings', -51001) == None
assert query_row(db_conf, 'osm_buildings', -51011)['type'] == 'mp'
assert query_row(db_conf, 'osm_buildings', 51011) == None
def test_updated_way2():
"""All nodes of straightened way are updated."""
road = query_row(db_conf, 'osm_roads', 60000)
    # new length is 0.1 degree of longitude (in web mercator meters)
assert_almost_equal(road['geometry'].length, 20037508.342789244/180.0/10.0)
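# A worked sketch (not part of the original tests) of where the expected value
# above comes from: half of the web mercator extent, 20037508.342789244 m,
# corresponds to 180 degrees of longitude at the equator, so the straightened
# 0.1 degree way projects to roughly 11131.95 m. The helper name below is
# illustrative only.
def _merc_length_for_degrees(degrees):
    return 20037508.342789244 / 180.0 * degrees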
#######################################################################
def test_deploy_and_revert_deploy():
"""Revert deploy succeeds"""
assert not table_exists('osm_roads', schema=TEST_SCHEMA_IMPORT)
assert table_exists('osm_roads', schema=TEST_SCHEMA_PRODUCTION)
assert not table_exists('osm_roads', schema=TEST_SCHEMA_BACKUP)
# import again to have a new import schema
imposm3_import(db_conf, './build/test.pbf')
assert table_exists('osm_roads', schema=TEST_SCHEMA_IMPORT)
imposm3_deploy(db_conf)
assert not table_exists('osm_roads', schema=TEST_SCHEMA_IMPORT)
assert table_exists('osm_roads', schema=TEST_SCHEMA_PRODUCTION)
assert table_exists('osm_roads', schema=TEST_SCHEMA_BACKUP)
imposm3_revert_deploy(db_conf)
assert table_exists('osm_roads', schema=TEST_SCHEMA_IMPORT)
assert table_exists('osm_roads', schema=TEST_SCHEMA_PRODUCTION)
assert not table_exists('osm_roads', schema=TEST_SCHEMA_BACKUP)
def test_remove_backup():
"""Remove backup succeeds"""
assert table_exists('osm_roads', schema=TEST_SCHEMA_IMPORT)
assert table_exists('osm_roads', schema=TEST_SCHEMA_PRODUCTION)
assert not table_exists('osm_roads', schema=TEST_SCHEMA_BACKUP)
imposm3_deploy(db_conf)
assert not table_exists('osm_roads', schema=TEST_SCHEMA_IMPORT)
assert table_exists('osm_roads', schema=TEST_SCHEMA_PRODUCTION)
assert table_exists('osm_roads', schema=TEST_SCHEMA_BACKUP)
imposm3_remove_backups(db_conf)
assert not table_exists('osm_roads', schema=TEST_SCHEMA_IMPORT)
assert table_exists('osm_roads', schema=TEST_SCHEMA_PRODUCTION)
assert not table_exists('osm_roads', schema=TEST_SCHEMA_BACKUP)
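# A rough sketch (not part of the test suite) of the schema rotation that the
# deploy tests above exercise: imposm3 moves the freshly imported tables from
# the import schema into production and keeps the previous production tables
# in the backup schema. Assuming this is done with plain
# ALTER TABLE ... SET SCHEMA statements, the rotation for a single table looks
# roughly like this (function and argument names are illustrative, schema and
# table names come from trusted configuration):
def _rotate_table_schemas(cur, table, import_schema, production_schema, backup_schema):
    # back up the current production table (when one exists) ...
    cur.execute('ALTER TABLE "%s"."%s" SET SCHEMA "%s"'
                % (production_schema, table, backup_schema))
    # ... then promote the newly imported table to production
    cur.execute('ALTER TABLE "%s"."%s" SET SCHEMA "%s"'
                % (import_schema, table, production_schema))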

109
test/single_table.osm Normal file
View File

@ -0,0 +1,109 @@
<?xml version='1.0' encoding='UTF-8'?>
<osm version="0.6" generator="Osmosis SNAPSHOT-r25240">
<node id="10001" version="1" timestamp="2011-11-11T00:11:11Z" lat="42" lon="10">
<tag k="random" v="tag"/>
<tag k="not" v="mapped"/>
</node>
<node id="10002" version="1" timestamp="2011-11-11T00:11:11Z" lat="42" lon="11">
<tag k="random" v="tag"/>
<tag k="but" v="mapped"/>
<tag k="poi" v="unicorn"/>
</node>
<node id="20001" version="1" timestamp="2011-11-11T00:11:11Z" lat="43" lon="11"/>
<node id="20002" version="1" timestamp="2011-11-11T00:11:11Z" lat="43" lon="12"/>
<node id="20003" version="1" timestamp="2011-11-11T00:11:11Z" lat="44" lon="12"/>
<node id="20004" version="1" timestamp="2011-11-11T00:11:11Z" lat="44" lon="11"/>
<way id="20101" version="1" timestamp="2011-11-11T00:11:11Z">
<nd ref="20001"/>
<nd ref="20002"/>
<tag k="not" v="mapped"/>
<tag k="random" v="tag"/>
</way>
<way id="20102" version="1" timestamp="2011-11-11T00:11:11Z">
<nd ref="20001"/>
<nd ref="20002"/>
<tag k="poi" v="unicorn"/> <!-- only mapped for nodes -->
<tag k="random" v="tag"/>
</way>
<way id="20103" version="1" timestamp="2011-11-11T00:11:11Z">
<nd ref="20001"/>
<nd ref="20002"/>
<tag k="building" v="yes"/> <!-- only mapped for polygons, but way not closed -->
<tag k="random" v="tag"/>
</way>
<way id="20201" version="1" timestamp="2011-11-11T00:11:11Z">
<nd ref="20001"/>
<nd ref="20002"/>
<tag k="highway" v="yes"/>
<tag k="random" v="tag"/>
<tag k="source" v="filtered out"/>
<tag k="created_by" v="filtered out"/>
</way>
<way id="20301" version="1" timestamp="2011-11-11T00:11:11Z">
<nd ref="20001"/>
<nd ref="20002"/>
<nd ref="20003"/>
<nd ref="20004"/>
<nd ref="20001"/>
<tag k="poi" v="unicorn"/> <!-- only mapped for nodes -->
<tag k="random" v="tag"/>
</way>
<way id="20401" version="1" timestamp="2011-11-11T00:11:11Z">
<nd ref="20001"/>
<nd ref="20002"/>
<nd ref="20003"/>
<nd ref="20004"/>
<nd ref="20001"/>
<tag k="building" v="yes"/>
<tag k="random" v="tag"/>
</way>
<way id="20501" version="1" timestamp="2011-11-11T00:11:11Z">
<nd ref="20001"/>
<nd ref="20002"/>
<nd ref="20003"/>
<nd ref="20004"/>
<nd ref="20001"/>
<tag k="landuse" v="grass"/>
<tag k="highway" v="pedestrian"/>
<tag k="area" v="yes"/>
<tag k="random" v="tag"/>
</way>
<way id="20502" version="1" timestamp="2011-11-11T00:11:11Z">
<nd ref="20001"/>
<nd ref="20002"/>
<nd ref="20003"/>
<nd ref="20004"/>
<nd ref="20001"/>
<tag k="landuse" v="grass"/>
<tag k="highway" v="pedestrian"/>
<tag k="area" v="no"/>
<tag k="random" v="tag"/>
</way>
<way id="20601" version="1" timestamp="2011-11-11T00:11:11Z">
<nd ref="20001"/>
<nd ref="20002"/>
<nd ref="20003"/>
<nd ref="20004"/>
<nd ref="20001"/>
<tag k="landuse" v="grass"/>
<tag k="highway" v="pedestrian"/>
<tag k="random" v="tag"/>
</way>
</osm>

46
test/single_table_mapping.json Normal file
View File

@ -0,0 +1,46 @@
{
"tags": {
"load_all": true,
"exclude": [
"created_by",
"source"
]
},
"tables": {
"all": {
"fields": [
{
"type": "id",
"name": "osm_id",
"key": null
},
{
"type": "geometry",
"name": "geometry",
"key": null
},
{
"type": "hstore_tags",
"name": "tags",
"key": null
}
],
"type": "geometry",
"type_mappings": {
"points": {
"amenity": ["__any__"],
"poi": ["__any__"],
"shop": ["__any__"]
},
"linestrings": {
"highway": ["__any__"]
},
"polygons": {
"landuse": ["__any__"],
"building": ["__any__"],
"shop": ["__any__"]
}
}
}
}
}
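The type_mappings above decide which elements end up in the single osm_all
table and with which geometry. The following sketch (illustrative Python, not
imposm3 code; the key sets are taken from the mapping above, the area handling
from the tests below) shows the behaviour the single-table tests assert:

# Key sets from the type_mappings above.
POINT_KEYS = set(['amenity', 'poi', 'shop'])
LINESTRING_KEYS = set(['highway'])
POLYGON_KEYS = set(['landuse', 'building', 'shop'])

def geometry_types_for(element_type, tags, is_closed):
    """Geometry types a type=geometry table would store for an element."""
    keys = set(tags)
    if element_type == 'node':
        return ['Point'] if keys & POINT_KEYS else []
    types = []
    if keys & LINESTRING_KEYS and tags.get('area') != 'yes':
        types.append('LineString')
    if is_closed and keys & POLYGON_KEYS and tags.get('area') != 'no':
        types.append('Polygon')
    return types

# e.g. a closed way with landuse=grass, highway=pedestrian and no area tag
# yields ['LineString', 'Polygon'], matching test_mapped_closed_way_without_area.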

130
test/single_table_test.py Normal file
View File

@ -0,0 +1,130 @@
import psycopg2
import psycopg2.extras
import helper as t
psycopg2.extras.register_hstore(psycopg2.connect(**t.db_conf), globally=True)
mapping_file = 'single_table_mapping.json'
#######################################################################
def test_import():
"""Import succeeds"""
t.drop_schemas()
assert not t.table_exists('osm_all', schema=t.TEST_SCHEMA_IMPORT)
t.imposm3_import(t.db_conf, './build/single_table.pbf', mapping_file)
assert t.table_exists('osm_all', schema=t.TEST_SCHEMA_IMPORT)
def test_deploy():
"""Deploy succeeds"""
assert not t.table_exists('osm_all', schema=t.TEST_SCHEMA_PRODUCTION)
t.imposm3_deploy(t.db_conf, mapping_file)
assert t.table_exists('osm_all', schema=t.TEST_SCHEMA_PRODUCTION)
assert not t.table_exists('osm_all', schema=t.TEST_SCHEMA_IMPORT)
#######################################################################
def test_non_mapped_node_is_missing():
"""Node without mapped tags is missing."""
t.assert_cached_node(10001, (10, 42))
assert not t.query_row(t.db_conf, 'osm_all', 10001)
def test_mapped_node():
"""Node is stored with all tags."""
t.assert_cached_node(10002, (11, 42))
poi = t.query_row(t.db_conf, 'osm_all', 10002)
assert poi['tags'] == {'random': 'tag', 'but': 'mapped', 'poi': 'unicorn'}
def test_non_mapped_way_is_missing():
"""Way without mapped tags is missing."""
t.assert_cached_way(20101)
assert not t.query_row(t.db_conf, 'osm_all', 20101)
t.assert_cached_way(20102)
assert not t.query_row(t.db_conf, 'osm_all', 20102)
t.assert_cached_way(20103)
assert not t.query_row(t.db_conf, 'osm_all', 20103)
def test_mapped_way():
"""Way is stored with all tags."""
t.assert_cached_way(20201)
highway = t.query_row(t.db_conf, 'osm_all', 20201)
assert highway['tags'] == {'random': 'tag', 'highway': 'yes'}
def test_non_mapped_closed_way_is_missing():
"""Closed way without mapped tags is missing."""
t.assert_cached_way(20301)
assert not t.query_row(t.db_conf, 'osm_all', 20301)
def test_mapped_closed_way():
"""Closed way is stored with all tags."""
t.assert_cached_way(20401)
building = t.query_row(t.db_conf, 'osm_all', 20401)
assert building['tags'] == {'random': 'tag', 'building': 'yes'}
def test_mapped_closed_way_area_yes():
"""Closed way with area=yes is not stored as linestring."""
t.assert_cached_way(20501)
elem = t.query_row(t.db_conf, 'osm_all', 20501)
assert elem['geometry'].type == 'Polygon', elem['geometry'].type
assert elem['tags'] == {'random': 'tag', 'landuse': 'grass', 'highway': 'pedestrian', 'area': 'yes'}
def test_mapped_closed_way_area_no():
"""Closed way with area=no is not stored as polygon."""
t.assert_cached_way(20502)
elem = t.query_row(t.db_conf, 'osm_all', 20502)
assert elem['geometry'].type == 'LineString', elem['geometry'].type
assert elem['tags'] == {'random': 'tag', 'landuse': 'grass', 'highway': 'pedestrian', 'area': 'no'}
def test_mapped_closed_way_without_area():
"""Closed way without area is stored as mapped (linestring and polygon)."""
t.assert_cached_way(20601)
elems = t.query_row(t.db_conf, 'osm_all', 20601)
assert len(elems) == 2
elems.sort(key=lambda x: x['geometry'].type)
    assert elems[0]['geometry'].type == 'LineString', elems[0]['geometry'].type
assert elems[0]['tags'] == {'random': 'tag', 'landuse': 'grass', 'highway': 'pedestrian'}
    assert elems[1]['geometry'].type == 'Polygon', elems[1]['geometry'].type
assert elems[1]['tags'] == {'random': 'tag', 'landuse': 'grass', 'highway': 'pedestrian'}
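# Usage sketch (not one of the tests): because register_hstore() above makes
# psycopg2 return the hstore 'tags' column as a dict, ad-hoc queries against
# osm_all are straightforward. The helper name is illustrative; it assumes the
# deployed table is visible on the search_path (otherwise prefix it with the
# production schema).
def _rows_with_tag_key(key):
    conn = psycopg2.connect(**t.db_conf)
    cur = conn.cursor()
    # hstore '?' operator: does the tags column contain this key?
    cur.execute("SELECT osm_id, tags FROM osm_all WHERE tags ? %s", (key,))
    return cur.fetchall()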
#######################################################################
def test_deploy_and_revert_deploy():
"""Revert deploy succeeds"""
assert not t.table_exists('osm_all', schema=t.TEST_SCHEMA_IMPORT)
assert t.table_exists('osm_all', schema=t.TEST_SCHEMA_PRODUCTION)
assert not t.table_exists('osm_all', schema=t.TEST_SCHEMA_BACKUP)
# import again to have a new import schema
t.imposm3_import(t.db_conf, './build/single_table.pbf', mapping_file)
assert t.table_exists('osm_all', schema=t.TEST_SCHEMA_IMPORT)
t.imposm3_deploy(t.db_conf, mapping_file)
assert not t.table_exists('osm_all', schema=t.TEST_SCHEMA_IMPORT)
assert t.table_exists('osm_all', schema=t.TEST_SCHEMA_PRODUCTION)
assert t.table_exists('osm_all', schema=t.TEST_SCHEMA_BACKUP)
t.imposm3_revert_deploy(t.db_conf, mapping_file)
assert t.table_exists('osm_all', schema=t.TEST_SCHEMA_IMPORT)
assert t.table_exists('osm_all', schema=t.TEST_SCHEMA_PRODUCTION)
assert not t.table_exists('osm_all', schema=t.TEST_SCHEMA_BACKUP)
def test_remove_backup():
"""Remove backup succeeds"""
assert t.table_exists('osm_all', schema=t.TEST_SCHEMA_IMPORT)
assert t.table_exists('osm_all', schema=t.TEST_SCHEMA_PRODUCTION)
assert not t.table_exists('osm_all', schema=t.TEST_SCHEMA_BACKUP)
t.imposm3_deploy(t.db_conf, mapping_file)
assert not t.table_exists('osm_all', schema=t.TEST_SCHEMA_IMPORT)
assert t.table_exists('osm_all', schema=t.TEST_SCHEMA_PRODUCTION)
assert t.table_exists('osm_all', schema=t.TEST_SCHEMA_BACKUP)
t.imposm3_remove_backups(t.db_conf, mapping_file)
assert not t.table_exists('osm_all', schema=t.TEST_SCHEMA_IMPORT)
assert t.table_exists('osm_all', schema=t.TEST_SCHEMA_PRODUCTION)
assert not t.table_exists('osm_all', schema=t.TEST_SCHEMA_BACKUP)

View File

@ -15,7 +15,7 @@ import (
type NodeWriter struct {
OsmElemWriter
nodes chan *element.Node
pointMatcher *mapping.TagMatcher
pointMatcher mapping.NodeMatcher
}
func NewNodeWriter(
@ -23,7 +23,7 @@ func NewNodeWriter(
nodes chan *element.Node,
inserter database.Inserter,
progress *stats.Statistics,
matcher *mapping.TagMatcher,
matcher mapping.NodeMatcher,
srid int,
) *OsmElemWriter {
nw := NodeWriter{
@ -48,7 +48,7 @@ func (nw *NodeWriter) loop() {
for n := range nw.nodes {
nw.progress.AddNodes(1)
if matches := nw.pointMatcher.Match(&n.Tags); len(matches) > 0 {
if matches := nw.pointMatcher.MatchNode(n); len(matches) > 0 {
proj.NodeToMerc(n)
if nw.expireor != nil {
nw.expireor.Expire(n.Long, n.Lat)

View File

@ -17,7 +17,7 @@ import (
type RelationWriter struct {
OsmElemWriter
rel chan *element.Relation
polygonMatcher *mapping.TagMatcher
polygonMatcher mapping.RelWayMatcher
}
func NewRelationWriter(
@ -26,7 +26,7 @@ func NewRelationWriter(
rel chan *element.Relation,
inserter database.Inserter,
progress *stats.Statistics,
matcher *mapping.TagMatcher,
matcher mapping.RelWayMatcher,
srid int,
) *OsmElemWriter {
rw := RelationWriter{
@ -89,7 +89,7 @@ NextRel:
}
	// check for matches before building the geometry
matches := rw.polygonMatcher.Match(&r.Tags)
matches := rw.polygonMatcher.MatchRelation(r)
if len(matches) == 0 {
continue NextRel
}
@ -140,7 +140,7 @@ NextRel:
}
}
for _, m := range mapping.SelectRelationPolygons(rw.polygonMatcher, r.Tags, r.Members) {
for _, m := range mapping.SelectRelationPolygons(rw.polygonMatcher, r) {
err = rw.osmCache.InsertedWays.PutWay(m.Way)
if err != nil {
log.Warn(err)

View File

@ -16,8 +16,8 @@ import (
type WayWriter struct {
OsmElemWriter
ways chan *element.Way
lineMatcher *mapping.TagMatcher
polygonMatcher *mapping.TagMatcher
lineMatcher mapping.WayMatcher
polygonMatcher mapping.WayMatcher
}
func NewWayWriter(
@ -26,8 +26,8 @@ func NewWayWriter(
ways chan *element.Way,
inserter database.Inserter,
progress *stats.Statistics,
polygonMatcher *mapping.TagMatcher,
lineMatcher *mapping.TagMatcher,
polygonMatcher mapping.WayMatcher,
lineMatcher mapping.WayMatcher,
srid int,
) *OsmElemWriter {
ww := WayWriter{
@ -69,7 +69,7 @@ func (ww *WayWriter) loop() {
proj.NodesToMerc(w.Nodes)
inserted := false
if matches := ww.lineMatcher.Match(&w.Tags); len(matches) > 0 {
if matches := ww.lineMatcher.MatchWay(w); len(matches) > 0 {
err := ww.buildAndInsert(geos, w, matches, false)
if err != nil {
if errl, ok := err.(ErrorLevel); !ok || errl.Level() > 0 {
@ -81,7 +81,7 @@ func (ww *WayWriter) loop() {
}
if w.IsClosed() && !insertedAsRelation {
// only add polygons that were not inserted as a MultiPolygon relation
if matches := ww.polygonMatcher.Match(&w.Tags); len(matches) > 0 {
if matches := ww.polygonMatcher.MatchWay(w); len(matches) > 0 {
err := ww.buildAndInsert(geos, w, matches, true)
if err != nil {
if errl, ok := err.(ErrorLevel); !ok || errl.Level() > 0 {