config: remove global option structs

master
Oliver Tonnhofer 2018-06-07 20:23:06 +02:00
parent 8c526b0543
commit 6054ef6a34
6 changed files with 201 additions and 187 deletions
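The change drops the package-level `BaseOptions`/`ImportOptions` globals and the shared `ImportFlags`/`DiffFlags`/`RunFlags` flag sets; each `Parse*` function now builds its own `flag.FlagSet` and returns the parsed options, which the subcommands receive as arguments. A minimal sketch of the resulting call pattern, assuming the upstream `github.com/omniscale/imposm3/...` import paths and placeholder flag values:

    package main

    import (
    	"github.com/omniscale/imposm3/config"
    	"github.com/omniscale/imposm3/import_"
    	"github.com/omniscale/imposm3/stats"
    )

    func main() {
    	// ParseImport builds its own flag.FlagSet and returns a config.Import
    	// value instead of filling package-level globals.
    	opts := config.ParseImport([]string{
    		"-connection", "postgis://localhost/osm", // placeholder connection string
    		"-mapping", "mapping.json",               // placeholder mapping file
    		"-read", "extract.osm.pbf",               // placeholder PBF file
    		"-write",
    	})
    	if opts.Base.Httpprofile != "" {
    		stats.StartHttpPProf(opts.Base.Httpprofile)
    	}
    	import_.Import(opts)
    }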


@@ -46,25 +46,25 @@ func Main(usage func()) {
 	switch os.Args[1] {
 	case "import":
-		config.ParseImport(os.Args[2:])
-		if config.BaseOptions.Httpprofile != "" {
-			stats.StartHttpPProf(config.BaseOptions.Httpprofile)
+		opts := config.ParseImport(os.Args[2:])
+		if opts.Base.Httpprofile != "" {
+			stats.StartHttpPProf(opts.Base.Httpprofile)
 		}
-		import_.Import()
+		import_.Import(opts)
 	case "diff":
-		config.ParseDiffImport(os.Args[2:])
-		if config.BaseOptions.Httpprofile != "" {
-			stats.StartHttpPProf(config.BaseOptions.Httpprofile)
+		opts, files := config.ParseDiffImport(os.Args[2:])
+		if opts.Httpprofile != "" {
+			stats.StartHttpPProf(opts.Httpprofile)
 		}
-		update.Diff()
+		update.Diff(opts, files)
 	case "run":
-		config.ParseRunImport(os.Args[2:])
-		if config.BaseOptions.Httpprofile != "" {
-			stats.StartHttpPProf(config.BaseOptions.Httpprofile)
+		opts := config.ParseRunImport(os.Args[2:])
+		if opts.Httpprofile != "" {
+			stats.StartHttpPProf(opts.Httpprofile)
 		}
-		update.Run()
+		update.Run(opts)
 	case "query-cache":
 		query.Query(os.Args[2:])
 	case "version":


@@ -38,11 +38,7 @@ const defaultSchemaImport = "import"
 const defaultSchemaProduction = "public"
 const defaultSchemaBackup = "backup"
-var ImportFlags = flag.NewFlagSet("import", flag.ExitOnError)
-var DiffFlags = flag.NewFlagSet("diff", flag.ExitOnError)
-var RunFlags = flag.NewFlagSet("run", flag.ExitOnError)
-type _BaseOptions struct {
+type Base struct {
 	Connection string
 	CacheDir   string
 	DiffDir    string
@@ -61,7 +57,7 @@ type _BaseOptions struct {
 	DiffStateBefore time.Duration
 }
-func (o *_BaseOptions) updateFromConfig() error {
+func (o *Base) updateFromConfig() error {
 	conf := &Config{
 		CacheDir: defaultCacheDir,
 		Srid:     defaultSrid,
@@ -149,7 +145,7 @@ func (o *_BaseOptions) updateFromConfig() error {
 	return nil
 }
-func (o *_BaseOptions) check() []error {
+func (o *Base) check() []error {
 	errs := []error{}
 	if o.Srid != 3857 && o.Srid != 4326 {
 		errs = append(errs, errors.New("only -srid=3857 or -srid=4326 are supported"))
@@ -160,7 +156,8 @@ func (o *_BaseOptions) check() []error {
 	return errs
 }
-type _ImportOptions struct {
+type Import struct {
+	Base           Base
 	Overwritecache bool
 	Appendcache    bool
 	Read           string
@@ -172,129 +169,137 @@ type _ImportOptions struct {
 	RemoveBackup bool
 }
-var BaseOptions = _BaseOptions{}
-var ImportOptions = _ImportOptions{}
-func addBaseFlags(flags *flag.FlagSet) {
-	flags.StringVar(&BaseOptions.Connection, "connection", "", "connection parameters")
-	flags.StringVar(&BaseOptions.CacheDir, "cachedir", defaultCacheDir, "cache directory")
-	flags.StringVar(&BaseOptions.DiffDir, "diffdir", "", "diff directory for last.state.txt")
-	flags.StringVar(&BaseOptions.MappingFile, "mapping", "", "mapping file")
-	flags.IntVar(&BaseOptions.Srid, "srid", defaultSrid, "srs id")
-	flags.StringVar(&BaseOptions.LimitTo, "limitto", "", "limit to geometries")
-	flags.Float64Var(&BaseOptions.LimitToCacheBuffer, "limittocachebuffer", 0.0, "limit to buffer for cache")
-	flags.StringVar(&BaseOptions.ConfigFile, "config", "", "config (json)")
-	flags.StringVar(&BaseOptions.Httpprofile, "httpprofile", "", "bind address for profile server")
-	flags.BoolVar(&BaseOptions.Quiet, "quiet", false, "quiet log output")
-	flags.StringVar(&BaseOptions.Schemas.Import, "dbschema-import", defaultSchemaImport, "db schema for imports")
-	flags.StringVar(&BaseOptions.Schemas.Production, "dbschema-production", defaultSchemaProduction, "db schema for production")
-	flags.StringVar(&BaseOptions.Schemas.Backup, "dbschema-backup", defaultSchemaBackup, "db schema for backups")
+func addBaseFlags(opts *Base, flags *flag.FlagSet) {
+	flags.StringVar(&opts.Connection, "connection", "", "connection parameters")
+	flags.StringVar(&opts.CacheDir, "cachedir", defaultCacheDir, "cache directory")
+	flags.StringVar(&opts.DiffDir, "diffdir", "", "diff directory for last.state.txt")
+	flags.StringVar(&opts.MappingFile, "mapping", "", "mapping file")
+	flags.IntVar(&opts.Srid, "srid", defaultSrid, "srs id")
+	flags.StringVar(&opts.LimitTo, "limitto", "", "limit to geometries")
+	flags.Float64Var(&opts.LimitToCacheBuffer, "limittocachebuffer", 0.0, "limit to buffer for cache")
+	flags.StringVar(&opts.ConfigFile, "config", "", "config (json)")
+	flags.StringVar(&opts.Httpprofile, "httpprofile", "", "bind address for profile server")
+	flags.BoolVar(&opts.Quiet, "quiet", false, "quiet log output")
+	flags.StringVar(&opts.Schemas.Import, "dbschema-import", defaultSchemaImport, "db schema for imports")
+	flags.StringVar(&opts.Schemas.Production, "dbschema-production", defaultSchemaProduction, "db schema for production")
+	flags.StringVar(&opts.Schemas.Backup, "dbschema-backup", defaultSchemaBackup, "db schema for backups")
 }
-func UsageImport() {
-	fmt.Fprintf(os.Stderr, "Usage: %s %s [args]\n\n", os.Args[0], os.Args[1])
-	ImportFlags.PrintDefaults()
-	os.Exit(2)
-}
-func UsageDiff() {
-	fmt.Fprintf(os.Stderr, "Usage: %s %s [args] [.osc.gz, ...]\n\n", os.Args[0], os.Args[1])
-	DiffFlags.PrintDefaults()
-	os.Exit(2)
-}
-func UsageRun() {
-	fmt.Fprintf(os.Stderr, "Usage: %s %s [args] [.osc.gz, ...]\n\n", os.Args[0], os.Args[1])
-	DiffFlags.PrintDefaults()
-	os.Exit(2)
-}
-func init() {
-	ImportFlags.Usage = UsageImport
-	DiffFlags.Usage = UsageDiff
-	RunFlags.Usage = UsageRun
-	addBaseFlags(DiffFlags)
-	addBaseFlags(ImportFlags)
-	addBaseFlags(RunFlags)
-	ImportFlags.BoolVar(&ImportOptions.Overwritecache, "overwritecache", false, "overwritecache")
-	ImportFlags.BoolVar(&ImportOptions.Appendcache, "appendcache", false, "append cache")
-	ImportFlags.StringVar(&ImportOptions.Read, "read", "", "read")
-	ImportFlags.BoolVar(&ImportOptions.Write, "write", false, "write")
-	ImportFlags.BoolVar(&ImportOptions.Optimize, "optimize", false, "optimize")
-	ImportFlags.BoolVar(&ImportOptions.Diff, "diff", false, "enable diff support")
-	ImportFlags.BoolVar(&ImportOptions.DeployProduction, "deployproduction", false, "deploy production")
-	ImportFlags.BoolVar(&ImportOptions.RevertDeploy, "revertdeploy", false, "revert deploy to production")
-	ImportFlags.BoolVar(&ImportOptions.RemoveBackup, "removebackup", false, "remove backups from deploy")
-	ImportFlags.DurationVar(&BaseOptions.DiffStateBefore, "diff-state-before", 0, "set initial diff sequence before")
-	DiffFlags.StringVar(&BaseOptions.ExpireTilesDir, "expiretiles-dir", "", "write expire tiles into dir")
-	DiffFlags.IntVar(&BaseOptions.ExpireTilesZoom, "expiretiles-zoom", 14, "write expire tiles in this zoom level")
-	RunFlags.StringVar(&BaseOptions.ExpireTilesDir, "expiretiles-dir", "", "write expire tiles into dir")
-	RunFlags.IntVar(&BaseOptions.ExpireTilesZoom, "expiretiles-zoom", 14, "write expire tiles in this zoom level")
-	RunFlags.DurationVar(&BaseOptions.ReplicationInterval, "replication-interval", time.Minute, "replication interval as duration (1m, 1h, 24h)")
-}
-func ParseImport(args []string) {
+func ParseImport(args []string) Import {
+	flags := flag.NewFlagSet("import", flag.ExitOnError)
+	opts := Import{}
+	addBaseFlags(&opts.Base, flags)
+	flags.BoolVar(&opts.Overwritecache, "overwritecache", false, "overwritecache")
+	flags.BoolVar(&opts.Appendcache, "appendcache", false, "append cache")
+	flags.StringVar(&opts.Read, "read", "", "read")
+	flags.BoolVar(&opts.Write, "write", false, "write")
+	flags.BoolVar(&opts.Optimize, "optimize", false, "optimize")
+	flags.BoolVar(&opts.Diff, "diff", false, "enable diff support")
+	flags.BoolVar(&opts.DeployProduction, "deployproduction", false, "deploy production")
+	flags.BoolVar(&opts.RevertDeploy, "revertdeploy", false, "revert deploy to production")
+	flags.BoolVar(&opts.RemoveBackup, "removebackup", false, "remove backups from deploy")
+	flags.DurationVar(&opts.Base.DiffStateBefore, "diff-state-before", 0, "set initial diff sequence before")
+	flags.Usage = func() {
+		fmt.Fprintf(os.Stderr, "Usage: %s %s [args]\n\n", os.Args[0], os.Args[1])
+		flags.PrintDefaults()
+		os.Exit(2)
+	}
 	if len(args) == 0 {
-		UsageImport()
+		flags.Usage()
 	}
-	err := ImportFlags.Parse(args)
+	err := flags.Parse(args)
 	if err != nil {
 		log.Fatal(err)
 	}
-	err = BaseOptions.updateFromConfig()
+	err = opts.Base.updateFromConfig()
 	if err != nil {
 		log.Fatal(err)
 	}
-	errs := BaseOptions.check()
+	errs := opts.Base.check()
 	if len(errs) != 0 {
 		reportErrors(errs)
-		UsageImport()
+		flags.Usage()
 	}
+	return opts
 }
-func ParseDiffImport(args []string) {
+func ParseDiffImport(args []string) (Base, []string) {
+	flags := flag.NewFlagSet("diff", flag.ExitOnError)
+	opts := Base{}
+	addBaseFlags(&opts, flags)
+	flags.StringVar(&opts.ExpireTilesDir, "expiretiles-dir", "", "write expire tiles into dir")
+	flags.IntVar(&opts.ExpireTilesZoom, "expiretiles-zoom", 14, "write expire tiles in this zoom level")
+	flags.Usage = func() {
+		fmt.Fprintf(os.Stderr, "Usage: %s %s [args] [.osc.gz, ...]\n\n", os.Args[0], os.Args[1])
+		flags.PrintDefaults()
+		os.Exit(2)
+	}
 	if len(args) == 0 {
-		UsageDiff()
+		flags.Usage()
 	}
-	err := DiffFlags.Parse(args)
+	err := flags.Parse(args)
 	if err != nil {
 		log.Fatal(err)
 	}
-	err = BaseOptions.updateFromConfig()
+	err = opts.updateFromConfig()
 	if err != nil {
 		log.Fatal(err)
 	}
-	errs := BaseOptions.check()
+	errs := opts.check()
 	if len(errs) != 0 {
 		reportErrors(errs)
-		UsageDiff()
+		flags.Usage()
 	}
+	return opts, flags.Args()
 }
-func ParseRunImport(args []string) {
+func ParseRunImport(args []string) Base {
+	flags := flag.NewFlagSet("run", flag.ExitOnError)
+	opts := Base{}
+	addBaseFlags(&opts, flags)
+	flags.StringVar(&opts.ExpireTilesDir, "expiretiles-dir", "", "write expire tiles into dir")
+	flags.IntVar(&opts.ExpireTilesZoom, "expiretiles-zoom", 14, "write expire tiles in this zoom level")
+	flags.DurationVar(&opts.ReplicationInterval, "replication-interval", time.Minute, "replication interval as duration (1m, 1h, 24h)")
+	flags.Usage = func() {
+		fmt.Fprintf(os.Stderr, "Usage: %s %s [args] [.osc.gz, ...]\n\n", os.Args[0], os.Args[1])
+		flags.PrintDefaults()
+		os.Exit(2)
+	}
 	if len(args) == 0 {
-		UsageRun()
+		flags.Usage()
 	}
-	err := RunFlags.Parse(args)
+	err := flags.Parse(args)
 	if err != nil {
 		log.Fatal(err)
 	}
-	err = BaseOptions.updateFromConfig()
+	err = opts.updateFromConfig()
 	if err != nil {
 		log.Fatal(err)
 	}
-	errs := BaseOptions.check()
+	errs := opts.check()
 	if len(errs) != 0 {
 		reportErrors(errs)
-		UsageRun()
+		flags.Usage()
 	}
+	return opts
 }
 func reportErrors(errs []error) {
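`ParseDiffImport` now returns both the parsed `Base` options and the remaining non-flag arguments (the `.osc.gz` files), so callers no longer reach into the shared `DiffFlags` flag set. A small sketch of calling it directly, with placeholder argument values and assumed `github.com/omniscale/imposm3/...` import paths:

    package main

    import (
    	"github.com/omniscale/imposm3/config"
    	"github.com/omniscale/imposm3/update"
    )

    func main() {
    	// The trailing non-flag arguments come back as the second return value
    	// (previously read via config.DiffFlags.Args()).
    	opts, files := config.ParseDiffImport([]string{
    		"-connection", "postgis://localhost/osm", // placeholder connection string
    		"-mapping", "mapping.json",               // placeholder mapping file
    		"changes.osc.gz",                         // placeholder change file
    	})
    	update.Diff(opts, files)
    }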


@@ -21,27 +21,28 @@ import (
 var log = logging.NewLogger("")
-func Import() {
-	if config.BaseOptions.Quiet {
+func Import(importOpts config.Import) {
+	baseOpts := importOpts.Base
+	if baseOpts.Quiet {
 		logging.SetQuiet(true)
 	}
-	if (config.ImportOptions.Write || config.ImportOptions.Read != "") && (config.ImportOptions.RevertDeploy || config.ImportOptions.RemoveBackup) {
+	if (importOpts.Write || importOpts.Read != "") && (importOpts.RevertDeploy || importOpts.RemoveBackup) {
 		log.Fatal("-revertdeploy and -removebackup not compatible with -read/-write")
 	}
-	if config.ImportOptions.RevertDeploy && (config.ImportOptions.RemoveBackup || config.ImportOptions.DeployProduction) {
+	if importOpts.RevertDeploy && (importOpts.RemoveBackup || importOpts.DeployProduction) {
 		log.Fatal("-revertdeploy not compatible with -deployproduction/-removebackup")
 	}
 	var geometryLimiter *limit.Limiter
-	if (config.ImportOptions.Write || config.ImportOptions.Read != "") && config.BaseOptions.LimitTo != "" {
+	if (importOpts.Write || importOpts.Read != "") && baseOpts.LimitTo != "" {
 		var err error
 		step := log.StartStep("Reading limitto geometries")
 		geometryLimiter, err = limit.NewFromGeoJSON(
-			config.BaseOptions.LimitTo,
-			config.BaseOptions.LimitToCacheBuffer,
-			config.BaseOptions.Srid,
+			baseOpts.LimitTo,
+			baseOpts.LimitToCacheBuffer,
+			baseOpts.Srid,
 		)
 		if err != nil {
 			log.Fatal(err)
@@ -49,23 +50,23 @@ func Import() {
 		log.StopStep(step)
 	}
-	tagmapping, err := mapping.FromFile(config.BaseOptions.MappingFile)
+	tagmapping, err := mapping.FromFile(baseOpts.MappingFile)
 	if err != nil {
 		log.Fatal("error in mapping file: ", err)
 	}
 	var db database.DB
-	if config.ImportOptions.Write || config.ImportOptions.DeployProduction || config.ImportOptions.RevertDeploy || config.ImportOptions.RemoveBackup || config.ImportOptions.Optimize {
-		if config.BaseOptions.Connection == "" {
+	if importOpts.Write || importOpts.DeployProduction || importOpts.RevertDeploy || importOpts.RemoveBackup || importOpts.Optimize {
+		if baseOpts.Connection == "" {
 			log.Fatal("missing connection option")
 		}
 		conf := database.Config{
-			ConnectionParams: config.BaseOptions.Connection,
-			Srid:             config.BaseOptions.Srid,
-			ImportSchema:     config.BaseOptions.Schemas.Import,
-			ProductionSchema: config.BaseOptions.Schemas.Production,
-			BackupSchema:     config.BaseOptions.Schemas.Backup,
+			ConnectionParams: baseOpts.Connection,
+			Srid:             baseOpts.Srid,
+			ImportSchema:     baseOpts.Schemas.Import,
+			ProductionSchema: baseOpts.Schemas.Production,
+			BackupSchema:     baseOpts.Schemas.Backup,
 		}
 		db, err = database.Open(conf, &tagmapping.Conf)
 		if err != nil {
@@ -74,16 +75,16 @@ func Import() {
 		defer db.Close()
 	}
-	osmCache := cache.NewOSMCache(config.BaseOptions.CacheDir)
-	if config.ImportOptions.Read != "" && osmCache.Exists() {
-		if config.ImportOptions.Overwritecache {
-			log.Printf("removing existing cache %s", config.BaseOptions.CacheDir)
+	osmCache := cache.NewOSMCache(baseOpts.CacheDir)
+	if importOpts.Read != "" && osmCache.Exists() {
+		if importOpts.Overwritecache {
+			log.Printf("removing existing cache %s", baseOpts.CacheDir)
 			err := osmCache.Remove()
 			if err != nil {
 				log.Fatal("unable to remove cache:", err)
 			}
-		} else if !config.ImportOptions.Appendcache {
+		} else if !importOpts.Appendcache {
 			log.Fatal("cache already exists use -appendcache or -overwritecache")
 		}
 	}
@@ -92,7 +93,7 @@ func Import() {
 	var elementCounts *stats.ElementCounts
-	if config.ImportOptions.Read != "" {
+	if importOpts.Read != "" {
 		step := log.StartStep("Reading OSM data")
 		err = osmCache.Open()
 		if err != nil {
@@ -100,17 +101,17 @@ func Import() {
 		}
 		progress := stats.NewStatsReporter()
-		if !config.ImportOptions.Appendcache {
+		if !importOpts.Appendcache {
 			// enable optimization if we don't append to existing cache
 			osmCache.Coords.SetLinearImport(true)
 		}
 		readLimiter := geometryLimiter
-		if config.BaseOptions.LimitToCacheBuffer == 0.0 {
+		if baseOpts.LimitToCacheBuffer == 0.0 {
 			readLimiter = nil
 		}
-		err := reader.ReadPbf(config.ImportOptions.Read,
+		err := reader.ReadPbf(importOpts.Read,
 			osmCache,
 			progress,
 			tagmapping,
@@ -124,13 +125,13 @@ func Import() {
 		elementCounts = progress.Stop()
 		osmCache.Close()
 		log.StopStep(step)
-		if config.ImportOptions.Diff {
-			diffstate, err := state.FromPbf(config.ImportOptions.Read, config.BaseOptions.DiffStateBefore, config.BaseOptions.ReplicationUrl, config.BaseOptions.ReplicationInterval)
+		if importOpts.Diff {
+			diffstate, err := state.FromPbf(importOpts.Read, baseOpts.DiffStateBefore, baseOpts.ReplicationUrl, baseOpts.ReplicationInterval)
 			if err != nil {
 				log.Print("error parsing diff state form PBF", err)
 			} else if diffstate != nil {
-				os.MkdirAll(config.BaseOptions.DiffDir, 0755)
-				err := state.WriteLastState(config.BaseOptions.DiffDir, diffstate)
+				os.MkdirAll(baseOpts.DiffDir, 0755)
+				err := state.WriteLastState(baseOpts.DiffDir, diffstate)
 				if err != nil {
 					log.Print("error writing last.state.txt: ", err)
 				}
@@ -138,7 +139,7 @@ func Import() {
 		}
 	}
-	if config.ImportOptions.Write {
+	if importOpts.Write {
 		stepImport := log.StartStep("Importing OSM data")
 		stepWrite := log.StartStep("Writing OSM data")
 		progress := stats.NewStatsReporterWithEstimate(elementCounts)
@@ -159,8 +160,8 @@ func Import() {
 		}
 		var diffCache *cache.DiffCache
-		if config.ImportOptions.Diff {
-			diffCache = cache.NewDiffCache(config.BaseOptions.CacheDir)
+		if importOpts.Diff {
+			diffCache = cache.NewDiffCache(baseOpts.CacheDir)
 			if err = diffCache.Remove(); err != nil {
 				log.Fatal(err)
 			}
@@ -187,7 +188,7 @@ func Import() {
 			tagmapping.PolygonMatcher,
 			tagmapping.RelationMatcher,
 			tagmapping.RelationMemberMatcher,
-			config.BaseOptions.Srid)
+			baseOpts.Srid)
 		relWriter.SetLimiter(geometryLimiter)
 		relWriter.EnableConcurrent()
 		relWriter.Start()
@@ -200,7 +201,7 @@ func Import() {
 			ways, db,
 			progress,
 			tagmapping.PolygonMatcher, tagmapping.LineStringMatcher,
-			config.BaseOptions.Srid)
+			baseOpts.Srid)
 		wayWriter.SetLimiter(geometryLimiter)
 		wayWriter.EnableConcurrent()
 		wayWriter.Start()
@@ -211,7 +212,7 @@ func Import() {
 		nodeWriter := writer.NewNodeWriter(osmCache, nodes, db,
 			progress,
 			tagmapping.PointMatcher,
-			config.BaseOptions.Srid)
+			baseOpts.Srid)
 		nodeWriter.SetLimiter(geometryLimiter)
 		nodeWriter.EnableConcurrent()
 		nodeWriter.Start()
@@ -225,7 +226,7 @@ func Import() {
 		progress.Stop()
-		if config.ImportOptions.Diff {
+		if importOpts.Diff {
 			diffCache.Close()
 		}
@@ -249,7 +250,7 @@ func Import() {
 		log.StopStep(stepImport)
 	}
-	if config.ImportOptions.Optimize {
+	if importOpts.Optimize {
 		if db, ok := db.(database.Optimizer); ok {
 			if err := db.Optimize(); err != nil {
 				log.Fatal(err)
@@ -259,7 +260,7 @@ func Import() {
 		}
 	}
-	if config.ImportOptions.DeployProduction {
+	if importOpts.DeployProduction {
 		if db, ok := db.(database.Deployer); ok {
 			if err := db.Deploy(); err != nil {
 				log.Fatal(err)
@@ -269,7 +270,7 @@ func Import() {
 		}
 	}
-	if config.ImportOptions.RevertDeploy {
+	if importOpts.RevertDeploy {
 		if db, ok := db.(database.Deployer); ok {
 			if err := db.RevertDeploy(); err != nil {
 				log.Fatal(err)
@@ -279,7 +280,7 @@ func Import() {
 		}
 	}
-	if config.ImportOptions.RemoveBackup {
+	if importOpts.RemoveBackup {
 		if db, ok := db.(database.Deployer); ok {
 			if err := db.RemoveBackup(); err != nil {
 				log.Fatal(err)


@@ -61,8 +61,8 @@ func (s *importTestSuite) importOsm(t *testing.T) {
 		"-removebackup=false",
 	}
-	config.ParseImport(importArgs)
-	import_.Import()
+	opts := config.ParseImport(importArgs)
+	import_.Import(opts)
 }
 func (s *importTestSuite) deployOsm(t *testing.T) {
@@ -82,8 +82,8 @@ func (s *importTestSuite) deployOsm(t *testing.T) {
 		"-quiet",
 	}
-	config.ParseImport(importArgs)
-	import_.Import()
+	opts := config.ParseImport(importArgs)
+	import_.Import(opts)
 }
 func (s *importTestSuite) revertDeployOsm(t *testing.T) {
@@ -105,8 +105,8 @@ func (s *importTestSuite) revertDeployOsm(t *testing.T) {
 		"-quiet",
 	}
-	config.ParseImport(importArgs)
-	import_.Import()
+	opts := config.ParseImport(importArgs)
+	import_.Import(opts)
 }
 func (s *importTestSuite) cache(t *testing.T) *cache.OSMCache {
@@ -141,8 +141,8 @@ func (s *importTestSuite) removeBackupOsm(t *testing.T) {
 		"-quiet",
 	}
-	config.ParseImport(importArgs)
-	import_.Import()
+	opts := config.ParseImport(importArgs)
+	import_.Import(opts)
 }
 func (s *importTestSuite) updateOsm(t *testing.T, diffFile string) {
@@ -157,8 +157,8 @@ func (s *importTestSuite) updateOsm(t *testing.T, diffFile string) {
 		args = append(args, "-expiretiles-dir", s.config.expireTileDir)
 	}
 	args = append(args, diffFile)
-	config.ParseDiffImport(args)
-	update.Diff()
+	opts, files := config.ParseDiffImport(args)
+	update.Diff(opts, files)
 }
 func (s *importTestSuite) dropSchemas() {
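Because each `Parse*` call now creates a fresh `flag.FlagSet` instead of registering flags on package-level sets in `init()`, the test suite can parse different argument lists back to back without flag-redefinition conflicts. A hypothetical helper illustrating that pattern (package name and import paths are assumptions, not part of this commit):

    package test

    import (
    	"testing"

    	"github.com/omniscale/imposm3/config"
    	"github.com/omniscale/imposm3/import_"
    )

    // Hypothetical helper: each config.ParseImport call builds its own
    // flag.FlagSet, so repeated parses with different argument sets do not
    // clash the way re-registering flags on a shared FlagSet would.
    func runImport(t *testing.T, args []string) {
    	t.Helper()
    	opts := config.ParseImport(args)
    	import_.Import(opts)
    }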


@@ -25,33 +25,33 @@ import (
 var log = logging.NewLogger("diff")
-func Diff() {
-	if config.BaseOptions.Quiet {
+func Diff(baseOpts config.Base, files []string) {
+	if baseOpts.Quiet {
 		logging.SetQuiet(true)
 	}
 	var geometryLimiter *limit.Limiter
-	if config.BaseOptions.LimitTo != "" {
+	if baseOpts.LimitTo != "" {
 		var err error
 		step := log.StartStep("Reading limitto geometries")
 		geometryLimiter, err = limit.NewFromGeoJSON(
-			config.BaseOptions.LimitTo,
-			config.BaseOptions.LimitToCacheBuffer,
-			config.BaseOptions.Srid,
+			baseOpts.LimitTo,
+			baseOpts.LimitToCacheBuffer,
+			baseOpts.Srid,
 		)
 		if err != nil {
 			log.Fatal(err)
 		}
 		log.StopStep(step)
 	}
-	osmCache := cache.NewOSMCache(config.BaseOptions.CacheDir)
+	osmCache := cache.NewOSMCache(baseOpts.CacheDir)
 	err := osmCache.Open()
 	if err != nil {
 		log.Fatal("osm cache: ", err)
 	}
 	defer osmCache.Close()
-	diffCache := cache.NewDiffCache(config.BaseOptions.CacheDir)
+	diffCache := cache.NewDiffCache(baseOpts.CacheDir)
 	err = diffCache.Open()
 	if err != nil {
 		log.Fatal("diff cache: ", err)
@@ -59,8 +59,8 @@ func Diff() {
 	var exp expire.Expireor
-	if config.BaseOptions.ExpireTilesDir != "" {
-		tileexpire := expire.NewTileList(config.BaseOptions.ExpireTilesZoom, config.BaseOptions.ExpireTilesDir)
+	if baseOpts.ExpireTilesDir != "" {
+		tileexpire := expire.NewTileList(baseOpts.ExpireTilesZoom, baseOpts.ExpireTilesDir)
 		exp = tileexpire
 		defer func() {
 			if err := tileexpire.Flush(); err != nil {
@@ -69,8 +69,8 @@ func Diff() {
 		}()
 	}
-	for _, oscFile := range config.DiffFlags.Args() {
-		err := Update(oscFile, geometryLimiter, exp, osmCache, diffCache, false)
+	for _, oscFile := range files {
+		err := Update(baseOpts, oscFile, geometryLimiter, exp, osmCache, diffCache, false)
 		if err != nil {
 			osmCache.Close()
 			diffCache.Close()
@@ -82,12 +82,20 @@ func Diff() {
 	diffCache.Close()
 }
-func Update(oscFile string, geometryLimiter *limit.Limiter, expireor expire.Expireor, osmCache *cache.OSMCache, diffCache *cache.DiffCache, force bool) error {
+func Update(
+	baseOpts config.Base,
+	oscFile string,
+	geometryLimiter *limit.Limiter,
+	expireor expire.Expireor,
+	osmCache *cache.OSMCache,
+	diffCache *cache.DiffCache,
+	force bool,
+) error {
 	state, err := diffstate.FromOscGz(oscFile)
 	if err != nil {
 		return err
 	}
-	lastState, err := diffstate.ParseLastState(config.BaseOptions.DiffDir)
+	lastState, err := diffstate.ParseLastState(baseOpts.DiffDir)
 	if err != nil {
 		log.Warn(err)
 	}
@@ -106,18 +114,18 @@ func Update(oscFile string, geometryLimiter *limit.Limiter, expireor expire.Expi
 		return err
 	}
-	tagmapping, err := mapping.FromFile(config.BaseOptions.MappingFile)
+	tagmapping, err := mapping.FromFile(baseOpts.MappingFile)
 	if err != nil {
 		return err
 	}
 	dbConf := database.Config{
-		ConnectionParams: config.BaseOptions.Connection,
-		Srid:             config.BaseOptions.Srid,
+		ConnectionParams: baseOpts.Connection,
+		Srid:             baseOpts.Srid,
 		// we apply diff imports on the Production schema
-		ImportSchema:     config.BaseOptions.Schemas.Production,
-		ProductionSchema: config.BaseOptions.Schemas.Production,
-		BackupSchema:     config.BaseOptions.Schemas.Backup,
+		ImportSchema:     baseOpts.Schemas.Production,
+		ProductionSchema: baseOpts.Schemas.Production,
+		BackupSchema:     baseOpts.Schemas.Backup,
 	}
 	db, err := database.Open(dbConf, &tagmapping.Conf)
 	if err != nil {
@@ -170,7 +178,7 @@ func Update(oscFile string, geometryLimiter *limit.Limiter, expireor expire.Expi
 		tagmapping.PolygonMatcher,
 		tagmapping.RelationMatcher,
 		tagmapping.RelationMemberMatcher,
-		config.BaseOptions.Srid)
+		baseOpts.Srid)
 	relWriter.SetLimiter(geometryLimiter)
 	relWriter.SetExpireor(expireor)
 	relWriter.Start()
@@ -181,7 +189,7 @@ func Update(oscFile string, geometryLimiter *limit.Limiter, expireor expire.Expi
 		progress,
 		tagmapping.PolygonMatcher,
 		tagmapping.LineStringMatcher,
-		config.BaseOptions.Srid)
+		baseOpts.Srid)
 	wayWriter.SetLimiter(geometryLimiter)
 	wayWriter.SetExpireor(expireor)
 	wayWriter.Start()
@@ -189,7 +197,7 @@ func Update(oscFile string, geometryLimiter *limit.Limiter, expireor expire.Expi
 	nodeWriter := writer.NewNodeWriter(osmCache, nodes, db,
 		progress,
 		tagmapping.PointMatcher,
-		config.BaseOptions.Srid)
+		baseOpts.Srid)
 	nodeWriter.SetLimiter(geometryLimiter)
 	nodeWriter.SetExpireor(expireor)
 	nodeWriter.Start()
@@ -413,7 +421,7 @@ func Update(oscFile string, geometryLimiter *limit.Limiter, expireor expire.Expi
 	if lastState != nil {
 		state.Url = lastState.Url
 	}
-	err = diffstate.WriteLastState(config.BaseOptions.DiffDir, state)
+	err = diffstate.WriteLastState(baseOpts.DiffDir, state)
 	if err != nil {
 		log.Warn(err) // warn only
 	}


@@ -18,19 +18,19 @@ import (
 var logger = logging.NewLogger("")
-func Run() {
-	if config.BaseOptions.Quiet {
+func Run(baseOpts config.Base) {
+	if baseOpts.Quiet {
 		logging.SetQuiet(true)
 	}
 	var geometryLimiter *limit.Limiter
-	if config.BaseOptions.LimitTo != "" {
+	if baseOpts.LimitTo != "" {
 		var err error
 		step := logger.StartStep("Reading limitto geometries")
 		geometryLimiter, err = limit.NewFromGeoJSON(
-			config.BaseOptions.LimitTo,
-			config.BaseOptions.LimitToCacheBuffer,
-			config.BaseOptions.Srid,
+			baseOpts.LimitTo,
+			baseOpts.LimitToCacheBuffer,
+			baseOpts.Srid,
 		)
 		if err != nil {
 			logger.Fatal(err)
@@ -38,11 +38,11 @@ func Run() {
 		logger.StopStep(step)
 	}
-	s, err := state.ParseLastState(config.BaseOptions.DiffDir)
+	s, err := state.ParseLastState(baseOpts.DiffDir)
 	if err != nil {
 		log.Fatal("unable to read last.state.txt", err)
 	}
-	replicationUrl := config.BaseOptions.ReplicationUrl
+	replicationUrl := baseOpts.ReplicationUrl
 	if replicationUrl == "" {
 		replicationUrl = s.Url
 	}
@@ -51,24 +51,24 @@ func Run() {
 			"or replication_url in -config file")
 	}
 	logger.Print("Replication URL: " + replicationUrl)
-	logger.Print("Replication interval: ", config.BaseOptions.ReplicationInterval)
+	logger.Print("Replication interval: ", baseOpts.ReplicationInterval)
 	downloader := replication.NewDiffDownloader(
-		config.BaseOptions.DiffDir,
+		baseOpts.DiffDir,
 		replicationUrl,
 		s.Sequence,
-		config.BaseOptions.ReplicationInterval,
+		baseOpts.ReplicationInterval,
 	)
 	nextSeq := downloader.Sequences()
-	osmCache := cache.NewOSMCache(config.BaseOptions.CacheDir)
+	osmCache := cache.NewOSMCache(baseOpts.CacheDir)
 	err = osmCache.Open()
 	if err != nil {
 		logger.Fatal("osm cache: ", err)
 	}
 	defer osmCache.Close()
-	diffCache := cache.NewDiffCache(config.BaseOptions.CacheDir)
+	diffCache := cache.NewDiffCache(baseOpts.CacheDir)
 	err = diffCache.Open()
 	if err != nil {
 		logger.Fatal("diff cache: ", err)
@@ -81,8 +81,8 @@ func Run() {
 	var tilelist *expire.TileList
 	var lastTlFlush = time.Now()
 	var tileExpireor expire.Expireor
-	if config.BaseOptions.ExpireTilesDir != "" {
-		tilelist = expire.NewTileList(config.BaseOptions.ExpireTilesZoom, config.BaseOptions.ExpireTilesDir)
+	if baseOpts.ExpireTilesDir != "" {
+		tilelist = expire.NewTileList(baseOpts.ExpireTilesZoom, baseOpts.ExpireTilesDir)
 		tileExpireor = tilelist
 	}
@@ -113,7 +113,7 @@ func Run() {
 	for {
 		p := logger.StartStep(fmt.Sprintf("importing #%d till %s", seqId, seqTime))
-		err := Update(fname, geometryLimiter, tileExpireor, osmCache, diffCache, false)
+		err := Update(baseOpts, fname, geometryLimiter, tileExpireor, osmCache, diffCache, false)
 		osmCache.Coords.Flush()
 		diffCache.Flush()