diff --git a/.gitignore b/.gitignore new file mode 100644 index 0000000..afd75ba --- /dev/null +++ b/.gitignore @@ -0,0 +1,2 @@ +import/ +.swp diff --git a/Dockerfile.pskreporter_exporter b/Dockerfile.pskreporter_exporter index f2623ac..d0eca41 100644 --- a/Dockerfile.pskreporter_exporter +++ b/Dockerfile.pskreporter_exporter @@ -1,8 +1,8 @@ from golang:1.15.0 -RUN mkdir /pskreporter_exporter -ADD . /pskreporter_exporter -WORKDIR /pskreporter_exporter/cmd/pskreporter_exporter +RUN mkdir /pskreporter-exporter +ADD . /pskreporter-exporter +WORKDIR /pskreporter-exporter/cmd/pskreporter-exporter RUN go build -CMD ["/pskreporter_exporter/cmd/pskreporter_exporter/pskreporter_exporter"] +CMD ["/pskreporter-exporter/cmd/pskreporter-exporter/pskreporter-exporter"] diff --git a/Dockerfile.wsjtx_exporter b/Dockerfile.wsjtx_exporter index 2e9204f..8d7fb70 100644 --- a/Dockerfile.wsjtx_exporter +++ b/Dockerfile.wsjtx_exporter @@ -1,8 +1,8 @@ from golang:1.15.0 -RUN mkdir /wsjtx_exporter -ADD . /wsjtx_exporter -WORKDIR /wsjtx_exporter/cmd/wsjtx_exporter +RUN mkdir /wsjtx-exporter +ADD . /wsjtx-exporter +WORKDIR /wsjtx-exporter/cmd/wsjtx-exporter RUN go build -CMD ["/wsjtx_exporter/cmd/wsjtx_exporter/wsjtx_exporter"] +CMD ["/wsjtx-exporter/cmd/wsjtx-exporter/wsjtx-exporter"] diff --git a/Readme.md b/Readme.md index 2ad0f3e..19915ec 100644 --- a/Readme.md +++ b/Readme.md @@ -1,6 +1,6 @@ # what is it? -FIXME SCREENSHOT!!!!!!!!!!!!!!!!!!!!!!!!! +![alt text](screenshot.png "Logo Title Text 1") * a set of tools to export your personal WSJT-X * live reception data into prometheus or mysql @@ -34,10 +34,10 @@ have fun! ## tooling overview -* **pskreporter_exporter** +* **pskreporter-exporter** * polls pskreporter.info for your callsign * supports prometheus and mysql -* **wsjtx_exporter** +* **wsjtx-exporter** * follows live traffice in ALL.txt * supports prometheus and mysql * **alltxt2csv** @@ -72,7 +72,7 @@ show pro/con overview: both allow distributed setups with multiple wsjtx instances submitting their data to a central prometheus or mysql service. you can as well run both in parallel and use prometheus for a live overview and mysql for historical evaluations. -### pskreporter_exporter vs other access/polling of pskreporter like GridTracker +### pskreporter-exporter vs other access/polling of pskreporter like GridTracker ### can it read my whole ALL.txt since from the beginning? @@ -109,11 +109,11 @@ choose a bigger interval ### how long does it take to import my data into mysql? -* my ALL.TXT (new format start july 2019) contains ~ 13.7 mio lines and has ~ 850M. -* converting to csv takes ~ 40min on i7-4750HQ (2015) and the result has ~ 1.2G. -* currently this uses only one core, so there is a lot of room for optimization. -* importing the csv to mysql takes ~ 3.5min. -* querying the whole time (~ 1.5 years) in grafana takes some seconds. +* my ALL.TXT (new format start july 2019) contains ~ 13.7 mio lines and has ~ 850M +* converting to csv takes ~ 14min on i7-4750HQ (2015) and the result has ~ 1.2G + * currently this is done using another module which uses regular expressions which is not optimial for this use case +* importing the csv to mysql takes ~ 3.5min +* querying the whole time (~ 1.5 years) in grafana takes some seconds ### does this need a lot of ressource on my machine? @@ -133,8 +133,8 @@ go get github.com/denzs/wsjtx_dashboards build docker containers: ``` -docker build Dockerfile.wsjtx_exporter . -docker build Dockerfile.pskreporter_exporter . +docker build Dockerfile.wsjtx-exporter . 
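+# note: in this diff the Dockerfiles are still named Dockerfile.wsjtx_exporter and
+# Dockerfile.pskreporter_exporter (underscores, not hyphens); with a non-default
+# Dockerfile name, docker build also needs -f and usually a tag, e.g.
+# (the image tag below is only an example):
+# docker build -f Dockerfile.wsjtx_exporter -t wsjtx-exporter .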
+docker build Dockerfile.pskreporter-exporter . ``` ### to be done... diff --git a/cmd/alltxt2csv/main.go b/cmd/alltxt2csv/main.go index 0b5a9a8..334f2bd 100644 --- a/cmd/alltxt2csv/main.go +++ b/cmd/alltxt2csv/main.go @@ -1,17 +1,18 @@ package main import ( - "fmt" - "github.com/jnovack/flag" - log "github.com/sirupsen/logrus" + "fmt" + "github.com/jnovack/flag" + log "github.com/sirupsen/logrus" // "strings" // "strconv" // "time" - "os" - "bufio" + "os" + "bufio" + "runtime" // "github.com/mmcloughlin/geohash" // "github.com/tzneal/ham-go/dxcc" - "github.com/denzs/wsjtx_dashboards/shared/wsjtx" + "github.com/denzs/wsjtx_dashboards/shared/wsjtx" ) var station string @@ -49,6 +50,48 @@ func init() { } } +func eatline(lines chan string, results chan wsjtx.Result) { + for { + select { + case line := <- lines : + result, parsed := wsjtx.ScanLine(line) + if parsed { + results <- result + } + } + } + return +} + +func eatfile(results chan wsjtx.Result) { + log.Info("starting eating file, please wait..") + + filein, err := os.Open(pathin) + if err != nil { + log.Fatal(err) + } + scanner := bufio.NewScanner(filein) + + lines := make(chan string,runtime.NumCPU()) + + for w := 0; w <= runtime.NumCPU(); w++ { + go eatline(lines, results) + } + + i := 0 + for scanner.Scan() { + i++ + if i % 1000000 == 0 { + log.Infof("%d lines parsed..", i) + } + lines <- scanner.Text() + } + + filein.Close() + log.Info("done.. eatfile") + return +} + func main(){ _ , err := os.Stat(pathout) if !os.IsNotExist(err) { @@ -59,33 +102,24 @@ func main(){ if err != nil { log.Fatal(err) } - writer := bufio.NewWriter(fileout) - filein, err := os.Open(pathin) - if err != nil { - log.Fatal(err) - } - lines := bufio.NewScanner(filein) + writer := bufio.NewWriter(fileout) + + results := make(chan wsjtx.Result,runtime.NumCPU()) - counter := 0 - for lines.Scan() { - result, parsed := wsjtx.ScanLine(lines.Text()) - if !parsed { - continue - } + go eatfile(results) - counter++ - if counter % 1000000 == 0 { - log.Infof("%d lines parsed..", counter) - } - _, err := writer.WriteString(fmt.Sprintf("\"%s\",\"%s\",\"%s\",\"%s\",\"%s\",\"%s\",\"%s\",\"%s\",\"%d\",\"%d\",\n", result.Timestamp.Format("2006-01-02 15:04:05"), station, result.Call, result.Band, result.Ent.Continent, result.Mode, result.Ent.Entity, result.GeoHash, result.Signal, result.Rx)) - if err != nil { - log.Warn(err) + for { + select { + case result := <- results : + _, err := writer.WriteString(fmt.Sprintf("\"%s\",\"%s\",\"%s\",\"%s\",\"%s\",\"%s\",\"%s\",\"%s\",\"%d\",\"%d\",\n", result.Timestamp.Format("2006-01-02 15:04:05"), station, result.Call, result.Band, result.Ent.Continent, result.Mode, result.Ent.Entity, result.GeoHash, result.Signal, result.Rx)) + if err != nil { + log.Warn(err) + } } } - writer.Flush() fileout.Close() - filein.Close() - log.Info("done..") + + log.Info("done.. 
main") } diff --git a/cmd/pskreporter_exporter/main.go b/cmd/pskreporter-exporter/main.go similarity index 98% rename from cmd/pskreporter_exporter/main.go rename to cmd/pskreporter-exporter/main.go index 5c1bf03..08492da 100644 --- a/cmd/pskreporter_exporter/main.go +++ b/cmd/pskreporter-exporter/main.go @@ -39,7 +39,7 @@ func init() { flag.StringVar(&mysql_pass, "dbpass", "secret", "mysql password") flag.StringVar(&mysql_table, "dbtable", "pskreporter_stats", "mysql table name") flag.StringVar(&metricpath, "metricpath", "/metrics", "path for prometheus metric endpoint") - flag.IntVar(&port, "port", 2112, "port for prometheus metric endpoint") + flag.IntVar(&port, "port", 2113, "port for prometheus metric endpoint") flag.BoolVar(&useProm, "prometheus", false, "activate prometheus exporter") flag.BoolVar(&useMysql, "mysql", false, "activate mysql exporter") // flag.BoolVar(&promcalls, "promcalls", false, "activate prometheus callsign metrics") diff --git a/cmd/pskreporter_exporter/mysql.go b/cmd/pskreporter-exporter/mysql.go similarity index 100% rename from cmd/pskreporter_exporter/mysql.go rename to cmd/pskreporter-exporter/mysql.go diff --git a/cmd/pskreporter_exporter/prometheus.go b/cmd/pskreporter-exporter/prometheus.go similarity index 100% rename from cmd/pskreporter_exporter/prometheus.go rename to cmd/pskreporter-exporter/prometheus.go diff --git a/cmd/pskreporter_exporter/pskreporter.go b/cmd/pskreporter-exporter/pskreporter.go similarity index 100% rename from cmd/pskreporter_exporter/pskreporter.go rename to cmd/pskreporter-exporter/pskreporter.go diff --git a/cmd/pskreporter-exporter/pskreporter_exporter.exe b/cmd/pskreporter-exporter/pskreporter_exporter.exe new file mode 100755 index 0000000..61b97af Binary files /dev/null and b/cmd/pskreporter-exporter/pskreporter_exporter.exe differ diff --git a/cmd/wsjtx_exporter/main.go b/cmd/wsjtx-exporter/main.go similarity index 96% rename from cmd/wsjtx_exporter/main.go rename to cmd/wsjtx-exporter/main.go index c326055..a833a38 100644 --- a/cmd/wsjtx_exporter/main.go +++ b/cmd/wsjtx-exporter/main.go @@ -21,7 +21,7 @@ var mysql_user string var mysql_pass string var mysql_table string var port int -var promcalls bool +//var promcalls bool var trace bool var useProm bool var useMysql bool @@ -43,7 +43,7 @@ func init() { flag.IntVar(&port, "port", 2112, "port for prometheus metric endpoint") flag.BoolVar(&useProm, "prometheus", false, "activate prometheus exporter") flag.BoolVar(&useMysql, "mysql", false, "activate mysql exporter") - flag.BoolVar(&promcalls, "promcalls", false, "activate prometheus callsign metrics") +// flag.BoolVar(&promcalls, "promcalls", false, "activate prometheus callsign metrics") flag.BoolVar(&trace, "trace", false, "log almost everything") flag.Parse() diff --git a/cmd/wsjtx_exporter/mysql.go b/cmd/wsjtx-exporter/mysql.go similarity index 93% rename from cmd/wsjtx_exporter/mysql.go rename to cmd/wsjtx-exporter/mysql.go index 10205f1..91b6268 100644 --- a/cmd/wsjtx_exporter/mysql.go +++ b/cmd/wsjtx-exporter/mysql.go @@ -55,8 +55,8 @@ func init_db() { "cqzone INT NOT NULL," + "ituzone INT NOT NULL," + "rx TINYINT NOT NULL," + - "PRIMARY KEY UC_" + mysql_table + "(ts, station, callsign))," + - "INDEX idx_dxcc (dxcc);" + "PRIMARY KEY UC_" + mysql_table + "(ts, station, callsign)," + + "INDEX idx_dxcc (dxcc));" log.WithFields(log.Fields{"query":qry}).Debug("creating database..") _, err := db.Exec(qry) if err != nil { @@ -64,7 +64,7 @@ func init_db() { panic(err) } } else { - log.Info("found existing table") + 
log.Info("found existing table..") } } @@ -72,13 +72,13 @@ func init_db() { func dbConn() (db *sql.DB, err bool){ db, er := sql.Open("mysql", mysql_user+":"+mysql_pass+"@tcp("+mysql_host+")/"+mysql_db) if er != nil { - log.Debugf("db not reachable: %s",err) + log.Error("db not reachable: %s",err) return nil, true } pingerr := db.Ping() if pingerr != nil { - log.Debug("db not pingable..") + log.Error("db not pingable..") return nil, true } diff --git a/cmd/wsjtx-exporter/prometheus.go b/cmd/wsjtx-exporter/prometheus.go new file mode 100644 index 0000000..f201a9a --- /dev/null +++ b/cmd/wsjtx-exporter/prometheus.go @@ -0,0 +1,56 @@ +package main + +import ( + "fmt" + "github.com/denzs/wsjtx_dashboards/shared/wsjtx" + "github.com/prometheus/client_golang/prometheus" + "github.com/prometheus/client_golang/prometheus/promauto" + log "github.com/sirupsen/logrus" +) + +var wsjtx_received_total *prometheus.CounterVec +//var wsjtx_received_call_total *prometheus.CounterVec + +func handlePrometheus(result wsjtx.Result) { + incr_wsjtx_received_total(result) +// if promcalls { +// incr_wsjtx_received_callsigns_total(result) +// } +} + + +func incr_wsjtx_received_total(result wsjtx.Result) { + if(wsjtx_received_total == nil) { + log.Printf("init prometheus metric wsjtx_received_total..") + wsjtx_received_total = promauto.NewCounterVec(prometheus.CounterOpts{ Name: "wsjtx_received_total", Help: "DXCCs ordery by labels", + }, []string{"num","signal","name","continent","cqzone","ituzone","band","mode","geohash","station"},) + } + wsjtx_received_total.With(prometheus.Labels{"num":fmt.Sprintf("%d",result.Ent.DXCC),"signal":fmt.Sprintf("%d",result.Signal), + "band":result.Band, + "name":result.Ent.Entity, + "continent":result.Ent.Continent, + "cqzone":fmt.Sprintf("%d",result.Ent.CQZone), + "mode":result.Mode, + "geohash":result.GeoHash, + "station": station, + "ituzone":fmt.Sprintf("%d",result.Ent.ITUZone)}).Inc() +} + + +//func incr_wsjtx_received_callsigns_total(result wsjtx.Result) { +// if(wsjtx_received_call_total == nil) { +// log.Printf("inicreating wsjtx_received_call_total..") +// wsjtx_received_call_total = promauto.NewCounterVec(prometheus.CounterOpts{ Name: "wsjtx_received_call_total", Help: "DXCCs ordery by labels", +// }, []string{"num","signal","name","continent","cqzone","ituzone","band","call","mode","geohash","station"},) +// } +// wsjtx_received_call_total.With(prometheus.Labels{"num":fmt.Sprintf("%d",result.Ent.DXCC),"signal":fmt.Sprintf("%d",result.Signal), +// "band":result.Band, +// "name":result.Ent.Entity, +// "continent":result.Ent.Continent, +// "cqzone":fmt.Sprintf("%d",result.Ent.CQZone), +// "mode":result.Mode, +// "call":result.Call, +// "geohash":result.GeoHash, +// "station": station, +// "ituzone":fmt.Sprintf("%d",result.Ent.ITUZone)}).Inc() +//} diff --git a/cmd/wsjtx-exporter/wsjtx_exporter.exe b/cmd/wsjtx-exporter/wsjtx_exporter.exe new file mode 100755 index 0000000..15f070d Binary files /dev/null and b/cmd/wsjtx-exporter/wsjtx_exporter.exe differ diff --git a/cmd/wsjtx_exporter/prometheus.go b/cmd/wsjtx_exporter/prometheus.go deleted file mode 100644 index 0f00b63..0000000 --- a/cmd/wsjtx_exporter/prometheus.go +++ /dev/null @@ -1,56 +0,0 @@ -package main - -import ( - "fmt" - "github.com/denzs/wsjtx_dashboards/shared/wsjtx" - "github.com/prometheus/client_golang/prometheus" - "github.com/prometheus/client_golang/prometheus/promauto" - log "github.com/sirupsen/logrus" -) - -var wsjtx_received_total *prometheus.CounterVec -var wsjtx_received_call_total 
*prometheus.CounterVec - -func handlePrometheus(result wsjtx.Result) { - incr_wsjtx_received_total(result) - if promcalls { - incr_wsjtx_received_callsigns_total(result) - } -} - - -func incr_wsjtx_received_total(result wsjtx.Result) { - if(wsjtx_received_total == nil) { - log.Printf("creating wsjtx_received_total...") - wsjtx_received_total = promauto.NewCounterVec(prometheus.CounterOpts{ Name: "wsjtx_received_total", Help: "DXCCs ordery by labels", - }, []string{"num","signal","name","continent","cqzone","ituzone","band","mode","geohash","station"},) - } - wsjtx_received_total.With(prometheus.Labels{"num":fmt.Sprintf("%d",result.Ent.DXCC),"signal":fmt.Sprintf("%d",result.Signal), - "band":result.Band, - "name":result.Ent.Entity, - "continent":result.Ent.Continent, - "cqzone":fmt.Sprintf("%d",result.Ent.CQZone), - "mode":result.Mode, - "geohash":result.GeoHash, - "station": station, - "ituzone":fmt.Sprintf("%d",result.Ent.ITUZone)}).Inc() -} - - -func incr_wsjtx_received_callsigns_total(result wsjtx.Result) { - if(wsjtx_received_call_total == nil) { - log.Printf("creating wsjtx_received_call_total...") - wsjtx_received_call_total = promauto.NewCounterVec(prometheus.CounterOpts{ Name: "wsjtx_received_call_total", Help: "DXCCs ordery by labels", - }, []string{"num","signal","name","continent","cqzone","ituzone","band","call","mode","geohash","station"},) - } - wsjtx_received_call_total.With(prometheus.Labels{"num":fmt.Sprintf("%d",result.Ent.DXCC),"signal":fmt.Sprintf("%d",result.Signal), - "band":result.Band, - "name":result.Ent.Entity, - "continent":result.Ent.Continent, - "cqzone":fmt.Sprintf("%d",result.Ent.CQZone), - "mode":result.Mode, - "call":result.Call, - "geohash":result.GeoHash, - "station": station, - "ituzone":fmt.Sprintf("%d",result.Ent.ITUZone)}).Inc() -} diff --git a/doc/alltxt2csv.md b/doc/alltxt2csv.md new file mode 100644 index 0000000..38765ee --- /dev/null +++ b/doc/alltxt2csv.md @@ -0,0 +1,27 @@ +# alltxt2csv + +converts ALL.TXT to csv file which can then be nicely imported into mysql. + +parameters: +``` +Usage of ./alltxt2csv: + -in string + path to wsjt-x ALL.txt + -out string + path to csv outfile + -station string + your callsign or wsjtx instance identifier (default "localstation") + -trace + log every line... yes really ;) +``` +## converting ALL.TXT to csv + +## import of csv +``` +alltxt2csv -in ~/.local/share/WSJT-X/ALL.TXT -out ~/dev/wsjtx_dashboards/import/DL3SD.csv -station DL3SD +``` + +* prepare IMPORT.SQL + +docker exec -ti db /usr/bin/mysql --local-infile=1 -pverysecret digimode_stats -e "SET GLOBAL local_infile=1;" +docker exec -ti db /usr/bin/mysql --local-infile=1 -pverysecret digimode_stats -e "\. /wsjtx/import/DL3SD.SQL" diff --git a/doc/pskreporter-exporter.md b/doc/pskreporter-exporter.md new file mode 100644 index 0000000..2cb0ea1 --- /dev/null +++ b/doc/pskreporter-exporter.md @@ -0,0 +1,22 @@ +# pskreporter-exporter + +poll pskreporter.info every 5 minutes to stores the results into mysql and/or export for prometheus. 
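+
+a minimal example invocation (callsign and database credentials are placeholders, adapt them to your setup; the flags are the ones listed below):
+```
+pskreporter_exporter -station DL3SD -prometheus -mysql -dbhost db -dbuser wsjtx -dbpass secret
+```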
+ +parameter: +``` +Usage of go/bin/pskreporter_exporter: + -db="digimode_stats": db name + -dbhost="db": name/ip of mysql host + -dbpass="secret": mysql password + -dbtable="pskreporter_stats": mysql table name + -dbuser="wsjtx": mysql username + -debug=false: enable debug logging + -metricpath="/metrics": path for prometheus metric endpoint + -mysql=false: activate mysql exporter + -port=2113: port for prometheus metric endpoint + -prometheus=false: activate prometheus exporter + -station="": callsign to monitor on pskreporter + -trace=false: log almost everything +``` + +unsure about using in combination with gridtracker... diff --git a/doc/wsjtx-exporter.md b/doc/wsjtx-exporter.md new file mode 100644 index 0000000..2771f46 --- /dev/null +++ b/doc/wsjtx-exporter.md @@ -0,0 +1,56 @@ +# wsjtx-exporter + +follows WSJTX-X ALL.TXT file to store entries in mysql and export metrics for prometheus. + +parameters: +``` +Usage of go/bin/wsjtx-exporter: + -db string + db name (default "digimode_stats") + -dbhost string + name/ip of mysql host (default "db") + -dbpass string + mysql password (default "secret") + -dbtable string + mysql table name (default "wsjtx_all_txt") + -dbuser string + mysql username (default "wsjtx") + -metricpath string + path for prometheus metric endpoint (default "/metrics") + -mysql + activate mysql exporter + -pathin string + path to WSJT-X ALL.TXT (default "/wsjtx/ALL.TXT") + -port int + port for prometheus metric endpoint (default 2112) + -prometheus + activate prometheus exporter + -station string + your callsign or wsjtx instance identifier (default "localstation") + -trace + log almost everything +``` + +## systemd user unit for linux + +create ~/.config/systemd/user/wsjtx-exporter.service and adapt parameters to your needs! 
+ +~/.config/systemd/user/wsjtx-exporter.service: +``` +[Unit] +Description=WSJT-X 'ALL.TXT' prometheues exporter + +[Service] +Restart=always +ExecStart=%h/go/bin/wsjtx-exporter -mysql -prometheus -dbhost 10.0.73.1 -dbuser dl3sd -dbpass tester -station DL3SD -pathin %h/.local/share/WSJT-X/ALL.TXT -trace + +[Install] +WantedBy=default.target +``` + +activate: +``` +systemctl --user daemon-reload +systemctl --user enable wsjtx-exporter.service +systemctl --user start wsjtx-exporter.service +``` diff --git a/go.sum b/go.sum index d4ae24b..806960f 100644 --- a/go.sum +++ b/go.sum @@ -411,6 +411,7 @@ golang.org/x/xerrors v0.0.0-20191204190536-9bdfabe68543/go.mod h1:I/5z698sn9Ka8T google.golang.org/api v0.3.1/go.mod h1:6wY9I6uQWHQ8EM57III9mq/AjF+i8G65rmVagqKMtkk= google.golang.org/appengine v1.1.0/go.mod h1:EbEs0AVv82hx2wNQdGPgUI5lhzA/G0D9YwlJXL52JkM= google.golang.org/appengine v1.2.0/go.mod h1:xpcJRLb0r/rnEns0DIKYYv+WjYCduHsrkT7/EB5XEv4= +google.golang.org/appengine v1.4.0 h1:/wp5JvzpHIxhs/dumFmF7BXTf3Z+dd4uXta4kVyO508= google.golang.org/appengine v1.4.0/go.mod h1:xpcJRLb0r/rnEns0DIKYYv+WjYCduHsrkT7/EB5XEv4= google.golang.org/genproto v0.0.0-20180817151627-c66870c02cf8/go.mod h1:JiN7NxoALGmiZfu7CAH4rXhgtRTLTxftemlI0sWmxmc= google.golang.org/genproto v0.0.0-20190307195333-5fe7a883aa19/go.mod h1:VzzqZJRnGkLBvHegQrXjBqPurQTc5/KpmUdxsrq26oE= diff --git a/misc/IMPORT.SQL b/misc/import_csv.sql similarity index 79% rename from misc/IMPORT.SQL rename to misc/import_csv.sql index a7af1a4..50d3668 100644 --- a/misc/IMPORT.SQL +++ b/misc/import_csv.sql @@ -1,4 +1,4 @@ -LOAD DATA LOCAL INFILE '/wsjtx/SEBO.CSV' +LOAD DATA LOCAL INFILE '/wsjtx/DL3SD.CSV' INTO TABLE wsjtx_all_txt FIELDS TERMINATED BY ',' ENCLOSED BY '"' diff --git a/misc/pskreporter_stats.sql b/misc/pskreporter_stats.sql new file mode 100644 index 0000000..31200c5 --- /dev/null +++ b/misc/pskreporter_stats.sql @@ -0,0 +1,14 @@ +CREATE TABLE IF NOT EXISTS pskreporter_stats ( + ts timestamp NOT NULL, + station VARCHAR(16) NOT NULL, + callsign VARCHAR(16) NOT NULL, + band VARCHAR(10) NOT NULL, + continent VARCHAR(32) NOT NULL, + mode VARCHAR(16) NOT NULL, + dxcc VARCHAR(128) NOT NULL, + geohash VARCHAR(16) NOT NULL, + report TINYINT NOT NULL, + cqzone INT NOT NULL, + ituzone INT NOT NULL, + UNIQUE KEY UC_pskreporter_stats (ts, station, callsign) +); diff --git a/misc/wsjtx_all_txt.sql b/misc/wsjtx_all_txt.sql new file mode 100644 index 0000000..e1c7a6d --- /dev/null +++ b/misc/wsjtx_all_txt.sql @@ -0,0 +1,16 @@ +CREATE TABLE IF NOT EXISTS wsjtx_all_txt ( + ts timestamp NOT NULL, + station VARCHAR(16) NOT NULL, + callsign VARCHAR(16) NOT NULL, + band VARCHAR(10) NOT NULL, + continent VARCHAR(32) NOT NULL, + mode VARCHAR(16) NOT NULL, + dxcc VARCHAR(128) NOT NULL, + geohash VARCHAR(16) NOT NULL, + report TINYINT NOT NULL, + cqzone INT NOT NULL, + ituzone INT NOT NULL, + rx TINYINT NOT NULL, + PRIMARY KEY PK_wsjtx_all_txt (ts, station, callsign), + INDEX idx_dxcc (dxcc) +); diff --git a/shared/wsjtx/wsjtx.go b/shared/wsjtx/wsjtx.go index 3f4200e..b1052f4 100644 --- a/shared/wsjtx/wsjtx.go +++ b/shared/wsjtx/wsjtx.go @@ -153,7 +153,7 @@ func ScanLine(line string) (Result, bool) { "dxcc":result.Ent.DXCC, "continent":result.Ent.Continent, "band":result.Band, - "time":string(result.Timestamp.String()), + "time":result.Timestamp.String(), "mode":result.Mode, "geohash":result.GeoHash, "rx":result.Rx, diff --git a/todo.md b/todo.md new file mode 100644 index 0000000..4c8cceb --- /dev/null +++ b/todo.md @@ -0,0 +1,44 @@ +* wsjtx-exporter + * cqzone und 
ituzone not in db
+  * implement the -back parameter
+  * remove calls metric?
+  * https://prometheus.io/docs/practices/naming/#labels
+  * "cant reach database" happened on my machine
+  * address it ;)
+  * systemd user unit
+  * windows equivalent??
+  * trace: "sucessfully parsed.." logs: fields.time
+  * on a broken line:
+    Nov 26 20:10:47 sebo-OptiPlex-980 wsjtx_exporter[869]: goroutine 12 [running]:
+    Nov 26 20:10:47 sebo-OptiPlex-980 wsjtx_exporter[869]: github.com/denzs/wsjtx_dashboards/shared/wsjtx.ScanLine(0xc0000a4004, 0x3, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, ...)
+
+* alltxt2csv
+  * bring back the direct import feature..?
+
+* database
+  * probably more indices
+
+* take the CREATE TABLE out of the binaries?
+  * control should stay with the db admin
+
+* doc
+  * german docs..
+  * server and/or script/readme
+
+* fix dashboards
+  * how to handle refreshing the variables??
+
+* provide dashboards to grafana
+
+* prometheus metric + value + TIMESTAMP!!!!!!
+
+* vendoring
+
+* add howto for ubuntu/win10
+
+* push images to dockerhub
+
+* mail to PSKReporter
+
+* Query to bundle multiple callsigns?
+* How are the queries counted? rate per src ip or per query?