Verified Commit db87dae6 authored by 35V LG84

tep-1010: Txn Geo Location: T3DB records

Add T3DB records and a few extra tests.
Signed-off-by: 35V LG84 <[email protected]>
parent c53a1a49
......@@ -158,12 +158,17 @@ sealed abstract class TxnFilterRegex(regex: String) extends TxnFilter {
Seq(indent + target + ": " + "\"" + s"${regex}" + "\"")
}
}
/**
* Selects transaction if txn timestamp is on or after specified time.
*
* @param begin txn timestamp must be on or after this
*/
/**
* Select transaction if regular expression matches txn code.
*
* Used regular expression engine is java.util.regex.Pattern.
*
* @param regex to match txn code.
*/
final case class TxnFilterTxnCode(regex: String) extends TxnFilterRegex(regex) {
val target = "Txn Code"
}
/**
* Select transaction if regular expression matches txn description.
......@@ -177,17 +182,6 @@ final case class TxnFilterTxnDescription(regex: String) extends TxnFilterRegex(r
}
/**
* Select transaction if regular expression matches txn code.
*
* Used regular expression engine is java.util.regex.Pattern.
*
* @param regex to match txn code.
*/
final case class TxnFilterTxnCode(regex: String) extends TxnFilterRegex(regex) {
val target = "Txn Code"
}
/**
* Select transaction if txn UUID is same as specified uuid.
*
......
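The moved docstring above states that the txn code filter uses java.util.regex.Pattern as its regular expression engine. A minimal, hypothetical Scala sketch of that kind of matching is shown below; it is not Tackler's filter evaluation code, and full-match semantics (Matcher.matches) is an assumption.

import java.util.regex.Pattern

// Hypothetical sketch, not Tackler's filter evaluation: TxnFilterTxnCode above
// only carries the regex string; how it is applied is not part of this diff.
// Full-match semantics (Matcher.matches) is an assumption.
object TxnCodeRegexSketch {
  def codeMatches(regex: String, txnCode: String): Boolean =
    Pattern.compile(regex).matcher(txnCode).matches()

  def main(args: Array[String]): Unit = {
    println(codeMatches("#123.*", "#12345")) // true
    println(codeMatches("#123.*", "#999"))   // false
  }
}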
......@@ -215,13 +215,17 @@ class DirsuiteCoreTest extends DirSuiteLike {
class DirsuiteLocationTest extends DirSuiteLike {
val basedir = Paths.get("tests")
/*
runDirSuiteTestCases(basedir, Glob("location/ex/TacklerParseException-*.exec")) { args: Array[String] =>
assertThrows[TacklerParseException] {
TacklerCli.runExceptions(args)
}
}
runDirSuiteTestCases(basedir, Glob("location/ex/TacklerException-*.exec")) { args: Array[String] =>
assertThrows[TacklerException] {
TacklerCli.runExceptions(args)
}
}
*/
runDirSuiteTestCases(basedir, Glob("location/ok/*.exec")) { args: Array[String] =>
assertResult(TacklerCli.SUCCESS) {
......
......@@ -143,12 +143,12 @@ class TacklerParserLocationTest extends FunSpec {
(
"""
|2019-04-01
| # location: geo:-66.56,-180,0
| # location: geo:-66.56,-180.0,0
| e 1
| a
|
|""".stripMargin,
"geo:-66.56,-180,0",
"geo:-66.56,-180.0,0",
)
)
......@@ -170,6 +170,7 @@ class TacklerParserLocationTest extends FunSpec {
it("perr: detect invalid geo uris") {
val perrStrings: List[(String, String, String)] = List(
(
// perr: missing geo-uri
"""
|2019-05-01
| # location:
......
......@@ -18,8 +18,7 @@ package fi.e257.tackler.report
import io.circe.optics.JsonPath
import org.scalatest.FlatSpec
import fi.e257.tackler.api.{BalanceGroupReport, BalanceReport, RegisterReport}
import fi.e257.tackler.api.{BalanceGroupReport, BalanceReport, RegisterReport, TxnHeader}
import fi.e257.tackler.core.{GroupByIsoWeek, Settings}
import fi.e257.tackler.parser.TacklerTxns
......@@ -48,7 +47,7 @@ class ReportApiTest extends FlatSpec {
val _title = JsonPath.root.title.string
behavior of "Balance report"
behavior of "Balance report API"
val _accountTreeSum = JsonPath.root.balances.index(0).accountTreeSum.string
val _delta = JsonPath.root.deltas.index(0).delta.string
......@@ -87,7 +86,7 @@ class ReportApiTest extends FlatSpec {
}
behavior of "BalanceGroup report"
behavior of "BalanceGroup report API"
val _balgrp_title = JsonPath.root.groups.index(0).title.string
val _balgrp_accountTreeSum = JsonPath.root.groups.index(0).balances.index(0).accountTreeSum.string
val _balgrp_delta = JsonPath.root.groups.index(0).deltas.index(0).delta.string
......@@ -129,7 +128,7 @@ class ReportApiTest extends FlatSpec {
}
behavior of "Register report"
behavior of "Register report API"
val _reg_txn_idx1_desc = JsonPath.root.transactions.index(1).txn.description.string
/**
......@@ -163,4 +162,98 @@ class ReportApiTest extends FlatSpec {
val foo = report.as[RegisterReport]
assert(foo.right.toOption.map(_.title) === Some("Test-Register"))
}
/**
* test: 04d83aba-4d19-4add-bff4-b79180b8b726
*/
it must "metadata: uuid" in {
val uuidTxnStr =
"""
|2019-01-01 'uuid = 78436575-3613-483d-a7ed-d9917b1d5c80
| # uuid: 78436575-3613-483d-a7ed-d9917b1d5c80
| e 1
| a
|
|""".stripMargin
val uuidTxnData = tt.string2Txns(uuidTxnStr)
val regCfg = RegisterSettings(settings, Some("UUID"), None)
val reporter = new RegisterReporter(regCfg)
//
// Report -> JSON
//
val jsonRpt = reporter.jsonReport(uuidTxnData)
val _reg_txn_idx0_desc = JsonPath.root.transactions.index(0).txn.description.string
val _reg_txn_idx0_uuid = JsonPath.root.transactions.index(0).txn.uuid.string
assert(_reg_txn_idx0_desc.getOption(jsonRpt) === Some("uuid = 78436575-3613-483d-a7ed-d9917b1d5c80"))
assert(_reg_txn_idx0_uuid.getOption(jsonRpt) === Some("78436575-3613-483d-a7ed-d9917b1d5c80"))
//
// JSON -> Report
//
val jsonResult = jsonRpt.as[RegisterReport]
assert(jsonResult.isRight)
val rptFromJson = jsonResult.right.get
assert(rptFromJson.title === "UUID")
assert(rptFromJson.transactions.head.txn.description.get.toString === "uuid = 78436575-3613-483d-a7ed-d9917b1d5c80")
assert(rptFromJson.transactions.head.txn.uuid.get.toString === "78436575-3613-483d-a7ed-d9917b1d5c80")
}
/**
* test: f3409965-68ae-4964-a73b-e46e0a2d8304
*/
it must "metadata: location" in {
val geoTxnStr =
"""
|2019-02-02 'geo = geo:61,25.1,2
| # location: geo:61,25.1,2
| e 1
| a
|
|""".stripMargin
val geoTxnData = tt.string2Txns(geoTxnStr)
val regCfg = RegisterSettings(settings, Some("location"), None)
val reporter = new RegisterReporter(regCfg)
//
// Report -> JSON
//
val jsonRpt = reporter.jsonReport(geoTxnData)
val _reg_txn_idx0_desc = JsonPath.root.transactions.index(0).txn.description.string
val _reg_txn_idx0_location_lat = JsonPath.root.transactions.index(0).txn.location.lat.double
val _reg_txn_idx0_location_lon = JsonPath.root.transactions.index(0).txn.location.lon.double
val _reg_txn_idx0_location_alt = JsonPath.root.transactions.index(0).txn.location.alt.double
assert(_reg_txn_idx0_desc.getOption(jsonRpt) === Some("geo = geo:61,25.1,2"))
assert(_reg_txn_idx0_location_lat.getOption(jsonRpt) === Some(61))
assert(_reg_txn_idx0_location_lon.getOption(jsonRpt) === Some(25.1))
assert(_reg_txn_idx0_location_alt.getOption(jsonRpt) === Some(2))
//
// JSON -> Report
//
val jsonResult = jsonRpt.as[RegisterReport]
assert(jsonResult.isRight)
val rptFromJson = jsonResult.right.get
assert(rptFromJson.title === "location")
val txnHdr: TxnHeader = rptFromJson.transactions.head.txn
assert(txnHdr.description.get.toString === "geo = geo:61,25.1,2")
assert(txnHdr.location.get.lat === 61)
assert(txnHdr.location.get.lon === 25.1)
assert(txnHdr.location.get.alt === Some(2))
}
}
......@@ -15,6 +15,7 @@ do high level plans for new features and changes to Tackler.
* xref:./tep-1007.adoc[TEP-1007: Txn set checksum]
* xref:./tep-1008.adoc[TEP-1008: Numeric account names]
* xref:./tep-1009.adoc[TEP-1009: Txn header syntax]
* xref:./tep-1010.adoc[TEP-1010: Txn Geo Location support]
=== TEPs in implementation phase
......
......@@ -14,20 +14,23 @@ Only point is supported (not area)
=== Geo Filter
Bounding Box definition
The Bounding Box definition used is (lat, lon, alt) based, instead of an X, Y, Z bbox (lon, lat, alt).
This is compatible with OpenStreetMap's link:https://wiki.openstreetmap.org/wiki/Overpass_API[Overpass API]
[horizontal]
BBoxXYZ:: left,bottom [,depth]; right,top [,height];
left:: min longitude
bottom:: min latitude
depth:: min altitude
right:: max longitude
top:: max latitude
height:: max altitude
BBoxLatLonAlt:: south, west [,depth]; north, east [,height];
South:: min latitude
West:: min longitude
Depth:: min altitude
North:: max latitude
East:: max longitude
Height:: max altitude
If BBoxXYZ is missing Z-component, then following defaults are used:
If BBox is missing the Z-component, then the following defaults are used (see the sketch below):
Missing Z: z component is ignored
Missing Z-min:: All values below or at Z-max are included
Missing Z-max:: All values at Z-min or above are included
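The (lat, lon, alt) bounding box and the missing Z-component defaults above can be illustrated with a minimal Scala sketch. This is not Tackler's filter implementation; the class and method names are hypothetical, and letting a point without altitude pass a Z-limited box is an assumption.

// Hypothetical sketch of the BBoxLatLonAlt containment rule described above.
final case class BBoxLatLonAltSketch(
  south: BigDecimal, west: BigDecimal, depth: Option[BigDecimal],
  north: BigDecimal, east: BigDecimal, height: Option[BigDecimal]) {

  def contains(lat: BigDecimal, lon: BigDecimal, alt: Option[BigDecimal]): Boolean = {
    val latOk = south <= lat && lat <= north
    val lonOk = west <= lon && lon <= east
    val altOk = (depth, height) match {
      case (None, None)             => true                  // Missing Z: Z-component is ignored
      case (Some(zMin), None)       => alt.forall(zMin <= _) // Missing Z-max: Z-min or above
      case (None, Some(zMax))       => alt.forall(_ <= zMax) // Missing Z-min: Z-max or below
      case (Some(zMin), Some(zMax)) => alt.forall(a => zMin <= a && a <= zMax)
    }
    latOk && lonOk && altOk
  }
}

// e.g. with the geo:61,25.1,2 test point used in ReportApiTest:
// BBoxLatLonAltSketch(BigDecimal(60), BigDecimal(24), None, BigDecimal(70), BigDecimal(30), None)
//   .contains(BigDecimal(61), BigDecimal("25.1"), Some(BigDecimal(2)))  // true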
......@@ -162,13 +165,13 @@ Changes to identity export
== Documentation
* [ ] xref:./readme.adoc[]: Update TEP index
* [x] xref:./readme.adoc[]: Update TEP index
* [ ] xref:../../README.adoc[]: is it a new noteworthy feature?
* [ ] link:../../CHANGELOG[]: add new item
* [ ] Does it warrant own T3DB file?
** [ ] update xref:../../tests/tests.adoc[]
** [ ] update xref:../../tests/check-tests.sh[]
** [ ] Add new T3DB file xref:../../tests/tests-XXXX.yml[]
* [x] Does it warrant own T3DB file?
** [x] update xref:../../tests/tests.adoc[]
** [x] update xref:../../tests/check-tests.sh[]
** [x] Add new T3DB file xref:../../tests/tests-XXXX.yml[]
* [ ] User docs
** [ ] user manual
** [ ] examples
......@@ -188,52 +191,57 @@ Anything which wasn't implemented?
* [x] Changes to journal
** [x] identity to input test
* [x] API test
** [x] Server API (to JSON)
** [x] Client API (from JSON)
=== Geo URI
Normal, ok-case tests to validate functionality:
* [x] Parse tests
** [x] lat, lon
** [x] lat, lon, alt
*** [x] lat.deg, lon.deg, alt.deg
** [x] -lat, -lon, -alt
** [x] -lat.deg, -lon.deg, -alt.deg
* [X] Parse tests
** [X] lat, lon
** [X] lat, lon, alt
*** [X] lat.deg, lon.deg, alt.deg
** [X] -lat, -lon, -alt
** [X] -lat.deg, -lon.deg, -alt.deg
Various special values
* [x] Poles
** [x] lat: -90 or lat:90, lon:0
** [x] lat: -90 or lat:90, lon:-180 - 0
** [x] lat: -90 or lat:90, lon:180 - 0
* [x] lat:0, lon:0
* [x] lat:Y, lon:X, alt:-120
* [X] Poles
** [X] lat: -90 or lat:90, lon:0
** [X] lat: -90 or lat:90, lon:-180 - 0
** [X] lat: -90 or lat:90, lon:180 - 0
* [X] lat:0, lon:0
* [X] lat:Y, lon:X, alt:-120
Metadata tests
* [x] both `uuid` and `location`
** [x] different order
*** [x] `uuid`, `location`
*** [x] `location`, `uuid`
* [X] both `uuid` and `location`
** [X] different order
*** [X] `uuid`, `location`
*** [X] `location`, `uuid`
==== Errors
Various error cases:
* [x] e: comma (`,`) as decimal separator (especially for altitude (internally using NUMBER at the moment))
* [x] e: missing lat/lon
* [x] e: Values outside of specification
** [x] e: lat < -90 || lat > 90
** [x] e: lon < -180 || lon > 180
** [x] e: altitude < -6378137 m (WGS 84: Semi-major axis 6 378 137 m)
* [X] e: missing geo-uri
* [X] e: missing geo
* [X] e: comma (`,`) as decimal separator (especially for altitude (internally using NUMBER at the moment))
* [X] e: missing lat/lon
* [X] e: Values outside of specification (see the sketch after this list)
** [X] e: lat < -90 || lat > 90
** [X] e: lon < -180 || lon > 180
** [X] e: altitude < -6378137 m (WGS 84: Semi-major axis 6 378 137 m)
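A minimal sketch of the out-of-spec value checks listed above (latitude, longitude, and the WGS 84 based altitude floor). The object and method are hypothetical, not Tackler's parser code; only the limits themselves come from the list above.

// Hypothetical helper, not Tackler's parser: validates a parsed geo point
// against the limits listed above and reports the first violation.
object GeoValueChecksSketch {
  // WGS 84 semi-major axis is 6 378 137 m, hence the altitude floor below.
  val MinAlt: BigDecimal = BigDecimal(-6378137)

  def check(lat: BigDecimal, lon: BigDecimal, alt: Option[BigDecimal]): Either[String, (BigDecimal, BigDecimal, Option[BigDecimal])] = {
    if (lat < -90 || lat > 90) Left(s"out of spec lat: ${lat}")
    else if (lon < -180 || lon > 180) Left(s"out of spec lon: ${lon}")
    else if (alt.exists(_ < MinAlt)) Left(s"out of spec alt: ${alt.get}")
    else Right((lat, lon, alt))
  }
}

// e.g. GeoValueChecksSketch.check(BigDecimal("90.1"), BigDecimal(0), None) yields a Left (lat > 90).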
Metadata tests
* [x] multiple `location`
* [x] multiple `uuid`
* [x] both `uuid` and `location`
** [x] multiple `location`
** [x] multiple `uuid`
* [X] multiple `location`
* [X] multiple `uuid`
* [X] both `uuid` and `location`
** [X] multiple `location`
** [X] multiple `uuid`
=== Geo Filter
......@@ -278,7 +286,7 @@ Feature-id::
Feature-id::
* name: Geo URI
* name: Geo URI support
* parent: 415d0acb-8441-4dce-aa81-e99e5b2f2e49
* uuid: c7e45a7b-5295-4dbb-bcda-bdc0990b9e14
......
......@@ -29,8 +29,9 @@ t3db_06="$db_dir/tests-1006.yml"
t3db_07="$db_dir/tests-1007.yml"
t3db_08="$db_dir/tests-1008.yml"
t3db_09="$db_dir/tests-1009.yml"
t3db_10="$db_dir/tests-1010.yml"
T3DBs="$t3db_00 $t3db_01 $t3db_02 $t3db_04 $t3db_05 $t3db_06 $t3db_07 $t3db_08 $t3db_09"
T3DBs="$t3db_00 $t3db_01 $t3db_02 $t3db_04 $t3db_05 $t3db_06 $t3db_07 $t3db_08 $t3db_09 $t3db_10"
get_t3db_content () {
......
# vim: tabstop=2 shiftwidth=2 softtabstop=2 smarttab expandtab autoindent
tackler {
core {
basedir = ex/
input.fs.dir = "not-found/"
input.fs.glob = "**.not-found"
reporting {
formats = [ "txt", "json" ]
reports = [ "register" ]
exports = [ "identity" ]
}
include "ok-accounts.conf"
}
}
# format: exec
# test:uuid: d948d0cd-d06c-4772-be5c-46e8875e4910
# desc: cli: error propagation in case of invalid value
exec:--cfg;tests/location/ex.conf;--input.file;values.txn;
# format: exec
# test:uuid: f1951382-c746-4be0-b367-f903c8c9fb18
# desc: cli: error propagation in case of format error
exec:--cfg;tests/location/ex.conf;--input.file;format.txn;
2019-05-01
# location: geo:0
e 1
a
2019-05-01
# location: geo:90.1,0
e 1
a
......@@ -200,5 +200,13 @@ features:
- desc: deserialize by Client API
references:
- ref: register
- test:
id: 04d83aba-4d19-4add-bff4-b79180b8b726
name: ReportApiTest
references:
- ref: register
descriptions:
- desc: "Serialize UUID to register report JSON"
- desc: "Deserialize UUID from Register report JSON"
### JSON output END
......@@ -77,13 +77,13 @@ features:
errors:
- error:
id: 49f73bec-afd9-4bef-bf5b-f9439ab2ea47
name: "TacklerParserMetadataTest"
name: "TacklerParserUUIDTest"
descriptions:
- desc: "invalid metadata constructs"
- desc: "format version v1 metadata uuid"
operations:
- test:
id: 546e4368-dcfa-44d5-a21d-13f3b8bf51b6
name: "TacklerParserMetadataTest"
name: "TacklerParserUUIDTest"
descriptions:
- desc: "valid metadata constructs"
features:
- feature:
id: 415d0acb-8441-4dce-aa81-e99e5b2f2e49
subject: Transaction Geo Location
- feature:
id: c7e45a7b-5295-4dbb-bcda-bdc0990b9e14
parent: 415d0acb-8441-4dce-aa81-e99e5b2f2e49
subject: Geo URI support
tests:
errors:
- error:
id: b88d6733-2acf-4021-a3d7-deaf58b518a6
name: TacklerParserMetadataTests
descriptions:
- desc: "rejects invalid metadata constructions"
- desc: "metadata must be first"
- desc: "no comments between metadata"
- desc: "multiple uuid"
- desc: "multiple location"
- desc: "both: uuid and location, multiple locations"
- desc: "both: uuid and location, multiple uuids"
- error:
id: c8e7cdf6-3b30-476c-84f0-f5a19812cd28
name: TacklerParserLocationTest
descriptions:
- desc: "perr: detect invalid geo uris"
- desc: "perr: missing geo-uri"
- desc: "perr: missing geo"
- desc: "perr: decimal ','"
- desc: "perr: missing lat/lon"
- error:
id: fc711c0d-2820-4f72-8b4c-1219ef578363
name: TacklerParserLocationTest
descriptions:
- desc: "detect semantically invalid geo uris"
- desc: "out of spec lat < -90 || 90 < lat"
- desc: "out of spec lon < -180 || 180 < lat"
- desc: "out of spec: alt < -6378137"
- error:
id: f1951382-c746-4be0-b367-f903c8c9fb18
name: tests/location/ex/TacklerParseException-format.exec
descriptions:
- desc: "cli: error propagation in case of format error"
- error:
id: d948d0cd-d06c-4772-be5c-46e8875e4910
name: tests/location/ex/TacklerException-values.exec
descriptions:
- desc: "cli: error propagation in case of invalid value"
reports:
registry:
- test:
refid: 8adbd77d-dd49-41a7-9412-fa9189ce3db6
identity:
- test:
refid: 8adbd77d-dd49-41a7-9412-fa9189ce3db6
operations:
- test:
id: 5bb95c2e-2fad-4584-9380-e6cafe732cf6
name: TacklerParserMetadataTests
descriptions:
- desc: "accepts multiple metadata items"
- desc: "uuid and location"
- desc: "location and uuid"
- test:
id: bc98cc89-d3b2-468d-9508-8d7a55924178
name: TacklerParserLocationTest
descriptions:
- desc: "various valid geo uris"
- desc: "decimal: lat, -lat"
- desc: "decimal: lon, -lon"
- desc: "decimal: alt, -alt"
- desc: "int: lat, -lat"
- desc: "int: lon, -lon"
- desc: "int: alt, -alt"
- desc: "lat: -90 or 90"
- desc: "lon: -180 or 180"
- desc: "lat: 0 or lon: 0"
- desc: "negative altitude"
- test:
id: f3409965-68ae-4964-a73b-e46e0a2d8304
name: ReportApiTest
references:
- ref: register
descriptions:
- desc: "Serialize location to register report JSON"
- desc: "Deserialize location from Register report JSON"
- test:
id: 8adbd77d-dd49-41a7-9412-fa9189ce3db6
name: tests/location/ok/basic-01.exec
descriptions:
- desc: "int"
- desc: "decimal"
- desc: "preserve precision"
- desc: "neg / pos value printing"
- desc: "location + uuid"
- desc: "uuid + location"
references:
- ref: identity
formats: txn
- ref: register
formats: txt, json
- test:
id: bb9cee1f-a0e6-45fc-9815-9ad9875e4bd4
name: tests/location/ok/identity-01.exec
desc: "identity-to-input test for location"
references:
- ref: identity
formats: txn
- ref: register
formats: txt, json
- feature:
id: cfa92a0d-a8af-4fb5-a3c6-723029febc5a
parent: 415d0acb-8441-4dce-aa81-e99e5b2f2e49
subject: Geo Filter
#features:
# - feature:
# id:
......
......@@ -65,6 +65,15 @@ link:./tests-1008.yml[T3DB for TEP-1008]::
* Account names
* Numerical Account names
link:./tests-1009.yml[T3DB for TEP-1009]::
* Txn Header Syntax (v2)
link:./tests-1010.yml[T3DB for TEP-1010]::
* Txn Geo Location
* Geo Location Filter
link:./tests-tmpl.yml[Template for T3DB entry]
......