module Test.Values.Timeline exposing (..)

import Expect
import Fuzz exposing (Fuzzer)
import Internal.Filter.Timeline as Filter exposing (Filter)
import Internal.Tools.Json as Json
import Internal.Values.Timeline as Timeline exposing (Batch, Timeline)
import Json.Decode as D
import Test exposing (..)
import Test.Filter.Timeline as TestFilter


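{-| Fuzzer that generates a random `Timeline` by folding generated batches into
`Timeline.empty`. Each batch is chained onto the previous one by reusing the
previous `end` token as its `start`, either under one shared filter or under
`Filter.and` of the shared filter and the batch's own filter.
-}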
fuzzer : Fuzzer Timeline
fuzzer =
    TestFilter.fuzzer
        |> Fuzz.andThen
            (\globalFilter ->
                Fuzz.oneOf
                    [ Fuzz.map2
                        (\start batches ->
                            List.foldl
                                (\b ( s, f ) ->
                                    ( b.end
                                    , f >> Timeline.insert { b | start = Just s, filter = globalFilter }
                                    )
                                )
                                ( start, identity )
                                batches
                                |> Tuple.second
                        )
                        Fuzz.string
                        (Fuzz.listOfLengthBetween 0 10 fuzzerBatch)
                    , Fuzz.map2
                        (\start batches ->
                            List.foldl
                                (\b ( s, f ) ->
                                    ( b.end
                                    , f >> Timeline.insert { b | start = Just s, filter = Filter.and globalFilter b.filter }
                                    )
                                )
                                ( start, identity )
                                batches
                                |> Tuple.second
                        )
                        Fuzz.string
                        (Fuzz.listOfLengthBetween 0 4 fuzzerBatch)
                    ]
                    |> Fuzz.listOfLengthBetween 0 10
                    |> Fuzz.map (List.foldl (<|) Timeline.empty)
            )


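{-| Fuzzer that generates an arbitrary `Batch`: a random list of events, a
filter, an optional start token, and an end token.
-}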
fuzzerBatch : Fuzzer Batch
fuzzerBatch =
    Fuzz.map4 Batch
        (Fuzz.list Fuzz.string)
        TestFilter.fuzzer
        (Fuzz.maybe Fuzz.string)
        Fuzz.string


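{-| Tests for the `Timeline` module: filtered lookups of the most recent
events, forks and gaps between tokens, JSON encoding + decoding, and timelines
whose tokens loop back on themselves.
-}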
suite : Test
suite =
    describe "Timeline"
        [ describe "most recent events with filters"
            [ fuzz TestFilter.fuzzer
                "Events are returned properly"
                (\filter ->
                    Timeline.empty
                        |> Timeline.insert
                            { events = [ "a", "b", "c" ]
                            , filter = filter
                            , start = Just "token_1"
                            , end = "token_2"
                            }
                        |> Timeline.insert
                            { events = [ "d", "e", "f" ]
                            , filter = filter
                            , start = Just "token_2"
                            , end = "token_3"
                            }
                        |> Timeline.mostRecentEventsFrom filter "token_3"
                        |> Expect.equal
                            [ [ "a", "b", "c", "d", "e", "f" ] ]
                )
            , fuzz2 TestFilter.fuzzer
                TestFilter.fuzzer
                "Sub-events get the same results"
                (\f1 f2 ->
                    let
                        subFilter =
                            Filter.and f1 f2
                    in
                    Timeline.empty
                        |> Timeline.insert
                            { events = [ "a", "b", "c" ]
                            , filter = f1
                            , start = Just "token_1"
                            , end = "token_2"
                            }
                        |> Timeline.insert
                            { events = [ "d", "e", "f" ]
                            , filter = f1
                            , start = Just "token_2"
                            , end = "token_3"
                            }
                        |> Timeline.mostRecentEventsFrom subFilter "token_3"
                        |> Expect.equal
                            [ [ "a", "b", "c", "d", "e", "f" ] ]
                )
            , fuzz2 TestFilter.fuzzer
                TestFilter.fuzzer
                "ONLY same result if sub-filter"
                (\f1 f2 ->
                    Timeline.empty
                        |> Timeline.insert
                            { events = [ "a", "b", "c" ]
                            , filter = f1
                            , start = Just "token_1"
                            , end = "token_2"
                            }
                        |> Timeline.insert
                            { events = [ "d", "e", "f" ]
                            , filter = f1
                            , start = Just "token_2"
                            , end = "token_3"
                            }
                        |> Timeline.mostRecentEventsFrom f2 "token_3"
                        |> (\events ->
                                Expect.equal
                                    (Filter.subsetOf f1 f2)
                                    (events == [ [ "a", "b", "c", "d", "e", "f" ] ])
                           )
                )
            ]
        , describe "Forks in the road"
            [ fuzz2 TestFilter.fuzzer
                TestFilter.fuzzer
                "Two options returned"
                (\f1 f2 ->
                    let
                        subFilter =
                            Filter.and f1 f2
                    in
                    Timeline.empty
                        |> Timeline.insert
                            { events = [ "a", "b", "c" ]
                            , filter = f1
                            , start = Just "token_1"
                            , end = "token_2"
                            }
                        |> Timeline.insert
                            { events = [ "d", "e", "f" ]
                            , filter = f2
                            , start = Just "token_3"
                            , end = "token_2"
                            }
                        |> Timeline.insert
                            { events = [ "g", "h", "i" ]
                            , filter = subFilter
                            , start = Just "token_2"
                            , end = "token_4"
                            }
                        |> Timeline.mostRecentEventsFrom subFilter "token_4"
                        |> Expect.equal
                            [ [ "a", "b", "c", "g", "h", "i" ]
                            , [ "d", "e", "f", "g", "h", "i" ]
                            ]
                )
            ]
        , describe "Gaps"
            [ fuzz TestFilter.fuzzer
                "Gaps leave behind old events"
                (\filter ->
                    Timeline.empty
                        |> Timeline.insert
                            { events = [ "a", "b", "c" ]
                            , filter = filter
                            , start = Just "token_1"
                            , end = "token_2"
                            }
                        |> Timeline.insert
                            { events = [ "d", "e", "f" ]
                            , filter = filter
                            , start = Just "token_3"
                            , end = "token_4"
                            }
                        |> Timeline.mostRecentEventsFrom filter "token_4"
                        |> Expect.equal [ [ "d", "e", "f" ] ]
                )
            , fuzz3 TestFilter.fuzzer (Fuzz.list Fuzz.string) (Fuzz.pair (Fuzz.list Fuzz.string) (Fuzz.list Fuzz.string))
                "Gaps can be bridged"
                (\filter l1 ( l2, l3 ) ->
                    Timeline.empty
                        |> Timeline.insert
                            { events = l1
                            , filter = filter
                            , start = Just "token_1"
                            , end = "token_2"
                            }
                        |> Timeline.insert
                            { events = l3
                            , filter = filter
                            , start = Just "token_3"
                            , end = "token_4"
                            }
                        |> Timeline.insert
                            { events = l2
                            , filter = filter
                            , start = Just "token_2"
                            , end = "token_3"
                            }
                        |> Timeline.mostRecentEventsFrom filter "token_4"
                        |> Expect.equal [ List.concat [ l1, l2, l3 ] ]
                )
            ]
        , describe "JSON"
            [ fuzz fuzzer "Encode + Decode gives same output"
                (\timeline ->
                    timeline
                        |> Json.encode Timeline.coder
                        |> D.decodeValue (Json.decode Timeline.coder)
                        |> Result.map Tuple.first
                        |> Result.map (Timeline.mostRecentEvents Filter.pass)
                        |> Expect.equal (Ok <| Timeline.mostRecentEvents Filter.pass timeline)
                )
            ]
        , describe "Weird loops"
            [ fuzz TestFilter.fuzzer "Weird loops stop looping"
                (\filter ->
                    Timeline.empty
                        |> Timeline.insert
                            { events = [ "a", "b", "c" ]
                            , filter = filter
                            , start = Just "token_1"
                            , end = "token_2"
                            }
                        |> Timeline.insert
                            { events = [ "d", "e", "f" ]
                            , filter = filter
                            , start = Just "token_2"
                            , end = "token_3"
                            }
                        |> Timeline.insert
                            { events = [ "g", "h", "i" ]
                            , filter = filter
                            , start = Just "token_3"
                            , end = "token_2"
                            }
                        |> Timeline.mostRecentEventsFrom filter "token_2"
                        |> Expect.equal
                            [ [ "a", "b", "c" ]
                            , [ "d", "e", "f", "g", "h", "i" ]
                            ]
                )
            ]
        ]