Compare commits

...

13 Commits

Author SHA1 Message Date
Bram f1dde4874b Merge Text code 2024-03-29 12:01:05 +01:00
Bram 5856084b45 Remove unused RationalOrder module 2024-03-29 11:53:24 +01:00
    Kill your darlings, I guess. :(
Bram db6573180b elm-format 2024-03-29 11:52:12 +01:00
Bram 3983ab0de2 Merge develop into 3-timeline 2024-03-29 11:29:48 +01:00
    Fixing merge conflict
Bram ccefa2ed9b Add documentation 2024-03-29 11:28:04 +01:00
Bram d41c31e8c1 Allow Timeline fuzzers to simulate /sync 2024-03-29 11:18:01 +01:00
Bram 792e60761a Add addSync to Timeline 2024-03-29 11:08:42 +01:00
Bram 33d98dd6ff Improve code readability 2024-03-29 10:51:10 +01:00
    Cleaning up unused imports + changing order of operations on a value that is 0 by default
Bram ed78695213 Add Timeline JSON coder 2024-03-29 10:46:02 +01:00
Bram 421e1f6ce7 Add Elm Timeline to elm.json 2024-03-25 08:52:55 +01:00
Bram 1940b1d51f Fix RationalOrder formatting 2024-02-15 13:20:25 +01:00
Bram 7acae258ed Complete Timeline by fixing bugs 2024-02-15 13:15:19 +01:00
Bram bbe1eeef12 Prepare develop for master 2024-02-02 12:15:50 +01:00
    elm-test --fuzz 10000 --seed 49678983951728
12 changed files with 990 additions and 507 deletions

View File

@ -3,13 +3,14 @@
"name": "noordstar/elm-matrix-sdk-beta",
"summary": "Matrix SDK for instant communication. Unstable beta version for testing only.",
"license": "EUPL-1.1",
"version": "2.1.1",
"version": "2.1.2",
"exposed-modules": [
"Internal.Config.Default",
"Internal.Config.Leaks",
"Internal.Config.Log",
"Internal.Config.Phantom",
"Internal.Config.Text",
"Internal.Filter.Timeline",
"Internal.Tools.DecodeExtra",
"Internal.Tools.EncodeExtra",
"Internal.Tools.Hashdict",
@ -23,10 +24,12 @@
"Internal.Values.Event",
"Internal.Values.Settings",
"Internal.Values.StateManager",
"Internal.Values.Timeline",
"Internal.Values.Vault",
"Matrix",
"Matrix.Event",
"Matrix.Settings"
"Matrix.Settings",
"Types"
],
"elm-version": "0.19.0 <= v < 0.20.0",
"dependencies": {

View File

@ -23,7 +23,7 @@ will assume until overriden by the user.
-}
currentVersion : String
currentVersion =
"beta 2.1.1"
"beta 2.1.2"
{-| The default device name that is being communicated with the Matrix API.

View File

@ -1,5 +1,5 @@
module Internal.Config.Text exposing
( docs, failures, fields
( docs, failures, fields, mappings, logs
, accessTokenFoundLocally, accessTokenExpired, accessTokenInvalid
, versionsFoundLocally, versionsReceived, versionsFailedToDecode
, unsupportedVersionForEndpoint
@ -27,7 +27,7 @@ You should only do this if you know what you're doing.
## Type documentation
@docs docs, failures, fields
@docs docs, failures, fields, mappings, logs
## API Authentication
@ -116,9 +116,14 @@ docs :
, envelope : TypeDocs
, event : TypeDocs
, hashdict : TypeDocs
, ibatch : TypeDocs
, iddict : TypeDocs
, itoken : TypeDocs
, mashdict : TypeDocs
, settings : TypeDocs
, stateManager : TypeDocs
, timeline : TypeDocs
, timelineFilter : TypeDocs
, unsigned : TypeDocs
}
docs =
@ -148,6 +153,24 @@ docs =
, "For example, the hashdict can store events and use their event id as their key."
]
}
, ibatch =
{ name = "IBatch"
, description =
[ "The internal batch tracks a patch of events on the Matrix timeline."
]
}
, iddict =
{ name = "Iddict"
, description =
[ "An iddict automatically handles creating appropriate keys by incrementally assiging a new key to new values."
]
}
, itoken =
{ name = "IToken"
, description =
[ "The IToken connects batches in the timeline and maintains relative order."
]
}
, mashdict =
{ name = "Mashdict"
, description =
@ -167,6 +190,18 @@ docs =
, "Instead of making the user loop through the room's timeline of events, the StateManager offers the user a dictionary-like experience to navigate through the Matrix room state."
]
}
, timeline =
{ name = "Timeline"
, description =
[ "The Timeline tracks events and orders them in a simple way for the user to view them."
]
}
, timelineFilter =
{ name = "Timeline Filter"
, description =
[ "The Timeline Filter allows the user to be very specific about which events they're interested in."
]
}
, unsigned =
{ name = "Unsigned Data"
, description =
@ -179,13 +214,14 @@ docs =
{-| Description of all edge cases where a JSON decoder can fail.
-}
failures : { hashdict : Desc, mashdict : Desc }
failures : { hashdict : Desc, listWithOne : String, mashdict : Desc }
failures =
{ hashdict =
[ "Not all values map to thir respected hash with the given hash function."
[ "Not all values map to their respected hash with the given hash function."
]
, listWithOne = "Expected at least one value in the list - zero found."
, mashdict =
[ "Not all values map to thir respected hash with the given hash function."
[ "Not all values map to their respected hash with the given hash function."
]
}
@ -218,11 +254,41 @@ fields :
, eventType : Desc
, unsigned : Desc
}
, ibatch :
{ end : Desc
, events : Desc
, filter : Desc
, start : Desc
}
, iddict :
{ cursor : Desc
, dict : Desc
}
, itoken :
{ behind : Desc
, ends : Desc
, inFrontOf : Desc
, name : Desc
, starts : Desc
}
, settings :
{ currentVersion : Desc
, deviceName : Desc
, syncTime : Desc
}
, timeline :
{ batches : Desc
, events : Desc
, filledBatches : Desc
, mostRecentBatch : Desc
, tokens : Desc
}
, timelineFilter :
{ senders : Desc
, sendersAllowOthers : Desc
, types : Desc
, typesAllowOthers : Desc
}
, unsigned :
{ age : Desc
, prevContent : Desc
@ -293,6 +359,45 @@ fields =
[ "Contains optional extra information about the event."
]
}
, ibatch =
{ end =
[ "Pointer to the token that ends the internal batch."
]
, events =
[ "List of event IDs contained within the internal batch."
]
, filter =
[ "Filter that indicates how strictly the homeserver has selected when resulting into the given list of events."
]
, start =
[ "Pointer to the token that starts the internal batch."
]
}
, iddict =
{ cursor =
[ "To ensure uniqueness of all keys and to prevent the usage of keys that were previously assigned to older values, the iddict tracks which is the smallest non-negative integer that hasn't been used yet."
]
, dict =
[ "Dictionary that contains all values stored in the iddict."
]
}
, itoken =
{ behind =
[ "This token is behind all tokens in this field."
]
, ends =
[ "This token is in front of the batches in this field."
]
, inFrontOf =
[ "This token is ahead of all tokens in this field."
]
, name =
[ "Opaque value provided by the homeserver."
]
, starts =
[ "This token is at the start of the batches in this field."
]
}
, settings =
{ currentVersion =
[ "Indicates the current version of the Elm SDK."
@ -304,6 +409,40 @@ fields =
[ "Indicates the frequency in miliseconds with which the Elm SDK should long-poll the /sync endpoint."
]
}
, timeline =
{ batches =
[ "Dictionary storing all event batches in the timeline."
]
, events =
[ "Mapping that allows us to quickly zoom in on an event."
]
, filledBatches =
[ "Counter that tracks how many batches are kept by the timeline."
, "Batches are only counted if they are filled by at least one event."
]
, mostRecentBatch =
[ "Tracks the most recent batch that was sent by the homeserver - usually through `/sync`"
]
, tokens =
[ "Index of all the tokens used to connect event batches on the timeline."
]
}
, timelineFilter =
{ senders =
[ "A list of senders that is considered an exception to the infinite pool of \"other\" users"
]
, sendersAllowOthers =
[ "Value that determines whether the infinite pool of others is included."
, "If False, only the users mentioned in `senders` are included. If True, then all users who aren't mentioned in `senders` are included."
]
, types =
[ "A list of event types that is considered an exception to the infinite pool of \"other\" event types."
]
, typesAllowOthers =
[ "Value that determines whether the infinite pool of others is included."
, "If False, only the event types mentioned in `types` are included. If True, then all users who aren't mentioned in `types` are included."
]
}
, unsigned =
{ age =
[ "The time in milliseconds that has elapsed since the event was sent. This field is generated by the local homeserver, and may be incorrect if the local time on at least one of the two servers is out of sync, which can cause the age to either be negative or greater than it actually is."
@ -347,6 +486,35 @@ leakingValueFound leaking_value =
"Found leaking value : " ++ leaking_value
{-| These logs might appear during a process where something unexpected has
happened. Most of these unexpected results are taken into account by the Elm SDK,
but they are logged so that the programmer can do something about it.
-}
logs : { keyIsNotAnInt : String -> String }
logs =
{ keyIsNotAnInt =
\key ->
String.concat
[ "Encountered a key `"
, key
, "` that cannot be converted to an Int"
]
}
{-| Function descriptions
-}
mappings : { itokenPTR : TypeDocs }
mappings =
{ itokenPTR =
{ name = "ITokenPTR init"
, description =
[ "Converts an optional string to an Itoken pointer."
]
}
}
{-| The Matrix homeserver can specify how it wishes to communicate, and the Elm
SDK aims to communicate accordingly. This may fail in some scenarios, however,
in which case it will throw this error.
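
As a point of reference (not part of the diff itself), here is a minimal sketch of how the new `logs` record is consumed elsewhere in this changeset, for instance by `Internal.Tools.Json.fastIntDict` when a dictionary key fails to parse as an integer; the module name `LogsExample` is invented for illustration.

module LogsExample exposing (warning)

import Internal.Config.Text as Text


-- Builds the warning text for a key that cannot be converted to an Int.
-- Given the definition of `logs` above, this evaluates to:
-- "Encountered a key `abc` that cannot be converted to an Int"
warning : String
warning =
    Text.logs.keyIsNotAnInt "abc"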

View File

@ -4,7 +4,7 @@ module Internal.Filter.Timeline exposing
, match, run
, and
, subsetOf
, encode, decoder
, coder, encode, decoder
)
{-|
@ -43,10 +43,12 @@ for interacting with the Matrix API.
## JSON coders
@docs encode, decoder
@docs coder, encode, decoder
-}
import Internal.Config.Text as Text
import Internal.Tools.Json as Json
import Json.Decode as D
import Json.Encode as E
import Set exposing (Set)
@ -160,43 +162,68 @@ and (Filter f1) (Filter f2) =
stdAnd
{-| Define how to encode and decode a Timeline Filter to and from a JSON value.
-}
coder : Json.Coder Filter
coder =
Json.object4
{ name = Text.docs.timelineFilter.name
, description = Text.docs.timelineFilter.description
, init =
\a b c d ->
Filter
{ senders = a
, sendersAllowOthers = b
, types = c
, typesAllowOthers = d
}
}
(Json.field.optional.withDefault
{ fieldName = "senders"
, toField = \(Filter f) -> f.senders
, description = Text.fields.timelineFilter.senders
, coder = Json.set Json.string
, default = ( Set.empty, [] )
, defaultToString = always "[]"
}
)
(Json.field.required
{ fieldName = "sendersAllowOthers"
, toField = \(Filter f) -> f.sendersAllowOthers
, description = Text.fields.timelineFilter.sendersAllowOthers
, coder = Json.bool
}
)
(Json.field.optional.withDefault
{ fieldName = "types"
, toField = \(Filter f) -> f.types
, description = Text.fields.timelineFilter.types
, coder = Json.set Json.string
, default = ( Set.empty, [] )
, defaultToString = always "[]"
}
)
(Json.field.required
{ fieldName = "typesAllowOthers"
, toField = \(Filter f) -> f.typesAllowOthers
, description = Text.fields.timelineFilter.typesAllowOthers
, coder = Json.bool
}
)
{-| Decode a Filter from a JSON value.
-}
decoder : D.Decoder Filter
decoder : Json.Decoder Filter
decoder =
D.map4
(\s sb t tb ->
Filter
{ senders = s
, sendersAllowOthers = sb
, types = t
, typesAllowOthers = tb
}
)
(D.string
|> D.list
|> D.map Set.fromList
|> D.field "senders"
)
(D.field "sendersAllowOthers" D.bool)
(D.string
|> D.list
|> D.map Set.fromList
|> D.field "types"
)
(D.field "typesAllowOthers" D.bool)
Json.decode coder
{-| Encode a Filter into a JSON value.
-}
encode : Filter -> E.Value
encode (Filter f) =
E.object
[ ( "senders", E.set E.string f.senders )
, ( "sendersAllowOthers", E.bool f.sendersAllowOthers )
, ( "types", E.set E.string f.types )
, ( "typesAllowOthers", E.bool f.typesAllowOthers )
]
encode : Json.Encoder Filter
encode =
Json.encode coder
{-| Allow no events. This filter is likely quite useless in practice, but it is
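
As a point of reference (not part of the diff itself), a minimal sketch of the coder-based round trip, assuming, as the updated test file further down suggests, that `Json.Decoder a` unwraps to an `elm/json` decoder of the value paired with a list of `Internal.Config.Log.Log` entries; the module name `FilterExample` is invented.

module FilterExample exposing (roundTrip)

import Internal.Config.Log exposing (Log)
import Internal.Filter.Timeline as Filter exposing (Filter)
import Json.Decode as D
import Json.Encode as E


-- Encodes a Filter with the coder-backed encoder, then decodes it again.
-- Per the updated test suite, a successful round trip yields the original
-- filter together with an empty list of logs: Ok ( filter, [] )
roundTrip : Filter -> Result D.Error ( Filter, List Log )
roundTrip filter =
    filter
        |> Filter.encode
        |> E.encode 0
        |> D.decodeString Filter.decoder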

View File

@ -3,7 +3,7 @@ module Internal.Tools.Iddict exposing
, empty, singleton, insert, map, remove
, isEmpty, member, get, size
, keys, values
, encode, decoder
, coder, encode, decoder
)
{-| The id-dict is a data type that lets us store values in a dictionary using
@ -36,13 +36,13 @@ do not need to generate identifiers yourself.
## JSON coders
@docs encode, decoder
@docs coder, encode, decoder
-}
import FastDict as Dict exposing (Dict)
import Json.Decode as D
import Json.Encode as E
import Internal.Config.Text as Text
import Internal.Tools.Json as Json
{-| The Iddict data type.
@ -54,41 +54,49 @@ type Iddict a
}
{-| Define how an Iddict can be encoded and decoded to and from a JSON value.
-}
coder : Json.Coder a -> Json.Coder (Iddict a)
coder x =
Json.object2
{ name = Text.docs.iddict.name
, description = Text.docs.iddict.description
, init =
\c d ->
Iddict
{ cursor =
Dict.keys d
|> List.maximum
|> Maybe.map ((+) 1)
|> Maybe.withDefault 0
|> max (Dict.size d)
|> max c
, dict = d
}
}
(Json.field.optional.withDefault
{ fieldName = "cursor"
, toField = \(Iddict i) -> i.cursor
, description = Text.fields.iddict.cursor
, coder = Json.int
, default = ( 0, [] )
, defaultToString = String.fromInt
}
)
(Json.field.required
{ fieldName = "dict"
, toField = \(Iddict i) -> i.dict
, description = Text.fields.iddict.dict
, coder = Json.fastIntDict x
}
)
{-| Decode an id-dict from a JSON value.
-}
decoder : D.Decoder a -> D.Decoder (Iddict a)
decoder xDecoder =
D.map2
(\c pairs ->
let
dict : Dict Int a
dict =
pairs
|> List.filterMap
(\( k, v ) ->
k
|> String.toInt
|> Maybe.map (\n -> ( n, v ))
)
|> Dict.fromList
in
Iddict
{ cursor =
Dict.keys dict
-- Larger than all values in the list
|> List.map ((+) 1)
|> List.maximum
|> Maybe.withDefault 0
|> max (Dict.size dict)
-- At least the dict size
|> max c
-- At least the given value
, dict = dict
}
)
(D.field "cursor" D.int)
(D.field "dict" <| D.keyValuePairs xDecoder)
decoder : Json.Coder a -> Json.Decoder (Iddict a)
decoder x =
Json.decode (coder x)
{-| Create an empty id-dict.
@ -103,16 +111,9 @@ empty =
{-| Encode an id-dict to a JSON value.
-}
encode : (a -> E.Value) -> Iddict a -> E.Value
encode encodeX (Iddict d) =
E.object
[ ( "cursor", E.int d.cursor )
, ( "dict"
, d.dict
|> Dict.toCoreDict
|> E.dict String.fromInt encodeX
)
]
encode : Json.Coder a -> Json.Encoder (Iddict a)
encode x =
Json.encode (coder x)
{-| Get a value from the id-dict using its key.
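
As a point of reference (not part of the diff itself), a minimal sketch of the reworked JSON interface: `encode` and `decoder` now take a `Json.Coder` instead of raw encode/decode functions, and `cursor` has become an optional field, so an empty id-dict encodes to just its dict, as the updated tests further down expect. The module name `IddictExample` is invented.

module IddictExample exposing (decodedBack, encodedEmpty)

import Internal.Tools.Iddict as Iddict exposing (Iddict)
import Internal.Tools.Json as Json
import Json.Decode as D
import Json.Encode as E


-- Encodes an empty id-dict of Ints; per the updated tests this yields
-- the string {"dict":{}} because the default cursor value is omitted.
encodedEmpty : String
encodedEmpty =
    Iddict.empty
        |> Iddict.encode Json.int
        |> E.encode 0


-- Decodes it back; the coder-based decoder returns the id-dict together
-- with a list of logs, hence the Tuple.first.
decodedBack : Result D.Error (Iddict Int)
decodedBack =
    "{\"dict\":{}}"
        |> D.decodeString (Iddict.decoder Json.int)
        |> Result.map Tuple.first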

View File

@ -3,7 +3,7 @@ module Internal.Tools.Json exposing
, Encoder, encode, Decoder, decode, Value
, succeed, fail, andThen, lazy, map
, Docs(..), RequiredField(..), toDocs
, list, slowDict, fastDict, maybe
, list, listWithOne, slowDict, fastDict, fastIntDict, set, maybe
, Field, field
, object2, object3, object4, object5, object6, object7, object8, object9, object10, object11
)
@ -49,7 +49,7 @@ module to build its encoders and decoders.
## Data types
@docs list, slowDict, fastDict, maybe
@docs list, listWithOne, slowDict, fastDict, fastIntDict, set, maybe
## Objects
@ -68,11 +68,13 @@ Once all fields are constructed, the user can create JSON objects.
import Dict as SlowDict
import FastDict
import Internal.Config.Log exposing (Log)
import Internal.Config.Log as Log exposing (Log)
import Internal.Config.Text as Text
import Internal.Tools.DecodeExtra as D
import Internal.Tools.EncodeExtra as E
import Json.Decode as D
import Json.Encode as E
import Set exposing (Set)
{-| A field of type `a` as a subtype of an object `object`.
@ -139,8 +141,10 @@ type Docs
| DocsDict Docs
| DocsFloat
| DocsInt
| DocsIntDict Docs
| DocsLazy (() -> Docs)
| DocsList Docs
| DocsListWithOne Docs
| DocsMap (Descriptive { content : Docs })
| DocsObject
(Descriptive
@ -155,6 +159,7 @@ type Docs
)
| DocsOptional Docs
| DocsRiskyMap (Descriptive { content : Docs, failure : List String })
| DocsSet Docs
| DocsString
| DocsValue
@ -290,6 +295,48 @@ fastDict (Coder old) =
}
{-| Define a fast dict where the keys are integers, not strings.
-}
fastIntDict : Coder value -> Coder (FastDict.Dict Int value)
fastIntDict (Coder old) =
Coder
{ encoder = FastDict.toCoreDict >> E.dict String.fromInt old.encoder
, decoder =
old.decoder
|> D.keyValuePairs
|> D.map
(\items ->
( items
|> List.map (Tuple.mapSecond Tuple.first)
|> List.filterMap
(\( k, v ) ->
Maybe.map (\a -> ( a, v )) (String.toInt k)
)
|> FastDict.fromList
, List.concat
[ items
|> List.map Tuple.first
|> List.filter
(\k ->
case String.toInt k of
Just _ ->
False
Nothing ->
True
)
|> List.map Text.logs.keyIsNotAnInt
|> List.map Log.log.warn
, items
|> List.map Tuple.second
|> List.concatMap Tuple.second
]
)
)
, docs = DocsIntDict old.docs
}
{-| Create a new field using any of the three provided options.
For example, suppose we are creating a `Field String User` to represent the
@ -465,6 +512,32 @@ list (Coder old) =
}
{-| Define a list that has at least one value
-}
listWithOne : Coder a -> Coder ( a, List a )
listWithOne (Coder old) =
Coder
{ encoder = \( h, t ) -> E.list old.encoder (h :: t)
, decoder =
old.decoder
|> D.list
|> D.andThen
(\items ->
case items of
[] ->
D.fail Text.failures.listWithOne
( h, l1 ) :: t ->
D.succeed
( ( h, List.map Tuple.first items )
, List.concatMap Tuple.second t
|> List.append l1
)
)
, docs = DocsListWithOne old.docs
}
{-| Map a value.
Given that the value needs to be both encoded and decoded, the map function
@ -1079,6 +1152,28 @@ object11 { name, description, init } fa fb fc fd fe ff fg fh fi fj fk =
}
{-| Define a set.
-}
set : Coder comparable -> Coder (Set comparable)
set (Coder data) =
Coder
{ encoder = E.set data.encoder
, decoder =
data.decoder
|> D.list
|> D.map
(\items ->
( items
|> List.map Tuple.first
|> Set.fromList
, items
|> List.concatMap Tuple.second
)
)
, docs = DocsSet data.docs
}
{-| Define a slow dict from the `elm/core` library.
-}
slowDict : Coder value -> Coder (SlowDict.Dict String value)
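
As a point of reference (not part of the diff itself), a minimal sketch that combines the new `set` and `fastIntDict` helpers with the existing `object2` builder. The `Example` record, its field names and the description strings are invented purely for illustration.

module CoderExample exposing (Example, coder)

import FastDict
import Internal.Tools.Json as Json
import Set exposing (Set)


-- Hypothetical record used only to illustrate the new combinators.
type alias Example =
    { tags : Set String
    , scores : FastDict.Dict Int String
    }


-- Builds a Coder for the record: `set` covers the string set and
-- `fastIntDict` covers the integer-keyed dictionary.
coder : Json.Coder Example
coder =
    Json.object2
        { name = "Example"
        , description = [ "Hypothetical record illustrating the new set and fastIntDict coders." ]
        , init = Example
        }
        (Json.field.required
            { fieldName = "tags"
            , toField = .tags
            , description = [ "A set of string tags." ]
            , coder = Json.set Json.string
            }
        )
        (Json.field.required
            { fieldName = "scores"
            , toField = .scores
            , description = [ "A dictionary keyed by integers." ]
            , coder = Json.fastIntDict Json.string
            }
        )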

View File

@ -1,138 +0,0 @@
module Internal.Tools.RationalOrder exposing (..)
{-| # Rational order
The rational order module simulates the properties of the order of rational
numbers: all values have a clear direct ordering, but one can always gain a
new number in-between two existing numbers.
While this property is similarly achievable with floats, the Float type has a
precision limit and it is therefor more desirable to achieve the same property
using an Elm type that uses Int types for comparison.
Given the design of the order, the best case comparison design is O(1), and the
worst case comparison is O(log(n)). The worst case relies on recursively
creating two values a and b, create two new numbers in-between, and repeat.
-}
import Recursion exposing (base, recurse, recurseThen)
{-| The RationalOrder consists of two items: a number for ordering and a
tie-breaking next RationalOrder type for when two RationalOrders have the same
number.
When the next RationalOrder is Nothing, it should be considered -infinite.
-}
type RationalOrder
= With Int (Maybe RationalOrder)
{-| Find a new value that comes after a given value. For optimization reasons,
this will find the nearest number at the highest level.
-}
after : RationalOrder -> RationalOrder
after (With i _) =
With (i + 1) Nothing
{-| Find a new value that comes before a given value. For optimization reasons,
this will find the nearest number at the highest level.
-}
before : RationalOrder -> RationalOrder
before (With i _) =
With (i - 1) Nothing
{-| Find a new value in-between two existing values. The inputs don't need to be
ordered.
-}
between : RationalOrder -> RationalOrder -> RationalOrder
between x y =
Recursion.runRecursion
(\orders ->
case orders of
( Nothing, Nothing ) ->
base (With 0 Nothing)
( Just o1, Nothing ) ->
base (before o1)
( Nothing, Just o2 ) ->
base (before o2)
( Just ((With i1 n1) as o1), Just ((With i2 n2) as o2) ) ->
case Basics.compare i1 i2 of
EQ ->
recurseThen ( n1, n2 )
( base << With i1 << Maybe.Just )
LT ->
case compare (after o1) o2 of
LT ->
base (after o1)
_ ->
Maybe.map after n1
|> Maybe.withDefault (With 0 Nothing)
|> Maybe.Just
|> With i1
|> base
GT ->
case compare (after o2) o1 of
LT ->
base (after o2)
_ ->
Maybe.map after n2
|> Maybe.withDefault (With 0 Nothing)
|> Maybe.Just
|> With i2
|> base
)
( Just x, Just y )
compare : RationalOrder -> RationalOrder -> Basics.Order
compare x y =
Recursion.runRecursion
(\( With i1 n1, With i2 n2 ) ->
case (Basics.compare i1 i2, n1, n2 ) of
( EQ, Just o1, Just o2 ) ->
recurse ( o1, o2 )
( EQ, Just _, Nothing ) ->
base GT
( EQ, Nothing, Just _ ) ->
base LT
( EQ, Nothing, Nothing ) ->
base EQ
( LT, _, _ ) ->
base LT
( GT, _, _ ) ->
base GT
)
( x, y )
fromList : List Int -> Maybe RationalOrder
fromList =
Recursion.runRecursion
(\items ->
case items of
[] ->
base Nothing
head :: tail ->
recurseThen tail (With head >> Maybe.Just >> base)
)
toList : RationalOrder -> List Int
toList =
Recursion.runRecursion
(\(With i next) ->
case next of
Nothing ->
base [ i ]
Just n ->
recurseThen n ((::) i >> base)
)

View File

@ -1,9 +1,9 @@
module Internal.Values.Timeline exposing
( Batch, Timeline
, empty, singleton
, mostRecentEvents
, insert
, encode, decoder
, mostRecentEvents, mostRecentEventsFrom
, addSync, insert
, coder, encode, decoder
)
{-|
@ -52,7 +52,7 @@ events!
## Query
@docs mostRecentEvents
@docs mostRecentEvents, mostRecentEventsFrom
## Manipulate
@ -62,17 +62,16 @@ events!
## JSON coder
@docs encode, decoder
@docs coder, encode, decoder
-}
import FastDict as Dict exposing (Dict)
import Internal.Config.Text as Text
import Internal.Filter.Timeline as Filter exposing (Filter)
import Internal.Tools.Hashdict as Hashdict exposing (Hashdict)
import Internal.Tools.Iddict as Iddict exposing (Iddict)
import Internal.Tools.Json as Json
import Json.Decode as D
import Json.Encode as E
import Recursion
import Recursion.Traverse
import Set exposing (Set)
@ -80,6 +79,10 @@ import Set exposing (Set)
{-| A batch is a batch of events that is placed onto the Timeline. Functions
that require an insertion, generally require this data type.
If the `start` value is `Nothing`, it is either the start of the timeline or the
start of the timeline part that the user is allowed to view.
-}
type alias Batch =
{ events : List String
@ -166,6 +169,236 @@ type alias TokenValue =
String
{-| Add a new batch as a sync
-}
addSync : Batch -> Timeline -> Timeline
addSync batch timeline =
case insertBatch batch timeline of
( Timeline t, { start, end } ) ->
let
old : ITokenPTR
old =
t.mostRecentBatch
in
case Timeline { t | mostRecentBatch = end } of
tl ->
if old == start then
tl
else
connectITokenToIToken old start tl
{-| Define how a Timeline can be encoded and decoded to and from a JSON value.
-}
coder : Json.Coder Timeline
coder =
Json.object5
{ name = Text.docs.timeline.name
, description = Text.docs.timeline.description
, init =
\a b c d e ->
Timeline
{ batches = a
, events = b
, filledBatches = c
, mostRecentBatch = d
, tokens = e
}
}
(Json.field.required
{ fieldName = "batches"
, toField = \(Timeline t) -> t.batches
, description = Text.fields.timeline.batches
, coder = Iddict.coder coderIBatch
}
)
(Json.field.required
{ fieldName = "events"
, toField = \(Timeline t) -> t.events
, description = Text.fields.timeline.events
, coder = Json.fastDict (Json.listWithOne coderIBatchPTR)
}
)
(Json.field.optional.withDefault
{ fieldName = "filledBatches"
, toField = \(Timeline t) -> t.filledBatches
, description = Text.fields.timeline.filledBatches
, coder = Json.int
, default = ( 0, [] )
, defaultToString = String.fromInt
}
)
(Json.field.required
{ fieldName = "mostRecentBatch"
, toField = \(Timeline t) -> t.mostRecentBatch
, description = Text.fields.timeline.mostRecentBatch
, coder = coderITokenPTR
}
)
(Json.field.required
{ fieldName = "tokens"
, toField = \(Timeline t) -> t.tokens
, description = Text.fields.timeline.tokens
, coder = Hashdict.coder .name coderIToken
}
)
{-| Define how to encode and decode a IBatch to and from a JSON value.
-}
coderIBatch : Json.Coder IBatch
coderIBatch =
Json.object4
{ name = Text.docs.ibatch.name
, description = Text.docs.ibatch.description
, init = IBatch
}
(Json.field.required
{ fieldName = "events"
, toField = .events
, description = Text.fields.ibatch.events
, coder = Json.list Json.string
}
)
(Json.field.required
{ fieldName = "filter"
, toField = .filter
, description = Text.fields.ibatch.filter
, coder = Filter.coder
}
)
(Json.field.required
{ fieldName = "start"
, toField = .start
, description = Text.fields.ibatch.start
, coder = coderITokenPTR
}
)
(Json.field.required
{ fieldName = "end"
, toField = .end
, description = Text.fields.ibatch.end
, coder = coderITokenPTR
}
)
{-| Define how to encode and decode a IBatchPTR to and from a JSON value.
-}
coderIBatchPTR : Json.Coder IBatchPTR
coderIBatchPTR =
Json.map
{ name = Text.docs.itoken.name
, description = Text.docs.itoken.description
, back = \(IBatchPTR value) -> value
, forth = IBatchPTR
}
coderIBatchPTRValue
{-| Define how to encode and decode a IBatchPTRValue to and from a JSON value.
-}
coderIBatchPTRValue : Json.Coder IBatchPTRValue
coderIBatchPTRValue =
Json.int
{-| Define how to encode and decode a IToken to and from a JSON value.
-}
coderIToken : Json.Coder IToken
coderIToken =
Json.object5
{ name = Text.docs.itoken.name
, description = Text.docs.itoken.description
, init = IToken
}
(Json.field.required
{ fieldName = "name"
, toField = .name
, description = Text.fields.itoken.name
, coder = coderTokenValue
}
)
(Json.field.optional.withDefault
{ fieldName = "starts"
, toField = .starts
, description = Text.fields.itoken.starts
, coder = Json.set coderIBatchPTRValue
, default = ( Set.empty, [] )
, defaultToString = always "[]"
}
)
(Json.field.optional.withDefault
{ fieldName = "ends"
, toField = .ends
, description = Text.fields.itoken.ends
, coder = Json.set coderIBatchPTRValue
, default = ( Set.empty, [] )
, defaultToString = always "[]"
}
)
(Json.field.optional.withDefault
{ fieldName = "inFrontOf"
, toField = .inFrontOf
, description = Text.fields.itoken.inFrontOf
, coder = Json.set coderITokenPTRValue
, default = ( Set.empty, [] )
, defaultToString = always "[]"
}
)
(Json.field.optional.withDefault
{ fieldName = "behind"
, toField = .behind
, description = Text.fields.itoken.behind
, coder = Json.set coderITokenPTRValue
, default = ( Set.empty, [] )
, defaultToString = always "[]"
}
)
{-| Define how to encode and decode a ITokenPTR to and from a JSON value.
-}
coderITokenPTR : Json.Coder ITokenPTR
coderITokenPTR =
Json.maybe coderITokenPTRValue
|> Json.map
{ name = Text.mappings.itokenPTR.name
, description = Text.mappings.itokenPTR.description
, back =
\itokenptr ->
case itokenptr of
ITokenPTR name ->
Just name
StartOfTimeline ->
Nothing
, forth =
\value ->
case value of
Just name ->
ITokenPTR name
Nothing ->
StartOfTimeline
}
{-| Define how to encode and decode a ITokenPTRValue to and from a JSON value.
-}
coderITokenPTRValue : Json.Coder ITokenPTRValue
coderITokenPTRValue =
Json.string
{-| Define how to encode and decode a TokenValue to and from a JSON value.
-}
coderTokenValue : Json.Coder TokenValue
coderTokenValue =
Json.string
{-| Append a token at the end of a batch.
-}
connectIBatchToIToken : IBatchPTR -> ITokenPTR -> Timeline -> Timeline
@ -238,6 +471,13 @@ connectITokenToIToken pointer1 pointer2 (Timeline tl) =
Timeline tl
{-| Timeline JSON decoder that helps decode a Timeline from JSON.
-}
decoder : Json.Decoder Timeline
decoder =
Json.decode coder
{-| Create a new empty timeline.
-}
empty : Timeline
@ -251,6 +491,13 @@ empty =
}
{-| Directly encode a Timeline into a JSON value.
-}
encode : Json.Encoder Timeline
encode =
Json.encode coder
{-| Get an IBatch from the Timeline.
-}
getIBatch : IBatchPTR -> Timeline -> Maybe IBatch
@ -381,14 +628,22 @@ invokeIToken value (Timeline tl) =
-}
mostRecentEvents : Filter -> Timeline -> List (List String)
mostRecentEvents filter (Timeline timeline) =
mostRecentEventsFrom filter (Timeline timeline) timeline.mostRecentBatch
mostRecentFrom filter (Timeline timeline) timeline.mostRecentBatch
{-| Instead of finding the most recent events from the latest sync, users can
also find the most recent events given a token value.
-}
mostRecentEventsFrom : Filter -> ITokenPTRValue -> Timeline -> List (List String)
mostRecentEventsFrom filter tokenName timeline =
mostRecentFrom filter timeline (ITokenPTR tokenName)
{-| Under a given filter, starting from a given ITokenPTR, find the most recent
events.
-}
mostRecentEventsFrom : Filter -> Timeline -> ITokenPTR -> List (List String)
mostRecentEventsFrom filter timeline ptr =
mostRecentFrom : Filter -> Timeline -> ITokenPTR -> List (List String)
mostRecentFrom filter timeline ptr =
Recursion.runRecursion
(\p ->
case getITokenFromPTR p.ptr timeline of
@ -409,12 +664,18 @@ mostRecentEventsFrom filter timeline ptr =
Recursion.recurseThen
{ ptr = ibatch.start, visited = Set.insert token.name p.visited }
(\optionalTimelines ->
optionalTimelines
|> List.map
(\outTimeline ->
List.append outTimeline ibatch.events
)
|> Recursion.base
case optionalTimelines of
[] ->
List.singleton ibatch.events
|> Recursion.base
_ :: _ ->
optionalTimelines
|> List.map
(\outTimeline ->
List.append outTimeline ibatch.events
)
|> Recursion.base
)
)
|> Recursion.map List.concat
@ -443,4 +704,4 @@ most recent batch, as if created by a sync.
-}
singleton : Batch -> Timeline
singleton b =
addSync b empty
insert b empty
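
As a point of reference (not part of the diff itself), a minimal sketch of the new query path, mirroring the "Events are returned properly" test further down: two batches that share a token form one continuous stretch of events when read back from the most recent token. `Filter.pass` stands in for the fuzzed filter; the module name `TimelineExample` is invented.

module TimelineExample exposing (recent)

import Internal.Filter.Timeline as Filter
import Internal.Values.Timeline as Timeline


-- Inserts two connected batches and reads them back from token_3.
-- Per the corresponding test, this evaluates to
-- [ [ "a", "b", "c", "d", "e", "f" ] ]
recent : List (List String)
recent =
    Timeline.empty
        |> Timeline.insert
            { events = [ "a", "b", "c" ]
            , filter = Filter.pass
            , start = Just "token_1"
            , end = "token_2"
            }
        |> Timeline.insert
            { events = [ "d", "e", "f" ]
            , filter = Filter.pass
            , start = Just "token_2"
            , end = "token_3"
            }
        |> Timeline.mostRecentEventsFrom Filter.pass "token_3"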

View File

@ -428,7 +428,7 @@ suite =
|> Filter.encode
|> E.encode 0
|> D.decodeString Filter.decoder
|> Expect.equal (Ok filter)
|> Expect.equal (Ok ( filter, [] ))
)
]
]

View File

@ -3,6 +3,7 @@ module Test.Tools.Iddict exposing (..)
import Expect
import Fuzz exposing (Fuzzer)
import Internal.Tools.Iddict as Iddict exposing (Iddict)
import Internal.Tools.Json as Json
import Json.Decode as D
import Json.Encode as E
import Test exposing (..)
@ -73,21 +74,23 @@ empty =
)
, test "JSON encode -> decode -> empty"
(Iddict.empty
|> Iddict.encode identity
|> D.decodeValue (Iddict.decoder D.value)
|> Iddict.encode Json.value
|> D.decodeValue (Iddict.decoder Json.value)
|> Result.map Tuple.first
|> Expect.equal (Ok Iddict.empty)
|> always
)
, test "JSON encode"
(Iddict.empty
|> Iddict.encode identity
|> Iddict.encode Json.value
|> E.encode 0
|> Expect.equal "{\"cursor\":0,\"dict\":{}}"
|> Expect.equal "{\"dict\":{}}"
|> always
)
, test "JSON decode"
("{\"cursor\":0,\"dict\":{}}"
|> D.decodeString (Iddict.decoder D.value)
("{\"dict\":{}}"
|> D.decodeString (Iddict.decoder Json.value)
|> Result.map Tuple.first
|> Expect.equal (Ok Iddict.empty)
|> always
)
@ -170,8 +173,9 @@ singleton =
"JSON encode -> decode -> singleton"
(\single ->
single
|> Iddict.encode E.int
|> D.decodeValue (Iddict.decoder D.int)
|> Iddict.encode Json.int
|> D.decodeValue (Iddict.decoder Json.int)
|> Result.map Tuple.first
|> Expect.equal (Ok single)
)
, fuzz Fuzz.int
@ -179,7 +183,7 @@ singleton =
(\i ->
Iddict.singleton i
|> Tuple.second
|> Iddict.encode E.int
|> Iddict.encode Json.int
|> E.encode 0
|> Expect.equal ("{\"cursor\":1,\"dict\":{\"0\":" ++ String.fromInt i ++ "}}")
)
@ -187,7 +191,8 @@ singleton =
"JSON decode"
(\i ->
("{\"cursor\":1,\"dict\":{\"0\":" ++ String.fromInt i ++ "}}")
|> D.decodeString (Iddict.decoder D.int)
|> D.decodeString (Iddict.decoder Json.int)
|> Result.map Tuple.first
|> Tuple.pair 0
|> Expect.equal (Iddict.singleton i |> Tuple.mapSecond Ok)
)

View File

@ -1,223 +0,0 @@
module Test.Tools.RationalOrder exposing (..)
import Test exposing (..)
import Fuzz exposing (Fuzzer)
import Expect
import Internal.Tools.RationalOrder as RO exposing (RationalOrder(..))
fuzzer : Fuzzer RationalOrder
fuzzer =
Fuzz.map2 With Fuzz.int (Fuzz.lazy (\_ -> Fuzz.maybe fuzzer))
twoUnequal : Fuzzer (RationalOrder, RationalOrder)
twoUnequal =
fuzzer
|> Fuzz.andThen
(\o ->
Fuzz.map2
(\o1 o2 ->
if RO.compare o1 o2 == LT then
( o1, o2 )
else
( o2, o1 )
)
(Fuzz.constant o)
(Fuzz.filter ((/=) o) fuzzer)
)
suite : Test
suite =
describe "RationalOrder"
[ describe "Semantic truths"
[ describe "After is always greater"
[ fuzz fuzzer "Forwards"
(\o ->
Expect.equal LT (RO.compare o (RO.after o))
)
, fuzz fuzzer "Backwards"
(\o ->
Expect.equal GT (RO.compare (RO.after o) o)
)
]
, describe "Before is always lesser"
[ fuzz fuzzer "Forwards"
(\o ->
Expect.equal GT (RO.compare o (RO.before o))
)
, fuzz fuzzer "Backwards"
(\o ->
Expect.equal LT (RO.compare (RO.before o) o)
)
]
, describe "Two unequal == two unequal"
[ fuzz twoUnequal "Forwards"
(\(small, big) ->
Expect.equal LT (RO.compare small big)
)
, fuzz twoUnequal "Backwards"
(\(small, big) ->
Expect.equal GT (RO.compare big small)
)
]
, describe "compare"
[ fuzz2 fuzzer fuzzer "EQ iff same value"
(\o1 o2 ->
Expect.equal
(o1 == o2)
(RO.compare o1 o2 == EQ)
)
, fuzz2 fuzzer fuzzer "LT iff opposite GT"
(\o1 o2 ->
Expect.equal
(RO.compare o1 o2 == LT)
(RO.compare o2 o1 == GT)
)
]
, describe "Between is always between"
[ fuzz twoUnequal "Less than first - forwards"
(\(small, big) ->
(RO.between small big)
|> RO.compare small
|> Expect.equal LT
)
, fuzz twoUnequal "Less than first - backwards"
(\(small, big) ->
small
|> RO.compare (RO.between small big)
|> Expect.equal GT
)
, fuzz twoUnequal "Less than second - forwards"
(\(small, big) ->
RO.between small big
|> RO.compare big
|> Expect.equal GT
)
, fuzz twoUnequal "Less than second - backwards"
(\(small, big) ->
big
|> RO.compare (RO.between small big)
|> Expect.equal LT
)
]
]
, describe "Between creates between"
[ test "With 0 Nothing <--> With 1 Nothing"
(\() ->
RO.between (With 0 Nothing) (With 1 Nothing)
|> Expect.equal (With 0 (Just (With 0 Nothing)))
)
, test "With 1 Nothing <--> With 0 Nothing"
(\() ->
RO.between (With 1 Nothing) (With 0 Nothing)
|> Expect.equal (With 0 (Just (With 0 Nothing)))
)
, test "With 0 is filled between With 1 Nothing"
(\() ->
With 0 Nothing
|> RO.between (With 1 Nothing)
|> RO.between (With 1 Nothing)
|> RO.between (With 1 Nothing)
|> RO.between (With 1 Nothing)
|> RO.between (With 1 Nothing)
|> RO.between (With 1 Nothing)
|> Expect.equal (With 0 (Just (With 5 Nothing)))
)
, test "Will start counting high level as soon as possible"
(\() ->
With 0 Nothing
|> RO.between (With 1 Nothing)
|> RO.between (With 1 Nothing)
|> RO.between (With 1 Nothing)
|> RO.between (With 1 Nothing)
|> RO.between (With 1 Nothing)
|> RO.between (With 1 Nothing)
|> RO.between (With 5 Nothing)
|> RO.between (With 5 Nothing)
|> Expect.equal (With 2 Nothing)
)
, test "Will start counting high level, then return lower level"
(\() ->
With 0 Nothing
|> RO.between (With 1 Nothing)
|> RO.between (With 1 Nothing)
|> RO.between (With 1 Nothing)
|> RO.between (With 1 Nothing)
|> RO.between (With 1 Nothing)
|> RO.between (With 1 Nothing)
|> RO.between (With 5 Nothing)
|> RO.between (With 5 Nothing)
|> RO.between (With 5 Nothing)
|> RO.between (With 5 Nothing)
|> RO.between (With 5 Nothing)
|> RO.between (With 5 Nothing)
|> RO.between (With 5 Nothing)
|> RO.between (With 5 Nothing)
|> RO.between (With 5 Nothing)
|> RO.between (With 5 Nothing)
|> RO.between (With 5 Nothing)
|> Expect.equal (With 4 (Just (With 6 Nothing)))
)
, fuzz2 fuzzer fuzzer "Between is commutative"
(\o1 o2 ->
Expect.equal (RO.between o1 o2) (RO.between o2 o1)
)
]
, describe "After"
[ fuzz Fuzz.int "One more - level 1"
(\a ->
Expect.equal
( RO.after <| With a Nothing )
( With (a + 1) Nothing)
)
, fuzz2 Fuzz.int Fuzz.int "One more - level 2"
(\a b ->
Expect.equal
( RO.after <| With a <| Just <| With b Nothing )
( With (a + 1) Nothing)
)
, fuzz3 Fuzz.int Fuzz.int Fuzz.int "One more - level 3"
(\a b c ->
Expect.equal
( RO.after <| With a <| Just <| With b <| Just <| With c Nothing )
( With (a + 1) Nothing)
)
]
, describe "Before"
[ fuzz Fuzz.int "One less - level 1"
(\a ->
Expect.equal
( RO.before <| With a Nothing )
( With (a - 1) Nothing)
)
, fuzz2 Fuzz.int Fuzz.int "One less - level 2"
(\a b ->
Expect.equal
( RO.before <| With a <| Just <| With b Nothing )
( With (a - 1) Nothing)
)
, fuzz3 Fuzz.int Fuzz.int Fuzz.int "One less - level 3"
(\a b c ->
Expect.equal
( RO.before <| With a <| Just <| With b <| Just <| With c Nothing )
( With (a - 1) Nothing)
)
]
, describe "Compare vs. list compare"
[ fuzz2
(Fuzz.listOfLengthBetween 1 32 Fuzz.int)
(Fuzz.listOfLengthBetween 1 32 Fuzz.int)
"Compares the same between normal lists and orders"
(\l1 l2 ->
Expect.equal
( Just <| Basics.compare l1 l2 )
( Maybe.map2 RO.compare (RO.fromList l1) (RO.fromList l2))
)
, fuzz2 fuzzer fuzzer "Compares the same when converted to list"
(\o1 o2 ->
Expect.equal
( RO.compare o1 o2 )
( Basics.compare (RO.toList o1) (RO.toList o2) )
)
]
]

View File

@ -3,9 +3,9 @@ module Test.Values.Timeline exposing (..)
import Expect
import Fuzz exposing (Fuzzer)
import Internal.Filter.Timeline as Filter exposing (Filter)
import Internal.Tools.Json as Json
import Internal.Values.Timeline as Timeline exposing (Batch, Timeline)
import Json.Decode as D
import Json.Encode as E
import Test exposing (..)
import Test.Filter.Timeline as TestFilter
@ -17,6 +17,34 @@ fuzzer =
(\globalFilter ->
Fuzz.oneOf
[ Fuzz.map2
(\start batches ->
List.foldl
(\b ( s, f ) ->
( b.end
, f >> Timeline.insert { b | start = Just s, filter = globalFilter }
)
)
( start, identity )
batches
|> Tuple.second
)
Fuzz.string
(Fuzz.listOfLengthBetween 0 10 fuzzerBatch)
, Fuzz.map2
(\start batches ->
List.foldl
(\b ( s, f ) ->
( b.end
, f >> Timeline.insert { b | start = Just s, filter = Filter.and globalFilter b.filter }
)
)
( start, identity )
batches
|> Tuple.second
)
Fuzz.string
(Fuzz.listOfLengthBetween 0 4 fuzzerBatch)
, Fuzz.map2
(\start batches ->
List.foldl
(\b ( s, f ) ->
@ -35,7 +63,7 @@ fuzzer =
List.foldl
(\b ( s, f ) ->
( b.end
, f >> Timeline.insert { b | start = Just s, filter = Filter.and globalFilter b.filter }
, f >> Timeline.addSync { b | start = Just s, filter = Filter.and globalFilter b.filter }
)
)
( start, identity )
@ -59,41 +87,297 @@ fuzzerBatch =
Fuzz.string
isEqual : Timeline -> Timeline -> Expect.Expectation
isEqual t1 t2 =
Expect.equal
(E.encode 0 <| Timeline.encode t1)
(E.encode 0 <| Timeline.encode t2)
suite : Test
suite =
describe "Timeline"
[ describe "empty"
[ fuzz fuzzerBatch
"singleton = empty + sync"
(\batch ->
isEqual
(Timeline.singleton batch)
(Timeline.addSync batch Timeline.empty)
[ describe "most recent events with filters"
[ fuzz TestFilter.fuzzer
"Events are returned properly"
(\filter ->
Timeline.empty
|> Timeline.insert
{ events = [ "a", "b", "c" ]
, filter = filter
, start = Just "token_1"
, end = "token_2"
}
|> Timeline.insert
{ events = [ "d", "e", "f" ]
, filter = filter
, start = Just "token_2"
, end = "token_3"
}
|> Timeline.mostRecentEventsFrom filter "token_3"
|> Expect.equal
[ [ "a", "b", "c", "d", "e", "f" ] ]
)
, fuzz2 TestFilter.fuzzer
TestFilter.fuzzer
"Sub-events get the same results"
(\f1 f2 ->
let
subFilter =
Filter.and f1 f2
in
Timeline.empty
|> Timeline.insert
{ events = [ "a", "b", "c" ]
, filter = f1
, start = Just "token_1"
, end = "token_2"
}
|> Timeline.insert
{ events = [ "d", "e", "f" ]
, filter = f1
, start = Just "token_2"
, end = "token_3"
}
|> Timeline.mostRecentEventsFrom subFilter "token_3"
|> Expect.equal
[ [ "a", "b", "c", "d", "e", "f" ] ]
)
, fuzz2 TestFilter.fuzzer
TestFilter.fuzzer
"ONLY same result if sub-filter"
(\f1 f2 ->
Timeline.empty
|> Timeline.insert
{ events = [ "a", "b", "c" ]
, filter = f1
, start = Just "token_1"
, end = "token_2"
}
|> Timeline.insert
{ events = [ "d", "e", "f" ]
, filter = f1
, start = Just "token_2"
, end = "token_3"
}
|> Timeline.mostRecentEventsFrom f2 "token_3"
|> (\events ->
Expect.equal
(Filter.subsetOf f1 f2)
(events == [ [ "a", "b", "c", "d", "e", "f" ] ])
)
)
]
, describe "Forks in the road"
[ fuzz2 TestFilter.fuzzer
TestFilter.fuzzer
"Two options returned"
(\f1 f2 ->
let
subFilter =
Filter.and f1 f2
in
Timeline.empty
|> Timeline.insert
{ events = [ "a", "b", "c" ]
, filter = f1
, start = Just "token_1"
, end = "token_2"
}
|> Timeline.insert
{ events = [ "d", "e", "f" ]
, filter = f2
, start = Just "token_3"
, end = "token_2"
}
|> Timeline.insert
{ events = [ "g", "h", "i" ]
, filter = subFilter
, start = Just "token_2"
, end = "token_4"
}
|> Timeline.mostRecentEventsFrom subFilter "token_4"
|> Expect.equal
[ [ "a", "b", "c", "g", "h", "i" ]
, [ "d", "e", "f", "g", "h", "i" ]
]
)
]
, describe "Gaps"
[ fuzz TestFilter.fuzzer
"Gaps leave behind old events"
(\filter ->
Timeline.empty
|> Timeline.insert
{ events = [ "a", "b", "c" ]
, filter = filter
, start = Just "token_1"
, end = "token_2"
}
|> Timeline.insert
{ events = [ "d", "e", "f" ]
, filter = filter
, start = Just "token_3"
, end = "token_4"
}
|> Timeline.mostRecentEventsFrom filter "token_4"
|> Expect.equal [ [ "d", "e", "f" ] ]
)
, fuzz3 TestFilter.fuzzer
(Fuzz.list Fuzz.string)
(Fuzz.pair (Fuzz.list Fuzz.string) (Fuzz.list Fuzz.string))
"Gaps can be bridged"
(\filter l1 ( l2, l3 ) ->
Timeline.empty
|> Timeline.insert
{ events = l1
, filter = filter
, start = Just "token_1"
, end = "token_2"
}
|> Timeline.insert
{ events = l3
, filter = filter
, start = Just "token_3"
, end = "token_4"
}
|> Timeline.insert
{ events = l2
, filter = filter
, start = Just "token_2"
, end = "token_3"
}
|> Timeline.mostRecentEventsFrom filter "token_4"
|> Expect.equal [ List.concat [ l1, l2, l3 ] ]
)
]
, describe "JSON"
[ fuzz fuzzer
"encode -> decode is same"
"Encode + Decode gives same output"
(\timeline ->
timeline
|> Timeline.encode
|> E.encode 0
|> D.decodeString Timeline.decoder
|> (\t ->
case t of
Ok v ->
isEqual v timeline
Err e ->
Expect.fail (D.errorToString e)
)
|> Json.encode Timeline.coder
|> D.decodeValue (Json.decode Timeline.coder)
|> Result.map Tuple.first
|> Result.map (Timeline.mostRecentEvents Filter.pass)
|> Expect.equal (Ok <| Timeline.mostRecentEvents Filter.pass timeline)
)
]
, describe "Weird loops"
[ fuzz TestFilter.fuzzer
"Weird loops stop looping"
(\filter ->
Timeline.empty
|> Timeline.insert
{ events = [ "a", "b", "c" ]
, filter = filter
, start = Just "token_1"
, end = "token_2"
}
|> Timeline.insert
{ events = [ "d", "e", "f" ]
, filter = filter
, start = Just "token_2"
, end = "token_3"
}
|> Timeline.insert
{ events = [ "g", "h", "i" ]
, filter = filter
, start = Just "token_3"
, end = "token_2"
}
|> Timeline.mostRecentEventsFrom filter "token_2"
|> Expect.equal
[ [ "a", "b", "c" ]
, [ "d", "e", "f", "g", "h", "i" ]
]
)
]
, describe "Sync"
[ fuzz TestFilter.fuzzer
"Sync fills gaps"
(\filter ->
Timeline.empty
|> Timeline.addSync
{ events = [ "a", "b", "c" ]
, filter = filter
, start = Just "token_1"
, end = "token_2"
}
|> Timeline.addSync
{ events = [ "f", "g", "h" ]
, filter = filter
, start = Just "token_3"
, end = "token_4"
}
|> Timeline.insert
{ events = [ "d", "e" ]
, filter = filter
, start = Just "token_2"
, end = "token_3"
}
|> Timeline.mostRecentEvents filter
|> Expect.equal [ [ "a", "b", "c", "d", "e", "f", "g", "h" ] ]
)
, fuzz TestFilter.fuzzer
"Sync doesn't fill open gaps"
(\filter ->
Timeline.empty
|> Timeline.addSync
{ events = [ "a", "b", "c" ]
, filter = filter
, start = Just "token_1"
, end = "token_2"
}
|> Timeline.addSync
{ events = [ "f", "g", "h" ]
, filter = filter
, start = Just "token_3"
, end = "token_4"
}
|> Timeline.mostRecentEvents filter
|> Expect.equal [ [ "f", "g", "h" ] ]
)
, fuzz3 (Fuzz.pair Fuzz.string Fuzz.string)
fuzzer
TestFilter.fuzzer
"Getting /sync is the same as getting from the token"
(\( start, end ) timeline filter ->
let
t : Timeline
t =
Timeline.addSync
{ events = [ "a", "b", "c" ]
, filter = filter
, start = Just start
, end = end
}
timeline
in
Expect.equal
(Timeline.mostRecentEvents filter t)
(Timeline.mostRecentEventsFrom filter end t)
)
, fuzz TestFilter.fuzzer
"Weird loops stop looping"
(\filter ->
Timeline.empty
|> Timeline.insert
{ events = [ "a", "b", "c" ]
, filter = filter
, start = Just "token_1"
, end = "token_2"
}
|> Timeline.insert
{ events = [ "d", "e", "f" ]
, filter = filter
, start = Just "token_2"
, end = "token_3"
}
|> Timeline.insert
{ events = [ "g", "h", "i" ]
, filter = filter
, start = Just "token_3"
, end = "token_2"
}
|> Timeline.mostRecentEventsFrom filter "token_2"
|> Expect.equal
[ [ "a", "b", "c" ]
, [ "d", "e", "f", "g", "h", "i" ]
]
)
]
]