Add Timeline JSON coder

pull/17/head
Bram 2024-03-29 10:46:02 +01:00
parent 421e1f6ce7
commit ed78695213
8 changed files with 506 additions and 111 deletions


@@ -1,5 +1,5 @@
module Internal.Config.Text exposing
-( docs, failures, fields, mappings
+( docs, failures, fields, mappings, logs
, accessTokenFoundLocally, accessTokenExpired, accessTokenInvalid
, versionsFoundLocally, versionsReceived, versionsFailedToDecode
, unsupportedVersionForEndpoint
@@ -27,7 +27,7 @@ You should only do this if you know what you're doing.
## Type documentation
-@docs docs, failures, fields, mappings
+@docs docs, failures, fields, mappings, logs
## API Authentication
@@ -116,9 +116,14 @@ docs :
, envelope : TypeDocs
, event : TypeDocs
, hashdict : TypeDocs
, ibatch : TypeDocs
, iddict : TypeDocs
, itoken : TypeDocs
, mashdict : TypeDocs
, settings : TypeDocs
, stateManager : TypeDocs
, timeline : TypeDocs
, timelineFilter : TypeDocs
, unsigned : TypeDocs
}
docs =
@@ -148,6 +153,24 @@ docs =
, "For example, the hashdict can store events and use their event id as their key."
]
}
, ibatch =
{ name = "IBatch"
, description =
[ "The internal batch tracks a patch of events on the Matrix timeline."
]
}
, iddict =
{ name = "Iddict"
, description =
[ "An iddict automatically handles creating appropriate keys by incrementally assiging a new key to new values."
]
}
, itoken =
{ name = "IToken"
, description =
[ "The IToken connects batches in the timeline and maintains relative order."
]
}
, mashdict =
{ name = "Mashdict"
, description =
@@ -167,6 +190,18 @@ docs =
, "Instead of making the user loop through the room's timeline of events, the StateManager offers the user a dictionary-like experience to navigate through the Matrix room state."
]
}
, timeline =
{ name = "Timeline"
, description =
[ "The Timeline tracks events and orders them in a simple way for the user to view them."
]
}
, timelineFilter =
{ name = "Timeline Filter"
, description =
[ "The Timeline Filter allows the user to be very specific about which events they're interested in."
]
}
, unsigned =
{ name = "Unsigned Data"
, description =
@@ -218,11 +253,41 @@ fields :
, eventType : Desc
, unsigned : Desc
}
, ibatch :
{ end : Desc
, events : Desc
, filter : Desc
, start : Desc
}
, iddict :
{ cursor : Desc
, dict : Desc
}
, itoken :
{ behind : Desc
, ends : Desc
, inFrontOf : Desc
, name : Desc
, starts : Desc
}
, settings :
{ currentVersion : Desc
, deviceName : Desc
, syncTime : Desc
}
, timeline :
{ batches : Desc
, events : Desc
, filledBatches : Desc
, mostRecentBatch : Desc
, tokens : Desc
}
, timelineFilter :
{ senders : Desc
, sendersAllowOthers : Desc
, types : Desc
, typesAllowOthers : Desc
}
, unsigned :
{ age : Desc
, prevContent : Desc
@@ -293,6 +358,45 @@ fields =
[ "Contains optional extra information about the event."
]
}
, ibatch =
{ end =
[ "Pointer to the token that ends the internal batch."
]
, events =
[ "List of event IDs contained within the internal batch."
]
, filter =
[ "Filter that indicates how strictly the homeserver has selected when resulting into the given list of events."
]
, start =
[ "Pointer to the token that starts the internal batch."
]
}
, iddict =
{ cursor =
[ "To ensure uniqueness of all keys and to prevent the usage of keys that were previously assigned to older values, the iddict tracks which is the smallest non-negative integer that hasn't been used yet."
]
, dict =
[ "Dictionary that contains all values stored in the iddict."
]
}
, itoken =
{ behind =
[ "This token is behind all tokens in this field."
]
, ends =
[ "This token is in front of the batches in this field."
]
, inFrontOf =
[ "This token is ahead of all tokens in this field."
]
, name =
[ "Opaque value provided by the homeserver."
]
, starts =
[ "This token is at the start of the batches in this field."
]
}
, settings =
{ currentVersion =
[ "Indicates the current version of the Elm SDK."
@@ -304,6 +408,40 @@ fields =
[ "Indicates the frequency in milliseconds with which the Elm SDK should long-poll the /sync endpoint."
]
}
, timeline =
{ batches =
[ "Dictionary storing all event batches in the timeline."
]
, events =
[ "Mapping that allows us to quickly zoom in on an event."
]
, filledBatches =
[ "Counter that tracks how many batches are kept by the timeline."
, "Batches are only counted if they are filled by at least one event."
]
, mostRecentBatch =
[ "Tracks the most recent batch that was sent by the homeserver - usually through `/sync`"
]
, tokens =
[ "Index of all the tokens used to connect event batches on the timeline."
]
}
, timelineFilter =
{ senders =
[ "A list of senders that is considered an exception to the infinite pool of \"other\" users"
]
, sendersAllowOthers =
[ "Value that determines whether the infinite pool of others is included."
, "If False, only the users mentioned in `senders` are included. If True, then all users who aren't mentioned in `senders` are included."
]
, types =
[ "A list of event types that is considered an exception to the infinite pool of \"other\" event types."
]
, typesAllowOthers =
[ "Value that determines whether the infinite pool of others is included."
, "If False, only the event types mentioned in `types` are included. If True, then all users who aren't mentioned in `types` are included."
]
}
, unsigned =
{ age =
[ "The time in milliseconds that has elapsed since the event was sent. This field is generated by the local homeserver, and may be incorrect if the local time on at least one of the two servers is out of sync, which can cause the age to either be negative or greater than it actually is."
@@ -347,6 +485,21 @@ leakingValueFound leaking_value =
"Found leaking value : " ++ leaking_value
{-|
-}
logs : { keyIsNotAnInt : String -> String }
logs =
{ keyIsNotAnInt =
(\key ->
String.concat
[ "Encountered a key `"
, key
, "` that cannot be converted to an Int"
]
)
}
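The new `logs.keyIsNotAnInt` message builder is consumed by `Internal.Tools.Json.fastIntDict` further down in this commit. A minimal, hypothetical sketch of the message it produces, assuming the module is imported as `Text` (the alias used elsewhere in this diff):

import Internal.Config.Text as Text

-- Evaluates to:
-- "Encountered a key `five` that cannot be converted to an Int"
badKeyWarning : String
badKeyWarning =
    Text.logs.keyIsNotAnInt "five"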
{-| Function descriptions
-}
mappings : { itokenPTR : TypeDocs }


@@ -4,7 +4,7 @@ module Internal.Filter.Timeline exposing
, match, run
, and
, subsetOf
-, encode, decoder
+, coder, encode, decoder
)
{-|
@@ -43,10 +43,12 @@ for interacting with the Matrix API.
## JSON coders
-@docs encode, decoder
+@docs coder, encode, decoder
-}
import Internal.Config.Text as Text
import Internal.Tools.Json as Json
import Json.Decode as D
import Json.Encode as E
import Set exposing (Set)
@@ -159,44 +161,64 @@ and (Filter f1) (Filter f2) =
else
stdAnd
coder : Json.Coder Filter
coder =
Json.object4
{ name = Text.docs.timelineFilter.name
, description = Text.docs.timelineFilter.description
, init =
(\a b c d ->
Filter
{ senders = a, sendersAllowOthers = b
, types = c, typesAllowOthers = d
}
)
}
( Json.field.optional.withDefault
{ fieldName = "senders"
, toField = (\(Filter f) -> f.senders)
, description = Text.fields.timelineFilter.senders
, coder = Json.set Json.string
, default = ( Set.empty, [] )
, defaultToString = always "[]"
}
)
( Json.field.required
{ fieldName = "sendersAllowOthers"
, toField = (\(Filter f) -> f.sendersAllowOthers)
, description = Text.fields.timelineFilter.sendersAllowOthers
, coder = Json.bool
}
)
( Json.field.optional.withDefault
{ fieldName = "types"
, toField = (\(Filter f) -> f.types)
, description = Text.fields.timelineFilter.types
, coder = Json.set Json.string
, default = ( Set.empty, [] )
, defaultToString = always "[]"
}
)
( Json.field.required
{ fieldName = "typesAllowOthers"
, toField = (\(Filter f) -> f.typesAllowOthers)
, description = Text.fields.timelineFilter.typesAllowOthers
, coder = Json.bool
}
)
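Since `encode` and `decoder` are now both derived from `coder`, a decoded filter comes back together with the logs gathered while decoding. A minimal, hypothetical round-trip sketch, assuming `Filter.pass` (used by the test suite below) and that the `Json` encoder/decoder wrappers behave as in the tests:

import Internal.Filter.Timeline as Filter
import Json.Decode as D
import Json.Encode as E

-- A clean round trip is expected to give Ok ( Filter.pass, [] ):
-- the decoded filter plus an empty list of log messages.
roundTrip =
    Filter.pass
        |> Filter.encode
        |> E.encode 0
        |> D.decodeString Filter.decoder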
{-| Decode a Filter from a JSON value.
-}
-decoder : D.Decoder Filter
-decoder =
-D.map4
-(\s sb t tb ->
-Filter
-{ senders = s
-, sendersAllowOthers = sb
-, types = t
-, typesAllowOthers = tb
-}
-)
-(D.string
-|> D.list
-|> D.map Set.fromList
-|> D.field "senders"
-)
-(D.field "sendersAllowOthers" D.bool)
-(D.string
-|> D.list
-|> D.map Set.fromList
-|> D.field "types"
-)
-(D.field "typesAllowOthers" D.bool)
+decoder : Json.Decoder Filter
+decoder =
+Json.decode coder
{-| Encode a Filter into a JSON value.
-}
-encode : Filter -> E.Value
-encode (Filter f) =
-E.object
-[ ( "senders", E.set E.string f.senders )
-, ( "sendersAllowOthers", E.bool f.sendersAllowOthers )
-, ( "types", E.set E.string f.types )
-, ( "typesAllowOthers", E.bool f.typesAllowOthers )
-]
+encode : Json.Encoder Filter
+encode =
+Json.encode coder
{-| Allow no events. This filter is likely quite useless in practice, but it is


@@ -3,7 +3,7 @@ module Internal.Tools.Iddict exposing
, empty, singleton, insert, map, remove
, isEmpty, member, get, size
, keys, values
-, encode, decoder
+, coder, encode, decoder
)
{-| The id-dict is a data type that lets us store values in a dictionary using
@@ -36,11 +36,13 @@ do not need to generate identifiers yourself.
## JSON coders
-@docs encode, decoder
+@docs coder, encode, decoder
-}
import FastDict as Dict exposing (Dict)
import Internal.Config.Text as Text
import Internal.Tools.Json as Json
import Json.Decode as D
import Json.Encode as E
@@ -53,42 +55,47 @@ type Iddict a
, dict : Dict Int a
}
coder : Json.Coder a -> Json.Coder (Iddict a)
coder x =
Json.object2
{ name = Text.docs.iddict.name
, description = Text.docs.iddict.description
, init =
(\c d ->
Iddict
{ cursor =
Dict.keys d
|> List.maximum
|> Maybe.withDefault -1
|> (+) 1
|> max (Dict.size d)
|> max c
, dict = d
}
)
}
( Json.field.optional.withDefault
{ fieldName = "cursor"
, toField = (\(Iddict i) -> i.cursor)
, description = Text.fields.iddict.cursor
, coder = Json.int
, default = ( 0, [] )
, defaultToString = String.fromInt
}
)
( Json.field.required
{ fieldName = "dict"
, toField = (\(Iddict i) -> i.dict)
, description = Text.fields.iddict.dict
, coder = Json.fastIntDict x
}
)
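The coder recomputes the cursor whenever it is missing or smaller than the stored keys, which keeps hand-written JSON decodable. A minimal, hypothetical sketch, assuming `Json.string` works like the other primitive coders in this diff:

import Internal.Tools.Iddict as Iddict
import Internal.Tools.Json as Json
import Json.Decode as D

-- "cursor" is omitted here, so the coder derives one larger than every
-- stored key; decoding also returns the log messages it gathered.
decoded =
    D.decodeString
        (Iddict.decoder Json.string)
        "{\"dict\":{\"0\":\"a\",\"5\":\"b\"}}"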
{-| Decode an id-dict from a JSON value.
-}
-decoder : D.Decoder a -> D.Decoder (Iddict a)
-decoder xDecoder =
-D.map2
-(\c pairs ->
-let
-dict : Dict Int a
-dict =
-pairs
-|> List.filterMap
-(\( k, v ) ->
-k
-|> String.toInt
-|> Maybe.map (\n -> ( n, v ))
-)
-|> Dict.fromList
-in
-Iddict
-{ cursor =
-Dict.keys dict
--- Larger than all values in the list
-|> List.map ((+) 1)
-|> List.maximum
-|> Maybe.withDefault 0
-|> max (Dict.size dict)
--- At least the dict size
-|> max c
--- At least the given value
-, dict = dict
-}
-)
-(D.field "cursor" D.int)
-(D.field "dict" <| D.keyValuePairs xDecoder)
+decoder : Json.Coder a -> Json.Decoder (Iddict a)
+decoder x =
+Json.decode (coder x)
{-| Create an empty id-dict.
@@ -103,16 +110,9 @@ empty =
{-| Encode an id-dict to a JSON value.
-}
-encode : (a -> E.Value) -> Iddict a -> E.Value
-encode encodeX (Iddict d) =
-E.object
-[ ( "cursor", E.int d.cursor )
-, ( "dict"
-, d.dict
-|> Dict.toCoreDict
-|> E.dict String.fromInt encodeX
-)
-]
+encode : Json.Coder a -> Json.Encoder (Iddict a)
+encode x =
+Json.encode (coder x)
{-| Get a value from the id-dict using its key.


@@ -3,7 +3,7 @@ module Internal.Tools.Json exposing
, Encoder, encode, Decoder, decode, Value
, succeed, fail, andThen, lazy, map
, Docs(..), RequiredField(..), toDocs
-, list, slowDict, fastDict, set, maybe
+, list, listWithOne, slowDict, fastDict, fastIntDict, set, maybe
, Field, field
, object2, object3, object4, object5, object6, object7, object8, object9, object10, object11
)
@@ -49,7 +49,7 @@ module to build its encoders and decoders.
## Data types
-@docs list, slowDict, fastDict, set, maybe
+@docs list, listWithOne, slowDict, fastDict, fastIntDict, set, maybe
## Objects
@@ -68,7 +68,8 @@ Once all fields are constructed, the user can create JSON objects.
import Dict as SlowDict
import FastDict
-import Internal.Config.Log exposing (Log)
+import Internal.Config.Log as Log exposing (Log)
import Internal.Config.Text as Text
import Internal.Tools.DecodeExtra as D
import Internal.Tools.EncodeExtra as E
import Json.Decode as D
@@ -140,8 +141,10 @@ type Docs
| DocsDict Docs
| DocsFloat
| DocsInt
| DocsIntDict Docs
| DocsLazy (() -> Docs)
| DocsList Docs
| DocsListWithOne Docs
| DocsMap (Descriptive { content : Docs })
| DocsObject
(Descriptive
@@ -291,6 +294,46 @@ fastDict (Coder old) =
, docs = DocsDict old.docs
}
{-| Define a fast dict where the keys are integers, not strings.
-}
fastIntDict : Coder value -> Coder (FastDict.Dict Int value)
fastIntDict (Coder old) =
Coder
{ encoder = FastDict.toCoreDict >> E.dict String.fromInt old.encoder
, decoder =
old.decoder
|> D.keyValuePairs
|> D.map
(\items ->
( items
|> List.map (Tuple.mapSecond Tuple.first)
|> List.filterMap
(\(k, v) ->
Maybe.map (\a -> (a, v)) (String.toInt k)
)
|> FastDict.fromList
, List.concat
[ items
|> List.map Tuple.first
|> List.filter
(\k ->
case String.toInt k of
Just _ ->
False
Nothing ->
True
)
|> List.map Text.logs.keyIsNotAnInt
|> List.map Log.log.warn
, items
|> List.map Tuple.second
|> List.concatMap Tuple.second
]
)
)
, docs = DocsIntDict old.docs
}
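`fastIntDict` stores integer keys as JSON object keys, so they travel as strings and are parsed back on decode; keys that do not parse are dropped with a warning. A minimal, hypothetical encoding sketch, assuming `Json.string` as the value coder and `Json.encode` as used in the tests below:

import FastDict
import Internal.Tools.Json as Json
import Json.Encode as E

-- Encodes to the JSON object {"0":"a","1":"b"}.
encodedDict : E.Value
encodedDict =
    FastDict.fromList [ ( 0, "a" ), ( 1, "b" ) ]
        |> Json.encode (Json.fastIntDict Json.string)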
{-| Create a new field using any of the three provided options.
@@ -466,6 +509,31 @@ list (Coder old) =
, docs = DocsList old.docs
}
{-| Define a list that has at least one value
-}
listWithOne : Coder a -> Coder (a, List a)
listWithOne (Coder old) =
Coder
{ encoder = (\(h, t) -> E.list old.encoder (h :: t))
, decoder =
old.decoder
|> D.list
|> D.andThen
(\items ->
case items of
[] ->
D.fail "Expected at least one value in list"
( h, l1) :: t ->
D.succeed
( (h, List.map Tuple.first t)
, List.concatMap Tuple.second t
|> List.append l1
)
)
, docs = DocsListWithOne old.docs
}
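`listWithOne` models a non-empty list as a head plus a tail, so decoding an empty JSON array fails instead of silently producing a default. A minimal, hypothetical sketch, again assuming `Json.string` as the element coder:

import Internal.Tools.Json as Json
import Json.Encode as E

-- Encodes the non-empty list ( "a", [ "b", "c" ] ) as ["a","b","c"].
nonEmpty : E.Value
nonEmpty =
    Json.encode (Json.listWithOne Json.string) ( "a", [ "b", "c" ] )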
{-| Map a value.


@@ -3,6 +3,7 @@ module Internal.Values.Timeline exposing
, empty, singleton
, mostRecentEvents, mostRecentEventsFrom
, insert
, coder
)
{-|
@@ -168,13 +169,100 @@ type Timeline
type alias TokenValue =
String
coder : Json.Coder Timeline
coder =
Json.object5
{ name = Text.docs.timeline.name
, description = Text.docs.timeline.description
, init =
(\a b c d e ->
Timeline
{ batches = a, events = b, filledBatches = c
, mostRecentBatch = d, tokens = e
}
)
}
( Json.field.required
{ fieldName = "batches"
, toField = (\(Timeline t) -> t.batches)
, description = Text.fields.timeline.batches
, coder = Iddict.coder coderIBatch
}
)
( Json.field.required
{ fieldName = "events"
, toField = (\(Timeline t) -> t.events)
, description = Text.fields.timeline.events
, coder = Json.fastDict (Json.listWithOne coderIBatchPTR)
}
)
( Json.field.optional.withDefault
{ fieldName = "filledBatches"
, toField = (\(Timeline t) -> t.filledBatches)
, description = Text.fields.timeline.filledBatches
, coder = Json.int
, default = ( 0, [] )
, defaultToString = String.fromInt
}
)
( Json.field.required
{ fieldName = "mostRecentBatch"
, toField = (\(Timeline t) -> t.mostRecentBatch)
, description = Text.fields.timeline.mostRecentBatch
, coder = coderITokenPTR
}
)
( Json.field.required
{ fieldName = "tokens"
, toField = (\(Timeline t) -> t.tokens)
, description = Text.fields.timeline.tokens
, coder = Hashdict.coder .name coderIToken
}
)
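With `coder` exposed, a timeline can be written out and read back in one step. A minimal sketch, mirroring the fuzzed round-trip test added at the bottom of this commit:

import Internal.Tools.Json as Json
import Internal.Values.Timeline as Timeline
import Json.Decode as D

-- Decoding returns the timeline together with any log messages.
restored =
    Timeline.empty
        |> Json.encode Timeline.coder
        |> D.decodeValue (Json.decode Timeline.coder)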
coderIBatch : Json.Coder IBatch
coderIBatch =
Json.object4
{ name = Text.docs.ibatch.name
, description = Text.docs.ibatch.description
, init = IBatch
}
( Json.field.required
{ fieldName = "events"
, toField = .events
, description = Text.fields.ibatch.events
, coder = Json.list Json.string
}
)
( Json.field.required
{ fieldName = "filter"
, toField = .filter
, description = Text.fields.ibatch.filter
, coder = Filter.coder
}
)
( Json.field.required
{ fieldName = "start"
, toField = .start
, description = Text.fields.ibatch.start
, coder = coderITokenPTR
}
)
( Json.field.required
{ fieldName = "end"
, toField = .end
, description = Text.fields.ibatch.end
, coder = coderITokenPTR
}
)
coderIBatchPTR : Json.Coder IBatchPTR
coderIBatchPTR =
Json.map
-{ name = Debug.todo "Add name"
-, description = Debug.todo "Add description"
-, back = IBatchPTR
-, forth = (\(IBatchPTR value) -> value)
+{ name = Text.docs.itoken.name
+, description = Text.docs.itoken.description
+, back = (\(IBatchPTR value) -> value)
+, forth = IBatchPTR
}
coderIBatchPTRValue
@@ -184,21 +272,21 @@ coderIBatchPTRValue = Json.int
coderIToken : Json.Coder IToken
coderIToken =
Json.object5
-{ name = "IToken"
-, description = Debug.todo "TODO: Add description"
+{ name = Text.docs.itoken.name
+, description = Text.docs.itoken.description
, init = IToken
}
( Json.field.required
{ fieldName = "name"
, toField = .name
-, description = Debug.todo "TODO: Add description"
+, description = Text.fields.itoken.name
, coder = coderTokenValue
}
)
( Json.field.optional.withDefault
{ fieldName = "starts"
, toField = .starts
-, description = Debug.todo "TODO: Add description"
+, description = Text.fields.itoken.starts
, coder = Json.set coderIBatchPTRValue
, default = ( Set.empty, [] )
, defaultToString = always "[]"
@@ -207,12 +295,30 @@ coderIToken =
( Json.field.optional.withDefault
{ fieldName = "ends"
, toField = .ends
-, description = Debug.todo "TODO: Add description"
+, description = Text.fields.itoken.ends
, coder = Json.set coderIBatchPTRValue
, default = ( Set.empty, [] )
, defaultToString = always "[]"
}
)
( Json.field.optional.withDefault
{ fieldName = "inFrontOf"
, toField = .inFrontOf
, description = Text.fields.itoken.inFrontOf
, coder = Json.set coderITokenPTRValue
, default = ( Set.empty, [] )
, defaultToString = always "[]"
}
)
( Json.field.optional.withDefault
{ fieldName = "behind"
, toField = .behind
, description = Text.fields.itoken.behind
, coder = Json.set coderITokenPTRValue
, default = ( Set.empty, [] )
, defaultToString = always "[]"
}
)
coderITokenPTR : Json.Coder ITokenPTR
coderITokenPTR =


@@ -428,7 +428,7 @@ suite =
|> Filter.encode
|> E.encode 0
|> D.decodeString Filter.decoder
-|> Expect.equal (Ok filter)
+|> Expect.equal (Ok (filter, []))
)
]
]


@@ -3,6 +3,7 @@ module Test.Tools.Iddict exposing (..)
import Expect
import Fuzz exposing (Fuzzer)
import Internal.Tools.Iddict as Iddict exposing (Iddict)
import Internal.Tools.Json as Json
import Json.Decode as D
import Json.Encode as E
import Test exposing (..)
@@ -73,21 +74,23 @@ empty =
)
, test "JSON encode -> decode -> empty"
(Iddict.empty
-|> Iddict.encode identity
-|> D.decodeValue (Iddict.decoder D.value)
+|> Iddict.encode Json.value
+|> D.decodeValue (Iddict.decoder Json.value)
|> Result.map Tuple.first
|> Expect.equal (Ok Iddict.empty)
|> always
)
, test "JSON encode"
(Iddict.empty
-|> Iddict.encode identity
+|> Iddict.encode Json.value
|> E.encode 0
-|> Expect.equal "{\"cursor\":0,\"dict\":{}}"
+|> Expect.equal "{\"dict\":{}}"
|> always
)
, test "JSON decode"
-("{\"cursor\":0,\"dict\":{}}"
-|> D.decodeString (Iddict.decoder D.value)
+("{\"dict\":{}}"
+|> D.decodeString (Iddict.decoder Json.value)
|> Result.map Tuple.first
|> Expect.equal (Ok Iddict.empty)
|> always
)
@@ -170,8 +173,9 @@ singleton =
"JSON encode -> decode -> singleton"
(\single ->
single
-|> Iddict.encode E.int
-|> D.decodeValue (Iddict.decoder D.int)
+|> Iddict.encode Json.int
+|> D.decodeValue (Iddict.decoder Json.int)
|> Result.map Tuple.first
|> Expect.equal (Ok single)
)
, fuzz Fuzz.int
@@ -179,7 +183,7 @@ singleton =
(\i ->
Iddict.singleton i
|> Tuple.second
-|> Iddict.encode E.int
+|> Iddict.encode Json.int
|> E.encode 0
|> Expect.equal ("{\"cursor\":1,\"dict\":{\"0\":" ++ String.fromInt i ++ "}}")
)
@@ -187,7 +191,8 @@ singleton =
"JSON decode"
(\i ->
("{\"cursor\":1,\"dict\":{\"0\":" ++ String.fromInt i ++ "}}")
-|> D.decodeString (Iddict.decoder D.int)
+|> D.decodeString (Iddict.decoder Json.int)
|> Result.map Tuple.first
|> Tuple.pair 0
|> Expect.equal (Iddict.singleton i |> Tuple.mapSecond Ok)
)


@@ -8,6 +8,7 @@ import Json.Decode as D
import Json.Encode as E
import Test exposing (..)
import Test.Filter.Timeline as TestFilter
import Internal.Tools.Json as Json
fuzzer : Fuzzer Timeline
@@ -188,8 +189,45 @@ suite =
|> Timeline.mostRecentEventsFrom filter "token_4"
|> Expect.equal [ [ "d", "e", "f" ] ]
)
-, fuzz TestFilter.fuzzer
+, fuzz3 TestFilter.fuzzer (Fuzz.list Fuzz.string) (Fuzz.pair (Fuzz.list Fuzz.string) (Fuzz.list Fuzz.string))
"Gaps can be bridged"
(\filter l1 (l2, l3) ->
Timeline.empty
|> Timeline.insert
{ events = l1
, filter = filter
, start = Just "token_1"
, end = "token_2"
}
|> Timeline.insert
{ events = l3
, filter = filter
, start = Just "token_3"
, end = "token_4"
}
|> Timeline.insert
{ events = l2
, filter = filter
, start = Just "token_2"
, end = "token_3"
}
|> Timeline.mostRecentEventsFrom filter "token_4"
|> Expect.equal [ List.concat [ l1, l2, l3 ] ]
)
]
, describe "JSON"
[ fuzz fuzzer "Encode + Decode gives same output"
(\timeline ->
timeline
|> Json.encode Timeline.coder
|> D.decodeValue (Json.decode Timeline.coder)
|> Result.map Tuple.first
|> Result.map (Timeline.mostRecentEvents Filter.pass)
|> Expect.equal (Ok <| Timeline.mostRecentEvents Filter.pass timeline)
)
]
, describe "Weird loops"
[ fuzz TestFilter.fuzzer "Weird loops stop looping"
(\filter ->
Timeline.empty
|> Timeline.insert
@@ -201,17 +239,20 @@ suite =
|> Timeline.insert
{ events = [ "d", "e", "f" ]
, filter = filter
-, start = Just "token_3"
-, end = "token_4"
-}
-|> Timeline.insert
-{ events = [ "g", "h" ]
-, filter = filter
, start = Just "token_2"
, end = "token_3"
}
|> Timeline.mostRecentEventsFrom filter "token_4" |> Timeline.insert
|> Expect.equal [ [ "a", "b", "c", "g", "h", "d", "e", "f" ] ] { events = [ "g", "h", "i" ]
, filter = filter
, start = Just "token_3"
, end = "token_2"
}
|> Timeline.mostRecentEventsFrom filter "token_2"
|> Expect.equal
[ [ "a", "b", "c" ]
, [ "d", "e", "f", "g", "h", "i" ]
]
) )
]
]