Add Timeline JSON coder

pull/17/head
Bram 2024-03-29 10:46:02 +01:00
parent 421e1f6ce7
commit ed78695213
8 changed files with 506 additions and 111 deletions

View File

@ -1,5 +1,5 @@
module Internal.Config.Text exposing
( docs, failures, fields, mappings
( docs, failures, fields, mappings, logs
, accessTokenFoundLocally, accessTokenExpired, accessTokenInvalid
, versionsFoundLocally, versionsReceived, versionsFailedToDecode
, unsupportedVersionForEndpoint
@ -27,7 +27,7 @@ You should only do this if you know what you're doing.
## Type documentation
@docs docs, failures, fields, mappings
@docs docs, failures, fields, mappings, logs
## API Authentication
@ -116,9 +116,14 @@ docs :
, envelope : TypeDocs
, event : TypeDocs
, hashdict : TypeDocs
, ibatch : TypeDocs
, iddict : TypeDocs
, itoken : TypeDocs
, mashdict : TypeDocs
, settings : TypeDocs
, stateManager : TypeDocs
, timeline : TypeDocs
, timelineFilter : TypeDocs
, unsigned : TypeDocs
}
docs =
@ -148,6 +153,24 @@ docs =
, "For example, the hashdict can store events and use their event id as their key."
]
}
, ibatch =
{ name = "IBatch"
, description =
[ "The internal batch tracks a patch of events on the Matrix timeline."
]
}
, iddict =
{ name = "Iddict"
, description =
[ "An iddict automatically handles creating appropriate keys by incrementally assiging a new key to new values."
]
}
, itoken =
{ name = "IToken"
, description =
[ "The IToken connects batches in the timeline and maintains relative order."
]
}
, mashdict =
{ name = "Mashdict"
, description =
@ -167,6 +190,18 @@ docs =
, "Instead of making the user loop through the room's timeline of events, the StateManager offers the user a dictionary-like experience to navigate through the Matrix room state."
]
}
, timeline =
{ name = "Timeline"
, description =
[ "The Timeline tracks events and orders them in a simple way for the user to view them."
]
}
, timelineFilter =
{ name = "Timeline Filter"
, description =
[ "The Timeline Filter allows the user to be very specific about which events they're interested in."
]
}
, unsigned =
{ name = "Unsigned Data"
, description =
@ -218,11 +253,41 @@ fields :
, eventType : Desc
, unsigned : Desc
}
, ibatch :
{ end : Desc
, events : Desc
, filter : Desc
, start : Desc
}
, iddict :
{ cursor : Desc
, dict : Desc
}
, itoken :
{ behind : Desc
, ends : Desc
, inFrontOf : Desc
, name : Desc
, starts : Desc
}
, settings :
{ currentVersion : Desc
, deviceName : Desc
, syncTime : Desc
}
, timeline :
{ batches : Desc
, events : Desc
, filledBatches : Desc
, mostRecentBatch : Desc
, tokens : Desc
}
, timelineFilter :
{ senders : Desc
, sendersAllowOthers : Desc
, types : Desc
, typesAllowOthers : Desc
}
, unsigned :
{ age : Desc
, prevContent : Desc
@ -293,6 +358,45 @@ fields =
[ "Contains optional extra information about the event."
]
}
, ibatch =
{ end =
[ "Pointer to the token that ends the internal batch."
]
, events =
[ "List of event IDs contained within the internal batch."
]
, filter =
[ "Filter that indicates how strictly the homeserver has selected when resulting into the given list of events."
]
, start =
[ "Pointer to the token that starts the internal batch."
]
}
, iddict =
{ cursor =
[ "To ensure uniqueness of all keys and to prevent the usage of keys that were previously assigned to older values, the iddict tracks which is the smallest non-negative integer that hasn't been used yet."
]
, dict =
[ "Dictionary that contains all values stored in the iddict."
]
}
, itoken =
{ behind =
[ "This token is behind all tokens in this field."
]
, ends =
[ "This token is in front of the batches in this field."
]
, inFrontOf =
[ "This token is ahead of all tokens in this field."
]
, name =
[ "Opaque value provided by the homeserver."
]
, starts =
[ "This token is at the start of the batches in this field."
]
}
, settings =
{ currentVersion =
[ "Indicates the current version of the Elm SDK."
@ -304,6 +408,40 @@ fields =
[ "Indicates the frequency in miliseconds with which the Elm SDK should long-poll the /sync endpoint."
]
}
, timeline =
{ batches =
[ "Dictionary storing all event batches in the timeline."
]
, events =
[ "Mapping that allows us to quickly zoom in on an event."
]
, filledBatches =
[ "Counter that tracks how many batches are kept by the timeline."
, "Batches are only counted if they are filled by at least one event."
]
, mostRecentBatch =
[ "Tracks the most recent batch that was sent by the homeserver - usually through `/sync`"
]
, tokens =
[ "Index of all the tokens used to connect event batches on the timeline."
]
}
, timelineFilter =
{ senders =
[ "A list of senders that is considered an exception to the infinite pool of \"other\" users"
]
, sendersAllowOthers =
[ "Value that determines whether the infinite pool of others is included."
, "If False, only the users mentioned in `senders` are included. If True, then all users who aren't mentioned in `senders` are included."
]
, types =
[ "A list of event types that is considered an exception to the infinite pool of \"other\" event types."
]
, typesAllowOthers =
[ "Value that determines whether the infinite pool of others is included."
, "If False, only the event types mentioned in `types` are included. If True, then all users who aren't mentioned in `types` are included."
]
}
, unsigned =
{ age =
[ "The time in milliseconds that has elapsed since the event was sent. This field is generated by the local homeserver, and may be incorrect if the local time on at least one of the two servers is out of sync, which can cause the age to either be negative or greater than it actually is."
@ -347,6 +485,21 @@ leakingValueFound leaking_value =
"Found leaking value : " ++ leaking_value
{-|
-}
logs : { keyIsNotAnInt : String -> String }
logs =
{ keyIsNotAnInt =
(\key ->
String.concat
[ "Encountered a key `"
, key
, "` that cannot be converted to an Int"
]
)
}
{-| Function descriptions
-}
mappings : { itokenPTR : TypeDocs }
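
A minimal sketch of how the new `logs.keyIsNotAnInt` message could be turned into a warning, assuming `Internal.Config.Log` exposes the `log.warn` constructor used by `Internal.Tools.Json` later in this commit; the `badKeyWarning` helper is illustrative only:

import Internal.Config.Log as Log exposing (Log)
import Internal.Config.Text as Text

-- Illustrative helper (not part of this commit): wrap the message in a warning log.
badKeyWarning : String -> Log
badKeyWarning key =
    Log.log.warn (Text.logs.keyIsNotAnInt key)

-- badKeyWarning "abc" produces a warning reading:
-- "Encountered a key `abc` that cannot be converted to an Int"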

View File

@ -4,7 +4,7 @@ module Internal.Filter.Timeline exposing
, match, run
, and
, subsetOf
, encode, decoder
, coder, encode, decoder
)
{-|
@ -43,10 +43,12 @@ for interacting with the Matrix API.
## JSON coders
@docs encode, decoder
@docs coder, encode, decoder
-}
import Internal.Config.Text as Text
import Internal.Tools.Json as Json
import Json.Decode as D
import Json.Encode as E
import Set exposing (Set)
@ -159,44 +161,64 @@ and (Filter f1) (Filter f2) =
else
stdAnd
coder : Json.Coder Filter
coder =
Json.object4
{ name = Text.docs.timelineFilter.name
, description = Text.docs.timelineFilter.description
, init =
(\a b c d ->
Filter
{ senders = a, sendersAllowOthers = b
, types = c, typesAllowOthers = d
}
)
}
( Json.field.optional.withDefault
{ fieldName = "senders"
, toField = (\(Filter f) -> f.senders)
, description = Text.fields.timelineFilter.senders
, coder = Json.set Json.string
, default = ( Set.empty, [] )
, defaultToString = always "[]"
}
)
( Json.field.required
{ fieldName = "sendersAllowOthers"
, toField = (\(Filter f) -> f.sendersAllowOthers)
, description = Text.fields.timelineFilter.sendersAllowOthers
, coder = Json.bool
}
)
( Json.field.optional.withDefault
{ fieldName = "types"
, toField = (\(Filter f) -> f.types)
, description = Text.fields.timelineFilter.types
, coder = Json.set Json.string
, default = ( Set.empty, [] )
, defaultToString = always "[]"
}
)
( Json.field.required
{ fieldName = "typesAllowOthers"
, toField = (\(Filter f) -> f.typesAllowOthers)
, description = Text.fields.timelineFilter.typesAllowOthers
, coder = Json.bool
}
)
{-| Decode a Filter from a JSON value.
-}
decoder : D.Decoder Filter
decoder : Json.Decoder Filter
decoder =
D.map4
(\s sb t tb ->
Filter
{ senders = s
, sendersAllowOthers = sb
, types = t
, typesAllowOthers = tb
}
)
(D.string
|> D.list
|> D.map Set.fromList
|> D.field "senders"
)
(D.field "sendersAllowOthers" D.bool)
(D.string
|> D.list
|> D.map Set.fromList
|> D.field "types"
)
(D.field "typesAllowOthers" D.bool)
Json.decode coder
{-| Encode a Filter into a JSON value.
-}
encode : Filter -> E.Value
encode (Filter f) =
E.object
[ ( "senders", E.set E.string f.senders )
, ( "sendersAllowOthers", E.bool f.sendersAllowOthers )
, ( "types", E.set E.string f.types )
, ( "typesAllowOthers", E.bool f.typesAllowOthers )
]
encode : Json.Encoder Filter
encode =
Json.encode coder
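
With `coder` in place, `encode` and `decoder` are thin wrappers around the generic JSON module, so a Filter round-trips as a value plus a list of decoding logs. A minimal sketch mirroring the updated test suite, assuming the `Filter` type is exposed (`roundTrip` is an illustrative name):

import Internal.Filter.Timeline as Filter exposing (Filter)
import Json.Decode as D
import Json.Encode as E

-- Illustrative round trip: the decoder yields the Filter together with any logs,
-- so Tuple.first recovers the value itself.
roundTrip : Filter -> Result D.Error Filter
roundTrip filter =
    filter
        |> Filter.encode
        |> E.encode 0
        |> D.decodeString Filter.decoder
        |> Result.map Tuple.first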
{-| Allow no events. This filter is likely quite useless in practice, but it is

View File

@ -3,7 +3,7 @@ module Internal.Tools.Iddict exposing
, empty, singleton, insert, map, remove
, isEmpty, member, get, size
, keys, values
, encode, decoder
, coder, encode, decoder
)
{-| The id-dict is a data type that lets us store values in a dictionary using
@ -36,11 +36,13 @@ do not need to generate identifiers yourself.
## JSON coders
@docs encode, decoder
@docs coder, encode, decoder
-}
import FastDict as Dict exposing (Dict)
import Internal.Config.Text as Text
import Internal.Tools.Json as Json
import Json.Decode as D
import Json.Encode as E
@ -53,42 +55,47 @@ type Iddict a
, dict : Dict Int a
}
coder : Json.Coder a -> Json.Coder (Iddict a)
coder x =
Json.object2
{ name = Text.docs.iddict.name
, description = Text.docs.iddict.description
, init =
(\c d ->
Iddict
{ cursor =
Dict.keys d
|> List.maximum
|> Maybe.withDefault -1
|> (+) 1
|> max (Dict.size d)
|> max c
, dict = d
}
)
}
( Json.field.optional.withDefault
{ fieldName = "cursor"
, toField = (\(Iddict i) -> i.cursor)
, description = Text.fields.iddict.cursor
, coder = Json.int
, default = ( 0, [] )
, defaultToString = String.fromInt
}
)
( Json.field.required
{ fieldName = "dict"
, toField = (\(Iddict i) -> i.dict)
, description = Text.fields.iddict.dict
, coder = Json.fastIntDict x
}
)
{-| Decode an id-dict from a JSON value.
-}
decoder : D.Decoder a -> D.Decoder (Iddict a)
decoder xDecoder =
D.map2
(\c pairs ->
let
dict : Dict Int a
dict =
pairs
|> List.filterMap
(\( k, v ) ->
k
|> String.toInt
|> Maybe.map (\n -> ( n, v ))
)
|> Dict.fromList
in
Iddict
{ cursor =
Dict.keys dict
-- Larger than all values in the list
|> List.map ((+) 1)
|> List.maximum
|> Maybe.withDefault 0
|> max (Dict.size dict)
-- At least the dict size
|> max c
-- At least the given value
, dict = dict
}
)
(D.field "cursor" D.int)
(D.field "dict" <| D.keyValuePairs xDecoder)
decoder : Json.Coder a -> Json.Decoder (Iddict a)
decoder x =
Json.decode (coder x)
{-| Create an empty id-dict.
@ -103,16 +110,9 @@ empty =
{-| Encode an id-dict to a JSON value.
-}
encode : (a -> E.Value) -> Iddict a -> E.Value
encode encodeX (Iddict d) =
E.object
[ ( "cursor", E.int d.cursor )
, ( "dict"
, d.dict
|> Dict.toCoreDict
|> E.dict String.fromInt encodeX
)
]
encode : Json.Coder a -> Json.Encoder (Iddict a)
encode x =
Json.encode (coder x)
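
The `coder`-based API replaces the raw encoder/decoder arguments. A minimal round-trip sketch for an `Iddict Int`, mirroring the updated tests (`roundTrip` is an illustrative name):

import Internal.Tools.Iddict as Iddict exposing (Iddict)
import Internal.Tools.Json as Json
import Json.Decode as D

-- Illustrative round trip: Json.int supplies the value coder, and the decoder
-- returns the Iddict together with any logs.
roundTrip : Iddict Int -> Result D.Error (Iddict Int)
roundTrip dict =
    dict
        |> Iddict.encode Json.int
        |> D.decodeValue (Iddict.decoder Json.int)
        |> Result.map Tuple.first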
{-| Get a value from the id-dict using its key.

View File

@ -3,7 +3,7 @@ module Internal.Tools.Json exposing
, Encoder, encode, Decoder, decode, Value
, succeed, fail, andThen, lazy, map
, Docs(..), RequiredField(..), toDocs
, list, slowDict, fastDict, set, maybe
, list, listWithOne, slowDict, fastDict, fastIntDict, set, maybe
, Field, field
, object2, object3, object4, object5, object6, object7, object8, object9, object10, object11
)
@ -49,7 +49,7 @@ module to build its encoders and decoders.
## Data types
@docs list, slowDict, fastDict, set, maybe
@docs list, listWithOne, slowDict, fastDict, fastIntDict, set, maybe
## Objects
@ -68,7 +68,8 @@ Once all fields are constructed, the user can create JSON objects.
import Dict as SlowDict
import FastDict
import Internal.Config.Log exposing (Log)
import Internal.Config.Log as Log exposing (Log)
import Internal.Config.Text as Text
import Internal.Tools.DecodeExtra as D
import Internal.Tools.EncodeExtra as E
import Json.Decode as D
@ -140,8 +141,10 @@ type Docs
| DocsDict Docs
| DocsFloat
| DocsInt
| DocsIntDict Docs
| DocsLazy (() -> Docs)
| DocsList Docs
| DocsListWithOne Docs
| DocsMap (Descriptive { content : Docs })
| DocsObject
(Descriptive
@ -291,6 +294,46 @@ fastDict (Coder old) =
, docs = DocsDict old.docs
}
{-| Define a fast dict where the keys are integers, not strings.
-}
fastIntDict : Coder value -> Coder (FastDict.Dict Int value)
fastIntDict (Coder old) =
Coder
{ encoder = FastDict.toCoreDict >> E.dict String.fromInt old.encoder
, decoder =
old.decoder
|> D.keyValuePairs
|> D.map
(\items ->
( items
|> List.map (Tuple.mapSecond Tuple.first)
|> List.filterMap
(\(k, v) ->
Maybe.map (\a -> (a, v)) (String.toInt k)
)
|> FastDict.fromList
, List.concat
[ items
|> List.map Tuple.first
|> List.filter
(\k ->
case String.toInt k of
Just _ ->
False
Nothing ->
True
)
|> List.map Text.logs.keyIsNotAnInt
|> List.map Log.log.warn
, items
|> List.map Tuple.second
|> List.concatMap Tuple.second
]
)
)
, docs = DocsIntDict old.docs
}
{-| Create a new field using any of the three provided options.
@ -466,6 +509,31 @@ list (Coder old) =
, docs = DocsList old.docs
}
{-| Define a list that has at least one value
-}
listWithOne : Coder a -> Coder (a, List a)
listWithOne (Coder old) =
Coder
{ encoder = (\(h, t) -> E.list old.encoder (h :: t))
, decoder =
old.decoder
|> D.list
|> D.andThen
(\items ->
case items of
[] ->
D.fail "Expected at least one value in list"
( h, l1) :: t ->
D.succeed
( (h, List.map Tuple.first t)
, List.concatMap Tuple.second t
|> List.append l1
)
)
, docs = DocsListWithOne old.docs
}
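
A short sketch of how `fastIntDict` and `listWithOne` could be used to build coders; the names `intKeyed` and `nonEmpty` are illustrative:

import FastDict
import Internal.Tools.Json as Json

-- Illustrative: a dictionary keyed by Int, storing String values.
-- Non-integer keys are dropped during decoding and reported as warning logs.
intKeyed : Json.Coder (FastDict.Dict Int String)
intKeyed =
    Json.fastIntDict Json.string

-- Illustrative: a list that must contain at least one String,
-- modelled as a (head, tail) pair.
nonEmpty : Json.Coder ( String, List String )
nonEmpty =
    Json.listWithOne Json.string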
{-| Map a value.

View File

@ -3,6 +3,7 @@ module Internal.Values.Timeline exposing
, empty, singleton
, mostRecentEvents, mostRecentEventsFrom
, insert
, coder
)
{-|
@ -168,13 +169,100 @@ type Timeline
type alias TokenValue =
String
coder : Json.Coder Timeline
coder =
Json.object5
{ name = Text.docs.timeline.name
, description = Text.docs.timeline.description
, init =
(\a b c d e ->
Timeline
{ batches = a, events = b, filledBatches = c
, mostRecentBatch = d, tokens = e
}
)
}
( Json.field.required
{ fieldName = "batches"
, toField = (\(Timeline t) -> t.batches)
, description = Text.fields.timeline.batches
, coder = Iddict.coder coderIBatch
}
)
( Json.field.required
{ fieldName = "events"
, toField = (\(Timeline t) -> t.events)
, description = Text.fields.timeline.events
, coder = Json.fastDict (Json.listWithOne coderIBatchPTR)
}
)
( Json.field.optional.withDefault
{ fieldName = "filledBatches"
, toField = (\(Timeline t) -> t.filledBatches)
, description = Text.fields.timeline.filledBatches
, coder = Json.int
, default = ( 0, [] )
, defaultToString = String.fromInt
}
)
( Json.field.required
{ fieldName = "mostRecentBatch"
, toField = (\(Timeline t) -> t.mostRecentBatch)
, description = Text.fields.timeline.mostRecentBatch
, coder = coderITokenPTR
}
)
( Json.field.required
{ fieldName = "tokens"
, toField = (\(Timeline t) -> t.tokens)
, description = Text.fields.timeline.tokens
, coder = Hashdict.coder .name coderIToken
}
)
coderIBatch : Json.Coder IBatch
coderIBatch =
Json.object4
{ name = Text.docs.ibatch.name
, description = Text.docs.ibatch.description
, init = IBatch
}
( Json.field.required
{ fieldName = "events"
, toField = .events
, description = Text.fields.ibatch.events
, coder = Json.list Json.string
}
)
( Json.field.required
{ fieldName = "filter"
, toField = .filter
, description = Text.fields.ibatch.filter
, coder = Filter.coder
}
)
( Json.field.required
{ fieldName = "start"
, toField = .start
, description = Text.fields.ibatch.start
, coder = coderITokenPTR
}
)
( Json.field.required
{ fieldName = "end"
, toField = .end
, description = Text.fields.ibatch.end
, coder = coderITokenPTR
}
)
coderIBatchPTR : Json.Coder IBatchPTR
coderIBatchPTR =
Json.map
{ name = Debug.todo "Add name"
, description = Debug.todo "Add description"
, back = IBatchPTR
, forth = (\(IBatchPTR value) -> value)
{ name = Text.docs.itoken.name
, description = Text.docs.itoken.description
, back = (\(IBatchPTR value) -> value)
, forth = IBatchPTR
}
coderIBatchPTRValue
@ -184,21 +272,21 @@ coderIBatchPTRValue = Json.int
coderIToken : Json.Coder IToken
coderIToken =
Json.object5
{ name = "IToken"
, description = Debug.todo "TODO: Add description"
{ name = Text.docs.itoken.name
, description = Text.docs.itoken.description
, init = IToken
}
( Json.field.required
{ fieldName = "name"
, toField = .name
, description = Debug.todo "TODO: Add description"
, description = Text.fields.itoken.name
, coder = coderTokenValue
}
)
( Json.field.optional.withDefault
{ fieldName = "starts"
, toField = .starts
, description = Debug.todo "TODO: Add description"
, description = Text.fields.itoken.starts
, coder = Json.set coderIBatchPTRValue
, default = ( Set.empty, [] )
, defaultToString = always "[]"
@ -207,12 +295,30 @@ coderIToken =
( Json.field.optional.withDefault
{ fieldName = "ends"
, toField = .ends
, description = Debug.todo "TODO: Add description"
, description = Text.fields.itoken.ends
, coder = Json.set coderIBatchPTRValue
, default = ( Set.empty, [] )
, defaultToString = always "[]"
}
)
( Json.field.optional.withDefault
{ fieldName = "inFrontOf"
, toField = .inFrontOf
, description = Text.fields.itoken.inFrontOf
, coder = Json.set coderITokenPTRValue
, default = ( Set.empty, [] )
, defaultToString = always "[]"
}
)
( Json.field.optional.withDefault
{ fieldName = "behind"
, toField = .behind
, description = Text.fields.itoken.behind
, coder = Json.set coderITokenPTRValue
, default = ( Set.empty, [] )
, defaultToString = always "[]"
}
)
coderITokenPTR : Json.Coder ITokenPTR
coderITokenPTR =

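Because `Timeline.coder` is an ordinary `Json.Coder`, serialisation goes through the generic helpers. A minimal sketch mirroring the new test further down in this commit (`roundTrip` is an illustrative name):

import Internal.Tools.Json as Json
import Internal.Values.Timeline as Timeline exposing (Timeline)
import Json.Decode as D

-- Illustrative round trip: encode the Timeline, decode it back, and discard the logs.
roundTrip : Timeline -> Result D.Error Timeline
roundTrip timeline =
    timeline
        |> Json.encode Timeline.coder
        |> D.decodeValue (Json.decode Timeline.coder)
        |> Result.map Tuple.first
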
View File

@ -428,7 +428,7 @@ suite =
|> Filter.encode
|> E.encode 0
|> D.decodeString Filter.decoder
|> Expect.equal (Ok filter)
|> Expect.equal (Ok (filter, []))
)
]
]

View File

@ -3,6 +3,7 @@ module Test.Tools.Iddict exposing (..)
import Expect
import Fuzz exposing (Fuzzer)
import Internal.Tools.Iddict as Iddict exposing (Iddict)
import Internal.Tools.Json as Json
import Json.Decode as D
import Json.Encode as E
import Test exposing (..)
@ -73,21 +74,23 @@ empty =
)
, test "JSON encode -> decode -> empty"
(Iddict.empty
|> Iddict.encode identity
|> D.decodeValue (Iddict.decoder D.value)
|> Iddict.encode Json.value
|> D.decodeValue (Iddict.decoder Json.value)
|> Result.map Tuple.first
|> Expect.equal (Ok Iddict.empty)
|> always
)
, test "JSON encode"
(Iddict.empty
|> Iddict.encode identity
|> Iddict.encode Json.value
|> E.encode 0
|> Expect.equal "{\"cursor\":0,\"dict\":{}}"
|> Expect.equal "{\"dict\":{}}"
|> always
)
, test "JSON decode"
("{\"cursor\":0,\"dict\":{}}"
|> D.decodeString (Iddict.decoder D.value)
("{\"dict\":{}}"
|> D.decodeString (Iddict.decoder Json.value)
|> Result.map Tuple.first
|> Expect.equal (Ok Iddict.empty)
|> always
)
@ -170,8 +173,9 @@ singleton =
"JSON encode -> decode -> singleton"
(\single ->
single
|> Iddict.encode E.int
|> D.decodeValue (Iddict.decoder D.int)
|> Iddict.encode Json.int
|> D.decodeValue (Iddict.decoder Json.int)
|> Result.map Tuple.first
|> Expect.equal (Ok single)
)
, fuzz Fuzz.int
@ -179,7 +183,7 @@ singleton =
(\i ->
Iddict.singleton i
|> Tuple.second
|> Iddict.encode E.int
|> Iddict.encode Json.int
|> E.encode 0
|> Expect.equal ("{\"cursor\":1,\"dict\":{\"0\":" ++ String.fromInt i ++ "}}")
)
@ -187,7 +191,8 @@ singleton =
"JSON decode"
(\i ->
("{\"cursor\":1,\"dict\":{\"0\":" ++ String.fromInt i ++ "}}")
|> D.decodeString (Iddict.decoder D.int)
|> D.decodeString (Iddict.decoder Json.int)
|> Result.map Tuple.first
|> Tuple.pair 0
|> Expect.equal (Iddict.singleton i |> Tuple.mapSecond Ok)
)

View File

@ -8,6 +8,7 @@ import Json.Decode as D
import Json.Encode as E
import Test exposing (..)
import Test.Filter.Timeline as TestFilter
import Internal.Tools.Json as Json
fuzzer : Fuzzer Timeline
@ -188,8 +189,45 @@ suite =
|> Timeline.mostRecentEventsFrom filter "token_4"
|> Expect.equal [ [ "d", "e", "f" ] ]
)
, fuzz TestFilter.fuzzer
, fuzz3 TestFilter.fuzzer (Fuzz.list Fuzz.string) (Fuzz.pair (Fuzz.list Fuzz.string) (Fuzz.list Fuzz.string))
"Gaps can be bridged"
(\filter l1 (l2, l3) ->
Timeline.empty
|> Timeline.insert
{ events = l1
, filter = filter
, start = Just "token_1"
, end = "token_2"
}
|> Timeline.insert
{ events = l3
, filter = filter
, start = Just "token_3"
, end = "token_4"
}
|> Timeline.insert
{ events = l2
, filter = filter
, start = Just "token_2"
, end = "token_3"
}
|> Timeline.mostRecentEventsFrom filter "token_4"
|> Expect.equal [ List.concat [ l1, l2, l3 ] ]
)
]
, describe "JSON"
[ fuzz fuzzer "Encode + Decode gives same output"
(\timeline ->
timeline
|> Json.encode Timeline.coder
|> D.decodeValue (Json.decode Timeline.coder)
|> Result.map Tuple.first
|> Result.map (Timeline.mostRecentEvents Filter.pass)
|> Expect.equal (Ok <| Timeline.mostRecentEvents Filter.pass timeline)
)
]
, describe "Weird loops"
[ fuzz TestFilter.fuzzer "Weird loops stop looping"
(\filter ->
Timeline.empty
|> Timeline.insert
@ -201,17 +239,20 @@ suite =
|> Timeline.insert
{ events = [ "d", "e", "f" ]
, filter = filter
, start = Just "token_3"
, end = "token_4"
}
|> Timeline.insert
{ events = [ "g", "h" ]
, filter = filter
, start = Just "token_2"
, end = "token_3"
}
|> Timeline.mostRecentEventsFrom filter "token_4"
|> Expect.equal [ [ "a", "b", "c", "g", "h", "d", "e", "f" ] ]
|> Timeline.insert
{ events = [ "g", "h", "i" ]
, filter = filter
, start = Just "token_3"
, end = "token_2"
}
|> Timeline.mostRecentEventsFrom filter "token_2"
|> Expect.equal
[ [ "a", "b", "c" ]
, [ "d", "e", "f", "g", "h", "i" ]
]
)
]
]