[ {pubKey, _PubKey}, {links, _Links}, {postType, 2}, {timestamp, _TimeStamp}],[ {infos, Infos} ]
[ {public_key, _PubKey}, {signature, _Sig}, {links, _Links}, {type, 2}, {timestamp, _TimeStamp}, {hash, _Hash}],[ {infos, Infos} ]
[ {pubKey, _PubKey}, {links, _Links}, {postType, 4}, {timestamp, _TimeStamp}],[ {channel, Channel} ]
[ {public_key, _PubKey}, {signature, _Sig}, {links, _Links}, {type, 4}, {timestamp, _TimeStamp}, {hash, _Hash}],[ {channel, Channel} ]
[ {pubKey, _PubKey}, {links, _Links}, {postType, 5}, {timestamp, _TimeStamp}], [ {channel, Channel} ]
[ {public_key, _PubKey}, {signature, _Sig}, {links, _Links}, {type, 5}, {timestamp, _TimeStamp}, {hash, _Hash}], [ {channel, Channel} ]
% SPDX-FileCopyrightText: 2023 Henry Bubert
%
% SPDX-License-Identifier: LGPL-2.1-or-later

-module(cable_database_suite).

-include_lib("eunit/include/eunit.hrl").

-define(current_function_name(),
        atom_to_list(element(2, element(2, process_info(self(), current_function))))).

open_close_test() ->
    TestName = ?current_function_name(),
    Db = open_db(TestName),
    db:close(Db).

post_text_test() ->
    [{binary, Bin}, {obj, TestObj}] = examples:post_text(),
    Decoded = posts:decode(Bin),
    [[ {public_key, _PubKey}, {signature, _Sig}, {links, _Links}, {type, 0}, {timestamp, _TimeStamp}, {hash, Hash}],
     [ {channel, Channel}, {text, Text}]] = Decoded,
    Db = open_db(?current_function_name()),
    {ok, Id} = db:savePost(Db, Decoded),
    ?assertEqual(1, Id),
    {ok, LoadedPost} = db:loadPost(Db, Id),
    ?assertEqual(Decoded, LoadedPost),
    {ok, LoadedByHash} = db:loadPost(Db, Hash),
    ?assertEqual(LoadedPost, LoadedByHash),
    db:close(Db).

% Helpers
%%%%%%%%%

open_db(Name) ->
    file:make_dir("test-dbs"), %% TODO: better cleanup
    {ok, Db} = db:start_link("test-dbs/" ++ Name ++ ".db"),
    Db.
create table posts (
  -- these exist for all posts
  --
  id integer primary key AUTOINCREMENT,
  hash blob unique not null check (length(hash) = 64),
  type integer not null check (type >= 0 AND type <= 5),

  -- milliseconds since UNIX Epoch
  timestamp integer not null check (timestamp > 0),

  -- public key that authored this post
  public_key blob not null,
  signature blob not null,

  -- depends on postType
  channel text, -- all but delete have this one
  -- join & leave don't have other fields than channel
  text text,
  topic text,
  deletedHashes blob, -- concatenated
  infos blob, -- encoded as (json?) map {k=>v, k=>v}

  -- `post/text` | a textual chat message, posted to a channel |
  CONSTRAINT is_text check (type != 0 OR (channel is not null AND text is not null)),
  -- `post/delete` | the deletion of a previously published post |
  CONSTRAINT is_delete check (type != 1 OR (deletedHashes is not null)),
  -- `post/info` | set or clear informative key/value pairs on a user
  CONSTRAINT is_info check (type != 2 OR (channel is not null AND infos is not null)),
  -- `post/topic` | set or clear a channel's topic string
  CONSTRAINT is_topic check (type != 3 OR (channel is not null AND topic is not null)),
  -- `post/join` | announce membership to a channel
  CONSTRAINT is_join check (type != 4 OR (channel is not null)),
  -- `post/leave` | announce cessation of membership to a channel
  CONSTRAINT is_leave check (type != 5 OR (channel is not null))
);

create table links (
  source blob not null,
  parent blob not null,
  foreign key(source) references posts(hash),
  foreign key(parent) references posts(hash)
);

CREATE INDEX parent_idx ON links(parent);
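-- Example (sketch, not part of schema.sql): exercising the per-type CHECK
-- constraints above. randomblob() stands in for real hashes and keys;
-- 'default' and 'hello' are placeholder values.

-- accepted: a post/text row (type 0) carries both channel and text
insert into posts (hash, type, timestamp, public_key, signature, channel, text)
values (randomblob(64), 0, 1681000000000, randomblob(32), randomblob(64), 'default', 'hello');

-- rejected by the is_text constraint: a post/text row missing its text body
insert into posts (hash, type, timestamp, public_key, signature, channel)
values (randomblob(64), 0, 1681000000000, randomblob(32), randomblob(64), 'default');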
## Schema sketch

Polymorphic table for all kinds of posts

```sql
create table posts(
  -- these exist for all posts
  --
  id integer primary key AUTOINCREMENT,
  hash blob unique not null,
  type integer not null check (type >= 0 AND type <= 5),

  -- milliseconds since UNIX Epoch
  timestamp integer not null check (timestamp > 0),

  -- concatenated
  link_hashes blob not null,

  -- public key that authored this post
  public_key blob not null,
  signature blob not null,

  -- per-type columns (channel, text, topic, deletedHashes, infos) go here,
  -- as in schema.sql
  -- `post/text` | a textual chat message, posted to a channel |
  CONSTRAINT is_text check (type != 0 OR (channel is not null AND text is not null)),
  -- `post/delete` | the deletion of a previously published post |
  CONSTRAINT is_delete check (type != 1 OR (deletedHashes is not null)),
  -- `post/info` | set or clear informative key/value pairs on a user
  CONSTRAINT is_info check (type != 2 OR (channel is not null AND infos is not null)),
  -- `post/topic` | set or clear a channel's topic string
  CONSTRAINT is_topic check (type != 3 OR (channel is not null AND topic is not null)),
  -- `post/join` | announce membership to a channel
  CONSTRAINT is_join check (type != 4 OR (channel is not null)),
  -- `post/leave` | announce cessation of membership to a channel
  CONSTRAINT is_leave check (type != 5 OR (channel is not null))
);
```
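Reading back out of the polymorphic table then just means filtering on `type`. A minimal sketch, assuming the per-type columns (`channel`, `text`, `topic`) from the full schema; `'default'` is a placeholder channel name:

```sql
select channel, text  from posts where type = 0;                          -- post/text
select channel, topic from posts where type = 3;                          -- post/topic
select count(*)       from posts where type = 4 and channel = 'default';  -- post/join announcements
```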
create table links(
  id integer primary key,
  title text not null,
  path text not null
);

insert into links (id, title, path) VALUES
  (1, "things", "/1/"),
  (2, "fruit", "/1/2/"),
  (3, "apples", "/1/2/3/"),
  (4, "oranges", "/1/2/4/");

-- find them
SELECT *
FROM links
WHERE id NOT IN (
  SELECT DISTINCT substr(path, 1, instr(substr(path, 2), '/') + 1)
  FROM links
);
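-- Alternative sketch (not used elsewhere in these notes): instead of a
-- materialized path column, the source/parent links table from schema.sql can
-- be walked with a recursive CTE. This collects a post and, transitively,
-- every post it links to; :start is a placeholder for the starting post's hash.
WITH RECURSIVE thread(hash) AS (
  SELECT :start
  UNION
  SELECT l.parent FROM links l JOIN thread t ON l.source = t.hash
)
SELECT p.* FROM posts p JOIN thread t ON p.hash = t.hash;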
select parent, count(*)
from links
where source in (select hash from posts where channel = ?)
group by parent;

-- TODO look at this SO answer if zero counts are not listed:
-- https://stackoverflow.com/a/22888342
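-- Sketch for the TODO above (not necessarily what the linked answer does):
-- start from the posts in a channel and LEFT JOIN the links pointing at them,
-- so posts that nothing links to still show up with a count of 0.
select p.hash, count(l.source) as incoming_links
from posts p
left join links l on l.parent = p.hash
where p.channel = ?
group by p.hash;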
%% intended public api

close(Db) ->
    gen_server:stop(Db).

savePost(Db, Post) ->
    gen_server:call(Db, {savePost, Post}).

loadPost(Db, IntId) when is_integer(IntId), IntId >= 0 ->
    gen_server:call(Db, {loadPostById, IntId});
loadPost(Db, Hash) when is_binary(Hash) ->
    gen_server:call(Db, {loadPostByHash, Hash}).
handle_call({savePost, Post}, _From, [{sql, Db}, _] = State) ->
    [Header, Body] = Post,
    {value, {links, Links}, HeaderSansLinks} = lists:keytake(links, 1, Header),
    %% construct edges
    PostHash = proplists:get_value(hash, Header),
    lists:foreach(fun(Link) ->
            Row = [{source, {blob, PostHash}}, {parent, {blob, Link}}],
            {rowid, _EdgeId} = sqlite3:write(Db, links, Row),
            io:format("[DB/Debug] Inserted Edge: ~p~n", [_EdgeId])
        end, Links),
    PostInsert = HeaderSansLinks ++ case proplists:get_value(type, Header) of
        0 -> Body;
        1 ->
            Concat = iolist_to_binary(proplists:get_value(hashes, Body)),
            [{deletedHashes, Concat}];
        _TODO -> erlang:error("TODO")
    end,
    Prepared = lists:map(fun({Col, Val}) ->
            {Col, case Val of
                _ when is_binary(Val) -> {blob, Val};
                _ -> Val
            end}
        end, PostInsert),
    %% [io:format("[Db/DEBUG] ~p~n", [I]) || I <- Prepared],
    %% {Cols, Vals} = lists:unzip(Prepared),
    %% [io:format("[DEBUG] Val:~p~n", [I]) || I <- Vals],
    {rowid, Id} = sqlite3:write(Db, posts, Prepared),
    {reply, {ok, Id}, State};
handle_call({addchannel, Name}, _From, State = [{sql, Db}, _]) ->
handle_call({loadPostByHash, Hash}, _From, [{sql, Db}, _] = State) when is_binary(Hash) ->
    [{columns, _Cols}, {rows, [Row]}] = sqlite3:read(Db, posts, {hash, {blob, Hash}}),
    {reply, {error, "TODO:loadHash"}, State};

handle_call({loadPostById, IntId}, _From, [{sql, Db}, _] = State) when is_integer(IntId), IntId >= 0 ->
    ExpectedCols = ["id", "hash", "type", "timestamp", "public_key", "signature", "channel", "text", "topic", "deletedHashes", "infos"],
    [{columns, ExpectedCols}, {rows, [Row]}] = sqlite3:read(Db, posts, {id, IntId}),
    {IntId, {blob, Hash}, Type, Timestamp, {blob, PubKey}, {blob, Sig}, Channel, TextBlob, Topic, Deleteds, Infos} = Row,
    [{columns, ["source", "parent"]}, {rows, LinkRows}] = sqlite3:read(Db, links, {source, {blob, Hash}}),
    Links = [Parent || {{blob, _Source}, {blob, Parent}} <- LinkRows],
    [io:format("[DEBUG] Links:~p~n", [I]) || I <- Links],
    Header = [ {public_key, PubKey}, {signature, Sig}, {links, Links}, {type, Type}, {timestamp, Timestamp}, {hash, Hash}],
    Body = case Type of
        0 ->
            {blob, Text} = TextBlob,
            %% TODO: utf8 channels
            [{channel, binary_to_list(Channel)}, {text, Text}];
        _ -> io:format("TODO type: ~p~n", [Type])
    end,
    {reply, {ok, [Header, Body]}, State};

handle_call({addchannel, Name}, _From, [{sql, Db}, _] = State) ->
ColId = {id, integer, [{primary_key, [asc, autoincrement]}]},
Tables = #{
    channels => [ColId, {name, text, not_null}],
    users    => [ColId, {pubkey, blob, not_null}, {name, text, not_null}],
    posts    => [ColId, {pubkey, blob, not_null}, {type, integer, not_null}]
},
case tables_exist(Db, maps:keys(Tables)) of
    missing ->
        CreateTable = fun(T, Cols) ->
                ok = sqlite3:create_table(Db, T, Cols),
                io:format("Created table: ~p~n", [T])
            end,
        maps:foreach(CreateTable, Tables);
    ok ->
        ok
end.
%% read posts and links schema and exec it
{ok, Schema} = file:read_file("./db/schema.sql"),
Oks = sqlite3:sql_exec_script(Db, Schema),
[] = [V || V <- Oks, V =/= ok],
io:format("[DB] schema.sql applied~n"),

%% now for some less complex schema
ColId = {id, integer, [{primary_key, [asc, autoincrement]}]},
Tables = #{
    channels => [ColId, {name, text, not_null}, {topic, text, not_null}],
    users    => [ColId, {pubkey, blob, not_null}, {name, text, not_null}]
},
case tables_exist(Db, maps:keys(Tables)) of
    missing ->
        CreateTable = fun(T, Cols) ->
                ok = sqlite3:create_table(Db, T, Cols),
                io:format("[DB] Created table: ~p~n", [T])
            end,
        maps:foreach(CreateTable, Tables);
    ok ->
        ok
end.
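%% For reference, the two ad-hoc tables above correspond roughly to this DDL
%% (a sketch; the exact statements that sqlite3:create_table emits may differ):
%%
%%   CREATE TABLE channels (
%%     id INTEGER PRIMARY KEY ASC AUTOINCREMENT,
%%     name TEXT NOT NULL,
%%     topic TEXT NOT NULL
%%   );
%%
%%   CREATE TABLE users (
%%     id INTEGER PRIMARY KEY ASC AUTOINCREMENT,
%%     pubkey BLOB NOT NULL,
%%     name TEXT NOT NULL
%%   );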