Database Configuration
----------------------
All database DDL state is handled using the Haskell dbmigrations tool.
Once all the containers are up, you'll need to run the existing database
migrations as follows:
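A minimal sketch of that invocation, assuming the dbmigrations `upgrade` command and the same migrations config file used for creating new migrations below:

~~~bash
# Apply all pending migrations (sketch; adjust the config path to your setup).
moo-postgresql upgrade --config-file ./local/conf/server/aftok-migrations.cfg
~~~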
New migrations can be created with:
~~~bash
moo-postgresql new --config-file ./local/conf/server/aftok-migrations.cfg kebab-case-descriptive-name
~~~
pure SignupOK
let signupJSON =
{ username: req.username
, password: req.password
, recoveryType: case req.recoverBy of
RecoverByEmail _ -> "email"
RecoverByZAddr _ -> "zaddr"
, email: case req.recoverBy of
RecoverByEmail email -> Just email
RecoverByZAddr _ -> Nothing
, zaddr: case req.recoverBy of
RecoverByEmail _ -> Nothing
RecoverByZAddr zaddr -> Just zaddr
}
{-# LANGUAGE TemplateHaskell #-}
module Aftok.Currency.ZCash where
import Control.Lens ( makePrisms )
import Data.Text ( Text ) -- may instead be re-exported by the project's prelude
newtype ZAddr = ZAddr { zaddrText :: Text }
deriving (Eq, Ord, Show)
makePrisms ''ZAddr
, renderNetworkId <$> nidMay
, addrMay
, user' ^. userEmail . _Email
, user' ^? userAccountRecovery . _RecoverByEmail . _Email
, user' ^? userAccountRecovery . _RecoverByZAddr . _ZAddr
((,) <$> idParser UserId <*> userParser mode)
[sql| SELECT id, handle, default_payment_network, default_payment_addr, email FROM users WHERE handle = ? |]
(Only h)
(btcAddressParser mode)
[sql| SELECT default_payment_network, default_payment_addr FROM users WHERE id = ? |]
(Only uid)
CreateUser :: BTCUser -> DBOp UserId
FindUser :: UserId -> DBOp (Maybe BTCUser)
FindUserByName :: UserName -> DBOp (Maybe (UserId, BTCUser))
CreateUser :: User -> DBOp UserId
FindUser :: UserId -> DBOp (Maybe User)
FindUserByName :: UserName -> DBOp (Maybe (UserId, User))
FindUserPaymentAddress :: UserId -> DBOp (Maybe (BTCNet))
data User a = User
{ _username :: !UserName
, _userAddress :: !(Maybe a)
, _userEmail :: !Email
data AccountRecovery
= RecoverByEmail Email
| RecoverByZAddr ZAddr
makePrisms ''AccountRecovery
data User = User
{ _username :: !UserName
, _userAccountRecovery :: !AccountRecovery
Description: Rename users.email to recovery_email, make it nullable, and add a recovery_zaddr column for Z-address account recovery.
Created: 2020-09-14 23:27:57.342707094 UTC
Depends: 2020-06-06_03-53-54_add-payment-networks
Apply: |
ALTER TABLE users RENAME COLUMN email TO recovery_email;
ALTER TABLE users ALTER COLUMN recovery_email DROP NOT NULL;
ALTER TABLE users ADD COLUMN recovery_zaddr text;
Revert: |
ALTER TABLE users RENAME COLUMN recovery_email TO email;
ALTER TABLE users ALTER COLUMN email SET NOT NULL;
ALTER TABLE users DROP COLUMN recovery_zaddr;
instance A.FromJSON CUser where
parseJSON (A.Object v) =
let parseUser =
User
<$> (UserName <$> v .: "username")
<*> (v .: "btcAddr")
<*> (Email <$> v .: "email")
instance A.FromJSON RegisterRequest where
parseJSON (A.Object v) = do
recoveryType <- v .: "recoveryType"
recovery <- case (recoveryType :: Text) of
"email" -> RecoverByEmail . Email <$> v .: "email"
"zaddr" -> RecoverByZAddr . ZAddr <$> v .: "zaddr"
_ -> Prelude.empty
user <- User <$> (UserName <$> v .: "username")
<*> pure recovery
parseInvitationCodes c = either
(\e -> fail $ "Invitation code was rejected as invalid: " <> e)
pure
(traverse parseInvCode c)
in CU
<$> parseUser
<*> (fromString <$> v .: "password")
RegisterRequest user
<$> (fromString <$> v .: "password")
<*> (v .: "captchaToken")
registerHandler :: S.Handler App App UserId
registerHandler = do
rbody <- S.readRequestBody 4096
-- allow any number of 'invitationCode' query parameters
userData <- maybe (snapError 400 "Could not parse user data") pure
$ A.decode rbody
t <- liftIO C.getCurrentTime
nmode <- getNetworkMode
let addr =
textToAddr (toNetwork nmode BTC) =<< (userData ^. cuser . userAddress)
registerHandler :: CaptchaConfig -> S.Handler App App UserId
registerHandler cfg = do
rbody <- S.readRequestBody 4096
userData <- fromMaybeM (snapError 400 "Could not parse user data") (A.decode rbody)
captchaResult <- liftIO $ checkCaptcha cfg (userData ^. captchaToken)
void . either (const . throwDenied $ AU.AuthError "Captcha check failed, please try again.") pure $ captchaResult
now <- liftIO C.getCurrentTime
userId <- createUser
((userData ^. cuser) & userAddress .~ ((BTC, ) <$> addr))
void $ traverse (acceptInvitation userId t) (userData ^. invitationCodes)
return userId
userId <- createUser (userData ^. cuser)
void $ traverse (acceptInvitation userId now) (userData ^. invitationCodes)
pure userId
CaptchaResponse <$> v .: "success"
<*> (fmap toError . join . toList <$> v .:? "error-codes")
where
toError = \case
"missing-input-secret" -> MissingInputSecret
"invalid-input-secret" -> InvalidInputSecret
"missing-input-response" -> MissingInputResponse
"invalid-input-response" -> InvalidInputResponse
"bad-request" -> BadRequest
"timeout-or-duplicate" -> TimeoutOrDuplicate
other -> CaptchaError $ "Unexpected error code: " <> other
parseJSON _ =
fail "Captcha response body was not a valid JSON object."
CaptchaResponse
<$> v
.: "success"
<*> (fmap toError . join . toList <$> v .:? "error-codes")
where
toError = \case
"missing-input-secret" -> MissingInputSecret
"invalid-input-secret" -> InvalidInputSecret
"missing-input-response" -> MissingInputResponse
"invalid-input-response" -> InvalidInputResponse
"bad-request" -> BadRequest
"timeout-or-duplicate" -> TimeoutOrDuplicate
other -> CaptchaError $ "Unexpected error code: " <> other
parseJSON _ = fail "Captcha response body was not a valid JSON object."
request <- parseRequest "https://www.google.com/recaptcha/api/siteverify"
reqWithBody <- formDataBody [partBS "secret" (T.encodeUtf8 $ secretKey cfg), partBS "response" (T.encodeUtf8 token)] request
manager <- newTlsManager
request <- parseRequest "https://www.google.com/recaptcha/api/siteverify"
reqWithBody <- formDataBody
[ partBS "secret" (T.encodeUtf8 $ secretKey cfg)
, partBS "response" (T.encodeUtf8 token)
]
request
manager <- newTlsManager
200 ->
case A.eitherDecode (responseBody response) of
Left err -> Left [CaptchaError $ "Failed to decode JSON response: " <> T.pack err]
Right cr -> if success cr then Right () else Left (errorCodes cr)
200 -> case A.eitherDecode (responseBody response) of
Left err ->
Left [CaptchaError $ "Failed to decode JSON response: " <> T.pack err]
Right cr -> if success cr then Right () else Left (errorCodes cr)