_id (stringlengths 64-64) | repository (stringlengths 6-84) | name (stringlengths 4-110) | content (stringlengths 0-248k) | license (null) | download_url (stringlengths 89-454) | language (stringclasses, 7 values) | comments (stringlengths 0-74.6k) | code (stringlengths 0-248k)
---|---|---|---|---|---|---|---|---
8ee391d08f00562d52f893df8d793fdb0ae876ff23e1a8abb39287fb9d3edd3f
|
Bodigrim/ntru
|
EES541EP1.hs
|
|
{-|
NTRU cryptographic system using the EES541EP1 parameter set, for use at the 112-bit security level.
-}
module Math.NTRU.EES541EP1 (keyGen, encrypt, decrypt) where
import qualified Math.NTRU as NTRU
-- | Generates a random PublicKey-PrivateKey pair
-- ^ A tuple representing (PublicKey, PrivateKey) where PrivateKey = 1 + pf, per <enhancement#2>.
keyGen = NTRU.keyGen (NTRU.genParams "EES541EP1")
-- | Encrypts a message with the given public key
encrypt :: [Integer] -- ^ A list of ASCII values representing the message
-> [Integer] -- ^ A list of numbers representing the public key
-> IO [Integer] -- ^ A list of numbers representing the ciphertext
encrypt = NTRU.encrypt (NTRU.genParams "EES541EP1")
-- | Decrypts and verifies a ciphertext with the given keys
decrypt :: [Integer] -- ^ A list of numbers representing the private key
-> [Integer] -- ^ A list of numbers representing the public key
-> [Integer] -- ^ A list of numbers representing the ciphertext
-> Maybe [Integer] -- ^ A list of numbers representing the original message, or nothing on failure
decrypt = NTRU.decrypt (NTRU.genParams "EES541EP1")
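A minimal round-trip sketch of how the three exports above fit together. The type of keyGen is not shown in this excerpt, so the sketch assumes it yields the (PublicKey, PrivateKey) pair of [Integer] lists that its comment describes; the module alias EES is also just for illustration.

```haskell
import Data.Char (chr, ord)
import qualified Math.NTRU.EES541EP1 as EES

-- Encrypt a string and decrypt it again.
-- Assumption: keyGen :: IO ([Integer], [Integer]) as per its Haddock comment.
roundTrip :: String -> IO (Maybe String)
roundTrip msg = do
  (pub, priv) <- EES.keyGen                             -- assumed (PublicKey, PrivateKey)
  ct <- EES.encrypt (map (fromIntegral . ord) msg) pub  -- ASCII values -> ciphertext
  -- decrypt returns Nothing when verification fails
  pure (map (chr . fromIntegral) <$> EES.decrypt priv pub ct)
```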
| null |
https://raw.githubusercontent.com/Bodigrim/ntru/1974391559eb49ee1960db423b667867dfce946b/src/Math/NTRU/EES541EP1.hs
|
haskell
|
^ A list of ASCII values representing the message
^ A list of numbers representing the public key
^ A list of numbers representing the ciphertext
^ A list of numbers representing the private key
^ A list of numbers representing the public key
^ A list of numbers representing the ciphertext
^ A list of numbers representing the original message, or nothing on failure
|
|
{-|
NTRU cryptographic system using the EES541EP1 parameter set, for use at the 112-bit security level.
-}
module Math.NTRU.EES541EP1 (keyGen, encrypt, decrypt) where
import qualified Math.NTRU as NTRU
-- | Generates a random PublicKey-PrivateKey pair
-- ^ A tuple representing (PublicKey, PrivateKey) where PrivateKey = 1 + pf, per <enhancement#2>.
keyGen = NTRU.keyGen (NTRU.genParams "EES541EP1")
-- | Encrypts a message with the given public key
encrypt = NTRU.encrypt (NTRU.genParams "EES541EP1")
-- | Decrypts and verifies a ciphertext with the given keys
decrypt = NTRU.decrypt (NTRU.genParams "EES541EP1")
|
cc03595562faf8c56e56cd10eaac4d7ea338fe4632ff7d484387065dcc443b8b
|
Lupino/haskell-periodic
|
Server.hs
|
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE ScopedTypeVariables #-}
module Periodic.Server
( startServer
) where
import Control.Monad (void)
import Data.Binary.Get (getWord32be, runGet)
import Data.ByteString (ByteString)
import Data.ByteString.Lazy (fromStrict)
import qualified Data.IOMap as IOMap
import Data.Map.Strict (filterWithKey)
import Metro (NodeMode (..), SessionMode (..))
import Metro.Class (Servable (STP),
Transport (TransportConfig),
setPacketId)
import qualified Metro.Class as S (Servable (ServerConfig))
import Metro.Conn (receive_, runConnT, send)
import Metro.Node (NodeEnv1 (..), env, runNodeT1)
import Metro.Server (getMaxPoolSize, getNodeEnvList,
initServerEnv, runServerT,
setDefaultSessionTimeout,
setKeepalive, setNodeMode,
setOnCheckNodeState,
setOnExcClose, setOnNodeLeave,
setServerName, setSessionMode,
stopServerT)
import qualified Metro.Server as M (ServerEnv, startServer)
import Metro.Utils (getEpochTime)
import Periodic.Node (sessionGen)
import Periodic.Server.Client (handleSessionT)
import Periodic.Server.GrabQueue (dropAgentList, newGrabQueue,
pushAgent)
import Periodic.Server.Hook (Hook)
import Periodic.Server.Persist (Persist, PersistConfig)
import Periodic.Server.Scheduler (failJob, initSchedEnv,
removeFunc, runSchedT, shutdown,
startSchedT)
import Periodic.Server.Types (ClientConfig (..), Command,
ServerCommand (Data))
import Periodic.Types (ClientType, Job, Msgid (..),
Nid (..), Packet, getClientType,
getHandle, getTimeout, packetRES,
regPacketRES)
import Periodic.Types.ServerCommand (ServerCommand (JobAssign))
import System.Entropy (getEntropy)
import UnliftIO (MonadUnliftIO, newTVarIO,
readTVarIO, tryAny)
type ServerEnv serv =
M.ServerEnv serv ClientConfig Nid Msgid (Packet Command)
doAssignJob :: Transport tp => ServerEnv serv tp -> Nid -> Msgid -> Job -> IO Bool
doAssignJob sEnv nid msgid job = do
menv0 <- IOMap.lookup nid $ getNodeEnvList sEnv
case menv0 of
Nothing -> return False
Just env0 -> do
r <- tryAny
$ runConnT (connEnv env0)
$ send
$ setPacketId msgid
$ packetRES (JobAssign job)
case r of
Left _ -> return False
Right _ -> do
env1 <- runNodeT1 env0 env
expiredAt <- (+tout) <$> getEpochTime
IOMap.insert jh expiredAt (wJobQueue env1)
return True
where jh = getHandle job
tout = fromIntegral $ getTimeout job
doPushData :: Transport tp => ServerEnv serv tp -> Nid -> Msgid -> ByteString -> IO ()
doPushData sEnv nid msgid w = do
menv0 <- IOMap.lookup nid $ getNodeEnvList sEnv
case menv0 of
Nothing -> return ()
Just env0 -> do
void
$ tryAny
$ runConnT (connEnv env0)
$ send
$ setPacketId msgid
$ packetRES (Data w)
startServer
:: (Servable serv, Transport tp, Persist db, MonadUnliftIO m)
=> PersistConfig db
-> (TransportConfig (STP serv) -> TransportConfig tp)
-> S.ServerConfig serv
-> Hook
-> Int
-> Int
-> m ()
startServer dbconfig mk config hook pushTaskSize schedTaskSize = do
grabQueue <- newGrabQueue
sEnv <- fmap mapEnv . initServerEnv config sessionGen mk $ \_ _ connEnv0 -> do
(_ :: ClientType) <- getClientType <$> runConnT connEnv0 receive_
nid <- getEntropy 4
runConnT connEnv0 $ send (regPacketRES $ Data nid)
let nidV = Nid $! runGet getWord32be $ fromStrict nid
wFuncList <- newTVarIO []
wJobQueue <- IOMap.empty
wMsgidList <- newTVarIO []
pushAgent grabQueue wFuncList nidV wMsgidList
return $ Just (nidV, ClientConfig {..})
setDefaultSessionTimeout sEnv 100
setKeepalive sEnv 500
schedEnv <- initSchedEnv dbconfig grabQueue
(runServerT sEnv stopServerT)
(doAssignJob sEnv) (doPushData sEnv) hook (getMaxPoolSize sEnv)
setOnNodeLeave sEnv $ \nid ClientConfig {..} ->
runSchedT schedEnv $ do
mapM_ failJob =<< IOMap.keys wJobQueue
mapM_ removeFunc =<< readTVarIO wFuncList
dropAgentList grabQueue nid
setOnCheckNodeState sEnv $ \_ ClientConfig {..} -> do
now <- getEpochTime
handles <- map fst . filter (\(_, t) -> t < now) <$> IOMap.toList wJobQueue
IOMap.modifyIOMap (filterWithKey (\_ expiredAt -> expiredAt > now)) wJobQueue
runSchedT schedEnv $ mapM_ failJob handles
runSchedT schedEnv $ do
startSchedT pushTaskSize schedTaskSize
M.startServer sEnv handleSessionT
shutdown
where mapEnv :: ServerEnv serv tp -> ServerEnv serv tp
mapEnv =
setNodeMode Multi
. setSessionMode SingleAction
. setServerName "Periodic"
. setOnExcClose True
| null |
https://raw.githubusercontent.com/Lupino/haskell-periodic/a95b184a255da4cfefb32f15b4892541a11a57a0/periodic-server/src/Periodic/Server.hs
|
haskell
|
# LANGUAGE RecordWildCards #
|
{-# LANGUAGE ScopedTypeVariables #-}
module Periodic.Server
( startServer
) where
import Control.Monad (void)
import Data.Binary.Get (getWord32be, runGet)
import Data.ByteString (ByteString)
import Data.ByteString.Lazy (fromStrict)
import qualified Data.IOMap as IOMap
import Data.Map.Strict (filterWithKey)
import Metro (NodeMode (..), SessionMode (..))
import Metro.Class (Servable (STP),
Transport (TransportConfig),
setPacketId)
import qualified Metro.Class as S (Servable (ServerConfig))
import Metro.Conn (receive_, runConnT, send)
import Metro.Node (NodeEnv1 (..), env, runNodeT1)
import Metro.Server (getMaxPoolSize, getNodeEnvList,
initServerEnv, runServerT,
setDefaultSessionTimeout,
setKeepalive, setNodeMode,
setOnCheckNodeState,
setOnExcClose, setOnNodeLeave,
setServerName, setSessionMode,
stopServerT)
import qualified Metro.Server as M (ServerEnv, startServer)
import Metro.Utils (getEpochTime)
import Periodic.Node (sessionGen)
import Periodic.Server.Client (handleSessionT)
import Periodic.Server.GrabQueue (dropAgentList, newGrabQueue,
pushAgent)
import Periodic.Server.Hook (Hook)
import Periodic.Server.Persist (Persist, PersistConfig)
import Periodic.Server.Scheduler (failJob, initSchedEnv,
removeFunc, runSchedT, shutdown,
startSchedT)
import Periodic.Server.Types (ClientConfig (..), Command,
ServerCommand (Data))
import Periodic.Types (ClientType, Job, Msgid (..),
Nid (..), Packet, getClientType,
getHandle, getTimeout, packetRES,
regPacketRES)
import Periodic.Types.ServerCommand (ServerCommand (JobAssign))
import System.Entropy (getEntropy)
import UnliftIO (MonadUnliftIO, newTVarIO,
readTVarIO, tryAny)
type ServerEnv serv =
M.ServerEnv serv ClientConfig Nid Msgid (Packet Command)
doAssignJob :: Transport tp => ServerEnv serv tp -> Nid -> Msgid -> Job -> IO Bool
doAssignJob sEnv nid msgid job = do
menv0 <- IOMap.lookup nid $ getNodeEnvList sEnv
case menv0 of
Nothing -> return False
Just env0 -> do
r <- tryAny
$ runConnT (connEnv env0)
$ send
$ setPacketId msgid
$ packetRES (JobAssign job)
case r of
Left _ -> return False
Right _ -> do
env1 <- runNodeT1 env0 env
expiredAt <- (+tout) <$> getEpochTime
IOMap.insert jh expiredAt (wJobQueue env1)
return True
where jh = getHandle job
tout = fromIntegral $ getTimeout job
doPushData :: Transport tp => ServerEnv serv tp -> Nid -> Msgid -> ByteString -> IO ()
doPushData sEnv nid msgid w = do
menv0 <- IOMap.lookup nid $ getNodeEnvList sEnv
case menv0 of
Nothing -> return ()
Just env0 -> do
void
$ tryAny
$ runConnT (connEnv env0)
$ send
$ setPacketId msgid
$ packetRES (Data w)
startServer
:: (Servable serv, Transport tp, Persist db, MonadUnliftIO m)
=> PersistConfig db
-> (TransportConfig (STP serv) -> TransportConfig tp)
-> S.ServerConfig serv
-> Hook
-> Int
-> Int
-> m ()
startServer dbconfig mk config hook pushTaskSize schedTaskSize = do
grabQueue <- newGrabQueue
sEnv <- fmap mapEnv . initServerEnv config sessionGen mk $ \_ _ connEnv0 -> do
(_ :: ClientType) <- getClientType <$> runConnT connEnv0 receive_
nid <- getEntropy 4
runConnT connEnv0 $ send (regPacketRES $ Data nid)
let nidV = Nid $! runGet getWord32be $ fromStrict nid
wFuncList <- newTVarIO []
wJobQueue <- IOMap.empty
wMsgidList <- newTVarIO []
pushAgent grabQueue wFuncList nidV wMsgidList
return $ Just (nidV, ClientConfig {..})
setDefaultSessionTimeout sEnv 100
setKeepalive sEnv 500
schedEnv <- initSchedEnv dbconfig grabQueue
(runServerT sEnv stopServerT)
(doAssignJob sEnv) (doPushData sEnv) hook (getMaxPoolSize sEnv)
setOnNodeLeave sEnv $ \nid ClientConfig {..} ->
runSchedT schedEnv $ do
mapM_ failJob =<< IOMap.keys wJobQueue
mapM_ removeFunc =<< readTVarIO wFuncList
dropAgentList grabQueue nid
setOnCheckNodeState sEnv $ \_ ClientConfig {..} -> do
now <- getEpochTime
handles <- map fst . filter (\(_, t) -> t < now) <$> IOMap.toList wJobQueue
IOMap.modifyIOMap (filterWithKey (\_ expiredAt -> expiredAt > now)) wJobQueue
runSchedT schedEnv $ mapM_ failJob handles
runSchedT schedEnv $ do
startSchedT pushTaskSize schedTaskSize
M.startServer sEnv handleSessionT
shutdown
where mapEnv :: ServerEnv serv tp -> ServerEnv serv tp
mapEnv =
setNodeMode Multi
. setSessionMode SingleAction
. setServerName "Periodic"
. setOnExcClose True
|
de6dc6405966f0c9b6ead99f804c16b62f44e5c56bef117c07f5192ecebab4d0
|
gbour/wave
|
wave_access_log.erl
|
%%
%% Wave - MQTT Broker
%% Copyright (C) 2014-2016 -
%%
%% This program is free software: you can redistribute it and/or modify
%% it under the terms of the GNU Affero General Public License as published
%% by the Free Software Foundation, version 3 of the License.
%%
%% This program is distributed in the hope that it will be useful,
%% but WITHOUT ANY WARRANTY; without even the implied warranty of
%% MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
%% GNU Affero General Public License for more details.
%%
%% You should have received a copy of the GNU Affero General Public License
%% along with this program. If not, see </>.
-module(wave_access_log).
-author("Guillaume Bour <>").
-behaviour(gen_server).
-record(state, {
enabled = false,
logfile,
fh
}).
%
-export([log/1]).
% gen_server API
-export([start_link/1, init/1, handle_call/3, handle_cast/2, handle_info/2, terminate/2, code_change/3]).
start_link(Args) ->
% NOTE: currently we set ONE global registry service for the current erlang server
gen_server:start_link({local,?MODULE}, ?MODULE, Args, []).
%TODO: if log is not enabled, we should not start the gen_server
% so log cast will fail silently (avoid data copy)
% (see wave_sup how to not restart srv in this case)
init(Args) ->
Enabled = proplists:get_value(enabled, Args),
LogFile = proplists:get_value(logfile, Args),
{ok, Fh} = case Enabled of
true -> file:open(LogFile, [write,append]);
_ -> {ok, undefined}
end,
{ok, #state{enabled = Enabled, logfile = LogFile, fh = Fh}}.
%%
%% PUBLIC API
%%
log(Fields) ->
gen_server:cast(?MODULE, {log, Fields}).
%%
%% PRIVATE API
%%
handle_call(_,_,State) ->
{reply, ok, State}.
handle_cast({log, _}, State=#state{enabled=false}) ->
{noreply, State};
handle_cast({log, Fields=#{verb := Verb, status_code := Code, ua := Ua}}, State=#state{fh=Fh}) ->
Date = qdate:to_string("d/M/Y:H:i:s O", "Europe/Paris", calendar:universal_time()),
Uri = maps:get(uri , Fields, ""),
Ip = maps:get(ip , Fields, "-"),
Size = maps:get(size, Fields, "-"),
io:format(Fh, "~s - - [~s] \"~s ~s\" ~B ~s \"-\" \"~s\"~n",
[Ip, Date, Verb, Uri, Code, wave_utils:str(Size), Ua]),
{noreply, State};
handle_cast(_, State) ->
{noreply, State}.
handle_info(_, State) ->
{noreply, State}.
terminate(_, #state{fh=Fh}) ->
file:close(Fh),
ok.
code_change(_, State, _) ->
{ok, State}.
%%
%% PRIVATE FUNS
%%
| null |
https://raw.githubusercontent.com/gbour/wave/fe5b78408a7c6e723b19cd454068958058e5e072/apps/wave/src/wave_access_log.erl
|
erlang
|
This program is free software: you can redistribute it and/or modify
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
along with this program. If not, see </>.
gen_server API
TODO: if log not enable, we should not start gene_server
so log cast will fail silently (avoid data copy)
PUBLIC API
PRIVATE API
PRIVATE FUNS
|
%% Wave - MQTT Broker
%% Copyright (C) 2014-2016 -
%% it under the terms of the GNU Affero General Public License as published
%% by the Free Software Foundation, version 3 of the License.
%% GNU Affero General Public License for more details.
%% You should have received a copy of the GNU Affero General Public License
-module(wave_access_log).
-author("Guillaume Bour <>").
-behaviour(gen_server).
-record(state, {
enabled = false,
logfile,
fh
}).
-export([log/1]).
-export([start_link/1, init/1, handle_call/3, handle_cast/2, handle_info/2, terminate/2, code_change/3]).
start_link(Args) ->
% NOTE: currently we set ONE global registry service for the current erlang server
gen_server:start_link({local,?MODULE}, ?MODULE, Args, []).
% (see wave_sup how to not restart srv in this case)
init(Args) ->
Enabled = proplists:get_value(enabled, Args),
LogFile = proplists:get_value(logfile, Args),
{ok, Fh} = case Enabled of
true -> file:open(LogFile, [write,append]);
_ -> {ok, undefined}
end,
{ok, #state{enabled = Enabled, logfile = LogFile, fh = Fh}}.
log(Fields) ->
gen_server:cast(?MODULE, {log, Fields}).
handle_call(_,_,State) ->
{reply, ok, State}.
handle_cast({log, _}, State=#state{enabled=false}) ->
{noreply, State};
handle_cast({log, Fields=#{verb := Verb, status_code := Code, ua := Ua}}, State=#state{fh=Fh}) ->
Date = qdate:to_string("d/M/Y:H:i:s O", "Europe/Paris", calendar:universal_time()),
Uri = maps:get(uri , Fields, ""),
Ip = maps:get(ip , Fields, "-"),
Size = maps:get(size, Fields, "-"),
io:format(Fh, "~s - - [~s] \"~s ~s\" ~B ~s \"-\" \"~s\"~n",
[Ip, Date, Verb, Uri, Code, wave_utils:str(Size), Ua]),
{noreply, State};
handle_cast(_, State) ->
{noreply, State}.
handle_info(_, State) ->
{noreply, State}.
terminate(_, #state{fh=Fh}) ->
file:close(Fh),
ok.
code_change(_, State, _) ->
{ok, State}.
|
8b8248065f67c4330d06b0b45032f6efc258d43654b48d36767f2c30caaa4e27
|
bobbae/gosling-emacs
|
ll-window.ml
|
;****************************************************************
;* File: ll-window.ml *
;* Last modified on Fri Apr 18 12:16:02 1986 by roberts *
;* ----------------------------------------------------------- *
;* This package has the responsibility for maintaining the *
;* window arrangement required by lauralee. The rules are *
;* *
;* #1 ("folders") The top window displays the list of *
;* folders and is adjusted so that the *
;* bottom bar is flush against the last *
;* line of the window. The bar contains *
;* only the identifying tag. *
;* *
;* #2 ("inbox") This window contains the message list *
;* for the currently selected folder. *
;* The menu bar is buggable and contains *
;* the standard operations. *
;* *
;* #3 ("display") The display window may be any emacs *
;* window, but will be used as the window *
;* in which messages are displayed. *
;* *
;* #3/4 ("draft") For replies or new messages, a "draft" *
;* window is used for message generation. *
;* This will usually be window #3 for new *
;* messages and #4 for replies. *
;* *
;* last ("help") Help window, always at bottom of screen. *
;****************************************************************
(declare-global ll-buffer-draft-replaced)
(defun (ignore-key (nothing)))
(defun (no-typing nt-i
(setq nt-i 127)
(while (>= nt-i ' ')
(local-bind-to-key "ignore-key" (char-to-string nt-i))
(setq nt-i (- nt-i 1))
)
(local-bind-to-key "ignore-key" (char-to-string 8))
(local-bind-to-key "ignore-key" (char-to-string 9))
(local-bind-to-key "ignore-key" (char-to-string 10))
(local-bind-to-key "ignore-key" (char-to-string 13))
)
)
;****************************************************************
;* (ll-select-folders-window [adjflag]) *
;* *
;* Selects the folders window and adjusts its size. *
;* An error is signalled if the "Mail folders" buffer *
;* does not exist. *
;****************************************************************
(defun
(ll-select-folders-window &adjflag height nlines
(setq &adjflag (if (< (nargs) 1) 0 (arg 1)))
(if (= (number-of-windows) 1) (split-current-window))
(goto-window 1)
(if (| (!= ll-window-type "folders") &adjflag)
(progn
(switch-to-buffer "Mail folders")
(no-typing)
(setq ll-window-type "folders")
(setq pad-mode-line 1)
(set-mark)
(setq nlines (line-number))
(end-of-file)
(setq height (min 8 (line-number)))
(provide-prefix-argument (- height (window-height))
(enlarge-window)
)
(beginning-of-line)
(setq nlines (- (line-number) nlines))
(setq nlines
(max (/ height 2) (- (- height nlines) 1))
)
(goto-character (mark))
(line-to-top-of-window)
(provide-prefix-argument nlines (scroll-one-line-down))
(exchange-dot-and-mark)
)
)
)
)
;****************************************************************
;* (ll-select-inbox-window [adjflag]) *
;* *
;* Selects the inbox window and make sure that it is *
;* associated with the buffer given by ll-source-folder. *
;* If the adjflag parameter is given and is nonzero, this *
;* also adjusts the message display so that there is only *
;* one blank line at the bottom if possible. The size of *
;* the window is chosen to be 1/3 of the space remaining *
;* on the screen after the folders window. *
;****************************************************************
(defun
(ll-select-inbox-window adjflag inbox-height nlines
(setq adjflag (if (< (nargs) 1) 0 (arg 1)))
(goto-window 1)
(if (!= ll-window-type "folders") (ll-select-folders-window))
(setq inbox-height (/ (- screen-height (window-height)) 3))
(goto-window 2)
(if (= (number-of-windows) 2) (split-current-window))
(goto-window 2)
(if (| (!= ll-window-type "inbox") (!= ll-source-folder ll-folder-name))
(progn
(switch-to-buffer (concat "+" ll-source-folder))
(no-typing)
(setq pad-mode-line 1)
(provide-prefix-argument (- inbox-height (window-height))
(enlarge-window)
)
(setq split-height-threshhold (+ inbox-height 1))
)
(setq inbox-height (window-height))
)
(if adjflag
(progn
(set-mark)
(setq nlines (line-number))
(end-of-file)
(beginning-of-line)
(setq nlines (- (line-number) nlines))
(setq nlines
(max (/ inbox-height 2) (- (- inbox-height nlines) 2))
)
(goto-character (mark))
(line-to-top-of-window)
(provide-prefix-argument nlines (scroll-one-line-down))
(exchange-dot-and-mark)
)
)
)
)
;****************************************************************
;* (ll-select-display-window) *
;* *
;* Selects the third window on the screen. If there *
;* is another window on the screen, it is deleted unless it *
;* is marked as an active draft window. *
;****************************************************************
(defun
(ll-select-display-window
(goto-window 1)
(if (!= ll-window-type "folders") (ll-select-folders-window))
(goto-window 2)
(if (!= ll-folder-name ll-source-folder) (ll-select-inbox-window))
(goto-window 3)
(if (& (!= ll-window-type "display") (!= ll-window-type "draft"))
(setq ll-previous-buffer (current-buffer-name))
)
(if (& (ll-draft-window-is-visible) ll-draft-active)
(switch-to-buffer "Draft")
)
(while (> (number-of-windows) 3)
(goto-window 4)
(delete-window)
)
(if (& (= ll-window-type "draft") ll-draft-active)
(split-current-window)
)
(goto-window 3)
(switch-to-buffer "Display")
(setq ll-window-type "display")
(setq needs-checkpointing 0)
)
)
;****************************************************************
;* (ll-select-draft-window window) *
;* *
;* This selects the composition/reply window, which *
;* will live either in window #3 or #4, depending on *
;* whether this is a new message or a reply, respectively. *
;****************************************************************
(defun
(ll-select-draft-window &window old-active
(setq &window (arg 1))
(setq old-active ll-draft-active)
(setq ll-draft-active 0)
(ll-select-display-window)
(if (!= ll-window-type "draft")
(setq ll-buffer-draft-replaced (current-buffer-name))
)
(if (= &window 4) (split-current-window))
(switch-to-buffer "Draft")
(setq ll-window-type "draft")
(setq ll-draft-active old-active)
)
)
;****************************************************************
;* (ll-select-help-window) *
;* *
;* Selects a help window at the bottom of the screen *
;* and reads in the help file. The height of the window *
;* is adjusted so that the first page fits exactly in the *
;* window, using a "-------" line as a sentinel. *
;****************************************************************
(defun
(ll-select-help-window helppath
(ll-select-display-window)
(if (= (number-of-windows) 3)
(progn
(goto-window 3)
(split-current-window)
)
)
(goto-window 4)
(switch-to-buffer "Lauralee Help")
(setq ll-window-type "help")
(setq needs-checkpointing 0)
(if (= (current-file-name) "")
(progn
(if (error-occured (setq helppath (path-of "lauralee.help")))
(insert-string "No help file available!\n")
(progn (read-file helppath) (no-typing))
)
)
)
(beginning-of-file)
(if (error-occured (re-search-forward "^----*$")) (end-of-file))
(provide-prefix-argument (- (- (line-number) (window-height)) 1)
(enlarge-window)
)
(beginning-of-file)
)
)
;****************************************************************
;* (ll-draft-window-is-visible) *
;* *
;* Returns true if the draft window is visible on the *
;* display. *
;****************************************************************
(defun
(ll-draft-window-is-visible oldwin iwin draft-found
(setq oldwin (current-window))
(setq draft-found 0)
(setq iwin 1)
(while (& (! draft-found) (<= iwin (number-of-windows)))
(goto-window iwin)
(if (= ll-window-type "draft") (setq draft-found 1))
(setq iwin (+ 1 iwin))
)
(goto-window oldwin)
draft-found
)
)
;****************************************************************
;* (ll-help-window-is-visible) *
;* *
;* Returns true if the help window is visible on the *
;* display. *
;****************************************************************
(defun
(ll-help-window-is-visible oldwin iwin help-found
(setq oldwin (current-window))
(goto-window (number-of-windows))
(setq help-found (= ll-window-type "help"))
(goto-window oldwin)
help-found
)
)
;****************************************************************
;* (ll-delete-draft-window) *
;* *
;* Deletes the draft window from the screen, replacing *
;* it either with the contents of window 3 or the buffer *
;* which the draft window replaced when it was created *
;* (if it still exists). *
;****************************************************************
(defun
(ll-delete-draft-window
(save-excursion
(temp-use-buffer "Draft")
(setq buffer-is-modified 0)
)
(ll-set-draft-active 0)
(ll-select-display-window)
(if (& (= ll-window-type "draft")
(buffer-exists ll-buffer-draft-replaced))
(switch-to-buffer ll-buffer-draft-replaced)
)
)
)
;****************************************************************
;* (ll-delete-help-window) *
;* *
;* Deletes the help window from the screen, giving its *
;* space to window 3 or the active draft. *
;****************************************************************
(defun
(ll-delete-help-window
(if (ll-help-window-is-visible)
(save-excursion
(goto-window (number-of-windows))
(delete-window)
(if (& ll-draft-active (! (ll-draft-window-is-visible)))
(ll-select-draft-window 4)
)
)
)
)
)
;****************************************************************
;* (ll-set-draft-active [0/1]) *
;* *
;* Performs the same action as (setq ll-draft-active ...) *
;* but also sets the mode line to reflect the change in *
;* setting. *
;****************************************************************
(defun
(ll-set-draft-active
(save-excursion
(temp-use-buffer "Draft")
(setq ll-draft-active (arg 1))
(if (! ll-draft-active) (setq buffer-is-modified 0))
(temp-use-buffer (concat "+" ll-source-folder))
(ll-set-inbox-mode-line)
)
)
)
| null |
https://raw.githubusercontent.com/bobbae/gosling-emacs/8fdda532abbffb0c952251a0b5a4857e0f27495a/maclib/ll-window.ml
|
ocaml
|
;****************************************************************
;* File: ll-window.ml *
;* Last modified on Fri Apr 18 12:16:02 1986 by roberts *
;* ----------------------------------------------------------- *
;* This package has the responsibility for maintaining the *
;* window arrangement required by lauralee. The rules are *
;* *
;* #1 ("folders") The top window displays the list of *
;* folders and is adjusted so that the *
;* bottom bar is flush against the last *
;* line of the window. The bar contains *
;* only the identifying tag. *
;* *
;* #2 ("inbox") This window contains the message list *
;* for the currently selected folder. *
;* The menu bar is buggable and contains *
;* the standard operations. *
;* *
;* #3 ("display") The display window may be any emacs *
;* window, but will be used as the window *
;* in which messages are displayed. *
;* *
;* #3/4 ("draft") For replies or new messages, a "draft" *
;* window is used for message generation. *
;* This will usually be window #3 for new *
;* messages and #4 for replies. *
;* *
;* last ("help") Help window, always at bottom of screen. *
;****************************************************************
(declare-global ll-buffer-draft-replaced)
(defun (ignore-key (nothing)))
(defun (no-typing nt-i
(setq nt-i 127)
(while (>= nt-i ' ')
(local-bind-to-key "ignore-key" (char-to-string nt-i))
(setq nt-i (- nt-i 1))
)
(local-bind-to-key "ignore-key" (char-to-string 8))
(local-bind-to-key "ignore-key" (char-to-string 9))
(local-bind-to-key "ignore-key" (char-to-string 10))
(local-bind-to-key "ignore-key" (char-to-string 13))
)
)
;****************************************************************
;* (ll-select-folders-window [adjflag]) *
;* *
;* Selects the folders window and adjusts its size. *
;* An error is signalled if the "Mail folders" buffer *
;* does not exist. *
;****************************************************************
(defun
(ll-select-folders-window &adjflag height nlines
(setq &adjflag (if (< (nargs) 1) 0 (arg 1)))
(if (= (number-of-windows) 1) (split-current-window))
(goto-window 1)
(if (| (!= ll-window-type "folders") &adjflag)
(progn
(switch-to-buffer "Mail folders")
(no-typing)
(setq ll-window-type "folders")
(setq pad-mode-line 1)
(set-mark)
(setq nlines (line-number))
(end-of-file)
(setq height (min 8 (line-number)))
(provide-prefix-argument (- height (window-height))
(enlarge-window)
)
(beginning-of-line)
(setq nlines (- (line-number) nlines))
(setq nlines
(max (/ height 2) (- (- height nlines) 1))
)
(goto-character (mark))
(line-to-top-of-window)
(provide-prefix-argument nlines (scroll-one-line-down))
(exchange-dot-and-mark)
)
)
)
)
;****************************************************************
;* (ll-select-inbox-window [adjflag]) *
;* *
;* Selects the inbox window and make sure that it is *
;* associated with the buffer given by ll-source-folder. *
;* If the adjflag parameter is given and is nonzero, this *
;* also adjusts the message display so that there is only *
;* one blank line at the bottom if possible. The size of *
;* the window is chosen to be 1/3 of the space remaining *
;* on the screen after the folders window. *
;****************************************************************
(defun
(ll-select-inbox-window adjflag inbox-height nlines
(setq adjflag (if (< (nargs) 1) 0 (arg 1)))
(goto-window 1)
(if (!= ll-window-type "folders") (ll-select-folders-window))
(setq inbox-height (/ (- screen-height (window-height)) 3))
(goto-window 2)
(if (= (number-of-windows) 2) (split-current-window))
(goto-window 2)
(if (| (!= ll-window-type "inbox") (!= ll-source-folder ll-folder-name))
(progn
(switch-to-buffer (concat "+" ll-source-folder))
(no-typing)
(setq pad-mode-line 1)
(provide-prefix-argument (- inbox-height (window-height))
(enlarge-window)
)
(setq split-height-threshhold (+ inbox-height 1))
)
(setq inbox-height (window-height))
)
(if adjflag
(progn
(set-mark)
(setq nlines (line-number))
(end-of-file)
(beginning-of-line)
(setq nlines (- (line-number) nlines))
(setq nlines
(max (/ inbox-height 2) (- (- inbox-height nlines) 2))
)
(goto-character (mark))
(line-to-top-of-window)
(provide-prefix-argument nlines (scroll-one-line-down))
(exchange-dot-and-mark)
)
)
)
)
;****************************************************************
;* (ll-select-display-window) *
;* *
;* Selects the third window on the screen. If there *
;* is another window on the screen, it is deleted unless it *
;* is marked as an active draft window. *
;****************************************************************
(defun
(ll-select-display-window
(goto-window 1)
(if (!= ll-window-type "folders") (ll-select-folders-window))
(goto-window 2)
(if (!= ll-folder-name ll-source-folder) (ll-select-inbox-window))
(goto-window 3)
(if (& (!= ll-window-type "display") (!= ll-window-type "draft"))
(setq ll-previous-buffer (current-buffer-name))
)
(if (& (ll-draft-window-is-visible) ll-draft-active)
(switch-to-buffer "Draft")
)
(while (> (number-of-windows) 3)
(goto-window 4)
(delete-window)
)
(if (& (= ll-window-type "draft") ll-draft-active)
(split-current-window)
)
(goto-window 3)
(switch-to-buffer "Display")
(setq ll-window-type "display")
(setq needs-checkpointing 0)
)
)
;****************************************************************
;* (ll-select-draft-window window) *
;* *
;* This selects the composition/reply window, which *
;* will live either in window #3 or #4, depending on *
;* whether this is a new message or a reply, respectively. *
;****************************************************************
(defun
(ll-select-draft-window &window old-active
(setq &window (arg 1))
(setq old-active ll-draft-active)
(setq ll-draft-active 0)
(ll-select-display-window)
(if (!= ll-window-type "draft")
(setq ll-buffer-draft-replaced (current-buffer-name))
)
(if (= &window 4) (split-current-window))
(switch-to-buffer "Draft")
(setq ll-window-type "draft")
(setq ll-draft-active old-active)
)
)
;****************************************************************
;* (ll-select-help-window) *
;* *
;* Selects a help window at the bottom of the screen *
;* and reads in the help file. The height of the window *
;* is adjusted so that the first page fits exactly in the *
;* window, using a "-------" line as a sentinel. *
;****************************************************************
(defun
(ll-select-help-window helppath
(ll-select-display-window)
(if (= (number-of-windows) 3)
(progn
(goto-window 3)
(split-current-window)
)
)
(goto-window 4)
(switch-to-buffer "Lauralee Help")
(setq ll-window-type "help")
(setq needs-checkpointing 0)
(if (= (current-file-name) "")
(progn
(if (error-occured (setq helppath (path-of "lauralee.help")))
(insert-string "No help file available!\n")
(progn (read-file helppath) (no-typing))
)
)
)
(beginning-of-file)
(if (error-occured (re-search-forward "^----*$")) (end-of-file))
(provide-prefix-argument (- (- (line-number) (window-height)) 1)
(enlarge-window)
)
(beginning-of-file)
)
)
;****************************************************************
;* (ll-draft-window-is-visible) *
;* *
;* Returns true if the draft window is visible on the *
;* display. *
;****************************************************************
(defun
(ll-draft-window-is-visible oldwin iwin draft-found
(setq oldwin (current-window))
(setq draft-found 0)
(setq iwin 1)
(while (& (! draft-found) (<= iwin (number-of-windows)))
(goto-window iwin)
(if (= ll-window-type "draft") (setq draft-found 1))
(setq iwin (+ 1 iwin))
)
(goto-window oldwin)
draft-found
)
)
;****************************************************************
;* (ll-help-window-is-visible) *
;* *
;* Returns true if the help window is visible on the *
;* display. *
;****************************************************************
(defun
(ll-help-window-is-visible oldwin iwin help-found
(setq oldwin (current-window))
(goto-window (number-of-windows))
(setq help-found (= ll-window-type "help"))
(goto-window oldwin)
help-found
)
)
;****************************************************************
;* (ll-delete-draft-window) *
;* *
;* Deletes the draft window from the screen, replacing *
;* it either with the contents of window 3 or the buffer *
;* which the draft window replaced when it was created *
;* (if it still exists). *
;****************************************************************
(defun
(ll-delete-draft-window
(save-excursion
(temp-use-buffer "Draft")
(setq buffer-is-modified 0)
)
(ll-set-draft-active 0)
(ll-select-display-window)
(if (& (= ll-window-type "draft")
(buffer-exists ll-buffer-draft-replaced))
(switch-to-buffer ll-buffer-draft-replaced)
)
)
)
;****************************************************************
;* (ll-delete-help-window) *
;* *
;* Deletes the help window from the screen, giving its *
;* space to window 3 or the active draft. *
;****************************************************************
(defun
(ll-delete-help-window
(if (ll-help-window-is-visible)
(save-excursion
(goto-window (number-of-windows))
(delete-window)
(if (& ll-draft-active (! (ll-draft-window-is-visible)))
(ll-select-draft-window 4)
)
)
)
)
)
;****************************************************************
;* (ll-set-draft-active [0/1]) *
;* *
;* Performs the same action as (setq ll-draft-active ...) *
;* but also sets the mode line to reflect the change in *
;* setting. *
;****************************************************************
(defun
(ll-set-draft-active
(save-excursion
(temp-use-buffer "Draft")
(setq ll-draft-active (arg 1))
(if (! ll-draft-active) (setq buffer-is-modified 0))
(temp-use-buffer (concat "+" ll-source-folder))
(ll-set-inbox-mode-line)
)
)
)
|
|
8eb73a2535590f389d06b043a3c874284a53a9a4b666f5698c96c8e90117006f
|
pa-ba/compdata
|
Comp.hs
|
{-# LANGUAGE TemplateHaskell, MultiParamTypeClasses, FlexibleInstances,
  FlexibleContexts, UndecidableInstances, TypeOperators, ScopedTypeVariables,
  TypeSynonymInstances #-}
module Param.DataTypes.Comp
(
module Param.DataTypes.Comp
) where
import Data.Comp.Param.Derive
import Data.Comp.Param.Show
import Data.Comp.Param
--import Data.Comp.Arbitrary ()
import Data . Comp . . Show
import Data .
import Test.QuickCheck.Arbitrary
import Test.QuickCheck.Gen
--import Test.QuickCheck.Property
import Control.Monad hiding (sequence_, mapM)
import Prelude hiding (sequence_,mapM)
-- base values
type ValueSig = Fun :+: Value
type ValueExpr = Term ValueSig
type ExprSig = Value :+: Op :+: Lam
type Expr = Term ExprSig
type SugarSig = Sugar :+: SugarLet :+: ExprSig
type SugarExpr = Term SugarSig
type BaseTypeSig = ValueT
type BaseType = Term BaseTypeSig
data ValueT a e = TInt
| TBool
| TPair e e
| TFun e e
data Value a e = VInt Int
| VBool Bool
| VPair e e
data Fun a e = Fun (e -> e)
data Proj = ProjLeft | ProjRight
deriving (Eq, Show)
data Op a e = Plus e e
| Mult e e
| If e e e
| Eq e e
| Lt e e
| And e e
| Not e
| Proj Proj e
data Lam a e = Lam (a -> e)
| App e e
data Sugar a e = Neg e
| Minus e e
| Gt e e
| Or e e
| Impl e e
data SugarLet a e = Let e (a -> e)
$(derive [makeDifunctor, makeFoldable, makeTraversable,
makeEqD, makeShowD, smartConstructors]
[''Value, ''Op, ''Sugar, ''ValueT])
$(derive [makeDifunctor, makeEqD, makeShowD, smartConstructors]
[''Lam, ''SugarLet])
{-
showBinOp :: String -> String -> String -> String
showBinOp op x y = "("++ x ++ op ++ y ++ ")"
instance ShowF Value where
showF (VInt i) = show i
showF (VBool b) = show b
showF (VPair x y) = showBinOp "," x y
instance ShowF Op where
showF (Plus x y) = showBinOp "+" x y
showF (Mult x y) = showBinOp "*" x y
showF (If b x y) = "if " ++ b ++ " then " ++ x ++ " else " ++ y ++ " fi"
showF (Eq x y) = showBinOp "==" x y
showF (Lt x y) = showBinOp "<" x y
showF (And x y) = showBinOp "&&" x y
showF (Not x) = "~" ++ x
showF (Proj ProjLeft x) = x ++ "!0"
showF (Proj ProjRight x) = x ++ "!1"
instance ShowF ValueT where
showF TInt = "Int"
showF TBool = "Bool"
showF (TPair x y) = "(" ++ x ++ "," ++ y ++ ")"
instance ShowF Sugar where
showF (Neg x) = "- " ++ x
showF (Minus x y) = "(" ++ x ++ "-" ++ y ++ ")"
showF (Gt x y) = "(" ++ x ++ ">" ++ y ++ ")"
showF (Or x y) = "(" ++ x ++ "||" ++ y ++ ")"
showF (Impl x y) = "(" ++ x ++ "->" ++ y ++ ")"-}
class GenTyped f where
genTypedAlg :: CoalgM Gen f BaseType
genTypedAlg a = do dist <- genTypedAlg' a
frequency $ map (\(i,f) -> (i,return f)) dist
genTypedAlg' :: BaseType -> Gen [(Int,f BaseType BaseType)]
genTypedAlg' a = genTypedAlg a >>= \ g -> return [(1,g)]
{-
genTyped :: forall f . (Traversable f, GenTyped f) => BaseType -> Gen (Term f)
genTyped = run
where run :: BaseType -> Gen (Term f)
run t = liftM Term $ genTypedAlg t >>= mapM (desize . run)
desize :: Gen a -> Gen a
desize gen = sized (\n -> resize (max 0 (n-1)) gen)
genSomeTyped :: (Traversable f, GenTyped f) => Gen (Term f)
genSomeTyped = arbitrary >>= genTyped
forAllTyped :: (GenTyped f, ShowF f, Testable prop, Traversable f) =>
(Term f -> prop) -> Property
forAllTyped f = forAll genSomeTyped f
instance (GenTyped f, GenTyped g) => GenTyped (f :+: g) where
genTypedAlg' t = do
left <- genTypedAlg' t
right <- genTypedAlg' t
let left' = map inl left
right' = map inr right
return (left' ++ right')
where inl (i,gen) = (i,Inl gen)
inr (i,gen) = (i,Inr gen)
instance GenTyped Value where
genTypedAlg' (Term t) = run t
where run TInt = arbitrary >>= \i-> return [(1,VInt i)]
run TBool = arbitrary >>= \b-> return [(1,VBool b)]
run (TPair s t) = return [(1, VPair s t)]
instance GenTyped Op where
genTypedAlg' ty = sized run
where run n = do (ty1,ty2) <- arbitrary
other' <- other n
return $ other' ++ [(n,If iTBool ty ty),
(n,Proj ProjLeft (iTPair ty ty1)),
(n,Proj ProjRight (iTPair ty2 ty))]
other n = case unTerm ty of
TInt -> return [(n,Plus iTInt iTInt),(n,Plus iTInt iTInt)]
TBool -> arbitrary >>= \t -> return
[(n, Eq t t),
(n,Lt iTInt iTInt),
(n,And iTBool iTBool),
(n,Not iTBool)]
TPair _ _ -> return []
instance GenTyped Sugar where
genTypedAlg' (Term t) = sized (run t)
where run TInt n = return [(5*n,Neg iTInt),(5*n,Minus iTInt iTInt)]
run TBool n = return [(5*n,Gt iTInt iTInt),(5*n,Or iTBool iTBool),
(5*n,Impl iTBool iTBool)]
run TPair{} _ = return []-}
| null |
https://raw.githubusercontent.com/pa-ba/compdata/5783d0e11129097e045cabba61643114b154e3f2/benchmark/Param/DataTypes/Comp.hs
|
haskell
|
import Data.Comp.Arbitrary ()
import Test.QuickCheck.Property
base values
|
{-# LANGUAGE TemplateHaskell, MultiParamTypeClasses, FlexibleInstances,
  FlexibleContexts, UndecidableInstances, TypeOperators, ScopedTypeVariables,
  TypeSynonymInstances #-}
module Param.DataTypes.Comp
(
module Param.DataTypes.Comp
) where
import Data.Comp.Param.Derive
import Data.Comp.Param.Show
import Data.Comp.Param
import Data . Comp . . Show
import Data .
import Test.QuickCheck.Arbitrary
import Test.QuickCheck.Gen
import Control.Monad hiding (sequence_, mapM)
import Prelude hiding (sequence_,mapM)
type ValueSig = Fun :+: Value
type ValueExpr = Term ValueSig
type ExprSig = Value :+: Op :+: Lam
type Expr = Term ExprSig
type SugarSig = Sugar :+: SugarLet :+: ExprSig
type SugarExpr = Term SugarSig
type BaseTypeSig = ValueT
type BaseType = Term BaseTypeSig
data ValueT a e = TInt
| TBool
| TPair e e
| TFun e e
data Value a e = VInt Int
| VBool Bool
| VPair e e
data Fun a e = Fun (e -> e)
data Proj = ProjLeft | ProjRight
deriving (Eq, Show)
data Op a e = Plus e e
| Mult e e
| If e e e
| Eq e e
| Lt e e
| And e e
| Not e
| Proj Proj e
data Lam a e = Lam (a -> e)
| App e e
data Sugar a e = Neg e
| Minus e e
| Gt e e
| Or e e
| Impl e e
data SugarLet a e = Let e (a -> e)
$(derive [makeDifunctor, makeFoldable, makeTraversable,
makeEqD, makeShowD, smartConstructors]
[''Value, ''Op, ''Sugar, ''ValueT])
$(derive [makeDifunctor, makeEqD, makeShowD, smartConstructors]
[''Lam, ''SugarLet])
{-
showBinOp :: String -> String -> String -> String
showBinOp op x y = "("++ x ++ op ++ y ++ ")"
instance ShowF Value where
showF (VInt i) = show i
showF (VBool b) = show b
showF (VPair x y) = showBinOp "," x y
instance ShowF Op where
showF (Plus x y) = showBinOp "+" x y
showF (Mult x y) = showBinOp "*" x y
showF (If b x y) = "if " ++ b ++ " then " ++ x ++ " else " ++ y ++ " fi"
showF (Eq x y) = showBinOp "==" x y
showF (Lt x y) = showBinOp "<" x y
showF (And x y) = showBinOp "&&" x y
showF (Not x) = "~" ++ x
showF (Proj ProjLeft x) = x ++ "!0"
showF (Proj ProjRight x) = x ++ "!1"
instance ShowF ValueT where
showF TInt = "Int"
showF TBool = "Bool"
showF (TPair x y) = "(" ++ x ++ "," ++ y ++ ")"
instance ShowF Sugar where
showF (Neg x) = "- " ++ x
showF (Minus x y) = "(" ++ x ++ "-" ++ y ++ ")"
showF (Gt x y) = "(" ++ x ++ ">" ++ y ++ ")"
showF (Or x y) = "(" ++ x ++ "||" ++ y ++ ")"
showF (Impl x y) = "(" ++ x ++ "->" ++ y ++ ")"-}
class GenTyped f where
genTypedAlg :: CoalgM Gen f BaseType
genTypedAlg a = do dist <- genTypedAlg' a
frequency $ map (\(i,f) -> (i,return f)) dist
genTypedAlg' :: BaseType -> Gen [(Int,f BaseType BaseType)]
genTypedAlg' a = genTypedAlg a >>= \ g -> return [(1,g)]
{-
genTyped :: forall f . (Traversable f, GenTyped f) => BaseType -> Gen (Term f)
genTyped = run
where run :: BaseType -> Gen (Term f)
run t = liftM Term $ genTypedAlg t >>= mapM (desize . run)
desize :: Gen a -> Gen a
desize gen = sized (\n -> resize (max 0 (n-1)) gen)
genSomeTyped :: (Traversable f, GenTyped f) => Gen (Term f)
genSomeTyped = arbitrary >>= genTyped
forAllTyped :: (GenTyped f, ShowF f, Testable prop, Traversable f) =>
(Term f -> prop) -> Property
forAllTyped f = forAll genSomeTyped f
instance (GenTyped f, GenTyped g) => GenTyped (f :+: g) where
genTypedAlg' t = do
left <- genTypedAlg' t
right <- genTypedAlg' t
let left' = map inl left
right' = map inr right
return (left' ++ right')
where inl (i,gen) = (i,Inl gen)
inr (i,gen) = (i,Inr gen)
instance GenTyped Value where
genTypedAlg' (Term t) = run t
where run TInt = arbitrary >>= \i-> return [(1,VInt i)]
run TBool = arbitrary >>= \b-> return [(1,VBool b)]
run (TPair s t) = return [(1, VPair s t)]
instance GenTyped Op where
genTypedAlg' ty = sized run
where run n = do (ty1,ty2) <- arbitrary
other' <- other n
return $ other' ++ [(n,If iTBool ty ty),
(n,Proj ProjLeft (iTPair ty ty1)),
(n,Proj ProjRight (iTPair ty2 ty))]
other n = case unTerm ty of
TInt -> return [(n,Plus iTInt iTInt),(n,Plus iTInt iTInt)]
TBool -> arbitrary >>= \t -> return
[(n, Eq t t),
(n,Lt iTInt iTInt),
(n,And iTBool iTBool),
(n,Not iTBool)]
TPair _ _ -> return []
instance GenTyped Sugar where
genTypedAlg' (Term t) = sized (run t)
where run TInt n = return [(5*n,Neg iTInt),(5*n,Minus iTInt iTInt)]
run TBool n = return [(5*n,Gt iTInt iTInt),(5*n,Or iTBool iTBool),
(5*n,Impl iTBool iTBool)]
run TPair{} _ = return []-}
|
41e2e3f3b1e39cad22f99e574f7313e88fda725170a006946d333b4c6a208409
|
diku-dk/futhark
|
DataDependencies.hs
|
-- | Facilities for inspecting the data dependencies of a program.
module Futhark.Analysis.DataDependencies
( Dependencies,
dataDependencies,
findNecessaryForReturned,
)
where
import Data.List qualified as L
import Data.Map.Strict qualified as M
import Futhark.IR
-- | A mapping from a variable name @v@, to those variables on which
-- the value of @v@ is dependent. The intuition is that we could
-- remove all other variables, and @v@ would still be computable.
-- This also includes names bound in loops or by .
type Dependencies = M.Map VName Names
-- | Compute the data dependencies for an entire body.
dataDependencies :: ASTRep rep => Body rep -> Dependencies
dataDependencies = dataDependencies' M.empty
dataDependencies' ::
ASTRep rep =>
Dependencies ->
Body rep ->
Dependencies
dataDependencies' startdeps = foldl grow startdeps . bodyStms
where
grow deps (Let pat _ (Match c cases defbody _)) =
let cases_deps = map (dataDependencies' deps . caseBody) cases
defbody_deps = dataDependencies' deps defbody
cdeps = foldMap (depsOf deps) c
comb (pe, se_cases_deps, se_defbody_deps) =
( patElemName pe,
mconcat $
se_cases_deps
++ [freeIn pe, cdeps, se_defbody_deps]
++ map (depsOfVar deps) (namesToList $ freeIn pe)
)
branchdeps =
M.fromList $
map comb $
zip3
(patElems pat)
( L.transpose . zipWith (map . depsOf) cases_deps $
map (map resSubExp . bodyResult . caseBody) cases
)
(map (depsOf defbody_deps . resSubExp) (bodyResult defbody))
in M.unions $ [branchdeps, deps, defbody_deps] ++ cases_deps
grow deps (Let pat _ e) =
let free = freeIn pat <> freeIn e
freeDeps = mconcat $ map (depsOfVar deps) $ namesToList free
in M.fromList [(name, freeDeps) | name <- patNames pat] `M.union` deps
depsOf :: Dependencies -> SubExp -> Names
depsOf _ (Constant _) = mempty
depsOf deps (Var v) = depsOfVar deps v
depsOfVar :: Dependencies -> VName -> Names
depsOfVar deps name = oneName name <> M.findWithDefault mempty name deps
-- | @findNecessaryForReturned p merge deps@ computes which of the
-- loop parameters (@merge@) are necessary for the result of the loop,
-- where @p@ given a loop parameter indicates whether the final value
-- of that parameter is live after the loop. @deps@ is the data
-- dependencies of the loop body. This is computed by straightforward
-- fixpoint iteration.
findNecessaryForReturned ::
(Param dec -> Bool) ->
[(Param dec, SubExp)] ->
M.Map VName Names ->
Names
findNecessaryForReturned usedAfterLoop merge_and_res allDependencies =
iterateNecessary mempty
<> namesFromList (map paramName $ filter usedAfterLoop $ map fst merge_and_res)
where
iterateNecessary prev_necessary
| necessary == prev_necessary = necessary
| otherwise = iterateNecessary necessary
where
necessary = mconcat $ map dependencies returnedResultSubExps
usedAfterLoopOrNecessary param =
usedAfterLoop param || paramName param `nameIn` prev_necessary
returnedResultSubExps =
map snd $ filter (usedAfterLoopOrNecessary . fst) merge_and_res
dependencies (Constant _) =
mempty
dependencies (Var v) =
M.findWithDefault (oneName v) v allDependencies
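The findNecessaryForReturned comment above describes a straightforward fixpoint iteration over a dependency map. As a generic, self-contained illustration of that idea only (using Data.Set rather than Futhark's Names/VName types, which are not defined in this excerpt, and function names chosen here for the sketch):

```haskell
import qualified Data.Set as Set

-- Iterate a step function until the set stops changing.
fixpoint :: Ord a => (Set.Set a -> Set.Set a) -> Set.Set a -> Set.Set a
fixpoint step prev
  | next == prev = prev
  | otherwise    = fixpoint step next
  where next = step prev

-- Close a seed set of names under a dependency function, in the spirit
-- of growing the "necessary" set above.
closeOver :: Ord a => (a -> Set.Set a) -> Set.Set a -> Set.Set a
closeOver deps = fixpoint (\s -> Set.union s (foldMap deps (Set.toList s)))
```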
| null |
https://raw.githubusercontent.com/diku-dk/futhark/98e4a75e4de7042afe030837084764bbf3c6c66e/src/Futhark/Analysis/DataDependencies.hs
|
haskell
|
| Facilities for inspecting the data dependencies of a program.
| Compute the data dependencies for an entire body.
| @findNecessaryForReturned p merge deps@ computes which of the
of that parameter is live after the loop. @deps@ is the data
dependencies of the loop body. This is computed by straightforward
fixpoint iteration.
|
module Futhark.Analysis.DataDependencies
( Dependencies,
dataDependencies,
findNecessaryForReturned,
)
where
import Data.List qualified as L
import Data.Map.Strict qualified as M
import Futhark.IR
-- | A mapping from a variable name @v@, to those variables on which
-- the value of @v@ is dependent. The intuition is that we could
-- remove all other variables, and @v@ would still be computable.
-- This also includes names bound in loops or by .
type Dependencies = M.Map VName Names
dataDependencies :: ASTRep rep => Body rep -> Dependencies
dataDependencies = dataDependencies' M.empty
dataDependencies' ::
ASTRep rep =>
Dependencies ->
Body rep ->
Dependencies
dataDependencies' startdeps = foldl grow startdeps . bodyStms
where
grow deps (Let pat _ (Match c cases defbody _)) =
let cases_deps = map (dataDependencies' deps . caseBody) cases
defbody_deps = dataDependencies' deps defbody
cdeps = foldMap (depsOf deps) c
comb (pe, se_cases_deps, se_defbody_deps) =
( patElemName pe,
mconcat $
se_cases_deps
++ [freeIn pe, cdeps, se_defbody_deps]
++ map (depsOfVar deps) (namesToList $ freeIn pe)
)
branchdeps =
M.fromList $
map comb $
zip3
(patElems pat)
( L.transpose . zipWith (map . depsOf) cases_deps $
map (map resSubExp . bodyResult . caseBody) cases
)
(map (depsOf defbody_deps . resSubExp) (bodyResult defbody))
in M.unions $ [branchdeps, deps, defbody_deps] ++ cases_deps
grow deps (Let pat _ e) =
let free = freeIn pat <> freeIn e
freeDeps = mconcat $ map (depsOfVar deps) $ namesToList free
in M.fromList [(name, freeDeps) | name <- patNames pat] `M.union` deps
depsOf :: Dependencies -> SubExp -> Names
depsOf _ (Constant _) = mempty
depsOf deps (Var v) = depsOfVar deps v
depsOfVar :: Dependencies -> VName -> Names
depsOfVar deps name = oneName name <> M.findWithDefault mempty name deps
-- loop parameters (@merge@) are necessary for the result of the loop,
-- where @p@ given a loop parameter indicates whether the final value
findNecessaryForReturned ::
(Param dec -> Bool) ->
[(Param dec, SubExp)] ->
M.Map VName Names ->
Names
findNecessaryForReturned usedAfterLoop merge_and_res allDependencies =
iterateNecessary mempty
<> namesFromList (map paramName $ filter usedAfterLoop $ map fst merge_and_res)
where
iterateNecessary prev_necessary
| necessary == prev_necessary = necessary
| otherwise = iterateNecessary necessary
where
necessary = mconcat $ map dependencies returnedResultSubExps
usedAfterLoopOrNecessary param =
usedAfterLoop param || paramName param `nameIn` prev_necessary
returnedResultSubExps =
map snd $ filter (usedAfterLoopOrNecessary . fst) merge_and_res
dependencies (Constant _) =
mempty
dependencies (Var v) =
M.findWithDefault (oneName v) v allDependencies
|
9f3b7b3f6ad1effe6d57d8fae7b2e6ddeefcaeb6d0bfeebe67647b183245de37
|
supki/liblastfm
|
TasteometerSpec.hs
|
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE OverloadedStrings #-}
module Xml.TasteometerSpec (spec) where
import Data.Text (Text)
import Lastfm
import qualified Lastfm.Tasteometer as Taste
import Test.Hspec
import Text.Xml.Lens
import SpecHelper
spec :: Spec
spec = do
it "compare" $
publicly (Taste.compare (user "smpcln") (user "MCDOOMDESTROYER"))
`shouldHaveXml`
xmlQuery
it "compare" $
publicly (Taste.compare (user "smpcln") (artists ["enduser", "venetian snares"]))
`shouldHaveXml`
xmlQuery
it "compare" $
publicly (Taste.compare (artists ["enduser", "venetian snares"]) (user "smpcln"))
`shouldHaveXml`
xmlQuery
it "compare" $
publicly (Taste.compare (artists ["enduser", "venetian snares"]) (artists ["enduser", "venetian snares"]))
`shouldHaveXml`
xmlQuery
xmlQuery :: Fold Document Text
xmlQuery = root.node "comparison".node "result".node "score".text
| null |
https://raw.githubusercontent.com/supki/liblastfm/754be163c4ce14c9b4819f1359b5f95a0f91a29d/test/api/Xml/TasteometerSpec.hs
|
haskell
|
# LANGUAGE OverloadedStrings #
|
{-# LANGUAGE DataKinds #-}
module Xml.TasteometerSpec (spec) where
import Data.Text (Text)
import Lastfm
import qualified Lastfm.Tasteometer as Taste
import Test.Hspec
import Text.Xml.Lens
import SpecHelper
spec :: Spec
spec = do
it "compare" $
publicly (Taste.compare (user "smpcln") (user "MCDOOMDESTROYER"))
`shouldHaveXml`
xmlQuery
it "compare" $
publicly (Taste.compare (user "smpcln") (artists ["enduser", "venetian snares"]))
`shouldHaveXml`
xmlQuery
it "compare" $
publicly (Taste.compare (artists ["enduser", "venetian snares"]) (user "smpcln"))
`shouldHaveXml`
xmlQuery
it "compare" $
publicly (Taste.compare (artists ["enduser", "venetian snares"]) (artists ["enduser", "venetian snares"]))
`shouldHaveXml`
xmlQuery
xmlQuery :: Fold Document Text
xmlQuery = root.node "comparison".node "result".node "score".text
|
0b66f329eab7a13638f9e93e5ac5e196f1657d75265ee94b3efad725c6b10008
|
nmunro/cl-tutorials
|
main.lisp
|
(defpackage hangman/tests/main
(:use :cl
:hangman
:rove))
(in-package :hangman/tests/main)
;; NOTE: To run this test file, execute `(asdf:test-system :hangman)' in your Lisp.
(deftest test-target-1
(testing "should (= 1 1) to be true"
(ok (= 1 1))))
| null |
https://raw.githubusercontent.com/nmunro/cl-tutorials/e42f879edb01456f3cf0d159b0042e8e61f1b02e/3-hangman/tests/main.lisp
|
lisp
|
NOTE: To run this test file, execute `(asdf:test-system :hangman)' in your Lisp.
|
(defpackage hangman/tests/main
(:use :cl
:hangman
:rove))
(in-package :hangman/tests/main)
(deftest test-target-1
(testing "should (= 1 1) to be true"
(ok (= 1 1))))
|
5f4cbae5aec17020c1a44ee5f2dfe34239100ab10581597d30dbb0ee25fd9064
|
bcc32/projecteuler-ocaml
|
bench_is_prime.ml
|
open! Core
open! Import
let%bench_fun ("is_prime cached" [@indexed n = [ 10; 100; 1_000; 10_000 ]]) =
ignore (Number_theory.Int.is_prime n : bool);
fun () -> Number_theory.Int.is_prime n
;;
| null |
https://raw.githubusercontent.com/bcc32/projecteuler-ocaml/647a1db1caaf919a1c506c213a2e7948ac3ea63c/bench/src/bench_is_prime.ml
|
ocaml
|
open! Core
open! Import
let%bench_fun ("is_prime cached" [@indexed n = [ 10; 100; 1_000; 10_000 ]]) =
ignore (Number_theory.Int.is_prime n : bool);
fun () -> Number_theory.Int.is_prime n
;;
|
|
c47654a6c116db6bca75236b941f5f8d0819798ff3eaeb9129ac8fa35fb7823d
|
iu-parfunc/haskell_dsl_tour
|
OverloadingFeldspar.hs
|
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE RebindableSyntax #-}
module OverloadingFeldspar where
import Feldspar.GADT
import Feldspar.InterpGADT
import Prelude (Int,(*),(+),fromInteger, return,(>>))
import qualified Prelude as P
x :: Exp () Int
x = 3 + 4
type EBool e = Exp e (Int -> Int -> Int)
true :: EBool e
true = Abs Int (Abs Int (Var (Suc Zro)))
false :: EBool e
false = Abs Int (Abs Int (Var Zro))
ifThenElse :: EBool e -> Exp e Int -> Exp e Int -> Exp e Int
ifThenElse b e1 e2 =
App (App b e1) e2
y :: Exp () Int
y = if true
then x
else 2 * x
z :: Exp () Int
z = if false
then x
else 2 * x
main :: P.IO ()
main = do P.print (run y ())
P.print (run z ())
| null |
https://raw.githubusercontent.com/iu-parfunc/haskell_dsl_tour/f75a7e492a1e5d219a77fb128f70441d54a706eb/front_end/overloading/OverloadingFeldspar.hs
|
haskell
|
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE RebindableSyntax #-}
module OverloadingFeldspar where
import Feldspar.GADT
import Feldspar.InterpGADT
import Prelude (Int,(*),(+),fromInteger, return,(>>))
import qualified Prelude as P
x :: Exp () Int
x = 3 + 4
type EBool e = Exp e (Int -> Int -> Int)
true :: EBool e
true = Abs Int (Abs Int (Var (Suc Zro)))
false :: EBool e
false = Abs Int (Abs Int (Var Zro))
ifThenElse :: EBool e -> Exp e Int -> Exp e Int -> Exp e Int
ifThenElse b e1 e2 =
App (App b e1) e2
y :: Exp () Int
y = if true
then x
else 2 * x
z :: Exp () Int
z = if false
then x
else 2 * x
main :: P.IO ()
main = do P.print (run y ())
P.print (run z ())
|
|
3542197334c64eae488959ddcdccb44bec87b8379c22aa60fc06f8e56a4574e3
|
missingfaktor/akar
|
combinators.clj
|
(ns akar.combinators
(:require [akar.patterns :refer [!fail !bind !pred !any]]
[akar.internal.utilities :refer :all]
[akar-commons.miscellaneous :refer :all]))
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;;; Combinators to compose a number of patterns into one
(def !and
(variadic-reductive-function
:zero !any
:combine (fn [!p1 !p2]
(fn [arg]
(if-some [matches1 (!p1 arg)]
(if-some [matches2 (!p2 arg)]
(concat matches1 matches2)))))))
(def !or
(variadic-reductive-function
:zero !fail
:combine (fn [!p1 !p2]
(fn [arg]
(or (!p1 arg)
(!p2 arg))))))
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;;; Combinators corresponding to common pattern operations
(defn !not [!p]
(fn [arg]
(if (nil? (!p arg))
[]
nil)))
(defn !at [!p]
(!and !bind !p))
(defn !guard [!p cond]
(!and !p (!pred cond)))
(defn !view [f !p]
(comp !p f))
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;;; "Further" combinators
; To support nested patterns, we must allow values emitted by one pattern to be further
; matched by other patterns. What follows are a set of combinators that support such
; "furthering", and related features.
(defn ^:private fan-out [& {:keys [!root !nexts modify-root-emissions modify-nexts]}]
(fn [arg]
(if-some [root-emissions (!root arg)]
(let [root-emissions' (modify-root-emissions root-emissions)
!nexts' (modify-nexts !nexts)]
(if (same-size? root-emissions' !nexts')
(let [pairings (map vector root-emissions' !nexts')]
(reduce
(fn [emissions [in pattern]]
(let [new-emissions (pattern in)]
(if (nil? new-emissions)
(reduced nil)
(concat emissions new-emissions))))
[]
pairings)))))))
(defn !further [!root !nexts]
(fan-out :!root !root
:!nexts !nexts
:modify-root-emissions identity
:modify-nexts identity))
(defn !further-many
([!root !nexts] (fan-out :!root !root
:!nexts !nexts
:modify-root-emissions single
:modify-nexts identity))
([!root !nexts !rest] (fan-out :!root !root
:!nexts !nexts
:modify-root-emissions (fn [root-emissions]
(->> root-emissions
single
(clump-after (count !nexts))))
:modify-nexts (fn [!nexts]
(append !nexts !rest)))))
; Aliases for succinctness in direct use
(define-alias !f !further)
(define-alias !f* !further-many)
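(comment
  ;; Illustrative sketch of the "furthering" described above; not part of the
  ;; original namespace. `!two` is a made-up root pattern that emits the two
  ;; elements of a 2-element vector; `!pred` comes from akar.patterns and is
  ;; assumed (as its use in `!guard` suggests) to emit nothing on success and
  ;; nil on failure.
  (let [!two (fn [arg]
               (when (and (vector? arg) (= 2 (count arg)))
                 [(first arg) (second arg)]))]
    ;; Fan the two emissions of `!two` out to two further patterns:
    ((!further !two [(!pred number?) (!pred string?)]) [1 "one"])
    ;; => a (possibly empty) seq when both inner patterns accept, nil otherwise
    ))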
| null |
https://raw.githubusercontent.com/missingfaktor/akar/66403b735cafab4e9fb971b4bf56e899ee84962f/akar-core/src/akar/combinators.clj
|
clojure
|
Combinators corresponding to common pattern operations
"Further" combinators
matched by other patterns. What follows are a set of combinators that support such
"furthering", and related features.
Aliases for succinctness in direct use
|
(ns akar.combinators
(:require [akar.patterns :refer [!fail !bind !pred !any]]
[akar.internal.utilities :refer :all]
[akar-commons.miscellaneous :refer :all]))
;;; Combinators to compose a number of patterns into one
(def !and
(variadic-reductive-function
:zero !any
:combine (fn [!p1 !p2]
(fn [arg]
(if-some [matches1 (!p1 arg)]
(if-some [matches2 (!p2 arg)]
(concat matches1 matches2)))))))
(def !or
(variadic-reductive-function
:zero !fail
:combine (fn [!p1 !p2]
(fn [arg]
(or (!p1 arg)
(!p2 arg))))))
(defn !not [!p]
(fn [arg]
(if (nil? (!p arg))
[]
nil)))
(defn !at [!p]
(!and !bind !p))
(defn !guard [!p cond]
(!and !p (!pred cond)))
(defn !view [f !p]
(comp !p f))
;; To support nested patterns, we must allow values emitted by one pattern to be further
;; matched by other patterns.
(defn ^:private fan-out [& {:keys [!root !nexts modify-root-emissions modify-nexts]}]
(fn [arg]
(if-some [root-emissions (!root arg)]
(let [root-emissions' (modify-root-emissions root-emissions)
!nexts' (modify-nexts !nexts)]
(if (same-size? root-emissions' !nexts')
(let [pairings (map vector root-emissions' !nexts')]
(reduce
(fn [emissions [in pattern]]
(let [new-emissions (pattern in)]
(if (nil? new-emissions)
(reduced nil)
(concat emissions new-emissions))))
[]
pairings)))))))
(defn !further [!root !nexts]
(fan-out :!root !root
:!nexts !nexts
:modify-root-emissions identity
:modify-nexts identity))
(defn !further-many
([!root !nexts] (fan-out :!root !root
:!nexts !nexts
:modify-root-emissions single
:modify-nexts identity))
([!root !nexts !rest] (fan-out :!root !root
:!nexts !nexts
:modify-root-emissions (fn [root-emissions]
(->> root-emissions
single
(clump-after (count !nexts))))
:modify-nexts (fn [!nexts]
(append !nexts !rest)))))
(define-alias !f !further)
(define-alias !f* !further-many)
|
704fc03a6259bfca8167df872472b55a66501f4832c1aafc06689d367c266189
|
dstarcev/stepic-haskell
|
Task13.hs
|
module Module4.Task13 where
data Person = Person {
firstName :: String,
lastName :: String,
age :: Int } deriving Show
abbrFirstName :: Person -> Person
abbrFirstName p@Person{ firstName = (x:_:_) } = p { firstName = x : "." }
abbrFirstName p = p
| null |
https://raw.githubusercontent.com/dstarcev/stepic-haskell/6a8cf4b3cc17333ac4175e825db57dbe151ebae0/src/Module4/Task13.hs
|
haskell
|
module Module4.Task13 where
data Person = Person {
firstName :: String,
lastName :: String,
age :: Int } deriving Show
abbrFirstName :: Person -> Person
abbrFirstName p@Person{ firstName = (x:_:_) } = p { firstName = x : "." }
abbrFirstName p = p
|
|
956dfc84d08fa52a8538b3020dea37c331d81c332843f983660093248fb1461f
|
dannypsnl/scheme-to-arm64
|
scheme.rkt
|
#lang nanopass
(provide E
scm/Final
unparse-scm/Final)
(define (constant? x)
(or (integer? x) (char? x) (boolean? x)))
(define-language scm
(terminals (symbol [name])
(constant [c])
(vector [v])
(string [s]))
(Expr [e body]
c
v
s
name
(define name e)
(begin e* ... e)
(lambda (name* ...) body* ... body)
(let ([name* e*] ...) body* ... body)
(if e0 e1)
(if e0 e1 e2)
(cond [e body* ... body] ...)
(e0 e1 ...)))
(define-language scm/L1
(extends scm)
(Expr [e body]
(- (lambda (name* ...) body* ... body)
(let ([name* e*] ...) body* ... body)
(cond [e body* ... body] ...))
(+ (lambda (name* ...) body)
(cond [e body] ...))))
(define-pass wrap-begin : (scm Expr) (expr) -> (scm/L1 Expr) ()
[Expr : Expr (expr) -> Expr ()
[(lambda (,name* ...) ,[body*] ... ,[body])
`(lambda (,name* ...) (begin ,body* ... ,body))]
[(let ([,name* ,[e*]] ...) ,[body*] ... ,[body])
`(begin (define ,name* ,e*) ...
,body* ... ,body)]
[(cond [,[e] ,[body*] ... ,[body]] ...)
`(cond [,e (begin ,body* ... ,body)] ...)]])
(define-language scm/L2
(extends scm/L1)
(Expr [e body]
(- (if e0 e1))))
(define-pass remove-if : (scm/L1 Expr) (e) -> (scm/L2 Expr) ()
[Expr : Expr (e) -> Expr ()
[(if ,[e0] ,[e1])
`(if ,e0 ,e1 (void))]])
(define-language scm/L3 (extends scm/L2))
(define-pass normalize-data : (scm/L2 Expr) (e) -> (scm/L3 Expr) ()
[Expr : Expr (e) -> Expr ()
[(,[e0] ,[e1] ...)
(case e0
      [(list quote) (foldr (λ (v r) `(cons ,v ,r)) `null e1)]
[(vector) `,(apply vector e1)]
[else `(,e0 ,e1 ...)])]])
(define primitive-functions
'(+
-
*
/
add1 sub1
zero?
; list and pair
car cdr cons
null?
; logical
and or
; comparison operators
= < > <= >= char=?
; type check
integer?
boolean?
char?
; string
make-string string-ref string? string-length
; vector
vector make-vector vector-ref vector? vector-length))
(define (primitive? x) (member x primitive-functions))
(define-language scm/L4
(extends scm/L3)
(terminals (+ (primitive [op])))
(Expr [e body]
(+ (prim op e1 ...))))
(define-pass explicit-prim-call : (scm/L3 Expr) (e) -> (scm/L4 Expr) ()
[Expr : Expr (e) -> Expr ()
[(,[e0] ,[e1] ...)
(cond
[(member e0 primitive-functions)
`(prim ,e0 ,e1 ...)]
[else `(,e0 ,e1 ...)])]])
(define-language scm/L5
(extends scm/L4)
(Expr [e body]
(- (lambda (name* ...) body))
(+ (lifted-lambda name (name* ...) body)
; make-closure stores function and environment
(make-closure e0 e1)
(make-env name ...))))
(define-pass freevars : (scm/L4 Expr) (e) -> * ()
(Expr : Expr (e) -> * ()
[,name (set name)]
[(lambda (,name* ...) ,body)
(set-subtract (freevars body)
(list->set name*))]
[(define ,name ,e)
(freevars e)]
[(begin ,e* ... ,e)
(apply set-union (map freevars (append e* (list e))))]
[(if ,e0 ,e1 ,e2)
(set-union (freevars e0)
(freevars e1)
(freevars e2))]
[(cond [,e ,body] ...)
(apply set-union
(append (map freevars e)
(map freevars body)))]
[(prim ,op ,e* ...)
(apply set-union (map freevars e*))]
[(,e0 ,e1 ...)
(apply set-union (map freevars (cons e0 e1)))]
[else (set)]))
(define-pass replace-free : (scm/L5 Expr) (e $env fvs) -> (scm/L5 Expr) ()
(Expr : Expr (e) -> Expr ()
[,name (guard (set-member? fvs name))
`(prim vector-ref ,$env ,(index-of (set->list fvs) name))]))
(define-pass closure-conversion : (scm/L4 Expr) (e) -> (scm/L5 Expr) ()
(Expr : Expr (e) -> Expr ()
[(lambda (,name* ...) ,[body])
(define $lifted-function-name (gensym 'lifted))
(define $env (gensym 'env))
(define fvs (freevars e))
; convert free-vars in body by using reference to $env
(if (set-empty? fvs)
`(make-closure (lifted-lambda ,$lifted-function-name
(,name* ...)
,body)
(prim vector))
`(make-closure (lifted-lambda ,$lifted-function-name
(,name* ... ,$env)
,(replace-free body $env fvs))
(prim vector ,(set->list fvs) ...)))]))
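;; Worked example (added for illustration; the gensym'd names are schematic and
;; not produced literally): closure-converting `(lambda (x) (prim + x y))`,
;; where `y` is free, yields roughly
;;
;;   (make-closure (lifted-lambda lifted1 (x env1)
;;                                (prim + x (prim vector-ref env1 0)))
;;                 (prim vector y))
;;
;; i.e. the free variable `y` is captured in the environment vector and each
;; use of it in the body becomes a `vector-ref` into that environment.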
(define-language scm/Final (extends scm/L5))
(define-pass final : (scm/L5 Expr) (e) -> (scm/Final Expr) ()
[Expr : Expr (e) -> Expr ()])
(define-parser parse-scm/Final scm/Final)
(define-parser parse-scm scm)
(define (E x)
(foldl (lambda (f e)
(f e))
(parse-scm x)
(list wrap-begin
remove-if
normalize-data
explicit-prim-call
closure-conversion
final)))
(module+ test
(require rackunit)
(define-parser pL4 scm/L4)
(check-equal? (freevars (pL4 '(lambda (x) (if x y z))))
(set 'y 'z))
(check-equal? (freevars (pL4 '(lambda (x) (prim cons x y))))
(set 'y))
(check-equal? (freevars (pL4 '(lambda (x) #(x y))))
(set 'y))
)
| null |
https://raw.githubusercontent.com/dannypsnl/scheme-to-arm64/7be6aaf2d5c3b45cffb98750b43d48df23550c50/lang/scheme.rkt
|
racket
|
list and pair
logical
comparison operators
type check
string
vector
make-closure stores function and environment
convert free-vars in body by using reference to $env
|
#lang nanopass
(provide E
scm/Final
unparse-scm/Final)
(define (constant? x)
(or (integer? x) (char? x) (boolean? x)))
(define-language scm
(terminals (symbol [name])
(constant [c])
(vector [v])
(string [s]))
(Expr [e body]
c
v
s
name
(define name e)
(begin e* ... e)
(lambda (name* ...) body* ... body)
(let ([name* e*] ...) body* ... body)
(if e0 e1)
(if e0 e1 e2)
(cond [e body* ... body] ...)
(e0 e1 ...)))
(define-language scm/L1
(extends scm)
(Expr [e body]
(- (lambda (name* ...) body* ... body)
(let ([name* e*] ...) body* ... body)
(cond [e body* ... body] ...))
(+ (lambda (name* ...) body)
(cond [e body] ...))))
(define-pass wrap-begin : (scm Expr) (expr) -> (scm/L1 Expr) ()
[Expr : Expr (expr) -> Expr ()
[(lambda (,name* ...) ,[body*] ... ,[body])
`(lambda (,name* ...) (begin ,body* ... ,body))]
[(let ([,name* ,[e*]] ...) ,[body*] ... ,[body])
`(begin (define ,name* ,e*) ...
,body* ... ,body)]
[(cond [,[e] ,[body*] ... ,[body]] ...)
`(cond [,e (begin ,body* ... ,body)] ...)]])
(define-language scm/L2
(extends scm/L1)
(Expr [e body]
(- (if e0 e1))))
(define-pass remove-if : (scm/L1 Expr) (e) -> (scm/L2 Expr) ()
[Expr : Expr (e) -> Expr ()
[(if ,[e0] ,[e1])
`(if ,e0 ,e1 (void))]])
(define-language scm/L3 (extends scm/L2))
(define-pass normalize-data : (scm/L2 Expr) (e) -> (scm/L3 Expr) ()
[Expr : Expr (e) -> Expr ()
[(,[e0] ,[e1] ...)
(case e0
      [(list quote) (foldr (λ (v r) `(cons ,v ,r)) `null e1)]
[(vector) `,(apply vector e1)]
[else `(,e0 ,e1 ...)])]])
(define primitive-functions
'(+
-
*
/
add1 sub1
zero?
car cdr cons
null?
and or
= < > <= >= char=?
integer?
boolean?
char?
make-string string-ref string? string-length
vector make-vector vector-ref vector? vector-length))
(define (primitive? x) (member x primitive-functions))
(define-language scm/L4
(extends scm/L3)
(terminals (+ (primitive [op])))
(Expr [e body]
(+ (prim op e1 ...))))
(define-pass explicit-prim-call : (scm/L3 Expr) (e) -> (scm/L4 Expr) ()
[Expr : Expr (e) -> Expr ()
[(,[e0] ,[e1] ...)
(cond
[(member e0 primitive-functions)
`(prim ,e0 ,e1 ...)]
[else `(,e0 ,e1 ...)])]])
(define-language scm/L5
(extends scm/L4)
(Expr [e body]
(- (lambda (name* ...) body))
(+ (lifted-lambda name (name* ...) body)
(make-closure e0 e1)
(make-env name ...))))
(define-pass freevars : (scm/L4 Expr) (e) -> * ()
(Expr : Expr (e) -> * ()
[,name (set name)]
[(lambda (,name* ...) ,body)
(set-subtract (freevars body)
(list->set name*))]
[(define ,name ,e)
(freevars e)]
[(begin ,e* ... ,e)
(apply set-union (map freevars (append e* (list e))))]
[(if ,e0 ,e1 ,e2)
(set-union (freevars e0)
(freevars e1)
(freevars e2))]
[(cond [,e ,body] ...)
(apply set-union
(append (map freevars e)
(map freevars body)))]
[(prim ,op ,e* ...)
(apply set-union (map freevars e*))]
[(,e0 ,e1 ...)
(apply set-union (map freevars (cons e0 e1)))]
[else (set)]))
(define-pass replace-free : (scm/L5 Expr) (e $env fvs) -> (scm/L5 Expr) ()
(Expr : Expr (e) -> Expr ()
[,name (guard (set-member? fvs name))
`(prim vector-ref ,$env ,(index-of (set->list fvs) name))]))
(define-pass closure-conversion : (scm/L4 Expr) (e) -> (scm/L5 Expr) ()
(Expr : Expr (e) -> Expr ()
[(lambda (,name* ...) ,[body])
(define $lifted-function-name (gensym 'lifted))
(define $env (gensym 'env))
(define fvs (freevars e))
(if (set-empty? fvs)
`(make-closure (lifted-lambda ,$lifted-function-name
(,name* ...)
,body)
(prim vector))
`(make-closure (lifted-lambda ,$lifted-function-name
(,name* ... ,$env)
,(replace-free body $env fvs))
(prim vector ,(set->list fvs) ...)))]))
(define-language scm/Final (extends scm/L5))
(define-pass final : (scm/L5 Expr) (e) -> (scm/Final Expr) ()
[Expr : Expr (e) -> Expr ()])
(define-parser parse-scm/Final scm/Final)
(define-parser parse-scm scm)
(define (E x)
(foldl (lambda (f e)
(f e))
(parse-scm x)
(list wrap-begin
remove-if
normalize-data
explicit-prim-call
closure-conversion
final)))
(module+ test
(require rackunit)
(define-parser pL4 scm/L4)
(check-equal? (freevars (pL4 '(lambda (x) (if x y z))))
(set 'y 'z))
(check-equal? (freevars (pL4 '(lambda (x) (prim cons x y))))
(set 'y))
(check-equal? (freevars (pL4 '(lambda (x) #(x y))))
(set 'y))
)
|
6dcc0473e8c5b82ebf0272cfc4f70fd16ecf3171d8b1b67832006451853c6e58
|
PLSysSec/lio
|
Error.hs
|
{-# LANGUAGE Trustworthy #-}
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE ExistentialQuantification #-}
{- |
This module exports exception types thrown in response to label
failures. In addition, it provides 'withContext', a function that
annotates any exceptions in the 'AnyLabelError' hierarchy that are
thrown within a given scope. These annotations should be used to add
function names to exceptions, so as to make it easier to pinpoint the
cause of a label error.
-}
module LIO.Error (
Annotatable(..), withContext
, AnyLabelError(..), lerrToException, lerrFromException
, GenericPrivDesc(..), LabelError(..), labelError, labelErrorP
, InsufficientPrivs(..), insufficientPrivs
, ResultExceedsLabel(..)
) where
import safe qualified Control.Exception as IO
import safe Data.Typeable
import safe LIO.Exception
import safe LIO.Label
import LIO.TCB
-- | Class of error messages that can be annotated with context.
class Annotatable e where
annotate :: String -> e -> e
-- | Parent of all label-related exceptions.
data AnyLabelError = forall e. (Exception e, Annotatable e) =>
AnyLabelError e deriving Typeable
instance Show AnyLabelError where
showsPrec d (AnyLabelError e) = showsPrec d e
instance Annotatable AnyLabelError where
annotate s (AnyLabelError e) = AnyLabelError $ annotate s e
instance Exception AnyLabelError
-- | Executes an action with a context string, which will be added to
-- any label exception thrown.
--
-- Note: this function wraps an action with a 'catch', and thus may
-- incur a small runtime cost (though it is well under 100 ns on
-- machines we benchmarked).
withContext :: String -> LIO l a -> LIO l a
withContext ctx (LIOTCB act) =
LIOTCB $ \st -> act st `IO.catch` \e ->
IO.throwIO $ annotate ctx (e :: AnyLabelError)
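-- Example (added illustration; 'fetchSecret' and 'someLabeledAction' are
-- hypothetical, not part of this module):
--
-- > fetchSecret :: Label l => LIO l Secret
-- > fetchSecret = withContext "fetchSecret" $ do
-- >   s <- someLabeledAction
-- >   ...
--
-- Any 'LabelError' raised inside the block then carries "fetchSecret" in its
-- 'lerrContext', which is what makes the failure easy to pinpoint.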
-- | Definition of 'toException' for children of 'AnyLabelError' in
-- the exception hierarchy.
lerrToException :: (Exception e, Annotatable e) => e -> SomeException
lerrToException = toException . AnyLabelError
-- | Definition of 'fromException' for children of 'AnyLabelError' in
-- the exception hierarchy.
lerrFromException :: (Exception e) => SomeException -> Maybe e
lerrFromException se = do
AnyLabelError e <- fromException se
cast e
-- | A generic privilege description for recording relevant privileges
-- in exceptions.
data GenericPrivDesc l = forall p. (PrivDesc l p) => GenericPrivDesc p
instance Show (GenericPrivDesc l) where
showsPrec d (GenericPrivDesc p) = showsPrec d p
-- | Main error type thrown by label failures in the 'LIO' monad.
data LabelError l = LabelError {
lerrContext :: [String] -- ^ Annotation of where the failure happened.
, lerrFailure :: String -- ^ Actual function that failed.
, lerrCurLabel :: l -- ^ Current label at time of error.
, lerrCurClearance :: l -- ^ Current clearance at time of error.
, lerrPrivs :: [GenericPrivDesc l] -- ^ Any privileges involved in error.
, lerrLabels :: [l] -- ^ Any labels involved in error.
} deriving (Show, Typeable)
instance Annotatable (LabelError l) where
annotate a e = e { lerrContext = a : lerrContext e }
instance Label l => Exception (LabelError l) where
toException = lerrToException
fromException = lerrFromException
-- | Throw a label-error exception.
labelError :: (Label l) => String -- ^ Function that failed.
-> [l] -- ^ Labels involved in error.
-> LIO l a
labelError fl ls = do
st <- getLIOStateTCB
throwLIO LabelError {
lerrContext = []
, lerrFailure = fl
, lerrCurLabel = lioLabel st
, lerrCurClearance = lioClearance st
, lerrPrivs = []
, lerrLabels = ls
}
-- | Throw a label-error exception.
labelErrorP :: (Label l, PrivDesc l p) => String -- ^ Function that failed.
-> Priv p -- ^ Privileges involved.
-> [l] -- ^ Labels involved.
-> LIO l a
labelErrorP fl p ls = do
st <- getLIOStateTCB
throwLIO LabelError {
lerrContext = []
, lerrFailure = fl
, lerrCurLabel = lioLabel st
, lerrCurClearance = lioClearance st
, lerrPrivs = [GenericPrivDesc $ privDesc p]
, lerrLabels = ls
}
-- | Error indicating insufficient privileges (independent of the
-- current label). This exception is thrown by 'delegate', and
-- should also be thrown by gates that receive insufficient privilege
-- descriptions (see "LIO.Delegate").
data InsufficientPrivs = forall p. (SpeaksFor p) => InsufficientPrivs {
inspContext :: [String]
, inspFailure :: String
, inspSupplied :: p
, inspNeeded :: p
} deriving (Typeable)
instance Show InsufficientPrivs where
showsPrec _ (InsufficientPrivs c l s n) =
("InsufficientPrivs { inspContext = " ++) . shows c .
(", inspLocation = " ++) . shows l .
(", inspSupplied = " ++) . shows s .
(", inspNeeded = " ++) . shows n .
(" }" ++)
instance Annotatable InsufficientPrivs where
annotate a e = e { inspContext = a : inspContext e }
instance Exception InsufficientPrivs where
toException = lerrToException
fromException = lerrFromException
-- | Raise 'InsufficientPrivs' error.
insufficientPrivs :: (SpeaksFor p) =>
String -- ^ Function in which error occurs
-> p -- ^ Description of privileges supplied
-> p -- ^ Description of privileges needed
-> a
insufficientPrivs fl supplied needed
| isPriv supplied = error $ "insufficientPrivs: " ++ show fl ++
" supplied actual privileges instead of description"
| otherwise = IO.throw $ InsufficientPrivs [] fl supplied needed
-- | Error raised when a computation spawned by 'lFork' terminates
-- with its current label above the label of the result.
data ResultExceedsLabel l = ResultExceedsLabel {
relContext :: [String]
, relLocation :: String
, relDeclaredLabel :: l
, relActualLabel :: Maybe l
} deriving (Show, Typeable)
instance Annotatable (ResultExceedsLabel l) where
annotate a e = e { relContext = a : relContext e }
instance (Label l) => Exception (ResultExceedsLabel l) where
toException = lerrToException
fromException = lerrFromException
| null |
https://raw.githubusercontent.com/PLSysSec/lio/622a3e7bc86a3b42ab4ce8be954064a5f142247a/lio/LIO/Error.hs
|
haskell
|
# LANGUAGE DeriveDataTypeable #
|
This module exports exception types thrown in response to label
failures. In addition, it provides 'withContext', a function that
annotates any exceptions in the 'AnyLabelError' hierarchy that are
thrown within a given scope. These annotations should be used to add
function names to exceptions, so as to make it easier to pinpoint the
cause of a label error.
| Class of error messages that can be annotated with context.
| Parent of all label-related exceptions.
| Executes an action with a context string, which will be added to
any label exception thrown.
Note: this function wraps an action with a 'catch', and thus may
machines we benchmarked).
| Definition of 'toException' for children of 'AnyLabelError' in
the exception hierarchy.
| Definition of 'fromException' for children of 'AnyLabelError' in
the exception hierarchy.
| A generic privilege description for recording relevant privileges
in exceptions.
^ Annotation of where the failure happened.
^ Actual function that failed.
^ Current label at time of error.
^ Current clearance at time of error.
^ Any privileges involved in error.
^ Any labels involved in error.
| Throw a label-error exception.
^ Function that failed.
^ Labels involved in error.
| Throw a label-error exception.
^ Function that failed.
^ Privileges involved.
^ Labels involved.
| Error indicating insufficient privileges (independent of the
current label). This exception is thrown by 'delegate', and
should also be thrown by gates that receive insufficient privilege
^ Function in which error occurs
^ Description of privileges supplied
^ Description of privileges needed
| Error raised when a computation spawned by 'lFork' terminates
with its current label above the label of the result.
|
{-# LANGUAGE Trustworthy #-}
{-# LANGUAGE ExistentialQuantification #-}
module LIO.Error (
Annotatable(..), withContext
, AnyLabelError(..), lerrToException, lerrFromException
, GenericPrivDesc(..), LabelError(..), labelError, labelErrorP
, InsufficientPrivs(..), insufficientPrivs
, ResultExceedsLabel(..)
) where
import safe qualified Control.Exception as IO
import safe Data.Typeable
import safe LIO.Exception
import safe LIO.Label
import LIO.TCB
class Annotatable e where
annotate :: String -> e -> e
data AnyLabelError = forall e. (Exception e, Annotatable e) =>
AnyLabelError e deriving Typeable
instance Show AnyLabelError where
showsPrec d (AnyLabelError e) = showsPrec d e
instance Annotatable AnyLabelError where
annotate s (AnyLabelError e) = AnyLabelError $ annotate s e
instance Exception AnyLabelError
-- incur a small runtime cost (though it is well under 100 ns on
withContext :: String -> LIO l a -> LIO l a
withContext ctx (LIOTCB act) =
LIOTCB $ \st -> act st `IO.catch` \e ->
IO.throwIO $ annotate ctx (e :: AnyLabelError)
lerrToException :: (Exception e, Annotatable e) => e -> SomeException
lerrToException = toException . AnyLabelError
lerrFromException :: (Exception e) => SomeException -> Maybe e
lerrFromException se = do
AnyLabelError e <- fromException se
cast e
data GenericPrivDesc l = forall p. (PrivDesc l p) => GenericPrivDesc p
instance Show (GenericPrivDesc l) where
showsPrec d (GenericPrivDesc p) = showsPrec d p
-- | Main error type thrown by label failures in the 'LIO' monad.
data LabelError l = LabelError {
    lerrContext :: [String]
  , lerrFailure :: String
  , lerrCurLabel :: l
  , lerrCurClearance :: l
  , lerrPrivs :: [GenericPrivDesc l]
  , lerrLabels :: [l]
  } deriving (Show, Typeable)
instance Annotatable (LabelError l) where
annotate a e = e { lerrContext = a : lerrContext e }
instance Label l => Exception (LabelError l) where
toException = lerrToException
fromException = lerrFromException
labelError :: (Label l) => String
           -> [l]
           -> LIO l a
labelError fl ls = do
st <- getLIOStateTCB
throwLIO LabelError {
lerrContext = []
, lerrFailure = fl
, lerrCurLabel = lioLabel st
, lerrCurClearance = lioClearance st
, lerrPrivs = []
, lerrLabels = ls
}
labelErrorP :: (Label l, PrivDesc l p) => String
            -> Priv p
            -> [l]
            -> LIO l a
labelErrorP fl p ls = do
st <- getLIOStateTCB
throwLIO LabelError {
lerrContext = []
, lerrFailure = fl
, lerrCurLabel = lioLabel st
, lerrCurClearance = lioClearance st
, lerrPrivs = [GenericPrivDesc $ privDesc p]
, lerrLabels = ls
}
-- descriptions (see "LIO.Delegate").
data InsufficientPrivs = forall p. (SpeaksFor p) => InsufficientPrivs {
inspContext :: [String]
, inspFailure :: String
, inspSupplied :: p
, inspNeeded :: p
} deriving (Typeable)
instance Show InsufficientPrivs where
showsPrec _ (InsufficientPrivs c l s n) =
("InsufficientPrivs { inspContext = " ++) . shows c .
(", inspLocation = " ++) . shows l .
(", inspSupplied = " ++) . shows s .
(", inspNeeded = " ++) . shows n .
(" }" ++)
instance Annotatable InsufficientPrivs where
annotate a e = e { inspContext = a : inspContext e }
instance Exception InsufficientPrivs where
toException = lerrToException
fromException = lerrFromException
-- | Raise 'InsufficientPrivs' error.
insufficientPrivs :: (SpeaksFor p) =>
                     String
                  -> p
                  -> p
                  -> a
insufficientPrivs fl supplied needed
| isPriv supplied = error $ "insufficientPrivs: " ++ show fl ++
" supplied actual privileges instead of description"
| otherwise = IO.throw $ InsufficientPrivs [] fl supplied needed
data ResultExceedsLabel l = ResultExceedsLabel {
relContext :: [String]
, relLocation :: String
, relDeclaredLabel :: l
, relActualLabel :: Maybe l
} deriving (Show, Typeable)
instance Annotatable (ResultExceedsLabel l) where
annotate a e = e { relContext = a : relContext e }
instance (Label l) => Exception (ResultExceedsLabel l) where
toException = lerrToException
fromException = lerrFromException
|
494209c51a14253ea1621ce55ea5dfcd969e713883710ce7edbddaf27a100b1a
|
gfour/gic
|
digits_of_e1.hs
|
module Main where
main :: IO ()
main = putStrLn (show result)
eContFrac :: [Int]
eContFrac = 2 : (aux 2);
aux :: Int -> [Int]
aux n = 1 : (n : (1 : (aux (n+2)))) ;
-- Output a digit if we can
ratTrans :: Int -> Int -> Int -> Int -> [Int] -> [Int]
ratTrans a b c d xs =
case xs of
[] -> []
h : tl -> if (((signum1 c == signum1 d) || (abs1 c < abs1 d)) && (((c+d)*(b `div` d)) <= (a+b)) && ((c+d)*(b `div` d) + (c+d) > (a+b)))
then
((b `div` d) : (ratTrans c d (a-((b `div` d)*c)) (b-((b `div` d)*d)) xs))
else
(ratTrans b (a+(h*b)) d (c+(h*d)) tl);
signum1 :: Int -> Int
signum1 x = if (x<0) then (-1) else if (x>0) then 1 else 0;
abs1 :: Int -> Int
abs1 x = if (x>=0) then x else (-x);
-- Finally, we convert a continued fraction to digits by repeatedly multiplying by 10.
toDigits :: [Int] -> [Int]
toDigits l =
case l of
[] -> []
a : b -> a : (toDigits (ratTrans 10 0 0 1 b));
e :: [Int]
e = toDigits eContFrac
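-- Sanity check added for illustration (not in the original program):
-- e = 2.71828..., so the digit stream should start 2, 7, 1, 8.
checkFirstDigits :: Bool
checkFirstDigits = (select1 e 0 == 2) && (select1 e 1 == 7) && (select1 e 2 == 1) && (select1 e 3 == 8);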
select1 :: [Int] -> Int -> Int
select1 xs n =
case xs of
a : b -> if (n==0) then a else select1 b (n-1);
result :: Int
result = select1 e 1000;
| null |
https://raw.githubusercontent.com/gfour/gic/d5f2e506b31a1a28e02ca54af9610b3d8d618e9a/Examples/Data/digits_of_e1.hs
|
haskell
|
Output a digit if we can
|
module Main where
main :: IO ()
main = putStrLn (show result)
eContFrac :: [Int]
eContFrac = 2 : (aux 2);
aux :: Int -> [Int]
aux n = 1 : (n : (1 : (aux (n+2)))) ;
ratTrans :: Int -> Int -> Int -> Int -> [Int] -> [Int]
ratTrans a b c d xs =
case xs of
[] -> []
h : tl -> if (((signum1 c == signum1 d) || (abs1 c < abs1 d)) && (((c+d)*(b `div` d)) <= (a+b)) && ((c+d)*(b `div` d) + (c+d) > (a+b)))
then
((b `div` d) : (ratTrans c d (a-((b `div` d)*c)) (b-((b `div` d)*d)) xs))
else
(ratTrans b (a+(h*b)) d (c+(h*d)) tl);
signum1 :: Int -> Int
signum1 x = if (x<0) then (-1) else if (x>0) then 1 else 0;
abs1 :: Int -> Int
abs1 x = if (x>=0) then x else (-x);
-- Finally, we convert a continued fraction to digits by repeatedly multiplying by 10.
toDigits :: [Int] -> [Int]
toDigits l =
case l of
[] -> []
a : b -> a : (toDigits (ratTrans 10 0 0 1 b));
e :: [Int]
e = toDigits eContFrac
select1 :: [Int] -> Int -> Int
select1 xs n =
case xs of
a : b -> if (n==0) then a else select1 b (n-1);
result :: Int
result = select1 e 1000;
|
dda286777a4a6db71223f5883687e2bd5a4f2e856f140790c6c21387aebd1986
|
McCLIM/McCLIM
|
seos-baseline.lisp
|
(in-package #:clim-demo)
(define-application-frame seos-baseline ()
()
(:menu-bar seos-command-table)
(:pane :application
:width 400
:height 400
:display-function #'display
:end-of-line-action :allow
:end-of-page-action :allow
:text-margins '(:left (:absolute 30)
:right (:relative 30)
:top (:relative 30)
:bottom (:absolute 370))))
(defun show-line (stream &rest args)
(loop for (size text) on args by #'cddr do
(with-drawing-options (stream :text-size size)
(format stream text)))
(terpri stream))
(defmethod display ((frame seos-baseline) pane)
(declare (ignore frame))
(show-line pane :normal "Hello " :huge "world!")
(show-line pane
:normal "Hello world "
:normal "hiho" :large "hiho" :huge "hiho" :tiny "hiho" :normal "hiho"
:normal "hiho" :large "hiho" :huge "hiho" :tiny "hiho" :normal "hiho"
:normal "hiho" :large "hiho" :huge "hiho" :tiny "hiho" :normal "hiho"
:normal "hiho" :large "hiho" :huge "hiho" :tiny "hiho" :normal "hiho")
(show-line pane :huge "Third " :normal "line " :tiny "hello " :huge "world!")
(show-line pane :normal "Last " :huge "line " :normal "bam bam")
(terpri pane)
(with-bounding-rectangle* (:x1 x1 :x2 x2) (clime:stream-page-region pane)
(let ((y (nth-value 1 (stream-cursor-position pane))))
(draw-line* pane x1 y x2 y :ink +blue+ :line-dashes t)))
(terpri pane)
(format pane "All lines should have text aligned on the same baseline. Likely failures:
1. Parts of the text with different size aligned to the top (not baseline).
2. Pressing space cause redisplay and schedules repaint after 1s. This may exhibit different outlook of displayed and repainted output.
3. All lines in this description are long. Use menu to change end of line action. Current action is ~s.
4. When viewport is smaller than the whole scrolling area ALT scrolls to the very bottom.
6. There is one line below these points. Stream height may not be recalculated to take it into account because it doesn't have newline character in the end. When wrapped part of the text may not be rendered. Try pressing space.
7. Said last line may be rendered and recorded, but not scrolled to the end despite :SCROLL contract.
8. Some lines here are lengthy to test different wrapping scenarios. Page end action :WRAP is not very useful - it is not a bug that text is drawn on top of the previous one. Here comes a lot of letters with random spaces: AAA BBBBBBBBBBBBBBB CCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCC DDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDD.
See the introduction in \"15.3 The Text Cursor\"."
(stream-end-of-line-action pane))
(draw-design pane (clime:stream-page-region pane)
:ink +red+ :line-dashes t :filled nil))
(define-seos-baseline-command (com-redisplay :keystroke #\space) ()
(schedule-event *standard-output*
(make-instance 'window-repaint-event
:region +everywhere+
:sheet *standard-output*)
1))
(make-command-table 'seos-command-table
:errorp nil
:menu '(("Line" :menu line-ct)
("Page" :menu page-ct)))
(make-command-table 'line-ct :errorp nil
:menu '(("Allow" :command com-allow-line)
("Scroll" :command com-scroll-line)
("Wrap" :command com-wrap-line)
("Wrap word" :command com-wrap*-line)))
(make-command-table 'page-ct :errorp nil
:menu '(("Allow" :command com-allow-page)
("Scroll" :command com-scroll-page)
("Wrap" :command com-wrap-page)))
(define-seos-baseline-command (com-allow-line :keystroke #\1) ()
(setf (stream-end-of-line-action *standard-output*) :allow))
(define-seos-baseline-command (com-scroll-line :keystroke #\2) ()
(setf (stream-end-of-line-action *standard-output*) :scroll))
(define-seos-baseline-command (com-wrap-line :keystroke #\3) ()
(setf (stream-end-of-line-action *standard-output*) :wrap))
(define-seos-baseline-command (com-wrap*-line :keystroke #\4) ()
(setf (stream-end-of-line-action *standard-output*) :wrap*))
(define-seos-baseline-command (com-allow-page :keystroke #\q) ()
(setf (stream-end-of-page-action *standard-output*) :allow))
(define-seos-baseline-command (com-scroll-page :keystroke #\w) ()
(setf (stream-end-of-page-action *standard-output*) :scroll))
(define-seos-baseline-command (com-wrap-page :keystroke #\e) ()
(setf (stream-end-of-page-action *standard-output*) :wrap))
;(run-frame-top-level (make-application-frame 'seos-baseline))
| null |
https://raw.githubusercontent.com/McCLIM/McCLIM/c079691b0913f8306ceff2620b045b6e24e2f745/Examples/seos-baseline.lisp
|
lisp
|
(run-frame-top-level (make-application-frame 'seos-baseline))
|
(in-package #:clim-demo)
(define-application-frame seos-baseline ()
()
(:menu-bar seos-command-table)
(:pane :application
:width 400
:height 400
:display-function #'display
:end-of-line-action :allow
:end-of-page-action :allow
:text-margins '(:left (:absolute 30)
:right (:relative 30)
:top (:relative 30)
:bottom (:absolute 370))))
(defun show-line (stream &rest args)
(loop for (size text) on args by #'cddr do
(with-drawing-options (stream :text-size size)
(format stream text)))
(terpri stream))
(defmethod display ((frame seos-baseline) pane)
(declare (ignore frame))
(show-line pane :normal "Hello " :huge "world!")
(show-line pane
:normal "Hello world "
:normal "hiho" :large "hiho" :huge "hiho" :tiny "hiho" :normal "hiho"
:normal "hiho" :large "hiho" :huge "hiho" :tiny "hiho" :normal "hiho"
:normal "hiho" :large "hiho" :huge "hiho" :tiny "hiho" :normal "hiho"
:normal "hiho" :large "hiho" :huge "hiho" :tiny "hiho" :normal "hiho")
(show-line pane :huge "Third " :normal "line " :tiny "hello " :huge "world!")
(show-line pane :normal "Last " :huge "line " :normal "bam bam")
(terpri pane)
(with-bounding-rectangle* (:x1 x1 :x2 x2) (clime:stream-page-region pane)
(let ((y (nth-value 1 (stream-cursor-position pane))))
(draw-line* pane x1 y x2 y :ink +blue+ :line-dashes t)))
(terpri pane)
(format pane "All lines should have text aligned on the same baseline. Likely failures:
1. Parts of the text with different size aligned to the top (not baseline).
2. Pressing space cause redisplay and schedules repaint after 1s. This may exhibit different outlook of displayed and repainted output.
3. All lines in this description are long. Use menu to change end of line action. Current action is ~s.
4. When viewport is smaller than the whole scrolling area ALT scrolls to the very bottom.
6. There is one line below these points. Stream height may not be recalculated to take it into account because it doesn't have newline character in the end. When wrapped part of the text may not be rendered. Try pressing space.
7. Said last line may be rendered and recorded, but not scrolled to the end despite :SCROLL contract.
8. Some lines here are lengthy to test different wrapping scenarios. Page end action :WRAP is not very useful - it is not a bug that text is drawn on top of the previous one. Here comes a lot of letters with random spaces: AAA BBBBBBBBBBBBBBB CCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCC DDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDD.
See the introduction in \"15.3 The Text Cursor\"."
(stream-end-of-line-action pane))
(draw-design pane (clime:stream-page-region pane)
:ink +red+ :line-dashes t :filled nil))
(define-seos-baseline-command (com-redisplay :keystroke #\space) ()
(schedule-event *standard-output*
(make-instance 'window-repaint-event
:region +everywhere+
:sheet *standard-output*)
1))
(make-command-table 'seos-command-table
:errorp nil
:menu '(("Line" :menu line-ct)
("Page" :menu page-ct)))
(make-command-table 'line-ct :errorp nil
:menu '(("Allow" :command com-allow-line)
("Scroll" :command com-scroll-line)
("Wrap" :command com-wrap-line)
("Wrap word" :command com-wrap*-line)))
(make-command-table 'page-ct :errorp nil
:menu '(("Allow" :command com-allow-page)
("Scroll" :command com-scroll-page)
("Wrap" :command com-wrap-page)))
(define-seos-baseline-command (com-allow-line :keystroke #\1) ()
(setf (stream-end-of-line-action *standard-output*) :allow))
(define-seos-baseline-command (com-scroll-line :keystroke #\2) ()
(setf (stream-end-of-line-action *standard-output*) :scroll))
(define-seos-baseline-command (com-wrap-line :keystroke #\3) ()
(setf (stream-end-of-line-action *standard-output*) :wrap))
(define-seos-baseline-command (com-wrap*-line :keystroke #\4) ()
(setf (stream-end-of-line-action *standard-output*) :wrap*))
(define-seos-baseline-command (com-allow-page :keystroke #\q) ()
(setf (stream-end-of-page-action *standard-output*) :allow))
(define-seos-baseline-command (com-scroll-page :keystroke #\w) ()
(setf (stream-end-of-page-action *standard-output*) :scroll))
(define-seos-baseline-command (com-wrap-page :keystroke #\e) ()
(setf (stream-end-of-page-action *standard-output*) :wrap))
|
a6e486afc85c0d46d2ab7b0f58cbd0e987fbf75ed1e5c836618a550fdf13bf78
|
simonmar/haxl-icfp14-sample-code
|
MockData.hs
|
{-# LANGUAGE ExistentialQuantification, GADTs, StandaloneDeriving #-}
module MockData (requestVal) where
import Types
import Data.Time.Clock
import Data.Time.Calendar
requestVal :: Request a -> a
requestVal r = case r of
FetchPosts -> map PostId postids
FetchPostInfo (PostId n) -> postinfos !! n
FetchPostContent (PostId n) -> postcontent !! n
FetchPostViews (PostId n) -> postviews !! n
where
postids = [0..10] :: [Int]
postinfos = map mkExamplePostInfo postids
postcontent = [ "Post " ++ show n | n <- postids ]
postviews = [ x `mod` 53 | x <- [ p, p + p .. ] ]
where p = 10000001 :: Int
mkExamplePostInfo :: Int -> PostInfo
mkExamplePostInfo p = PostInfo
{ postId = PostId p
, postDate = UTCTime (ModifiedJulianDay (fromIntegral p)) 0
, postTopic = "Topic " ++ show (p `mod` 3)
}
| null |
https://raw.githubusercontent.com/simonmar/haxl-icfp14-sample-code/31859f50e0548f3e581acd26944ceb00953f2c42/MockData.hs
|
haskell
|
{-# LANGUAGE ExistentialQuantification, GADTs, StandaloneDeriving #-}
module MockData (requestVal) where
import Types
import Data.Time.Clock
import Data.Time.Calendar
requestVal :: Request a -> a
requestVal r = case r of
FetchPosts -> map PostId postids
FetchPostInfo (PostId n) -> postinfos !! n
FetchPostContent (PostId n) -> postcontent !! n
FetchPostViews (PostId n) -> postviews !! n
where
postids = [0..10] :: [Int]
postinfos = map mkExamplePostInfo postids
postcontent = [ "Post " ++ show n | n <- postids ]
postviews = [ x `mod` 53 | x <- [ p, p + p .. ] ]
where p = 10000001 :: Int
mkExamplePostInfo :: Int -> PostInfo
mkExamplePostInfo p = PostInfo
{ postId = PostId p
, postDate = UTCTime (ModifiedJulianDay (fromIntegral p)) 0
, postTopic = "Topic " ++ show (p `mod` 3)
}
|
|
56bfddf2d81d11109b02e7d7eed4d4018a63e8dfb05de85eb2ed5c5a4ccb9846
|
BinaryAnalysisPlatform/bap
|
arm_helpers.ml
|
open Core_kernel[@@warning "-D"]
let sexpable_of_string t_of_sexp name =
try Some (t_of_sexp @@ Sexp.of_string name)
with Sexp.Of_sexp_error _ -> None
| null |
https://raw.githubusercontent.com/BinaryAnalysisPlatform/bap/253afc171bbfd0fe1b34f6442795dbf4b1798348/lib/arm/arm_helpers.ml
|
ocaml
|
open Core_kernel[@@warning "-D"]
let sexpable_of_string t_of_sexp name =
try Some (t_of_sexp @@ Sexp.of_string name)
with Sexp.Of_sexp_error _ -> None
|
|
f0770d5c62b3433d2400e9b50bd574626d91e36b089087dc2cc259ef60e84b3c
|
cyga/real-world-haskell
|
A.hs
|
-- file: ch25/A.hs
import System.Environment
import Text.Printf
main = do
[d] <- map read `fmap` getArgs
printf "%f\n" (mean [1..d])
mean :: [Double] -> Double
mean xs = sum xs / fromIntegral (length xs)
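-- Worked example (added note, not in the book's file): mean [1..4] is
-- (1+2+3+4) / 4 = 2.5.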
| null |
https://raw.githubusercontent.com/cyga/real-world-haskell/4ed581af5b96c6ef03f20d763b8de26be69d43d9/ch25/A.hs
|
haskell
|
file: ch25/A.hs
|
import System.Environment
import Text.Printf
main = do
[d] <- map read `fmap` getArgs
printf "%f\n" (mean [1..d])
mean :: [Double] -> Double
mean xs = sum xs / fromIntegral (length xs)
|
5be187f0357a9bb8decb34dc0a430e45ad371ace97763edfdf4f239dec9ef920
|
technion/erlvulnscan
|
recaptcha.erl
|
-module(recaptcha).
-define(RECAPTHA_API_URL, "").
-export([verify/2]).
-spec verify_live(binary(), binary(), string()) -> atom().
verify_live(RemoteIP, Captcha, Key) ->
URL = ?RECAPTHA_API_URL ++ "?"
"secret=" ++ Key ++ "&"
"remoteip=" ++ binary_to_list(RemoteIP) ++ "&"
"response=" ++ binary_to_list(Captcha),
{ok, {{_Version, 200, "OK"}, _Headers, Body}} =
httpc:request(get, {URL, []}, [], []),
{Google} = jiffy:decode(Body),
proplists:get_value(<<"success">>, Google).
-spec verify(binary(), binary()) -> atom().
verify(RemoteIP, Captcha) ->
{ok, Key} = application:get_env(erlvulnscan, captcha_key),
case Key of
undefined ->
            %% key not setup - probably testing
true;
_ ->
verify_live(RemoteIP, Captcha, Key)
end.
| null |
https://raw.githubusercontent.com/technion/erlvulnscan/8d109c49c8ecf331a4a859296e9fafd57aa458cd/src/recaptcha.erl
|
erlang
|
-module(recaptcha).
-define(RECAPTHA_API_URL, "").
-export([verify/2]).
-spec verify_live(binary(), binary(), string()) -> atom().
verify_live(RemoteIP, Captcha, Key) ->
URL = ?RECAPTHA_API_URL ++ "?"
"secret=" ++ Key ++ "&"
"remoteip=" ++ binary_to_list(RemoteIP) ++ "&"
"response=" ++ binary_to_list(Captcha),
{ok, {{_Version, 200, "OK"}, _Headers, Body}} =
httpc:request(get, {URL, []}, [], []),
{Google} = jiffy:decode(Body),
proplists:get_value(<<"success">>, Google).
-spec verify(binary(), binary()) -> atom().
verify(RemoteIP, Captcha) ->
{ok, Key} = application:get_env(erlvulnscan, captcha_key),
case Key of
undefined ->
            %% key not setup - probably testing
true;
_ ->
verify_live(RemoteIP, Captcha, Key)
end.
|
|
a1268aabd251048a102a9979e7485eab6759d47f0e2a3b159c4d75c397287832
|
thiagozg/GitHubJobs-Clojure-Service
|
http_config.clj
|
(ns github-jobs.di.http-config
(:require [github-jobs.service :as service]
[io.pedestal.http :as http]
[io.pedestal.http :as server]))
(defn provides
[environment]
(-> {:env environment
::http/routes service/routes
::http/type :jetty
::http/port 8890
::http/resource-path "/public"
::http/join? false
::http/host "0.0.0.0"}
server/default-interceptors
server/dev-interceptors))
| null |
https://raw.githubusercontent.com/thiagozg/GitHubJobs-Clojure-Service/9b17da56ebe773f6aed96337ac516307d91a1403/src/github_jobs/di/http_config.clj
|
clojure
|
(ns github-jobs.di.http-config
(:require [github-jobs.service :as service]
[io.pedestal.http :as http]
[io.pedestal.http :as server]))
(defn provides
[environment]
(-> {:env environment
::http/routes service/routes
::http/type :jetty
::http/port 8890
::http/resource-path "/public"
::http/join? false
::http/host "0.0.0.0"}
server/default-interceptors
server/dev-interceptors))
|
|
fd1308a3e66344b53db8d6ec4199114383fd6cb1a05c1e345aa6fcf858119840
|
bennn/dissertation
|
moment.rkt
|
#lang typed/racket/base
;; Moments in time
;; Need the requires on top to stop syntax errors; opaques must
;; come lexically before their use
(require
require-typed-check
"../base/types.rkt"
"gregor-adapter.rkt"
racket/match
(only-in racket/math exact-round)
"tzinfo-adapter.rkt"
)
(require/typed/check "hmsn.rkt"
[NS/SECOND Natural]
)
(require/typed/check "datetime.rkt"
[datetime (->* (Natural) (Month Natural Natural Natural Natural Natural) DateTime)]
[datetime->posix (-> DateTime Exact-Rational)]
[posix->datetime (-> Exact-Rational DateTime)]
[datetime->jd (-> DateTime Exact-Rational)]
[datetime-add-seconds (-> DateTime Integer DateTime)]
)
(require/typed/check "moment-base.rkt"
[make-moment (-> DateTime Integer (U String #f) Moment)]
[moment->iso8601 (-> Moment String)]
[moment->iso8601/tzid (-> Moment String)]
)
(require/typed/check "offset-resolvers.rkt"
[resolve-offset/raise (-> (U tzgap tzoverlap) DateTime (U String #f) (U Moment #f) Moment)]
)
;; -----------------------------------------------------------------------------
(provide;/contract
current-timezone ;(parameter/c tz/c)]
moment ;(->i ([year exact-integer?])
 ;      ([month (integer-in 1 12)]
; [day (year month) (day-of-month/c year month)]
 ;       [hour (integer-in 0 23)]
 ;       [minute (integer-in 0 59)]
 ;       [second (integer-in 0 59)]
 ;       [nanosecond (integer-in 0 (sub1 NS/SECOND))]
; #:tz [tz tz/c]
; #:resolve-offset [resolve offset-resolver/c])
; [res moment?])]
datetime+tz->moment ;(-> datetime? tz/c offset-resolver/c moment?)]
moment->iso8601 ;(-> moment? string?)]
moment->iso8601/tzid ;(-> moment? string?)]
moment->datetime/local ;(-> moment? datetime?)]
moment->utc-offset ;(-> moment? exact-integer?)]
 moment->timezone ;(-> moment? tz/c)]
moment->tzid ;(-> moment? (or/c string? #f))]
moment->jd ;(-> moment? rational?)]
moment->posix ;(-> moment? rational?)]
posix->moment ;(-> rational? tz/c moment?)]
moment-add-nanoseconds ;(-> moment? exact-integer? moment?)]
moment-in-utc ;(-> moment? moment?)]
 timezone-adjust ;(-> moment? tz/c moment?)]
timezone-coerce ;(->i ([m moment?]
; [z tz/c])
; (#:resolve-offset [r offset-resolver/c])
; [res moment?])]
moment=? ;(-> moment? moment? boolean?)]
moment<? ;(-> moment? moment? boolean?)]
moment<=? ;(-> moment? moment? boolean?)]
UTC ;tz/c]
)
;; =============================================================================
(: current-timezone (Parameterof (U tz #f)))
(define current-timezone (make-parameter (system-tzid)))
(: moment (->* (Natural) (Month
Natural Natural Natural Natural Natural
#:tz (U tz #f)
#:resolve-offset (-> (U tzgap tzoverlap)
DateTime
(U String #f)
(U #f Moment) Moment)
)
Moment))
(define (moment year [month 1] [day 1] [hour 0] [minute 0] [second 0] [nano 0]
#:tz [tz (current-timezone)]
#:resolve-offset [resolve resolve-offset/raise])
(when (eq? tz #f) (error "no timezone"))
(datetime+tz->moment (datetime year month day hour minute second nano) tz resolve))
(: datetime+tz->moment (-> DateTime
(U Integer String)
(-> (U tzgap tzoverlap)
DateTime
(U String #f)
(U Moment #f) Moment)
Moment))
(define (datetime+tz->moment dt zone resolve)
(cond [(string? zone)
(define res (local-seconds->tzoffset zone (exact-round (datetime->posix dt))))
(cond
[(tzoffset? res)
(make-moment dt (tzoffset-utc-seconds res) zone)]
[else (resolve res dt zone #f)])]
[(index? zone)
(make-moment dt zone #f)]
[else (error (format "datetime+tz->moment unknown zone ~a" zone))]))
(define moment->datetime/local Moment-datetime/local)
(define moment->utc-offset Moment-utc-offset)
(define moment->tzid Moment-zone)
(: moment->timezone (-> Moment tz))
(define (moment->timezone m)
(or (moment->tzid m)
(moment->utc-offset m)))
(: moment-in-utc (-> Moment Moment))
(define (moment-in-utc m)
(if (equal? UTC (moment->timezone m))
m
(timezone-adjust m UTC)))
(: moment->jd (-> Moment Exact-Rational))
(define (moment->jd m)
(datetime->jd
(moment->datetime/local
(moment-in-utc m))))
(: moment->posix (-> Moment Exact-Rational))
(define (moment->posix m)
(datetime->posix
(moment->datetime/local
(moment-in-utc m))))
(: posix->moment (-> Exact-Rational tz Moment))
(define (posix->moment p z)
(: off Integer)
(define off
(cond [(string? z) (tzoffset-utc-seconds (utc-seconds->tzoffset z p))]
[else 0]))
(define dt (posix->datetime (+ p off)))
(unless (string? z) (error "posix->moment: can't call make-moment with an integer"))
(make-moment dt off z))
(: moment-add-nanoseconds (-> Moment Natural Moment))
(define (moment-add-nanoseconds m n)
(posix->moment (+ (moment->posix m) (* n (/ 1 NS/SECOND)))
(moment->timezone m)))
(: timezone-adjust (-> Moment (U Natural String) Moment))
(define (timezone-adjust m z)
(match-define (Moment dt neg-sec _) m)
(: dt/utc DateTime)
(define dt/utc
(datetime-add-seconds dt (- neg-sec)))
(cond [(string? z)
(define posix (datetime->posix dt/utc))
(match-define (tzoffset offset _ _) (utc-seconds->tzoffset z posix))
(define local (datetime-add-seconds dt/utc offset))
(make-moment local offset z)]
[else
(define local (datetime-add-seconds dt/utc z))
(make-moment local z #f)]))
(: timezone-coerce (->* [Moment (U Natural String)]
(#:resolve-offset (-> (U tzgap tzoverlap) DateTime (U String #f) (U #f Moment) Moment))
Moment))
(define (timezone-coerce m z #:resolve-offset [resolve resolve-offset/raise])
(datetime+tz->moment (moment->datetime/local m) z resolve))
(: moment=? (-> Moment Moment Boolean))
(define (moment=? m1 m2)
(= (moment->jd m1) (moment->jd m2)))
(: moment<? (-> Moment Moment Boolean))
(define (moment<? m1 m2)
(< (moment->jd m1) (moment->jd m2)))
(: moment<=? (-> Moment Moment Boolean))
(define (moment<=? m1 m2)
(<= (moment->jd m1) (moment->jd m2)))
(: UTC String)
(define UTC "Etc/UTC")
| null |
https://raw.githubusercontent.com/bennn/dissertation/779bfe6f8fee19092849b7e2cfc476df33e9357b/dissertation/scrbl/jfp-2019/benchmarks/gregor/typed/moment.rkt
|
racket
|
Moments in time
Need the requires on top to stop syntax errors; opaques must
come lexically before their use
-----------------------------------------------------------------------------
/contract
(parameter/c tz/c)]
(->i ([year exact-integer?])
[day (year month) (day-of-month/c year month)]
#:tz [tz tz/c]
#:resolve-offset [resolve offset-resolver/c])
[res moment?])]
(-> datetime? tz/c offset-resolver/c moment?)]
(-> moment? string?)]
(-> moment? string?)]
(-> moment? datetime?)]
(-> moment? exact-integer?)]
(-> moment? (or/c string? #f))]
(-> moment? rational?)]
(-> moment? rational?)]
(-> rational? tz/c moment?)]
(-> moment? exact-integer? moment?)]
(-> moment? moment?)]
(->i ([m moment?]
[z tz/c])
(#:resolve-offset [r offset-resolver/c])
[res moment?])]
(-> moment? moment? boolean?)]
(-> moment? moment? boolean?)]
(-> moment? moment? boolean?)]
tz/c]
=============================================================================
|
#lang typed/racket/base
(require
require-typed-check
"../base/types.rkt"
"gregor-adapter.rkt"
racket/match
(only-in racket/math exact-round)
"tzinfo-adapter.rkt"
)
(require/typed/check "hmsn.rkt"
[NS/SECOND Natural]
)
(require/typed/check "datetime.rkt"
[datetime (->* (Natural) (Month Natural Natural Natural Natural Natural) DateTime)]
[datetime->posix (-> DateTime Exact-Rational)]
[posix->datetime (-> Exact-Rational DateTime)]
[datetime->jd (-> DateTime Exact-Rational)]
[datetime-add-seconds (-> DateTime Integer DateTime)]
)
(require/typed/check "moment-base.rkt"
[make-moment (-> DateTime Integer (U String #f) Moment)]
[moment->iso8601 (-> Moment String)]
[moment->iso8601/tzid (-> Moment String)]
)
(require/typed/check "offset-resolvers.rkt"
[resolve-offset/raise (-> (U tzgap tzoverlap) DateTime (U String #f) (U Moment #f) Moment)]
)
(provide
 current-timezone moment datetime+tz->moment
 moment->iso8601 moment->iso8601/tzid
 moment->datetime/local moment->utc-offset moment->timezone moment->tzid
 moment->jd moment->posix posix->moment
 moment-add-nanoseconds moment-in-utc timezone-adjust timezone-coerce
 moment=? moment<? moment<=?
 UTC)
(: current-timezone (Parameterof (U tz #f)))
(define current-timezone (make-parameter (system-tzid)))
(: moment (->* (Natural) (Month
Natural Natural Natural Natural Natural
#:tz (U tz #f)
#:resolve-offset (-> (U tzgap tzoverlap)
DateTime
(U String #f)
(U #f Moment) Moment)
)
Moment))
(define (moment year [month 1] [day 1] [hour 0] [minute 0] [second 0] [nano 0]
#:tz [tz (current-timezone)]
#:resolve-offset [resolve resolve-offset/raise])
(when (eq? tz #f) (error "no timezone"))
(datetime+tz->moment (datetime year month day hour minute second nano) tz resolve))
(: datetime+tz->moment (-> DateTime
(U Integer String)
(-> (U tzgap tzoverlap)
DateTime
(U String #f)
(U Moment #f) Moment)
Moment))
(define (datetime+tz->moment dt zone resolve)
(cond [(string? zone)
(define res (local-seconds->tzoffset zone (exact-round (datetime->posix dt))))
(cond
[(tzoffset? res)
(make-moment dt (tzoffset-utc-seconds res) zone)]
[else (resolve res dt zone #f)])]
[(index? zone)
(make-moment dt zone #f)]
[else (error (format "datetime+tz->moment unknown zone ~a" zone))]))
(define moment->datetime/local Moment-datetime/local)
(define moment->utc-offset Moment-utc-offset)
(define moment->tzid Moment-zone)
(: moment->timezone (-> Moment tz))
(define (moment->timezone m)
(or (moment->tzid m)
(moment->utc-offset m)))
(: moment-in-utc (-> Moment Moment))
(define (moment-in-utc m)
(if (equal? UTC (moment->timezone m))
m
(timezone-adjust m UTC)))
(: moment->jd (-> Moment Exact-Rational))
(define (moment->jd m)
(datetime->jd
(moment->datetime/local
(moment-in-utc m))))
(: moment->posix (-> Moment Exact-Rational))
(define (moment->posix m)
(datetime->posix
(moment->datetime/local
(moment-in-utc m))))
(: posix->moment (-> Exact-Rational tz Moment))
(define (posix->moment p z)
(: off Integer)
(define off
(cond [(string? z) (tzoffset-utc-seconds (utc-seconds->tzoffset z p))]
[else 0]))
(define dt (posix->datetime (+ p off)))
(unless (string? z) (error "posix->moment: can't call make-moment with an integer"))
(make-moment dt off z))
(: moment-add-nanoseconds (-> Moment Natural Moment))
(define (moment-add-nanoseconds m n)
(posix->moment (+ (moment->posix m) (* n (/ 1 NS/SECOND)))
(moment->timezone m)))
(: timezone-adjust (-> Moment (U Natural String) Moment))
(define (timezone-adjust m z)
(match-define (Moment dt neg-sec _) m)
(: dt/utc DateTime)
(define dt/utc
(datetime-add-seconds dt (- neg-sec)))
(cond [(string? z)
(define posix (datetime->posix dt/utc))
(match-define (tzoffset offset _ _) (utc-seconds->tzoffset z posix))
(define local (datetime-add-seconds dt/utc offset))
(make-moment local offset z)]
[else
(define local (datetime-add-seconds dt/utc z))
(make-moment local z #f)]))
(: timezone-coerce (->* [Moment (U Natural String)]
(#:resolve-offset (-> (U tzgap tzoverlap) DateTime (U String #f) (U #f Moment) Moment))
Moment))
(define (timezone-coerce m z #:resolve-offset [resolve resolve-offset/raise])
(datetime+tz->moment (moment->datetime/local m) z resolve))
(: moment=? (-> Moment Moment Boolean))
(define (moment=? m1 m2)
(= (moment->jd m1) (moment->jd m2)))
(: moment<? (-> Moment Moment Boolean))
(define (moment<? m1 m2)
(< (moment->jd m1) (moment->jd m2)))
(: moment<=? (-> Moment Moment Boolean))
(define (moment<=? m1 m2)
(<= (moment->jd m1) (moment->jd m2)))
(: UTC String)
(define UTC "Etc/UTC")
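;; A minimal usage sketch of the functions above: build a moment in UTC and
;; view the same instant at a fixed +3600-second offset.  It assumes the
;; tzdata for "Etc/UTC" is available at run time; the date and offset are
;; arbitrary illustrative values.
(module+ main
  (define m (moment 2017 7 1 12 0 0 0 #:tz UTC))
  (displayln (moment->iso8601/tzid m))
  (displayln (moment->iso8601 (timezone-adjust m 3600))))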
|
ee5010fce5765e95e1fdcc43736ee80714b9ce9cfae90569cb8f010d6e9f0c57
|
cmsc430/www
|
regexp.rkt
|
#lang racket
(provide accepts)
;; type Regexp =
;; | 'zero
;; | 'one
;; | `(char ,)
;; | `(times ,Regexp ,Regexp)
;; | `(plus ,Regexp ,Regexp)
;; | `(star ,Regexp)
;; Regexp String -> Boolean
(define (accepts r s)
(matcher r (string->list s) (Ξ» (cs) (empty? cs))))
;; Regexp () (() -> Bool) -> Bool
(define (matcher r cs k)
(match r
['zero #f]
['one (k cs)]
[`(char ,c)
(match cs
['() #f]
[(cons d cs) (and (char=? c d) (k cs))])]
[`(plus ,r1 ,r2)
(or (matcher r1 cs k) (matcher r2 cs k))]
[`(times ,r1 ,r2)
(matcher r1 cs (Ξ» (cs) (matcher r2 cs k)))]
[`(star ,r)
(letrec ((matcher* (Ξ» (cs) (or (k cs) (matcher r cs matcher*)))))
(matcher* cs))]))
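;; A small usage sketch: the pattern (ab)*c written in the quasiquoted Regexp
;; form described above; the input strings are arbitrary examples.
(module+ main
  (define r `(times (star (times (char #\a) (char #\b))) (char #\c)))
  (displayln (accepts r "ababc"))   ; #t
  (displayln (accepts r "abac")))   ; #f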
| null |
https://raw.githubusercontent.com/cmsc430/www/fcc64c41d1b96cce6dbda49509d2f2ab8ee9f404/langs/loot/regexp.rkt
|
racket
|
Regexp String -> Boolean
|
#lang racket
(provide accepts)
;; type Regexp =
;; | 'zero
;; | 'one
;; | `(char ,)
;; | `(times ,Regexp ,Regexp)
;; | `(plus ,Regexp ,Regexp)
;; | `(star ,Regexp)
(define (accepts r s)
(matcher r (string->list s) (Ξ» (cs) (empty? cs))))
;; Regexp () (() -> Bool) -> Bool
(define (matcher r cs k)
(match r
['zero #f]
['one (k cs)]
[`(char ,c)
(match cs
['() #f]
[(cons d cs) (and (char=? c d) (k cs))])]
[`(plus ,r1 ,r2)
(or (matcher r1 cs k) (matcher r2 cs k))]
[`(times ,r1 ,r2)
(matcher r1 cs (Ξ» (cs) (matcher r2 cs k)))]
[`(star ,r)
(letrec ((matcher* (Ξ» (cs) (or (k cs) (matcher r cs matcher*)))))
(matcher* cs))]))
|
21b27d87a9f6ca3560fb3a28bb7a44fec6e58cd34c9b4d6ed80330047a980edf
|
hopv/MoCHi
|
ModelCheck.ml
|
open Util
open Mochi_util
open CEGAR_syntax
open CEGAR_type
open CEGAR_util
module Debug = Debug.Make(struct let check = Flag.Debug.make_check __MODULE__ end)
type filename = string
type node = UnitNode | BrNode | LineNode of int | EventNode of string
type counterexample =
| CESafety of TrecsInterface.counterexample
| CENonTerm of HorSatInterface.counterexample_apt
| CEFairNonTerm of HORS_syntax.rules
type result = Safe of (var * Inter_type.t) list | Unsafe of counterexample
type mc_spec =
| SpecTRecS of TrecsInterface.spec
| SpecHorSat of HorSatInterface.spec
type spec =
| Fairness of Fair_termination_type.fairness
| Other
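(* [make_file_spec] lists the transitions of a small property automaton for
   the file-access events (newr/read/neww/write/close): each triple below is
   (state, terminal symbol, successor states). *)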
let make_file_spec () =
[0, "unit", [];
0, "event_newr", [1];
1, "event_read", [1];
1, "event_close", [4];
0, "event_neww", [2];
2, "event_write", [2];
2, "event_close", [4];
2, "event_newr", [3];
1, "event_neww", [3];
3, "unit", [];
3, "event_read", [3];
3, "event_write", [3];
3, "event_close", [3];
4, "unit", [];]
let capitalize_var = String.capitalize
let uncapitalize_var = String.uncapitalize
let uncapitalize_env env = List.map (Pair.map_fst uncapitalize_var) env
let capitalize {env;defs;main;info} =
let env' = List.map (Pair.map_fst capitalize_var) env in
let sbst1 = subst_map @@ List.map (fun (f,_) -> f, Var (capitalize_var f)) env in
let aux {fn=f; args=xs; cond; body} =
let xs' = List.map uncapitalize_var xs in
let sbst2 = subst_map @@ List.map2 (fun x x' -> x, Var x') xs xs' in
{fn = capitalize_var f;
args = xs';
cond = sbst1 @@ sbst2 cond;
body = sbst1 @@ sbst2 body}
in
let defs' = List.map aux defs in
let main' = capitalize_var main in
{env=env'; defs=defs'; main=main'; info}
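(* [elim_non_det] turns a function defined by several clauses into a single
   definition that branches with [make_br] over renamed copies of the
   original clauses, so every function name is left with exactly one rule. *)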
let elim_non_det ({defs;main;info} as prog) =
let env = get_ext_fun_env prog in
let check f {fn=g} = f = g in
let mem f defs = List.exists (check f) defs in
let rec elim_non_det_def = function
| [] -> []
| {fn=f; args=xs; cond=t1; body=t2}::defs when mem f defs ->
let f' = rename_id f in
let defs1,defs2 = List.partition (check f) defs in
let defs1' = List.map (fun def -> {def with fn = rename_id def.fn}) defs1 in
let t =
let args = List.map (fun x -> Var x) xs in
let app f = make_app (Var f) args in
List.fold_left (fun t {fn=g} -> make_br (app g) t) (app f') defs1'
in
{fn=f;args=xs;cond=Const True;body=t}::{fn=f';args=xs;cond=t1;body=t2}::defs1' @ elim_non_det_def defs2
| def::defs -> def :: elim_non_det_def defs
in
Typing.infer {env; defs=elim_non_det_def defs; main; info}
let make_bottom {env;defs;main;info} =
let bottoms = ref [] in
let make_bottom n =
let x = "Bottom" ^ string_of_int n in
bottoms := (x,n)::!bottoms;
Var x
in
let aux_def {fn=f; args=xs; cond; body=t2} =
let f_typ = List.assoc f env in
let env' = get_arg_env f_typ xs @@@ env in
let rec aux_term t typ =
match t,typ with
| Const Bottom, typ -> make_bottom (get_arg_num typ)
| Const c, _ -> Const c
| Var x, _ -> Var x
| App(App(App(Const If, t1), t2), t3), typ ->
let t1' = aux_term t1 (TBase(TBool,fun _ -> [])) in
let t2' =
try
aux_term t2 typ
with TypeBottom -> make_bottom 0
in
let t3' =
try
aux_term t3 typ
with TypeBottom -> make_bottom 0
in
App(App(App(Const If, t1'), t2'), t3')
| App(Const (Label n), t), typ -> App(Const (Label n), aux_term t typ)
| App(t1,t2), _ ->
let typ = get_typ env' t1 in
let typ' =
match typ with
TFun(typ,_) -> typ
| _ -> assert false
in
App(aux_term t1 typ, aux_term t2 typ')
| Let _, _ -> assert false
| Fun _, _ -> assert false
in
let app_typ x = function
TFun(_,typ2) -> typ2 (Var x)
| _ -> assert false
in
let typ = List.fold_right app_typ xs f_typ in
let t2' = aux_term t2 typ in
{fn=f; args=xs; cond; body=t2'}
in
let bot0 = make_bottom 0 in
let defs' = List.map aux_def defs in
let bottom_defs =
let make (x,n) = {fn=x; args=List.init n @@ Fun.const "x"; cond=Const True; body=bot0} in
List.map make (List.unique !bottoms)
in
{env; defs=bottom_defs@@@defs'; main; info}
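(* [eta_expand_def] pads a definition with fresh parameters until it takes as
   many arguments as its declared type, eta-expanding nondeterministic
   if-branches inside the body along the way. *)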
let rec eta_expand_term env = function
Const c -> Const c
| Var x -> Var x
| App(App(App(Const If, Const (Rand(TBool,_))), t2), t3) ->
let typ = get_typ env t2 in
let xs = Array.to_list (Array.init (arg_num typ) (fun _ -> new_id "x")) in
let aux t = List.fold_left (fun t x -> App(t, Var x)) (eta_expand_term env t) xs in
let t = make_if (Const (Rand(TBool,None))) (aux t2) (aux t3) in
List.fold_right (fun x t -> Fun(x,None,t)) xs t
| App(t1, t2) -> App(eta_expand_term env t1, eta_expand_term env t2)
| Fun _ -> assert false
| Let _ -> assert false
let eta_expand_def env ({fn=f; args=xs; body=t2} as def) =
let d = arg_num (List.assoc f env) - List.length xs in
let ys = Array.to_list (Array.init d (fun _ -> new_id "x")) in
let t2' = eta_expand_term (get_arg_env (List.assoc f env) xs @@@ env) t2 in
let body = List.fold_left (fun t x -> App(t, Var x)) t2' ys in
{def with args=xs@ys; body}
let eta_expand prog =
{prog with defs = List.map (eta_expand_def prog.env) prog.defs}
|> CEGAR_lift.lift2
let trans_ce ce =
let aux (s,_) =
match s with
| "unit" -> []
| "br" -> []
| s when s.[0] = 'l' -> [int_of_string @@ String.slice ~first:1 s]
| s when String.starts_with s "event_" -> []
| _ -> assert false
in
List.flatten_map aux ce
let true_var = "True"
let false_var = "False"
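(* Booleans are Church-encoded: [church_encode] appends True/False
   definitions that return their first or second argument, and
   [church_encode_term] rewrites a conditional on a known boolean [b] into
   the application [b t2 t3]. *)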
let rec church_encode_term = function
| Const True -> Var true_var
| Const False -> Var false_var
| Const If -> assert false
| Const c -> Const c
| Var x -> Var x
| App(App(App(Const If, Const (Rand(TBool,_))), t2), t3) ->
let t2' = church_encode_term t2 in
let t3' = church_encode_term t3 in
make_app (Const If) [Const (Rand(TBool,None)); t2'; t3']
| App(App(App(Const If, Var b), t2), t3) ->
let t2' = church_encode_term t2 in
let t3' = church_encode_term t3 in
make_app (Var b) [t2'; t3']
| App(t1, t2) -> App(church_encode_term t1, church_encode_term t2)
| Let _ -> assert false
| Fun _ -> assert false
let church_encode {defs;main;info} =
let args = ["x"; "y"] in
let cond = Const True in
let true_def = {fn=true_var; args; cond=Const True; body=Var "x"} in
let false_def = {fn=false_var; args; cond; body=Var "y"} in
let defs' = List.map (map_body_def church_encode_term) defs @ [true_def; false_def] in
let prog = {env=[];defs=defs';main;info} in
if false then Format.printf "CHURCH ENCODE:\n%a@." CEGAR_print.prog prog;
Typing.infer prog
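(* The functions below ([full_app], [should_reduce], [beta_reduce_*]) inline
   a function that has a single unguarded clause, is referenced exactly once
   and is always fully applied; [beta_reduce] repeats this until the
   definitions stop changing. *)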
let rec full_app f n = function
| Const _ -> true
| Var x when f = x -> false
| Var _ -> true
| App _ as t ->
let t1,ts = decomp_app t in
let b1 = if t1 = Var f then n = List.length ts else true in
let b2 = List.for_all (full_app f n) ts in
b1 && b2
| Let _ -> assert false
| Fun _ -> assert false
let should_reduce {fn=f; cond} env defs =
let n = arg_num (List.assoc f env) in
cond = Const True &&
List.count (fun {fn} -> f = fn) defs = 1 &&
List.length (List.rev_flatten_map (fun {body} -> List.filter ((=) f) (get_fv body)) defs) = 1 &&
List.for_all (fun def -> full_app f n def.body) defs
let rec get_head_count f = function
| Const _ -> 0
| Var _ -> 0
| App _ as t ->
let t1,ts = decomp_app t in
let n = List.fold_left (fun n t -> n + get_head_count f t) 0 ts in
if t1 = Var f
then 1 + n
else n
| Let _ -> assert false
| Fun _ -> assert false
let rec beta_reduce_term flag ({fn=f; args=xs; body=t2} as def) = function
| Const c -> Const c
| Var x -> Var x
| App _ as t ->
let t1,ts = decomp_app t in
let ts' = List.map (beta_reduce_term flag def) ts in
if t1 = Var f then
if List.for_all (function Const _ | Var _ -> true | App _ -> false | _ -> assert false) ts'
then List.fold_right2 subst xs ts' t2
else (flag := true; make_app t1 ts')
else
make_app t1 ts'
| Let _ -> assert false
| Fun _ -> assert false
let beta_reduce_term flag ({fn=f} as def) t =
let n = get_head_count f t in
if n = 1
then beta_reduce_term flag def t
else (if n >= 2 then flag := true; t)
let beta_reduce_aux {env;defs;main;info} =
let rec aux defs1 = function
[] -> defs1
| def::defs2 when should_reduce def env (defs1@@@def::defs2) ->
let flag = ref false in
let reduce_def def' = {def' with body = beta_reduce_term flag def def'.body} in
let defs1' = List.map reduce_def defs1 in
let defs2' = List.map reduce_def defs2 in
if !flag
then aux (defs1'@[def]) defs2'
else aux defs1' defs2'
| def::defs2 -> aux (defs1@[def]) defs2
in
{env; defs = aux [] defs; main; info}
let rec beta_reduce prog =
let prog' = beta_reduce_aux prog in
if prog.defs = prog'.defs
then prog
else beta_reduce prog'
let pr s t =
Debug.printf "##[ModelCheck] %s:@.%a@.@." s CEGAR_print.prog_typ t;
ignore @@ Typing.infer t
let preprocess prog =
Format.eprintf "WARNING: model checking for non-CPS programs is unmaintained.@.";
prog
|@> pr "INPUT"
|> CEGAR_CPS.trans -$- true
|@> pr "CPS"
|> eta_expand
|@> pr "eta_expand"
|> elim_non_det
|@> pr "elim_non_det"
|> make_bottom
|@> pr "make_bottom"
|> pop_main
|@> pr "pop_main"
|> capitalize
|@> pr "capitalize"
|> Typing.infer
|&Flag.ModelCheck.useless_elim&> Useless_elim.elim
|&Flag.ModelCheck.beta_reduce&> beta_reduce
|& !Flag.ModelCheck.church_encode&> church_encode
|@> pr "church_encode"
let preprocess_cps prog =
prog
|@> pr "INPUT"
|> eta_expand
|@> pr "eta_expand"
|> elim_non_det
|@> pr "elim_non_det"
|> put_arg_into_if
|@> pr "put_arg_into_if"
|> make_bottom
|@> pr "make_bottom"
|> pop_main
|@> pr "pop_main"
|> capitalize
|@> pr "capitalize"
|> Typing.infer
|&Flag.ModelCheck.useless_elim&> Useless_elim.elim
|&Flag.ModelCheck.beta_reduce&> beta_reduce
|& !Flag.ModelCheck.church_encode&> church_encode
|@> pr "church_encode"
let check abst prog spec =
if !Flag.Print.progress
then Color.printf Color.Green "(%d-2)[%.3f] Checking HORS ... @?" !Flag.Log.cegar_loop !!Time.get;
set_status @@ Flag.Log.Other (Format.sprintf "(%d-2) Model checking" !Flag.Log.cegar_loop);
let abst' =
if List.mem ACPS prog.info.attr
then preprocess_cps abst
else preprocess abst
in
let result =
match !Flag.ModelCheck.mc, !Flag.mode with
| Flag.ModelCheck.TRecS, _ ->
let spec = TrecsInterface.make_spec (max_label abst) in
begin
match TrecsInterface.check (abst',spec) with
| TrecsInterface.Safe env -> Safe (uncapitalize_env env)
| TrecsInterface.Unsafe ce -> Unsafe (CESafety ce)
end
| Flag.ModelCheck.HorSat, Flag.NonTermination ->
let labels = List.map make_randint_label @@ List.filter_map (decomp_randint_name -| fst) prog.env in
let spec = HorSatInterface.make_spec_nonterm labels in
begin
match HorSatInterface.check_apt (abst',spec) with
| HorSatInterface.Safe env -> Safe (uncapitalize_env env)
| HorSatInterface.UnsafeAPT ce -> Unsafe (CENonTerm ce)
| HorSatInterface.Unsafe _ -> assert false
end
| Flag.ModelCheck.HorSat2, Flag.NonTermination ->
let labels = List.map make_randint_label @@ List.filter_map (decomp_randint_name -| fst) prog.env in
let spec = HorSat2Interface.make_spec_nonterm labels in
begin
match HorSat2Interface.check_apt (abst',spec) with
| HorSat2Interface.Safe env -> Safe (uncapitalize_env env)
| HorSat2Interface.UnsafeAPT ce -> Unsafe (CENonTerm ce)
| HorSat2Interface.Unsafe _ -> assert false
end
| Flag.ModelCheck.HorSat, _ ->
let spec = HorSatInterface.make_spec (max_label abst) in
begin
match HorSatInterface.check (abst',spec) with
| HorSatInterface.Safe env -> Safe (uncapitalize_env env)
| HorSatInterface.Unsafe ce -> Unsafe (CESafety ce)
| HorSatInterface.UnsafeAPT _ -> assert false
end
| Flag.ModelCheck.HorSat2, _ ->
let spec = HorSat2Interface.make_spec (max_label abst) in
begin
match HorSat2Interface.check (abst',spec) with
| HorSat2Interface.Safe env -> Safe (uncapitalize_env env)
| HorSat2Interface.Unsafe ce -> Unsafe (CESafety ce)
| HorSat2Interface.UnsafeAPT _ -> assert false
end
| Flag.ModelCheck.HorSatP, Flag.FairNonTermination ->
let fairness =
match spec with
| Fairness x -> x
| Other -> assert false in
Verbose.printf "\nFAIRNESS: %a@." Fair_termination_util.print_fairness fairness;
let randint_labels = List.map make_randint_label @@ List.filter_map (decomp_randint_name -| fst) prog.env in
let events = List.map (fun s -> "event_" ^ s) @@ col_events prog in
let labels = events @ randint_labels in
let spec = HorSatPInterface.make_fair_nonterm_spec labels fairness in
begin
match HorSatPInterface.check (abst',spec) with
| HorSatPInterface.Satisfied -> Safe []
| HorSatPInterface.Unsatisfied ->
let fname = Filename.change_extension !!Flag.Input.main "error_hors" in
let rules = HorSatPInterface.read_HORS_file fname in
Unsafe (CEFairNonTerm rules)
end
| Flag.ModelCheck.HorSatP, _ ->
assert false
in
if !Flag.Print.progress then Color.printf Color.Green "DONE!@.@.";
result
let check abst prog spec =
Time.measure_and_add Flag.Log.Time.mc (check abst prog) spec
| null |
https://raw.githubusercontent.com/hopv/MoCHi/b0ac0d626d64b1e3c779d8e98cb232121cc3196a/src/ModelCheck.ml
|
ocaml
|
open Util
open Mochi_util
open CEGAR_syntax
open CEGAR_type
open CEGAR_util
module Debug = Debug.Make(struct let check = Flag.Debug.make_check __MODULE__ end)
type filename = string
type node = UnitNode | BrNode | LineNode of int | EventNode of string
type counterexample =
| CESafety of TrecsInterface.counterexample
| CENonTerm of HorSatInterface.counterexample_apt
| CEFairNonTerm of HORS_syntax.rules
type result = Safe of (var * Inter_type.t) list | Unsafe of counterexample
type mc_spec =
| SpecTRecS of TrecsInterface.spec
| SpecHorSat of HorSatInterface.spec
type spec =
| Fairness of Fair_termination_type.fairness
| Other
let make_file_spec () =
[0, "unit", [];
0, "event_newr", [1];
1, "event_read", [1];
1, "event_close", [4];
0, "event_neww", [2];
2, "event_write", [2];
2, "event_close", [4];
2, "event_newr", [3];
1, "event_neww", [3];
3, "unit", [];
3, "event_read", [3];
3, "event_write", [3];
3, "event_close", [3];
4, "unit", [];]
let capitalize_var = String.capitalize
let uncapitalize_var = String.uncapitalize
let uncapitalize_env env = List.map (Pair.map_fst uncapitalize_var) env
let capitalize {env;defs;main;info} =
let env' = List.map (Pair.map_fst capitalize_var) env in
let sbst1 = subst_map @@ List.map (fun (f,_) -> f, Var (capitalize_var f)) env in
let aux {fn=f; args=xs; cond; body} =
let xs' = List.map uncapitalize_var xs in
let sbst2 = subst_map @@ List.map2 (fun x x' -> x, Var x') xs xs' in
{fn = capitalize_var f;
args = xs';
cond = sbst1 @@ sbst2 cond;
body = sbst1 @@ sbst2 body}
in
let defs' = List.map aux defs in
let main' = capitalize_var main in
{env=env'; defs=defs'; main=main'; info}
let elim_non_det ({defs;main;info} as prog) =
let env = get_ext_fun_env prog in
let check f {fn=g} = f = g in
let mem f defs = List.exists (check f) defs in
let rec elim_non_det_def = function
| [] -> []
| {fn=f; args=xs; cond=t1; body=t2}::defs when mem f defs ->
let f' = rename_id f in
let defs1,defs2 = List.partition (check f) defs in
let defs1' = List.map (fun def -> {def with fn = rename_id def.fn}) defs1 in
let t =
let args = List.map (fun x -> Var x) xs in
let app f = make_app (Var f) args in
List.fold_left (fun t {fn=g} -> make_br (app g) t) (app f') defs1'
in
{fn=f;args=xs;cond=Const True;body=t}::{fn=f';args=xs;cond=t1;body=t2}::defs1' @ elim_non_det_def defs2
| def::defs -> def :: elim_non_det_def defs
in
Typing.infer {env; defs=elim_non_det_def defs; main; info}
let make_bottom {env;defs;main;info} =
let bottoms = ref [] in
let make_bottom n =
let x = "Bottom" ^ string_of_int n in
bottoms := (x,n)::!bottoms;
Var x
in
let aux_def {fn=f; args=xs; cond; body=t2} =
let f_typ = List.assoc f env in
let env' = get_arg_env f_typ xs @@@ env in
let rec aux_term t typ =
match t,typ with
| Const Bottom, typ -> make_bottom (get_arg_num typ)
| Const c, _ -> Const c
| Var x, _ -> Var x
| App(App(App(Const If, t1), t2), t3), typ ->
let t1' = aux_term t1 (TBase(TBool,fun _ -> [])) in
let t2' =
try
aux_term t2 typ
with TypeBottom -> make_bottom 0
in
let t3' =
try
aux_term t3 typ
with TypeBottom -> make_bottom 0
in
App(App(App(Const If, t1'), t2'), t3')
| App(Const (Label n), t), typ -> App(Const (Label n), aux_term t typ)
| App(t1,t2), _ ->
let typ = get_typ env' t1 in
let typ' =
match typ with
TFun(typ,_) -> typ
| _ -> assert false
in
App(aux_term t1 typ, aux_term t2 typ')
| Let _, _ -> assert false
| Fun _, _ -> assert false
in
let app_typ x = function
TFun(_,typ2) -> typ2 (Var x)
| _ -> assert false
in
let typ = List.fold_right app_typ xs f_typ in
let t2' = aux_term t2 typ in
{fn=f; args=xs; cond; body=t2'}
in
let bot0 = make_bottom 0 in
let defs' = List.map aux_def defs in
let bottom_defs =
let make (x,n) = {fn=x; args=List.init n @@ Fun.const "x"; cond=Const True; body=bot0} in
List.map make (List.unique !bottoms)
in
{env; defs=bottom_defs@@@defs'; main; info}
let rec eta_expand_term env = function
Const c -> Const c
| Var x -> Var x
| App(App(App(Const If, Const (Rand(TBool,_))), t2), t3) ->
let typ = get_typ env t2 in
let xs = Array.to_list (Array.init (arg_num typ) (fun _ -> new_id "x")) in
let aux t = List.fold_left (fun t x -> App(t, Var x)) (eta_expand_term env t) xs in
let t = make_if (Const (Rand(TBool,None))) (aux t2) (aux t3) in
List.fold_right (fun x t -> Fun(x,None,t)) xs t
| App(t1, t2) -> App(eta_expand_term env t1, eta_expand_term env t2)
| Fun _ -> assert false
| Let _ -> assert false
let eta_expand_def env ({fn=f; args=xs; body=t2} as def) =
let d = arg_num (List.assoc f env) - List.length xs in
let ys = Array.to_list (Array.init d (fun _ -> new_id "x")) in
let t2' = eta_expand_term (get_arg_env (List.assoc f env) xs @@@ env) t2 in
let body = List.fold_left (fun t x -> App(t, Var x)) t2' ys in
{def with args=xs@ys; body}
let eta_expand prog =
{prog with defs = List.map (eta_expand_def prog.env) prog.defs}
|> CEGAR_lift.lift2
let trans_ce ce =
let aux (s,_) =
match s with
| "unit" -> []
| "br" -> []
| s when s.[0] = 'l' -> [int_of_string @@ String.slice ~first:1 s]
| s when String.starts_with s "event_" -> []
| _ -> assert false
in
List.flatten_map aux ce
let true_var = "True"
let false_var = "False"
let rec church_encode_term = function
| Const True -> Var true_var
| Const False -> Var false_var
| Const If -> assert false
| Const c -> Const c
| Var x -> Var x
| App(App(App(Const If, Const (Rand(TBool,_))), t2), t3) ->
let t2' = church_encode_term t2 in
let t3' = church_encode_term t3 in
make_app (Const If) [Const (Rand(TBool,None)); t2'; t3']
| App(App(App(Const If, Var b), t2), t3) ->
let t2' = church_encode_term t2 in
let t3' = church_encode_term t3 in
make_app (Var b) [t2'; t3']
| App(t1, t2) -> App(church_encode_term t1, church_encode_term t2)
| Let _ -> assert false
| Fun _ -> assert false
let church_encode {defs;main;info} =
let args = ["x"; "y"] in
let cond = Const True in
let true_def = {fn=true_var; args; cond=Const True; body=Var "x"} in
let false_def = {fn=false_var; args; cond; body=Var "y"} in
let defs' = List.map (map_body_def church_encode_term) defs @ [true_def; false_def] in
let prog = {env=[];defs=defs';main;info} in
if false then Format.printf "CHURCH ENCODE:\n%a@." CEGAR_print.prog prog;
Typing.infer prog
let rec full_app f n = function
| Const _ -> true
| Var x when f = x -> false
| Var _ -> true
| App _ as t ->
let t1,ts = decomp_app t in
let b1 = if t1 = Var f then n = List.length ts else true in
let b2 = List.for_all (full_app f n) ts in
b1 && b2
| Let _ -> assert false
| Fun _ -> assert false
let should_reduce {fn=f; cond} env defs =
let n = arg_num (List.assoc f env) in
cond = Const True &&
List.count (fun {fn} -> f = fn) defs = 1 &&
List.length (List.rev_flatten_map (fun {body} -> List.filter ((=) f) (get_fv body)) defs) = 1 &&
List.for_all (fun def -> full_app f n def.body) defs
let rec get_head_count f = function
| Const _ -> 0
| Var _ -> 0
| App _ as t ->
let t1,ts = decomp_app t in
let n = List.fold_left (fun n t -> n + get_head_count f t) 0 ts in
if t1 = Var f
then 1 + n
else n
| Let _ -> assert false
| Fun _ -> assert false
let rec beta_reduce_term flag ({fn=f; args=xs; body=t2} as def) = function
| Const c -> Const c
| Var x -> Var x
| App _ as t ->
let t1,ts = decomp_app t in
let ts' = List.map (beta_reduce_term flag def) ts in
if t1 = Var f then
if List.for_all (function Const _ | Var _ -> true | App _ -> false | _ -> assert false) ts'
then List.fold_right2 subst xs ts' t2
else (flag := true; make_app t1 ts')
else
make_app t1 ts'
| Let _ -> assert false
| Fun _ -> assert false
let beta_reduce_term flag ({fn=f} as def) t =
let n = get_head_count f t in
if n = 1
then beta_reduce_term flag def t
else (if n >= 2 then flag := true; t)
let beta_reduce_aux {env;defs;main;info} =
let rec aux defs1 = function
[] -> defs1
| def::defs2 when should_reduce def env (defs1@@@def::defs2) ->
let flag = ref false in
let reduce_def def' = {def' with body = beta_reduce_term flag def def'.body} in
let defs1' = List.map reduce_def defs1 in
let defs2' = List.map reduce_def defs2 in
if !flag
then aux (defs1'@[def]) defs2'
else aux defs1' defs2'
| def::defs2 -> aux (defs1@[def]) defs2
in
{env; defs = aux [] defs; main; info}
let rec beta_reduce prog =
let prog' = beta_reduce_aux prog in
if prog.defs = prog'.defs
then prog
else beta_reduce prog'
let pr s t =
Debug.printf "##[ModelCheck] %s:@.%a@.@." s CEGAR_print.prog_typ t;
ignore @@ Typing.infer t
let preprocess prog =
Format.eprintf "WARNING: model checking for non-CPS programs is unmaintained.@.";
prog
|@> pr "INPUT"
|> CEGAR_CPS.trans -$- true
|@> pr "CPS"
|> eta_expand
|@> pr "eta_expand"
|> elim_non_det
|@> pr "elim_non_det"
|> make_bottom
|@> pr "make_bottom"
|> pop_main
|@> pr "pop_main"
|> capitalize
|@> pr "capitalize"
|> Typing.infer
|&Flag.ModelCheck.useless_elim&> Useless_elim.elim
|&Flag.ModelCheck.beta_reduce&> beta_reduce
|& !Flag.ModelCheck.church_encode&> church_encode
|@> pr "church_encode"
let preprocess_cps prog =
prog
|@> pr "INPUT"
|> eta_expand
|@> pr "eta_expand"
|> elim_non_det
|@> pr "elim_non_det"
|> put_arg_into_if
|@> pr "put_arg_into_if"
|> make_bottom
|@> pr "make_bottom"
|> pop_main
|@> pr "pop_main"
|> capitalize
|@> pr "capitalize"
|> Typing.infer
|&Flag.ModelCheck.useless_elim&> Useless_elim.elim
|&Flag.ModelCheck.beta_reduce&> beta_reduce
|& !Flag.ModelCheck.church_encode&> church_encode
|@> pr "church_encode"
let check abst prog spec =
if !Flag.Print.progress
then Color.printf Color.Green "(%d-2)[%.3f] Checking HORS ... @?" !Flag.Log.cegar_loop !!Time.get;
set_status @@ Flag.Log.Other (Format.sprintf "(%d-2) Model checking" !Flag.Log.cegar_loop);
let abst' =
if List.mem ACPS prog.info.attr
then preprocess_cps abst
else preprocess abst
in
let result =
match !Flag.ModelCheck.mc, !Flag.mode with
| Flag.ModelCheck.TRecS, _ ->
let spec = TrecsInterface.make_spec (max_label abst) in
begin
match TrecsInterface.check (abst',spec) with
| TrecsInterface.Safe env -> Safe (uncapitalize_env env)
| TrecsInterface.Unsafe ce -> Unsafe (CESafety ce)
end
| Flag.ModelCheck.HorSat, Flag.NonTermination ->
let labels = List.map make_randint_label @@ List.filter_map (decomp_randint_name -| fst) prog.env in
let spec = HorSatInterface.make_spec_nonterm labels in
begin
match HorSatInterface.check_apt (abst',spec) with
| HorSatInterface.Safe env -> Safe (uncapitalize_env env)
| HorSatInterface.UnsafeAPT ce -> Unsafe (CENonTerm ce)
| HorSatInterface.Unsafe _ -> assert false
end
| Flag.ModelCheck.HorSat2, Flag.NonTermination ->
let labels = List.map make_randint_label @@ List.filter_map (decomp_randint_name -| fst) prog.env in
let spec = HorSat2Interface.make_spec_nonterm labels in
begin
match HorSat2Interface.check_apt (abst',spec) with
| HorSat2Interface.Safe env -> Safe (uncapitalize_env env)
| HorSat2Interface.UnsafeAPT ce -> Unsafe (CENonTerm ce)
| HorSat2Interface.Unsafe _ -> assert false
end
| Flag.ModelCheck.HorSat, _ ->
let spec = HorSatInterface.make_spec (max_label abst) in
begin
match HorSatInterface.check (abst',spec) with
| HorSatInterface.Safe env -> Safe (uncapitalize_env env)
| HorSatInterface.Unsafe ce -> Unsafe (CESafety ce)
| HorSatInterface.UnsafeAPT _ -> assert false
end
| Flag.ModelCheck.HorSat2, _ ->
let spec = HorSat2Interface.make_spec (max_label abst) in
begin
match HorSat2Interface.check (abst',spec) with
| HorSat2Interface.Safe env -> Safe (uncapitalize_env env)
| HorSat2Interface.Unsafe ce -> Unsafe (CESafety ce)
| HorSat2Interface.UnsafeAPT _ -> assert false
end
| Flag.ModelCheck.HorSatP, Flag.FairNonTermination ->
let fairness =
match spec with
| Fairness x -> x
| Other -> assert false in
Verbose.printf "\nFAIRNESS: %a@." Fair_termination_util.print_fairness fairness;
let randint_labels = List.map make_randint_label @@ List.filter_map (decomp_randint_name -| fst) prog.env in
let events = List.map (fun s -> "event_" ^ s) @@ col_events prog in
let labels = events @ randint_labels in
let spec = HorSatPInterface.make_fair_nonterm_spec labels fairness in
begin
match HorSatPInterface.check (abst',spec) with
| HorSatPInterface.Satisfied -> Safe []
| HorSatPInterface.Unsatisfied ->
let fname = Filename.change_extension !!Flag.Input.main "error_hors" in
let rules = HorSatPInterface.read_HORS_file fname in
Unsafe (CEFairNonTerm rules)
end
| Flag.ModelCheck.HorSatP, _ ->
assert false
in
if !Flag.Print.progress then Color.printf Color.Green "DONE!@.@.";
result
let check abst prog spec =
Time.measure_and_add Flag.Log.Time.mc (check abst prog) spec
|
|
6970b4be68924fadde514078fc275d19118262a085a033f60f6bc7465842da61
|
mirage/irmin-watcher
|
polling.mli
|
(*---------------------------------------------------------------------------
   Copyright (c) 2016 Thomas Gazagnaire. All rights reserved.
   Distributed under the ISC license, see terms at the end of the file.
   %%NAME%% %%VERSION%%
  ---------------------------------------------------------------------------*)
(** Active polling backend for Irmin watchers.
    {e %%VERSION%% β {{:%%PKG_HOMEPAGE%%} homepage}} *)
open Core
val with_delay : float -> t
(** [with_delay delay i d p f] is the hook calling [f] everytime a sub-path of
    [p] is modified. Return a function to call to remove the hook. Active
    polling is done every [delay] seconds. *)
val v : t Lazy.t
(** [v] is [with_delay !default_polling_time]. *)
val mode : [ `Polling ]
(*---------------------------------------------------------------------------
   Copyright (c) 2016 Thomas Gazagnaire
   Permission to use, copy, modify, and/or distribute this software for any
   purpose with or without fee is hereby granted, provided that the above
   copyright notice and this permission notice appear in all copies.
   THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
   WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
   MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
   ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
   WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
   ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
   OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
  ---------------------------------------------------------------------------*)
| null |
https://raw.githubusercontent.com/mirage/irmin-watcher/753084ba383ca5d1ea30b3bf89c8757a782d213c/src/polling.mli
|
ocaml
|
* [v] is [with_delay !default_polling_time].
|
(*---------------------------------------------------------------------------
   Copyright (c) 2016 Thomas Gazagnaire. All rights reserved.
   Distributed under the ISC license, see terms at the end of the file.
   %%NAME%% %%VERSION%%
  ---------------------------------------------------------------------------*)
(** Active polling backend for Irmin watchers.
    {e %%VERSION%% β {{:%%PKG_HOMEPAGE%%} homepage}} *)
open Core
val with_delay : float -> t
(** [with_delay delay i d p f] is the hook calling [f] everytime a sub-path of
    [p] is modified. Return a function to call to remove the hook. Active
    polling is done every [delay] seconds. *)
val v : t Lazy.t
val mode : [ `Polling ]
(*---------------------------------------------------------------------------
   Copyright (c) 2016 Thomas Gazagnaire
   Permission to use, copy, modify, and/or distribute this software for any
   purpose with or without fee is hereby granted, provided that the above
   copyright notice and this permission notice appear in all copies.
   THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
   WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
   MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
   ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
   WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
   ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
   OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
  ---------------------------------------------------------------------------*)
|
98ce410f77ee5b4b23f7cffa4e49676c89faacc97c9b80625b4d227d832791cb
|
coccinelle/coccinelle
|
check_reachability.ml
|
(*
 * This file is part of Coccinelle, licensed under the terms of the GPL v2.
 * See copyright.txt in the Coccinelle source code for more information.
 * The Coccinelle source code can be obtained at
 *)
(* ---------------------------------------------------------------- *)
(* code to check for ambiguities *)
(*
Idea: for each node that is said to be modified in any witness tree, we
check that all backward paths end up at the root of some witness tree
that says that the node should be modified. We then give a warning, if
the node itself appears more than once in such a path, because then there
could be some instances that are modified and some that are not. An
example is as follows:
f(); ... g(); ... - h();
with C code: f(); while(E) { h(); g(); } g(); h();
Then the h() in the while loop matches both the first ... and the - h();
Concretely, if a node 47 is in the witness tree rooted at 1 and the
witness tree rooted at 2, then we give an error if 47 is not in the set
of nodes satisfying AF[1v2] and give a warning if 47 is in the set of
nodes satisfying EXEF(47 & EXEF(1v2)). (Note that the root of a witness
tree here is the node causing the pattern to match; there might not be
any witnesses associated with this node.)
Another try on the exists formula:
!(1v2) & EXE[!(1v2) U 47]
The first !(1v2) is to discard immediately cases where the beginning and
end of the path are the same. Afterwards, it would only seem necessary to
search up to the next occurrence of 47 (leaf), ensuring that there are not
1s or 2s (starting points) along the way. Then the second 47 would be in
the path, but possible not transformed.
*)
module CTL = Ast_ctl
(* Step 1: for each tree, make a mapping from the modified nodes to the root
of the tree *)
type nodei = Control_flow_c.G.key
let modified = (Hashtbl.create(25) : (nodei, nodei list ref) Hashtbl.t)
let build_modified (n,_,wits) =
let rec loop = function
CTL.Wit(st,[CTL.Subst(x,Wrapper_ctl.PredVal(CTL.Modif(v)))],anno,wit) ->
let cell =
try Hashtbl.find modified st
with Not_found ->
let cell = ref [] in Hashtbl.add modified st cell; cell in
cell := n :: !cell;
List.iter loop wit
| CTL.Wit(st,_,anno,wit) -> List.iter loop wit
| CTL.NegWit(wit) -> () in
List.iter loop wits
(* Step 2: For each node in the hash table, create the error and warning
formulas *)
type 'a nodes = Node of 'a | After
let create_formulas _ =
Hashtbl.fold
(function node ->
function roots ->
function acc ->
(*let exef f =
wrap
(Ast_ctl.EX
(Ast_ctl.BACKWARD,wrap(Ast_ctl.EF(Ast_ctl.BACKWARD,f)))) in*)
let match_node = Ast_ctl.Pred(Node(node)) in
let match_roots =
List.map (function n -> Ast_ctl.Pred(Node(n)))
(List.sort compare !roots) in
let or_roots =
List.fold_left
(function prev -> function cur -> Ast_ctl.Or(prev,cur))
(List.hd match_roots) (List.tl match_roots) in
(* no point to search if no path, and the presence of after
in the AF formula can make things slow *)
if List.mem node !roots
then acc
else
(node,
Ast_ctl.AF(Ast_ctl.BACKWARD,Ast_ctl.NONSTRICT,
Ast_ctl.Or(or_roots,Ast_ctl.Pred(After))),
let nonroot = Ast_ctl.Not(or_roots) in
Ast_ctl.And
(Ast_ctl.NONSTRICT,
nonroot,
Ast_ctl.EX
(Ast_ctl.BACKWARD,
Ast_ctl.EU(Ast_ctl.BACKWARD,nonroot,match_node))))
(*exef
(wrap(Ast_ctl.And(Ast_ctl.NONSTRICT,match_node,exef(roots))))*)
:: acc)
modified []
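(* For a node [n] whose witness-tree roots are [r1 ... rk], the fold above
   pairs [n] with the error formula AF(r1 v ... v rk v After) and the
   warning formula !(r1 v ... v rk) & EX E[!(r1 v ... v rk) U n], both
   checked backwards, matching the discussion at the top of the file. *)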
(* Step 3: check the formula on the control-flow graph *)
module PRED =
struct
type t = nodei nodes
let print_predicate = function
After -> Format.print_string "after"
| Node x -> Format.print_string (string_of_int x)
end
module ENV =
struct
type value = unit
type mvar = unit
let eq_mvar x x' = failwith "should not be invoked"
let eq_val v v' = failwith "should not be invoked"
let merge_val v v' = failwith "should not be invoked"
let print_mvar s = failwith "should not be invoked"
let print_value x = failwith "should not be invoked"
end
module CFG =
struct
type node = nodei
type cfg = Control_flow_c.node Control_flow_c.G.ograph_mutable
let predecessors cfg n =
List.map fst (Control_flow_c.KeyEdgeSet.elements (cfg#predecessors n))
let successors cfg n =
List.map fst (Control_flow_c.KeyEdgeSet.elements (cfg#successors n))
let direct_predecessors cfg n =
List.map fst
(List.filter (fun (a,c) -> c = Control_flow_c.Direct)
(Control_flow_c.KeyEdgeSet.elements (cfg#predecessors n)))
let direct_successors cfg n =
List.map fst
(List.filter (fun (a,c) -> c = Control_flow_c.Direct)
(Control_flow_c.KeyEdgeSet.elements (cfg#successors n)))
let extract_is_loop cfg n =
Control_flow_c.extract_is_loop (Control_flow_c.KeyMap.find n cfg#nodes)
let print_node i = Format.print_string (string_of_int i)
let size cfg = Control_flow_c.KeyMap.cardinal cfg#nodes
let print_graph cfg label border_nodes fill_nodes filename = ()
end
module ENGINE = Ctl_engine.CTL_ENGINE (ENV) (CFG) (PRED)
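(* [test_formula state formula cfg] runs the CTL engine backwards from
   [state]: the label function maps [Node n] to node [n] only and [After] to
   every AfterNode of the graph; the verbosity and partial-match flags are
   saved and restored around the call. *)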
let test_formula state formula cfg =
let label = function
Node pred -> [(pred,[],[])]
| After ->
List.concat
(List.map
(fun (nodei, node) ->
match Control_flow_c.unwrap node with
Control_flow_c.AfterNode _ -> [(nodei,[],[])]
| _ -> [])
(Control_flow_c.KeyMap.bindings cfg#nodes)) in
let preproc _ = true in
let verbose = !Flag_ctl.verbose_ctl_engine in
let pm = !Flag_ctl.partial_match in
(* let gt = !Flag_ctl.graphical_trace in *)
Flag_ctl.verbose_ctl_engine := false;
Flag_ctl.partial_match := false;
Flag_ctl.checking_reachability := true;
(* Flag_ctl.graphical_trace := ""; *)
let res =
ENGINE.sat (cfg,label,preproc,List.map fst (Control_flow_c.KeyMap.bindings cfg#nodes))
(CTL.And(CTL.NONSTRICT,CTL.Pred(Node(state)),formula))
[[Node(state)]] in
Flag_ctl.verbose_ctl_engine := verbose;
Flag_ctl.partial_match := pm;
Flag_ctl.checking_reachability := false;
(* Flag_ctl.graphical_trace := gt; *)
match res with [] -> false | _ -> true
(* ---------------------------------------------------------------- *)
(* Entry point *)
(* The argument is a list of triples with a node name, an empty environment
and a witness tree *)
type witness =
(nodei, unit,
(nodei, unit, unit) Ast_ctl.generic_ctl list)
Ast_ctl.generic_witnesstree
type ('a,'b,'c,'d,'e) triples =
(nodei * 'a *
(nodei,
('b, ('c, 'd) Wrapper_ctl.wrapped_binding) CTL.generic_subst list, 'e)
CTL.generic_witnesstree list) list
let check_reachability rulename triples cfg =
List.iter build_modified triples;
let formulas = create_formulas() in
Hashtbl.clear modified;
List.iter
(function (node,af_formula,ef_formula) ->
if test_formula node af_formula cfg
then
if test_formula node ef_formula cfg
then
let n = Control_flow_c.KeyMap.find node cfg#nodes in
Printf.printf
"warning: %s, node %d: %s in %s may be inconsistently modified\n"
rulename node (snd n) !Flag.current_element
else ()
else
let n = Control_flow_c.KeyMap.find node cfg#nodes in
failwith
(Printf.sprintf
"%s: node %d: %s in %s reachable by inconsistent control-flow paths"
rulename node (snd n) !Flag.current_element))
formulas
| null |
https://raw.githubusercontent.com/coccinelle/coccinelle/2df29c1740c8639d181e83eb58b36599686a0b62/engine/check_reachability.ml
|
ocaml
|
----------------------------------------------------------------
code to check for ambiguities
no point to search if no path, and the presence of after
in the AF formula can make things slow
let gt = !Flag_ctl.graphical_trace in
Flag_ctl.graphical_trace := "";
Flag_ctl.graphical_trace := gt;
----------------------------------------------------------------
Entry point
The argument is a list of triples with a node name, an empty environment
and a witness tree
|
(*
 * This file is part of Coccinelle, licensed under the terms of the GPL v2.
 * See copyright.txt in the Coccinelle source code for more information.
 * The Coccinelle source code can be obtained at
 *)
(*
Idea: for each node that is said to be modified in any witness tree, we
check that all backward paths end up at the root of some witness tree
that says that the node should be modified. We then give a warning, if
the node itself appears more than once in such a path, because then there
could be some instances that are modified and some that are not. An
example is as follows:
f(); ... g(); ... - h();
with C code: f(); while(E) { h(); g(); } g(); h();
Then the h() in the while loop matches both the first ... and the - h();
Concretely, if a node 47 is in the witness tree rooted at 1 and the
witness tree rooted at 2, then we give an error if 47 is not in the set
of nodes satisfying AF[1v2] and give a warning if 47 is in the set of
nodes satisfying EXEF(47 & EXEF(1v2)). (Note that the root of a witness
tree here is the node causing the pattern to match; there might not be
any witnesses associated with this node.)
Another try on the exists formula:
!(1v2) & EXE[!(1v2) U 47]
The first !(1v2) is to discard immediately cases where the beginning and
end of the path are the same. Afterwards, it would only seem necessary to
search up to the next occurrence of 47 (leaf), ensuring that there are not
1s or 2s (starting points) along the way. Then the second 47 would be in
the path, but possible not transformed.
*)
module CTL = Ast_ctl
(* Step 1: for each tree, make a mapping from the modified nodes to the root
of the tree *)
type nodei = Control_flow_c.G.key
let modified = (Hashtbl.create(25) : (nodei, nodei list ref) Hashtbl.t)
let build_modified (n,_,wits) =
let rec loop = function
CTL.Wit(st,[CTL.Subst(x,Wrapper_ctl.PredVal(CTL.Modif(v)))],anno,wit) ->
let cell =
try Hashtbl.find modified st
with Not_found ->
let cell = ref [] in Hashtbl.add modified st cell; cell in
cell := n :: !cell;
List.iter loop wit
| CTL.Wit(st,_,anno,wit) -> List.iter loop wit
| CTL.NegWit(wit) -> () in
List.iter loop wits
(* Step 2: For each node in the hash table, create the error and warning
formulas *)
type 'a nodes = Node of 'a | After
let create_formulas _ =
Hashtbl.fold
(function node ->
function roots ->
function acc ->
(*let exef f =
wrap
(Ast_ctl.EX
(Ast_ctl.BACKWARD,wrap(Ast_ctl.EF(Ast_ctl.BACKWARD,f)))) in*)
let match_node = Ast_ctl.Pred(Node(node)) in
let match_roots =
List.map (function n -> Ast_ctl.Pred(Node(n)))
(List.sort compare !roots) in
let or_roots =
List.fold_left
(function prev -> function cur -> Ast_ctl.Or(prev,cur))
(List.hd match_roots) (List.tl match_roots) in
if List.mem node !roots
then acc
else
(node,
Ast_ctl.AF(Ast_ctl.BACKWARD,Ast_ctl.NONSTRICT,
Ast_ctl.Or(or_roots,Ast_ctl.Pred(After))),
let nonroot = Ast_ctl.Not(or_roots) in
Ast_ctl.And
(Ast_ctl.NONSTRICT,
nonroot,
Ast_ctl.EX
(Ast_ctl.BACKWARD,
Ast_ctl.EU(Ast_ctl.BACKWARD,nonroot,match_node))))
(*exef
(wrap(Ast_ctl.And(Ast_ctl.NONSTRICT,match_node,exef(roots))))*)
:: acc)
modified []
(* Step 3: check the formula on the control-flow graph *)
module PRED =
struct
type t = nodei nodes
let print_predicate = function
After -> Format.print_string "after"
| Node x -> Format.print_string (string_of_int x)
end
module ENV =
struct
type value = unit
type mvar = unit
let eq_mvar x x' = failwith "should not be invoked"
let eq_val v v' = failwith "should not be invoked"
let merge_val v v' = failwith "should not be invoked"
let print_mvar s = failwith "should not be invoked"
let print_value x = failwith "should not be invoked"
end
module CFG =
struct
type node = nodei
type cfg = Control_flow_c.node Control_flow_c.G.ograph_mutable
let predecessors cfg n =
List.map fst (Control_flow_c.KeyEdgeSet.elements (cfg#predecessors n))
let successors cfg n =
List.map fst (Control_flow_c.KeyEdgeSet.elements (cfg#successors n))
let direct_predecessors cfg n =
List.map fst
(List.filter (fun (a,c) -> c = Control_flow_c.Direct)
(Control_flow_c.KeyEdgeSet.elements (cfg#predecessors n)))
let direct_successors cfg n =
List.map fst
(List.filter (fun (a,c) -> c = Control_flow_c.Direct)
(Control_flow_c.KeyEdgeSet.elements (cfg#successors n)))
let extract_is_loop cfg n =
Control_flow_c.extract_is_loop (Control_flow_c.KeyMap.find n cfg#nodes)
let print_node i = Format.print_string (string_of_int i)
let size cfg = Control_flow_c.KeyMap.cardinal cfg#nodes
let print_graph cfg label border_nodes fill_nodes filename = ()
end
module ENGINE = Ctl_engine.CTL_ENGINE (ENV) (CFG) (PRED)
let test_formula state formula cfg =
let label = function
Node pred -> [(pred,[],[])]
| After ->
List.concat
(List.map
(fun (nodei, node) ->
match Control_flow_c.unwrap node with
Control_flow_c.AfterNode _ -> [(nodei,[],[])]
| _ -> [])
(Control_flow_c.KeyMap.bindings cfg#nodes)) in
let preproc _ = true in
let verbose = !Flag_ctl.verbose_ctl_engine in
let pm = !Flag_ctl.partial_match in
Flag_ctl.verbose_ctl_engine := false;
Flag_ctl.partial_match := false;
Flag_ctl.checking_reachability := true;
let res =
ENGINE.sat (cfg,label,preproc,List.map fst (Control_flow_c.KeyMap.bindings cfg#nodes))
(CTL.And(CTL.NONSTRICT,CTL.Pred(Node(state)),formula))
[[Node(state)]] in
Flag_ctl.verbose_ctl_engine := verbose;
Flag_ctl.partial_match := pm;
Flag_ctl.checking_reachability := false;
match res with [] -> false | _ -> true
type witness =
(nodei, unit,
(nodei, unit, unit) Ast_ctl.generic_ctl list)
Ast_ctl.generic_witnesstree
type ('a,'b,'c,'d,'e) triples =
(nodei * 'a *
(nodei,
('b, ('c, 'd) Wrapper_ctl.wrapped_binding) CTL.generic_subst list, 'e)
CTL.generic_witnesstree list) list
let check_reachability rulename triples cfg =
List.iter build_modified triples;
let formulas = create_formulas() in
Hashtbl.clear modified;
List.iter
(function (node,af_formula,ef_formula) ->
if test_formula node af_formula cfg
then
if test_formula node ef_formula cfg
then
let n = Control_flow_c.KeyMap.find node cfg#nodes in
Printf.printf
"warning: %s, node %d: %s in %s may be inconsistently modified\n"
rulename node (snd n) !Flag.current_element
else ()
else
let n = Control_flow_c.KeyMap.find node cfg#nodes in
failwith
(Printf.sprintf
"%s: node %d: %s in %s reachable by inconsistent control-flow paths"
rulename node (snd n) !Flag.current_element))
formulas
|
36eeff395c485060f76df0b003b3c0c2f74531a6e20e8f8714f1f9c4e5882121
|
diku-dk/futhark
|
LowerAllocations.hs
|
{-# LANGUAGE ConstraintKinds #-}
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE NamedFieldPuns #-}
{-# LANGUAGE TypeFamilies #-}
-- | This pass attempts to lower allocations as far towards the bottom of their
-- body as possible.
module Futhark.Pass.LowerAllocations
( lowerAllocationsSeqMem,
lowerAllocationsGPUMem,
lowerAllocationsMCMem,
)
where
import Control.Monad.Reader
import Data.Function ((&))
import Data.Map qualified as M
import Data.Sequence (Seq (..))
import Data.Sequence qualified as Seq
import Futhark.IR.GPUMem
import Futhark.IR.MCMem
import Futhark.IR.SeqMem
import Futhark.Pass (Pass (..))
lowerAllocationsSeqMem :: Pass SeqMem SeqMem
lowerAllocationsSeqMem =
Pass "lower allocations" "lower allocations" $ \prog@Prog {progFuns} ->
pure $
prog
{ progFuns =
fmap
( \f@FunDef {funDefBody} ->
f {funDefBody = runReader (lowerAllocationsInBody funDefBody) (Env pure)}
)
progFuns
}
lowerAllocationsGPUMem :: Pass GPUMem GPUMem
lowerAllocationsGPUMem =
Pass "lower allocations gpu" "lower allocations gpu" $ \prog@Prog {progFuns} ->
pure $
prog
{ progFuns =
fmap
( \f@FunDef {funDefBody} ->
f {funDefBody = runReader (lowerAllocationsInBody funDefBody) (Env lowerAllocationsInHostOp)}
)
progFuns
}
lowerAllocationsMCMem :: Pass MCMem MCMem
lowerAllocationsMCMem =
Pass "lower allocations mc" "lower allocations mc" $ \prog@Prog {progFuns} ->
pure $
prog
{ progFuns =
fmap
( \f@FunDef {funDefBody} ->
f {funDefBody = runReader (lowerAllocationsInBody funDefBody) (Env lowerAllocationsInMCOp)}
)
progFuns
}
newtype Env inner = Env
{onInner :: inner -> LowerM inner inner}
type LowerM inner a = Reader (Env inner) a
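-- The Reader environment only carries 'onInner', the handler used to recurse
-- into representation-specific inner ops: 'pure' for SeqMem and the SegOp
-- traversals for GPUMem and MCMem, as set up by the three passes above.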
lowerAllocationsInBody ::
(Mem rep inner, LetDec rep ~ LetDecMem) =>
Body rep ->
LowerM (inner rep) (Body rep)
lowerAllocationsInBody body = do
stms <- lowerAllocationsInStms (bodyStms body) mempty mempty
pure $ body {bodyStms = stms}
lowerAllocationsInStms ::
(Mem rep inner, LetDec rep ~ LetDecMem) =>
-- | The input stms
Stms rep ->
-- | The allocations currently being lowered
M.Map VName (Stm rep) ->
-- | The other statements processed so far
Stms rep ->
LowerM (inner rep) (Stms rep)
lowerAllocationsInStms Empty allocs acc = pure $ acc <> Seq.fromList (M.elems allocs)
lowerAllocationsInStms (stm@(Let (Pat [PatElem vname _]) _ (Op (Alloc _ _))) :<| stms) allocs acc =
lowerAllocationsInStms stms (M.insert vname stm allocs) acc
lowerAllocationsInStms (stm0@(Let _ _ (Op (Inner inner))) :<| stms) alloc0 acc0 = do
on_inner <- asks onInner
inner' <- on_inner inner
let stm = stm0 {stmExp = Op $ Inner inner'}
(alloc, acc) = insertLoweredAllocs (freeIn stm0) alloc0 acc0
lowerAllocationsInStms stms alloc (acc :|> stm)
lowerAllocationsInStms (stm@(Let _ _ (Match cond_ses cases body dec)) :<| stms) alloc acc = do
cases' <- mapM (\(Case pat b) -> Case pat <$> lowerAllocationsInBody b) cases
body' <- lowerAllocationsInBody body
let stm' = stm {stmExp = Match cond_ses cases' body' dec}
(alloc', acc') = insertLoweredAllocs (freeIn stm) alloc acc
lowerAllocationsInStms stms alloc' (acc' :|> stm')
lowerAllocationsInStms (stm@(Let _ _ (DoLoop params form body)) :<| stms) alloc acc = do
body' <- lowerAllocationsInBody body
let stm' = stm {stmExp = DoLoop params form body'}
(alloc', acc') = insertLoweredAllocs (freeIn stm) alloc acc
lowerAllocationsInStms stms alloc' (acc' :|> stm')
lowerAllocationsInStms (stm :<| stms) alloc acc = do
let (alloc', acc') = insertLoweredAllocs (freeIn stm) alloc acc
lowerAllocationsInStms stms alloc' (acc' :|> stm)
insertLoweredAllocs :: Names -> M.Map VName (Stm rep) -> Stms rep -> (M.Map VName (Stm rep), Stms rep)
insertLoweredAllocs frees alloc acc =
frees
`namesIntersection` namesFromList (M.keys alloc)
& namesToList
& foldl
( \(alloc', acc') name ->
( M.delete name alloc',
acc' :|> alloc' M.! name
)
)
(alloc, acc)
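-- A schematic of the intended effect on a hypothetical statement sequence
-- (the names @mem@, @xs@, @ys@, @zs@ and @f@ are illustrative only): an
-- allocation is held back until the first later statement that mentions it.
--
-- >  let mem = alloc 1024          let ys  = copy xs
-- >  let ys  = copy xs       ==>   let mem = alloc 1024
-- >  let zs  = f mem ys            let zs  = f mem ys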
lowerAllocationsInSegOp ::
(Mem rep inner, LetDec rep ~ LetDecMem) =>
SegOp lvl rep ->
LowerM (inner rep) (SegOp lvl rep)
lowerAllocationsInSegOp (SegMap lvl sp tps body) = do
stms <- lowerAllocationsInStms (kernelBodyStms body) mempty mempty
pure $ SegMap lvl sp tps $ body {kernelBodyStms = stms}
lowerAllocationsInSegOp (SegRed lvl sp binops tps body) = do
stms <- lowerAllocationsInStms (kernelBodyStms body) mempty mempty
pure $ SegRed lvl sp binops tps $ body {kernelBodyStms = stms}
lowerAllocationsInSegOp (SegScan lvl sp binops tps body) = do
stms <- lowerAllocationsInStms (kernelBodyStms body) mempty mempty
pure $ SegScan lvl sp binops tps $ body {kernelBodyStms = stms}
lowerAllocationsInSegOp (SegHist lvl sp histops tps body) = do
stms <- lowerAllocationsInStms (kernelBodyStms body) mempty mempty
pure $ SegHist lvl sp histops tps $ body {kernelBodyStms = stms}
lowerAllocationsInHostOp :: HostOp NoOp GPUMem -> LowerM (HostOp NoOp GPUMem) (HostOp NoOp GPUMem)
lowerAllocationsInHostOp (SegOp op) = SegOp <$> lowerAllocationsInSegOp op
lowerAllocationsInHostOp op = pure op
lowerAllocationsInMCOp :: MCOp NoOp MCMem -> LowerM (MCOp NoOp MCMem) (MCOp NoOp MCMem)
lowerAllocationsInMCOp (ParOp par op) =
ParOp <$> traverse lowerAllocationsInSegOp par <*> lowerAllocationsInSegOp op
lowerAllocationsInMCOp op = pure op
| null |
https://raw.githubusercontent.com/diku-dk/futhark/174e8d862def6f52d5c9a36fa3aa6e746049776e/src/Futhark/Pass/LowerAllocations.hs
|
haskell
|
# LANGUAGE ConstraintKinds #
| This pass attempts to lower allocations as far towards the bottom of their
body as possible.
| The input stms
| The allocations currently being lowered
| The other statements processed so far
|
# LANGUAGE FlexibleContexts #
# LANGUAGE NamedFieldPuns #
# LANGUAGE TypeFamilies #
module Futhark.Pass.LowerAllocations
( lowerAllocationsSeqMem,
lowerAllocationsGPUMem,
lowerAllocationsMCMem,
)
where
import Control.Monad.Reader
import Data.Function ((&))
import Data.Map qualified as M
import Data.Sequence (Seq (..))
import Data.Sequence qualified as Seq
import Futhark.IR.GPUMem
import Futhark.IR.MCMem
import Futhark.IR.SeqMem
import Futhark.Pass (Pass (..))
lowerAllocationsSeqMem :: Pass SeqMem SeqMem
lowerAllocationsSeqMem =
Pass "lower allocations" "lower allocations" $ \prog@Prog {progFuns} ->
pure $
prog
{ progFuns =
fmap
( \f@FunDef {funDefBody} ->
f {funDefBody = runReader (lowerAllocationsInBody funDefBody) (Env pure)}
)
progFuns
}
lowerAllocationsGPUMem :: Pass GPUMem GPUMem
lowerAllocationsGPUMem =
Pass "lower allocations gpu" "lower allocations gpu" $ \prog@Prog {progFuns} ->
pure $
prog
{ progFuns =
fmap
( \f@FunDef {funDefBody} ->
f {funDefBody = runReader (lowerAllocationsInBody funDefBody) (Env lowerAllocationsInHostOp)}
)
progFuns
}
lowerAllocationsMCMem :: Pass MCMem MCMem
lowerAllocationsMCMem =
Pass "lower allocations mc" "lower allocations mc" $ \prog@Prog {progFuns} ->
pure $
prog
{ progFuns =
fmap
( \f@FunDef {funDefBody} ->
f {funDefBody = runReader (lowerAllocationsInBody funDefBody) (Env lowerAllocationsInMCOp)}
)
progFuns
}
newtype Env inner = Env
{onInner :: inner -> LowerM inner inner}
type LowerM inner a = Reader (Env inner) a
lowerAllocationsInBody ::
(Mem rep inner, LetDec rep ~ LetDecMem) =>
Body rep ->
LowerM (inner rep) (Body rep)
lowerAllocationsInBody body = do
stms <- lowerAllocationsInStms (bodyStms body) mempty mempty
pure $ body {bodyStms = stms}
lowerAllocationsInStms ::
(Mem rep inner, LetDec rep ~ LetDecMem) =>
Stms rep ->
M.Map VName (Stm rep) ->
Stms rep ->
LowerM (inner rep) (Stms rep)
lowerAllocationsInStms Empty allocs acc = pure $ acc <> Seq.fromList (M.elems allocs)
lowerAllocationsInStms (stm@(Let (Pat [PatElem vname _]) _ (Op (Alloc _ _))) :<| stms) allocs acc =
lowerAllocationsInStms stms (M.insert vname stm allocs) acc
lowerAllocationsInStms (stm0@(Let _ _ (Op (Inner inner))) :<| stms) alloc0 acc0 = do
on_inner <- asks onInner
inner' <- on_inner inner
let stm = stm0 {stmExp = Op $ Inner inner'}
(alloc, acc) = insertLoweredAllocs (freeIn stm0) alloc0 acc0
lowerAllocationsInStms stms alloc (acc :|> stm)
lowerAllocationsInStms (stm@(Let _ _ (Match cond_ses cases body dec)) :<| stms) alloc acc = do
cases' <- mapM (\(Case pat b) -> Case pat <$> lowerAllocationsInBody b) cases
body' <- lowerAllocationsInBody body
let stm' = stm {stmExp = Match cond_ses cases' body' dec}
(alloc', acc') = insertLoweredAllocs (freeIn stm) alloc acc
lowerAllocationsInStms stms alloc' (acc' :|> stm')
lowerAllocationsInStms (stm@(Let _ _ (DoLoop params form body)) :<| stms) alloc acc = do
body' <- lowerAllocationsInBody body
let stm' = stm {stmExp = DoLoop params form body'}
(alloc', acc') = insertLoweredAllocs (freeIn stm) alloc acc
lowerAllocationsInStms stms alloc' (acc' :|> stm')
lowerAllocationsInStms (stm :<| stms) alloc acc = do
let (alloc', acc') = insertLoweredAllocs (freeIn stm) alloc acc
lowerAllocationsInStms stms alloc' (acc' :|> stm)
insertLoweredAllocs :: Names -> M.Map VName (Stm rep) -> Stms rep -> (M.Map VName (Stm rep), Stms rep)
insertLoweredAllocs frees alloc acc =
frees
`namesIntersection` namesFromList (M.keys alloc)
& namesToList
& foldl
( \(alloc', acc') name ->
( M.delete name alloc',
acc' :|> alloc' M.! name
)
)
(alloc, acc)
lowerAllocationsInSegOp ::
(Mem rep inner, LetDec rep ~ LetDecMem) =>
SegOp lvl rep ->
LowerM (inner rep) (SegOp lvl rep)
lowerAllocationsInSegOp (SegMap lvl sp tps body) = do
stms <- lowerAllocationsInStms (kernelBodyStms body) mempty mempty
pure $ SegMap lvl sp tps $ body {kernelBodyStms = stms}
lowerAllocationsInSegOp (SegRed lvl sp binops tps body) = do
stms <- lowerAllocationsInStms (kernelBodyStms body) mempty mempty
pure $ SegRed lvl sp binops tps $ body {kernelBodyStms = stms}
lowerAllocationsInSegOp (SegScan lvl sp binops tps body) = do
stms <- lowerAllocationsInStms (kernelBodyStms body) mempty mempty
pure $ SegScan lvl sp binops tps $ body {kernelBodyStms = stms}
lowerAllocationsInSegOp (SegHist lvl sp histops tps body) = do
stms <- lowerAllocationsInStms (kernelBodyStms body) mempty mempty
pure $ SegHist lvl sp histops tps $ body {kernelBodyStms = stms}
lowerAllocationsInHostOp :: HostOp NoOp GPUMem -> LowerM (HostOp NoOp GPUMem) (HostOp NoOp GPUMem)
lowerAllocationsInHostOp (SegOp op) = SegOp <$> lowerAllocationsInSegOp op
lowerAllocationsInHostOp op = pure op
lowerAllocationsInMCOp :: MCOp NoOp MCMem -> LowerM (MCOp NoOp MCMem) (MCOp NoOp MCMem)
lowerAllocationsInMCOp (ParOp par op) =
ParOp <$> traverse lowerAllocationsInSegOp par <*> lowerAllocationsInSegOp op
lowerAllocationsInMCOp op = pure op
|
fcf7c1983e6aa6a476848c899fbf1e475908e49a235502d715f7a0f94b766914
|
mck-/Open-VRP
|
conditions.lisp
|
;; Error condition definitions
;; -------------------------
(in-package :open-vrp.util)
;; lib/lists.lisp
(define-condition unaccepted-predicate (error)
((pred :initarg :pred :reader pred))
(:report "Accepts only #'> or #'<."))
(define-condition index-out-of-bounds (error)
((index :initarg :index :reader index)
(ls :initarg :ls :reader ls)))
(define-condition list-of-nils (error)
((ls :initarg :ls :reader ls)
(key :initarg :key :reader key))
(:report "Cannot get min/max from a list of NIL values."))
;; lib/network.lisp
(define-condition same-origin-destination (error)
((from :initarg :from :reader from)
(to :initarg :to :reader to))
(:report "Trying to lookup distance for same origin and destination - NIL"))
;; lib/constraints.lisp
(define-condition infeasible-solution (error)
((sol :initarg :sol :reader sol)
(func :initarg :func :reader func))
(:report "The provided solution is already infeasible, cannot check for feasibility of the move."))
(define-condition no-capacities-vehicle (error)
((veh :initarg :veh :reader veh))
(:report "Trying to check capacities for a vehicle that has no defined capacity."))
(define-condition no-speed-vehicle (error)
((veh :initarg :veh :reader veh))
(:report "Trying to check TW constraints for a vehicle that has no defined speed."))
;; lib/network.lisp lib/fleet.lisp
(define-condition not-equal-length (error)
((lists :initarg :lists :reader lists))
(:report "Trying to create objects where input lists are of unequal length!"))
;; lib/init-macros.lisp
(define-condition empty-network (error)()
(:report "Network is empty! To create a network requires at least one parameter!"))
;; lib/draw-solution.lisp
(define-condition missing-drawer-object (error)
((prob :initarg :prob :reader prob))
(:report "Missing a <Drawer> object! Are you trying to plot without node-coords?"))
;; lib/read-test-case.lisp
(define-condition file-not-recognized (error)
  ((file :initarg :file :reader file))
(:report "Input file not recognized!"))
| null |
https://raw.githubusercontent.com/mck-/Open-VRP/408cb67063474ab61ddfc1631b5ac39714f2535e/lib/conditions.lisp
|
lisp
|
Error condition definitions
-------------------------
lib/constraints.lisp
lib/init-macros.lisp
lib/draw-solution.lisp
lib/read-test-case.lisp
|
(in-package :open-vrp.util)
;; lib/lists.lisp
(define-condition unaccepted-predicate (error)
((pred :initarg :pred :reader pred))
(:report "Accepts only #'> or #'<."))
(define-condition index-out-of-bounds (error)
((index :initarg :index :reader index)
(ls :initarg :ls :reader ls)))
(define-condition list-of-nils (error)
((ls :initarg :ls :reader ls)
(key :initarg :key :reader key))
(:report "Cannot get min/max from a list of NIL values."))
;; lib/network.lisp
(define-condition same-origin-destination (error)
((from :initarg :from :reader from)
(to :initarg :to :reader to))
(:report "Trying to lookup distance for same origin and destination - NIL"))
(define-condition infeasible-solution (error)
((sol :initarg :sol :reader sol)
(func :initarg :func :reader func))
(:report "The provided solution is already infeasible, cannot check for feasibility of the move."))
(define-condition no-capacities-vehicle (error)
((veh :initarg :veh :reader veh))
(:report "Trying to check capacities for a vehicle that has no defined capacity."))
(define-condition no-speed-vehicle (error)
((veh :initarg :veh :reader veh))
(:report "Trying to check TW constraints for a vehicle that has no defined speed."))
;; lib/network.lisp lib/fleet.lisp
(define-condition not-equal-length (error)
((lists :initarg :lists :reader lists))
(:report "Trying to create objects where input lists are of unequal length!"))
(define-condition empty-network (error)()
(:report "Network is empty! To create a network requires at least one parameter!"))
(define-condition missing-drawer-object (error)
((prob :initarg :prob :reader prob))
(:report "Missing a <Drawer> object! Are you trying to plot without node-coords?"))
(define-condition file-not-recognized (error)
  ((file :initarg :file :reader file))
(:report "Input file not recognized!"))
|
8366e4da31beffd4a7fa0dfc8804204481aa70b269557aa68e1a522e8bc5b4ac
|
monadbobo/ocaml-core
|
field.mli
|
(** OCaml record field. *)
(* ['record] is the type of the record. ['field] is the type of the
values stored in the record field with name [name]. *)
type ('record, 'field) t = {
name : string;
setter : ('record -> 'field -> unit) option;
getter : ('record -> 'field);
fset : ('record -> 'field -> 'record);
}
val name : (_, _) t -> string
val get : ('r, 'a) t -> 'r -> 'a
val fset : ('r, 'a) t -> 'r -> 'a -> 'r
val setter : ('r, 'a) t -> ('r -> 'a -> unit) option
type ('record,'result) user =
{f : 'field. ('record,'field) t -> 'result}
| null |
https://raw.githubusercontent.com/monadbobo/ocaml-core/9c1c06e7a1af7e15b6019a325d7dbdbd4cdb4020/base/fieldslib/lib/field.mli
|
ocaml
|
* OCaml record field.
['record] is the type of the record. ['field] is the type of the
values stored in the record field with name [name].
|
type ('record, 'field) t = {
name : string;
setter : ('record -> 'field -> unit) option;
getter : ('record -> 'field);
fset : ('record -> 'field -> 'record);
}
val name : (_, _) t -> string
val get : ('r, 'a) t -> 'r -> 'a
val fset : ('r, 'a) t -> 'r -> 'a -> 'r
val setter : ('r, 'a) t -> ('r -> 'a -> unit) option
type ('record,'result) user =
{f : 'field. ('record,'field) t -> 'result}
|
ea9948012efc71c1ef705e8e4e60f634c889f0656428f18533b6ba1f20fda3d5
|
clojure-interop/java-jdk
|
SynthTableHeaderUI.clj
|
(ns javax.swing.plaf.synth.SynthTableHeaderUI
"Provides the Synth L&F UI delegate for
JTableHeader."
(:refer-clojure :only [require comment defn ->])
(:import [javax.swing.plaf.synth SynthTableHeaderUI]))
(defn ->synth-table-header-ui
"Constructor."
(^SynthTableHeaderUI []
(new SynthTableHeaderUI )))
(defn *create-ui
"Creates a new UI object for the given component.
h - component to create UI object for - `javax.swing.JComponent`
returns: the UI object - `javax.swing.plaf.ComponentUI`"
(^javax.swing.plaf.ComponentUI [^javax.swing.JComponent h]
(SynthTableHeaderUI/createUI h)))
(defn update
"Notifies this UI delegate to repaint the specified component.
This method paints the component background, then calls
the paint(SynthContext,Graphics) method.
In general, this method does not need to be overridden by subclasses.
All Look and Feel rendering code should reside in the paint method.
g - the Graphics object used for painting - `java.awt.Graphics`
c - the component being painted - `javax.swing.JComponent`"
([^SynthTableHeaderUI this ^java.awt.Graphics g ^javax.swing.JComponent c]
(-> this (.update g c))))
(defn paint
"Paints the specified component according to the Look and Feel.
This method is not used by Synth Look and Feel.
Painting is handled by the paint(SynthContext,Graphics) method.
g - the Graphics object used for painting - `java.awt.Graphics`
c - the component being painted - `javax.swing.JComponent`"
([^SynthTableHeaderUI this ^java.awt.Graphics g ^javax.swing.JComponent c]
(-> this (.paint g c))))
(defn paint-border
"Paints the border.
context - a component context - `javax.swing.plaf.synth.SynthContext`
g - Graphics to paint on - `java.awt.Graphics`
x - the X coordinate - `int`
y - the Y coordinate - `int`
w - width of the border - `int`
h - height of the border - `int`"
([^SynthTableHeaderUI this ^javax.swing.plaf.synth.SynthContext context ^java.awt.Graphics g ^Integer x ^Integer y ^Integer w ^Integer h]
(-> this (.paintBorder context g x y w h))))
(defn get-context
"Returns the Context for the specified component.
c - Component requesting SynthContext. - `javax.swing.JComponent`
returns: SynthContext describing component. - `javax.swing.plaf.synth.SynthContext`"
(^javax.swing.plaf.synth.SynthContext [^SynthTableHeaderUI this ^javax.swing.JComponent c]
(-> this (.getContext c))))
(defn property-change
"This method gets called when a bound property is changed.
evt - A PropertyChangeEvent object describing the event source and the property that has changed. - `java.beans.PropertyChangeEvent`"
([^SynthTableHeaderUI this ^java.beans.PropertyChangeEvent evt]
(-> this (.propertyChange evt))))
| null |
https://raw.githubusercontent.com/clojure-interop/java-jdk/8d7a223e0f9a0965eb0332fad595cf7649d9d96e/javax.swing/src/javax/swing/plaf/synth/SynthTableHeaderUI.clj
|
clojure
|
(ns javax.swing.plaf.synth.SynthTableHeaderUI
"Provides the Synth L&F UI delegate for
JTableHeader."
(:refer-clojure :only [require comment defn ->])
(:import [javax.swing.plaf.synth SynthTableHeaderUI]))
(defn ->synth-table-header-ui
"Constructor."
(^SynthTableHeaderUI []
(new SynthTableHeaderUI )))
(defn *create-ui
"Creates a new UI object for the given component.
h - component to create UI object for - `javax.swing.JComponent`
returns: the UI object - `javax.swing.plaf.ComponentUI`"
(^javax.swing.plaf.ComponentUI [^javax.swing.JComponent h]
(SynthTableHeaderUI/createUI h)))
(defn update
"Notifies this UI delegate to repaint the specified component.
This method paints the component background, then calls
the paint(SynthContext,Graphics) method.
In general, this method does not need to be overridden by subclasses.
All Look and Feel rendering code should reside in the paint method.
g - the Graphics object used for painting - `java.awt.Graphics`
c - the component being painted - `javax.swing.JComponent`"
([^SynthTableHeaderUI this ^java.awt.Graphics g ^javax.swing.JComponent c]
(-> this (.update g c))))
(defn paint
"Paints the specified component according to the Look and Feel.
This method is not used by Synth Look and Feel.
Painting is handled by the paint(SynthContext,Graphics) method.
g - the Graphics object used for painting - `java.awt.Graphics`
c - the component being painted - `javax.swing.JComponent`"
([^SynthTableHeaderUI this ^java.awt.Graphics g ^javax.swing.JComponent c]
(-> this (.paint g c))))
(defn paint-border
"Paints the border.
context - a component context - `javax.swing.plaf.synth.SynthContext`
g - Graphics to paint on - `java.awt.Graphics`
x - the X coordinate - `int`
y - the Y coordinate - `int`
w - width of the border - `int`
h - height of the border - `int`"
([^SynthTableHeaderUI this ^javax.swing.plaf.synth.SynthContext context ^java.awt.Graphics g ^Integer x ^Integer y ^Integer w ^Integer h]
(-> this (.paintBorder context g x y w h))))
(defn get-context
"Returns the Context for the specified component.
c - Component requesting SynthContext. - `javax.swing.JComponent`
returns: SynthContext describing component. - `javax.swing.plaf.synth.SynthContext`"
(^javax.swing.plaf.synth.SynthContext [^SynthTableHeaderUI this ^javax.swing.JComponent c]
(-> this (.getContext c))))
(defn property-change
"This method gets called when a bound property is changed.
evt - A PropertyChangeEvent object describing the event source and the property that has changed. - `java.beans.PropertyChangeEvent`"
([^SynthTableHeaderUI this ^java.beans.PropertyChangeEvent evt]
(-> this (.propertyChange evt))))
|
|
8035e7ad0ad59487c87a3fdbfa6a7f5e2ed3e2ae53d54ad2deed76dfb4bdbcc0
|
Decentralized-Pictures/T4L3NT
|
time_repr.ml
|
(*****************************************************************************)
(* *)
(* Open Source License *)
(* Copyright (c) 2018 Dynamic Ledger Solutions, Inc. < > *)
(* *)
(* Permission is hereby granted, free of charge, to any person obtaining a *)
(* copy of this software and associated documentation files (the "Software"),*)
(* to deal in the Software without restriction, including without limitation *)
(* the rights to use, copy, modify, merge, publish, distribute, sublicense, *)
(* and/or sell copies of the Software, and to permit persons to whom the *)
(* Software is furnished to do so, subject to the following conditions: *)
(* *)
(* The above copyright notice and this permission notice shall be included *)
(* in all copies or substantial portions of the Software. *)
(* *)
(* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR *)
(* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, *)
(* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL *)
(* THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER*)
(* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING *)
(* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER *)
(* DEALINGS IN THE SOFTWARE. *)
(* *)
(*****************************************************************************)
include Time
type time = Time.t
type error += Timestamp_add (* `Permanent *)
type error += Timestamp_sub (* `Permanent *)
let () =
register_error_kind
`Permanent
~id:"timestamp_add"
~title:"Timestamp add"
~description:"Overflow when adding timestamps."
~pp:(fun ppf () -> Format.fprintf ppf "Overflow when adding timestamps.")
Data_encoding.empty
(function Timestamp_add -> Some () | _ -> None)
(fun () -> Timestamp_add) ;
register_error_kind
`Permanent
~id:"timestamp_sub"
~title:"Timestamp sub"
~description:"Subtracting timestamps resulted in negative period."
~pp:(fun ppf () ->
Format.fprintf ppf "Subtracting timestamps resulted in negative period.")
Data_encoding.empty
(function Timestamp_sub -> Some () | _ -> None)
(fun () -> Timestamp_sub)
let of_seconds_string s = Option.map Time.of_seconds (Int64.of_string_opt s)
let to_seconds_string s = Int64.to_string (to_seconds s)
let pp = pp_hum
let ( +? ) x y =
let span = Period_repr.to_seconds y in
let t64 = Time.add x span in
  (* As long as span and time representations are int64, we cannot overflow if
     x is negative. *)
if x < Time.of_seconds 0L then ok t64
else if t64 < Time.of_seconds 0L then error Timestamp_add
else ok t64
let ( -? ) x y =
record_trace Timestamp_sub (Period_repr.of_seconds (Time.diff x y))
let ( - ) x y =
Time.of_seconds Int64.(sub (Time.to_seconds x) (Period_repr.to_seconds y))
| null |
https://raw.githubusercontent.com/Decentralized-Pictures/T4L3NT/6d4d3edb2d73575384282ad5a633518cba3d29e3/src/proto_alpha/lib_protocol/time_repr.ml
|
ocaml
|
***************************************************************************
Open Source License
Permission is hereby granted, free of charge, to any person obtaining a
copy of this software and associated documentation files (the "Software"),
the rights to use, copy, modify, merge, publish, distribute, sublicense,
Software is furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included
in all copies or substantial portions of the Software.
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
DEALINGS IN THE SOFTWARE.
***************************************************************************
`Permanent
`Permanent
|
(* Copyright (c) 2018 Dynamic Ledger Solutions, Inc. < > *)
(* to deal in the Software without restriction, including without limitation *)
(* and/or sell copies of the Software, and to permit persons to whom the *)
(* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR *)
(* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING *)
include Time
type time = Time.t
let () =
register_error_kind
`Permanent
~id:"timestamp_add"
~title:"Timestamp add"
~description:"Overflow when adding timestamps."
~pp:(fun ppf () -> Format.fprintf ppf "Overflow when adding timestamps.")
Data_encoding.empty
(function Timestamp_add -> Some () | _ -> None)
(fun () -> Timestamp_add) ;
register_error_kind
`Permanent
~id:"timestamp_sub"
~title:"Timestamp sub"
~description:"Subtracting timestamps resulted in negative period."
~pp:(fun ppf () ->
Format.fprintf ppf "Subtracting timestamps resulted in negative period.")
Data_encoding.empty
(function Timestamp_sub -> Some () | _ -> None)
(fun () -> Timestamp_sub)
let of_seconds_string s = Option.map Time.of_seconds (Int64.of_string_opt s)
let to_seconds_string s = Int64.to_string (to_seconds s)
let pp = pp_hum
let ( +? ) x y =
let span = Period_repr.to_seconds y in
let t64 = Time.add x span in
  (* As long as span and time representations are int64, we cannot overflow if
     x is negative. *)
if x < Time.of_seconds 0L then ok t64
else if t64 < Time.of_seconds 0L then error Timestamp_add
else ok t64
let ( -? ) x y =
record_trace Timestamp_sub (Period_repr.of_seconds (Time.diff x y))
let ( - ) x y =
Time.of_seconds Int64.(sub (Time.to_seconds x) (Period_repr.to_seconds y))
|
e07730aed36ed7755af05e1d987582accde15f4c7d2e12614f5633f7d5cb9230
|
wenkokke/fgg-gen
|
Base.hs
|
{-# OPTIONS -fno-warn-partial-type-signatures #-}
{-# LANGUAGE AllowAmbiguousTypes #-}
{-# LANGUAGE EmptyDataDeriving #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE GADTs #-}
{-# LANGUAGE PatternSynonyms #-}
{-# LANGUAGE PartialTypeSignatures #-}
{-# LANGUAGE RankNTypes #-}
{-# LANGUAGE ScopedTypeVariables #-}
{-# LANGUAGE StandaloneDeriving #-}
{-# LANGUAGE TypeOperators #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE TypeApplications #-}
module Language.FGG.DeBruijn.Base where
import Control.Enumerable
import Data.Bifunctor
import Language.FGG.Common
-- ** Programs
data Prog ann
= FDecls ann (FDecls ann Z)
deriving (Typeable)
instance Enumerable (Prog ()) where
enumerate = share $ aconcat
[ c1 $ FDecls ()
]
data FDecls ann f
= NewF ann (FDecls ann (S f))
| MDecls ann (MDecls ann f Z)
deriving (Typeable)
instance ( Enumerable f
) => Enumerable (FDecls () f) where
enumerate = share $ aconcat
[ pay . c1 $ NewF ()
, c1 $ MDecls ()
]
data MDecls ann f m
= NewM ann (MDecls ann f (S m))
| TyDecls ann (TyDecls ann Z Z f m)
deriving (Typeable)
instance ( Enumerable f
, Enumerable m
) => Enumerable (MDecls () f m) where
enumerate = share $ aconcat
[ pay . c1 $ NewM ()
, c1 $ TyDecls ()
]
-- ** Type declarations
data TyDecls ann ts ti f m
= forall a n.
(Fin a) =>
LetStruct
ann
(Vec (Type Z ts ti) a) -- ^ The bounds of the parameters
(Vec (f, Type a ts ti) n) -- ^ Field types
(TyDecls ann (S ts) ti f m)
| forall a n p.
(Fin a) =>
LetInterface
ann
(Vec (Type Z ts ti) a) -- ^ The bounds of the type parameters
(Vec (Type a ts ti) p) -- ^ Parent interfaces
(Vec (m, MSig a ts (S ti)) n) -- ^ Method signatures
(TyDecls ann ts (S ti) f m)
| TmDecls ann (TmDecls ann ts ti f m)
deriving (Typeable)
instance ( Enumerable ts
, Enumerable ti
, Enumerable f
, Enumerable m
) => Enumerable (TyDecls () ts ti f m) where
enumerate = share $ aconcat
[ -- * Structures
pay . c3 $ LetStruct @() @ts @ti @f @m @Z @Z ()
, pay . c3 $ LetStruct @() @ts @ti @f @m @(S Z) @Z ()
, pay . c3 $ LetStruct @() @ts @ti @f @m @(S (S Z)) @Z ()
, pay . c3 $ LetStruct @() @ts @ti @f @m @Z @(S Z) ()
, pay . c3 $ LetStruct @() @ts @ti @f @m @(S Z) @(S Z) ()
, pay . c3 $ LetStruct @() @ts @ti @f @m @(S (S Z)) @(S Z) ()
, pay . c3 $ LetStruct @() @ts @ti @f @m @Z @(S (S Z)) ()
, pay . c3 $ LetStruct @() @ts @ti @f @m @(S Z) @(S (S Z)) ()
, pay . c3 $ LetStruct @() @ts @ti @f @m @(S (S Z)) @(S (S Z)) ()
, -- * Interfaces
pay . c4 $ LetInterface @() @ts @ti @f @m @Z @Z @Z ()
, pay . c4 $ LetInterface @() @ts @ti @f @m @(S Z) @Z @Z ()
, pay . c4 $ LetInterface @() @ts @ti @f @m @(S (S Z)) @Z @Z ()
, pay . c4 $ LetInterface @() @ts @ti @f @m @Z @(S Z) @Z ()
, pay . c4 $ LetInterface @() @ts @ti @f @m @(S Z) @(S Z) @Z ()
, pay . c4 $ LetInterface @() @ts @ti @f @m @(S (S Z)) @(S Z) @Z ()
, pay . c4 $ LetInterface @() @ts @ti @f @m @Z @(S (S Z)) @Z ()
, pay . c4 $ LetInterface @() @ts @ti @f @m @(S Z) @(S (S Z)) @Z ()
, pay . c4 $ LetInterface @() @ts @ti @f @m @(S (S Z)) @(S (S Z)) @Z ()
, pay . c4 $ LetInterface @() @ts @ti @f @m @Z @Z @(S Z) ()
, pay . c4 $ LetInterface @() @ts @ti @f @m @(S Z) @Z @(S Z) ()
, pay . c4 $ LetInterface @() @ts @ti @f @m @(S (S Z)) @Z @(S Z) ()
, pay . c4 $ LetInterface @() @ts @ti @f @m @Z @(S Z) @(S Z) ()
, pay . c4 $ LetInterface @() @ts @ti @f @m @(S Z) @(S Z) @(S Z) ()
, pay . c4 $ LetInterface @() @ts @ti @f @m @(S (S Z)) @(S Z) @(S Z) ()
, pay . c4 $ LetInterface @() @ts @ti @f @m @Z @(S (S Z)) @(S Z) ()
, pay . c4 $ LetInterface @() @ts @ti @f @m @(S Z) @(S (S Z)) @(S Z) ()
, pay . c4 $ LetInterface @() @ts @ti @f @m @(S (S Z)) @(S (S Z)) @(S Z) ()
, pay . c4 $ LetInterface @() @ts @ti @f @m @Z @Z @(S (S Z)) ()
, pay . c4 $ LetInterface @() @ts @ti @f @m @(S Z) @Z @(S (S Z)) ()
, pay . c4 $ LetInterface @() @ts @ti @f @m @(S (S Z)) @Z @(S (S Z)) ()
, pay . c4 $ LetInterface @() @ts @ti @f @m @Z @(S Z) @(S (S Z)) ()
, pay . c4 $ LetInterface @() @ts @ti @f @m @(S Z) @(S Z) @(S (S Z)) ()
, pay . c4 $ LetInterface @() @ts @ti @f @m @(S (S Z)) @(S Z) @(S (S Z)) ()
, pay . c4 $ LetInterface @() @ts @ti @f @m @Z @(S (S Z)) @(S (S Z)) ()
, pay . c4 $ LetInterface @() @ts @ti @f @m @(S Z) @(S (S Z)) @(S (S Z)) ()
, pay . c4 $ LetInterface @() @ts @ti @f @m @(S (S Z)) @(S (S Z)) @(S (S Z)) ()
, c1 $ TmDecls ()
]
-- ** Term declarations
data TmDecls ann ts ti f m
= forall a a' n.
(Fin a', Fin a, Fin (a' :+ a)) =>
LetMethod -- ^ Declaration of a method instance
ann
(Vec (Type Z ts ti) a) -- ^ The bounds of the type parameters
ts -- ^ The struct name
m -- ^ The method which is declared
(Vec (Type a ts ti) a') -- ^ The bounds of the type parameters
(Vec (Type (a' :+ a) ts ti) n) -- ^ The types of the arguments
(Type (a' :+ a) ts ti) -- ^ The return type
(Expr ann (a' :+ a) ts ti f m (S n)) -- ^ The method body
(TmDecls ann ts ti f m)
| Main -- ^ The main function
ann
(Type Z ts ti)
(Expr ann Z ts ti f m Z)
deriving (Typeable)
instance ( Enumerable ts
, Enumerable ti
, Enumerable f
, Enumerable m
) => Enumerable (TmDecls () ts ti f m) where
enumerate = share $ aconcat
[ pay . c8 $ LetMethod @() @ts @ti @f @m @Z @Z @Z ()
, pay . c8 $ LetMethod @() @ts @ti @f @m @(S Z) @Z @Z ()
, pay . c8 $ LetMethod @() @ts @ti @f @m @(S (S Z)) @Z @Z ()
, pay . c8 $ LetMethod @() @ts @ti @f @m @Z @(S Z) @Z ()
, pay . c8 $ LetMethod @() @ts @ti @f @m @(S Z) @(S Z) @Z ()
, pay . c8 $ LetMethod @() @ts @ti @f @m @Z @(S (S Z)) @Z ()
, pay . c8 $ LetMethod @() @ts @ti @f @m @Z @Z @(S Z) ()
, pay . c8 $ LetMethod @() @ts @ti @f @m @(S Z) @Z @(S Z) ()
, pay . c8 $ LetMethod @() @ts @ti @f @m @(S (S Z)) @Z @(S Z) ()
, pay . c8 $ LetMethod @() @ts @ti @f @m @Z @(S Z) @(S Z) ()
, pay . c8 $ LetMethod @() @ts @ti @f @m @(S Z) @(S Z) @(S Z) ()
, pay . c8 $ LetMethod @() @ts @ti @f @m @Z @(S (S Z)) @(S Z) ()
, pay . c8 $ LetMethod @() @ts @ti @f @m @Z @Z @(S (S Z)) ()
, pay . c8 $ LetMethod @() @ts @ti @f @m @(S Z) @Z @(S (S Z)) ()
, pay . c8 $ LetMethod @() @ts @ti @f @m @(S (S Z)) @Z @(S (S Z)) ()
, pay . c8 $ LetMethod @() @ts @ti @f @m @Z @(S Z) @(S (S Z)) ()
, pay . c8 $ LetMethod @() @ts @ti @f @m @(S Z) @(S Z) @(S (S Z)) ()
, pay . c8 $ LetMethod @() @ts @ti @f @m @Z @(S (S Z)) @(S (S Z)) ()
, c2 $ Main ()
]
-- ** Expressions
data Expr ann a ts ti f m x
= Var
ann
x -- ^ Variable name
| forall n1 n2.
Struct
ann
ts -- ^ Struct name
(Vec (Type a ts ti) n1) -- ^ Type parameters
(Vec (Expr ann a ts ti f m x, Type a ts ti) n2) -- ^ Struct arguments
| Select
ann
(Expr ann a ts ti f m x) -- ^ Struct
(Type a ts ti) -- ^ Struct type
f -- ^ Field name
| forall n1 n2.
Call
ann
(Expr ann a ts ti f m x) -- ^ Object
(Type a ts ti) -- ^ Object type
m -- ^ Method name
(Vec (Type a ts ti) n1) -- ^ Type parameters
(Vec (Expr ann a ts ti f m x, Type a ts ti) n2) -- ^ Method arguments
| Assert
ann
(Expr ann a ts ti f m x) -- ^ Expression
(Type a ts ti) -- ^ Expression type
(Type a ts ti) -- ^ Asserted type
deriving (Typeable)
instance ( Enumerable a
, Enumerable ts
, Enumerable ti
, Enumerable f
, Enumerable m
, Enumerable x
) => Enumerable (Expr () a ts ti f m x) where
enumerate = share $ aconcat
[ pay . c1 $ Var ()
, pay . c3 $ Struct @() @a @ts @ti @f @m @x @Z @Z ()
, pay . c3 $ Struct @() @a @ts @ti @f @m @x @(S Z) @Z ()
, pay . c3 $ Struct @() @a @ts @ti @f @m @x @(S (S Z)) @Z ()
, pay . c3 $ Struct @() @a @ts @ti @f @m @x @Z @(S Z) ()
, pay . c3 $ Struct @() @a @ts @ti @f @m @x @(S Z) @(S Z) ()
, pay . c3 $ Struct @() @a @ts @ti @f @m @x @(S (S Z)) @(S Z) ()
, pay . c3 $ Struct @() @a @ts @ti @f @m @x @Z @(S (S Z)) ()
, pay . c3 $ Struct @() @a @ts @ti @f @m @x @(S Z) @(S (S Z)) ()
, pay . c3 $ Struct @() @a @ts @ti @f @m @x @(S (S Z)) @(S (S Z)) ()
, pay . c3 $ Select ()
, pay . c5 $ Call @() @a @ts @ti @f @m @x @Z @Z ()
, pay . c5 $ Call @() @a @ts @ti @f @m @x @(S Z) @Z ()
, pay . c5 $ Call @() @a @ts @ti @f @m @x @(S (S Z)) @Z ()
, pay . c5 $ Call @() @a @ts @ti @f @m @x @Z @(S Z) ()
, pay . c5 $ Call @() @a @ts @ti @f @m @x @(S Z) @(S Z) ()
, pay . c5 $ Call @() @a @ts @ti @f @m @x @(S (S Z)) @(S Z) ()
, pay . c5 $ Call @() @a @ts @ti @f @m @x @Z @(S (S Z)) ()
, pay . c5 $ Call @() @a @ts @ti @f @m @x @(S Z) @(S (S Z)) ()
, pay . c5 $ Call @() @a @ts @ti @f @m @x @(S (S Z)) @(S (S Z)) ()
, pay . c3 $ Assert ()
]
-- ** Types
data Type a ts ti
= Par
a
| forall a'.
(Fin a') =>
Con
(TyCon ts ti)
(Vec (Type a ts ti) a')
deriving (Typeable)
deriving instance (Show a, Show ts, Show ti) => Show (Type a ts ti)
instance ( Eq a
, Eq ts
, Eq ti
) => Eq (Type a ts ti) where
Par a1 == Par a2 = a1 == a2
Par _ == Con _ _ = False
Con _ _ == Par _ = False
Con tc1 args1 == Con tc2 args2
| tc1 == tc2 =
case vlength args1 `decEq` vlength args2 of
Nothing -> False
Just Refl -> args1 == args2
| otherwise = False
instance ( Ord a
, Ord ts
, Ord ti
) => Ord (Type a ts ti) where
compare (Par a1) (Par a2) = compare a1 a2
compare (Par _ ) (Con _ _) = LT
compare (Con _ _) (Par _) = GT
compare (Con tc1 args1) (Con tc2 args2) =
case compare tc1 tc2 of
EQ -> compare (vlist args1) (vlist args2)
ord -> ord
instance ( Enumerable ts
, Enumerable ti
, Enumerable a
) => Enumerable (Type a ts ti) where
enumerate = share $ aconcat
[ pay . c2 $ Con @a @ts @ti @Z
, pay . c2 $ Con @a @ts @ti @(S Z)
, pay . c2 $ Con @a @ts @ti @(S (S Z))
, pay . c1 $ Par
]
instance Bifunctor (Type a) where
bimap _ _ (Par a) = Par a
bimap f g (Con tc args) = Con (bimap f g tc) (vmap (bimap f g) args)
-- |Map over the parameter argument of types.
mapPar :: (a -> b) -> Type a ts ti -> Type b ts ti
mapPar f (Par a) = Par (f a)
mapPar f (Con tc args) = Con tc (vmap (mapPar f) args)
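-- Note that 'mapPar FS' shifts every type parameter one binder up; this is
-- how 'raiseSubst' below pushes a substitution under extra type binders.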
-- ** Type signatures
data SSig a ts ti
= forall n.
SSig
(Vec (Type a ts ti) n)
deriving (Typeable)
instance Bifunctor (SSig a) where
bimap f g (SSig argTys)
= SSig (vmap (bimap f g) argTys)
data ISig a ts ti
= ISig
deriving (Typeable)
instance Bifunctor (ISig a) where
bimap _ _ ISig
= ISig
newtype FSig a ts ti
= FSig
(Type a ts ti)
deriving (Typeable)
instance Bifunctor (FSig a) where
bimap f g (FSig retTy)
= FSig (bimap f g retTy)
data MSig a ts ti
= forall a' n.
(Fin a, Fin (a' :+ a)) =>
MSig
(Vec (Type a ts ti) a') -- ^ Type parameter bounds
(TyCon ts ti) -- ^ Object type
(Vec (Type (a' :+ a) ts ti) n) -- ^ Arguments types
(Type (a' :+ a) ts ti) -- ^ Return type
deriving (Typeable)
instance ( Fin a
, Enumerable a
, Enumerable ts
, Enumerable ti
) => Enumerable (MSig a ts ti) where
enumerate = share $ aconcat
[ c4 . plusFin @_ @a Z $ MSig @a @ts @ti @Z @Z
, c4 . plusFin @_ @a Z $ MSig @a @ts @ti @(S Z) @Z
, c4 . plusFin @_ @a Z $ MSig @a @ts @ti @(S (S Z)) @Z
, c4 . plusFin @_ @a (S Z) $ MSig @a @ts @ti @Z @(S Z)
, c4 . plusFin @_ @a (S Z) $ MSig @a @ts @ti @(S Z) @(S Z)
, c4 . plusFin @_ @a (S Z) $ MSig @a @ts @ti @(S (S Z)) @(S Z)
, c4 . plusFin @_ @a (S (S Z)) $ MSig @a @ts @ti @Z @(S (S Z))
, c4 . plusFin @_ @a (S (S Z)) $ MSig @a @ts @ti @(S Z) @(S (S Z))
, c4 . plusFin @_ @a (S (S Z)) $ MSig @a @ts @ti @(S (S Z)) @(S (S Z))
]
instance Bifunctor (MSig a) where
bimap f g (MSig parBnds objTy argTys retTy)
= MSig
(vmap (bimap f g) parBnds)
(bimap f g objTy)
(vmap (bimap f g) argTys)
(bimap f g retTy)
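-- |A signature bundled with the bounds of its type parameters.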
data TySig sig ts ti
= forall a.
(Fin a) =>
TySig
(Vec (Type Z ts ti) a)
(sig a ts ti)
instance Bifunctor (TySig SSig) where
bimap f g (TySig parBnds ssig)
= TySig (vmap (bimap f g) parBnds) (bimap f g ssig)
instance Bifunctor (TySig ISig) where
bimap f g (TySig parBnds isig)
= TySig (vmap (bimap f g) parBnds) (bimap f g isig)
instance Bifunctor (TySig FSig) where
bimap f g (TySig parBnds fsig)
= TySig (vmap (bimap f g) parBnds) (bimap f g fsig)
instance Bifunctor (TySig MSig) where
bimap f g (TySig parBnds msig)
= TySig (vmap (bimap f g) parBnds) (bimap f g msig)
-- ** Substitution for type parameters
-- |Simultaneous substitutions for type parameters in types.
substType :: (a -> Type b ts ti) -> Type a ts ti -> Type b ts ti
substType s = go
where
go (Par a) = s a
go (Con tc args) = Con tc (vmap go args)
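-- A small usage sketch (illustrative only, not part of the original module):
-- every parameter is replaced via a plain function. 'substTypeDemo' is a
-- hypothetical name introduced here and relies only on 'Par' and 'substType'.
substTypeDemo :: Type String ts ti -> Type Int ts ti
substTypeDemo = substType (Par . length)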
-- |Simultaneous substitutions for type parameters in method signatures.
substMethSig :: forall a b ts ti.
(Fin b)
=> (a -> Type b ts ti)
-> MSig a ts ti
-> MSig b ts ti
substMethSig s (MSig (parBnds :: Vec _ a') objTy (argTys :: Vec _ n) retTy)
= plusFin @_ @b a' $ MSig parBnds' objTy argTys' retTy'
where
a' :: Nat a'
a' = vlength parBnds
parBnds' :: Vec (Type b ts ti) a'
parBnds' = vmap (substType s) parBnds
s' :: a' :+ a -> Type (a' :+ b) ts ti
s' = raiseSubst a' s
argTys' :: Vec (Type (a' :+ b) ts ti) n
argTys' = vmap (substType s') argTys
retTy' :: Type (a' :+ b) ts ti
retTy' = substType s' retTy
-- |Raise substitutions.
raiseSubst :: forall a a' b ts ti.
Nat a'
-> (a -> Type b ts ti)
-> ((a' :+ a) -> Type (a' :+ b) ts ti)
raiseSubst Z s i = s i
raiseSubst (S _ ) _ FZ = Par FZ
raiseSubst (S a') s (FS i) = mapPar FS (raiseSubst a' s i)
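-- A worked instance of the equations above (assuming the usual de Bruijn-style
-- 'FZ'/'FS' constructors from Language.FGG.Common): with one extra binder,
--   raiseSubst (S Z) s FZ     = Par FZ           -- the newly bound parameter
--   raiseSubst (S Z) s (FS i) = mapPar FS (s i)  -- old parameters are shifted up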
-- |Simultaneous substitutions for type parameters in expressions.
substExpr :: (a -> Type b ts ti) -> Expr ann a ts ti f m x -> Expr ann b ts ti f m x
substExpr s = go1
where
go2
= substType s
go1 (Var ann x)
= Var ann x
go1 (Struct ann ts tyArgs args)
= Struct ann ts (vmap go2 tyArgs) (vmap (bimap go1 go2) args)
go1 (Select ann obj objTy f)
= Select ann (go1 obj) (go2 objTy) f
go1 (Call ann obj objTy m tyArgs args)
= Call ann (go1 obj) (go2 objTy) m (vmap go2 tyArgs) (vmap (bimap go1 go2) args)
go1 (Assert ann obj objTy assTy)
= Assert ann (go1 obj) (go2 objTy) (go2 assTy)
-- * Extension to the combinators from Control.Enumerable
c8 :: ( Enumerable a
, Enumerable b
, Enumerable c
, Enumerable d
, Enumerable e
, Enumerable g
, Enumerable h
, Enumerable i
, Sized f
, Typeable f
) => (a -> b -> c -> d -> e -> g -> h -> i -> x) -> Shareable f x
c8 f = c7 (uncurry f)
c9 :: ( Enumerable a
, Enumerable b
, Enumerable c
, Enumerable d
, Enumerable e
, Enumerable g
, Enumerable h
, Enumerable i
, Enumerable j
, Sized f
, Typeable f
) => (a -> b -> c -> d -> e -> g -> h -> i -> j -> x) -> Shareable f x
c9 f = c8 (uncurry f)
c10 :: ( Enumerable a
, Enumerable b
, Enumerable c
, Enumerable d
, Enumerable e
, Enumerable g
, Enumerable h
, Enumerable i
, Enumerable j
, Enumerable k
, Sized f
, Typeable f
) => (a -> b -> c -> d -> e -> g -> h -> i -> j -> k -> x) -> Shareable f x
c10 f = c9 (uncurry f)
c11 :: ( Enumerable a
, Enumerable b
, Enumerable c
, Enumerable d
, Enumerable e
, Enumerable g
, Enumerable h
, Enumerable i
, Enumerable j
, Enumerable k
, Enumerable l
, Sized f
, Typeable f
) => (a -> b -> c -> d -> e -> g -> h -> i -> j -> k -> l -> x) -> Shareable f x
c11 f = c10 (uncurry f)
-- -}
-- -}
-- -}
-- -}
-- -}
| null |
https://raw.githubusercontent.com/wenkokke/fgg-gen/2beadebaf7d46b947c1deb7b6e4b2c0084251343/src/Language/FGG/DeBruijn/Base.hs
|
haskell
|
# OPTIONS -fno-warn-partial-type-signatures #
# LANGUAGE EmptyDataDeriving #
# LANGUAGE FlexibleContexts #
# LANGUAGE PatternSynonyms #
# LANGUAGE RankNTypes #
# LANGUAGE ScopedTypeVariables #
# LANGUAGE StandaloneDeriving #
# LANGUAGE TypeOperators #
** Programs
** Type declarations
^ The bounds of the parameters
^ Field types
^ The bounds of the type parameters
^ Parent interfaces
^ Method signatures
* Structures
* Interfaces
** Term declarations
^ Declaration of a method instance
^ The bounds of the type parameters
^ The struct name
^ The method which is declared
^ The bounds of the type parameters
^ The types of the arguments
^ The return type
^ The method body
^ The main function
** Expressions
^ Variable name
^ Struct name
^ Type parameters
^ Struct arguments
^ Struct
^ Struct type
^ Field name
^ Object
^ Object type
^ Method name
^ Type parameters
^ Method arguments
^ Expression
^ Expression type
^ Asserted type
** Types
|Map over the parameter argument of types.
** Type signatures
^ Type parameter bounds
^ Object type
^ Arguments types
^ Return type
** Substitution for type parameters
|Simultaneous substitutions for type parameters in types.
|Simultaneous substitutions for type parameters in method signatures.
|Raise substitutions.
|Simultaneous substitutions for type parameters in expressions.
-}
-}
-}
-}
-}
|
{-# LANGUAGE AllowAmbiguousTypes #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE GADTs #-}
{-# LANGUAGE PartialTypeSignatures #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE TypeApplications #-}
module Language.FGG.DeBruijn.Base where
import Control.Enumerable
import Data.Bifunctor
import Language.FGG.Common
data Prog ann
= FDecls ann (FDecls ann Z)
deriving (Typeable)
instance Enumerable (Prog ()) where
enumerate = share $ aconcat
[ c1 $ FDecls ()
]
data FDecls ann f
= NewF ann (FDecls ann (S f))
| MDecls ann (MDecls ann f Z)
deriving (Typeable)
instance ( Enumerable f
) => Enumerable (FDecls () f) where
enumerate = share $ aconcat
[ pay . c1 $ NewF ()
, c1 $ MDecls ()
]
data MDecls ann f m
= NewM ann (MDecls ann f (S m))
| TyDecls ann (TyDecls ann Z Z f m)
deriving (Typeable)
instance ( Enumerable f
, Enumerable m
) => Enumerable (MDecls () f m) where
enumerate = share $ aconcat
[ pay . c1 $ NewM ()
, c1 $ TyDecls ()
]
data TyDecls ann ts ti f m
= forall a n.
(Fin a) =>
LetStruct
ann
(TyDecls ann (S ts) ti f m)
| forall a n p.
(Fin a) =>
LetInterface
ann
(TyDecls ann ts (S ti) f m)
| TmDecls ann (TmDecls ann ts ti f m)
deriving (Typeable)
instance ( Enumerable ts
, Enumerable ti
, Enumerable f
, Enumerable m
) => Enumerable (TyDecls () ts ti f m) where
enumerate = share $ aconcat
    [ pay . c3 $ LetStruct @() @ts @ti @f @m @Z @Z ()
, pay . c3 $ LetStruct @() @ts @ti @f @m @(S Z) @Z ()
, pay . c3 $ LetStruct @() @ts @ti @f @m @(S (S Z)) @Z ()
, pay . c3 $ LetStruct @() @ts @ti @f @m @Z @(S Z) ()
, pay . c3 $ LetStruct @() @ts @ti @f @m @(S Z) @(S Z) ()
, pay . c3 $ LetStruct @() @ts @ti @f @m @(S (S Z)) @(S Z) ()
, pay . c3 $ LetStruct @() @ts @ti @f @m @Z @(S (S Z)) ()
, pay . c3 $ LetStruct @() @ts @ti @f @m @(S Z) @(S (S Z)) ()
, pay . c3 $ LetStruct @() @ts @ti @f @m @(S (S Z)) @(S (S Z)) ()
    , pay . c4 $ LetInterface @() @ts @ti @f @m @Z @Z @Z ()
, pay . c4 $ LetInterface @() @ts @ti @f @m @(S Z) @Z @Z ()
, pay . c4 $ LetInterface @() @ts @ti @f @m @(S (S Z)) @Z @Z ()
, pay . c4 $ LetInterface @() @ts @ti @f @m @Z @(S Z) @Z ()
, pay . c4 $ LetInterface @() @ts @ti @f @m @(S Z) @(S Z) @Z ()
, pay . c4 $ LetInterface @() @ts @ti @f @m @(S (S Z)) @(S Z) @Z ()
, pay . c4 $ LetInterface @() @ts @ti @f @m @Z @(S (S Z)) @Z ()
, pay . c4 $ LetInterface @() @ts @ti @f @m @(S Z) @(S (S Z)) @Z ()
, pay . c4 $ LetInterface @() @ts @ti @f @m @(S (S Z)) @(S (S Z)) @Z ()
, pay . c4 $ LetInterface @() @ts @ti @f @m @Z @Z @(S Z) ()
, pay . c4 $ LetInterface @() @ts @ti @f @m @(S Z) @Z @(S Z) ()
, pay . c4 $ LetInterface @() @ts @ti @f @m @(S (S Z)) @Z @(S Z) ()
, pay . c4 $ LetInterface @() @ts @ti @f @m @Z @(S Z) @(S Z) ()
, pay . c4 $ LetInterface @() @ts @ti @f @m @(S Z) @(S Z) @(S Z) ()
, pay . c4 $ LetInterface @() @ts @ti @f @m @(S (S Z)) @(S Z) @(S Z) ()
, pay . c4 $ LetInterface @() @ts @ti @f @m @Z @(S (S Z)) @(S Z) ()
, pay . c4 $ LetInterface @() @ts @ti @f @m @(S Z) @(S (S Z)) @(S Z) ()
, pay . c4 $ LetInterface @() @ts @ti @f @m @(S (S Z)) @(S (S Z)) @(S Z) ()
, pay . c4 $ LetInterface @() @ts @ti @f @m @Z @Z @(S (S Z)) ()
, pay . c4 $ LetInterface @() @ts @ti @f @m @(S Z) @Z @(S (S Z)) ()
, pay . c4 $ LetInterface @() @ts @ti @f @m @(S (S Z)) @Z @(S (S Z)) ()
, pay . c4 $ LetInterface @() @ts @ti @f @m @Z @(S Z) @(S (S Z)) ()
, pay . c4 $ LetInterface @() @ts @ti @f @m @(S Z) @(S Z) @(S (S Z)) ()
, pay . c4 $ LetInterface @() @ts @ti @f @m @(S (S Z)) @(S Z) @(S (S Z)) ()
, pay . c4 $ LetInterface @() @ts @ti @f @m @Z @(S (S Z)) @(S (S Z)) ()
, pay . c4 $ LetInterface @() @ts @ti @f @m @(S Z) @(S (S Z)) @(S (S Z)) ()
, pay . c4 $ LetInterface @() @ts @ti @f @m @(S (S Z)) @(S (S Z)) @(S (S Z)) ()
, c1 $ TmDecls ()
]
data TmDecls ann ts ti f m
= forall a a' n.
(Fin a', Fin a, Fin (a' :+ a)) =>
ann
(TmDecls ann ts ti f m)
ann
(Type Z ts ti)
(Expr ann Z ts ti f m Z)
deriving (Typeable)
instance ( Enumerable ts
, Enumerable ti
, Enumerable f
, Enumerable m
) => Enumerable (TmDecls () ts ti f m) where
enumerate = share $ aconcat
[ pay . c8 $ LetMethod @() @ts @ti @f @m @Z @Z @Z ()
, pay . c8 $ LetMethod @() @ts @ti @f @m @(S Z) @Z @Z ()
, pay . c8 $ LetMethod @() @ts @ti @f @m @(S (S Z)) @Z @Z ()
, pay . c8 $ LetMethod @() @ts @ti @f @m @Z @(S Z) @Z ()
, pay . c8 $ LetMethod @() @ts @ti @f @m @(S Z) @(S Z) @Z ()
, pay . c8 $ LetMethod @() @ts @ti @f @m @Z @(S (S Z)) @Z ()
, pay . c8 $ LetMethod @() @ts @ti @f @m @Z @Z @(S Z) ()
, pay . c8 $ LetMethod @() @ts @ti @f @m @(S Z) @Z @(S Z) ()
, pay . c8 $ LetMethod @() @ts @ti @f @m @(S (S Z)) @Z @(S Z) ()
, pay . c8 $ LetMethod @() @ts @ti @f @m @Z @(S Z) @(S Z) ()
, pay . c8 $ LetMethod @() @ts @ti @f @m @(S Z) @(S Z) @(S Z) ()
, pay . c8 $ LetMethod @() @ts @ti @f @m @Z @(S (S Z)) @(S Z) ()
, pay . c8 $ LetMethod @() @ts @ti @f @m @Z @Z @(S (S Z)) ()
, pay . c8 $ LetMethod @() @ts @ti @f @m @(S Z) @Z @(S (S Z)) ()
, pay . c8 $ LetMethod @() @ts @ti @f @m @(S (S Z)) @Z @(S (S Z)) ()
, pay . c8 $ LetMethod @() @ts @ti @f @m @Z @(S Z) @(S (S Z)) ()
, pay . c8 $ LetMethod @() @ts @ti @f @m @(S Z) @(S Z) @(S (S Z)) ()
, pay . c8 $ LetMethod @() @ts @ti @f @m @Z @(S (S Z)) @(S (S Z)) ()
, c2 $ Main ()
]
data Expr ann a ts ti f m x
= Var
ann
| forall n1 n2.
Struct
ann
| Select
ann
| forall n1 n2.
Call
ann
| Assert
ann
deriving (Typeable)
instance ( Enumerable a
, Enumerable ts
, Enumerable ti
, Enumerable f
, Enumerable m
, Enumerable x
) => Enumerable (Expr () a ts ti f m x) where
enumerate = share $ aconcat
[ pay . c1 $ Var ()
, pay . c3 $ Struct @() @a @ts @ti @f @m @x @Z @Z ()
, pay . c3 $ Struct @() @a @ts @ti @f @m @x @(S Z) @Z ()
, pay . c3 $ Struct @() @a @ts @ti @f @m @x @(S (S Z)) @Z ()
, pay . c3 $ Struct @() @a @ts @ti @f @m @x @Z @(S Z) ()
, pay . c3 $ Struct @() @a @ts @ti @f @m @x @(S Z) @(S Z) ()
, pay . c3 $ Struct @() @a @ts @ti @f @m @x @(S (S Z)) @(S Z) ()
, pay . c3 $ Struct @() @a @ts @ti @f @m @x @Z @(S (S Z)) ()
, pay . c3 $ Struct @() @a @ts @ti @f @m @x @(S Z) @(S (S Z)) ()
, pay . c3 $ Struct @() @a @ts @ti @f @m @x @(S (S Z)) @(S (S Z)) ()
, pay . c3 $ Select ()
, pay . c5 $ Call @() @a @ts @ti @f @m @x @Z @Z ()
, pay . c5 $ Call @() @a @ts @ti @f @m @x @(S Z) @Z ()
, pay . c5 $ Call @() @a @ts @ti @f @m @x @(S (S Z)) @Z ()
, pay . c5 $ Call @() @a @ts @ti @f @m @x @Z @(S Z) ()
, pay . c5 $ Call @() @a @ts @ti @f @m @x @(S Z) @(S Z) ()
, pay . c5 $ Call @() @a @ts @ti @f @m @x @(S (S Z)) @(S Z) ()
, pay . c5 $ Call @() @a @ts @ti @f @m @x @Z @(S (S Z)) ()
, pay . c5 $ Call @() @a @ts @ti @f @m @x @(S Z) @(S (S Z)) ()
, pay . c5 $ Call @() @a @ts @ti @f @m @x @(S (S Z)) @(S (S Z)) ()
, pay . c3 $ Assert ()
]
data Type a ts ti
= Par
a
| forall a'.
(Fin a') =>
Con
(TyCon ts ti)
(Vec (Type a ts ti) a')
deriving (Typeable)
deriving instance (Show a, Show ts, Show ti) => Show (Type a ts ti)
instance ( Eq a
, Eq ts
, Eq ti
) => Eq (Type a ts ti) where
Par a1 == Par a2 = a1 == a2
Par _ == Con _ _ = False
Con _ _ == Par _ = False
Con tc1 args1 == Con tc2 args2
| tc1 == tc2 =
case vlength args1 `decEq` vlength args2 of
Nothing -> False
Just Refl -> args1 == args2
| otherwise = False
instance ( Ord a
, Ord ts
, Ord ti
) => Ord (Type a ts ti) where
compare (Par a1) (Par a2) = compare a1 a2
compare (Par _ ) (Con _ _) = LT
compare (Con _ _) (Par _) = GT
compare (Con tc1 args1) (Con tc2 args2) =
case compare tc1 tc2 of
EQ -> compare (vlist args1) (vlist args2)
ord -> ord
instance ( Enumerable ts
, Enumerable ti
, Enumerable a
) => Enumerable (Type a ts ti) where
enumerate = share $ aconcat
[ pay . c2 $ Con @a @ts @ti @Z
, pay . c2 $ Con @a @ts @ti @(S Z)
, pay . c2 $ Con @a @ts @ti @(S (S Z))
, pay . c1 $ Par
]
instance Bifunctor (Type a) where
bimap _ _ (Par a) = Par a
bimap f g (Con tc args) = Con (bimap f g tc) (vmap (bimap f g) args)
mapPar :: (a -> b) -> Type a ts ti -> Type b ts ti
mapPar f (Par a) = Par (f a)
mapPar f (Con tc args) = Con tc (vmap (mapPar f) args)
data SSig a ts ti
= forall n.
SSig
(Vec (Type a ts ti) n)
deriving (Typeable)
instance Bifunctor (SSig a) where
bimap f g (SSig argTys)
= SSig (vmap (bimap f g) argTys)
data ISig a ts ti
= ISig
deriving (Typeable)
instance Bifunctor (ISig a) where
bimap _ _ ISig
= ISig
newtype FSig a ts ti
= FSig
(Type a ts ti)
deriving (Typeable)
instance Bifunctor (FSig a) where
bimap f g (FSig retTy)
= FSig (bimap f g retTy)
data MSig a ts ti
= forall a' n.
(Fin a, Fin (a' :+ a)) =>
MSig
deriving (Typeable)
instance ( Fin a
, Enumerable a
, Enumerable ts
, Enumerable ti
) => Enumerable (MSig a ts ti) where
enumerate = share $ aconcat
[ c4 . plusFin @_ @a Z $ MSig @a @ts @ti @Z @Z
, c4 . plusFin @_ @a Z $ MSig @a @ts @ti @(S Z) @Z
, c4 . plusFin @_ @a Z $ MSig @a @ts @ti @(S (S Z)) @Z
, c4 . plusFin @_ @a (S Z) $ MSig @a @ts @ti @Z @(S Z)
, c4 . plusFin @_ @a (S Z) $ MSig @a @ts @ti @(S Z) @(S Z)
, c4 . plusFin @_ @a (S Z) $ MSig @a @ts @ti @(S (S Z)) @(S Z)
, c4 . plusFin @_ @a (S (S Z)) $ MSig @a @ts @ti @Z @(S (S Z))
, c4 . plusFin @_ @a (S (S Z)) $ MSig @a @ts @ti @(S Z) @(S (S Z))
, c4 . plusFin @_ @a (S (S Z)) $ MSig @a @ts @ti @(S (S Z)) @(S (S Z))
]
instance Bifunctor (MSig a) where
bimap f g (MSig parBnds objTy argTys retTy)
= MSig
(vmap (bimap f g) parBnds)
(bimap f g objTy)
(vmap (bimap f g) argTys)
(bimap f g retTy)
data TySig sig ts ti
= forall a.
(Fin a) =>
TySig
(Vec (Type Z ts ti) a)
(sig a ts ti)
instance Bifunctor (TySig SSig) where
bimap f g (TySig parBnds ssig)
= TySig (vmap (bimap f g) parBnds) (bimap f g ssig)
instance Bifunctor (TySig ISig) where
bimap f g (TySig parBnds isig)
= TySig (vmap (bimap f g) parBnds) (bimap f g isig)
instance Bifunctor (TySig FSig) where
bimap f g (TySig parBnds fsig)
= TySig (vmap (bimap f g) parBnds) (bimap f g fsig)
instance Bifunctor (TySig MSig) where
bimap f g (TySig parBnds msig)
= TySig (vmap (bimap f g) parBnds) (bimap f g msig)
substType :: (a -> Type b ts ti) -> Type a ts ti -> Type b ts ti
substType s = go
where
go (Par a) = s a
go (Con tc args) = Con tc (vmap go args)
substMethSig :: forall a b ts ti.
(Fin b)
=> (a -> Type b ts ti)
-> MSig a ts ti
-> MSig b ts ti
substMethSig s (MSig (parBnds :: Vec _ a') objTy (argTys :: Vec _ n) retTy)
= plusFin @_ @b a' $ MSig parBnds' objTy argTys' retTy'
where
a' :: Nat a'
a' = vlength parBnds
parBnds' :: Vec (Type b ts ti) a'
parBnds' = vmap (substType s) parBnds
s' :: a' :+ a -> Type (a' :+ b) ts ti
s' = raiseSubst a' s
argTys' :: Vec (Type (a' :+ b) ts ti) n
argTys' = vmap (substType s') argTys
retTy' :: Type (a' :+ b) ts ti
retTy' = substType s' retTy
raiseSubst :: forall a a' b ts ti.
Nat a'
-> (a -> Type b ts ti)
-> ((a' :+ a) -> Type (a' :+ b) ts ti)
raiseSubst Z s i = s i
raiseSubst (S _ ) _ FZ = Par FZ
raiseSubst (S a') s (FS i) = mapPar FS (raiseSubst a' s i)
substExpr :: (a -> Type b ts ti) -> Expr ann a ts ti f m x -> Expr ann b ts ti f m x
substExpr s = go1
where
go2
= substType s
go1 (Var ann x)
= Var ann x
go1 (Struct ann ts tyArgs args)
= Struct ann ts (vmap go2 tyArgs) (vmap (bimap go1 go2) args)
go1 (Select ann obj objTy f)
= Select ann (go1 obj) (go2 objTy) f
go1 (Call ann obj objTy m tyArgs args)
= Call ann (go1 obj) (go2 objTy) m (vmap go2 tyArgs) (vmap (bimap go1 go2) args)
go1 (Assert ann obj objTy assTy)
= Assert ann (go1 obj) (go2 objTy) (go2 assTy)
-- * Extension to the combinators from Control.Enumerable
c8 :: ( Enumerable a
, Enumerable b
, Enumerable c
, Enumerable d
, Enumerable e
, Enumerable g
, Enumerable h
, Enumerable i
, Sized f
, Typeable f
) => (a -> b -> c -> d -> e -> g -> h -> i -> x) -> Shareable f x
c8 f = c7 (uncurry f)
c9 :: ( Enumerable a
, Enumerable b
, Enumerable c
, Enumerable d
, Enumerable e
, Enumerable g
, Enumerable h
, Enumerable i
, Enumerable j
, Sized f
, Typeable f
) => (a -> b -> c -> d -> e -> g -> h -> i -> j -> x) -> Shareable f x
c9 f = c8 (uncurry f)
c10 :: ( Enumerable a
, Enumerable b
, Enumerable c
, Enumerable d
, Enumerable e
, Enumerable g
, Enumerable h
, Enumerable i
, Enumerable j
, Enumerable k
, Sized f
, Typeable f
) => (a -> b -> c -> d -> e -> g -> h -> i -> j -> k -> x) -> Shareable f x
c10 f = c9 (uncurry f)
c11 :: ( Enumerable a
, Enumerable b
, Enumerable c
, Enumerable d
, Enumerable e
, Enumerable g
, Enumerable h
, Enumerable i
, Enumerable j
, Enumerable k
, Enumerable l
, Sized f
, Typeable f
) => (a -> b -> c -> d -> e -> g -> h -> i -> j -> k -> l -> x) -> Shareable f x
c11 f = c10 (uncurry f)
|
cbc7c37151bf65addb305f061337b33530c4a9157899927d1b394ce0fe541715
|
namin/propagators
|
load.scm
|
;;; ----------------------------------------------------------------------
;;; Copyright 2009 Massachusetts Institute of Technology.
;;; ----------------------------------------------------------------------
;;; This file is part of Propagator Network Prototype.
;;;
;;; Propagator Network Prototype is free software; you can
;;; redistribute it and/or modify it under the terms of the GNU
;;; General Public License as published by the Free Software
;;; Foundation, either version 3 of the License, or (at your option)
;;; any later version.
;;;
;;; Propagator Network Prototype is distributed in the hope that it
;;; will be useful, but WITHOUT ANY WARRANTY; without even the implied
;;; warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
;;; See the GNU General Public License for more details.
;;;
;;; You should have received a copy of the GNU General Public License
;;; along with Propagator Network Prototype. If not, see
;;; </>.
;;; ----------------------------------------------------------------------
(define (self-relatively thunk)
(let ((place (ignore-errors current-load-pathname)))
(if (pathname? place)
(with-working-directory-pathname
(directory-namestring place)
thunk)
(thunk))))
(define (load-relative filename)
(self-relatively (lambda () (load filename))))
(load-relative "../support/load")
(for-each load-relative-compiled
'("scheduler"
;"metadata"
"diagrams"
"merge-effects"
"cells"
"cell-sugar"
"propagators"
"application"
"sugar"
"generic-definitions"
"compound-data"
"physical-closures"
"standard-propagators"
"carrying-cells"
;;Intervals must follow standard-propagators in the load order
;;because it depends on interval-non-zero?, numerical-zero?,
;;binary-nothing, and binary-contradiction previously defined.
"intervals"
"premises"
"supported-values"
"truth-maintenance"
"contradictions"
"search"
"amb-utils"
"ui"
"explain"
"example-networks"
"test-utils"))
(maybe-warn-low-memory)
(initialize-scheduler)
| null |
https://raw.githubusercontent.com/namin/propagators/ae694dfe680125e53a3d49e5e91c378f2d333937/core/load.scm
|
scheme
|
----------------------------------------------------------------------
----------------------------------------------------------------------
you can
redistribute it and/or modify it under the terms of the GNU
any later version.
will be useful, but WITHOUT ANY WARRANTY; without even the implied
warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
See the GNU General Public License for more details.
</>.
----------------------------------------------------------------------
"metadata"
Intervals must follow standard-propagators in the load order
because it depends on interval-non-zero?, numerical-zero?,
binary-nothing, and binary-contradiction previously defined.
|
;;; Copyright 2009 Massachusetts Institute of Technology.
;;; This file is part of Propagator Network Prototype.
;;; General Public License as published by the Free Software
;;; Foundation, either version 3 of the License, or (at your option)
;;; Propagator Network Prototype is distributed in the hope that it
;;; You should have received a copy of the GNU General Public License
;;; along with Propagator Network Prototype. If not, see
(define (self-relatively thunk)
(let ((place (ignore-errors current-load-pathname)))
(if (pathname? place)
(with-working-directory-pathname
(directory-namestring place)
thunk)
(thunk))))
(define (load-relative filename)
(self-relatively (lambda () (load filename))))
(load-relative "../support/load")
(for-each load-relative-compiled
'("scheduler"
"diagrams"
"merge-effects"
"cells"
"cell-sugar"
"propagators"
"application"
"sugar"
"generic-definitions"
"compound-data"
"physical-closures"
"standard-propagators"
"carrying-cells"
"intervals"
"premises"
"supported-values"
"truth-maintenance"
"contradictions"
"search"
"amb-utils"
"ui"
"explain"
"example-networks"
"test-utils"))
(maybe-warn-low-memory)
(initialize-scheduler)
|
9fe8f4ccb3e1efe51da6a92db6a4aadb3a3b339759f689d8e561b1b0c19ba555
|
ddmcdonald/sparser
|
post-trans-mods.lisp
|
;;; -*- Mode:LISP; Syntax:Common-Lisp; Package:(SPARSER COMMON-LISP) -*-
;;; Copyright (c) 2017 SIFT LLC. All Rights Reserved
;;;
;;; File: "post-trans-mod"
;;; Module: "grammar/model/sl/biology/
;;; version: May 2017
;; Broken out from phenomena 5/10/17
;; Contains post-translational modifications; phosphorylation; ubiquitination; and post-translational modifications with IDs
(in-package :sparser)
;;;---------------------------------
;;; post-translational-modification
;;;---------------------------------
" It is interesting that various stimuli provoke EGF receptor tyrosine phosphorylation by two distinct means . " )
(define-category post-translational-modification :specializes caused-biochemical-process
:bindings (uid "GO:0043687")
:mixins (on-substrate with-specified-amino-acid capable-of-being-measured)
:binds (;; allow variant to pick up "phosphorylated fraction"
(site (:or molecular-location residue))) ;; which is attached here
:restrict ((agent (:or protein gene bio-complex bio-mechanism bio-activate peptide component;; this is for "fraction of ..."
)) ;;bio-process
(object blocked-category)) ;; trying to BLOCK object by giving it an impossible class
:realization
(:noun ("post-translational modification" "PTM"
"post-transcriptional modification" "post-transcriptional fate")
:o substrate
:o site
:o amino-acid
:m amino-acid
:m substrate
:m site
:verb-premod site
:verb-premod amino-acid
:m agent ;;somehow this overrides the ones below
:of substrate
:of site
:at amino-acid
:on amino-acid ;;"BRAF that is phosphorylated on serine and tyrosine."
:at site
:in site
:in substrate
  ;; the tyrosine residues hyperphosphorylated on p130 CAS
:m site
:on site))
#+ignore ;; not sure if this is a post-translational process or a cellular-process
(define-category transformation
:specializes post-translational-modification
:realization
(:verb "transform" :noun "transformation"
:etf (svo-passive)))
(define-category phosphorylation-modification :specializes post-translational-modification)
(define-category acetylation
:specializes post-translational-modification
:bindings (uid "GO:0006473")
:realization
(:verb "acetylate" :noun "acetylation"
:etf (svo-passive)))
(define-category de-acetylation
:specializes post-translational-modification
:bindings (uid "GO:0006476")
:realization
(:verb ("de-acetylate" "deacetylate")
:noun ("de-acetylation" "deacetylation")
:etf (svo-passive)))
(define-category hyper-acetylation
:specializes post-translational-modification
:bindings (uid "GO:0006476")
:realization
(:verb ("hyper-acetylate" "hyperacetylate")
:noun ("hyper-acetylation" "hyperacetylation")
:etf (svo-passive)))
(define-category hypo-acetylation
:specializes post-translational-modification
:bindings (uid "GO:0006476")
:realization
(:verb ("hypo-acetylate" "hypoacetylate")
:noun ("hypo-acetylation" "hypoacetylation")
:etf (svo-passive)))
;; manually added from TRIPS
(define-category acylation
:specializes post-translational-modification
:bindings (uid "GO:0043543")
:realization
(:verb "acylate"
:noun "acylation"
:etf (svo-passive)))
;; manually added from TRIPS
(define-category de-acylation
:specializes post-translational-modification
:bindings (uid "GO:0035601")
:realization
(:verb ("de-acylate" "deacylate")
:noun ("de-acylation" "deacylation")
:etf (svo-passive)))
;; manually added from TRIPS
(define-category lipidation
:specializes post-translational-modification
:bindings (uid "GO:0006497")
:realization
(:verb "lipidate" :noun "lipidation"
:etf (svo-passive)))
;; manually added from TRIPS
(define-category delipidation
:specializes post-translational-modification
:bindings (uid "GO:0051697")
:realization
(:verb ("delipidate" "de-lipidate")
:noun ("delipidation" "de-lipidation")
:etf (svo-passive)))
;; manually added from TRIPS
(define-category myristoylation
  ;; based on wikipedia for lipidation
:bindings (uid "GO:0018377")
:realization
(:verb "myristoylate" :noun "myristoylation"
:etf (svo-passive)))
;; manually added from TRIPS
(define-category palmitoylation
  ;; based on wikipedia for lipidation
:bindings (uid "GO:0018345")
:realization
(:verb "palmitoylate" :noun "palmitoylation"
:etf (svo-passive)))
;; manually added from TRIPS
(define-category depalmitoylation
  ;; based on wikipedia for lipidation
:bindings (uid "GO:0002084")
:realization
(:verb "depalmitoylate" :noun "depalmitoylation"
:etf (svo-passive)))
;; manually added from TRIPS
(define-category prenylation
  ;; based on wikipedia for lipidation
:bindings (uid "GO:0018342")
:realization
(:verb "prenylate"
:noun "prenylation"
:etf (svo-passive)))
(define-category isoprenylation
;; can't find a go:id, but the past tense came up for a protein definition
:specializes prenylation
:realization
(:verb "isoprenylate"
:noun "isoprenylation"
:etf (svo-passive)))
(define-category farnesylation
  ;; based on wikipedia for lipidation
:bindings (uid "GO:0018343")
:realization
(:verb "farnesylate"
:noun "farnesylation"
:etf (svo-passive) ))
;; manually added from TRIPS
(define-category geranyl-geranylation
  ;; based on wikipedia for lipidation
:bindings (uid "GO:0018344")
:realization
(:verb ("geranyl-geranylate" "geranylβgeranylate") ;; different hyphen type
:noun ("geranyl-geranylation" "geranylβgeranylation")
:etf (svo-passive)))
;; manually added from TRIPS
(define-category nitrosylation
:specializes post-translational-modification
:bindings (uid "GO:0017014")
:realization
(:verb "nitrosylate" :noun "nitrosylation"
:etf (svo-passive)))
;; manually added from TRIPS
(define-category s-nitrosylation
:specializes post-translational-modification
:bindings (uid "NCIT:C120476")
:realization
(:verb "s-nitrosylate" :noun "s-nitrosylation"
:etf (svo-passive)))
;; manually added from TRIPS
(define-category neddylation
:specializes post-translational-modification
:bindings (uid "GO:0045116")
:realization
(:verb "neddylate" :noun "neddylation"
:etf (svo-passive)))
;; manually added from TRIPS
(define-category deneddylation
:specializes post-translational-modification
:bindings (uid "GO:0000338")
:realization
(:verb "deneddylate" :noun "deneddylation"
:etf (svo-passive)))
;; manually added from TRIPS
(define-category biotinylation
:specializes post-translational-modification
:bindings (uid "GO:0009305")
:realization
(:verb "biotinylate" :noun "biotinylation"
:etf (svo-passive)))
;; manually added from TRIPS
(define-category amidation
:specializes post-translational-modification
:bindings (uid "GO:0018032")
:realization
(:verb "amidate" :noun "amidation"
:etf (svo-passive)))
;; manually added from TRIPS
(define-category alkylation
:specializes post-translational-modification
:bindings (uid "GO:0008213")
:realization
(:verb "alkylate" :noun "alkylation"
:etf (svo-passive)))
;; manually added from TRIPS
(define-category carboxylation
:specializes post-translational-modification
:bindings (uid "GO:0018214")
:realization
(:verb "carboxylate" :noun "carboxylation"
:etf (svo-passive)))
;; manually added from TRIPS
(define-category deamination
:specializes post-translational-modification
:bindings (uid "GO:0018277")
:realization
(:verb "deaminate" :noun "deamination"
:etf (svo-passive)))
;; manually added from TRIPS
(define-category glutathionylation
:specializes post-translational-modification
:bindings (uid "GO:0010731")
:realization
(:verb "glutathionylate" :noun "glutathionylation"
:etf (svo-passive)))
;; manually added from TRIPS
(define-category formylation
:specializes post-translational-modification
:bindings (uid "GO:0018256")
:realization
(:verb "formylate" :noun "formylation"
:etf (svo-passive)))
;; manually added from TRIPS
(define-category esterification
:specializes post-translational-modification
:bindings (uid "GO:0018350")
:realization
(:verb "esterificate" :noun "esterification"
:etf (svo-passive)))
;; manually added from TRIPS
(define-category mannosylation
:specializes post-translational-modification
:bindings (uid "GO:0035268")
:realization
(:verb "mannosylate" :noun "mannosylation"
:etf (svo-passive)))
;; manually added from TRIPS
(define-category sulfation
:specializes post-translational-modification
:bindings (uid "GO:0006477")
:realization
(:verb "sulfate" :noun "sulfation"
:etf (svo-passive)))
;; manually added from TRIPS
(define-category succinylation
:specializes post-translational-modification
:bindings (uid "GO:0018335")
:realization
(:verb "succinylate" :noun "succinylation"
:etf (svo-passive)))
;; manually added from TRIPS
(define-category oxidation
:specializes post-translational-modification ;; note: not necessarily post-trans-mod, could be general bio-process, but basing on trips for now, and many article examples are the post-trans-mod
:bindings (uid "GO:0018158")
:realization
(:verb "oxidate" :noun "oxidation"
:etf (svo-passive)))
(define-category glycosylation
:specializes post-translational-modification
:bindings (uid "GO:0006486")
:realization
(:verb "glycosylate"
:noun "glycosylation"
:etf (svo-passive)))
(define-category de-glycosylation
:specializes post-translational-modification
:bindings (uid "GO:0006517")
:realization
(:verb ("deglycosylate" "de-glycosylate")
:noun ("deglycosylation" "de-glycosylation")
:etf (svo-passive)))
(define-category fucosylation
:specializes glycosylation
:bindings (uid "GO:0036065")
:realization
(:verb "fucosylate"
:noun "fucosylation"
:etf (svo-passive)))
(define-category hydroxylation
:specializes post-translational-modification
:bindings (uid "GO:0018126")
:realization
(:verb "hydroxylate"
:noun "hydroxylation"
:etf (svo-passive)))
(define-category methylation
:specializes post-translational-modification ;; uid for protein methylation: GO:0006479
:restrict ((substrate (:or protein variant dna gene)))
:realization
(:verb "methylate"
:noun "methylation"
:etf (svo-passive)))
(define-category de-methylation
:specializes post-translational-modification
:bindings (uid "GO:0070988")
:restrict ((substrate (:or protein variant dna gene)))
:realization
(:verb ("de-methylate" "demethylate")
:noun ("de-methylation" "demethylation")
:etf (svo-passive)))
(define-category hyper-methylation
:specializes post-translational-modification
:bindings (uid "NCIT:C20102")
:restrict ((substrate (:or protein variant dna gene)))
:realization
(:verb ("hyper-methylate" "hypermethylate")
:noun ("hyper-methylation" "hypermethylation")
:etf (svo-passive)))
(define-category hypo-methylation
:specializes post-translational-modification
:bindings (uid "NCIT:C121521")
:restrict ((substrate (:or protein variant dna gene)))
:realization
(:verb ("hypo-methylate" "hypomethylate")
:noun ("hypo-methylation" "hypomethylation")
:etf (svo-passive)))
(define-category ribosylation
:specializes post-translational-modification
:realization
(:verb "ribosylate"
:noun "ribosylation"
:etf (svo-passive)))
;; manually added from TRIPS
(define-category adp-ribosylation
:specializes ribosylation
:bindings (uid "GO:0006471") ;; manually added from TRIPS
:realization
(:verb ("adp-ribosylate" "ADP-ribosylate")
:noun ("adp-ribosylation" "ADP-ribosylation")
:etf (svo-passive)))
;; manually added from TRIPS
(define-category mono-adp-ribosylation
:specializes adp-ribosylation
:bindings (uid "GO:0140289") ;; manually added from TRIPS
:realization
(:verb ("mono-adp-ribosylate" "mono-ADP-ribosylate")
:noun ("mono-adp-ribosylation" "mono-ADP-ribosylation")
:etf (svo-passive)))
;; manually added from TRIPS
(define-category poly-adp-ribosylation
:specializes adp-ribosylation
:bindings (uid "GO:0070212") ;; manually added from TRIPS
:realization
(:verb ("poly-adp-ribosylate" "poly-ADP-ribosylate")
:noun ("poly-adp-ribosylation" "poly-ADP-ribosylation")
:etf (svo-passive)))
(define-category sumoylation
:specializes post-translational-modification
:bindings (uid "GO:0016925")
:realization
(:verb "sumoylate"
:noun "sumoylation"
:etf (svo-passive)))
;; manually added from TRIPS
(define-category desumoylation
:specializes post-translational-modification
:bindings (uid "GO:0016926")
:realization
(:verb ("desumoylate" "de-sumoylate")
:noun ("desumoylation" "de-sumoylation")
:etf (svo-passive)))
;; manually added from TRIPS
(define-category poly-adenylation
:specializes ribosylation
:bindings (uid "NCIT:C18947") ;; manually added from TRIPS
:realization
(:verb ("poly-adenylate" "polyadenylate")
:noun ("poly-adenylation" "polyadenylation" "RNA polyadenylation")
:etf (svo-passive)))
;;;---------------
;;; phosphorylate
;;;---------------
;; GO:0016310
;; "activated IKKΞ± phosphorylates specific serines"
;; "The phosphorylation of these specific serines"
(if (eq (script) :bio)
(define-category phosphorylate
:specializes phosphorylation-modification
:instantiates self
:realization
(:verb ("phosphorylate" :past-participle ("p" "p-" "phosphorylated"))
:noun "phosphorylation"
:adj "phospho"
;; :etf (svo-passive) <-- messed up the mapping
:mumble ("phosphorylate" svo :s agent :o substrate)))
(define-category phosphorylate
:specializes phosphorylation-modification
:instantiates self
:realization
(:verb ("phosphorylate" :past-participle ("phosphorylated"))
:noun "phosphorylation"
:adj "phospho"
;; :etf (svo-passive) <-- messed up the mapping
:mumble ("phosphorylate" svo :s agent :o substrate))))
(define-category auto-phosphorylate
:specializes phosphorylation-modification
:realization
(:verb ("auto-phosphorylate" "autophosphorylate" "autophosphosphorylate")
:noun ("auto-phosphorylation" "autophosphorylation")
:etf (sv)))
(define-category cis-auto-phosphorylate
:specializes auto-phosphorylate
:realization
(:verb "cis-auto-phosphorylate" :noun "cis-auto-phosphorylation"
:etf (sv)))
(def-synonym cis-auto-phosphorylate
(:verb "cis-autophosphosphorylate"
:etf (sv)
:noun "cis-autophosphorylation"))
(define-category trans-auto-phosphorylate
:specializes auto-phosphorylate
:realization
(:verb ("trans-auto-phosphorylate" "trans-autophosphosphorylate")
:noun ("trans-auto-phosphorylation" "trans-autophosphorylation")
:etf (sv)))
(define-category dephosphorylate
:specializes post-translational-modification
:realization
(:verb ("dephosphorylate" "de-phosphorylate")
:noun ("dephosphorylation" "de-phosphorylation" "dephophosphorylation") ;; mispelling from comments
:etf (svo-passive)))
(define-category diphosphorylate
:specializes post-translational-modification
:realization
(:verb ("diphosphorylate" "di-phosphorylate")
:noun ("diphosphorylation" "di-phosphorylation" ) ;; mispelling from comments
:etf (svo-passive)))
(define-category transphosphorylate
:specializes phosphorylation-modification
:instantiates self
:realization
(:verb "transphosphorylate" :noun "transphosphorylation"
:etf (svo-passive)))
(define-category hyperphosphorylate
:specializes phosphorylation-modification
:instantiates self
:realization
(:verb ("hyperphosphorylate" "hyper-phosphorylate")
:noun ("hyperphosphorylation" "hyper-phosphorylation")
:etf (svo-passive)))
(define-category hypophosphorylate
:specializes phosphorylation-modification
:instantiates self
:realization
(:verb ("hypophosphorylate" "hypo-phosphorylate")
:noun ("hypophosphorylation" "hypo-phosphorylation")
:etf (svo-passive)))
(define-category unphosphorylate
:specializes post-translational-modification
:realization
(:verb "unphosphorylate"
:etf (svo-passive)))
(define-category double-phosphorylate
:specializes post-translational-modification
:realization
(:verb "double-phosphorylate"
:etf (svo-passive)))
;;;---------------
;;; ubiquitination
;;;---------------
; monoubiquitination increases the population
;; this process has this effect
; the enzymatic and chemical ubiquitination linkers
;  the monoubiquitinated and unmodified fractions of Ras
; the sensitivity of mUbRas
; our ability to easily generate mUbRas
;  the c β terminus of Ubiquitin (Ubiquitin C77)
;  Ras ligated to Ubiquitin C77
;  ligated to Ubiquitin G76C. <--- point mutation
;;(def-bio "ubiquitin" protein) defined elsewhere with ID
;; not strictly true, but a reasonable approximation.
(define-category ubiquitination
:specializes post-translational-modification
:realization
(:verb ("ubiquitinate" "ubiquitylate" "ubiquinate")
:noun ("ubiquitination" "ubiquitylation")
:etf (svo-passive)))
(define-category auto-ubiquitinate
:specializes ubiquitination
:realization
(:verb ("auto-ubiquitinate" "autoubiquitinate"
"auto-ubiquitylate" "autoubiquitylate")
:noun ("auto-ubiquitination" "autoubiquitination"
"auto-ubiquitylation" "autoubiquitylation")
:etf (sv)))
(define-category poly-ubiquitination :specializes ubiquitination
:bindings (uid "GO:0000209") ;; manually added from TRIPS
:realization
(:verb ("poly-ubiquitinate" "polyubiquitinate" "poly-ubiquitylate" "polyubiquitylate")
:etf (svo-passive)
:noun ("poly-ubiquitination" "polyubiquitination" "poly-ubiquitylation" "polyubiquitylation"))) ;; manually added from TRIPS
;; manually added from TRIPS
(define-category de-ubiquitination :specializes post-translational-modification
:bindings (uid "GO:0016579")
:realization
(:verb ("de-ubiquitinate" "deubiquitinate" "deubiquinate"
"de-ubiquitylate" "deubiquitylate")
:etf (svo-passive)
:noun ("de-ubiquitination" "deubiquitination"
"de-ubiquitylation" "deubiquitylation")))
;;;------------------------------
;;; mUbRas, monoubiquitinated Ras
;;;------------------------------
;;///// This is a process/result pattern. This verb results
;; in a protein that has been ubiquitinated. (Has one or
;; more ubiquitin molecules attached to it.
;; Need to do this systematically
;; In et al.
;; "we did not separate monoubiquitinated Ras (mUbRas) from ..."
;; Jan #1 "the effect of Ras monoubiquitination on ...
;; ... effect of Ras monoubiquitination on ...
;; Resulting version of after adding one ubiquitin.
;; strictly for the rule-label
(define-category monoubiquitination
:specializes post-translational-modification
:realization
;;/// only providing a realization for the result, not the process
;; that leads to the result
(:verb ("monoubiquitinate" "mono-ubiquitinate" "mono-ubiquitylate" "monoubiquitylate")
:noun ("monoubiquitination" "mono-ubiquitination" "mono-ubiquitylation" "monoubiquitylation")
:etf (svo-passive)))
| null |
https://raw.githubusercontent.com/ddmcdonald/sparser/304bd02d0cf7337ca25c8f1d44b1d7912759460f/Sparser/code/s/grammar/model/sl/biology/post-trans-mods.lisp
|
lisp
|
-*- Mode:LISP; Syntax:Common-Lisp; Package:(SPARSER COMMON-LISP) -*-
File: "post-trans-mod"
Module: "grammar/model/sl/biology/
Contains post-translational modifications; phosphorylation; ubiquitination; and post-translational modifications with IDs
---------------------------------
post-translational-modification
---------------------------------
allow variant to pick up "phosphorylated fraction"
which is attached here
this is for "fraction of ..."
bio-process
trying to BLOCK object by giving it an impossible class
somehow this overrides the ones below
"BRAF that is phosphorylated on serine and tyrosine."
not sure if this is a post-translational process or a cellular-process
manually added from TRIPS
manually added from TRIPS
manually added from TRIPS
manually added from TRIPS
manually added from TRIPS
manually added from TRIPS
manually added from TRIPS
manually added from TRIPS
can't find a go:id, but the past tense came up for a protein definition
manually added from TRIPS
different hyphen type
manually added from TRIPS
manually added from TRIPS
manually added from TRIPS
manually added from TRIPS
manually added from TRIPS
manually added from TRIPS
manually added from TRIPS
manually added from TRIPS
manually added from TRIPS
manually added from TRIPS
manually added from TRIPS
manually added from TRIPS
manually added from TRIPS
manually added from TRIPS
manually added from TRIPS
manually added from TRIPS
note: not necessarily post-trans-mod, could be general bio-process, but basing on trips for now, and many article examples are the post-trans-mod
uid for protein methylation: GO:0006479
manually added from TRIPS
manually added from TRIPS
manually added from TRIPS
manually added from TRIPS
manually added from TRIPS
manually added from TRIPS
manually added from TRIPS
manually added from TRIPS
manually added from TRIPS
---------------
phosphorylate
---------------
GO:0016310
"activated IKKΞ± phosphorylates specific serines"
"The phosphorylation of these specific serines"
mispelling from comments
mispelling from comments
---------------
ubiquitination
---------------
monoubiquitination increases the population
this process has this effect
the enzymatic and chemical ubiquitination linkers
the sensitivity of mUbRas
our ability to easily generate mUbRas
(def-bio "ubiquitin" protein) defined elsewhere with ID
not strictly true, but a reasonable approximation.
manually added from TRIPS
manually added from TRIPS
manually added from TRIPS
------------------------------
------------------------------
///// This is a process/result pattern. This verb results
more ubiquitin molecules attached to it.
Need to do this systematically
"we did not separate monoubiquitinated Ras (mUbRas) from ..."
strictly for the rule-label
/// only providing a realization for the result, not the process
that leads to the result
|
Copyright ( c ) 2017 SIFT LLC . All Rights Reserved
version : May 2017
Broken out from phenomena 5/10/17
(in-package :sparser)
" It is interesting that various stimuli provoke EGF receptor tyrosine phosphorylation by two distinct means . " )
(define-category post-translational-modification :specializes caused-biochemical-process
:bindings (uid "GO:0043687")
:mixins (on-substrate with-specified-amino-acid capable-of-being-measured)
:realization
(:noun ("post-translational modification" "PTM"
"post-transcriptional modification" "post-transcriptional fate")
:o substrate
:o site
:o amino-acid
:m amino-acid
:m substrate
:m site
:verb-premod site
:verb-premod amino-acid
:of substrate
:of site
:at amino-acid
:at site
:in site
:in substrate
the tyrosine residues hyperphosphorylated on p130 CAS
:m site
:on site))
(define-category transformation
:specializes post-translational-modification
:realization
(:verb "transform" :noun "transformation"
:etf (svo-passive)))
(define-category phosphorylation-modification :specializes post-translational-modification)
(define-category acetylation
:specializes post-translational-modification
:bindings (uid "GO:0006473")
:realization
(:verb "acetylate" :noun "acetylation"
:etf (svo-passive)))
(define-category de-acetylation
:specializes post-translational-modification
:bindings (uid "GO:0006476")
:realization
(:verb ("de-acetylate" "deacetylate")
:noun ("de-acetylation" "deacetylation")
:etf (svo-passive)))
(define-category hyper-acetylation
:specializes post-translational-modification
:bindings (uid "GO:0006476")
:realization
(:verb ("hyper-acetylate" "hyperacetylate")
:noun ("hyper-acetylation" "hyperacetylation")
:etf (svo-passive)))
(define-category hypo-acetylation
:specializes post-translational-modification
:bindings (uid "GO:0006476")
:realization
(:verb ("hypo-acetylate" "hypoacetylate")
:noun ("hypo-acetylation" "hypoacetylation")
:etf (svo-passive)))
(define-category acylation
:specializes post-translational-modification
:bindings (uid "GO:0043543")
:realization
(:verb "acylate"
:noun "acylation"
:etf (svo-passive)))
(define-category de-acylation
:specializes post-translational-modification
:bindings (uid "GO:0035601")
:realization
(:verb ("de-acylate" "deacylate")
:noun ("de-acylation" "deacylation")
:etf (svo-passive)))
(define-category lipidation
:specializes post-translational-modification
:bindings (uid "GO:0006497")
:realization
(:verb "lipidate" :noun "lipidation"
:etf (svo-passive)))
(define-category delipidation
:specializes post-translational-modification
:bindings (uid "GO:0051697")
:realization
(:verb ("delipidate" "de-lipidate")
:noun ("delipidation" "de-lipidation")
:etf (svo-passive)))
(define-category myristoylation
based on wikipedia for lipidation
:bindings (uid "GO:0018377")
:realization
(:verb "myristoylate" :noun "myristoylation"
:etf (svo-passive)))
(define-category palmitoylation
based on wikipedia for lipidation
:bindings (uid "GO:0018345")
:realization
(:verb "palmitoylate" :noun "palmitoylation"
:etf (svo-passive)))
(define-category depalmitoylation
based on wikipedia for lipidation
:bindings (uid "GO:0002084")
:realization
(:verb "depalmitoylate" :noun "depalmitoylation"
:etf (svo-passive)))
(define-category prenylation
based on wikipedia for lipidation
:bindings (uid "GO:0018342")
:realization
(:verb "prenylate"
:noun "prenylation"
:etf (svo-passive)))
(define-category isoprenylation
:specializes prenylation
:realization
(:verb "isoprenylate"
:noun "isoprenylation"
:etf (svo-passive)))
(define-category farnesylation
based on wikipedia for lipidation
:bindings (uid "GO:0018343")
:realization
(:verb "farnesylate"
:noun "farnesylation"
:etf (svo-passive) ))
(define-category geranyl-geranylation
based on wikipedia for lipidation
:bindings (uid "GO:0018344")
:realization
:noun ("geranyl-geranylation" "geranylβgeranylation")
:etf (svo-passive)))
(define-category nitrosylation
:specializes post-translational-modification
:bindings (uid "GO:0017014")
:realization
(:verb "nitrosylate" :noun "nitrosylation"
:etf (svo-passive)))
(define-category s-nitrosylation
:specializes post-translational-modification
:bindings (uid "NCIT:C120476")
:realization
(:verb "s-nitrosylate" :noun "s-nitrosylation"
:etf (svo-passive)))
(define-category neddylation
:specializes post-translational-modification
:bindings (uid "GO:0045116")
:realization
(:verb "neddylate" :noun "neddylation"
:etf (svo-passive)))
(define-category deneddylation
:specializes post-translational-modification
:bindings (uid "GO:0000338")
:realization
(:verb "deneddylate" :noun "deneddylation"
:etf (svo-passive)))
(define-category biotinylation
:specializes post-translational-modification
:bindings (uid "GO:0009305")
:realization
(:verb "biotinylate" :noun "biotinylation"
:etf (svo-passive)))
(define-category amidation
:specializes post-translational-modification
:bindings (uid "GO:0018032")
:realization
(:verb "amidate" :noun "amidation"
:etf (svo-passive)))
(define-category alkylation
:specializes post-translational-modification
:bindings (uid "GO:0008213")
:realization
(:verb "alkylate" :noun "alkylation"
:etf (svo-passive)))
(define-category carboxylation
:specializes post-translational-modification
:bindings (uid "GO:0018214")
:realization
(:verb "carboxylate" :noun "carboxylation"
:etf (svo-passive)))
(define-category deamination
:specializes post-translational-modification
:bindings (uid "GO:0018277")
:realization
(:verb "deaminate" :noun "deamination"
:etf (svo-passive)))
(define-category glutathionylation
:specializes post-translational-modification
:bindings (uid "GO:0010731")
:realization
(:verb "glutathionylate" :noun "glutathionylation"
:etf (svo-passive)))
(define-category formylation
:specializes post-translational-modification
:bindings (uid "GO:0018256")
:realization
(:verb "formylate" :noun "formylation"
:etf (svo-passive)))
(define-category esterification
:specializes post-translational-modification
:bindings (uid "GO:0018350")
:realization
(:verb "esterificate" :noun "esterification"
:etf (svo-passive)))
(define-category mannosylation
:specializes post-translational-modification
:bindings (uid "GO:0035268")
:realization
(:verb "mannosylate" :noun "mannosylation"
:etf (svo-passive)))
(define-category sulfation
:specializes post-translational-modification
:bindings (uid "GO:0006477")
:realization
(:verb "sulfate" :noun "sulfation"
:etf (svo-passive)))
(define-category succinylation
:specializes post-translational-modification
:bindings (uid "GO:0018335")
:realization
(:verb "succinylate" :noun "succinylation"
:etf (svo-passive)))
(define-category oxidation
:bindings (uid "GO:0018158")
:realization
(:verb "oxidate" :noun "oxidation"
:etf (svo-passive)))
(define-category glycosylation
:specializes post-translational-modification
:bindings (uid "GO:0006486")
:realization
(:verb "glycosylate"
:noun "glycosylation"
:etf (svo-passive)))
(define-category de-glycosylation
:specializes post-translational-modification
:bindings (uid "GO:0006517")
:realization
(:verb ("deglycosylate" "de-glycosylate")
:noun ("deglycosylation" "de-glycosylation")
:etf (svo-passive)))
(define-category fucosylation
:specializes glycosylation
:bindings (uid "GO:0036065")
:realization
(:verb "fucosylate"
:noun "fucosylation"
:etf (svo-passive)))
(define-category hydroxylation
:specializes post-translational-modification
:bindings (uid "GO:0018126")
:realization
(:verb "hydroxylate"
:noun "hydroxylation"
:etf (svo-passive)))
(define-category methylation
:restrict ((substrate (:or protein variant dna gene)))
:realization
(:verb "methylate"
:noun "methylation"
:etf (svo-passive)))
(define-category de-methylation
:specializes post-translational-modification
:bindings (uid "GO:0070988")
:restrict ((substrate (:or protein variant dna gene)))
:realization
(:verb ("de-methylate" "demethylate")
:noun ("de-methylation" "demethylation")
:etf (svo-passive)))
(define-category hyper-methylation
:specializes post-translational-modification
:bindings (uid "NCIT:C20102")
:restrict ((substrate (:or protein variant dna gene)))
:realization
(:verb ("hyper-methylate" "hypermethylate")
:noun ("hyper-methylation" "hypermethylation")
:etf (svo-passive)))
(define-category hypo-methylation
:specializes post-translational-modification
:bindings (uid "NCIT:C121521")
:restrict ((substrate (:or protein variant dna gene)))
:realization
(:verb ("hypo-methylate" "hypomethylate")
:noun ("hypo-methylation" "hypomethylation")
:etf (svo-passive)))
(define-category ribosylation
:specializes post-translational-modification
:realization
(:verb "ribosylate"
:noun "ribosylation"
:etf (svo-passive)))
(define-category adp-ribosylation
:specializes ribosylation
:realization
(:verb ("adp-ribosylate" "ADP-ribosylate")
:noun ("adp-ribosylation" "ADP-ribosylation")
:etf (svo-passive)))
(define-category mono-adp-ribosylation
:specializes adp-ribosylation
:realization
(:verb ("mono-adp-ribosylate" "mono-ADP-ribosylate")
:noun ("mono-adp-ribosylation" "mono-ADP-ribosylation")
:etf (svo-passive)))
(define-category poly-adp-ribosylation
:specializes adp-ribosylation
:realization
(:verb ("poly-adp-ribosylate" "poly-ADP-ribosylate")
:noun ("poly-adp-ribosylation" "poly-ADP-ribosylation")
:etf (svo-passive)))
(define-category sumoylation
:specializes post-translational-modification
:bindings (uid "GO:0016925")
:realization
(:verb "sumoylate"
:noun "sumoylation"
:etf (svo-passive)))
(define-category desumoylation
:specializes post-translational-modification
:bindings (uid "GO:0016926")
:realization
(:verb ("desumoylate" "de-sumoylate")
:noun ("desumoylation" "de-sumoylation")
:etf (svo-passive)))
(define-category poly-adenylation
:specializes ribosylation
:realization
(:verb ("poly-adenylate" "polyadenylate")
:noun ("poly-adenylation" "polyadenylation" "RNA polyadenylation")
:etf (svo-passive)))
(if (eq (script) :bio)
(define-category phosphorylate
:specializes phosphorylation-modification
:instantiates self
:realization
(:verb ("phosphorylate" :past-participle ("p" "p-" "phosphorylated"))
:noun "phosphorylation"
:adj "phospho"
: ( svo - passive ) < -- messed up the mapping
:mumble ("phosphorylate" svo :s agent :o substrate)))
(define-category phosphorylate
:specializes phosphorylation-modification
:instantiates self
:realization
(:verb ("phosphorylate" :past-participle ("phosphorylated"))
:noun "phosphorylation"
:adj "phospho"
: ( svo - passive ) < -- messed up the mapping
:mumble ("phosphorylate" svo :s agent :o substrate))))
(define-category auto-phosphorylate
:specializes phosphorylation-modification
:realization
(:verb ("auto-phosphorylate" "autophosphorylate" "autophosphosphorylate")
:noun ("auto-phosphorylation" "autophosphorylation")
:etf (sv)))
(define-category cis-auto-phosphorylate
:specializes auto-phosphorylate
:realization
(:verb "cis-auto-phosphorylate" :noun "cis-auto-phosphorylation"
:etf (sv)))
(def-synonym cis-auto-phosphorylate
(:verb "cis-autophosphosphorylate"
:etf (sv)
:noun "cis-autophosphorylation"))
(define-category trans-auto-phosphorylate
:specializes auto-phosphorylate
:realization
(:verb ("trans-auto-phosphorylate" "trans-autophosphosphorylate")
:noun ("trans-auto-phosphorylation" "trans-autophosphorylation")
:etf (sv)))
(define-category dephosphorylate
:specializes post-translational-modification
:realization
(:verb ("dephosphorylate" "de-phosphorylate")
:etf (svo-passive)))
(define-category diphosphorylate
:specializes post-translational-modification
:realization
(:verb ("diphosphorylate" "di-phosphorylate")
:etf (svo-passive)))
(define-category transphosphorylate
:specializes phosphorylation-modification
:instantiates self
:realization
(:verb "transphosphorylate" :noun "transphosphorylation"
:etf (svo-passive)))
(define-category hyperphosphorylate
:specializes phosphorylation-modification
:instantiates self
:realization
(:verb ("hyperphosphorylate" "hyper-phosphorylate")
:noun ("hyperphosphorylation" "hyper-phosphorylation")
:etf (svo-passive)))
(define-category hypophosphorylate
:specializes phosphorylation-modification
:instantiates self
:realization
(:verb ("hypophosphorylate" "hypo-phosphorylate")
:noun ("hypophosphorylation" "hypo-phosphorylation")
:etf (svo-passive)))
(define-category unphosphorylate
:specializes post-translational-modification
:realization
(:verb "unphosphorylate"
:etf (svo-passive)))
(define-category double-phosphorylate
:specializes post-translational-modification
:realization
(:verb "double-phosphorylate"
:etf (svo-passive)))
the monoubiquitinated and unmodified fractions of Ras
the c β terminus of Ubiquitin ( Ubiquitin C77 )
Ras ligated to Ubiquitin C77
ligated to Ubiquitin G76C. < --- point mutation
(define-category ubiquitination
:specializes post-translational-modification
:realization
(:verb ("ubiquitinate" "ubiquitylate" "ubiquinate")
:noun ("ubiquitination" "ubiquitylation")
:etf (svo-passive)))
(define-category auto-ubiquitinate
:specializes ubiquitination
:realization
(:verb ("auto-ubiquitinate" "autoubiquitinate"
"auto-ubiquitylate" "autoubiquitylate")
:noun ("auto-ubiquitination" "autoubiquitination"
"auto-ubiquitylation" "autoubiquitylation")
:etf (sv)))
(define-category poly-ubiquitination :specializes ubiquitination
:realization
(:verb ("poly-ubiquitinate" "polyubiquitinate" "poly-ubiquitylate" "polyubiquitylate")
:etf (svo-passive)
(define-category de-ubiquitination :specializes post-translational-modification
:bindings (uid "GO:0016579")
:realization
(:verb ("de-ubiquitinate" "deubiquitinate" "deubiquinate"
"de-ubiquitylate" "deubiquitylate")
:etf (svo-passive)
:noun ("de-ubiquitination" "deubiquitination"
"de-ubiquitylation" "deubiquitylation")))
mUbRas , monoubiquitinated Ras
in a protein that has been ubiquitinated . ( Has one or
In et al .
Jan # 1 " the effect of Ras monoubiquitination on ...
... effect of Ras monoubiquitination on ...
Resulting version of after adding one ubiquitin .
(define-category monoubiquitination
:specializes post-translational-modification
:realization
(:verb ("monoubiquitinate" "mono-ubiquitinate" "mono-ubiquitylate" "monoubiquitylate")
:noun ("monoubiquitination" "mono-ubiquitination" "mono-ubiquitylation" "monoubiquitylation")
:etf (svo-passive)))
|
13ab80f00dd5692fb8312581ccb34d8180cbf3e19d2aaf0b7761cb4aaa7eee3f
|
gator1/jepsen
|
kafka_test.clj
|
(ns jepsen.kafka-test
(:require [clojure.test :refer :all]
[jepsen.core :as jepsen]
[jepsen.kafka :as kafka]))
(deftest kafka-test
(is (:valid? (:results (jepsen/run! (kafka/kafka-test "2.12" "0.10.2.0"))))))
| null |
https://raw.githubusercontent.com/gator1/jepsen/1932cbd72cbc1f6c2a27abe0fe347ea989f0cfbb/kafka/test/jepsen/kafka_test.clj
|
clojure
|
(ns jepsen.kafka-test
(:require [clojure.test :refer :all]
[jepsen.core :as jepsen]
[jepsen.kafka :as kafka]))
(deftest kafka-test
(is (:valid? (:results (jepsen/run! (kafka/kafka-test "2.12" "0.10.2.0"))))))
|
|
4fc29eddae3ce31657e5c9c415e9aaf74bd7b6a469b3b51edec9655aba603deb
|
metabase/metabase
|
date_bucketing_test.clj
|
(ns metabase.query-processor-test.date-bucketing-test
"The below tests cover the various date bucketing/grouping scenarios that we support. There are are always two
timezones in play when querying using these date bucketing features. The most visible is how timestamps are returned
to the user. With no report timezone specified, the JVM's timezone is used to represent the timestamps regardless of
timezone of the database. Specifying a report timezone (if the database supports it) will return the timestamps in
that timezone (manifesting itself as an offset for that time). Using the JVM timezone that doesn't match the
database timezone (assuming the database doesn't support a report timezone) can lead to incorrect results.
The second place timezones can impact this is calculations in the database. A good example of this is grouping
something by day. In that case, the start (or end) of the day will be different depending on what timezone the
database is in. The start of the day in pacific time is 7 (or 8) hours earlier than UTC. This means there might be a
different number of results depending on what timezone we're in. Report timezone lets the user specify that, and it
gets pushed into the database so calculations are made in that timezone.
If a report timezone is specified and the database supports it, the JVM timezone should have no impact on queries or
  their results. See the illustrative sketch after the ns form below."
(:require
[clojure.string :as str]
[clojure.test :refer :all]
[java-time :as t]
[metabase.driver :as driver]
[metabase.driver.sql-jdbc.sync :as sql-jdbc.sync]
[metabase.driver.sql.query-processor :as sql.qp]
[metabase.driver.sql.query-processor-test-util :as sql.qp-test-util]
[metabase.models.database :refer [Database]]
[metabase.models.table :refer [Table]]
[metabase.query-processor :as qp]
[metabase.query-processor-test :as qp.test]
[metabase.query-processor.middleware.format-rows :as format-rows]
[metabase.test :as mt]
[metabase.util :as u]
[metabase.util.date-2 :as u.date]
[metabase.util.honeysql-extensions :as hx]
[metabase.util.log :as log]
[metabase.util.regex :as u.regex]
[potemkin.types :as p.types]
[pretty.core :as pretty]
[toucan.db :as db])
(:import [java.time LocalDate LocalDateTime]))
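;; An illustrative sketch (not from the original file) of the docstring's point that the same
;; instant has a different wall-clock rendering depending on the zone used for display. It only
;; reuses java-time calls already required by this namespace; the literal date is arbitrary.
(comment
  (let [utc-event (t/zoned-date-time (t/local-date 2015 6 1) (t/local-time 10 31) (t/zone-id "UTC"))]
    ;; rendered with a UTC offset, i.e. hour 10 on June 1st
    (t/format :iso-offset-date-time utc-event)
    ;; the same instant rendered for Pacific time shows an earlier wall clock and a -07:00 offset
    (t/format :iso-offset-date-time
              (t/with-zone-same-instant utc-event (t/zone-id "America/Los_Angeles")))))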
(set! *warn-on-reflection* true)
(defn- ->long-if-number [x]
(if (number? x)
(long x)
x))
(def ^:private timezone
{:utc "UTC"
:pacific "America/Los_Angeles"
:eastern "America/New_York"})
(defn- ->timezone-id ^String [x]
(if (keyword? x)
(get timezone x)
x))
(deftest sanity-check-test
  ;; TIMEZONE FIXME - currently broken for Snowflake. UNIX timestamps are interpreted as being in the report timezone
;; rather than UTC.
(mt/test-drivers (disj (mt/normal-drivers) :snowflake :redshift)
(testing "\nRegardless of report timezone, UNIX timestamps should always be interpreted a being in UTC."
(let [utc-results [[1 "2015-06-06T10:40:00Z" 4]
[2 "2015-06-10T19:51:00Z" 0]
[3 "2015-06-09T15:42:00Z" 5]
[4 "2015-06-22T23:49:00Z" 3]
[5 "2015-06-20T01:45:00Z" 3]]]
(doseq [timezone [:pacific :utc :eastern]]
(testing "\nResults should be returned in report timezone, if supported by driver."
(testing (format "\ntimezone = %s" timezone)
(let [local-results (cond
(= driver/*driver* :sqlite)
(for [[id s cnt] utc-results]
[id (u.date/format-sql (t/local-date-time (u.date/parse s))) cnt])
(or (= timezone :utc)
(not (driver/supports? driver/*driver* :set-timezone)))
utc-results
:else
(for [[id s cnt] utc-results]
(let [zone-id (t/zone-id (->timezone-id timezone))
t (t/offset-date-time (t/with-zone-same-instant (u.date/parse s) zone-id))
s (t/format :iso-offset-date-time t)]
[id s cnt])))]
(mt/with-report-timezone-id (->timezone-id timezone)
(mt/dataset sad-toucan-incidents
(is (= local-results
(mt/formatted-rows [int identity int]
(mt/run-mbql-query incidents
{:fields [$id $timestamp $severity]
:order-by [[:asc $id]]
:limit 5}))))))))))))))
(defn- sad-toucan-incidents-with-bucketing
"Returns 10 sad toucan incidents grouped by `unit`"
([unit]
(->> (mt/dataset sad-toucan-incidents
(mt/run-mbql-query incidents
{:aggregation [[:count]]
:breakout [[:field %timestamp {:temporal-unit unit}]]
:limit 10}))
mt/rows
(mt/format-rows-by [->long-if-number int])))
([unit timezone-id]
(mt/initialize-if-needed! :db)
(mt/with-report-timezone-id (->timezone-id timezone-id)
(sad-toucan-incidents-with-bucketing unit))))
(defn- default-timezone-parse-fn
"Create a date formatter, interpretting the datestring as being in `tz`"
[default-timezone-id]
(let [timezone-id (->timezone-id default-timezone-id)]
(fn [s]
(u.date/parse s timezone-id))))
(defn- format-in-timezone-fn
"Create a formatter for converting a date to `tz` and in the format that the query processor would return"
[results-timezone-id]
(let [zone-id (-> results-timezone-id ->timezone-id t/zone-id)]
(fn [t]
(format-rows/format-value t zone-id))))
(defn- date-without-time-format-fn
"sqlite returns dates that do not include their time, this formatter is useful for those DBs"
[t]
(condp instance? t
LocalDate (t/format :iso-local-date t)
LocalDateTime (t/format :iso-local-date t)
(t/format :iso-offset-date t)))
(def ^:private sad-toucan-dates
"This is the first 10 sad toucan dates when converted from millis since epoch in the UTC timezone. The timezone is
left off of the timezone string so that we can emulate how certain conversions work in the code today. As an
example, the UTC dates in Oracle are interpreted as the reporting timezone when they're UTC"
["2015-06-01T10:31:00.000"
"2015-06-01T16:06:00.000"
"2015-06-01T17:23:00.000"
"2015-06-01T18:55:00.000"
"2015-06-01T21:04:00.000"
"2015-06-01T21:19:00.000"
"2015-06-02T02:13:00.000"
"2015-06-02T05:37:00.000"
"2015-06-02T08:20:00.000"
"2015-06-02T11:11:00.000"])
(defn- sad-toucan-result
"Creates a sad toucan result set by parsing literal strings with `parse-fn` and formatting then in results with
`format-result-fn`."
([parse-fn format-result-fn]
(sad-toucan-result parse-fn format-result-fn sad-toucan-dates))
([parse-fn format-result-fn temporal-literal-strs]
(for [s temporal-literal-strs]
[(-> s parse-fn format-result-fn) 1])))
(deftest group-by-default-test
(mt/test-drivers (mt/normal-drivers)
(testing "\nPacific timezone"
(is (= (cond
             ;; Timezone is omitted by these databases. HACK - SQLite returns datetimes as strings, and we don't
;; re-parse them or do anything smart with them; we just return them directly. This is less than ideal.
;; TIMEZONE FIXME
(= :sqlite driver/*driver*)
[["2015-06-01 10:31:00" 1]
["2015-06-01 16:06:00" 1]
["2015-06-01 17:23:00" 1]
["2015-06-01 18:55:00" 1]
["2015-06-01 21:04:00" 1]
["2015-06-01 21:19:00" 1]
["2015-06-02 02:13:00" 1]
["2015-06-02 05:37:00" 1]
["2015-06-02 08:20:00" 1]
["2015-06-02 11:11:00" 1]]
             ;; There's a bug here where we are reading in the UTC time as pacific, so we're 7 hours off
             ;; (This is fixed for Oracle now)
(and (qp.test/tz-shifted-driver-bug? driver/*driver*) (not= driver/*driver* :oracle))
[["2015-06-01T10:31:00-07:00" 1]
["2015-06-01T16:06:00-07:00" 1]
["2015-06-01T17:23:00-07:00" 1]
["2015-06-01T18:55:00-07:00" 1]
["2015-06-01T21:04:00-07:00" 1]
["2015-06-01T21:19:00-07:00" 1]
["2015-06-02T02:13:00-07:00" 1]
["2015-06-02T05:37:00-07:00" 1]
["2015-06-02T08:20:00-07:00" 1]
["2015-06-02T11:11:00-07:00" 1]]
;; When the reporting timezone is applied, the same datetime value is returned, but set in the pacific
;; timezone
(qp.test/supports-report-timezone? driver/*driver*)
[["2015-06-01T03:31:00-07:00" 1]
["2015-06-01T09:06:00-07:00" 1]
["2015-06-01T10:23:00-07:00" 1]
["2015-06-01T11:55:00-07:00" 1]
["2015-06-01T14:04:00-07:00" 1]
["2015-06-01T14:19:00-07:00" 1]
["2015-06-01T19:13:00-07:00" 1]
["2015-06-01T22:37:00-07:00" 1]
["2015-06-02T01:20:00-07:00" 1]
["2015-06-02T04:11:00-07:00" 1]]
             ;; Databases that don't support report timezone will always return the time using the JVM's timezone
             ;; setting. Our tests force UTC time, so this should always be UTC
:else
[["2015-06-01T10:31:00Z" 1]
["2015-06-01T16:06:00Z" 1]
["2015-06-01T17:23:00Z" 1]
["2015-06-01T18:55:00Z" 1]
["2015-06-01T21:04:00Z" 1]
["2015-06-01T21:19:00Z" 1]
["2015-06-02T02:13:00Z" 1]
["2015-06-02T05:37:00Z" 1]
["2015-06-02T08:20:00Z" 1]
["2015-06-02T11:11:00Z" 1]])
(sad-toucan-incidents-with-bucketing :default :pacific))))
(testing "\nEastern timezone"
(is (= (cond
;; These databases are always in UTC so aren't impacted by changes in report-timezone
(= :sqlite driver/*driver*)
[["2015-06-01 10:31:00" 1]
["2015-06-01 16:06:00" 1]
["2015-06-01 17:23:00" 1]
["2015-06-01 18:55:00" 1]
["2015-06-01 21:04:00" 1]
["2015-06-01 21:19:00" 1]
["2015-06-02 02:13:00" 1]
["2015-06-02 05:37:00" 1]
["2015-06-02 08:20:00" 1]
["2015-06-02 11:11:00" 1]]
(and (qp.test/tz-shifted-driver-bug? driver/*driver*) (not= driver/*driver* :oracle))
[["2015-06-01T10:31:00-04:00" 1]
["2015-06-01T16:06:00-04:00" 1]
["2015-06-01T17:23:00-04:00" 1]
["2015-06-01T18:55:00-04:00" 1]
["2015-06-01T21:04:00-04:00" 1]
["2015-06-01T21:19:00-04:00" 1]
["2015-06-02T02:13:00-04:00" 1]
["2015-06-02T05:37:00-04:00" 1]
["2015-06-02T08:20:00-04:00" 1]
["2015-06-02T11:11:00-04:00" 1]]
;; The time instant is the same as UTC (or pacific) but should be offset by the eastern timezone
(qp.test/supports-report-timezone? driver/*driver*)
[["2015-06-01T06:31:00-04:00" 1]
["2015-06-01T12:06:00-04:00" 1]
["2015-06-01T13:23:00-04:00" 1]
["2015-06-01T14:55:00-04:00" 1]
["2015-06-01T17:04:00-04:00" 1]
["2015-06-01T17:19:00-04:00" 1]
["2015-06-01T22:13:00-04:00" 1]
["2015-06-02T01:37:00-04:00" 1]
["2015-06-02T04:20:00-04:00" 1]
["2015-06-02T07:11:00-04:00" 1]]
             ;; The change in report timezone has no effect on this group
:else
[["2015-06-01T10:31:00Z" 1]
["2015-06-01T16:06:00Z" 1]
["2015-06-01T17:23:00Z" 1]
["2015-06-01T18:55:00Z" 1]
["2015-06-01T21:04:00Z" 1]
["2015-06-01T21:19:00Z" 1]
["2015-06-02T02:13:00Z" 1]
["2015-06-02T05:37:00Z" 1]
["2015-06-02T08:20:00Z" 1]
["2015-06-02T11:11:00Z" 1]])
(sad-toucan-incidents-with-bucketing :default :eastern)))))
  ;; Changes the JVM timezone from UTC to Pacific. This test isn't run on H2 as the database stores its timezones in
;; the JVM timezone (UTC on startup). When we change that timezone, it then assumes the data was also stored in that
;; timezone. This leads to incorrect results. In this example it applies the pacific offset twice
;;
  ;; The exclusions here are databases that give incorrect answers when the JVM timezone doesn't match the database's
;; timezone
;;
;; TIMEZONE FIXME
(mt/test-drivers (mt/normal-drivers-with-feature :test/jvm-timezone-setting)
(testing "Change JVM timezone from UTC to Pacific"
(is (= (cond
(= :sqlite driver/*driver*)
(sad-toucan-result (default-timezone-parse-fn :utc) (comp u.date/format-sql t/local-date-time))
(and (qp.test/tz-shifted-driver-bug? driver/*driver*) (not= driver/*driver* :oracle))
(sad-toucan-result (default-timezone-parse-fn :eastern) (format-in-timezone-fn :eastern))
;; The JVM timezone should have no impact on results from a database that uses a report timezone
(qp.test/supports-report-timezone? driver/*driver*)
(sad-toucan-result (default-timezone-parse-fn :utc) (format-in-timezone-fn :eastern))
:else
(sad-toucan-result (default-timezone-parse-fn :utc) (format-in-timezone-fn :pacific)))
(mt/with-system-timezone-id (timezone :pacific)
(sad-toucan-incidents-with-bucketing :default :eastern)))))))
(deftest group-by-minute-test
(testing "This dataset doesn't have multiple events in a minute, the results are the same as the default grouping"
(mt/test-drivers (mt/normal-drivers)
(is (= (cond
(= :sqlite driver/*driver*)
(sad-toucan-result (default-timezone-parse-fn :utc) (comp u.date/format-sql t/local-date-time))
(qp.test/tz-shifted-driver-bug? driver/*driver*)
(sad-toucan-result (default-timezone-parse-fn :pacific) (format-in-timezone-fn :pacific))
(qp.test/supports-report-timezone? driver/*driver*)
(sad-toucan-result (default-timezone-parse-fn :utc) (format-in-timezone-fn :pacific))
:else
(sad-toucan-result (default-timezone-parse-fn :utc) (format-in-timezone-fn :utc)))
(sad-toucan-incidents-with-bucketing :minute :pacific))))))
(deftest group-by-minute-of-hour-test
(testing "Grouping by minute of hour is not affected by timezones"
(mt/test-drivers (mt/normal-drivers)
(is (= [[0 5]
[1 4]
[2 2]
[3 4]
[4 4]
[5 3]
[6 5]
[7 1]
[8 1]
[9 1]]
(sad-toucan-incidents-with-bucketing :minute-of-hour :pacific))))))
(def ^:private sad-toucan-dates-grouped-by-hour
"This is the first 10 groupings of sad toucan dates at the same hour when converted from millis since epoch in the UTC
timezone. The timezone is left off of the timezone string so that we can emulate how certain conversions are broken
in the code today. As an example, the UTC dates in Oracle are interpreted as the reporting timezone when they're
UTC"
["2015-06-01T10:00:00"
"2015-06-01T16:00:00"
"2015-06-01T17:00:00"
"2015-06-01T18:00:00"
"2015-06-01T21:00:00"
"2015-06-02T02:00:00"
"2015-06-02T05:00:00"
"2015-06-02T08:00:00"
"2015-06-02T11:00:00"
"2015-06-02T13:00:00"])
(defn- results-by-hour [parse-fn format-result-fn]
(map
(fn [s cnt]
[(-> s parse-fn format-result-fn) cnt])
sad-toucan-dates-grouped-by-hour
[1 1 1 1 2 1 1 1 1 1]))
;; For this test, the results are the same for each database, but the formatting of the time for that given count is
;; different depending on whether the database supports a report timezone and what timezone that database is in
(deftest group-by-hour-test
(mt/test-drivers (mt/normal-drivers)
(is (= (cond
(= :sqlite driver/*driver*)
(results-by-hour (default-timezone-parse-fn :utc) (comp u.date/format-sql t/local-date-time))
(qp.test/tz-shifted-driver-bug? driver/*driver*)
(results-by-hour (default-timezone-parse-fn :pacific) (format-in-timezone-fn :pacific))
(qp.test/supports-report-timezone? driver/*driver*)
(results-by-hour (default-timezone-parse-fn :utc) (format-in-timezone-fn :pacific))
:else
(results-by-hour (default-timezone-parse-fn :utc) (format-in-timezone-fn :utc)))
(sad-toucan-incidents-with-bucketing :hour :pacific)))))
;; The counts are affected by timezone as the times are shifted back by 7 hours. These count changes can be validated
;; by matching the first three results of the pacific results to the last three of the UTC results (i.e. pacific is 7
;; hours back of UTC at that time). A small illustrative sketch follows the test below.
(deftest group-by-hour-of-day-test
(mt/test-drivers (mt/normal-drivers)
(testing "results in pacific timezone"
(is (= (if (and (not (qp.test/tz-shifted-driver-bug? driver/*driver*))
(qp.test/supports-report-timezone? driver/*driver*))
[[0 8] [1 9] [2 7] [3 10] [4 10] [5 9] [6 6] [7 5] [8 7] [9 7]]
[[0 13] [1 8] [2 4] [3 7] [4 5] [5 13] [6 10] [7 8] [8 9] [9 7]])
(sad-toucan-incidents-with-bucketing :hour-of-day :pacific))))
(testing "results in UTC"
(is (= [[0 13] [1 8] [2 4] [3 7] [4 5] [5 13] [6 10] [7 8] [8 9] [9 7]]
(sad-toucan-incidents-with-bucketing :hour-of-day :utc))
"With all databases in UTC, the results should be the same for all DBs"))))
(defn- find-events-in-range
"Find the number of sad toucan events between `start-date-str` and `end-date-str`"
[start-date-str end-date-str]
(-> (mt/dataset sad-toucan-incidents
(mt/run-mbql-query incidents
{:aggregation [[:count]]
:breakout [!day.timestamp]
:filter [:between !default.timestamp start-date-str end-date-str]}))
mt/rows
first
second
(or 0)))
;; This test uses H2 (in UTC) to determine the difference in number of events in UTC time vs pacific time. It does
;; this using the UTC dataset and some math to figure out, if our 24-hour window is shifted 7 hours back, how many
;; events we gain and lose. Although this test is technically covered by the other grouping-by-day tests, it's
;; useful for debugging to answer why row counts change when the timezone shifts by removing timezones and the related
;; database settings. A worked example of the shifted window follows the test below.
(deftest new-events-after-timezone-shift-test
(driver/with-driver :h2
(doseq [[timezone-id expected-net-gains] {:pacific [2 -1 5 -5 2 0 -2 1 -1 1]
:eastern [1 -1 3 -3 3 -2 -1 0 1 1]}]
(testing (format "Timezone = %s" timezone-id)
(doseq [[i expected-net-gain] (map-indexed vector expected-net-gains)
:let [start (t/local-date 2015 6 (inc i))
end (t/plus start (t/days 1))
->tz #(t/zoned-date-time % (t/local-time 0) (t/zone-id (->timezone-id timezone-id)))
find-events-in-range (fn [x y]
(find-events-in-range (u.date/format x) (u.date/format y)))
num-events-gained (find-events-in-range end (->tz end))
num-events-lost (find-events-in-range start (->tz start))]]
(testing (format "events between %s and %s" start end)
(is (= expected-net-gain
(- num-events-gained num-events-lost))
(format "When shifting to %s timezone we should lose %d events and gain %d, for a net gain of %d"
timezone-id num-events-gained num-events-lost expected-net-gain))))))))
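;; A worked example (not from the original tests) of the shifted window used above: "June 1" in
;; Pacific time starts at midnight with a -07:00 offset, i.e. 07:00 UTC, so relative to the UTC day
;; it loses the incidents between 00:00 and 07:00 UTC on June 1 and gains the ones between 00:00
;; and 07:00 UTC on June 2. The zone string matches the `timezone` map defined at the top of this
;; file.
(comment
  (let [->pacific #(t/zoned-date-time % (t/local-time 0) (t/zone-id "America/Los_Angeles"))]
    ;; start of the Pacific-time day of June 1; the same instant as 2015-06-01T07:00Z
    (t/format :iso-offset-date-time (->pacific (t/local-date 2015 6 1)))))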
(def ^:private sad-toucan-events-grouped-by-day
["2015-06-01"
"2015-06-02"
"2015-06-03"
"2015-06-04"
"2015-06-05"
"2015-06-06"
"2015-06-07"
"2015-06-08"
"2015-06-09"
"2015-06-10"])
(defn- results-by-day [parse-fn format-result-fn counts]
(map
(fn [s cnt]
[(-> s parse-fn format-result-fn) cnt])
sad-toucan-events-grouped-by-day
counts))
(deftest group-by-day-test
(mt/test-drivers (mt/normal-drivers)
(testing "\nUTC timezone"
(is (= (if (= :sqlite driver/*driver*)
(results-by-day u.date/parse date-without-time-format-fn [6 10 4 9 9 8 8 9 7 9])
(results-by-day u.date/parse (format-in-timezone-fn :utc) [6 10 4 9 9 8 8 9 7 9]))
(sad-toucan-incidents-with-bucketing :day :utc))))
(testing "\nPacific timezone"
(is (= (cond
(= :sqlite driver/*driver*)
[["2015-06-01" 6]
["2015-06-02" 10]
["2015-06-03" 4]
["2015-06-04" 9]
["2015-06-05" 9]
["2015-06-06" 8]
["2015-06-07" 8]
["2015-06-08" 9]
["2015-06-09" 7]
["2015-06-10" 9]]
(qp.test/tz-shifted-driver-bug? driver/*driver*)
[["2015-06-01T00:00:00-07:00" 6]
["2015-06-02T00:00:00-07:00" 10]
["2015-06-03T00:00:00-07:00" 4]
["2015-06-04T00:00:00-07:00" 9]
["2015-06-05T00:00:00-07:00" 9]
["2015-06-06T00:00:00-07:00" 8]
["2015-06-07T00:00:00-07:00" 8]
["2015-06-08T00:00:00-07:00" 9]
["2015-06-09T00:00:00-07:00" 7]
["2015-06-10T00:00:00-07:00" 9]]
(qp.test/supports-report-timezone? driver/*driver*)
[["2015-06-01T00:00:00-07:00" 8]
["2015-06-02T00:00:00-07:00" 9]
["2015-06-03T00:00:00-07:00" 9]
["2015-06-04T00:00:00-07:00" 4]
["2015-06-05T00:00:00-07:00" 11]
["2015-06-06T00:00:00-07:00" 8]
["2015-06-07T00:00:00-07:00" 6]
["2015-06-08T00:00:00-07:00" 10]
["2015-06-09T00:00:00-07:00" 6]
["2015-06-10T00:00:00-07:00" 10]]
:else
[["2015-06-01T00:00:00Z" 6]
["2015-06-02T00:00:00Z" 10]
["2015-06-03T00:00:00Z" 4]
["2015-06-04T00:00:00Z" 9]
["2015-06-05T00:00:00Z" 9]
["2015-06-06T00:00:00Z" 8]
["2015-06-07T00:00:00Z" 8]
["2015-06-08T00:00:00Z" 9]
["2015-06-09T00:00:00Z" 7]
["2015-06-10T00:00:00Z" 9]])
(sad-toucan-incidents-with-bucketing :day :pacific))))
(testing "\nEastern timezone"
(is (= (cond
(= :sqlite driver/*driver*)
(results-by-day u.date/parse date-without-time-format-fn [6 10 4 9 9 8 8 9 7 9])
(qp.test/tz-shifted-driver-bug? driver/*driver*)
(results-by-day (default-timezone-parse-fn :eastern)
(format-in-timezone-fn :eastern)
[6 10 4 9 9 8 8 9 7 9])
(qp.test/supports-report-timezone? driver/*driver*)
(results-by-day (default-timezone-parse-fn :eastern)
(format-in-timezone-fn :eastern)
[7 9 7 6 12 6 7 9 8 10])
:else
(results-by-day u.date/parse
(format-in-timezone-fn :utc)
[6 10 4 9 9 8 8 9 7 9]))
(sad-toucan-incidents-with-bucketing :day :eastern)))))
(testing "\nWith JVM timezone set to Pacific time"
;; This tests out the JVM timezone's impact on the results. For databases supporting a report timezone, this should
    ;; have no effect on the results. When no report timezone is used it should convert dates to the JVM's timezone
;;
;; H2 doesn't support us switching timezones after the dates have been stored. This causes H2 to (incorrectly) apply
;; the timezone shift twice, so instead of -07:00 it will become -14:00. Leaving out the test rather than validate
;; wrong results.
;;
    ;; The exclusions here are databases that give incorrect answers when the JVM timezone doesn't match the database's
;; timezone
;;
;; TIMEZONE FIXME
(mt/test-drivers (mt/normal-drivers-with-feature :test/jvm-timezone-setting)
(is (= (cond
(= :sqlite driver/*driver*)
(results-by-day u.date/parse date-without-time-format-fn [6 10 4 9 9 8 8 9 7 9])
(qp.test/tz-shifted-driver-bug? driver/*driver*)
(results-by-day (default-timezone-parse-fn :pacific)
(format-in-timezone-fn :pacific)
[6 10 4 9 9 8 8 9 7 9])
(qp.test/supports-report-timezone? driver/*driver*)
(results-by-day (default-timezone-parse-fn :pacific)
(format-in-timezone-fn :pacific)
[8 9 9 4 11 8 6 10 6 10])
:else
(results-by-day (default-timezone-parse-fn :utc)
(format-in-timezone-fn :pacific)
[6 10 4 9 9 8 8 9 7 9]))
(mt/with-system-timezone-id (timezone :pacific)
(sad-toucan-incidents-with-bucketing :day :pacific)))))))
(deftest group-by-day-of-week-test
(mt/test-drivers (mt/normal-drivers)
(testing "\nPacific timezone"
(is (= (if (and (not (qp.test/tz-shifted-driver-bug? driver/*driver*))
(qp.test/supports-report-timezone? driver/*driver*))
[[1 29] [2 36] [3 33] [4 29] [5 13] [6 38] [7 22]]
[[1 28] [2 38] [3 29] [4 27] [5 24] [6 30] [7 24]])
(sad-toucan-incidents-with-bucketing :day-of-week :pacific))))
(testing "\nUTC timezone"
(is (= [[1 28] [2 38] [3 29] [4 27] [5 24] [6 30] [7 24]]
(sad-toucan-incidents-with-bucketing :day-of-week :utc))))))
(deftest group-by-day-of-month-test
(mt/test-drivers (mt/normal-drivers)
(testing "\nPacific timezone"
(is (= (if (and (not (qp.test/tz-shifted-driver-bug? driver/*driver*))
(qp.test/supports-report-timezone? driver/*driver*))
[[1 8] [2 9] [3 9] [4 4] [5 11] [6 8] [7 6] [8 10] [9 6] [10 10]]
[[1 6] [2 10] [3 4] [4 9] [5 9] [6 8] [7 8] [8 9] [9 7] [10 9]])
(sad-toucan-incidents-with-bucketing :day-of-month :pacific))))
(testing "\nUTC timezone"
(is (= [[1 6] [2 10] [3 4] [4 9] [5 9] [6 8] [7 8] [8 9] [9 7] [10 9]]
(sad-toucan-incidents-with-bucketing :day-of-month :utc))))))
(deftest group-by-day-of-year-test
(mt/test-drivers (mt/normal-drivers)
(testing "\nPacific timezone"
(is (= (if (and (not (qp.test/tz-shifted-driver-bug? driver/*driver*))
(qp.test/supports-report-timezone? driver/*driver*))
[[152 8] [153 9] [154 9] [155 4] [156 11] [157 8] [158 6] [159 10] [160 6] [161 10]]
[[152 6] [153 10] [154 4] [155 9] [156 9] [157 8] [158 8] [159 9] [160 7] [161 9]])
(sad-toucan-incidents-with-bucketing :day-of-year :pacific))))
(testing "\nUTC timezone"
(is (= [[152 6] [153 10] [154 4] [155 9] [156 9] [157 8] [158 8] [159 9] [160 7] [161 9]]
(sad-toucan-incidents-with-bucketing :day-of-year :utc))))))
;; This test helps in debugging why event counts change with a given timezone. It queries only a UTC H2 database to
;; find how those counts would change if time was in pacific time. The results of this test are also in the UTC test
;; above and pacific test below, but this is still useful for debugging as it doesn't involve changing timezones or
;; database settings
(deftest new-weekly-events-after-tz-shift-test
(driver/with-driver :h2
(doseq [[timezone-id start-date->expected-net-gain] {:pacific {"2015-05-31" 3
"2015-06-07" 0
"2015-06-14" -1
"2015-06-21" -2
"2015-06-28" 0}
:eastern {"2015-05-31" 1
"2015-06-07" 1
"2015-06-14" -1
"2015-06-21" -1
"2015-06-28" 0}}]
(testing (format "Timezone = %s" timezone-id)
(doseq [[start-str expected-net-gain] start-date->expected-net-gain
:let [start (u.date/parse start-str)
end (t/plus start (t/days 7))
->tz #(t/zoned-date-time % (t/local-time 0) (t/zone-id (->timezone-id timezone-id)))
find-events-in-range (fn [x y]
(find-events-in-range (u.date/format x) (u.date/format y)))
num-events-gained (find-events-in-range end (->tz end))
num-events-lost (find-events-in-range start (->tz start))]]
(testing (format "events between %s and %s" start end)
(is (= expected-net-gain
(- num-events-gained num-events-lost))
(format "When shifting to %s timezone we should lose %d events and gain %d, for a net gain of %d"
timezone-id num-events-gained num-events-lost expected-net-gain))))))))
(defn- results-by-week [parse-fn format-result-fn counts]
(map
(fn [s cnt]
[(-> s parse-fn format-result-fn) cnt])
["2015-05-31"
"2015-06-07"
"2015-06-14"
"2015-06-21"
"2015-06-28"]
counts))
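;; Illustrative sketch (not part of the original test suite): how `results-by-week` builds the expected
;; [formatted-week-start count] pairs used by the week-grouping tests below. The counts vector is just the
;; UTC counts quoted in those tests; the exact formatting of each week start depends on the formatter passed in.
(comment
  (results-by-week u.date/parse
                   (format-in-timezone-fn :utc)
                   [46 47 40 60 7])
  ;; => five pairs, one per week bucket, e.g. something like ["2015-05-31T00:00:00Z" 46]
  )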
;; Sad toucan incidents by week. Databases in UTC that don't support report timezones will be the same as the UTC test
;; above. Databases that support report timezone will have different counts as the week starts and ends 7 hours
;; earlier
(deftest group-by-week-test
(mt/test-drivers (mt/normal-drivers)
(testing "\nUTC timezone"
(is (= (if (= :sqlite driver/*driver*)
(results-by-week u.date/parse
date-without-time-format-fn
[46 47 40 60 7])
(results-by-week u.date/parse
(format-in-timezone-fn :utc)
[46 47 40 60 7]))
(sad-toucan-incidents-with-bucketing :week :utc))))
(testing "\nPacific timezone"
(is (= (cond
(= :sqlite driver/*driver*)
(results-by-week u.date/parse
date-without-time-format-fn
[46 47 40 60 7])
(qp.test/tz-shifted-driver-bug? driver/*driver*)
(results-by-week (default-timezone-parse-fn :pacific)
(format-in-timezone-fn :pacific)
[46 47 40 60 7])
(qp.test/supports-report-timezone? driver/*driver*)
(results-by-week (default-timezone-parse-fn :pacific)
(format-in-timezone-fn :pacific)
[49 47 39 58 7])
:else
(results-by-week u.date/parse
(format-in-timezone-fn :utc)
[46 47 40 60 7]))
(sad-toucan-incidents-with-bucketing :week :pacific))))
;; Tests eastern timezone grouping by week; UTC databases don't change, databases with reporting timezones need to
;; account for the 4-5 hour difference
(testing "\nEastern timezone"
(mt/test-drivers (mt/normal-drivers)
(is (= (cond
(= :sqlite driver/*driver*)
(results-by-week u.date/parse
date-without-time-format-fn
[46 47 40 60 7])
(qp.test/tz-shifted-driver-bug? driver/*driver*)
(results-by-week (default-timezone-parse-fn :eastern)
(format-in-timezone-fn :eastern)
[46 47 40 60 7])
(qp.test/supports-report-timezone? driver/*driver*)
(results-by-week (default-timezone-parse-fn :eastern)
(format-in-timezone-fn :eastern)
[47 48 39 59 7])
:else
(results-by-week u.date/parse
(format-in-timezone-fn :utc)
[46 47 40 60 7]))
(sad-toucan-incidents-with-bucketing :week :eastern))))))
;; Setting the JVM timezone will change how the datetime results are displayed but doesn't impact the calculation of the
;; begin/end of the week
;;
;; The exclusions here are databases that give incorrect answers when the JVM timezone doesn't match the database's
;; timezone (FIXME)
(testing "JVM timezone set to Pacific"
(mt/test-drivers (mt/normal-drivers-except #{:h2 :sqlserver :redshift :sparksql :mongo :bigquery-cloud-sdk})
(is (= (cond
(= :sqlite driver/*driver*)
(results-by-week u.date/parse
date-without-time-format-fn
[46 47 40 60 7])
;; TODO - these results are the same as the `:else` results
(qp.test/tz-shifted-driver-bug? driver/*driver*)
(results-by-week (default-timezone-parse-fn :pacific)
(format-in-timezone-fn :pacific)
[46 47 40 60 7])
(qp.test/supports-report-timezone? driver/*driver*)
(results-by-week (default-timezone-parse-fn :pacific)
(format-in-timezone-fn :pacific)
[49 47 39 58 7])
:else
(results-by-week u.date/parse
(format-in-timezone-fn :pacific)
[46 47 40 60 7]))
(mt/with-system-timezone-id (timezone :pacific)
(sad-toucan-incidents-with-bucketing :week :pacific)))))))
(deftest group-by-week-of-year-test
(mt/test-drivers (mt/normal-drivers)
(is (= [[22 46] [23 47] [24 40] [25 60] [26 7]]
(sad-toucan-incidents-with-bucketing :week-of-year :utc)))))
(defn- fmt-str-or-int
[x]
(if (string? x)
(str x)
(int x)))
(deftest week-of-year-and-week-count-should-be-consistent-test
(testing "consistent break out between weeks and week-of-year #4910"
(mt/test-drivers (mt/normal-drivers)
;; 2019-01-01 is a Tuesday, so set start-of-week to Tuesday so that the
;; breakout by week-of-year has the 1st week of the year as its first row
(mt/with-temporary-setting-values [start-of-week :tuesday]
(mt/dataset sample-dataset
(letfn [(test-break-out [unit]
(->> (mt/mbql-query orders
{:filter [:between $created_at "2019-01-01" "2019-12-31"]
:breakout [:field $created_at {:temporal-unit unit}]
:aggregation [[:count]]})
mt/process-query
(mt/formatted-rows [fmt-str-or-int int])))]
(testing "count result should be the same between week and week-of-year"
(is (= (map second (test-break-out :week))
(map second (test-break-out :week-of-year))))
(is (= [127 124 136]
(->> (test-break-out :week)
(map second)
(take 3)))))
(testing "make sure all drivers returns the same week column"
(is (= (case driver/*driver*
:sqlite ["2019-01-01 00:00:00" "2019-01-08 00:00:00" "2019-01-15 00:00:00"]
["2019-01-01T00:00:00Z" "2019-01-08T00:00:00Z" "2019-01-15T00:00:00Z"])
(->> (test-break-out :week)
(map first)
(take 3)))))
(testing "make sure all drivers returns the same week-of-year column"
(is (= [1 2 3]
(->> (test-break-out :week-of-year)
(map first)
(take 3)))))))))))
;; All of the sad toucan events in the test data fit in June. The results are the same on all databases and the only
;; difference is how the beginning of the month is represented, since we always return times with our dates
(deftest group-by-month-test
(mt/test-drivers (mt/normal-drivers)
(testing "\nPacific timezone"
(is (= [[(cond
(= :sqlite driver/*driver*)
"2015-06-01"
(qp.test/supports-report-timezone? driver/*driver*)
"2015-06-01T00:00:00-07:00"
:else
"2015-06-01T00:00:00Z")
200]]
(sad-toucan-incidents-with-bucketing :month :pacific))))
(testing "\nEastern timezone"
(mt/test-drivers (mt/normal-drivers)
(is (= [[(cond
(= :sqlite driver/*driver*)
"2015-06-01"
(qp.test/supports-report-timezone? driver/*driver*)
"2015-06-01T00:00:00-04:00"
:else
"2015-06-01T00:00:00Z")
200]]
(sad-toucan-incidents-with-bucketing :month :eastern)))))))
(deftest group-by-month-of-year-test
(mt/test-drivers (mt/normal-drivers)
(is (= [[6 200]]
(sad-toucan-incidents-with-bucketing :month-of-year :pacific)))))
(deftest group-by-quarter-test
(mt/test-drivers (mt/normal-drivers)
(testing "\nPacific timezone"
(is (= [[(cond (= :sqlite driver/*driver*)
"2015-04-01"
(qp.test/supports-report-timezone? driver/*driver*)
"2015-04-01T00:00:00-07:00"
:else
"2015-04-01T00:00:00Z")
200]]
(sad-toucan-incidents-with-bucketing :quarter :pacific))))
(testing "\nEastern timezone"
(is (= [[(cond (= :sqlite driver/*driver*)
"2015-04-01"
(qp.test/supports-report-timezone? driver/*driver*)
"2015-04-01T00:00:00-04:00"
:else
"2015-04-01T00:00:00Z")
200]]
(sad-toucan-incidents-with-bucketing :quarter :eastern))))))
(deftest group-by-quarter-of-year-test
(mt/test-drivers (mt/normal-drivers)
(is (= [[2 200]]
(sad-toucan-incidents-with-bucketing :quarter-of-year :pacific)))
(is (= [[1 200]
[2 284]
[3 278]
[4 238]]
(mt/formatted-rows [int int]
(mt/run-mbql-query checkins
{:aggregation [[:count]]
:breakout [!quarter-of-year.date]}))))))
(deftest group-by-year-test
(mt/test-drivers (mt/normal-drivers)
(is (= [[(cond
(= :sqlite driver/*driver*)
"2015-01-01"
(qp.test/supports-report-timezone? driver/*driver*)
"2015-01-01T00:00:00-08:00"
:else
"2015-01-01T00:00:00Z")
200]]
(sad-toucan-incidents-with-bucketing :year :pacific)))
(is (= (if (= :sqlite driver/*driver*)
[["2013-01-01" 235]
["2014-01-01" 498]
["2015-01-01" 267]]
[["2013-01-01T00:00:00Z" 235]
["2014-01-01T00:00:00Z" 498]
["2015-01-01T00:00:00Z" 267]])
(mt/formatted-rows [str int]
(mt/run-mbql-query checkins
{:aggregation [[:count]]
:breakout [!year.date]}))))))
;; RELATIVE DATES
(p.types/deftype+ ^:private TimestampDatasetDef [intervalSeconds]
pretty/PrettyPrintable
(pretty [_]
(list 'TimestampDatasetDef. intervalSeconds)))
(defn- driver->current-datetime-base-type
"Returns the :base-type of the \"current timestamp\" HoneySQL form defined by the driver `d`. Relies upon the driver
implementation having set that explicitly via `hx/with-type-info`. Returns `nil` if it can't be determined."
[d]
(when (isa? driver/hierarchy driver/*driver* :sql)
(let [db-type (-> (sql.qp/current-datetime-honeysql-form d)
hx/type-info
hx/type-info->db-type)]
(when-not (str/blank? db-type)
(sql-jdbc.sync/database-type->base-type d db-type)))))
(defmethod mt/get-dataset-definition TimestampDatasetDef
[^TimestampDatasetDef this]
(let [interval-seconds (.intervalSeconds this)]
(mt/dataset-definition
(str "checkins_interval_" interval-seconds)
["checkins"
[{:field-name "timestamp"
:base-type (or (driver->current-datetime-base-type driver/*driver*) :type/DateTime)}]
(mapv (fn [i]
;; TIMESTAMP FIXME -- not sure if still needed
;;
;; Create timestamps using relative dates (e.g. `DATEADD(second, -195, GETUTCDATE())` instead of
;; generating Java classes here so they'll be in the DB's native timezone. Some DBs refuse to use
;; the same timezone we're running the tests from *cough* SQL Server *cough*
[(u/prog1 (if (and (isa? driver/hierarchy driver/*driver* :sql)
;; BigQuery/Vertica don't insert rows using SQL statements
;;
;; TODO -- make `insert-rows-using-statements?` a multimethod so we don't need to
;; hardcode the whitelist here.
(not (#{:vertica :bigquery-cloud-sdk} driver/*driver*)))
(binding [hx/*honey-sql-version* (sql.qp/honey-sql-version driver/*driver*)]
(sql.qp/compiled
(sql.qp/add-interval-honeysql-form driver/*driver*
(sql.qp/current-datetime-honeysql-form driver/*driver*)
(* i interval-seconds)
:second)))
(u.date/add :second (* i interval-seconds)))
(assert <>))])
(range -15 15))])))
(defn- dataset-def-with-timestamps [interval-seconds]
(TimestampDatasetDef. interval-seconds))
(def ^:private checkins:4-per-minute
"Dynamically generated dataset with 30 checkins spaced 15 seconds apart, from 3 mins 45 seconds ago to 3 minutes 30
seconds in the future."
(dataset-def-with-timestamps 15))
(def ^:private checkins:4-per-hour
"Dynamically generated dataset with 30 checkins spaced 15 minutes apart, from 3 hours 45 minutes ago to 3 hours 30
minutes in the future."
(dataset-def-with-timestamps (u/minutes->seconds 15)))
(def ^:private checkins:1-per-day
"Dynamically generated dataset with 30 checkins spaced 24 hours apart, from 15 days ago to 14 days in the future."
(dataset-def-with-timestamps (* 24 (u/minutes->seconds 60))))
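;; Illustrative sketch (not part of the original suite): defining another dynamically generated dataset with
;; a different spacing. `checkins:2-per-hour` is a hypothetical name used only for this example; any interval
;; in seconds works the same way as the three definitions above.
(comment
  (def checkins:2-per-hour
    "Hypothetical dataset with 30 checkins spaced 30 minutes apart."
    (dataset-def-with-timestamps (u/minutes->seconds 30)))
  ;; it would then be used like the other defs, e.g. (mt/dataset checkins:2-per-hour ...)
  )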
(defn- checkins-db-is-old?
"Determine whether we need to recreate one of the dynamically-generated datasets above, if the data has grown a little
stale."
[max-age-seconds]
(u.date/greater-than-period-duration? (u.date/period-duration (:created_at (mt/db)) (t/zoned-date-time))
(t/seconds max-age-seconds)))
(def ^:private ^:dynamic *recreate-db-if-stale?* true)
(defn- count-of-grouping [^TimestampDatasetDef dataset field-grouping & relative-datetime-args]
(mt/dataset dataset
;; The DB has values in the range of now() - (interval-seconds * 15) and now() + (interval-seconds * 15). So if it
;; was created more than (interval-seconds * 5) seconds ago, delete the Database and recreate it to make sure
;; the tests pass.
;;
;; TODO - perhaps this should be rolled into `mt/dataset` itself -- it seems like a useful feature?
(if (and (checkins-db-is-old? (* (.intervalSeconds dataset) 5)) *recreate-db-if-stale?*)
(binding [*recreate-db-if-stale?* false]
(log/infof "DB for %s is stale! Deleting and running test again\n" dataset)
(db/delete! Database :id (mt/id))
(apply count-of-grouping dataset field-grouping relative-datetime-args))
(let [results (mt/run-mbql-query checkins
{:aggregation [[:count]]
:filter [:=
[:field %timestamp {:temporal-unit field-grouping}]
(cons :relative-datetime relative-datetime-args)]})]
(or (some-> results mt/first-row first int)
results)))))
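;; Illustrative usage sketch (not part of the original tests): `count-of-grouping` filters one of the dynamic
;; datasets above with [:= [:field %timestamp {:temporal-unit ...}] [:relative-datetime ...]] and returns the
;; count (or the raw results on failure). The values in the comments are what the tests below assert, not
;; guaranteed output of running this snippet in isolation.
(comment
  (count-of-grouping checkins:4-per-minute :minute :current)   ;; tests below expect 4
  (count-of-grouping checkins:4-per-minute :minute -1 :minute) ;; tests below expect 4
  (count-of-grouping checkins:1-per-day :week :current)        ;; tests below expect 7
  )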
;; HACK - Don't run these tests against Snowflake/etc. because the databases need to be loaded every time the tests are
;; run, and loading data into these DBs is mind-bogglingly slow. This also applies to Athena for now, because
;; deleting data is not easy.
;;
;; Don't run the minute tests against Oracle because the Oracle tests are kind of slow and cause CI to fail randomly
;; when it takes so long to load the data that the times are no longer current (these tests pass locally if your
;; machine isn't as slow as the CircleCI ones)
(deftest count-of-grouping-test
(mt/test-drivers (mt/normal-drivers-except #{:snowflake :athena})
(testing "4 checkins per minute dataset"
(testing "group by minute"
(doseq [args [[:current] [-1 :minute] [1 :minute]]]
(is (= 4
(apply count-of-grouping checkins:4-per-minute :minute args))
(format "filter by minute = %s" (into [:relative-datetime] args)))))))
(mt/test-drivers (mt/normal-drivers-except #{:snowflake :athena})
(testing "4 checkins per hour dataset"
(testing "group by hour"
(doseq [args [[:current] [-1 :hour] [1 :hour]]]
(is (= 4
(apply count-of-grouping checkins:4-per-hour :hour args))
(format "filter by hour = %s" (into [:relative-datetime] args))))))
(testing "1 checkin per day dataset"
(testing "group by day"
(doseq [args [[:current] [-1 :day] [1 :day]]]
(is (= 1
(apply count-of-grouping checkins:1-per-day :day args))
(format "filter by day = %s" (into [:relative-datetime] args)))))
(testing "group by week"
(is (= 7
(count-of-grouping checkins:1-per-day :week :current))
"filter by week = [:relative-datetime :current]")))))
(deftest time-interval-test
(mt/test-drivers (mt/normal-drivers-except #{:snowflake :athena})
(testing "Syntactic sugar (`:time-interval` clause)"
(mt/dataset checkins:1-per-day
(is (= 1
(ffirst
(mt/formatted-rows [int]
(mt/run-mbql-query checkins
{:aggregation [[:count]]
:filter [:time-interval $timestamp :current :day]})))))
(is (= 7
(ffirst
(mt/formatted-rows [int]
(mt/run-mbql-query checkins
{:aggregation [[:count]]
:filter [:time-interval $timestamp :last :week]})))))))))
;; Make sure that when referencing the same field multiple times with different units we return the one that actually
;; reflects the units the results are in, e.g. when we breakout by one unit and filter by another, make sure the results
;; and the col info use the unit used by breakout
(defn- date-bucketing-unit-when-you [& {:keys [breakout-by filter-by with-interval]
:or {with-interval :current}}]
(let [results (mt/dataset checkins:1-per-day
(mt/run-mbql-query checkins
{:aggregation [[:count]]
:breakout [[:field %timestamp {:temporal-unit breakout-by}]]
:filter [:time-interval $timestamp with-interval filter-by]}))]
{:rows (or (-> results :row_count)
(throw (ex-info "Query failed!" results)))
:unit (-> results :data :cols first :unit)}))
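;; Illustrative usage sketch (not part of the original tests): the helper above returns both the row count and
;; the unit reported in the column metadata, which is how the assertions below check that the breakout unit
;; wins over the filter unit. The expected maps in the comments simply mirror those assertions.
(comment
  (date-bucketing-unit-when-you :breakout-by "day", :filter-by "week")
  ;; the test below expects {:rows 7, :unit :day}
  (date-bucketing-unit-when-you :breakout-by "week", :filter-by "day")
  ;; the test below expects {:rows 1, :unit :week}
  )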
(deftest date-bucketing-when-you-test
(mt/test-drivers (mt/normal-drivers-except #{:snowflake :athena})
(is (= {:rows 1, :unit :day}
(date-bucketing-unit-when-you :breakout-by "day", :filter-by "day")))
(is (= {:rows 7, :unit :day}
(date-bucketing-unit-when-you :breakout-by "day", :filter-by "week")))
(is (= {:rows 1, :unit :week}
(date-bucketing-unit-when-you :breakout-by "week", :filter-by "day")))
(is (= {:rows 1, :unit :quarter}
(date-bucketing-unit-when-you :breakout-by "quarter", :filter-by "day")))
(is (= {:rows 1, :unit :hour}
(date-bucketing-unit-when-you :breakout-by "hour", :filter-by "day")))
;; make sure if you use a relative date bucket in the past (e.g. "past 2 months") you get the correct amount of rows
;; (#3910)
(is (= {:rows 2, :unit :day}
(date-bucketing-unit-when-you :breakout-by "day", :filter-by "day", :with-interval -2)))
(is (= {:rows 2, :unit :day}
(date-bucketing-unit-when-you :breakout-by "day", :filter-by "day", :with-interval 2)))))
;; Filtering by an unbucketed datetime Field should automatically bucket that Field by day if not already done (#8927)
;;
;; This should only apply when comparing to `yyyy-MM-dd` date strings.
;;
;; e.g. `[:= <field> "2018-11-19"]` should get rewritten as `[:= [:field <field> {:temporal-unit :day}] "2018-11-19"]` if
;; `<field>` is a `:type/DateTime` Field
;;
;; We should get count = 1 for the current day, as opposed to count = 0 if we weren't auto-bucketing
;; (e.g. 2018-11-19T00:00 != 2018-11-19T12:37 or whatever time the checkin is at)
(deftest default-bucketing-test
(mt/test-drivers (mt/normal-drivers-except #{:snowflake :athena})
(mt/dataset checkins:1-per-day
(is (= [[1]]
(mt/formatted-rows [int]
(mt/run-mbql-query checkins
{:aggregation [[:count]]
:filter [:= [:field $timestamp nil] (t/format "yyyy-MM-dd" (u.date/truncate :day))]}))))))
;; this is basically the same test as above, but using the office-checkins dataset instead of the dynamically
;; created checkins DBs so we can run it against Snowflake as well.
(mt/test-drivers (mt/normal-drivers)
(mt/dataset office-checkins
(is (= [[1]]
(mt/formatted-rows [int]
(mt/run-mbql-query checkins
{:aggregation [[:count]]
:filter [:= [:field $timestamp nil] "2019-01-16"]}))))
(testing "Check that automatic bucketing still happens when using compound filter clauses (#9127)"
(is (= [[1]]
(mt/formatted-rows [int]
(mt/run-mbql-query checkins
{:aggregation [[:count]]
:filter [:and
[:= [:field $timestamp nil] "2019-01-16"]
[:= [:field $id nil] 6]]})))))))
(mt/test-drivers (mt/normal-drivers-except #{:snowflake :athena})
(testing "if datetime string is not yyyy-MM-dd no date bucketing should take place, and thus we should get no (exact) matches"
(mt/dataset checkins:1-per-day
(is (=
;; Mongo returns an empty row for count = 0. We should fix that (#5419)
(case driver/*driver*
:mongo []
[[0]])
(mt/formatted-rows [int]
(mt/run-mbql-query checkins
{:aggregation [[:count]]
:filter [:= [:field $timestamp nil] (str (t/format "yyyy-MM-dd" (u.date/truncate :day))
"T14:16:00Z")]}))))))))
(def ^:private addition-unit-filtering-vals
[[3 :day "2014-03-03"]
[135 :day-of-week 1]
[36 :day-of-month 1]
[9 :day-of-year 214]
[11 :week "2014-03-03"]
[7 :week-of-year 2]
[48 :month "2014-03"]
[38 :month-of-year 1]
[107 :quarter "2014-01"]
[200 :quarter-of-year 1]
[498 :year "2014"]])
(defn- count-of-checkins [unit filter-value]
(ffirst
(mt/formatted-rows [int]
(mt/run-mbql-query checkins
{:aggregation [[:count]]
:filter [:= [:field %date {:temporal-unit unit}] filter-value]}))))
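;; Illustrative usage sketch (not part of the original tests): each entry of `addition-unit-filtering-vals`
;; pairs an expected count with a unit and a filter value, and `count-of-checkins` runs the corresponding
;; filter against the checkins table. The counts in the comments come from that table above.
(comment
  (count-of-checkins :day "2014-03-03")  ;; table above expects 3
  (count-of-checkins :quarter-of-year 1) ;; table above expects 200
  )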
(deftest additional-unit-filtering-tests
(testing "Additional tests for filtering against various datetime bucketing units that aren't tested above"
(mt/with-temporary-setting-values [start-of-week :sunday]
(mt/test-drivers (mt/normal-drivers)
(doseq [[expected-count unit filter-value] addition-unit-filtering-vals]
tz [nil "UTC"]] ;; iterate on at least two report time zones to suss out bugs related to that
(mt/with-temporary-setting-values [report-timezone tz]
(testing (format "\nunit = %s" unit)
(is (= expected-count (count-of-checkins unit filter-value))
(format
"count of rows where (= (%s date) %s) should be %d"
(name unit)
filter-value
expected-count))))))))))
(deftest legacy-default-datetime-bucketing-test
(testing (str ":type/Date or :type/DateTime fields that don't have `:temporal-unit` clauses should get default `:day` "
"bucketing for legacy reasons. See #9014")
(is (= (str "SELECT COUNT(*) AS \"count\" "
"FROM \"PUBLIC\".\"CHECKINS\" "
"WHERE ("
"\"PUBLIC\".\"CHECKINS\".\"DATE\" >= CAST(NOW() AS date)) "
"AND "
"(\"PUBLIC\".\"CHECKINS\".\"DATE\" < CAST(DATEADD('day', CAST(1 AS long), CAST(NOW() AS datetime)) AS date)"
")")
(:query
(qp/compile
(mt/mbql-query checkins
{:aggregation [[:count]]
:filter [:= $date [:relative-datetime :current]]})))))))
(deftest compile-time-interval-test
(testing "Make sure time-intervals work the way they're supposed to."
(testing "[:time-interval $date -4 :month] should give us something like Oct 01 2020 - Feb 01 2021 if today is Feb 17 2021"
(is (= (str "SELECT CHECKINS.DATE AS DATE "
"FROM CHECKINS "
"WHERE ("
"CHECKINS.DATE >= DATE_TRUNC('month', DATEADD('month', CAST(-4 AS long), CAST(NOW() AS datetime))))"
" AND "
"(CHECKINS.DATE < DATE_TRUNC('month', NOW())) "
"GROUP BY CHECKINS.DATE "
"ORDER BY CHECKINS.DATE ASC "
"LIMIT 1048575")
(sql.qp-test-util/pretty-sql
(:query
(qp/compile
(mt/mbql-query checkins
{:filter [:time-interval $date -4 :month]
:breakout [!day.date]})))))))))
(deftest field-filter-start-of-week-test
(testing "Field Filters with relative date ranges should respect the custom start of week setting (#14294)"
(mt/dataset checkins:1-per-day
(let [query (mt/native-query {:query (str "SELECT dayname(\"TIMESTAMP\") as \"day\" "
"FROM checkins "
"[[WHERE {{date_range}}]] "
"ORDER BY \"TIMESTAMP\" ASC "
"LIMIT 1")
:template-tags {"date_range"
{:name "date_range"
:display-name "Date Range"
:type :dimension
:widget-type :date/all-options
:dimension (mt/$ids $checkins.timestamp)}}
:parameters [{:type :date/range
:name "created_at"
:target [:dimension [:template-tag "date_range"]]
:value "past1weeks"}]})]
(doseq [[first-day-of-week expected] {"sunday" ["Sunday"]
"monday" ["Monday"]
"tuesday" ["Tuesday"]
"wednesday" ["Wednesday"]
"thursday" ["Thursday"]
"friday" ["Friday"]
"saturday" ["Saturday"]}]
(mt/with-temporary-setting-values [start-of-week first-day-of-week]
(is (= expected
(mt/first-row
(qp/process-query query))))))))))
(deftest day-of-week-custom-start-of-week-test
(mt/test-drivers (mt/normal-drivers)
(testing "`:day-of-week` bucketing should respect the `start-of-week` Setting (#13604)"
(testing "filter by `:day-of-week` should work correctly (#15044)"
(doseq [[day [thursday-day-of-week saturday-day-of-week]] {:sunday [5 7]
:monday [4 6]
:tuesday [3 5]}]
(mt/with-temporary-setting-values [start-of-week day]
(is (= (sort-by
first
[[thursday-day-of-week 2]
[saturday-day-of-week 1]])
(mt/formatted-rows [int int]
(mt/run-mbql-query checkins
{:aggregation [[:count]]
:breakout [!day-of-week.date]
:filter [:between $date "2013-01-03" "2013-01-20"]}))))))))))
(deftest first-day-of-week-for-day-of-week-bucketing-test
(testing "First day of week for `:day-of-week` bucketing should be consistent (#17801)"
(mt/test-drivers (mt/normal-drivers-with-feature :basic-aggregations)
(let [query (mt/mbql-query checkins
{:aggregation [[:count]]
:breakout [!day-of-week.date]})]
(doseq [[first-day-of-week expected-rows] {:sunday [[1 135] [2 143] [3 153] [4 136] [5 139] [6 160] [7 134]]
:monday [[1 143] [2 153] [3 136] [4 139] [5 160] [6 134] [7 135]]
:tuesday [[1 153] [2 136] [3 139] [4 160] [5 134] [6 135] [7 143]]
:wednesday [[1 136] [2 139] [3 160] [4 134] [5 135] [6 143] [7 153]]
:thursday [[1 139] [2 160] [3 134] [4 135] [5 143] [6 153] [7 136]]
:friday [[1 160] [2 134] [3 135] [4 143] [5 153] [6 136] [7 139]]
:saturday [[1 134] [2 135] [3 143] [4 153] [5 136] [6 139] [7 160]]}]
(mt/with-temporary-setting-values [start-of-week first-day-of-week]
(mt/with-native-query-testing-context query
(is (= expected-rows
(mt/formatted-rows [int int] (qp/process-query query)))))))))))
(deftest filter-by-current-quarter-test
;; Oracle doesn't work on March 31st because March 31st + 3 months = June 31st, which doesn't exist. See #10072
(mt/test-drivers (disj (mt/normal-drivers) :oracle)
(testing "Should be able to filter by current quarter (#20683)"
(let [query (mt/mbql-query checkins
{:aggregation [[:count]]
:filter [:= !quarter.date [:relative-datetime :now]]})]
(mt/with-native-query-testing-context query
;; this isn't expected to return anything; for now it's enough just to make sure that the query doesn't fail.
(is (=
;; Mongo returns an empty row for count = 0. We should fix that (#5419)
(case driver/*driver*
:mongo []
[[0]])
(mt/formatted-rows [int] (qp/process-query query)))))))))
;; TODO -- is this really date BUCKETING? Does this BELONG HERE?!
(deftest june-31st-test
(testing "What happens when you try to add 3 months to March 31st? It should still work (#10072, #21968, #21969)"
(mt/with-temporary-setting-values [report-timezone "UTC"]
;; only testing the SQL drivers for now since I'm not 100% sure how to mock this for everyone else. Maybe one day
;; when we support expressions like `+` for temporal types we can do an `:absolute-datetime` plus
;; `:relative-datetime` expression and do this directly in MBQL.
(mt/test-drivers (filter #(isa? driver/hierarchy (driver/the-initialized-driver %) :sql)
(mt/normal-drivers))
(doseq [[n unit] [[3 :month]
[1 :quarter]]
t [#t "2022-03-31"
#t "2022-03-31T00:00:00"
#t "2022-03-31T00:00:00-00:00"]]
(testing (format "%d %s ^%s %s" n unit (.getCanonicalName (class t)) (pr-str t))
(binding [hx/*honey-sql-version* (sql.qp/honey-sql-version driver/*driver*)]
(let [march-31 (sql.qp/->honeysql driver/*driver* [:absolute-datetime t :day])
june-31 (sql.qp/add-interval-honeysql-form driver/*driver* march-31 n unit)
checkins (mt/with-everything-store
(sql.qp/->honeysql driver/*driver* (db/select-one Table :id (mt/id :checkins))))
honeysql {:select [[june-31 :june_31]]
:from [(sql.qp/maybe-wrap-unaliased-expr checkins)]}
honeysql (sql.qp/apply-top-level-clause driver/*driver* :limit honeysql {:limit 1})
[sql & args] (sql.qp/format-honeysql driver/*driver* honeysql)
query (mt/native-query {:query sql, :params args})]
(mt/with-native-query-testing-context query
(is (re= (u.regex/rx #"^2022-"
;; We don't really care if someone returns June 29th or 30th or July 1st here. I
;; guess you could make a case for either June 30th or July 1st. I don't really know
;; how you can get June 29th from this, but that's what Vertica returns. :shrug: The
;; main thing here is that it's not barfing.
[:or [:and "06-" [:or "29" "30"]] "07-01"]
;; We also don't really care if this is returned as a date or a timestamp with or
;; without time zone.
[:? [:or "T" #"\s"] "00:00:00" [:? "Z"]])
(first (mt/first-row (qp/process-query query))))))))))))))
| null |
https://raw.githubusercontent.com/metabase/metabase/f827b28e5cd3a6eff8bce08780d48bb9b825a654/test/metabase/query_processor_test/date_bucketing_test.clj
|
clojure
|
TIMEZONE FIXME -- currently broken for Snowflake. UNIX timestamps are interpreted as being in the report timezone
rather than UTC.
re-parse them or do anything smart with them; we just return them directly. This is less than ideal.
TIMEZONE FIXME
When the reporting timezone is applied, the same datetime value is returned, but set in the pacific
timezone
setting Our tests force UTC time, so this should always be UTC
These databases are always in UTC so aren't impacted by changes in report-timezone
The time instant is the same as UTC (or pacific) but should be offset by the eastern timezone
The change in report timezone has no effect on this group
the JVM timezone (UTC on startup). When we change that timezone, it then assumes the data was also stored in that
timezone. This leads to incorrect results. In this example it applies the pacific offset twice
timezone
TIMEZONE FIXME
The JVM timezone should have no impact on results from a database that uses a report timezone
For this test, the results are the same for each database, but the formatting of the time for that given count is
different depending on whether the database supports a report timezone and what timezone that database is in
hours back of UTC at that time)
This test uses H2 (in UTC) to determine the difference in number of events in UTC time vs pacific time. It does
useful for debugging to answer why row counts change when the timezone shifts by removing timezones and the related
database settings
This tests out the JVM timezone's impact on the results. For databases supporting a report timezone, this should
H2 doesn't support us switching timezones after the dates have been stored. This causes H2 to (incorrectly) apply
the timezone shift twice, so instead of -07:00 it will become -14:00. Leaving out the test rather than validate
wrong results.
timezone
TIMEZONE FIXME
This test helps in debugging why event counts change with a given timezone. It queries only a UTC H2 database to
find how those counts would change if time was in pacific time. The results of this test are also in the UTC test
above and pacific test below, but this is still useful for debugging as it doesn't involve changing timezones or
database settings
earlier
TODO - these results are the same as the `:else` results
RELATIVE DATES
TIMESTAMP FIXME -- not sure if still needed
Create timestamps using relative dates (e.g. `DATEADD(second, -195, GETUTCDATE())` instead of
the same timezone we're running the tests from *cough* SQL Server *cough*
BigQuery/Vertica don't insert rows using SQL statements
hardcode the whitelist here.
the tests pass.
TODO - perhaps this should be rolled into `mt/dataset` itself -- it seems like a useful feature?
HACK - Don't run these tests against Snowflake/etc. because the databases need to be loaded every time the tests are
deleting data is not easy.
when it takes so long to load the data that the times are no longer current (these tests pass locally if your
machine isn't as slow as the CircleCI ones)
Make sure that when referencing the same field multiple times with different units we return the one that actually
and the col info use the unit used by breakout
(#3910)
`<field>` is a `:type/DateTime` Field
this is basically the same test as above, but using the office-checkins dataset instead of the dynamically
created checkins DBs so we can run it against Snowflake as well.
this isn't expected to return anything; for now it's enough just to make sure that the query doesn't fail.
when we support expressions like `+` for temporal types we can do an `:absolute-datetime` plus
`:relative-datetime` expression and do this directly in MBQL.
main thing here is that it's not barfing.
We also don't really care if this is returned as a date or a timestamp with or
without time zone.
|
(ns metabase.query-processor-test.date-bucketing-test
"The below tests cover the various date bucketing/grouping scenarios that we support. There are are always two
timezones in play when querying using these date bucketing features. The most visible is how timestamps are returned
to the user. With no report timezone specified, the JVM's timezone is used to represent the timestamps regardless of
timezone of the database. Specifying a report timezone (if the database supports it) will return the timestamps in
that timezone (manifesting itself as an offset for that time). Using the JVM timezone that doesn't match the
database timezone (assuming the database doesn't support a report timezone) can lead to incorrect results.
The second place timezones can impact this is calculations in the database. A good example of this is grouping
something by day. In that case, the start (or end) of the day will be different depending on what timezone the
database is in. The start of the day in pacific time is 7 (or 8) hours earlier than UTC. This means there might be a
different number of results depending on what timezone we're in. Report timezone lets the user specify that, and it
gets pushed into the database so calculations are made in that timezone.
If a report timezone is specified and the database supports it, the JVM timezone should have no impact on queries or
their results."
(:require
[clojure.string :as str]
[clojure.test :refer :all]
[java-time :as t]
[metabase.driver :as driver]
[metabase.driver.sql-jdbc.sync :as sql-jdbc.sync]
[metabase.driver.sql.query-processor :as sql.qp]
[metabase.driver.sql.query-processor-test-util :as sql.qp-test-util]
[metabase.models.database :refer [Database]]
[metabase.models.table :refer [Table]]
[metabase.query-processor :as qp]
[metabase.query-processor-test :as qp.test]
[metabase.query-processor.middleware.format-rows :as format-rows]
[metabase.test :as mt]
[metabase.util :as u]
[metabase.util.date-2 :as u.date]
[metabase.util.honeysql-extensions :as hx]
[metabase.util.log :as log]
[metabase.util.regex :as u.regex]
[potemkin.types :as p.types]
[pretty.core :as pretty]
[toucan.db :as db])
(:import [java.time LocalDate LocalDateTime]))
(set! *warn-on-reflection* true)
(defn- ->long-if-number [x]
(if (number? x)
(long x)
x))
(def ^:private timezone
{:utc "UTC"
:pacific "America/Los_Angeles"
:eastern "America/New_York"})
(defn- ->timezone-id ^String [x]
(if (keyword? x)
(get timezone x)
x))
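;; Illustrative sketch (not part of the original tests): `->timezone-id` turns the keywords used throughout
;; this namespace into real zone-id strings, passing strings through untouched.
(comment
  (->timezone-id :pacific) ;; => "America/Los_Angeles"
  (->timezone-id "UTC")    ;; => "UTC"
  )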
(deftest sanity-check-test
(mt/test-drivers (disj (mt/normal-drivers) :snowflake :redshift)
(testing "\nRegardless of report timezone, UNIX timestamps should always be interpreted a being in UTC."
(let [utc-results [[1 "2015-06-06T10:40:00Z" 4]
[2 "2015-06-10T19:51:00Z" 0]
[3 "2015-06-09T15:42:00Z" 5]
[4 "2015-06-22T23:49:00Z" 3]
[5 "2015-06-20T01:45:00Z" 3]]]
(doseq [timezone [:pacific :utc :eastern]]
(testing "\nResults should be returned in report timezone, if supported by driver."
(testing (format "\ntimezone = %s" timezone)
(let [local-results (cond
(= driver/*driver* :sqlite)
(for [[id s cnt] utc-results]
[id (u.date/format-sql (t/local-date-time (u.date/parse s))) cnt])
(or (= timezone :utc)
(not (driver/supports? driver/*driver* :set-timezone)))
utc-results
:else
(for [[id s cnt] utc-results]
(let [zone-id (t/zone-id (->timezone-id timezone))
t (t/offset-date-time (t/with-zone-same-instant (u.date/parse s) zone-id))
s (t/format :iso-offset-date-time t)]
[id s cnt])))]
(mt/with-report-timezone-id (->timezone-id timezone)
(mt/dataset sad-toucan-incidents
(is (= local-results
(mt/formatted-rows [int identity int]
(mt/run-mbql-query incidents
{:fields [$id $timestamp $severity]
:order-by [[:asc $id]]
:limit 5}))))))))))))))
(defn- sad-toucan-incidents-with-bucketing
"Returns 10 sad toucan incidents grouped by `unit`"
([unit]
(->> (mt/dataset sad-toucan-incidents
(mt/run-mbql-query incidents
{:aggregation [[:count]]
:breakout [[:field %timestamp {:temporal-unit unit}]]
:limit 10}))
mt/rows
(mt/format-rows-by [->long-if-number int])))
([unit timezone-id]
(mt/initialize-if-needed! :db)
(mt/with-report-timezone-id (->timezone-id timezone-id)
(sad-toucan-incidents-with-bucketing unit))))
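;; Illustrative usage sketch (not part of the original tests): the helper above is what every grouping test
;; below calls. The 1-arity version uses whatever report timezone is already in effect; the 2-arity version
;; wraps the query in `mt/with-report-timezone-id`. Actual rows depend on the driver and timezone under test.
(comment
  (sad-toucan-incidents-with-bucketing :day)          ;; current report timezone
  (sad-toucan-incidents-with-bucketing :day :pacific) ;; report timezone forced to America/Los_Angeles
  )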
(defn- default-timezone-parse-fn
"Create a date formatter, interpretting the datestring as being in `tz`"
[default-timezone-id]
(let [timezone-id (->timezone-id default-timezone-id)]
(fn [s]
(u.date/parse s timezone-id))))
(defn- format-in-timezone-fn
"Create a formatter for converting a date to `tz` and in the format that the query processor would return"
[results-timezone-id]
(let [zone-id (-> results-timezone-id ->timezone-id t/zone-id)]
(fn [t]
(format-rows/format-value t zone-id))))
(defn- date-without-time-format-fn
"sqlite returns dates that do not include their time, this formatter is useful for those DBs"
[t]
(condp instance? t
LocalDate (t/format :iso-local-date t)
LocalDateTime (t/format :iso-local-date t)
(t/format :iso-offset-date t)))
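;; Illustrative sketch (not part of the original tests): the three helpers above are combined by the grouping
;; tests to build expected rows -- parse a literal in some assumed zone, then render it the way the query
;; processor would for the results timezone. The sample values here are only meant to show the shapes involved.
(comment
  ((default-timezone-parse-fn :pacific) "2015-06-01T10:31:00.000")            ;; parsed as Pacific time
  ((format-in-timezone-fn :utc) (u.date/parse "2015-06-01T10:31:00.000Z"))    ;; rendered for UTC results
  (date-without-time-format-fn (t/local-date 2015 6 1))                       ;; => "2015-06-01" (SQLite-style, date only)
  )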
(def ^:private sad-toucan-dates
"This is the first 10 sad toucan dates when converted from millis since epoch in the UTC timezone. The timezone is
left off of the timezone string so that we can emulate how certain conversions work in the code today. As an
example, the UTC dates in Oracle are interpreted as the reporting timezone when they're UTC"
["2015-06-01T10:31:00.000"
"2015-06-01T16:06:00.000"
"2015-06-01T17:23:00.000"
"2015-06-01T18:55:00.000"
"2015-06-01T21:04:00.000"
"2015-06-01T21:19:00.000"
"2015-06-02T02:13:00.000"
"2015-06-02T05:37:00.000"
"2015-06-02T08:20:00.000"
"2015-06-02T11:11:00.000"])
(defn- sad-toucan-result
"Creates a sad toucan result set by parsing literal strings with `parse-fn` and formatting then in results with
`format-result-fn`."
([parse-fn format-result-fn]
(sad-toucan-result parse-fn format-result-fn sad-toucan-dates))
([parse-fn format-result-fn temporal-literal-strs]
(for [s temporal-literal-strs]
[(-> s parse-fn format-result-fn) 1])))
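;; Illustrative sketch (not part of the original tests): `sad-toucan-result` pairs each parsed-and-formatted
;; literal with a count of 1, producing the expected rows used by the grouping tests below.
(comment
  (sad-toucan-result (default-timezone-parse-fn :utc) (format-in-timezone-fn :utc))
  ;; => ten pairs like ["2015-06-01T10:31:00Z" 1], one per entry of `sad-toucan-dates`
  )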
(deftest group-by-default-test
(mt/test-drivers (mt/normal-drivers)
(testing "\nPacific timezone"
(is (= (cond
;; Timezone is omitted by these databases
;; HACK - SQLite returns datetimes as strings, and we don't
;; re-parse them or do anything smart with them; we just return them directly. This is less than ideal.
(= :sqlite driver/*driver*)
[["2015-06-01 10:31:00" 1]
["2015-06-01 16:06:00" 1]
["2015-06-01 17:23:00" 1]
["2015-06-01 18:55:00" 1]
["2015-06-01 21:04:00" 1]
["2015-06-01 21:19:00" 1]
["2015-06-02 02:13:00" 1]
["2015-06-02 05:37:00" 1]
["2015-06-02 08:20:00" 1]
["2015-06-02 11:11:00" 1]]
;; There's a bug here where we are reading in the UTC time as pacific, so we're 7 hours off
;; (This is fixed for Oracle now)
(and (qp.test/tz-shifted-driver-bug? driver/*driver*) (not= driver/*driver* :oracle))
[["2015-06-01T10:31:00-07:00" 1]
["2015-06-01T16:06:00-07:00" 1]
["2015-06-01T17:23:00-07:00" 1]
["2015-06-01T18:55:00-07:00" 1]
["2015-06-01T21:04:00-07:00" 1]
["2015-06-01T21:19:00-07:00" 1]
["2015-06-02T02:13:00-07:00" 1]
["2015-06-02T05:37:00-07:00" 1]
["2015-06-02T08:20:00-07:00" 1]
["2015-06-02T11:11:00-07:00" 1]]
(qp.test/supports-report-timezone? driver/*driver*)
[["2015-06-01T03:31:00-07:00" 1]
["2015-06-01T09:06:00-07:00" 1]
["2015-06-01T10:23:00-07:00" 1]
["2015-06-01T11:55:00-07:00" 1]
["2015-06-01T14:04:00-07:00" 1]
["2015-06-01T14:19:00-07:00" 1]
["2015-06-01T19:13:00-07:00" 1]
["2015-06-01T22:37:00-07:00" 1]
["2015-06-02T01:20:00-07:00" 1]
["2015-06-02T04:11:00-07:00" 1]]
;; Databases that don't support report timezone will always return the time using the JVM's timezone
:else
[["2015-06-01T10:31:00Z" 1]
["2015-06-01T16:06:00Z" 1]
["2015-06-01T17:23:00Z" 1]
["2015-06-01T18:55:00Z" 1]
["2015-06-01T21:04:00Z" 1]
["2015-06-01T21:19:00Z" 1]
["2015-06-02T02:13:00Z" 1]
["2015-06-02T05:37:00Z" 1]
["2015-06-02T08:20:00Z" 1]
["2015-06-02T11:11:00Z" 1]])
(sad-toucan-incidents-with-bucketing :default :pacific))))
(testing "\nEastern timezone"
(is (= (cond
(= :sqlite driver/*driver*)
[["2015-06-01 10:31:00" 1]
["2015-06-01 16:06:00" 1]
["2015-06-01 17:23:00" 1]
["2015-06-01 18:55:00" 1]
["2015-06-01 21:04:00" 1]
["2015-06-01 21:19:00" 1]
["2015-06-02 02:13:00" 1]
["2015-06-02 05:37:00" 1]
["2015-06-02 08:20:00" 1]
["2015-06-02 11:11:00" 1]]
(and (qp.test/tz-shifted-driver-bug? driver/*driver*) (not= driver/*driver* :oracle))
[["2015-06-01T10:31:00-04:00" 1]
["2015-06-01T16:06:00-04:00" 1]
["2015-06-01T17:23:00-04:00" 1]
["2015-06-01T18:55:00-04:00" 1]
["2015-06-01T21:04:00-04:00" 1]
["2015-06-01T21:19:00-04:00" 1]
["2015-06-02T02:13:00-04:00" 1]
["2015-06-02T05:37:00-04:00" 1]
["2015-06-02T08:20:00-04:00" 1]
["2015-06-02T11:11:00-04:00" 1]]
(qp.test/supports-report-timezone? driver/*driver*)
[["2015-06-01T06:31:00-04:00" 1]
["2015-06-01T12:06:00-04:00" 1]
["2015-06-01T13:23:00-04:00" 1]
["2015-06-01T14:55:00-04:00" 1]
["2015-06-01T17:04:00-04:00" 1]
["2015-06-01T17:19:00-04:00" 1]
["2015-06-01T22:13:00-04:00" 1]
["2015-06-02T01:37:00-04:00" 1]
["2015-06-02T04:20:00-04:00" 1]
["2015-06-02T07:11:00-04:00" 1]]
:else
[["2015-06-01T10:31:00Z" 1]
["2015-06-01T16:06:00Z" 1]
["2015-06-01T17:23:00Z" 1]
["2015-06-01T18:55:00Z" 1]
["2015-06-01T21:04:00Z" 1]
["2015-06-01T21:19:00Z" 1]
["2015-06-02T02:13:00Z" 1]
["2015-06-02T05:37:00Z" 1]
["2015-06-02T08:20:00Z" 1]
["2015-06-02T11:11:00Z" 1]])
(sad-toucan-incidents-with-bucketing :default :eastern)))))
;; Changes the JVM timezone from UTC to Pacific; this test isn't run on H2 as the database stores its timezones in
;; the JVM timezone (UTC on startup).
;; The exclusions here are databases that give incorrect answers when the JVM timezone doesn't match the database's
;; timezone.
(mt/test-drivers (mt/normal-drivers-with-feature :test/jvm-timezone-setting)
(testing "Change JVM timezone from UTC to Pacific"
(is (= (cond
(= :sqlite driver/*driver*)
(sad-toucan-result (default-timezone-parse-fn :utc) (comp u.date/format-sql t/local-date-time))
(and (qp.test/tz-shifted-driver-bug? driver/*driver*) (not= driver/*driver* :oracle))
(sad-toucan-result (default-timezone-parse-fn :eastern) (format-in-timezone-fn :eastern))
(qp.test/supports-report-timezone? driver/*driver*)
(sad-toucan-result (default-timezone-parse-fn :utc) (format-in-timezone-fn :eastern))
:else
(sad-toucan-result (default-timezone-parse-fn :utc) (format-in-timezone-fn :pacific)))
(mt/with-system-timezone-id (timezone :pacific)
(sad-toucan-incidents-with-bucketing :default :eastern)))))))
(deftest group-by-minute-test
(testing "This dataset doesn't have multiple events in a minute, the results are the same as the default grouping"
(mt/test-drivers (mt/normal-drivers)
(is (= (cond
(= :sqlite driver/*driver*)
(sad-toucan-result (default-timezone-parse-fn :utc) (comp u.date/format-sql t/local-date-time))
(qp.test/tz-shifted-driver-bug? driver/*driver*)
(sad-toucan-result (default-timezone-parse-fn :pacific) (format-in-timezone-fn :pacific))
(qp.test/supports-report-timezone? driver/*driver*)
(sad-toucan-result (default-timezone-parse-fn :utc) (format-in-timezone-fn :pacific))
:else
(sad-toucan-result (default-timezone-parse-fn :utc) (format-in-timezone-fn :utc)))
(sad-toucan-incidents-with-bucketing :minute :pacific))))))
(deftest group-by-minute-of-hour-test
(testing "Grouping by minute of hour is not affected by timezones"
(mt/test-drivers (mt/normal-drivers)
(is (= [[0 5]
[1 4]
[2 2]
[3 4]
[4 4]
[5 3]
[6 5]
[7 1]
[8 1]
[9 1]]
(sad-toucan-incidents-with-bucketing :minute-of-hour :pacific))))))
(def ^:private sad-toucan-dates-grouped-by-hour
"This is the first 10 groupings of sad toucan dates at the same hour when converted from millis since epoch in the UTC
timezone. The timezone is left off of the timezone string so that we can emulate how certain conversions are broken
in the code today. As an example, the UTC dates in Oracle are interpreted as the reporting timezone when they're
UTC"
["2015-06-01T10:00:00"
"2015-06-01T16:00:00"
"2015-06-01T17:00:00"
"2015-06-01T18:00:00"
"2015-06-01T21:00:00"
"2015-06-02T02:00:00"
"2015-06-02T05:00:00"
"2015-06-02T08:00:00"
"2015-06-02T11:00:00"
"2015-06-02T13:00:00"])
(defn- results-by-hour [parse-fn format-result-fn]
(map
(fn [s cnt]
[(-> s parse-fn format-result-fn) cnt])
sad-toucan-dates-grouped-by-hour
[1 1 1 1 2 1 1 1 1 1]))
(deftest group-by-hour-test
(mt/test-drivers (mt/normal-drivers)
(is (= (cond
(= :sqlite driver/*driver*)
(results-by-hour (default-timezone-parse-fn :utc) (comp u.date/format-sql t/local-date-time))
(qp.test/tz-shifted-driver-bug? driver/*driver*)
(results-by-hour (default-timezone-parse-fn :pacific) (format-in-timezone-fn :pacific))
(qp.test/supports-report-timezone? driver/*driver*)
(results-by-hour (default-timezone-parse-fn :utc) (format-in-timezone-fn :pacific))
:else
(results-by-hour (default-timezone-parse-fn :utc) (format-in-timezone-fn :utc)))
(sad-toucan-incidents-with-bucketing :hour :pacific)))))
;; The counts are affected by timezone as the times are shifted back by 7 hours. These count changes can be validated
;; by matching the first three results of the pacific results to the last three of the UTC results (i.e. pacific is 7
;; hours back of UTC at that time)
(deftest group-by-hour-of-day-test
(mt/test-drivers (mt/normal-drivers)
(testing "results in pacific timezone"
(is (= (if (and (not (qp.test/tz-shifted-driver-bug? driver/*driver*))
(qp.test/supports-report-timezone? driver/*driver*))
[[0 8] [1 9] [2 7] [3 10] [4 10] [5 9] [6 6] [7 5] [8 7] [9 7]]
[[0 13] [1 8] [2 4] [3 7] [4 5] [5 13] [6 10] [7 8] [8 9] [9 7]])
(sad-toucan-incidents-with-bucketing :hour-of-day :pacific))))
(testing "results in UTC"
(is (= [[0 13] [1 8] [2 4] [3 7] [4 5] [5 13] [6 10] [7 8] [8 9] [9 7]]
(sad-toucan-incidents-with-bucketing :hour-of-day :utc))
"With all databases in UTC, the results should be the same for all DBs"))))
(defn- find-events-in-range
"Find the number of sad toucan events between `start-date-str` and `end-date-str`"
[start-date-str end-date-str]
(-> (mt/dataset sad-toucan-incidents
(mt/run-mbql-query incidents
{:aggregation [[:count]]
:breakout [!day.timestamp]
:filter [:between !default.timestamp start-date-str end-date-str]}))
mt/rows
first
second
(or 0)))
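;; Illustrative usage sketch (not part of the original tests): `find-events-in-range` counts sad toucan events
;; between two datetime strings, returning 0 when the filter matches nothing. The shift tests below call it
;; with pairs of UTC and zone-shifted boundaries to measure how many events cross a bucket edge.
(comment
  (find-events-in-range "2015-06-01" "2015-06-02")
  ;; => the number of events on June 1st (the UTC day groupings above list 6 incidents for that date)
  )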
;; This test uses H2 (in UTC) to determine the difference in number of events in UTC time vs pacific time. It does
;; this using the UTC dataset and some math to figure out, if our 24 hour window is shifted 7 hours back, how many
;; events we gain and lose. Although this test is technically covered by the other grouping-by-day tests, it's
;; useful for debugging to answer why row counts change when the timezone shifts by removing timezones and the related
;; database settings
(deftest new-events-after-timezone-shift-test
(driver/with-driver :h2
(doseq [[timezone-id expected-net-gains] {:pacific [2 -1 5 -5 2 0 -2 1 -1 1]
:eastern [1 -1 3 -3 3 -2 -1 0 1 1]}]
(testing (format "Timezone = %s" timezone-id)
(doseq [[i expected-net-gain] (map-indexed vector expected-net-gains)
:let [start (t/local-date 2015 6 (inc i))
end (t/plus start (t/days 1))
->tz #(t/zoned-date-time % (t/local-time 0) (t/zone-id (->timezone-id timezone-id)))
find-events-in-range (fn [x y]
(find-events-in-range (u.date/format x) (u.date/format y)))
num-events-gained (find-events-in-range end (->tz end))
num-events-lost (find-events-in-range start (->tz start))]]
(testing (format "events between %s and %s" start end)
(is (= expected-net-gain
(- num-events-gained num-events-lost))
(format "When shifting to %s timezone we should lose %d events and gain %d, for a net gain of %d"
timezone-id num-events-gained num-events-lost expected-net-gain))))))))
(def ^:private sad-toucan-events-grouped-by-day
["2015-06-01"
"2015-06-02"
"2015-06-03"
"2015-06-04"
"2015-06-05"
"2015-06-06"
"2015-06-07"
"2015-06-08"
"2015-06-09"
"2015-06-10"])
(defn- results-by-day [parse-fn format-result-fn counts]
(map
(fn [s cnt]
[(-> s parse-fn format-result-fn) cnt])
sad-toucan-events-grouped-by-day
counts))
(deftest group-by-day-test
(mt/test-drivers (mt/normal-drivers)
(testing "\nUTC timezone"
(is (= (if (= :sqlite driver/*driver*)
(results-by-day u.date/parse date-without-time-format-fn [6 10 4 9 9 8 8 9 7 9])
(results-by-day u.date/parse (format-in-timezone-fn :utc) [6 10 4 9 9 8 8 9 7 9]))
(sad-toucan-incidents-with-bucketing :day :utc))))
(testing "\nPacific timezone"
(is (= (cond
(= :sqlite driver/*driver*)
[["2015-06-01" 6]
["2015-06-02" 10]
["2015-06-03" 4]
["2015-06-04" 9]
["2015-06-05" 9]
["2015-06-06" 8]
["2015-06-07" 8]
["2015-06-08" 9]
["2015-06-09" 7]
["2015-06-10" 9]]
(qp.test/tz-shifted-driver-bug? driver/*driver*)
[["2015-06-01T00:00:00-07:00" 6]
["2015-06-02T00:00:00-07:00" 10]
["2015-06-03T00:00:00-07:00" 4]
["2015-06-04T00:00:00-07:00" 9]
["2015-06-05T00:00:00-07:00" 9]
["2015-06-06T00:00:00-07:00" 8]
["2015-06-07T00:00:00-07:00" 8]
["2015-06-08T00:00:00-07:00" 9]
["2015-06-09T00:00:00-07:00" 7]
["2015-06-10T00:00:00-07:00" 9]]
(qp.test/supports-report-timezone? driver/*driver*)
[["2015-06-01T00:00:00-07:00" 8]
["2015-06-02T00:00:00-07:00" 9]
["2015-06-03T00:00:00-07:00" 9]
["2015-06-04T00:00:00-07:00" 4]
["2015-06-05T00:00:00-07:00" 11]
["2015-06-06T00:00:00-07:00" 8]
["2015-06-07T00:00:00-07:00" 6]
["2015-06-08T00:00:00-07:00" 10]
["2015-06-09T00:00:00-07:00" 6]
["2015-06-10T00:00:00-07:00" 10]]
:else
[["2015-06-01T00:00:00Z" 6]
["2015-06-02T00:00:00Z" 10]
["2015-06-03T00:00:00Z" 4]
["2015-06-04T00:00:00Z" 9]
["2015-06-05T00:00:00Z" 9]
["2015-06-06T00:00:00Z" 8]
["2015-06-07T00:00:00Z" 8]
["2015-06-08T00:00:00Z" 9]
["2015-06-09T00:00:00Z" 7]
["2015-06-10T00:00:00Z" 9]])
(sad-toucan-incidents-with-bucketing :day :pacific))))
(testing "\nEastern timezone"
(is (= (cond
(= :sqlite driver/*driver*)
(results-by-day u.date/parse date-without-time-format-fn [6 10 4 9 9 8 8 9 7 9])
(qp.test/tz-shifted-driver-bug? driver/*driver*)
(results-by-day (default-timezone-parse-fn :eastern)
(format-in-timezone-fn :eastern)
[6 10 4 9 9 8 8 9 7 9])
(qp.test/supports-report-timezone? driver/*driver*)
(results-by-day (default-timezone-parse-fn :eastern)
(format-in-timezone-fn :eastern)
[7 9 7 6 12 6 7 9 8 10])
:else
(results-by-day u.date/parse
(format-in-timezone-fn :utc)
[6 10 4 9 9 8 8 9 7 9]))
(sad-toucan-incidents-with-bucketing :day :eastern)))))
(testing "\nWith JVM timezone set to Pacific time"
;; This tests out the JVM timezone's impact on the results. For databases supporting a report timezone, this should
;; have no effect on the results. When no report timezone is used it should convert dates to the JVM's timezone
;;
;; The exclusions here are databases that give incorrect answers when the JVM timezone doesn't match the database's
;; timezone.
(mt/test-drivers (mt/normal-drivers-with-feature :test/jvm-timezone-setting)
(is (= (cond
(= :sqlite driver/*driver*)
(results-by-day u.date/parse date-without-time-format-fn [6 10 4 9 9 8 8 9 7 9])
(qp.test/tz-shifted-driver-bug? driver/*driver*)
(results-by-day (default-timezone-parse-fn :pacific)
(format-in-timezone-fn :pacific)
[6 10 4 9 9 8 8 9 7 9])
(qp.test/supports-report-timezone? driver/*driver*)
(results-by-day (default-timezone-parse-fn :pacific)
(format-in-timezone-fn :pacific)
[8 9 9 4 11 8 6 10 6 10])
:else
(results-by-day (default-timezone-parse-fn :utc)
(format-in-timezone-fn :pacific)
[6 10 4 9 9 8 8 9 7 9]))
(mt/with-system-timezone-id (timezone :pacific)
(sad-toucan-incidents-with-bucketing :day :pacific)))))))
(deftest group-by-day-of-week-test
(mt/test-drivers (mt/normal-drivers)
(testing "\nPacific timezone"
(is (= (if (and (not (qp.test/tz-shifted-driver-bug? driver/*driver*))
(qp.test/supports-report-timezone? driver/*driver*))
[[1 29] [2 36] [3 33] [4 29] [5 13] [6 38] [7 22]]
[[1 28] [2 38] [3 29] [4 27] [5 24] [6 30] [7 24]])
(sad-toucan-incidents-with-bucketing :day-of-week :pacific))))
(testing "\nUTC timezone"
(is (= [[1 28] [2 38] [3 29] [4 27] [5 24] [6 30] [7 24]]
(sad-toucan-incidents-with-bucketing :day-of-week :utc))))))
(deftest group-by-day-of-month-test
(mt/test-drivers (mt/normal-drivers)
(testing "\nPacific timezone"
(is (= (if (and (not (qp.test/tz-shifted-driver-bug? driver/*driver*))
(qp.test/supports-report-timezone? driver/*driver*))
[[1 8] [2 9] [3 9] [4 4] [5 11] [6 8] [7 6] [8 10] [9 6] [10 10]]
[[1 6] [2 10] [3 4] [4 9] [5 9] [6 8] [7 8] [8 9] [9 7] [10 9]])
(sad-toucan-incidents-with-bucketing :day-of-month :pacific))))
(testing "\nUTC timezone"
(is (= [[1 6] [2 10] [3 4] [4 9] [5 9] [6 8] [7 8] [8 9] [9 7] [10 9]]
(sad-toucan-incidents-with-bucketing :day-of-month :utc))))))
(deftest group-by-day-of-year-test
(mt/test-drivers (mt/normal-drivers)
(testing "\nPacific timezone"
(is (= (if (and (not (qp.test/tz-shifted-driver-bug? driver/*driver*))
(qp.test/supports-report-timezone? driver/*driver*))
[[152 8] [153 9] [154 9] [155 4] [156 11] [157 8] [158 6] [159 10] [160 6] [161 10]]
[[152 6] [153 10] [154 4] [155 9] [156 9] [157 8] [158 8] [159 9] [160 7] [161 9]])
(sad-toucan-incidents-with-bucketing :day-of-year :pacific))))
(testing "\nUTC timezone"
(is (= [[152 6] [153 10] [154 4] [155 9] [156 9] [157 8] [158 8] [159 9] [160 7] [161 9]]
(sad-toucan-incidents-with-bucketing :day-of-year :utc))))))
(deftest new-weekly-events-after-tz-shift-test
(driver/with-driver :h2
(doseq [[timezone-id start-date->expected-net-gain] {:pacific {"2015-05-31" 3
"2015-06-07" 0
"2015-06-14" -1
"2015-06-21" -2
"2015-06-28" 0}
:eastern {"2015-05-31" 1
"2015-06-07" 1
"2015-06-14" -1
"2015-06-21" -1
"2015-06-28" 0}}]
(testing (format "Timezone = %s" timezone-id)
(doseq [[start-str expected-net-gain] start-date->expected-net-gain
:let [start (u.date/parse start-str)
end (t/plus start (t/days 7))
->tz #(t/zoned-date-time % (t/local-time 0) (t/zone-id (->timezone-id timezone-id)))
find-events-in-range (fn [x y]
(find-events-in-range (u.date/format x) (u.date/format y)))
num-events-gained (find-events-in-range end (->tz end))
num-events-lost (find-events-in-range start (->tz start))]]
(testing (format "events between %s and %s" start end)
(is (= expected-net-gain
(- num-events-gained num-events-lost))
(format "When shifting to %s timezone we should lose %d events and gain %d, for a net gain of %d"
timezone-id num-events-gained num-events-lost expected-net-gain))))))))
(defn- results-by-week [parse-fn format-result-fn counts]
(map
(fn [s cnt]
[(-> s parse-fn format-result-fn) cnt])
["2015-05-31"
"2015-06-07"
"2015-06-14"
"2015-06-21"
"2015-06-28"]
counts))
;; Sad toucan incidents by week. Databases in UTC that don't support report timezones will be the same as the UTC test
;; above. Databases that support report timezone will have different counts as the week starts and ends 7 hours
;; earlier
(deftest group-by-week-test
(mt/test-drivers (mt/normal-drivers)
(testing "\nUTC timezone"
(is (= (if (= :sqlite driver/*driver*)
(results-by-week u.date/parse
date-without-time-format-fn
[46 47 40 60 7])
(results-by-week u.date/parse
(format-in-timezone-fn :utc)
[46 47 40 60 7]))
(sad-toucan-incidents-with-bucketing :week :utc))))
(testing "\nPacific timezone"
(is (= (cond
(= :sqlite driver/*driver*)
(results-by-week u.date/parse
date-without-time-format-fn
[46 47 40 60 7])
(qp.test/tz-shifted-driver-bug? driver/*driver*)
(results-by-week (default-timezone-parse-fn :pacific)
(format-in-timezone-fn :pacific)
[46 47 40 60 7])
(qp.test/supports-report-timezone? driver/*driver*)
(results-by-week (default-timezone-parse-fn :pacific)
(format-in-timezone-fn :pacific)
[49 47 39 58 7])
:else
(results-by-week u.date/parse
(format-in-timezone-fn :utc)
[46 47 40 60 7]))
(sad-toucan-incidents-with-bucketing :week :pacific))))
;; Tests eastern timezone grouping by week; UTC databases don't change, databases with reporting timezones need to
;; account for the 4-5 hour difference
(testing "\nEastern timezone"
(mt/test-drivers (mt/normal-drivers)
(is (= (cond
(= :sqlite driver/*driver*)
(results-by-week u.date/parse
date-without-time-format-fn
[46 47 40 60 7])
(qp.test/tz-shifted-driver-bug? driver/*driver*)
(results-by-week (default-timezone-parse-fn :eastern)
(format-in-timezone-fn :eastern)
[46 47 40 60 7])
(qp.test/supports-report-timezone? driver/*driver*)
(results-by-week (default-timezone-parse-fn :eastern)
(format-in-timezone-fn :eastern)
[47 48 39 59 7])
:else
(results-by-week u.date/parse
(format-in-timezone-fn :utc)
[46 47 40 60 7]))
(sad-toucan-incidents-with-bucketing :week :eastern))))))
;; Setting the JVM timezone will change how the datetime results are displayed but doesn't impact the calculation of the
;; begin/end of the week
;;
;; The exclusions here are databases that give incorrect answers when the JVM timezone doesn't match the database's
;; timezone (FIXME)
(testing "JVM timezone set to Pacific"
(mt/test-drivers (mt/normal-drivers-except #{:h2 :sqlserver :redshift :sparksql :mongo :bigquery-cloud-sdk})
(is (= (cond
(= :sqlite driver/*driver*)
(results-by-week u.date/parse
date-without-time-format-fn
[46 47 40 60 7])
(qp.test/tz-shifted-driver-bug? driver/*driver*)
(results-by-week (default-timezone-parse-fn :pacific)
(format-in-timezone-fn :pacific)
[46 47 40 60 7])
(qp.test/supports-report-timezone? driver/*driver*)
(results-by-week (default-timezone-parse-fn :pacific)
(format-in-timezone-fn :pacific)
[49 47 39 58 7])
:else
(results-by-week u.date/parse
(format-in-timezone-fn :pacific)
[46 47 40 60 7]))
(mt/with-system-timezone-id (timezone :pacific)
(sad-toucan-incidents-with-bucketing :week :pacific)))))))
(deftest group-by-week-of-year-test
(mt/test-drivers (mt/normal-drivers)
(is (= [[22 46] [23 47] [24 40] [25 60] [26 7]]
(sad-toucan-incidents-with-bucketing :week-of-year :utc)))))
(defn- fmt-str-or-int
[x]
(if (string? x)
(str x)
(int x)))
(deftest week-of-year-and-week-count-should-be-consistent-test
(testing "consistent break out between weeks and week-of-year #4910"
(mt/test-drivers (mt/normal-drivers)
;; 2019-01-01 is a Tuesday, so set start-of-week to Tuesday so that the
;; breakout by week-of-year has the 1st week of the year as its first row
(mt/with-temporary-setting-values [start-of-week :tuesday]
(mt/dataset sample-dataset
(letfn [(test-break-out [unit]
(->> (mt/mbql-query orders
{:filter [:between $created_at "2019-01-01" "2019-12-31"]
:breakout [:field $created_at {:temporal-unit unit}]
:aggregation [[:count]]})
mt/process-query
(mt/formatted-rows [fmt-str-or-int int])))]
(testing "count result should be the same between week and week-of-year"
(is (= (map second (test-break-out :week))
(map second (test-break-out :week-of-year))))
(is (= [127 124 136]
(->> (test-break-out :week)
(map second)
(take 3)))))
(testing "make sure all drivers returns the same week column"
(is (= (case driver/*driver*
:sqlite ["2019-01-01 00:00:00" "2019-01-08 00:00:00" "2019-01-15 00:00:00"]
["2019-01-01T00:00:00Z" "2019-01-08T00:00:00Z" "2019-01-15T00:00:00Z"])
(->> (test-break-out :week)
(map first)
(take 3)))))
(testing "make sure all drivers returns the same week-of-year column"
(is (= [1 2 3]
(->> (test-break-out :week-of-year)
(map first)
(take 3)))))))))))
;; All of the sad toucan events in the test data fit in June. The results are the same on all databases and the only
;; difference is how the beginning of the month is represented, since we always return times with our dates
(deftest group-by-month-test
(mt/test-drivers (mt/normal-drivers)
(testing "\nPacific timezone"
(is (= [[(cond
(= :sqlite driver/*driver*)
"2015-06-01"
(qp.test/supports-report-timezone? driver/*driver*)
"2015-06-01T00:00:00-07:00"
:else
"2015-06-01T00:00:00Z")
200]]
(sad-toucan-incidents-with-bucketing :month :pacific))))
(testing "\nEastern timezone"
(mt/test-drivers (mt/normal-drivers)
(is (= [[(cond
(= :sqlite driver/*driver*)
"2015-06-01"
(qp.test/supports-report-timezone? driver/*driver*)
"2015-06-01T00:00:00-04:00"
:else
"2015-06-01T00:00:00Z")
200]]
(sad-toucan-incidents-with-bucketing :month :eastern)))))))
(deftest group-by-month-of-year-test
(mt/test-drivers (mt/normal-drivers)
(is (= [[6 200]]
(sad-toucan-incidents-with-bucketing :month-of-year :pacific)))))
(deftest group-by-quarter-test
(mt/test-drivers (mt/normal-drivers)
(testing "\nPacific timezone"
(is (= [[(cond (= :sqlite driver/*driver*)
"2015-04-01"
(qp.test/supports-report-timezone? driver/*driver*)
"2015-04-01T00:00:00-07:00"
:else
"2015-04-01T00:00:00Z")
200]]
(sad-toucan-incidents-with-bucketing :quarter :pacific))))
(testing "\nEastern timezone"
(is (= [[(cond (= :sqlite driver/*driver*)
"2015-04-01"
(qp.test/supports-report-timezone? driver/*driver*)
"2015-04-01T00:00:00-04:00"
:else
"2015-04-01T00:00:00Z")
200]]
(sad-toucan-incidents-with-bucketing :quarter :eastern))))))
(deftest group-by-quarter-of-year-test
(mt/test-drivers (mt/normal-drivers)
(is (= [[2 200]]
(sad-toucan-incidents-with-bucketing :quarter-of-year :pacific)))
(is (= [[1 200]
[2 284]
[3 278]
[4 238]]
(mt/formatted-rows [int int]
(mt/run-mbql-query checkins
{:aggregation [[:count]]
:breakout [!quarter-of-year.date]}))))))
(deftest group-by-year-test
(mt/test-drivers (mt/normal-drivers)
(is (= [[(cond
(= :sqlite driver/*driver*)
"2015-01-01"
(qp.test/supports-report-timezone? driver/*driver*)
"2015-01-01T00:00:00-08:00"
:else
"2015-01-01T00:00:00Z")
200]]
(sad-toucan-incidents-with-bucketing :year :pacific)))
(is (= (if (= :sqlite driver/*driver*)
[["2013-01-01" 235]
["2014-01-01" 498]
["2015-01-01" 267]]
[["2013-01-01T00:00:00Z" 235]
["2014-01-01T00:00:00Z" 498]
["2015-01-01T00:00:00Z" 267]])
(mt/formatted-rows [str int]
(mt/run-mbql-query checkins
{:aggregation [[:count]]
:breakout [!year.date]}))))))
(p.types/deftype+ ^:private TimestampDatasetDef [intervalSeconds]
pretty/PrettyPrintable
(pretty [_]
(list 'TimestampDatasetDef. intervalSeconds)))
(defn- driver->current-datetime-base-type
"Returns the :base-type of the \"current timestamp\" HoneySQL form defined by the driver `d`. Relies upon the driver
implementation having set that explicitly via `hx/with-type-info`. Returns `nil` if it can't be determined."
[d]
(when (isa? driver/hierarchy driver/*driver* :sql)
(let [db-type (-> (sql.qp/current-datetime-honeysql-form d)
hx/type-info
hx/type-info->db-type)]
(when-not (str/blank? db-type)
(sql-jdbc.sync/database-type->base-type d db-type)))))
(defmethod mt/get-dataset-definition TimestampDatasetDef
[^TimestampDatasetDef this]
(let [interval-seconds (.intervalSeconds this)]
(mt/dataset-definition
(str "checkins_interval_" interval-seconds)
["checkins"
[{:field-name "timestamp"
:base-type (or (driver->current-datetime-base-type driver/*driver*) :type/DateTime)}]
(mapv (fn [i]
;; generating Java classes here so they'll be in the DB's native timezone. Some DBs refuse to use
[(u/prog1 (if (and (isa? driver/hierarchy driver/*driver* :sql)
;; TODO -- make `insert-rows-using-statements?` a multimethod so we don't need to
(not (#{:vertica :bigquery-cloud-sdk} driver/*driver*)))
(binding [hx/*honey-sql-version* (sql.qp/honey-sql-version driver/*driver*)]
(sql.qp/compiled
(sql.qp/add-interval-honeysql-form driver/*driver*
(sql.qp/current-datetime-honeysql-form driver/*driver*)
(* i interval-seconds)
:second)))
(u.date/add :second (* i interval-seconds)))
(assert <>))])
(range -15 15))])))
(defn- dataset-def-with-timestamps [interval-seconds]
(TimestampDatasetDef. interval-seconds))
(def ^:private checkins:4-per-minute
"Dynamically generated dataset with 30 checkins spaced 15 seconds apart, from 3 mins 45 seconds ago to 3 minutes 30
seconds in the future."
(dataset-def-with-timestamps 15))
(def ^:private checkins:4-per-hour
"Dynamically generated dataset with 30 checkins spaced 15 minutes apart, from 3 hours 45 minutes ago to 3 hours 30
minutes in the future."
(dataset-def-with-timestamps (u/minutes->seconds 15)))
(def ^:private checkins:1-per-day
"Dynamically generated dataset with 30 checkins spaced 24 hours apart, from 15 days ago to 14 days in the future."
(dataset-def-with-timestamps (* 24 (u/minutes->seconds 60))))
(defn- checkins-db-is-old?
"Determine whether we need to recreate one of the dynamically-generated datasets above, if the data has grown a little
stale."
[max-age-seconds]
(u.date/greater-than-period-duration? (u.date/period-duration (:created_at (mt/db)) (t/zoned-date-time))
(t/seconds max-age-seconds)))
(def ^:private ^:dynamic *recreate-db-if-stale?* true)
(defn- count-of-grouping [^TimestampDatasetDef dataset field-grouping & relative-datetime-args]
(mt/dataset dataset
;; DB has values in the range of now() - (interval-seconds * 15) and now() + (interval-seconds * 15). So if it
;; was created more than (interval-seconds * 5) seconds ago, delete the Database and recreate it to make sure
(if (and (checkins-db-is-old? (* (.intervalSeconds dataset) 5)) *recreate-db-if-stale?*)
(binding [*recreate-db-if-stale?* false]
(log/infof "DB for %s is stale! Deleting and running test again\n" dataset)
(db/delete! Database :id (mt/id))
(apply count-of-grouping dataset field-grouping relative-datetime-args))
(let [results (mt/run-mbql-query checkins
{:aggregation [[:count]]
:filter [:=
[:field %timestamp {:temporal-unit field-grouping}]
(cons :relative-datetime relative-datetime-args)]})]
(or (some-> results mt/first-row first int)
results)))))
;; ran and loading data into these DBs is mind-bogglingly slow. This also applies to Athena for now, because
;; Don't run the minute tests against Oracle because the Oracle tests are kind of slow and cause CI to fail randomly
(deftest count-of-grouping-test
(mt/test-drivers (mt/normal-drivers-except #{:snowflake :athena})
(testing "4 checkins per minute dataset"
(testing "group by minute"
(doseq [args [[:current] [-1 :minute] [1 :minute]]]
(is (= 4
(apply count-of-grouping checkins:4-per-minute :minute args))
(format "filter by minute = %s" (into [:relative-datetime] args)))))))
(mt/test-drivers (mt/normal-drivers-except #{:snowflake :athena})
(testing "4 checkins per hour dataset"
(testing "group by hour"
(doseq [args [[:current] [-1 :hour] [1 :hour]]]
(is (= 4
(apply count-of-grouping checkins:4-per-hour :hour args))
(format "filter by hour = %s" (into [:relative-datetime] args))))))
(testing "1 checkin per day dataset"
(testing "group by day"
(doseq [args [[:current] [-1 :day] [1 :day]]]
(is (= 1
(apply count-of-grouping checkins:1-per-day :day args))
(format "filter by day = %s" (into [:relative-datetime] args)))))
(testing "group by week"
(is (= 7
(count-of-grouping checkins:1-per-day :week :current))
"filter by week = [:relative-datetime :current]")))))
(deftest time-interval-test
(mt/test-drivers (mt/normal-drivers-except #{:snowflake :athena})
(testing "Syntactic sugar (`:time-interval` clause)"
(mt/dataset checkins:1-per-day
(is (= 1
(ffirst
(mt/formatted-rows [int]
(mt/run-mbql-query checkins
{:aggregation [[:count]]
:filter [:time-interval $timestamp :current :day]})))))
(is (= 7
(ffirst
(mt/formatted-rows [int]
(mt/run-mbql-query checkins
{:aggregation [[:count]]
:filter [:time-interval $timestamp :last :week]})))))))))
;; reflects the units the results are in. E.g. when we breakout by one unit and filter by another, make sure the results
(defn- date-bucketing-unit-when-you [& {:keys [breakout-by filter-by with-interval]
:or {with-interval :current}}]
(let [results (mt/dataset checkins:1-per-day
(mt/run-mbql-query checkins
{:aggregation [[:count]]
:breakout [[:field %timestamp {:temporal-unit breakout-by}]]
:filter [:time-interval $timestamp with-interval filter-by]}))]
{:rows (or (-> results :row_count)
(throw (ex-info "Query failed!" results)))
:unit (-> results :data :cols first :unit)}))
(deftest date-bucketing-when-you-test
(mt/test-drivers (mt/normal-drivers-except #{:snowflake :athena})
(is (= {:rows 1, :unit :day}
(date-bucketing-unit-when-you :breakout-by "day", :filter-by "day")))
(is (= {:rows 7, :unit :day}
(date-bucketing-unit-when-you :breakout-by "day", :filter-by "week")))
(is (= {:rows 1, :unit :week}
(date-bucketing-unit-when-you :breakout-by "week", :filter-by "day")))
(is (= {:rows 1, :unit :quarter}
(date-bucketing-unit-when-you :breakout-by "quarter", :filter-by "day")))
(is (= {:rows 1, :unit :hour}
(date-bucketing-unit-when-you :breakout-by "hour", :filter-by "day")))
;; make sure if you use a relative date bucket in the past (e.g. "past 2 months") you get the correct amount of rows
(is (= {:rows 2, :unit :day}
(date-bucketing-unit-when-you :breakout-by "day", :filter-by "day", :with-interval -2)))
(is (= {:rows 2, :unit :day}
(date-bucketing-unit-when-you :breakout-by "day", :filter-by "day", :with-interval 2)))))
;; Filtering by an unbucketed datetime Field should automatically bucket that Field by day if not already done (#8927)
;; This should only apply when comparing to `yyyy-MM-dd` date strings.
;; e.g. `[:= <field> "2018-11-19"]` should get rewritten as `[:= [:field <field> {:temporal-unit :day}] "2018-11-19"]` if
;; We should get count = 1 for the current day, as opposed to count = 0 if we weren't auto-bucketing
;; (e.g. 2018-11-19T00:00 != 2018-11-19T12:37 or whatever time the checkin is at)
(deftest default-bucketing-test
(mt/test-drivers (mt/normal-drivers-except #{:snowflake :athena})
(mt/dataset checkins:1-per-day
(is (= [[1]]
(mt/formatted-rows [int]
(mt/run-mbql-query checkins
{:aggregation [[:count]]
:filter [:= [:field $timestamp nil] (t/format "yyyy-MM-dd" (u.date/truncate :day))]}))))))
(mt/test-drivers (mt/normal-drivers)
(mt/dataset office-checkins
(is (= [[1]]
(mt/formatted-rows [int]
(mt/run-mbql-query checkins
{:aggregation [[:count]]
:filter [:= [:field $timestamp nil] "2019-01-16"]}))))
(testing "Check that automatic bucketing still happens when using compound filter clauses (#9127)"
(is (= [[1]]
(mt/formatted-rows [int]
(mt/run-mbql-query checkins
{:aggregation [[:count]]
:filter [:and
[:= [:field $timestamp nil] "2019-01-16"]
[:= [:field $id nil] 6]]})))))))
(mt/test-drivers (mt/normal-drivers-except #{:snowflake :athena})
(testing "if datetime string is not yyyy-MM-dd no date bucketing should take place, and thus we should get no (exact) matches"
(mt/dataset checkins:1-per-day
(is (=
;; Mongo returns empty row for count = 0. We should fix that (#5419)
(case driver/*driver*
:mongo []
[[0]])
(mt/formatted-rows [int]
(mt/run-mbql-query checkins
{:aggregation [[:count]]
:filter [:= [:field $timestamp nil] (str (t/format "yyyy-MM-dd" (u.date/truncate :day))
"T14:16:00Z")]}))))))))
(def ^:private addition-unit-filtering-vals
[[3 :day "2014-03-03"]
[135 :day-of-week 1]
[36 :day-of-month 1]
[9 :day-of-year 214]
[11 :week "2014-03-03"]
[7 :week-of-year 2]
[48 :month "2014-03"]
[38 :month-of-year 1]
[107 :quarter "2014-01"]
[200 :quarter-of-year 1]
[498 :year "2014"]])
(defn- count-of-checkins [unit filter-value]
(ffirst
(mt/formatted-rows [int]
(mt/run-mbql-query checkins
{:aggregation [[:count]]
:filter [:= [:field %date {:temporal-unit unit}] filter-value]}))))
(deftest additional-unit-filtering-tests
(testing "Additional tests for filtering against various datetime bucketing units that aren't tested above"
(mt/with-temporary-setting-values [start-of-week :sunday]
(mt/test-drivers (mt/normal-drivers)
(doseq [[expected-count unit filter-value] addition-unit-filtering-vals]
;; iterate on at least two report time zones to suss out bugs related to that
(mt/with-temporary-setting-values [report-timezone tz]
(testing (format "\nunit = %s" unit)
(is (= expected-count (count-of-checkins unit filter-value))
(format
"count of rows where (= (%s date) %s) should be %d"
(name unit)
filter-value
expected-count))))))))))
(deftest legacy-default-datetime-bucketing-test
(testing (str ":type/Date or :type/DateTime fields that don't have `:temporal-unit` clauses should get default `:day` "
"bucketing for legacy reasons. See #9014")
(is (= (str "SELECT COUNT(*) AS \"count\" "
"FROM \"PUBLIC\".\"CHECKINS\" "
"WHERE ("
"\"PUBLIC\".\"CHECKINS\".\"DATE\" >= CAST(NOW() AS date)) "
"AND "
"(\"PUBLIC\".\"CHECKINS\".\"DATE\" < CAST(DATEADD('day', CAST(1 AS long), CAST(NOW() AS datetime)) AS date)"
")")
(:query
(qp/compile
(mt/mbql-query checkins
{:aggregation [[:count]]
:filter [:= $date [:relative-datetime :current]]})))))))
(deftest compile-time-interval-test
(testing "Make sure time-intervals work the way they're supposed to."
(testing "[:time-interval $date -4 :month] should give us something like Oct 01 2020 - Feb 01 2021 if today is Feb 17 2021"
(is (= (str "SELECT CHECKINS.DATE AS DATE "
"FROM CHECKINS "
"WHERE ("
"CHECKINS.DATE >= DATE_TRUNC('month', DATEADD('month', CAST(-4 AS long), CAST(NOW() AS datetime))))"
" AND "
"(CHECKINS.DATE < DATE_TRUNC('month', NOW())) "
"GROUP BY CHECKINS.DATE "
"ORDER BY CHECKINS.DATE ASC "
"LIMIT 1048575")
(sql.qp-test-util/pretty-sql
(:query
(qp/compile
(mt/mbql-query checkins
{:filter [:time-interval $date -4 :month]
:breakout [!day.date]})))))))))
(deftest field-filter-start-of-week-test
(testing "Field Filters with relative date ranges should respect the custom start of week setting (#14294)"
(mt/dataset checkins:1-per-day
(let [query (mt/native-query {:query (str "SELECT dayname(\"TIMESTAMP\") as \"day\" "
"FROM checkins "
"[[WHERE {{date_range}}]] "
"ORDER BY \"TIMESTAMP\" ASC "
"LIMIT 1")
:template-tags {"date_range"
{:name "date_range"
:display-name "Date Range"
:type :dimension
:widget-type :date/all-options
:dimension (mt/$ids $checkins.timestamp)}}
:parameters [{:type :date/range
:name "created_at"
:target [:dimension [:template-tag "date_range"]]
:value "past1weeks"}]})]
(doseq [[first-day-of-week expected] {"sunday" ["Sunday"]
"monday" ["Monday"]
"tuesday" ["Tuesday"]
"wednesday" ["Wednesday"]
"thursday" ["Thursday"]
"friday" ["Friday"]
"saturday" ["Saturday"]}]
(mt/with-temporary-setting-values [start-of-week first-day-of-week]
(is (= expected
(mt/first-row
(qp/process-query query))))))))))
(deftest day-of-week-custom-start-of-week-test
(mt/test-drivers (mt/normal-drivers)
(testing "`:day-of-week` bucketing should respect the `start-of-week` Setting (#13604)"
(testing "filter by `:day-of-week` should work correctly (#15044)"
(doseq [[day [thursday-day-of-week saturday-day-of-week]] {:sunday [5 7]
:monday [4 6]
:tuesday [3 5]}]
(mt/with-temporary-setting-values [start-of-week day]
(is (= (sort-by
first
[[thursday-day-of-week 2]
[saturday-day-of-week 1]])
(mt/formatted-rows [int int]
(mt/run-mbql-query checkins
{:aggregation [[:count]]
:breakout [!day-of-week.date]
:filter [:between $date "2013-01-03" "2013-01-20"]}))))))))))
(deftest first-day-of-week-for-day-of-week-bucketing-test
(testing "First day of week for `:day-of-week` bucketing should be the consistent (#17801)"
(mt/test-drivers (mt/normal-drivers-with-feature :basic-aggregations)
(let [query (mt/mbql-query checkins
{:aggregation [[:count]]
:breakout [!day-of-week.date]})]
(doseq [[first-day-of-week expected-rows] {:sunday [[1 135] [2 143] [3 153] [4 136] [5 139] [6 160] [7 134]]
:monday [[1 143] [2 153] [3 136] [4 139] [5 160] [6 134] [7 135]]
:tuesday [[1 153] [2 136] [3 139] [4 160] [5 134] [6 135] [7 143]]
:wednesday [[1 136] [2 139] [3 160] [4 134] [5 135] [6 143] [7 153]]
:thursday [[1 139] [2 160] [3 134] [4 135] [5 143] [6 153] [7 136]]
:friday [[1 160] [2 134] [3 135] [4 143] [5 153] [6 136] [7 139]]
:saturday [[1 134] [2 135] [3 143] [4 153] [5 136] [6 139] [7 160]]}]
(mt/with-temporary-setting-values [start-of-week first-day-of-week]
(mt/with-native-query-testing-context query
(is (= expected-rows
(mt/formatted-rows [int int] (qp/process-query query)))))))))))
(deftest filter-by-current-quarter-test
;; Oracle doesn't work on March 31st because March 31st + 3 months = June 31st, which doesn't exist. See #10072
(mt/test-drivers (disj (mt/normal-drivers) :oracle)
(testing "Should be able to filter by current quarter (#20683)"
(let [query (mt/mbql-query checkins
{:aggregation [[:count]]
:filter [:= !quarter.date [:relative-datetime :now]]})]
(mt/with-native-query-testing-context query
(is (=
;; Mongo returns empty row for count = 0. We should fix that (#5419)
(case driver/*driver*
:mongo []
[[0]])
(mt/formatted-rows [int] (qp/process-query query)))))))))
;; TODO -- is this really date BUCKETING? Does this BELONG HERE?!
(deftest june-31st-test
(testing "What happens when you try to add 3 months to March 31st? It should still work (#10072, #21968, #21969)"
(mt/with-temporary-setting-values [report-timezone "UTC"]
;; only testing the SQL drivers for now since I'm not 100% sure how to mock this for everyone else. Maybe one day
(mt/test-drivers (filter #(isa? driver/hierarchy (driver/the-initialized-driver %) :sql)
(mt/normal-drivers))
(doseq [[n unit] [[3 :month]
[1 :quarter]]
t [#t "2022-03-31"
#t "2022-03-31T00:00:00"
#t "2022-03-31T00:00:00-00:00"]]
(testing (format "%d %s ^%s %s" n unit (.getCanonicalName (class t)) (pr-str t))
(binding [hx/*honey-sql-version* (sql.qp/honey-sql-version driver/*driver*)]
(let [march-31 (sql.qp/->honeysql driver/*driver* [:absolute-datetime t :day])
june-31 (sql.qp/add-interval-honeysql-form driver/*driver* march-31 n unit)
checkins (mt/with-everything-store
(sql.qp/->honeysql driver/*driver* (db/select-one Table :id (mt/id :checkins))))
honeysql {:select [[june-31 :june_31]]
:from [(sql.qp/maybe-wrap-unaliased-expr checkins)]}
honeysql (sql.qp/apply-top-level-clause driver/*driver* :limit honeysql {:limit 1})
[sql & args] (sql.qp/format-honeysql driver/*driver* honeysql)
query (mt/native-query {:query sql, :params args})]
(mt/with-native-query-testing-context query
(is (re= (u.regex/rx #"^2022-"
;; We don't really care if someone returns June 29th or 30th or July 1st here. I
;; guess you could make a case for either June 30th or July 1st. I don't really know
;; how you can get June 29th from this, but that's what Vertica returns. :shrug: The
[:or [:and "06-" [:or "29" "30"]] "07-01"]
[:? [:or "T" #"\s"] "00:00:00" [:? "Z"]])
(first (mt/first-row (qp/process-query query))))))))))))))
|
abe8967babb7ad43c60195ad6984e53a7f207c96bdc18150fc6ac953314ec2b5
|
guildhall/guile-sly
|
fps.scm
|
;;; Copyright (C) 2013, 2014 >
;;;
;;; This program is free software: you can redistribute it and/or
;;; modify it under the terms of the GNU General Public License as
;;; published by the Free Software Foundation, either version 3 of the
;;; License, or (at your option) any later version.
;;;
;;; This program is distributed in the hope that it will be useful,
;;; but WITHOUT ANY WARRANTY; without even the implied warranty of
;;; MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
;;; General Public License for more details.
;;;
;;; You should have received a copy of the GNU General Public License
;;; along with this program. If not, see
;;; </>.
;;; Commentary:
;;
;; Frames per second counter.
;;
;;; Code:
(define-module (sly fps)
#:use-module (sly game)
#:use-module (sly signal)
#:export (fps))
;; Current frames per second
(define-signal fps (make-signal 0))
(define accumulate-fps!
(let* ((elapsed-time 0)
(fps-counter 0))
(lambda (dt alpha)
(let ((new-time (+ elapsed-time dt))
(new-fps (1+ fps-counter)))
(if (>= new-time 1000)
(begin
(signal-set! fps new-fps)
(set! fps-counter 0)
(set! elapsed-time 0))
(begin
(set! fps-counter new-fps)
(set! elapsed-time new-time)))))))
(add-hook! draw-hook accumulate-fps!)
| null |
https://raw.githubusercontent.com/guildhall/guile-sly/92f5f21da76986c5b606b36afc4bb984cc63da5b/sly/fps.scm
|
scheme
|
This program is free software: you can redistribute it and/or
License, or (at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
General Public License for more details.
along with this program. If not, see
</>.
Commentary:
Code:
|
;;; Copyright (C) 2013, 2014 >
;;; modify it under the terms of the GNU General Public License as
;;; published by the Free Software Foundation, either version 3 of the
;;; You should have received a copy of the GNU General Public License
;; Frames per second counter.
(define-module (sly fps)
#:use-module (sly game)
#:use-module (sly signal)
#:export (fps))
;; Current frames per second
(define-signal fps (make-signal 0))
(define accumulate-fps!
(let* ((elapsed-time 0)
(fps-counter 0))
(lambda (dt alpha)
(let ((new-time (+ elapsed-time dt))
(new-fps (1+ fps-counter)))
(if (>= new-time 1000)
(begin
(signal-set! fps new-fps)
(set! fps-counter 0)
(set! elapsed-time 0))
(begin
(set! fps-counter new-fps)
(set! elapsed-time new-time)))))))
(add-hook! draw-hook accumulate-fps!)
|
9e370e11c5e6f2def3c38fd0877964d55a210ae0b610a533e563acfd24880658
|
janestreet/async_extra
|
async_bus.mli
|
(** Async operations on {{!Core.Bus}[Core.Bus]}. *)
open! Core
open! Async_kernel
open! Import
(** [pipe1_exn t] returns a pipe of updates from [t] by subscribing to [t]. Closing the
pipe unsubscribes from [t]. Closing [t] closes the pipe. Calling [pipe1_exn] on a
closed bus always returns an empty pipe. [pipe1_exn] raises in the same circumstances
as [subscribe_exn]. *)
val pipe1_exn
: ('a -> unit, [> read ]) Bus.t
-> Source_code_position.t
-> 'a Pipe.Reader.t
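(* Editor's note: the sketch below is not part of the original async_bus.mli. It is a
   hypothetical piece of client code (it would live in an .ml file, not in this
   interface) showing one way [pipe1_exn] might be used. Assumptions: this interface is
   in scope unqualified, [bus] is an [(int -> unit, read) Bus.t] created elsewhere with
   [Core.Bus.create], and ppx_here supplies [%here]. *)
let _print_updates (bus : (int -> unit, read) Bus.t) : unit Deferred.t =
  (* Subscribe to [bus]; the pipe closes when the bus is closed. *)
  let updates = pipe1_exn bus [%here] in
  Pipe.iter_without_pushback updates ~f:(fun n -> printf "update: %d\n" n)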
(** [pipe1_filter_map_exn] is the [filter_map]ing version of [pipe1_exn], allowing users
to [filter_map] the values without incurring the cost of an additional pipe. *)
val pipe1_filter_map_exn
: ('a -> unit, [> read ]) Bus.t
-> Source_code_position.t
-> f:('a -> 'b option)
-> 'b Pipe.Reader.t
module First_arity : sig
type (_, _, _) t =
| Arity1 : ('a -> unit, 'a -> 'r option, 'r) t
| Arity2 : ('a -> 'b -> unit, 'a -> 'b -> 'r option, 'r) t
| Arity3 : ('a -> 'b -> 'c -> unit, 'a -> 'b -> 'c -> 'r option, 'r) t
| Arity4 : ('a -> 'b -> 'c -> 'd -> unit, 'a -> 'b -> 'c -> 'd -> 'r option, 'r) t
| Arity5
: ( 'a -> 'b -> 'c -> 'd -> 'e -> unit
, 'a -> 'b -> 'c -> 'd -> 'e -> 'r option
, 'r )
t
[@@deriving sexp_of]
end
(** [first_exn here t arity ~f] returns a deferred that becomes determined with value [r]
    when the first event is published to [t] where [f] returns [Some r]. [first_exn] then
    unsubscribes from [t], ensuring that [f] is never called again after it returns
    [Some]. [first_exn] raises if it can't subscribe to the bus, i.e., if [subscribe_exn]
    raises. If [f] raises, then [first_exn] raises to the monitor in effect when
    [first_exn] was called. [first_exn] takes time proportional to the number of bus
    subscribers.
    If [stop] is provided and becomes determined, [f] will not be called again, it will
    unsubscribe from the bus, and the deferred that was returned by [first_exn] will never
    become determined. *)
val first_exn
: ?stop:unit Deferred.t
-> ('c, [> read ]) Bus.t
-> Source_code_position.t
-> ('c, 'f, 'r) First_arity.t
-> f:'f
-> 'r Deferred.t
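(* Editor's note: another hypothetical client-side sketch, not part of the original
   interface. It illustrates [first_exn] with [First_arity.Arity2]: wait for the first
   (key, value) event whose key equals [key]. The [bus] argument, its callback type, and
   the helper name are assumptions made for illustration only. *)
let _first_value_for (bus : (string -> int -> unit, read) Bus.t) ~key : int Deferred.t =
  first_exn bus [%here] First_arity.Arity2
    ~f:(fun k v -> if String.equal k key then Some v else None)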
| null |
https://raw.githubusercontent.com/janestreet/async_extra/581ea77ab9a2ac8d3cbbbcfc0bd8421d0b5342e2/async_bus/src/async_bus.mli
|
ocaml
|
* Async operations on {{!Core.Bus}[Core.Bus]}.
* [pipe1_exn t] returns a pipe of updates from [t] by subscribing to [t]. Closing the
pipe unsubscribes from [t]. Closing [t] closes the pipe. Calling [pipe1_exn] on a
closed bus always returns an empty pipe. [pipe1_exn] raises in the same circumstances
as [subscribe_exn].
* [pipe1_filter_map_exn] is the [filter_map]ing version of [pipe1_exn], allowing users
to [filter_map] the values without incurring the cost of an additional pipe.
|
open! Core
open! Async_kernel
open! Import
val pipe1_exn
: ('a -> unit, [> read ]) Bus.t
-> Source_code_position.t
-> 'a Pipe.Reader.t
val pipe1_filter_map_exn
: ('a -> unit, [> read ]) Bus.t
-> Source_code_position.t
-> f:('a -> 'b option)
-> 'b Pipe.Reader.t
module First_arity : sig
type (_, _, _) t =
| Arity1 : ('a -> unit, 'a -> 'r option, 'r) t
| Arity2 : ('a -> 'b -> unit, 'a -> 'b -> 'r option, 'r) t
| Arity3 : ('a -> 'b -> 'c -> unit, 'a -> 'b -> 'c -> 'r option, 'r) t
| Arity4 : ('a -> 'b -> 'c -> 'd -> unit, 'a -> 'b -> 'c -> 'd -> 'r option, 'r) t
| Arity5
: ( 'a -> 'b -> 'c -> 'd -> 'e -> unit
, 'a -> 'b -> 'c -> 'd -> 'e -> 'r option
, 'r )
t
[@@deriving sexp_of]
end
(** [first_exn here t arity ~f] returns a deferred that becomes determined with value [r]
    when the first event is published to [t] where [f] returns [Some r]. [first_exn] then
    unsubscribes from [t], ensuring that [f] is never called again after it returns
    [Some]. [first_exn] raises if it can't subscribe to the bus, i.e., if [subscribe_exn]
    raises. If [f] raises, then [first_exn] raises to the monitor in effect when
    [first_exn] was called. [first_exn] takes time proportional to the number of bus
    subscribers.
    If [stop] is provided and becomes determined, [f] will not be called again, it will
    unsubscribe from the bus, and the deferred that was returned by [first_exn] will never
    become determined. *)
val first_exn
: ?stop:unit Deferred.t
-> ('c, [> read ]) Bus.t
-> Source_code_position.t
-> ('c, 'f, 'r) First_arity.t
-> f:'f
-> 'r Deferred.t
|
2b3ad90ce9a9fe84ea5a833c24dac8ee5846bce8a886410616d0ea742edacd87
|
tonyg/kali-scheme
|
eval2.scm
|
(define-local-syntax (define-primitive id nargs)
(let ((args (reverse (list-tail '(z y x) (- '3 nargs)))))
`(define (,id . ,args)
(call-primitively ,id . ,args))))
(define-primitive ashr 2)
(define high-bits ashr)
| null |
https://raw.githubusercontent.com/tonyg/kali-scheme/79bf76b4964729b63fce99c4d2149b32cb067ac0/ps-compiler/prescheme/test/eval2.scm
|
scheme
|
(define-local-syntax (define-primitive id nargs)
(let ((args (reverse (list-tail '(z y x) (- '3 nargs)))))
`(define (,id . ,args)
(call-primitively ,id . ,args))))
(define-primitive ashr 2)
(define high-bits ashr)
|
|
e459a5408b9de73468b443edcd7bb1fd6c7bd3d9affc8f057268fcbb21af6b02
|
archaelus/tsung
|
mochiweb_xpath.erl
|
%% mochiweb_html_xpath.erl
%% @author
%% created on <2008-04-29>
%%
%% XPath interpreter, navigate mochiweb's html structs
%% Only a subset of xpath is implemented, see what is supported in test.erl
-module(mochiweb_xpath).
-export([execute/2,execute/3,compile_xpath/1]).
%% internal data
-record(ctx, {
root,
ctx,
functions
}).
%% (string()) -> compiled_xpath()
compile_xpath(Expr) ->
mochiweb_xpath_parser:compile_xpath(Expr).
%% @doc Execute the given XPath expression against the given document, using
%% the default set of functions.
%% execute(XPath, Doc) -> Results
%% @type XPath = compiled_xpath() | string()
%% @type Doc = node()
%% @type Results = [node()] | binary() | boolean() | number()
execute(XPathString,Doc) when is_list(XPathString) ->
XPath = mochiweb_xpath_parser:compile_xpath(XPathString),
execute(XPath,Doc);
execute(XPath,Root) ->
execute(XPath,Root,[]).
%% @doc Execute the given XPath expression against the given document,
%% using the default set of functions plus the user-supplied ones.
%%
%% @see mochiweb_xpath_functions.erl to see how to write functions
%%
%% execute(XPath, Doc, Functions) -> Results
%% @type XPath = compiled_xpath() | string()
%% @type Doc = node()
%% @type Functions = [FunctionDefinition]
%% @type FunctionDefinition = {FunName, Fun, Signature}
%% @type FunName = atom()
%% @type Fun = fun/2
%% @type Signature = [ArgType]
%% @type ArgType = node_set | string | number | boolean
%% @type Results = [node()] | binary() | boolean() | number()
%% TODO: should pass the user-defined functions when compiling
%% the xpath expression (). Then the
%% compiled expression would have all its functions
%% resolved, and no function lookup would occur when
%% the expression is executed
execute(XPathString,Doc,Functions) when is_list(XPathString) ->
XPath = mochiweb_xpath_parser:compile_xpath(XPathString),
execute(XPath,Doc,Functions);
execute(XPath,Doc,Functions) ->
R = {root,none,[Doc]},
Funs = lists:foldl(fun(T={Key,_Fun,_Signature},Prev) ->
lists:keystore(Key,1,Prev,T)
end,mochiweb_xpath_functions:default_functions(),Functions),
execute_expr(XPath,#ctx{ctx=[R],root=R,functions=Funs}).
execute_expr({path,'abs',Path},Ctx =#ctx{root=Root}) ->
do_path_expr(Path,Ctx#ctx{ctx=[Root]});
execute_expr({path,'rel',Path},Ctx) ->
do_path_expr(Path,Ctx);
execute_expr({comp,Comp,A,B},Ctx) ->
CompFun = comp_fun(Comp),
L = execute_expr(A,Ctx),
R = execute_expr(B,Ctx),
comp(CompFun,L,R);
execute_expr({literal,L},_Ctx) ->
[L];
execute_expr({number,N},_Ctx) ->
[N];
execute_expr({function_call,Fun,Args},Ctx=#ctx{functions=Funs}) ->
RealArgs = lists:map(fun(Arg) ->
execute_expr(Arg,Ctx)
end,Args),
case lists:keysearch(Fun,1,Funs) of
{value,{Fun,F,FormalSignature}} ->
TypedArgs = lists:map(fun({Type,Arg}) ->
mochiweb_xpath_utils:convert(Arg,Type)
end,lists:zip(FormalSignature,RealArgs)),
F(Ctx,TypedArgs);
false ->
throw({efun_not_found,Fun})
end.
do_path_expr({step,{Axis,NodeTest,Predicates}},Ctx=#ctx{ctx=Context}) ->
NewNodeList = axis(Axis,NodeTest,Context),
apply_predicates(Predicates,NewNodeList,Ctx);
do_path_expr({refine,Step1,Step2},Ctx) ->
S1 = do_path_expr(Step1,Ctx),
do_path_expr(Step2,Ctx#ctx{ctx=S1}).
axis('child',{name,{Tag,_,_}},Context) ->
F = fun ({Tag2,_,_}) when Tag2 == Tag -> true;
(_) -> false
end,
N = lists:map(fun ({_,_,Childs}) ->
lists:filter(F,Childs) ;
(_) -> []
end, Context),
lists:flatten(N);
axis('child',{node_type,text},Context) ->
L = lists:map(fun ({_,_,Childs}) ->
case lists:filter(fun is_binary/1,Childs) of
[] -> [];
T -> list_to_binary(T)
end;
(_) ->
[]
end,Context),
L;
axis('child',{wildcard,wildcard},Context) ->
L = lists:map(fun
({_,_,Children})-> Children;
(_) -> []
end, Context),
lists:flatten(L);
axis(attribute,{name,{Attr,_Prefix,_Local}},Context) ->
L = lists:foldl(fun ({_,Attrs,_},Acc) ->
case proplists:get_value(Attr,Attrs) of
undefined -> Acc;
V -> [V|Acc]
end;
(_,Acc) ->
Acc
end,[],Context),
lists:reverse(L);
axis('descendant_or_self',{node_type,'node'},Context) ->
descendant_or_self(Context);
axis('self',{node_type,'node'},Context) ->
Context.
%%FIXME:The order of the result is wrong, it doesn't return the nodes in
%% document order. Actually the problem isn't here, but in
%% axis('child',{Tag,_,_},Ctx). We may need to find a better strategy
%% to implement the child axis if document order is important
descendant_or_self(Ctx) ->
L = descendant_or_self(Ctx,[]),
lists:reverse(L).
descendant_or_self([],Acc) ->
Acc;
descendant_or_self([E={_,_,Children}|Rest],Acc) ->
N = descendant_or_self(Children,[E|Acc]),
descendant_or_self(Rest,N);
%% text() nodes aren't included
descendant_or_self([_|Rest],Acc) ->
descendant_or_self(Rest,Acc).
apply_predicates(Predicates,NodeList,Ctx) ->
lists:foldl(fun(Pred,Nodes) ->
apply_predicate(Pred,Nodes,Ctx)
end, NodeList,Predicates).
% special case: indexing
apply_predicate({pred,{number,N}},NodeList,_Ctx) when length(NodeList) >= N ->
[lists:nth(N,NodeList)];
apply_predicate({pred,Pred},NodeList,Ctx) ->
Filter = fun(Node) ->
mochiweb_xpath_utils:boolean_value(
execute_expr(Pred,Ctx#ctx{ctx=[Node]}))
end,
L = lists:filter(Filter,NodeList),
L.
%% @see -xpath-19991116, section 3.4
comp(CompFun,L,R) when is_list(L), is_list(R) ->
lists:any(fun(LeftValue) ->
lists:any(fun(RightValue)->
CompFun(LeftValue,RightValue)
end, R)
end, L);
comp(CompFun,L,R) when is_list(L) ->
lists:any(fun(LeftValue) -> CompFun(LeftValue,R) end,L);
comp(CompFun,L,R) when is_list(R) ->
lists:any(fun(RightValue) -> CompFun(L,RightValue) end,R);
comp(CompFun,L,R) ->
CompFun(L,R).
comp_fun('=') ->
fun
(A,B) when is_number(A) -> A == mochiweb_xpath_utils:number_value(B);
(A,B) when is_number(B) -> mochiweb_xpath_utils:number_value(A) == B;
(A,B) when is_boolean(A) -> A == mochiweb_xpath_utils:boolean_value(B);
(A,B) when is_boolean(B) -> mochiweb_xpath_utils:boolean_value(A) == B;
(A,B) -> mochiweb_xpath_utils:string_value(A) == mochiweb_xpath_utils:string_value(B)
end;
comp_fun('!=') ->
fun(A,B) -> F = comp_fun('='),
not F(A,B)
end;
comp_fun('>') ->
fun(A,B) ->
mochiweb_xpath_utils:number_value(A) > mochiweb_xpath_utils:number_value(B)
end;
comp_fun('<') ->
fun(A,B) ->
mochiweb_xpath_utils:number_value(A) < mochiweb_xpath_utils:number_value(B)
end;
comp_fun('<=') ->
fun(A,B) ->
mochiweb_xpath_utils:number_value(A) =< mochiweb_xpath_utils:number_value(B)
end;
comp_fun('>=') ->
fun(A,B) ->
mochiweb_xpath_utils:number_value(A) >= mochiweb_xpath_utils:number_value(B)
end.
| null |
https://raw.githubusercontent.com/archaelus/tsung/b4ea0419c6902d8bb63795200964d25b19e46532/src/lib/mochiweb_xpath.erl
|
erlang
|
mochiweb_html_xpath.erl
the default set of functions.
@type XPath = compiled_xpath() | string()
@type Doc = node()
@type Results = [node()] | binary() | boolean() | number()
using the default set of functions plus the user-supplied ones.
@see mochiweb_xpath_functions.erl to see how to write functions
@type XPath = compiled_xpath() | string()
@type Doc = node()
@type Functions = [FunctionDefinition]
@type FunName = atom()
@type Fun = fun/2
@type Signature = [ArgType]
@type ArgType = node_set | string | number | boolean
@type Results = [node()] | binary() | boolean() | number()
TODO: should pass the user-defined functions when compiling
compiled expression would have all its functions
resolved, and no function lookup would occur when
the expression is executed
FIXME:The order of the result is wrong, it doesn't return the nodes in
document order. Actually the problem isn't here, but in
to implement the child axis if document order is important
text() nodes aren't included
special case: indexing
|
%% @author
%% created on <2008-04-29>
%% XPath interpreter, navigate mochiweb's html structs
%% Only a subset of xpath is implemented, see what is supported in test.erl
-module(mochiweb_xpath).
-export([execute/2,execute/3,compile_xpath/1]).
%% internal data
-record(ctx, {
root,
ctx,
functions
}).
%% (string()) -> compiled_xpath()
compile_xpath(Expr) ->
mochiweb_xpath_parser:compile_xpath(Expr).
%% @doc Execute the given XPath expression against the given document, using
%% execute(XPath, Doc) -> Results
execute(XPathString,Doc) when is_list(XPathString) ->
XPath = mochiweb_xpath_parser:compile_xpath(XPathString),
execute(XPath,Doc);
execute(XPath,Root) ->
execute(XPath,Root,[]).
%% @doc Execute the given XPath expression against the given document,
%% execute(XPath, Doc, Functions) -> Results
%% @type FunctionDefinition = {FunName, Fun, Signature}
%% the xpath expression (). Then the
execute(XPathString,Doc,Functions) when is_list(XPathString) ->
XPath = mochiweb_xpath_parser:compile_xpath(XPathString),
execute(XPath,Doc,Functions);
execute(XPath,Doc,Functions) ->
R = {root,none,[Doc]},
Funs = lists:foldl(fun(T={Key,_Fun,_Signature},Prev) ->
lists:keystore(Key,1,Prev,T)
end,mochiweb_xpath_functions:default_functions(),Functions),
execute_expr(XPath,#ctx{ctx=[R],root=R,functions=Funs}).
execute_expr({path,'abs',Path},Ctx =#ctx{root=Root}) ->
do_path_expr(Path,Ctx#ctx{ctx=[Root]});
execute_expr({path,'rel',Path},Ctx) ->
do_path_expr(Path,Ctx);
execute_expr({comp,Comp,A,B},Ctx) ->
CompFun = comp_fun(Comp),
L = execute_expr(A,Ctx),
R = execute_expr(B,Ctx),
comp(CompFun,L,R);
execute_expr({literal,L},_Ctx) ->
[L];
execute_expr({number,N},_Ctx) ->
[N];
execute_expr({function_call,Fun,Args},Ctx=#ctx{functions=Funs}) ->
RealArgs = lists:map(fun(Arg) ->
execute_expr(Arg,Ctx)
end,Args),
case lists:keysearch(Fun,1,Funs) of
{value,{Fun,F,FormalSignature}} ->
TypedArgs = lists:map(fun({Type,Arg}) ->
mochiweb_xpath_utils:convert(Arg,Type)
end,lists:zip(FormalSignature,RealArgs)),
F(Ctx,TypedArgs);
false ->
throw({efun_not_found,Fun})
end.
do_path_expr({step,{Axis,NodeTest,Predicates}},Ctx=#ctx{ctx=Context}) ->
NewNodeList = axis(Axis,NodeTest,Context),
apply_predicates(Predicates,NewNodeList,Ctx);
do_path_expr({refine,Step1,Step2},Ctx) ->
S1 = do_path_expr(Step1,Ctx),
do_path_expr(Step2,Ctx#ctx{ctx=S1}).
axis('child',{name,{Tag,_,_}},Context) ->
F = fun ({Tag2,_,_}) when Tag2 == Tag -> true;
(_) -> false
end,
N = lists:map(fun ({_,_,Childs}) ->
lists:filter(F,Childs) ;
(_) -> []
end, Context),
lists:flatten(N);
axis('child',{node_type,text},Context) ->
L = lists:map(fun ({_,_,Childs}) ->
case lists:filter(fun is_binary/1,Childs) of
[] -> [];
T -> list_to_binary(T)
end;
(_) ->
[]
end,Context),
L;
axis('child',{wildcard,wildcard},Context) ->
L = lists:map(fun
({_,_,Children})-> Children;
(_) -> []
end, Context),
lists:flatten(L);
axis(attribute,{name,{Attr,_Prefix,_Local}},Context) ->
L = lists:foldl(fun ({_,Attrs,_},Acc) ->
case proplists:get_value(Attr,Attrs) of
undefined -> Acc;
V -> [V|Acc]
end;
(_,Acc) ->
Acc
end,[],Context),
lists:reverse(L);
axis('descendant_or_self',{node_type,'node'},Context) ->
descendant_or_self(Context);
axis('self',{node_type,'node'},Context) ->
Context.
%% axis('child',{Tag,_,_},Ctx). We may need to find a better strategy
descendant_or_self(Ctx) ->
L = descendant_or_self(Ctx,[]),
lists:reverse(L).
descendant_or_self([],Acc) ->
Acc;
descendant_or_self([E={_,_,Children}|Rest],Acc) ->
N = descendant_or_self(Children,[E|Acc]),
descendant_or_self(Rest,N);
descendant_or_self([_|Rest],Acc) ->
descendant_or_self(Rest,Acc).
apply_predicates(Predicates,NodeList,Ctx) ->
lists:foldl(fun(Pred,Nodes) ->
apply_predicate(Pred,Nodes,Ctx)
end, NodeList,Predicates).
apply_predicate({pred,{number,N}},NodeList,_Ctx) when length(NodeList) >= N ->
[lists:nth(N,NodeList)];
apply_predicate({pred,Pred},NodeList,Ctx) ->
Filter = fun(Node) ->
mochiweb_xpath_utils:boolean_value(
execute_expr(Pred,Ctx#ctx{ctx=[Node]}))
end,
L = lists:filter(Filter,NodeList),
L.
%% @see -xpath-19991116, section 3.4
comp(CompFun,L,R) when is_list(L), is_list(R) ->
lists:any(fun(LeftValue) ->
lists:any(fun(RightValue)->
CompFun(LeftValue,RightValue)
end, R)
end, L);
comp(CompFun,L,R) when is_list(L) ->
lists:any(fun(LeftValue) -> CompFun(LeftValue,R) end,L);
comp(CompFun,L,R) when is_list(R) ->
lists:any(fun(RightValue) -> CompFun(L,RightValue) end,R);
comp(CompFun,L,R) ->
CompFun(L,R).
comp_fun('=') ->
fun
(A,B) when is_number(A) -> A == mochiweb_xpath_utils:number_value(B);
(A,B) when is_number(B) -> mochiweb_xpath_utils:number_value(A) == B;
(A,B) when is_boolean(A) -> A == mochiweb_xpath_utils:boolean_value(B);
(A,B) when is_boolean(B) -> mochiweb_xpath_utils:boolean_value(A) == B;
(A,B) -> mochiweb_xpath_utils:string_value(A) == mochiweb_xpath_utils:string_value(B)
end;
comp_fun('!=') ->
fun(A,B) -> F = comp_fun('='),
not F(A,B)
end;
comp_fun('>') ->
fun(A,B) ->
mochiweb_xpath_utils:number_value(A) > mochiweb_xpath_utils:number_value(B)
end;
comp_fun('<') ->
fun(A,B) ->
mochiweb_xpath_utils:number_value(A) < mochiweb_xpath_utils:number_value(B)
end;
comp_fun('<=') ->
fun(A,B) ->
mochiweb_xpath_utils:number_value(A) =< mochiweb_xpath_utils:number_value(B)
end;
comp_fun('>=') ->
fun(A,B) ->
mochiweb_xpath_utils:number_value(A) >= mochiweb_xpath_utils:number_value(B)
end.
|
7e91e5c3a5a1c3d45ef91c742c2cf992ffc2f3e4a633fb5e0497da2d2f752ec9
|
Quid2/zm
|
K20ffacc8f8c9.hs
|
{-# LANGUAGE DeriveAnyClass #-}
{-# LANGUAGE DeriveGeneric #-}
module Test.ZM.ADT.LeastSignificantFirst.K20ffacc8f8c9 (LeastSignificantFirst(..)) where
import qualified Prelude(Eq,Ord,Show)
import qualified GHC.Generics
import qualified Flat
import qualified Data.Model
newtype LeastSignificantFirst a = LeastSignificantFirst a
deriving (Prelude.Eq, Prelude.Ord, Prelude.Show, GHC.Generics.Generic, Flat.Flat)
instance ( Data.Model.Model a ) => Data.Model.Model ( LeastSignificantFirst a )
| null |
https://raw.githubusercontent.com/Quid2/zm/02c0514777a75ac054bfd6251edd884372faddea/test/Test/ZM/ADT/LeastSignificantFirst/K20ffacc8f8c9.hs
|
haskell
|
# LANGUAGE DeriveAnyClass #
# LANGUAGE DeriveGeneric #
|
module Test.ZM.ADT.LeastSignificantFirst.K20ffacc8f8c9 (LeastSignificantFirst(..)) where
import qualified Prelude(Eq,Ord,Show)
import qualified GHC.Generics
import qualified Flat
import qualified Data.Model
newtype LeastSignificantFirst a = LeastSignificantFirst a
deriving (Prelude.Eq, Prelude.Ord, Prelude.Show, GHC.Generics.Generic, Flat.Flat)
instance ( Data.Model.Model a ) => Data.Model.Model ( LeastSignificantFirst a )
|
1e86f3d5658ec7992e6e14f4e0386199e2f4a36bd1105c0694b10dd6b326d1f8
|
fare/lisp-interface-library
|
stateful-map.lisp
|
(uiop:define-package :lil/test/stateful-map
(:import-from :pure)
(:use :stateful :lil/test/base :lil/transform/mutating-map :lil/interface/base
:cl :uiop :fare-utils :hu.dwim.stefil)
(:import-from :lil/test/pure-map))
(in-package :lil/test/stateful-map)
(declaim (optimize (speed 1) (debug 3) (space 3)))
(defsuite* (test-stateful-map
:in test-suite
:documentation "Testing pure functional maps"))
(defmacro with-map ((m i &optional (init '())) &body body)
`(let ((,m (alist-map* ,i ,init))) ,@body (map-alist* i ,m)))
(defmethod interface-test ((i <map>))
(lil/test/pure-map:read-only-linear-map-test i)
(simple-map-test i)
(harder-map-test i))
(defmethod simple-map-test ((i <map>))
(X 'interface-test *package* i)
;;; TODO: test each and every function in the API
(X 'insert)
(is (equal '((0)) (with-map (m i) (insert i m 0 nil))))
(is (equal-alist
'((1 . "1") (2 . "2") (3 . "3"))
(with-map (m i '((1 . "1") (3 . "3"))) (insert i m 2 "2"))))
(X 'insert-and-join)
(is (equal-alist
'((0 . "0") (1 . "1") (2 . "2"))
(with-map (m i '((1 . "1")))
(join i m (alist-map* i '((2 . "2"))))
(insert i m 0 "0"))))
(X 'insert-and-size)
(with-map (m i *al-1*)
(insert i m 101 "101")
(is (= 101 (size i m))))
(X 'drop)
(with-map (m i)
(is (equal '(nil nil)
(multiple-value-list (drop i m 0)))))
(is (equal (with-map (m i '((1 . "1") (2 . "2")))
(is (equal '("1" t) (multiple-value-list (drop i m 1)))))
'((2 . "2"))))
(with-map (m i *al-1*)
(is (equal '("42" t) (multiple-value-list (drop i m 42))))
(is (= (size i m) 99)))
(X 'drop-and-size)
(with-map (m i *alist-100-decimal*)
(is (equal '("57" t) (multiple-value-list (drop i m 57))))
(is (= (size i m) 99)))
(X 'decons)
(with-map (m i)
(is (equal '(() () ()) (multiple-value-list (decons i m)))))
(with-map (m i *alist-10-latin*)
(multiple-value-bind (b k v) (decons i m)
(is (eq b t))
(is (equal (list v t)
(multiple-value-list (lookup lil/pure/alist:<alist> *alist-10-latin* k))))
(is (equal '(nil nil)
(multiple-value-list (lookup i m k))))
(is (= (size i m) 9))))
(X 'fold-left)
(is (equal-alist
'((2 . "2") (1 . "1") (20 . "20") (30 . "30"))
(with-map (m i '((20 . "20") (30 . "30")))
(fold-left
i (alist-map* i (make-alist 2))
#'(lambda (n e) (declare (ignore n)) (insert i m (car e) (cdr e)))
nil))))
(X 'fold-left*)
(is (equal-alist
'((2 . "2") (1 . "1") (20 . "20") (30 . "30"))
(with-map (m i '((20 . "20") (30 . "30")))
(fold-left*
i (alist-map* i (make-alist 2))
#'(lambda (n k v) (declare (ignore n)) (insert i m k v))
nil))))
(X 'fold-left*-and-size)
(with-map (m i *alist-100-latin*)
(fold-left* i (alist-map* i *alist-100-decimal*)
#'(lambda (n k v) (declare (ignore n)) (insert i m k v))
nil)
(is (= 100 (size i m))))
(X 'fold-right)
(is (eql nil (fold-right i (empty i) (constantly t) nil)))
(is (eql t (fold-right i (empty i) (constantly t) t)))
(X 'fold-right*)
(is (eql nil (fold-right* i (empty i) (constantly t) nil)))
(is (eql t (fold-right* i (empty i) (constantly t) t)))
(is (equal-alist
'((1 . "1") (2 . "2") (20 . "20") (30 . "30"))
(with-map (m i '((20 . "20") (30 . "30")))
(fold-right*
i (alist-map* i (make-alist 2))
#'(lambda (k v n) (declare (ignore n)) (insert i m k v))
nil))))
(X 'join)
(is (equal-alist *al-5*
(with-map (m2 i *al-2*)
(is (null (values-list (join i m2 (alist-map i *al-3*))))))))
(is (equal '() (with-map (m i) (join i m (empty i)))))
(is (equal-alist '((1 . "1") (2 . "2") (5 . "5") (6 . "6"))
(with-map (m i '((1 . "1") (2 . "2")))
(join i m (alist-map* i '((5 . "5") (6 . "6")))))))
(X 'join-and-size)
(with-map (m i *alist-10-latin*)
(join i m (alist-map* i *alist-100-latin*))
(is (= 100 (size i m))))
(X 'divide-and-join)
(multiple-value-bind (x y) (divide i (empty i))
(is (empty-p i x))
(is (empty-p i y)))
(with-map (m i *alist-10-latin*)
(multiple-value-bind (x y) (divide i m)
(is (eq m y))
(is (equal-alist *alist-10-latin*
(append (map-alist* i x) (map-alist* i y))))))
(X 'divide-and-size)
(multiple-value-bind (x y)
(divide i (alist-map* i '()))
(is (empty-p i x))
(is (empty-p i y)))
(multiple-value-bind (x y)
(divide i (alist-map* i '((1 . "1"))))
(is (empty-p i x))
(is (= 1 (size i y))))
(multiple-value-bind (x y)
(divide i (alist-map* i *alist-100-latin*))
(let ((sx (size i x)) (sy (size i y)))
(is (plusp sx))
(is (plusp sy))
(is (= 100 (+ sx sy)))))
(X 'size)
(with-map (m i *alist-100-decimal*)
(decons i m)
(is (= 99 (size i m))))
(X 'update-key)
;; TODO: add more tests
(is (null (update-key i (empty i) 0 (constantly nil))))
(X 'map/2)
;; TODO: add more tests
(let ((x (empty i)))
(map/2 i (constantly t) x (empty i))
(is (empty-p i x)))
(X 'convert)
(is (null (convert pure:<alist> i (empty i))))
(is (equal-alist *alist-10-latin*
(convert pure:<alist> i (convert i pure:<alist> *alist-10-latin*))))
(X 'iterator)
(is (equal-alist *alist-10-latin*
(map-alist* i
(flow i i (alist-map* i *alist-10-latin*) (empty i)))))
t)
(defmethod harder-map-test ((i <map>))
;; (X 'join/list)
;; TODO: add tests
(X 'divide/list)
;; TODO: add more tests
(is (null (divide/list i (empty i))))
t)
(defmethod interface-test :after ((i <number-map>))
(let* ((a1 (make-alist 200 "~@R"))
(a2 (shuffle-list a1))
(m1 (alist-map* i a1))
(m2 (alist-map* i a2)))
(is (= 8 (node-height m1)))
(is (<= 8 (node-height m2) 15))
(is (= 200 (size i m1)))
(is (= 200 (size i m2)))))
(defparameter <denm> (<encoded-key-map>
:base-interface <number-map>
:key-encoder #'(lambda (dk) (* dk 2))
:key-decoder #'(lambda (ek) (/ ek 2))))
(deftest test-stateful-map-interfaces ()
(dolist (i (list <number-map> <hash-table> <denm> <alist>))
(interface-test i)))
(defparameter <msnm> (<mutating-map> pure:<number-map>))
(deftest test-mutating-map-interfaces ()
(interface-test <msnm>))
(defparameter *special-test-data*
'((:INSERT (720255619831889/500000 . 1))
(:INSERT (180063904958453/125000 . 101))
(:INSERT (1440511239667639/1000000 . 102))
(:INSERT (720255619833821/500000 . 103))
(:INSERT (1440511239667643/1000000 . 104))
(:INSERT (360127809916911/250000 . 105))
(:INSERT (720255619833823/500000 . 106))
(:INSERT (1440511239667647/1000000 . 107))
(:INSERT (22507988119807/15625 . 108))
(:INSERT (1440511239667649/1000000 . 109))
(:REMOVE (720255619831889/500000 . 1))
(:INSERT (720255619833833/500000 . 110))
(:REMOVE (180063904958453/125000 . 101))
(:INSERT (180063904958459/125000 . 111))
(:REMOVE (1440511239667639/1000000 . 102))
(:INSERT (720255619833837/500000 . 112))
(:REMOVE (720255619833821/500000 . 103))
(:INSERT (360127809916919/250000 . 113))
(:REMOVE (1440511239667643/1000000 . 104))
(:INSERT (1440511239667677/1000000 . 114))
(:REMOVE (360127809916911/250000 . 105))
(:INSERT (1440511239667679/1000000 . 115))
(:REMOVE (720255619833823/500000 . 106))
(:INSERT (9003195247923/6250 . 116))
(:REMOVE (1440511239667647/1000000 . 107))
(:INSERT (720255619833841/500000 . 117))
(:REMOVE (22507988119807/15625 . 108))
(:INSERT (1440511239667683/1000000 . 118))
(:REMOVE (1440511239667649/1000000 . 109))
(:INSERT (288102247933537/200000 . 119))
(:INSERT (90031952478987/62500 . 2))
(:INSERT (720255619835067/500000 . 120))
(:INSERT (720255619835069/500000 . 121))
(:INSERT (720255619835071/500000 . 122))
(:INSERT (22507988119846/15625 . 123))
(:INSERT (288102247934029/200000 . 124))
(:INSERT (720255619835073/500000 . 125))
(:INSERT (1440511239670147/1000000 . 126))
(:INSERT (360127809917537/250000 . 127))
(:INSERT (28810224793403/20000 . 128))
(:REMOVE (90031952478987/62500 . 2))
(:INSERT (180063904958769/125000 . 129))
(:REMOVE (720255619835067/500000 . 120))
(:INSERT (1440511239670153/1000000 . 130))
(:REMOVE (720255619835069/500000 . 121))
(:INSERT (288102247934031/200000 . 131))
(:REMOVE (720255619835071/500000 . 122))))
(deftest special-test-1 ()
(loop :for <m> :in (list <number-map> <hash-table> <denm> <alist>) :do
(loop
:with q = (empty <m>)
:with expected-size = 0
:for i from 0
:for (cmd (k . v)) in *special-test-data*
:do (is (= expected-size (size <m> q)))
(ecase cmd
(:insert (progn
(insert <m> q k v)
(incf expected-size)))
(:remove (multiple-value-bind (dropped foundp) (drop <m> q k)
(is foundp)
(is (equal dropped v))
(decf expected-size))))
:finally (is (= expected-size (size <m> q))))))
(defun value< (v1 v2)
(cond ((= (car v1) (car v2))
(< (cdr v1) (cdr v2)))
(t
(< (car v1) (car v2)))))
(defparameter <number-pair> (<lessp> 'value<))
(defparameter <number-pair-map> (<parametric-avl-tree> <number-pair>))
(deftest special-test-2 ()
(let* ((<m> <number-pair-map>)
(q (empty <m>)))
(loop
:with expected-size = 0
:for i from 0
:for (cmd k) in *special-test-data*
:do (is (= expected-size (size <m> q)))
(ecase cmd
(:insert (progn
(insert <m> q k (cdr k))
(incf expected-size)))
(:remove (multiple-value-bind (dropped foundp) (drop <m> q k)
(is foundp)
(is (equal dropped (cdr k)))
(decf expected-size))))
:finally (is (= expected-size (size <m> q))))))
| null |
https://raw.githubusercontent.com/fare/lisp-interface-library/ac2e0063dc65feb805f0c57715d52fda28d4dcd8/test/stateful-map.lisp
|
lisp
|
TODO: test each and every function in the API
TODO: add more tests
TODO: add more tests
(X 'join/list)
TODO: add tests
TODO: add more tests
|
(uiop:define-package :lil/test/stateful-map
(:import-from :pure)
(:use :stateful :lil/test/base :lil/transform/mutating-map :lil/interface/base
:cl :uiop :fare-utils :hu.dwim.stefil)
(:import-from :lil/test/pure-map))
(in-package :lil/test/stateful-map)
(declaim (optimize (speed 1) (debug 3) (space 3)))
(defsuite* (test-stateful-map
:in test-suite
:documentation "Testing pure functional maps"))
(defmacro with-map ((m i &optional (init '())) &body body)
`(let ((,m (alist-map* ,i ,init))) ,@body (map-alist* i ,m)))
(defmethod interface-test ((i <map>))
(lil/test/pure-map:read-only-linear-map-test i)
(simple-map-test i)
(harder-map-test i))
(defmethod simple-map-test ((i <map>))
(X 'interface-test *package* i)
(X 'insert)
(is (equal '((0)) (with-map (m i) (insert i m 0 nil))))
(is (equal-alist
'((1 . "1") (2 . "2") (3 . "3"))
(with-map (m i '((1 . "1") (3 . "3"))) (insert i m 2 "2"))))
(X 'insert-and-join)
(is (equal-alist
'((0 . "0") (1 . "1") (2 . "2"))
(with-map (m i '((1 . "1")))
(join i m (alist-map* i '((2 . "2"))))
(insert i m 0 "0"))))
(X 'insert-and-size)
(with-map (m i *al-1*)
(insert i m 101 "101")
(is (= 101 (size i m))))
(X 'drop)
(with-map (m i)
(is (equal '(nil nil)
(multiple-value-list (drop i m 0)))))
(is (equal (with-map (m i '((1 . "1") (2 . "2")))
(is (equal '("1" t) (multiple-value-list (drop i m 1)))))
'((2 . "2"))))
(with-map (m i *al-1*)
(is (equal '("42" t) (multiple-value-list (drop i m 42))))
(is (= (size i m) 99)))
(X 'drop-and-size)
(with-map (m i *alist-100-decimal*)
(is (equal '("57" t) (multiple-value-list (drop i m 57))))
(is (= (size i m) 99)))
(X 'decons)
(with-map (m i)
(is (equal '(() () ()) (multiple-value-list (decons i m)))))
(with-map (m i *alist-10-latin*)
(multiple-value-bind (b k v) (decons i m)
(is (eq b t))
(is (equal (list v t)
(multiple-value-list (lookup lil/pure/alist:<alist> *alist-10-latin* k))))
(is (equal '(nil nil)
(multiple-value-list (lookup i m k))))
(is (= (size i m) 9))))
(X 'fold-left)
(is (equal-alist
'((2 . "2") (1 . "1") (20 . "20") (30 . "30"))
(with-map (m i '((20 . "20") (30 . "30")))
(fold-left
i (alist-map* i (make-alist 2))
#'(lambda (n e) (declare (ignore n)) (insert i m (car e) (cdr e)))
nil))))
(X 'fold-left*)
(is (equal-alist
'((2 . "2") (1 . "1") (20 . "20") (30 . "30"))
(with-map (m i '((20 . "20") (30 . "30")))
(fold-left*
i (alist-map* i (make-alist 2))
#'(lambda (n k v) (declare (ignore n)) (insert i m k v))
nil))))
(X 'fold-left*-and-size)
(with-map (m i *alist-100-latin*)
(fold-left* i (alist-map* i *alist-100-decimal*)
#'(lambda (n k v) (declare (ignore n)) (insert i m k v))
nil)
(is (= 100 (size i m))))
(X 'fold-right)
(is (eql nil (fold-right i (empty i) (constantly t) nil)))
(is (eql t (fold-right i (empty i) (constantly t) t)))
(X 'fold-right*)
(is (eql nil (fold-right* i (empty i) (constantly t) nil)))
(is (eql t (fold-right* i (empty i) (constantly t) t)))
(is (equal-alist
'((1 . "1") (2 . "2") (20 . "20") (30 . "30"))
(with-map (m i '((20 . "20") (30 . "30")))
(fold-right*
i (alist-map* i (make-alist 2))
#'(lambda (k v n) (declare (ignore n)) (insert i m k v))
nil))))
(X 'join)
(is (equal-alist *al-5*
(with-map (m2 i *al-2*)
(is (null (values-list (join i m2 (alist-map i *al-3*))))))))
(is (equal '() (with-map (m i) (join i m (empty i)))))
(is (equal-alist '((1 . "1") (2 . "2") (5 . "5") (6 . "6"))
(with-map (m i '((1 . "1") (2 . "2")))
(join i m (alist-map* i '((5 . "5") (6 . "6")))))))
(X 'join-and-size)
(with-map (m i *alist-10-latin*)
(join i m (alist-map* i *alist-100-latin*))
(is (= 100 (size i m))))
(X 'divide-and-join)
(multiple-value-bind (x y) (divide i (empty i))
(is (empty-p i x))
(is (empty-p i y)))
(with-map (m i *alist-10-latin*)
(multiple-value-bind (x y) (divide i m)
(is (eq m y))
(is (equal-alist *alist-10-latin*
(append (map-alist* i x) (map-alist* i y))))))
(X 'divide-and-size)
(multiple-value-bind (x y)
(divide i (alist-map* i '()))
(is (empty-p i x))
(is (empty-p i y)))
(multiple-value-bind (x y)
(divide i (alist-map* i '((1 . "1"))))
(is (empty-p i x))
(is (= 1 (size i y))))
(multiple-value-bind (x y)
(divide i (alist-map* i *alist-100-latin*))
(let ((sx (size i x)) (sy (size i y)))
(is (plusp sx))
(is (plusp sy))
(is (= 100 (+ sx sy)))))
(X 'size)
(with-map (m i *alist-100-decimal*)
(decons i m)
(is (= 99 (size i m))))
(X 'update-key)
(is (null (update-key i (empty i) 0 (constantly nil))))
(X 'map/2)
(let ((x (empty i)))
(map/2 i (constantly t) x (empty i))
(is (empty-p i x)))
(X 'convert)
(is (null (convert pure:<alist> i (empty i))))
(is (equal-alist *alist-10-latin*
(convert pure:<alist> i (convert i pure:<alist> *alist-10-latin*))))
(X 'iterator)
(is (equal-alist *alist-10-latin*
(map-alist* i
(flow i i (alist-map* i *alist-10-latin*) (empty i)))))
t)
(defmethod harder-map-test ((i <map>))
(X 'divide/list)
(is (null (divide/list i (empty i))))
t)
(defmethod interface-test :after ((i <number-map>))
(let* ((a1 (make-alist 200 "~@R"))
(a2 (shuffle-list a1))
(m1 (alist-map* i a1))
(m2 (alist-map* i a2)))
(is (= 8 (node-height m1)))
(is (<= 8 (node-height m2) 15))
(is (= 200 (size i m1)))
(is (= 200 (size i m2)))))
(defparameter <denm> (<encoded-key-map>
:base-interface <number-map>
:key-encoder #'(lambda (dk) (* dk 2))
:key-decoder #'(lambda (ek) (/ ek 2))))
(deftest test-stateful-map-interfaces ()
(dolist (i (list <number-map> <hash-table> <denm> <alist>))
(interface-test i)))
(defparameter <msnm> (<mutating-map> pure:<number-map>))
(deftest test-mutating-map-interfaces ()
(interface-test <msnm>))
(defparameter *special-test-data*
'((:INSERT (720255619831889/500000 . 1))
(:INSERT (180063904958453/125000 . 101))
(:INSERT (1440511239667639/1000000 . 102))
(:INSERT (720255619833821/500000 . 103))
(:INSERT (1440511239667643/1000000 . 104))
(:INSERT (360127809916911/250000 . 105))
(:INSERT (720255619833823/500000 . 106))
(:INSERT (1440511239667647/1000000 . 107))
(:INSERT (22507988119807/15625 . 108))
(:INSERT (1440511239667649/1000000 . 109))
(:REMOVE (720255619831889/500000 . 1))
(:INSERT (720255619833833/500000 . 110))
(:REMOVE (180063904958453/125000 . 101))
(:INSERT (180063904958459/125000 . 111))
(:REMOVE (1440511239667639/1000000 . 102))
(:INSERT (720255619833837/500000 . 112))
(:REMOVE (720255619833821/500000 . 103))
(:INSERT (360127809916919/250000 . 113))
(:REMOVE (1440511239667643/1000000 . 104))
(:INSERT (1440511239667677/1000000 . 114))
(:REMOVE (360127809916911/250000 . 105))
(:INSERT (1440511239667679/1000000 . 115))
(:REMOVE (720255619833823/500000 . 106))
(:INSERT (9003195247923/6250 . 116))
(:REMOVE (1440511239667647/1000000 . 107))
(:INSERT (720255619833841/500000 . 117))
(:REMOVE (22507988119807/15625 . 108))
(:INSERT (1440511239667683/1000000 . 118))
(:REMOVE (1440511239667649/1000000 . 109))
(:INSERT (288102247933537/200000 . 119))
(:INSERT (90031952478987/62500 . 2))
(:INSERT (720255619835067/500000 . 120))
(:INSERT (720255619835069/500000 . 121))
(:INSERT (720255619835071/500000 . 122))
(:INSERT (22507988119846/15625 . 123))
(:INSERT (288102247934029/200000 . 124))
(:INSERT (720255619835073/500000 . 125))
(:INSERT (1440511239670147/1000000 . 126))
(:INSERT (360127809917537/250000 . 127))
(:INSERT (28810224793403/20000 . 128))
(:REMOVE (90031952478987/62500 . 2))
(:INSERT (180063904958769/125000 . 129))
(:REMOVE (720255619835067/500000 . 120))
(:INSERT (1440511239670153/1000000 . 130))
(:REMOVE (720255619835069/500000 . 121))
(:INSERT (288102247934031/200000 . 131))
(:REMOVE (720255619835071/500000 . 122))))
(deftest special-test-1 ()
(loop :for <m> :in (list <number-map> <hash-table> <denm> <alist>) :do
(loop
:with q = (empty <m>)
:with expected-size = 0
:for i from 0
:for (cmd (k . v)) in *special-test-data*
:do (is (= expected-size (size <m> q)))
(ecase cmd
(:insert (progn
(insert <m> q k v)
(incf expected-size)))
(:remove (multiple-value-bind (dropped foundp) (drop <m> q k)
(is foundp)
(is (equal dropped v))
(decf expected-size))))
:finally (is (= expected-size (size <m> q))))))
(defun value< (v1 v2)
(cond ((= (car v1) (car v2))
(< (cdr v1) (cdr v2)))
(t
(< (car v1) (car v2)))))
(defparameter <number-pair> (<lessp> 'value<))
(defparameter <number-pair-map> (<parametric-avl-tree> <number-pair>))
(deftest special-test-2 ()
(let* ((<m> <number-pair-map>)
(q (empty <m>)))
(loop
:with expected-size = 0
:for i from 0
:for (cmd k) in *special-test-data*
:do (is (= expected-size (size <m> q)))
(ecase cmd
(:insert (progn
(insert <m> q k (cdr k))
(incf expected-size)))
(:remove (multiple-value-bind (dropped foundp) (drop <m> q k)
(is foundp)
(is (equal dropped (cdr k)))
(decf expected-size))))
:finally (is (= expected-size (size <m> q))))))
|
71a9abbe564703cf7193ae6d15aa317099124158e6eda7fdcc6ef4112397a85a
|
gedge-platform/gedge-platform
|
rabbit_amqp1_0_message.erl
|
%% This Source Code Form is subject to the terms of the Mozilla Public
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at /.
%%
%% Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
%%
-module(rabbit_amqp1_0_message).
-export([assemble/1, annotated_message/3]).
-define(PROPERTIES_HEADER, <<"x-amqp-1.0-properties">>).
-define(APP_PROPERTIES_HEADER, <<"x-amqp-1.0-app-properties">>).
-define(MESSAGE_ANNOTATIONS_HEADER, <<"x-amqp-1.0-message-annotations">>).
-define(FOOTER, <<"x-amqp-1.0-footer">>).
-include_lib("amqp_client/include/amqp_client.hrl").
-include("rabbit_amqp1_0.hrl").
assemble(MsgBin) ->
{RKey, Props, Content} = assemble(header, {<<"">>, #'P_basic'{}, []},
decode_section(MsgBin), MsgBin),
{RKey, #amqp_msg{props = Props, payload = Content}}.
assemble(header, {R, P, C}, {H = #'v1_0.header'{}, Rest}, _Uneaten) ->
assemble(message_annotations, {R, translate_header(H, P), C},
decode_section(Rest), Rest);
assemble(header, {R, P, C}, Else, Uneaten) ->
assemble(message_annotations, {R, P, C}, Else, Uneaten);
assemble(delivery_annotations, RPC, {#'v1_0.delivery_annotations'{}, Rest},
Uneaten) ->
%% ignore delivery annotations for now
%% TODO: handle "rejected" error
assemble(message_annotations, RPC, Rest, Uneaten);
assemble(delivery_annotations, RPC, Else, Uneaten) ->
assemble(message_annotations, RPC, Else, Uneaten);
assemble(message_annotations, {R, P = #'P_basic'{headers = Headers}, C},
{#'v1_0.message_annotations'{}, Rest}, Uneaten) ->
MsgAnnoBin = chunk(Rest, Uneaten),
assemble(properties, {R, P#'P_basic'{
headers = set_header(?MESSAGE_ANNOTATIONS_HEADER,
MsgAnnoBin, Headers)}, C},
decode_section(Rest), Rest);
assemble(message_annotations, {R, P, C}, Else, Uneaten) ->
assemble(properties, {R, P, C}, Else, Uneaten);
assemble(properties, {_R, P, C}, {X = #'v1_0.properties'{}, Rest}, Uneaten) ->
PropsBin = chunk(Rest, Uneaten),
assemble(app_properties, {routing_key(X),
translate_properties(X, PropsBin, P), C},
decode_section(Rest), Rest);
assemble(properties, {R, P, C}, Else, Uneaten) ->
assemble(app_properties, {R, P, C}, Else, Uneaten);
assemble(app_properties, {R, P = #'P_basic'{headers = Headers}, C},
{#'v1_0.application_properties'{}, Rest}, Uneaten) ->
AppPropsBin = chunk(Rest, Uneaten),
assemble(body, {R, P#'P_basic'{
headers = set_header(?APP_PROPERTIES_HEADER,
AppPropsBin, Headers)}, C},
decode_section(Rest), Rest);
assemble(app_properties, {R, P, C}, Else, Uneaten) ->
assemble(body, {R, P, C}, Else, Uneaten);
%% The only 'interoperable' content is a single amqp-data section.
%% Everything else we will leave as-is. We still have to parse the
%% sections one-by-one, however, to see when we hit the footer or
%% whatever comes next.
%% NB we do not strictly enforce the (slightly random) rules
%% pertaining to body sections, that is:
%% - one amqp-value; OR
%% - one or more amqp-sequence; OR
%% - one or more amqp-data.
%% We allow any number of each kind, in any permutation.
assemble(body, {R, P, _}, {#'v1_0.data'{content = Content}, Rest}, Uneaten) ->
Chunk = chunk(Rest, Uneaten),
assemble(amqp10body, {R, set_1_0_type(<<"binary">>, P),
{data, Content, Chunk}},
decode_section(Rest), Rest);
assemble(body, {R, P, C}, Else, Uneaten) ->
assemble(amqp10body, {R, P, C}, Else, Uneaten);
assemble(amqp10body, {R, P, C}, {{Type, _}, Rest}, Uneaten)
when Type =:= 'v1_0.data' orelse
Type =:= 'v1_0.amqp_sequence' orelse
Type =:= 'v1_0.amqp_value' ->
Encoded = chunk(Rest, Uneaten),
assemble(amqp10body,
{R, set_1_0_type(<<"amqp-1.0">>, P), add_body_section(Encoded, C)},
decode_section(Rest), Rest);
assemble(amqp10body, {R, P, C}, Else, Uneaten) ->
assemble(footer, {R, P, compile_body(C)}, Else, Uneaten);
assemble(footer, {R, P = #'P_basic'{headers = Headers}, C},
{#'v1_0.footer'{}, <<>>}, Uneaten) ->
{R, P#'P_basic'{headers = set_header(?FOOTER, Uneaten, Headers)}, C};
assemble(footer, {R, P, C}, none, _) ->
{R, P, C};
assemble(footer, _, Else, _) ->
exit({unexpected_trailing_sections, Else});
assemble(Expected, _, Actual, _) ->
exit({expected_section, Expected, Actual}).
decode_section(<<>>) ->
none;
decode_section(MsgBin) ->
{AmqpValue, Rest} = amqp10_binary_parser:parse(MsgBin),
{amqp10_framing:decode(AmqpValue), Rest}.
chunk(Rest, Uneaten) ->
ChunkLen = size(Uneaten) - size(Rest),
<<Chunk:ChunkLen/binary, _ActuallyRest/binary>> = Uneaten,
Chunk.
add_body_section(C, {data, _, Bin}) ->
[C, Bin];
add_body_section(C, Cs) ->
[C | Cs].
compile_body({data, Content, _}) ->
Content;
compile_body(Sections) ->
lists:reverse(Sections).
translate_header(Header10, Props) ->
Props#'P_basic'{
delivery_mode = case Header10#'v1_0.header'.durable of
true -> 2;
_ -> 1
end,
priority = unwrap(Header10#'v1_0.header'.priority),
expiration = to_expiration(Header10#'v1_0.header'.ttl),
type = undefined,
app_id = undefined,
cluster_id = undefined}.
translate_properties(Props10, Props10Bin,
Props = #'P_basic'{headers = Headers}) ->
Props#'P_basic'{
headers = set_header(?PROPERTIES_HEADER, Props10Bin,
Headers),
content_type = unwrap(Props10#'v1_0.properties'.content_type),
content_encoding = unwrap(Props10#'v1_0.properties'.content_encoding),
correlation_id = unwrap(Props10#'v1_0.properties'.correlation_id),
reply_to = case unwrap(Props10#'v1_0.properties'.reply_to) of
<<"/queue/", Q/binary>> -> Q;
Else -> Else
end,
message_id = unwrap(Props10#'v1_0.properties'.message_id),
user_id = unwrap(Props10#'v1_0.properties'.user_id),
timestamp = unwrap(Props10#'v1_0.properties'.creation_time)}.
routing_key(Props10) ->
unwrap(Props10#'v1_0.properties'.subject).
unwrap(undefined) -> undefined;
unwrap({_Type, Thing}) -> Thing.
to_expiration(undefined) ->
undefined;
to_expiration({uint, Num}) ->
list_to_binary(integer_to_list(Num)).
from_expiration(undefined) ->
undefined;
from_expiration(PBasic) ->
case rabbit_basic:parse_expiration(PBasic) of
{ok, undefined} -> undefined;
{ok, N} -> {uint, N};
_ -> undefined
end.
set_header(Header, Value, undefined) ->
set_header(Header, Value, []);
set_header(Header, Value, Headers) ->
rabbit_misc:set_table_value(Headers, Header, longstr, Value).
set_1_0_type(Type, Props = #'P_basic'{}) ->
Props#'P_basic'{type = Type}.
%%--------------------------------------------------------------------
%% TODO create delivery-annotations
annotated_message(RKey, #'basic.deliver'{redelivered = Redelivered},
#amqp_msg{props = Props,
payload = Content}) ->
#'P_basic'{ headers = Headers } = Props,
Header10 = #'v1_0.header'
{durable = case Props#'P_basic'.delivery_mode of
2 -> true;
_ -> false
end,
priority = wrap(ubyte, Props#'P_basic'.priority),
ttl = from_expiration(Props),
first_acquirer = not Redelivered,
delivery_count = undefined},
HeadersBin = amqp10_framing:encode_bin(Header10),
MsgAnnoBin =
case table_lookup(Headers, ?MESSAGE_ANNOTATIONS_HEADER) of
undefined -> <<>>;
{_, MABin} -> MABin
end,
PropsBin =
case table_lookup(Headers, ?PROPERTIES_HEADER) of
{_, Props10Bin} ->
Props10Bin;
undefined ->
Props10 = #'v1_0.properties'{
message_id = wrap(utf8, Props#'P_basic'.message_id),
user_id = wrap(utf8, Props#'P_basic'.user_id),
to = undefined,
subject = wrap(utf8, RKey),
reply_to = case Props#'P_basic'.reply_to of
undefined ->
undefined;
_ ->
wrap(utf8,
<<"/queue/",
(Props#'P_basic'.reply_to)/binary>>)
end,
correlation_id = wrap(utf8, Props#'P_basic'.correlation_id),
content_type = wrap(symbol, Props#'P_basic'.content_type),
content_encoding = wrap(symbol, Props#'P_basic'.content_encoding),
creation_time = wrap(timestamp, Props#'P_basic'.timestamp)},
amqp10_framing:encode_bin(Props10)
end,
AppPropsBin =
case table_lookup(Headers, ?APP_PROPERTIES_HEADER) of
{_, AppProps10Bin} ->
AppProps10Bin;
undefined ->
[]
end,
DataBin = case Props#'P_basic'.type of
<<"amqp-1.0">> ->
Content;
                  _ -> % e.g., <<"binary">> if originally from 1.0
amqp10_framing:encode_bin(
#'v1_0.data'{content = Content})
end,
FooterBin =
case table_lookup(Headers, ?FOOTER) of
undefined -> <<>>;
{_, FBin} -> FBin
end,
[HeadersBin, MsgAnnoBin, PropsBin, AppPropsBin, DataBin, FooterBin].
wrap(_Type, undefined) ->
undefined;
wrap(Type, Val) ->
{Type, Val}.
table_lookup(undefined, _) -> undefined;
table_lookup(Headers, Header) -> rabbit_misc:table_lookup(Headers, Header).
| null |
https://raw.githubusercontent.com/gedge-platform/gedge-platform/97c1e87faf28ba2942a77196b6be0a952bff1c3e/gs-broker/broker-server/deps/rabbitmq_amqp1_0/src/rabbit_amqp1_0_message.erl
|
erlang
|
ignore delivery annotations for now
TODO: handle "rejected" error
Everything else we will leave as-is. We still have to parse the
sections one-by-one, however, to see when we hit the footer or
whatever comes next.
pertaining to body sections, that is:
We allow any number of each kind, in any permutation.
--------------------------------------------------------------------
|
%% This Source Code Form is subject to the terms of the Mozilla Public
%% License, v. 2.0. If a copy of the MPL was not distributed with this
%% file, You can obtain one at /.
%% Copyright (c) 2007-2021 VMware, Inc. or its affiliates. All rights reserved.
-module(rabbit_amqp1_0_message).
-export([assemble/1, annotated_message/3]).
-define(PROPERTIES_HEADER, <<"x-amqp-1.0-properties">>).
-define(APP_PROPERTIES_HEADER, <<"x-amqp-1.0-app-properties">>).
-define(MESSAGE_ANNOTATIONS_HEADER, <<"x-amqp-1.0-message-annotations">>).
-define(FOOTER, <<"x-amqp-1.0-footer">>).
-include_lib("amqp_client/include/amqp_client.hrl").
-include("rabbit_amqp1_0.hrl").
assemble(MsgBin) ->
{RKey, Props, Content} = assemble(header, {<<"">>, #'P_basic'{}, []},
decode_section(MsgBin), MsgBin),
{RKey, #amqp_msg{props = Props, payload = Content}}.
assemble(header, {R, P, C}, {H = #'v1_0.header'{}, Rest}, _Uneaten) ->
assemble(message_annotations, {R, translate_header(H, P), C},
decode_section(Rest), Rest);
assemble(header, {R, P, C}, Else, Uneaten) ->
assemble(message_annotations, {R, P, C}, Else, Uneaten);
assemble(delivery_annotations, RPC, {#'v1_0.delivery_annotations'{}, Rest},
Uneaten) ->
assemble(message_annotations, RPC, Rest, Uneaten);
assemble(delivery_annotations, RPC, Else, Uneaten) ->
assemble(message_annotations, RPC, Else, Uneaten);
assemble(message_annotations, {R, P = #'P_basic'{headers = Headers}, C},
{#'v1_0.message_annotations'{}, Rest}, Uneaten) ->
MsgAnnoBin = chunk(Rest, Uneaten),
assemble(properties, {R, P#'P_basic'{
headers = set_header(?MESSAGE_ANNOTATIONS_HEADER,
MsgAnnoBin, Headers)}, C},
decode_section(Rest), Rest);
assemble(message_annotations, {R, P, C}, Else, Uneaten) ->
assemble(properties, {R, P, C}, Else, Uneaten);
assemble(properties, {_R, P, C}, {X = #'v1_0.properties'{}, Rest}, Uneaten) ->
PropsBin = chunk(Rest, Uneaten),
assemble(app_properties, {routing_key(X),
translate_properties(X, PropsBin, P), C},
decode_section(Rest), Rest);
assemble(properties, {R, P, C}, Else, Uneaten) ->
assemble(app_properties, {R, P, C}, Else, Uneaten);
assemble(app_properties, {R, P = #'P_basic'{headers = Headers}, C},
{#'v1_0.application_properties'{}, Rest}, Uneaten) ->
AppPropsBin = chunk(Rest, Uneaten),
assemble(body, {R, P#'P_basic'{
headers = set_header(?APP_PROPERTIES_HEADER,
AppPropsBin, Headers)}, C},
decode_section(Rest), Rest);
assemble(app_properties, {R, P, C}, Else, Uneaten) ->
assemble(body, {R, P, C}, Else, Uneaten);
%% The only 'interoperable' content is a single amqp-data section.
%% NB we do not strictly enforce the (slightly random) rules
%% - one amqp-value; OR
%% - one or more amqp-sequence; OR
%% - one or more amqp-data.
assemble(body, {R, P, _}, {#'v1_0.data'{content = Content}, Rest}, Uneaten) ->
Chunk = chunk(Rest, Uneaten),
assemble(amqp10body, {R, set_1_0_type(<<"binary">>, P),
{data, Content, Chunk}},
decode_section(Rest), Rest);
assemble(body, {R, P, C}, Else, Uneaten) ->
assemble(amqp10body, {R, P, C}, Else, Uneaten);
assemble(amqp10body, {R, P, C}, {{Type, _}, Rest}, Uneaten)
when Type =:= 'v1_0.data' orelse
Type =:= 'v1_0.amqp_sequence' orelse
Type =:= 'v1_0.amqp_value' ->
Encoded = chunk(Rest, Uneaten),
assemble(amqp10body,
{R, set_1_0_type(<<"amqp-1.0">>, P), add_body_section(Encoded, C)},
decode_section(Rest), Rest);
assemble(amqp10body, {R, P, C}, Else, Uneaten) ->
assemble(footer, {R, P, compile_body(C)}, Else, Uneaten);
assemble(footer, {R, P = #'P_basic'{headers = Headers}, C},
{#'v1_0.footer'{}, <<>>}, Uneaten) ->
{R, P#'P_basic'{headers = set_header(?FOOTER, Uneaten, Headers)}, C};
assemble(footer, {R, P, C}, none, _) ->
{R, P, C};
assemble(footer, _, Else, _) ->
exit({unexpected_trailing_sections, Else});
assemble(Expected, _, Actual, _) ->
exit({expected_section, Expected, Actual}).
decode_section(<<>>) ->
none;
decode_section(MsgBin) ->
{AmqpValue, Rest} = amqp10_binary_parser:parse(MsgBin),
{amqp10_framing:decode(AmqpValue), Rest}.
chunk(Rest, Uneaten) ->
ChunkLen = size(Uneaten) - size(Rest),
<<Chunk:ChunkLen/binary, _ActuallyRest/binary>> = Uneaten,
Chunk.
add_body_section(C, {data, _, Bin}) ->
[C, Bin];
add_body_section(C, Cs) ->
[C | Cs].
compile_body({data, Content, _}) ->
Content;
compile_body(Sections) ->
lists:reverse(Sections).
translate_header(Header10, Props) ->
Props#'P_basic'{
delivery_mode = case Header10#'v1_0.header'.durable of
true -> 2;
_ -> 1
end,
priority = unwrap(Header10#'v1_0.header'.priority),
expiration = to_expiration(Header10#'v1_0.header'.ttl),
type = undefined,
app_id = undefined,
cluster_id = undefined}.
translate_properties(Props10, Props10Bin,
Props = #'P_basic'{headers = Headers}) ->
Props#'P_basic'{
headers = set_header(?PROPERTIES_HEADER, Props10Bin,
Headers),
content_type = unwrap(Props10#'v1_0.properties'.content_type),
content_encoding = unwrap(Props10#'v1_0.properties'.content_encoding),
correlation_id = unwrap(Props10#'v1_0.properties'.correlation_id),
reply_to = case unwrap(Props10#'v1_0.properties'.reply_to) of
<<"/queue/", Q/binary>> -> Q;
Else -> Else
end,
message_id = unwrap(Props10#'v1_0.properties'.message_id),
user_id = unwrap(Props10#'v1_0.properties'.user_id),
timestamp = unwrap(Props10#'v1_0.properties'.creation_time)}.
routing_key(Props10) ->
unwrap(Props10#'v1_0.properties'.subject).
unwrap(undefined) -> undefined;
unwrap({_Type, Thing}) -> Thing.
to_expiration(undefined) ->
undefined;
to_expiration({uint, Num}) ->
list_to_binary(integer_to_list(Num)).
from_expiration(undefined) ->
undefined;
from_expiration(PBasic) ->
case rabbit_basic:parse_expiration(PBasic) of
{ok, undefined} -> undefined;
{ok, N} -> {uint, N};
_ -> undefined
end.
set_header(Header, Value, undefined) ->
set_header(Header, Value, []);
set_header(Header, Value, Headers) ->
rabbit_misc:set_table_value(Headers, Header, longstr, Value).
set_1_0_type(Type, Props = #'P_basic'{}) ->
Props#'P_basic'{type = Type}.
%% TODO create delivery-annotations
annotated_message(RKey, #'basic.deliver'{redelivered = Redelivered},
#amqp_msg{props = Props,
payload = Content}) ->
#'P_basic'{ headers = Headers } = Props,
Header10 = #'v1_0.header'
{durable = case Props#'P_basic'.delivery_mode of
2 -> true;
_ -> false
end,
priority = wrap(ubyte, Props#'P_basic'.priority),
ttl = from_expiration(Props),
first_acquirer = not Redelivered,
delivery_count = undefined},
HeadersBin = amqp10_framing:encode_bin(Header10),
MsgAnnoBin =
case table_lookup(Headers, ?MESSAGE_ANNOTATIONS_HEADER) of
undefined -> <<>>;
{_, MABin} -> MABin
end,
PropsBin =
case table_lookup(Headers, ?PROPERTIES_HEADER) of
{_, Props10Bin} ->
Props10Bin;
undefined ->
Props10 = #'v1_0.properties'{
message_id = wrap(utf8, Props#'P_basic'.message_id),
user_id = wrap(utf8, Props#'P_basic'.user_id),
to = undefined,
subject = wrap(utf8, RKey),
reply_to = case Props#'P_basic'.reply_to of
undefined ->
undefined;
_ ->
wrap(utf8,
<<"/queue/",
(Props#'P_basic'.reply_to)/binary>>)
end,
correlation_id = wrap(utf8, Props#'P_basic'.correlation_id),
content_type = wrap(symbol, Props#'P_basic'.content_type),
content_encoding = wrap(symbol, Props#'P_basic'.content_encoding),
creation_time = wrap(timestamp, Props#'P_basic'.timestamp)},
amqp10_framing:encode_bin(Props10)
end,
AppPropsBin =
case table_lookup(Headers, ?APP_PROPERTIES_HEADER) of
{_, AppProps10Bin} ->
AppProps10Bin;
undefined ->
[]
end,
DataBin = case Props#'P_basic'.type of
<<"amqp-1.0">> ->
Content;
                  _ -> % e.g., <<"binary">> if originally from 1.0
amqp10_framing:encode_bin(
#'v1_0.data'{content = Content})
end,
FooterBin =
case table_lookup(Headers, ?FOOTER) of
undefined -> <<>>;
{_, FBin} -> FBin
end,
[HeadersBin, MsgAnnoBin, PropsBin, AppPropsBin, DataBin, FooterBin].
wrap(_Type, undefined) ->
undefined;
wrap(Type, Val) ->
{Type, Val}.
table_lookup(undefined, _) -> undefined;
table_lookup(Headers, Header) -> rabbit_misc:table_lookup(Headers, Header).
|
e54e0cfa08044957cd6ab9fb57e6938b74bcf613357917de2e3f49107f5f9fdf
|
facebookarchive/hs-zstd
|
Types.hs
|
-- Copyright (c) 2016-present, Facebook, Inc.
-- All rights reserved.
--
-- This source code is licensed under the BSD-style license found in
-- the LICENSE file in the root directory of this source tree. An
-- additional grant of patent rights can be found in the PATENTS file
-- in the same directory.
{-# LANGUAGE BangPatterns #-}
-- |
-- Module : Codec.Compression.Zstd.Types
-- Copyright : (c) 2016-present, Facebook, Inc. All rights reserved.
--
-- License : BSD3
-- Maintainer :
-- Stability : experimental
-- Portability : GHC
--
-- Types supporting zstd compression and decompression.
module Codec.Compression.Zstd.Types
(
Decompress(..)
, Dict(..)
, fromDict
, mkDict
) where
import Control.DeepSeq (NFData(..))
import Data.ByteString (ByteString)
-- | The result of a decompression operation.
data Decompress =
Skip
-- ^ Either the compressed frame was empty, or it was compressed in
-- streaming mode and so its size is not known.
| Error String
-- ^ An error occurred.
| Decompress ByteString
-- ^ The payload was successfully decompressed.
deriving (Eq, Read, Show)
-- | Compression dictionary.
newtype Dict = Dict {
fromDict :: ByteString
} deriving (Eq, Ord)
-- | Smart constructor.
mkDict :: ByteString -> Dict
mkDict d = Dict d
instance Show Dict where
showsPrec n (Dict d) r = showsPrec n d r
instance Read Dict where
readsPrec n s = map (\(a,b) -> (Dict a, b)) (readsPrec n s)
instance NFData Dict where
rnf (Dict d) = rnf d
| null |
https://raw.githubusercontent.com/facebookarchive/hs-zstd/ae7f174bb614a2fb71cfbb36e93a136ff9430fd7/Codec/Compression/Zstd/Types.hs
|
haskell
|
All rights reserved.
This source code is licensed under the BSD-style license found in
the LICENSE file in the root directory of this source tree. An
in the same directory.
# LANGUAGE BangPatterns #
|
Module : Codec.Compression.Zstd.Types
License : BSD3
Maintainer :
Stability : experimental
| The result of a decompression operation.
^ Either the compressed frame was empty, or it was compressed in
streaming mode and so its size is not known.
^ An error occurred.
^ The payload was successfully decompressed.
| Compression dictionary.
| Smart constructor.
|
-- Copyright (c) 2016-present, Facebook, Inc.
-- additional grant of patent rights can be found in the PATENTS file
-- Copyright : (c) 2016-present, Facebook, Inc. All rights reserved.
-- Portability : GHC
-- Types supporting zstd compression and decompression.
module Codec.Compression.Zstd.Types
(
Decompress(..)
, Dict(..)
, fromDict
, mkDict
) where
import Control.DeepSeq (NFData(..))
import Data.ByteString (ByteString)
data Decompress =
Skip
| Error String
| Decompress ByteString
deriving (Eq, Read, Show)
newtype Dict = Dict {
fromDict :: ByteString
} deriving (Eq, Ord)
mkDict :: ByteString -> Dict
mkDict d = Dict d
instance Show Dict where
showsPrec n (Dict d) r = showsPrec n d r
instance Read Dict where
readsPrec n s = map (\(a,b) -> (Dict a, b)) (readsPrec n s)
instance NFData Dict where
rnf (Dict d) = rnf d
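-- Editorial sketch, not part of the original module: a minimal example of
-- branching on a 'Decompress' value. 'describeResult' is a hypothetical
-- helper added purely for illustration; it relies only on the definitions
-- above (and the Show instance for ByteString).
describeResult :: Decompress -> String
describeResult Skip            = "empty or streaming frame; decompressed size unknown"
describeResult (Error msg)     = "decompression failed: " ++ msg
describeResult (Decompress bs) = "decompressed payload: " ++ show bs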
|
fc980087ea894af6d1df873b27efdb4331578124d6eb7aa75a5ada7d932f9bf6
|
LambdaScientist/CLaSH-by-example
|
SimulateDesign.hs
|
-- CONVERTED
import ClocksAndRegisters.TestClks_n_regs_4
import qualified ClocksAndRegisters.Models.Clks_n_regs_4 as CR
import ClocksAndRegisters.TestDflop_en_clr
import ClocksAndRegisters.Models.Dflop_en_clr
import ClocksAndRegisters.TestDflop_sync_enable
import ClocksAndRegisters.Models.Dflop_sync_enable
import ClocksAndRegisters.TestSimpleDFlop
import ClocksAndRegisters.Models.SimpleDFlop
import ClocksAndRegisters.TestSimpleDFlopWithReset
import ClocksAndRegisters.Models.SimpleDFlopWithReset
import InAndOut.TestBusBreakout
import InAndOut.Models.BusBreakout
import InAndOut.TestBusSignals
import InAndOut.Models.BusSignals
import InAndOut.TestIntermediateSignal
import InAndOut.Models.IntermediateSignal
import InAndOut.TestSimpleInOut
import InAndOut.Models.SimpleInOut
import InAndOut.TestStandardMux1
import InAndOut.Models.StandardMux1
import InAndOut .
import InAndOut.Models.StandardMux2
import StateMachines .
import StateMachines.Models.StateMachine
import StateMachines .
import StateMachines.Models.StateMachine2
-------------- DEAD ------------------------------------------------------------
-- These died because variables are not very sensible
-- import ClocksAndRegisters.TestClks_n_regs_5
-- import ClocksAndRegisters.Models.Clks_n_regs_5
-------------- END DEAD --------------------------------------------------------
-- import ModularDesign.TestModular1
-- import ModularDesign.Models.Modular1
import ModularDesign.TestModular2
-- import ModularDesign.Models.Modular2
import ModularDesign.Models.StateMachine
-- import SAFE.CommonClash
import SAFE.TestingTools
import Prelude
main = print ppSetupAndRun
-- topEntity = CR.topEntity
--
--
-- IO stuff
-- print A;
-- print B;
-- print C;
| null |
https://raw.githubusercontent.com/LambdaScientist/CLaSH-by-example/e783cd2f2408e67baf7f36c10398c27036a78ef3/HaskellClashExamples/src/SimulateDesign.hs
|
haskell
|
CONVERTED
------------ DEAD ------------------------------------------------------------
------------ END DEAD --------------------------------------------------------
import ModularDesign.TestModular1
import ModularDesign.Models.Modular1
import ModularDesign.Models.Modular2
import SAFE.CommonClash
topEntity = CR.topEntity
IO stuff
print A;
print B;
print C;
|
import ClocksAndRegisters.TestClks_n_regs_4
import qualified ClocksAndRegisters.Models.Clks_n_regs_4 as CR
import ClocksAndRegisters.TestDflop_en_clr
import ClocksAndRegisters.Models.Dflop_en_clr
import ClocksAndRegisters.TestDflop_sync_enable
import ClocksAndRegisters.Models.Dflop_sync_enable
import ClocksAndRegisters.TestSimpleDFlop
import ClocksAndRegisters.Models.SimpleDFlop
import ClocksAndRegisters.TestSimpleDFlopWithReset
import ClocksAndRegisters.Models.SimpleDFlopWithReset
import InAndOut.TestBusBreakout
import InAndOut.Models.BusBreakout
import InAndOut.TestBusSignals
import InAndOut.Models.BusSignals
import InAndOut.TestIntermediateSignal
import InAndOut.Models.IntermediateSignal
import InAndOut.TestSimpleInOut
import InAndOut.Models.SimpleInOut
import InAndOut.TestStandardMux1
import InAndOut.Models.StandardMux1
import InAndOut .
import InAndOut.Models.StandardMux2
import StateMachines .
import StateMachines.Models.StateMachine
import StateMachines .
import StateMachines.Models.StateMachine2
-- These died because variables are not very sensible
-- import ClocksAndRegisters.TestClks_n_regs_5
-- import ClocksAndRegisters.Models.Clks_n_regs_5
import ModularDesign.TestModular2
import ModularDesign.Models.StateMachine
import SAFE.TestingTools
import Prelude
main = print ppSetupAndRun
|
5b707c5cd25e80ab53d92f14613298453848c60020d594064437423f21bd9a9f
|
diagrams/diagrams-cairo
|
Reflect.hs
|
{-# LANGUAGE NoMonomorphismRestriction #-}
import Diagrams.Prelude
import Diagrams.Backend.Cairo.CmdLine
d = unitSquare <> reflectAbout (P (1/2,1/2)) (-2,-1) unitSquare
main = defaultMain (pad 1.1 d)
| null |
https://raw.githubusercontent.com/diagrams/diagrams-cairo/533e4f4f18f961543bb1d78493c750dec45fd4a3/test/Reflect.hs
|
haskell
|
# LANGUAGE NoMonomorphismRestriction #
import Diagrams.Prelude
import Diagrams.Backend.Cairo.CmdLine
d = unitSquare <> reflectAbout (P (1/2,1/2)) (-2,-1) unitSquare
main = defaultMain (pad 1.1 d)
|
|
9b8d1062dcc526ee24017d3abec5a12caa9b80753355f835b9d679ff367fbded
|
JacquesCarette/Drasil
|
MakeString.hs
|
-- | Defines types and functions for generating Makefiles.
module Build.Drasil.Make.MakeString where
-- * Types
-- | Type synonym for variable names.
type VarName = String
-- | Type synonym for variable values.
type VarVal = String
data MakeString = Mr String -- ^ A string for Makefiles.
                | Mv MVar -- ^ Holds a Makefile variable.
                | Mc MakeString MakeString -- ^ Concatenates two 'MakeString's.
instance Semigroup MakeString where
(<>) = Mc
instance Monoid MakeString where
mempty = Mr ""
-- | For creating Makefile variables.
data MVar = Os VarName VarVal VarVal VarVal -- ^ Operating System specific variable. Holds information for Windows, Mac, and Linux systems.
| Implicit VarName -- ^ Implicit OS.
| Free VarName -- ^ Independent of OS.
deriving Eq
-- * Functions
-- | Concatenates two 'MakeString's with a space in between.
(+:+) :: MakeString -> MakeString -> MakeString
a +:+ (Mr "") = a
(Mr "") +:+ b = b
a +:+ b = a <> Mr " " <> b
-- | Renders a 'MakeString'. Variables have the form \"$(@var@)\".
renderMS :: MakeString -> String
renderMS (Mr s) = s
renderMS (Mv v) = renderVar (\x -> "$(" ++ x ++ ")") v
renderMS (Mc a b) = renderMS a ++ renderMS b
-- | Renders variables. Takes in a function for the variable, and the type of variable.
renderVar :: (String -> String) -> MVar -> String
renderVar f (Os nm _ _ _) = f nm
renderVar f (Implicit nm) = "\"" ++ f nm ++ "\""
renderVar f (Free nm) = f nm
-- | Constructor for converting a 'String' into a 'MakeString'.
makeS :: String -> MakeString
makeS = Mr
-- | Constructor for Windows OS variables.
mkWindowsVar :: VarName -> VarVal -> VarVal -> MakeString
mkWindowsVar n w e = Mv $ Os n w e e
-- | Constructor for OS variables.
mkOSVar :: VarName -> VarVal -> VarVal -> VarVal -> MakeString
mkOSVar n w m l = Mv $ Os n w m l
-- | Constructor for 'Implicit' variables.
mkImplicitVar :: VarName -> MakeString
mkImplicitVar = Mv . Implicit
-- | Constructor for 'Free' variables.
mkFreeVar :: VarName -> MakeString
mkFreeVar = Mv . Free
| null |
https://raw.githubusercontent.com/JacquesCarette/Drasil/92dddf7a545ba5029f99ad5c5eddcd8dad56a2d8/code/drasil-build/lib/Build/Drasil/Make/MakeString.hs
|
haskell
|
| Defines types and functions for generating Makefiles.
* Types
| Type synonym for variable names.
| Type synonym for variable values.
^ Holds a Makefile variable.
^ Implicit OS.
^ Independent of OS.
* Functions
| Renders variables. Takes in a function for the variable, and the type of variable.
| Constructor for OS variables.
| Constructor for 'Implicit' variables.
| Constructor for 'Free' variables.
|
module Build.Drasil.Make.MakeString where
type VarName = String
type VarVal = String
data MakeString = Mr String -- ^ A string for Makefiles.
                | Mv MVar
                | Mc MakeString MakeString -- ^ Concatenates two 'MakeString's.
instance Semigroup MakeString where
(<>) = Mc
instance Monoid MakeString where
mempty = Mr ""
-- | For creating Makefile variables.
data MVar = Os VarName VarVal VarVal VarVal -- ^ Operating System specific variable. Holds information for Windows, Mac, and Linux systems.
          | Implicit VarName
          | Free VarName
deriving Eq
-- | Concatenates two 'MakeString's with a space in between.
(+:+) :: MakeString -> MakeString -> MakeString
a +:+ (Mr "") = a
(Mr "") +:+ b = b
a +:+ b = a <> Mr " " <> b
-- | Renders a 'MakeString'. Variables have the form \"$(@var@)\".
renderMS :: MakeString -> String
renderMS (Mr s) = s
renderMS (Mv v) = renderVar (\x -> "$(" ++ x ++ ")") v
renderMS (Mc a b) = renderMS a ++ renderMS b
renderVar :: (String -> String) -> MVar -> String
renderVar f (Os nm _ _ _) = f nm
renderVar f (Implicit nm) = "\"" ++ f nm ++ "\""
renderVar f (Free nm) = f nm
-- | Constructor for converting a 'String' into a 'MakeString'.
makeS :: String -> MakeString
makeS = Mr
-- | Constructor for Windows OS variables.
mkWindowsVar :: VarName -> VarVal -> VarVal -> MakeString
mkWindowsVar n w e = Mv $ Os n w e e
mkOSVar :: VarName -> VarVal -> VarVal -> VarVal -> MakeString
mkOSVar n w m l = Mv $ Os n w m l
mkImplicitVar :: VarName -> MakeString
mkImplicitVar = Mv . Implicit
mkFreeVar :: VarName -> MakeString
mkFreeVar = Mv . Free
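-- Editorial sketch, not part of the original module: one way the combinators
-- above fit together. The variable name "CC" and the flags are illustrative
-- only; 'renderMS' turns the OS variable into the usual "$(CC)" reference.
exampleFragment :: String
exampleFragment = renderMS (mkOSVar "CC" "cl" "clang" "gcc" +:+ makeS "-o main main.c")
-- evaluates to "$(CC) -o main main.c"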
|
161207a8bfe77770eda0d138731fe94e79dfe2bc1e2f41949f067015b284588f
|
reynir/ocaml-ssh-agent
|
util.ml
|
let cstruct_of_array arr =
let r = Cstruct.create (Array.length arr) in
Array.iteri (fun idx v -> Cstruct.set_uint8 r idx v) arr;
r
let id_sha1 = [|
  0x30; 0x21; (* type Sequence, length 0x21 (33) *)
  0x30; 0x09; (* type Sequence, length 0x09 *)
  0x06; 0x05; (* type OID, length 0x05 *)
0x2b; 0x0e; 0x03; 0x02; 0x1a; (* id-sha1 OID *)
0x05; 0x00; (* NULL *)
  0x04; 0x14; (* string, length 0x14 (20), followed by sha1 hash *)
|] |> cstruct_of_array
let id_sha256 = [|
  0x30; 0x31; (* type Sequence, length 0x31 (49) *)
  0x30; 0x0d; (* type Sequence, length 0x0d (13) *)
  0x06; 0x09; (* type OID, length 0x09 *)
0x60; 0x86; 0x48; 0x01; 0x65; 0x03; 0x04; 0x02; 0x01; (* id-sha256 *)
0x05; 0x00; (* NULL *)
  0x04; 0x20; (* Octet string, length 0x20 (32), followed by sha256 hash *)
|] |> cstruct_of_array
let id_sha512 = [|
  0x30; 0x51; (* type Sequence, length 0x51 (81) *)
  0x30; 0x0d; (* type Sequence, length 0x0d (13) *)
  0x06; 0x09; (* type OID, length 0x09 *)
0x60; 0x86; 0x48; 0x01; 0x65; 0x03; 0x04; 0x02; 0x03; (* id-sha512 *)
0x05; 0x00; (* NULL *)
  0x04; 0x40; (* string, length 0x40 (64), followed by sha512 hash *)
|] |> cstruct_of_array
| null |
https://raw.githubusercontent.com/reynir/ocaml-ssh-agent/574cf846d52c061efae20a238209ce25327ceb51/src/util.ml
|
ocaml
|
id-sha1 OID
NULL
id-sha256
NULL
id-sha512
NULL
|
let cstruct_of_array arr =
let r = Cstruct.create (Array.length arr) in
Array.iteri (fun idx v -> Cstruct.set_uint8 r idx v) arr;
r
let id_sha1 = [|
  0x30; 0x21; (* type Sequence, length 0x21 (33) *)
  0x30; 0x09; (* type Sequence, length 0x09 *)
  0x06; 0x05; (* type OID, length 0x05 *)
  0x2b; 0x0e; 0x03; 0x02; 0x1a;
  0x05; 0x00;
  0x04; 0x14; (* string, length 0x14 (20), followed by sha1 hash *)
|] |> cstruct_of_array
let id_sha256 = [|
  0x30; 0x31; (* type Sequence, length 0x31 (49) *)
  0x30; 0x0d; (* type Sequence, length 0x0d (13) *)
  0x06; 0x09; (* type OID, length 0x09 *)
  0x60; 0x86; 0x48; 0x01; 0x65; 0x03; 0x04; 0x02; 0x01;
  0x05; 0x00;
  0x04; 0x20; (* Octet string, length 0x20 (32), followed by sha256 hash *)
|] |> cstruct_of_array
let id_sha512 = [|
  0x30; 0x51; (* type Sequence, length 0x51 (81) *)
  0x30; 0x0d; (* type Sequence, length 0x0d (13) *)
  0x06; 0x09; (* type OID, length 0x09 *)
  0x60; 0x86; 0x48; 0x01; 0x65; 0x03; 0x04; 0x02; 0x03;
  0x05; 0x00;
  0x04; 0x40; (* string, length 0x40 (64), followed by sha512 hash *)
|] |> cstruct_of_array
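(* Editorial sketch, not part of the original file: the prefixes above are
   meant to be concatenated with the raw digest bytes to form a DER
   DigestInfo, e.g. for a 20-byte SHA-1 digest: *)
let digest_info_sha1 (digest : Cstruct.t) : Cstruct.t =
  Cstruct.concat [ id_sha1; digest ]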
|
d9d3762c68e32ba5862188f4def5180746ce366549c0532574cfbaa56a187942
|
era-platform/cene-for-racket
|
main.rkt
|
#lang parendown/slash racket/base
; cene
;
; A Racket library with entrypoints to the Cene programming language.
; Copyright 2018-2020, 2022 The Era Authors
;
; Licensed under the Apache License, Version 2.0 (the "License");
; you may not use this file except in compliance with the License.
; You may obtain a copy of the License at
;
; -2.0
;
; Unless required by applicable law or agreed to in writing,
; software distributed under the License is distributed on an
; "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
; either express or implied. See the License for the specific
; language governing permissions and limitations under the License.
(require cene/private/shim)
(init-shim)
(require /only-in cene/private
cenegetfx-done
extfx-run-sink-extfx
make-cene-root-info
make-fault-internal
make-sink-extfx
sink-authorized-name
sink-authorized-name-subname
sink-extfx-claim-and-split
sink-extfx-fuse
sink-extfx-run-directive-cexprs-in-string
sink-name-of-racket-string
sink-qualify
  ; Defined in `(lib "cene/private/essentials.rkti")`:
minimal-and-essential-tags
sink-extfx-init-essentials
sink-extfx-init-package)
; TODO: Document these exports.
(provide /recontract-out
cenegetfx-done
extfx-run-sink-extfx
make-cene-root-info)
(provide
make-fault-internal)
(provide /recontract-out
make-sink-extfx
minimal-and-essential-tags)
(provide
sink-authorized-name)
(provide /recontract-out
sink-authorized-name-subname
sink-extfx-claim-and-split
sink-extfx-fuse
sink-extfx-init-essentials
sink-extfx-init-package
sink-extfx-run-directive-cexprs-in-string
sink-name-of-racket-string)
(provide
sink-qualify)
| null |
https://raw.githubusercontent.com/era-platform/cene-for-racket/a7eff215758fe228735f41a349f3b7475cc0e652/cene-lib/main.rkt
|
racket
|
cene
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
-2.0
Unless required by applicable law or agreed to in writing,
either express or implied. See the License for the specific
language governing permissions and limitations under the License.
TODO: Document these exports.
|
#lang parendown/slash racket/base
; A Racket library with entrypoints to the Cene programming language.
; Copyright 2018-2020, 2022 The Era Authors
; software distributed under the License is distributed on an
; "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
(require cene/private/shim)
(init-shim)
(require /only-in cene/private
cenegetfx-done
extfx-run-sink-extfx
make-cene-root-info
make-fault-internal
make-sink-extfx
sink-authorized-name
sink-authorized-name-subname
sink-extfx-claim-and-split
sink-extfx-fuse
sink-extfx-run-directive-cexprs-in-string
sink-name-of-racket-string
sink-qualify
  ; Defined in `(lib "cene/private/essentials.rkti")`:
minimal-and-essential-tags
sink-extfx-init-essentials
sink-extfx-init-package)
(provide /recontract-out
cenegetfx-done
extfx-run-sink-extfx
make-cene-root-info)
(provide
make-fault-internal)
(provide /recontract-out
make-sink-extfx
minimal-and-essential-tags)
(provide
sink-authorized-name)
(provide /recontract-out
sink-authorized-name-subname
sink-extfx-claim-and-split
sink-extfx-fuse
sink-extfx-init-essentials
sink-extfx-init-package
sink-extfx-run-directive-cexprs-in-string
sink-name-of-racket-string)
(provide
sink-qualify)
|
4243a8fa37c08a2b6c5f1d3543305f56bdfc9c4d72644e17dc6b7eb1cc9dc4bb
|
cedlemo/OCaml-Notty-introduction
|
basics_I_string.ml
|
open Notty
open Notty_unix
(* ocamlbuild -pkg notty -pkg notty.unix basics_I_string.native *)
let () =
I.string A.(fg lightred) "Wow!"
|> eol
|> Notty_unix.output_image
| null |
https://raw.githubusercontent.com/cedlemo/OCaml-Notty-introduction/9295b43382354c504d5efcad3ba56cff6f34d2eb/basics_I_string.ml
|
ocaml
|
ocamlbuild -pkg notty -pkg notty.unix basics_I_string.native
|
open Notty
open Notty_unix
let () =
I.string A.(fg lightred) "Wow!"
|> eol
|> Notty_unix.output_image
|
ff7c702f8295eced2f1a3f9d32d0858583dd6be7a04e70f083aee5b535c2bff6
|
tyage/tiny-c
|
Asm.hs
|
module Asm where
import Data.List
import Control.Applicative hiding (Const)
import Control.Monad.State
import Type
import Show
genAsmLabel :: State AsmEnvironment Label
genAsmLabel = do
env <- get
put $ env { asmLabelCounter = (asmLabelCounter env) + 1 }
return $ "L" ++ show (asmLabelCounter env)
putReturnLabel :: Label -> State AsmEnvironment ()
putReturnLabel l = do
env <- get
put $ env { returnLabel = l }
getReturnLabel :: State AsmEnvironment Label
getReturnLabel = do
env <- get
return $ (returnLabel env)
asmProgram :: Program -> Asm
asmProgram (ExDeclList e) = concat <$> mapM asmExternalDeclaration e
asmExternalDeclaration :: ExternalDeclaration -> Asm
asmExternalDeclaration (Decl d) = asmGlobalDeclaration d
asmExternalDeclaration (FuncDef f) = asmFunctionDefinition f
asmFunctionDefinition :: FunctionDefinition -> Asm
asmFunctionDefinition (FunctionDefinition d p c) = do
retLabel <- genAsmLabel
putReturnLabel retLabel
acs <- asmCompoundStatement c
return $ [
AsmGlobal $ show identifier,
AsmLabel $ show identifier,
AsmOp $ Op1 "push" "ebp",
AsmOp $ Op2 "mov" "ebp" "esp",
    -- XXX
AsmOp $ Op2 "sub" "esp" "128"
] ++ acs ++ [
AsmLabel $ retLabel,
AsmOp $ Op2 "mov" "esp" "ebp",
AsmOp $ Op1 "pop" "ebp",
AsmOp $ Op0 "ret"
]
where
identifier = dec2ident d
dec2ident (Declarator i) = i
asmGlobalDeclaration :: Declaration -> Asm
asmGlobalDeclaration (Declaration d) = concat <$> mapM asmGlobalDeclarator (declList d)
where
declList (DeclaratorList d) = d
asmGlobalDeclarator :: Declarator -> Asm
asmGlobalDeclarator (Declarator i) = return [AsmCommon (show i) 4]
asmCompoundStatement :: CompoundStatement -> Asm
asmCompoundStatement (CompoundStatement d s) = case s of
(StatementList s) -> concat <$> mapM asmStatement s
asmStatement :: Statement -> Asm
asmStatement EmptyStatement = return []
asmStatement (ExpressionStmt e) = asmExpression e
asmStatement (CompoundStmt c) = asmCompoundStatement c
asmStatement (If e s1 s2) = do
elseLabel <- genAsmLabel
endifLabel <- genAsmLabel
ae <- asmExpression e
as1 <- asmStatement s1
as2 <- asmStatement s2
return $ ae ++ [AsmOp $ Op2 "cmp" "eax" "0", AsmOp $ Op1 "je\tnear" elseLabel] ++
as1 ++ [AsmOp $ Op1 "jmp\tnear" endifLabel, AsmLabel elseLabel] ++ as2 ++
[AsmLabel endifLabel]
asmStatement (While e s) = do
beginLabel <- genAsmLabel
endLabel <- genAsmLabel
ae <- asmExpression e
as <- asmStatement s
return $ [AsmLabel beginLabel] ++ ae ++
[AsmOp $ Op2 "cmp" "eax" "0", AsmOp $ Op1 "je\tnear" endLabel] ++ as ++
[AsmOp $ Op1 "jmp\tnear" beginLabel, AsmLabel endLabel]
asmStatement (Return e) = do
retLabel <- getReturnLabel
ae <- asmExpression e
return $ ae ++ [AsmOp $ Op1 "jmp\tnear" retLabel]
asmExpression :: Expr -> Asm
asmExpression (ExprList e) = concat <$> mapM asmExpression e
asmExpression (Assign i e) = do
ae <- asmExpression e
return $ ae ++ [AsmOp $ Op2 "mov" (showRegister i) "eax"]
asmExpression (Or e1 e2) = do
orLabel <- genAsmLabel
ae1 <- asmExpression e1
ae2 <- asmExpression e2
return $ [AsmOp $ Op1 "push" "1"] ++ ae1 ++
[AsmOp $ Op2 "cmp" "eax" "0", AsmOp $ Op1 "jne\tnear" orLabel] ++ ae2 ++
[AsmOp $ Op2 "cmp" "eax" "0", AsmOp $ Op1 "jne\tnear" orLabel,
AsmOp $ Op1 "pop" "eax", AsmOp $ Op1 "push" "0",
AsmLabel orLabel, AsmOp $ Op1 "pop" "eax"]
asmExpression (And e1 e2) = do
andLabel <- genAsmLabel
ae1 <- asmExpression e1
ae2 <- asmExpression e2
return $ [AsmOp $ Op1 "push" "0"] ++ ae1 ++
[AsmOp $ Op2 "cmp" "eax" "0", AsmOp $ Op1 "je\tnear" andLabel] ++ ae2 ++
[AsmOp $ Op2 "cmp" "eax" "0", AsmOp $ Op1 "je\tnear" andLabel,
AsmOp $ Op1 "pop" "eax", AsmOp $ Op1 "push" "1",
AsmLabel andLabel, AsmOp $ Op1 "pop" "eax"]
asmExpression (Equal e1 e2) = asmCompare e1 e2 "sete"
asmExpression (NotEqual e1 e2) = asmCompare e1 e2 "setne"
asmExpression (Lt e1 e2) = asmCompare e1 e2 "setl"
asmExpression (Gt e1 e2) = asmCompare e1 e2 "setg"
asmExpression (Le e1 e2) = asmCompare e1 e2 "setle"
asmExpression (Ge e1 e2) = asmCompare e1 e2 "setge"
asmExpression (Plus e1 e2) = asmArithmetic e1 e2 "add"
asmExpression (Minus e1 e2) = asmArithmetic e1 e2 "sub"
asmExpression (Multiple e1 e2) = asmArithmetic e1 e2 "imul"
asmExpression (Divide e1 e2) = do
aRSL <- asmRSL e1 e2
return $ aRSL ++ [AsmOp $ Op0 "cdq", AsmOp $ Op1 "idiv\tdword" "ecx"]
asmExpression (UnaryMinus e) = do
ae <- asmExpression e
return $ ae ++ [AsmOp $ Op2 "imul" "eax" "-1"]
asmExpression (FunctionCall i a) = do
aal <- asmArgumentList a
return $ aal ++ extern ++ [AsmOp $ Op1 "call" $ show i,
AsmOp $ Op2 "add" "esp" $ show $ 4 * (argLength a)]
where
extern = if (isUndefined i) then [AsmOp $ Op1 "EXTERN" $ show i] else []
isUndefined (TokenIdentifier (UndefinedFunctionToken i l p)) = True
isUndefined _ = False
argLength (ArgumentExprList e) = length e
asmExpression (Ident i) = return [AsmOp $ Op2 "mov" "eax" $ showRegister i]
asmExpression (Const c) = return [AsmOp $ Op2 "mov" "eax" $ show c]
asmExpression (Parens e) = asmExpression e
asmArgumentList :: ArgumentExprList -> Asm
asmArgumentList (ArgumentExprList a) = concat <$> mapM asmArgument (reverse a)
asmArgument :: Expr -> Asm
asmArgument e =do
ae <- asmExpression e
return $ ae ++ [AsmOp $ Op1 "push" "eax"]
asmCompare :: Expr -> Expr -> String -> Asm
asmCompare e1 e2 op = do
aRSL <- asmRSL e1 e2
return $ aRSL ++ [AsmOp $ Op2 "cmp" "eax" "ecx",
AsmOp $ Op1 op "al", AsmOp $ Op2 "movzx" "eax" "al"]
asmArithmetic :: Expr -> Expr -> String -> Asm
asmArithmetic e1 e2 op = do
aRSL <- asmRSL e1 e2
return $ aRSL ++ [AsmOp $ Op2 op "eax" "ecx"]
asmRSL :: Expr -> Expr -> Asm
asmRSL e1 e2 = do
ae2 <- asmExpression e2
ae1 <- asmExpression e1
return $ ae2++ [AsmOp $ Op1 "push" "eax"] ++
ae1 ++ [AsmOp $ Op1 "pop" "ecx"]
| null |
https://raw.githubusercontent.com/tyage/tiny-c/92aed366ad4e610b3daf15c9fccf4d5b6f3ba6ad/task8/Asm.hs
|
haskell
|
module Asm where
import Data.List
import Control.Applicative hiding (Const)
import Control.Monad.State
import Type
import Show
genAsmLabel :: State AsmEnvironment Label
genAsmLabel = do
env <- get
put $ env { asmLabelCounter = (asmLabelCounter env) + 1 }
return $ "L" ++ show (asmLabelCounter env)
putReturnLabel :: Label -> State AsmEnvironment ()
putReturnLabel l = do
env <- get
put $ env { returnLabel = l }
getReturnLabel :: State AsmEnvironment Label
getReturnLabel = do
env <- get
return $ (returnLabel env)
asmProgram :: Program -> Asm
asmProgram (ExDeclList e) = concat <$> mapM asmExternalDeclaration e
asmExternalDeclaration :: ExternalDeclaration -> Asm
asmExternalDeclaration (Decl d) = asmGlobalDeclaration d
asmExternalDeclaration (FuncDef f) = asmFunctionDefinition f
asmFunctionDefinition :: FunctionDefinition -> Asm
asmFunctionDefinition (FunctionDefinition d p c) = do
retLabel <- genAsmLabel
putReturnLabel retLabel
acs <- asmCompoundStatement c
return $ [
AsmGlobal $ show identifier,
AsmLabel $ show identifier,
AsmOp $ Op1 "push" "ebp",
AsmOp $ Op2 "mov" "ebp" "esp",
    -- XXX
AsmOp $ Op2 "sub" "esp" "128"
] ++ acs ++ [
AsmLabel $ retLabel,
AsmOp $ Op2 "mov" "esp" "ebp",
AsmOp $ Op1 "pop" "ebp",
AsmOp $ Op0 "ret"
]
where
identifier = dec2ident d
dec2ident (Declarator i) = i
asmGlobalDeclaration :: Declaration -> Asm
asmGlobalDeclaration (Declaration d) = concat <$> mapM asmGlobalDeclarator (declList d)
where
declList (DeclaratorList d) = d
asmGlobalDeclarator :: Declarator -> Asm
asmGlobalDeclarator (Declarator i) = return [AsmCommon (show i) 4]
asmCompoundStatement :: CompoundStatement -> Asm
asmCompoundStatement (CompoundStatement d s) = case s of
(StatementList s) -> concat <$> mapM asmStatement s
asmStatement :: Statement -> Asm
asmStatement EmptyStatement = return []
asmStatement (ExpressionStmt e) = asmExpression e
asmStatement (CompoundStmt c) = asmCompoundStatement c
asmStatement (If e s1 s2) = do
elseLabel <- genAsmLabel
endifLabel <- genAsmLabel
ae <- asmExpression e
as1 <- asmStatement s1
as2 <- asmStatement s2
return $ ae ++ [AsmOp $ Op2 "cmp" "eax" "0", AsmOp $ Op1 "je\tnear" elseLabel] ++
as1 ++ [AsmOp $ Op1 "jmp\tnear" endifLabel, AsmLabel elseLabel] ++ as2 ++
[AsmLabel endifLabel]
asmStatement (While e s) = do
beginLabel <- genAsmLabel
endLabel <- genAsmLabel
ae <- asmExpression e
as <- asmStatement s
return $ [AsmLabel beginLabel] ++ ae ++
[AsmOp $ Op2 "cmp" "eax" "0", AsmOp $ Op1 "je\tnear" endLabel] ++ as ++
[AsmOp $ Op1 "jmp\tnear" beginLabel, AsmLabel endLabel]
asmStatement (Return e) = do
retLabel <- getReturnLabel
ae <- asmExpression e
return $ ae ++ [AsmOp $ Op1 "jmp\tnear" retLabel]
asmExpression :: Expr -> Asm
asmExpression (ExprList e) = concat <$> mapM asmExpression e
asmExpression (Assign i e) = do
ae <- asmExpression e
return $ ae ++ [AsmOp $ Op2 "mov" (showRegister i) "eax"]
asmExpression (Or e1 e2) = do
orLabel <- genAsmLabel
ae1 <- asmExpression e1
ae2 <- asmExpression e2
return $ [AsmOp $ Op1 "push" "1"] ++ ae1 ++
[AsmOp $ Op2 "cmp" "eax" "0", AsmOp $ Op1 "jne\tnear" orLabel] ++ ae2 ++
[AsmOp $ Op2 "cmp" "eax" "0", AsmOp $ Op1 "jne\tnear" orLabel,
AsmOp $ Op1 "pop" "eax", AsmOp $ Op1 "push" "0",
AsmLabel orLabel, AsmOp $ Op1 "pop" "eax"]
asmExpression (And e1 e2) = do
andLabel <- genAsmLabel
ae1 <- asmExpression e1
ae2 <- asmExpression e2
return $ [AsmOp $ Op1 "push" "0"] ++ ae1 ++
[AsmOp $ Op2 "cmp" "eax" "0", AsmOp $ Op1 "je\tnear" andLabel] ++ ae2 ++
[AsmOp $ Op2 "cmp" "eax" "0", AsmOp $ Op1 "je\tnear" andLabel,
AsmOp $ Op1 "pop" "eax", AsmOp $ Op1 "push" "1",
AsmLabel andLabel, AsmOp $ Op1 "pop" "eax"]
asmExpression (Equal e1 e2) = asmCompare e1 e2 "sete"
asmExpression (NotEqual e1 e2) = asmCompare e1 e2 "setne"
asmExpression (Lt e1 e2) = asmCompare e1 e2 "setl"
asmExpression (Gt e1 e2) = asmCompare e1 e2 "setg"
asmExpression (Le e1 e2) = asmCompare e1 e2 "setle"
asmExpression (Ge e1 e2) = asmCompare e1 e2 "setge"
asmExpression (Plus e1 e2) = asmArithmetic e1 e2 "add"
asmExpression (Minus e1 e2) = asmArithmetic e1 e2 "sub"
asmExpression (Multiple e1 e2) = asmArithmetic e1 e2 "imul"
asmExpression (Divide e1 e2) = do
aRSL <- asmRSL e1 e2
return $ aRSL ++ [AsmOp $ Op0 "cdq", AsmOp $ Op1 "idiv\tdword" "ecx"]
asmExpression (UnaryMinus e) = do
ae <- asmExpression e
return $ ae ++ [AsmOp $ Op2 "imul" "eax" "-1"]
asmExpression (FunctionCall i a) = do
aal <- asmArgumentList a
return $ aal ++ extern ++ [AsmOp $ Op1 "call" $ show i,
AsmOp $ Op2 "add" "esp" $ show $ 4 * (argLength a)]
where
extern = if (isUndefined i) then [AsmOp $ Op1 "EXTERN" $ show i] else []
isUndefined (TokenIdentifier (UndefinedFunctionToken i l p)) = True
isUndefined _ = False
argLength (ArgumentExprList e) = length e
asmExpression (Ident i) = return [AsmOp $ Op2 "mov" "eax" $ showRegister i]
asmExpression (Const c) = return [AsmOp $ Op2 "mov" "eax" $ show c]
asmExpression (Parens e) = asmExpression e
asmArgumentList :: ArgumentExprList -> Asm
asmArgumentList (ArgumentExprList a) = concat <$> mapM asmArgument (reverse a)
asmArgument :: Expr -> Asm
asmArgument e =do
ae <- asmExpression e
return $ ae ++ [AsmOp $ Op1 "push" "eax"]
asmCompare :: Expr -> Expr -> String -> Asm
asmCompare e1 e2 op = do
aRSL <- asmRSL e1 e2
return $ aRSL ++ [AsmOp $ Op2 "cmp" "eax" "ecx",
AsmOp $ Op1 op "al", AsmOp $ Op2 "movzx" "eax" "al"]
asmArithmetic :: Expr -> Expr -> String -> Asm
asmArithmetic e1 e2 op = do
aRSL <- asmRSL e1 e2
return $ aRSL ++ [AsmOp $ Op2 op "eax" "ecx"]
asmRSL :: Expr -> Expr -> Asm
asmRSL e1 e2 = do
ae2 <- asmExpression e2
ae1 <- asmExpression e1
return $ ae2++ [AsmOp $ Op1 "push" "eax"] ++
ae1 ++ [AsmOp $ Op1 "pop" "ecx"]
|
|
ec89b0d56caa01d49ef361a102d7d60dee7f5d439a2d38dfbc8e66ebbf0ef54e
|
mariachris/Concuerror
|
ets_delete.erl
|
-module(ets_delete).
-export([ets_delete/0]).
-export([scenarios/0]).
scenarios() -> [{?MODULE, inf, dpor}].
ets_delete() ->
ets:new(table, [public, named_table]),
spawn(fun() ->
ets:insert(table, {key, value})
end),
ets:delete(table),
receive
deadlock -> ok
end.
| null |
https://raw.githubusercontent.com/mariachris/Concuerror/87e63f10ac615bf2eeac5b0916ef54d11a933e0b/testsuite/suites/dpor/src/ets_delete.erl
|
erlang
|
-module(ets_delete).
-export([ets_delete/0]).
-export([scenarios/0]).
scenarios() -> [{?MODULE, inf, dpor}].
ets_delete() ->
ets:new(table, [public, named_table]),
spawn(fun() ->
ets:insert(table, {key, value})
end),
ets:delete(table),
receive
deadlock -> ok
end.
|
|
78ea252d6618b02dd1558e24fcacbcfaf6bfa3fb131acb7029ee5d6cac14de29
|
robert-strandh/SICL
|
map-local-instructions.lisp
|
(cl:in-package #:cleavir-ir)
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;;;
;;; Function MAP-LOCAL-INSTRUCTIONS
;;;
;;; Traverse an instruction graph in depth first order. The traversal
;;; starts from an ENTER-INSTRUCTION, and does not descend into
;;; enclosed functions; thus all instructions mapped have the input
;;; ENTER as their owner, and all reachable instructions with the
;;; ENTER as their owner are mapped.
(defun map-local-instructions (function enter-instruction)
(let ((visited-instructions (make-hash-table :test #'eq))
(instructions-to-process '()))
(flet ((register-if-unvisited (instruction)
(unless (gethash instruction visited-instructions)
(setf (gethash instruction visited-instructions) t)
(push instruction instructions-to-process))))
(register-if-unvisited enter-instruction)
(loop until (null instructions-to-process)
do (let ((instruction (pop instructions-to-process)))
(funcall function instruction)
(mapc #'register-if-unvisited
(cleavir-ir:successors instruction)))))))
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;;;
;;; Function FILTER-LOCAL-INSTRUCTIONS
;;;
;;; Return, in some arbitrary order, a list of all local instructions
;;; that satisfy a predicate. Sort of like REMOVE-IF-NOT.
(defun filter-local-instructions (predicate enter-instruction)
(let (result)
(map-local-instructions
(lambda (instruction)
(when (funcall predicate instruction)
(push instruction result)))
enter-instruction)
result))
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;;;
;;; Function LOCAL-INSTRUCTIONS-OF-TYPE
;;;
;;; Return, in some arbitrary order, a list of all local instructions
;;; that are of some type. This is useful for many transformations.
(defun local-instructions-of-type (enter-instruction type)
(filter-local-instructions (lambda (i) (typep i type))
enter-instruction))
(define-compiler-macro local-instructions-of-type (&whole form initial-instruction type)
(if (constantp type)
`(filter-local-instructions (lambda (i) (typep i ',(eval type))) ,initial-instruction)
form))
| null |
https://raw.githubusercontent.com/robert-strandh/SICL/a7d7b08607834198588e440693fa7d6b743e52c1/Code/Cleavir/Intermediate-representation/map-local-instructions.lisp
|
lisp
|
starts from an ENTER-INSTRUCTION, and does not descend into
enclosed functions; thus all instructions mapped have the input
ENTER as their owner, and all reachable instructions with the
ENTER as their owner are mapped.
Function FILTER-LOCAL-INSTRUCTIONS
Return, in some arbitrary order, a list of all local instructions
that satisfy a predicate. Sort of like REMOVE-IF-NOT.
Function LOCAL-INSTRUCTIONS-OF-TYPE
Return, in some arbitrary order, a list of all local instructions
that are of some type. This is useful for many transformations.
|
(cl:in-package #:cleavir-ir)
;;; Function MAP-LOCAL-INSTRUCTIONS
;;; Traverse an instruction graph in depth first order. The traversal
(defun map-local-instructions (function enter-instruction)
(let ((visited-instructions (make-hash-table :test #'eq))
(instructions-to-process '()))
(flet ((register-if-unvisited (instruction)
(unless (gethash instruction visited-instructions)
(setf (gethash instruction visited-instructions) t)
(push instruction instructions-to-process))))
(register-if-unvisited enter-instruction)
(loop until (null instructions-to-process)
do (let ((instruction (pop instructions-to-process)))
(funcall function instruction)
(mapc #'register-if-unvisited
(cleavir-ir:successors instruction)))))))
(defun filter-local-instructions (predicate enter-instruction)
(let (result)
(map-local-instructions
(lambda (instruction)
(when (funcall predicate instruction)
(push instruction result)))
enter-instruction)
result))
(defun local-instructions-of-type (enter-instruction type)
(filter-local-instructions (lambda (i) (typep i type))
enter-instruction))
(define-compiler-macro local-instructions-of-type (&whole form initial-instruction type)
(if (constantp type)
`(filter-local-instructions (lambda (i) (typep i ',(eval type))) ,initial-instruction)
form))
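;;; Editorial sketch, not part of the original file: MAP-LOCAL-INSTRUCTIONS is
;;; typically used to fold over the local instruction graph.  The function
;;; below is a hypothetical helper that only illustrates the calling
;;; convention (a one-argument function plus an ENTER-INSTRUCTION).
(defun count-local-instructions (enter-instruction)
  (let ((count 0))
    (map-local-instructions
     (lambda (instruction)
       (declare (ignore instruction))
       (incf count))
     enter-instruction)
    count))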
|
58b71c717efa0e651f42398160789bec8cb876a804ad1ecce1e3d2d339a2844b
|
songyahui/AlgebraicEffect
|
discard_continuation.ml
|
effect Zero: int
let rec times lst =
match lst with
| [] -> 1
| 0 :: rest -> perform Zero
| v :: rest -> v * times rest
let handler () =
match times [1;2;3;4;5] with
| v -> v
| effect Zero k -> 0
let main =
print_string (string_of_int (handler ()) ^ "\n")
| null |
https://raw.githubusercontent.com/songyahui/AlgebraicEffect/27688952b598a101a27523be796e8011d70b02de/src/sp_tests/discard_continuation.ml
|
ocaml
|
effect Zero: int
let rec times lst =
match lst with
| [] -> 1
| 0 :: rest -> perform Zero
| v :: rest -> v * times rest
let handler () =
match times [1;2;3;4;5] with
| v -> v
| effect Zero k -> 0
let main =
print_string (string_of_int (handler ()) ^ "\n")
|
|
1e0ce804dde003596996c5080e3a13b218cc3f031e86439746aaf3600124a9cb
|
orivej/pzmq
|
compat.lisp
|
(in-package #:pzmq)
(defun sendmsg (socket msg &key dontwait sndmore)
"Send a message part on a socket."
(msg-send msg socket :dontwait dontwait :sndmore sndmore))
(defun recvmsg (socket msg &key dontwait)
"Receive a message part from a socket."
(msg-recv msg socket :dontwait dontwait))
(defun init (io-threads)
"Initialise ΓMQ context."
(let ((context (ctx-new)))
(ctx-set context :io-threads io-threads)
context))
(setf (symbol-function 'term) (symbol-function 'ctx-destroy))
| null |
https://raw.githubusercontent.com/orivej/pzmq/91dd3a891e639f41d44bfb5f449daf0fcd408e29/compat.lisp
|
lisp
|
(in-package #:pzmq)
(defun sendmsg (socket msg &key dontwait sndmore)
"Send a message part on a socket."
(msg-send msg socket :dontwait dontwait :sndmore sndmore))
(defun recvmsg (socket msg &key dontwait)
"Receive a message part from a socket."
(msg-recv msg socket :dontwait dontwait))
(defun init (io-threads)
"Initialise ΓMQ context."
(let ((context (ctx-new)))
(ctx-set context :io-threads io-threads)
context))
(setf (symbol-function 'term) (symbol-function 'ctx-destroy))
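;;; Editorial sketch, not part of the original file: the legacy INIT/TERM pair
;;; above brackets a context's lifetime.  CALL-WITH-LEGACY-CONTEXT is a
;;; hypothetical helper showing that usage.
(defun call-with-legacy-context (io-threads thunk)
  (let ((context (init io-threads)))
    (unwind-protect (funcall thunk context)
      (term context))))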
|
|
431e9c2fddeabff727971cd3002fedd58f08ce9197a2883aa7389550eb066177
|
cabol/gen_buffer
|
gen_buffer_dist_SUITE.erl
|
-module(gen_buffer_dist_SUITE).
-include_lib("common_test/include/ct.hrl").
%% Common Test
-export([
all/0,
init_per_suite/1,
end_per_suite/1,
init_per_testcase/2,
end_per_testcase/2
]).
%% Common Test Cases
-include_lib("mixer/include/mixer.hrl").
-mixin([
{gen_buffer_test_cases, [
t_eval/1,
t_eval_error/1,
t_send_and_recv_errors/1,
t_send_recv/1,
t_fire_and_forget/1
]}
]).
%% Test Cases
-export([
t_send_and_recv/1,
t_get_set_workers/1,
t_size/1,
t_info_buffer/1,
t_info/1,
t_no_available_nodes/1
]).
-define(EXCLUDED_FUNS, [
module_info,
all,
init_per_suite,
end_per_suite,
init_per_testcase,
end_per_testcase
]).
-define(BUFFER, gen_buffer_test).
-define(SLAVES, ['[email protected]', '[email protected]']).
%%%===================================================================
%%% Common Test
%%%===================================================================
all() ->
Exports = ?MODULE:module_info(exports),
[F || {F, _} <- Exports, not lists:member(F, ?EXCLUDED_FUNS)].
init_per_suite(Config) ->
ok = start_primary_node(),
{ok, _} = application:ensure_all_started(gen_buffer),
ok = allow_boot(),
Nodes = start_slaves(?SLAVES),
Opts = #{
message_handler => test_message_handler,
send_replies => true,
init_args => ok
},
[{nodes, Nodes}, {opts, Opts}, {module, gen_buffer_dist} | Config].
end_per_suite(Config) ->
stop_slaves(?SLAVES),
Config.
init_per_testcase(_, Config) ->
Config.
end_per_testcase(_, Config) ->
ok = cleanup_remote_buffers(),
Config.
%%%===================================================================
%%% Tests Cases
%%%===================================================================
t_send_and_recv(Config) ->
Mod = ?config(module, Config),
Opts = ?config(opts, Config),
_ = gen_buffer_ct:create_buffer(?BUFFER, Opts, Mod, Config),
Ref1 = Mod:send(?BUFFER, "hello"),
{reply, Ref1, ?BUFFER, "hello"} = gen_buffer_ct:wait_for_msg(),
Ref2 = Mod:send(?BUFFER, "hello"),
{ok, "hello"} = Mod:recv(?BUFFER, Ref2),
ok = Mod:poll(?BUFFER),
{error, timeout} = gen_buffer_ct:wait_for_msg(200),
ok = Mod:stop(?BUFFER),
ok = Mod:stop(test).
t_get_set_workers(Config) ->
Mod = ?config(module, Config),
Opts = ?config(opts, Config),
_ = gen_buffer_ct:create_buffer(?BUFFER, Opts, Mod, Config),
[
{'[email protected]', Workers1_N1},
{'[email protected]', Workers1_N2},
{'[email protected]', Workers1_N3}
] = lists:usort(Mod:get_workers(?BUFFER)),
Len1 = erlang:system_info(schedulers_online),
ok = lists:foreach(fun(WL) ->
Len1 = length(WL)
end, [Workers1_N1, Workers1_N2, Workers1_N3]),
[
{'[email protected]', Workers2_N1},
{'[email protected]', Workers2_N2},
{'[email protected]', Workers2_N3}
] = lists:usort(Mod:set_workers(?BUFFER, 3)),
ok = lists:foreach(fun(WL) ->
3 = length(WL)
end, [Workers2_N1, Workers2_N2, Workers2_N3]),
{ok, _} = Mod:get_worker(?BUFFER).
t_size(Config) ->
Mod = ?config(module, Config),
Opts = ?config(opts, Config),
_ = gen_buffer_ct:create_buffer(?BUFFER, Opts, Mod, Config),
[
{'[email protected]', Size},
{'[email protected]', _},
{'[email protected]', _}
] = lists:usort(Mod:size(?BUFFER)),
true = is_integer(Size).
t_info_buffer(Config) ->
Mod = ?config(module, Config),
Opts = ?config(opts, Config),
_ = gen_buffer_ct:create_buffer(?BUFFER, Opts, Mod, Config),
[
{'[email protected]', Data},
{'[email protected]', _},
{'[email protected]', _}
] = lists:usort(Mod:info(?BUFFER)),
#{workers := _, size := _} = Data.
t_info(Config) ->
Mod = ?config(module, Config),
Opts = ?config(opts, Config),
_ = gen_buffer_ct:create_buffer(?BUFFER, Opts, Mod, Config),
_ = gen_buffer_ct:create_buffer(test, Opts, Mod, Config),
ok = pg2:create(yet_another_group),
[
{'[email protected]', Data},
{'[email protected]', _},
{'[email protected]', _}
] = lists:usort(Mod:info()),
#{
?BUFFER := #{workers := _, size := _},
test := #{workers := _, size := _}
} = Data.
t_no_available_nodes(Config) ->
Mod = ?config(module, Config),
Opts = ?config(opts, Config),
ok = pg2:delete(gen_buffer:pg2_namespace(?BUFFER)),
try
Mod:send(?BUFFER, "hello")
catch
error:no_available_nodes -> ok
end,
_ = gen_buffer_ct:create_buffer(?BUFFER, Opts, Mod, Config),
_ = Mod:send(?BUFFER, "hello").
%%%===================================================================
%%% Internal functions
%%%===================================================================
%% @private
start_primary_node() ->
{ok, _} = net_kernel:start(['[email protected]']),
true = erlang:set_cookie(node(), gen_buffer),
ok.
%% @private
allow_boot() ->
_ = erl_boot_server:start([]),
{ok, IPv4} = inet:parse_ipv4_address("127.0.0.1"),
erl_boot_server:add_slave(IPv4).
%% @private
start_slaves(Slaves) ->
start_slaves(Slaves, []).
%% @private
start_slaves([], Acc) ->
lists:usort(Acc);
start_slaves([Node | T], Acc) ->
start_slaves(T, [spawn_node(Node) | Acc]).
%% @private
spawn_node(Node) ->
Cookie = atom_to_list(erlang:get_cookie()),
InetLoaderArgs = "-loader inet -hosts 127.0.0.1 -setcookie " ++ Cookie,
{ok, Node} =
slave:start(
"127.0.0.1",
node_name(Node),
InetLoaderArgs
),
ok = rpc:block_call(Node, code, add_paths, [code:get_path()]),
{ok, _} = rpc:block_call(Node, application, ensure_all_started, [gen_buffer]),
ok = load_support_files(Node),
Node.
%% @private
node_name(Node) ->
[Name, _] = binary:split(atom_to_binary(Node, utf8), <<"@">>),
binary_to_atom(Name, utf8).
%% @private
load_support_files(Node) ->
{module, gen_buffer_test_cases} =
rpc:block_call(Node, code, load_file, [gen_buffer_test_cases]),
ok.
%% @private
stop_slaves(Slaves) ->
stop_slaves(Slaves, []).
%% @private
stop_slaves([], Acc) ->
lists:usort(Acc);
stop_slaves([Node | T], Acc) ->
ok = slave:stop(Node),
pang = net_adm:ping(Node),
stop_slaves(T, [Node | Acc]).
cleanup_remote_buffers() ->
_ = register(ct, self()),
Buffers = [parent_gen_buffer_test, parent_test, parent_test2],
[begin
{Name, Node} ! exit,
gen_buffer_ct:wait_for_msg(300)
end || Name <- Buffers, Node <- ?SLAVES],
ok.
| null |
https://raw.githubusercontent.com/cabol/gen_buffer/35dbe556f84bc84e10527ecb0c98228815064d9d/test/gen_buffer_dist_SUITE.erl
|
erlang
|
Common Test
Common Test Cases
Test Cases
===================================================================
Common Test
===================================================================
===================================================================
Tests Cases
===================================================================
===================================================================
===================================================================
|
-module(gen_buffer_dist_SUITE).
-include_lib("common_test/include/ct.hrl").
-export([
all/0,
init_per_suite/1,
end_per_suite/1,
init_per_testcase/2,
end_per_testcase/2
]).
-include_lib("mixer/include/mixer.hrl").
-mixin([
{gen_buffer_test_cases, [
t_eval/1,
t_eval_error/1,
t_send_and_recv_errors/1,
t_send_recv/1,
t_fire_and_forget/1
]}
]).
-export([
t_send_and_recv/1,
t_get_set_workers/1,
t_size/1,
t_info_buffer/1,
t_info/1,
t_no_available_nodes/1
]).
-define(EXCLUDED_FUNS, [
module_info,
all,
init_per_suite,
end_per_suite,
init_per_testcase,
end_per_testcase
]).
-define(BUFFER, gen_buffer_test).
-define(SLAVES, ['[email protected]', '[email protected]']).
all() ->
Exports = ?MODULE:module_info(exports),
[F || {F, _} <- Exports, not lists:member(F, ?EXCLUDED_FUNS)].
init_per_suite(Config) ->
ok = start_primary_node(),
{ok, _} = application:ensure_all_started(gen_buffer),
ok = allow_boot(),
Nodes = start_slaves(?SLAVES),
Opts = #{
message_handler => test_message_handler,
send_replies => true,
init_args => ok
},
[{nodes, Nodes}, {opts, Opts}, {module, gen_buffer_dist} | Config].
end_per_suite(Config) ->
stop_slaves(?SLAVES),
Config.
init_per_testcase(_, Config) ->
Config.
end_per_testcase(_, Config) ->
ok = cleanup_remote_buffers(),
Config.
t_send_and_recv(Config) ->
Mod = ?config(module, Config),
Opts = ?config(opts, Config),
_ = gen_buffer_ct:create_buffer(?BUFFER, Opts, Mod, Config),
Ref1 = Mod:send(?BUFFER, "hello"),
{reply, Ref1, ?BUFFER, "hello"} = gen_buffer_ct:wait_for_msg(),
Ref2 = Mod:send(?BUFFER, "hello"),
{ok, "hello"} = Mod:recv(?BUFFER, Ref2),
ok = Mod:poll(?BUFFER),
{error, timeout} = gen_buffer_ct:wait_for_msg(200),
ok = Mod:stop(?BUFFER),
ok = Mod:stop(test).
t_get_set_workers(Config) ->
Mod = ?config(module, Config),
Opts = ?config(opts, Config),
_ = gen_buffer_ct:create_buffer(?BUFFER, Opts, Mod, Config),
[
{'[email protected]', Workers1_N1},
{'[email protected]', Workers1_N2},
{'[email protected]', Workers1_N3}
] = lists:usort(Mod:get_workers(?BUFFER)),
Len1 = erlang:system_info(schedulers_online),
ok = lists:foreach(fun(WL) ->
Len1 = length(WL)
end, [Workers1_N1, Workers1_N2, Workers1_N3]),
[
{'[email protected]', Workers2_N1},
{'[email protected]', Workers2_N2},
{'[email protected]', Workers2_N3}
] = lists:usort(Mod:set_workers(?BUFFER, 3)),
ok = lists:foreach(fun(WL) ->
3 = length(WL)
end, [Workers2_N1, Workers2_N2, Workers2_N3]),
{ok, _} = Mod:get_worker(?BUFFER).
t_size(Config) ->
Mod = ?config(module, Config),
Opts = ?config(opts, Config),
_ = gen_buffer_ct:create_buffer(?BUFFER, Opts, Mod, Config),
[
{'[email protected]', Size},
{'[email protected]', _},
{'[email protected]', _}
] = lists:usort(Mod:size(?BUFFER)),
true = is_integer(Size).
t_info_buffer(Config) ->
Mod = ?config(module, Config),
Opts = ?config(opts, Config),
_ = gen_buffer_ct:create_buffer(?BUFFER, Opts, Mod, Config),
[
{'[email protected]', Data},
{'[email protected]', _},
{'[email protected]', _}
] = lists:usort(Mod:info(?BUFFER)),
#{workers := _, size := _} = Data.
t_info(Config) ->
Mod = ?config(module, Config),
Opts = ?config(opts, Config),
_ = gen_buffer_ct:create_buffer(?BUFFER, Opts, Mod, Config),
_ = gen_buffer_ct:create_buffer(test, Opts, Mod, Config),
ok = pg2:create(yet_another_group),
[
{'[email protected]', Data},
{'[email protected]', _},
{'[email protected]', _}
] = lists:usort(Mod:info()),
#{
?BUFFER := #{workers := _, size := _},
test := #{workers := _, size := _}
} = Data.
t_no_available_nodes(Config) ->
Mod = ?config(module, Config),
Opts = ?config(opts, Config),
ok = pg2:delete(gen_buffer:pg2_namespace(?BUFFER)),
try
Mod:send(?BUFFER, "hello")
catch
error:no_available_nodes -> ok
end,
_ = gen_buffer_ct:create_buffer(?BUFFER, Opts, Mod, Config),
_ = Mod:send(?BUFFER, "hello").
%%% Internal functions
%% @private
start_primary_node() ->
{ok, _} = net_kernel:start(['[email protected]']),
true = erlang:set_cookie(node(), gen_buffer),
ok.
%% @private
allow_boot() ->
_ = erl_boot_server:start([]),
{ok, IPv4} = inet:parse_ipv4_address("127.0.0.1"),
erl_boot_server:add_slave(IPv4).
%% @private
start_slaves(Slaves) ->
start_slaves(Slaves, []).
%% @private
start_slaves([], Acc) ->
lists:usort(Acc);
start_slaves([Node | T], Acc) ->
start_slaves(T, [spawn_node(Node) | Acc]).
%% @private
spawn_node(Node) ->
Cookie = atom_to_list(erlang:get_cookie()),
InetLoaderArgs = "-loader inet -hosts 127.0.0.1 -setcookie " ++ Cookie,
{ok, Node} =
slave:start(
"127.0.0.1",
node_name(Node),
InetLoaderArgs
),
ok = rpc:block_call(Node, code, add_paths, [code:get_path()]),
{ok, _} = rpc:block_call(Node, application, ensure_all_started, [gen_buffer]),
ok = load_support_files(Node),
Node.
%% @private
node_name(Node) ->
[Name, _] = binary:split(atom_to_binary(Node, utf8), <<"@">>),
binary_to_atom(Name, utf8).
%% @private
load_support_files(Node) ->
{module, gen_buffer_test_cases} =
rpc:block_call(Node, code, load_file, [gen_buffer_test_cases]),
ok.
%% @private
stop_slaves(Slaves) ->
stop_slaves(Slaves, []).
%% @private
stop_slaves([], Acc) ->
lists:usort(Acc);
stop_slaves([Node | T], Acc) ->
ok = slave:stop(Node),
pang = net_adm:ping(Node),
stop_slaves(T, [Node | Acc]).
cleanup_remote_buffers() ->
_ = register(ct, self()),
Buffers = [parent_gen_buffer_test, parent_test, parent_test2],
[begin
{Name, Node} ! exit,
gen_buffer_ct:wait_for_msg(300)
end || Name <- Buffers, Node <- ?SLAVES],
ok.
|
5e6b174130d1f4cc4752c458aa3d177fc46ae1f1bddb46d760e753dc166b3970
|
goldfirere/units
|
NoVector.hs
|
{-# OPTIONS_GHC -fno-warn-type-defaults -fno-warn-missing-signatures #-}
module Tests.Compile.NoVector where
import Data.Metrology
import Data.Metrology.SI
x = 5 % Meter
y = 2 % Second
vel = x |/| y
| null |
https://raw.githubusercontent.com/goldfirere/units/0ffc07627bb6c1eacd60469fd9366346cbfde334/units-test/Tests/Compile/NoVector.hs
|
haskell
|
# OPTIONS_GHC -fno-warn-type-defaults -fno-warn-missing-signatures #
module Tests.Compile.NoVector where
import Data.Metrology
import Data.Metrology.SI
x = 5 % Meter
y = 2 % Second
vel = x |/| y
|
|
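
The NoVector.hs sample above only exercises % and |/|. Below is a minimal companion sketch (not part of the repository) showing how the same quantities combine with the other Data.Metrology operators; it assumes the published units/units-defs API (|*|, |+|, #, :/), and the module name is invented for illustration.

{-# OPTIONS_GHC -fno-warn-type-defaults -fno-warn-missing-signatures #-}
module Tests.Compile.NoVectorUsage where
import Data.Metrology
import Data.Metrology.SI
-- Quantities built exactly as in NoVector.hs above.
x = 5 % Meter
y = 2 % Second
-- |/| divides quantities, |*| multiplies them, and |+| adds quantities of
-- the same dimension; adding x to y instead would be rejected by the type
-- checker, which is what the compile test relies on.
vel  = x |/| y
area = x |*| x
path = x |+| (3 % Meter)
-- To get a plain number back out, a unit is named explicitly, e.g.
-- vel # (Meter :/ Second), so the dimension bookkeeping stays static.
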
c51b1887f1ecc79e4bbf13f5f8165f0e25ae35df6484323127d9a3861dcb9b0f
|
dgtized/shimmers
|
graph_test.cljc
|
(ns shimmers.math.graph-test
(:require
[clojure.test :as t :refer [deftest is] :include-macros true]
[loom.graph :as lg]
[shimmers.math.graph :as sut]
[thi.ng.geom.vector :as gv]))
;; a - b f
;; \
;; | e |
;; \
;; d - c g
(def points (map gv/vec2 [[0 0] [1 0] [1 1] [0 1] [0.5 0.5] [2 0] [2 1]]))
(def graph
(let [[a b c d] points]
(sut/edges->graph [[a b] [b c] [c d] [d a] [a c]])))
(def graph-e
(let [[a b c d e] points]
(sut/edges->graph [[a b] [b c] [c d] [d a] [a e] [e c]])))
(deftest edge-uniqueness
(is (= 10 (count (lg/edges graph))))
(is (= 5 (count (sut/unique-edges (lg/edges graph))))))
(deftest planarity
(is (every? (fn [[p q]] (sut/planar-edge? graph p q)) (lg/edges graph))
"every existing edge in a planar graph are planar with that graph")
(let [[a b c d e f g] points]
(is (not (sut/planar-edge? graph b d)) "crosses a-c")
(is (not (sut/planar-edge? graph d b)) "crosses a-c, reflexive")
(is (not (sut/planar-edge? graph a f)) "coincident to a-b")
(is (not (sut/planar-edge? graph a g)) "crosses b-c")
(is (not (sut/planar-edge? graph d f)) "crosses b-c")
(is (not (sut/planar-edge? graph d g)) "coincident to d-c")
(is (sut/planar-edge? graph b f) "coincident to a-b, but *only* at b")
(is (sut/planar-edge? graph b g))
(is (sut/planar-edge? graph c f))
(is (sut/planar-edge? graph c g) "coincident to c-d, but *only* at c")
(t/testing "given a new, central point e on a-c"
;; b-e and d-e only touch at e, so by current definition they are planar,
;; but this definition is debatable.
(is (sut/planar-edge? graph b e))
(is (sut/planar-edge? graph d e))
(is (not (sut/planar-edge? graph a e)) "coincident with a-c")
(is (not (sut/planar-edge? graph c e)) "coincident with a-c")
(is (not (sut/planar-edge? graph e f)) "crosses b-c")
(is (not (sut/planar-edge? graph e g)) "crosses b-c"))
(t/testing "with edges a-e and e-c instead of a-c"
(is (sut/planar-edge? graph-e e b))
(is (sut/planar-edge? graph-e e d))
(is (not (sut/planar-edge? graph-e a c)) "coincident with a-e and e-c")
(is (not (sut/planar-edge? graph-e b d)) "intersects a-e and e-c at e"))))
(comment (t/run-tests))
| null |
https://raw.githubusercontent.com/dgtized/shimmers/f096c20d7ebcb9796c7830efcd7e3f24767a46db/test/shimmers/math/graph_test.cljc
|
clojure
|
a - b f
\
| e |
\
d - c g
b-e and d-e only touch at e, so by current definition they are planar,
but this definition is debatable.
|
(ns shimmers.math.graph-test
(:require
[clojure.test :as t :refer [deftest is] :include-macros true]
[loom.graph :as lg]
[shimmers.math.graph :as sut]
[thi.ng.geom.vector :as gv]))
(def points (map gv/vec2 [[0 0] [1 0] [1 1] [0 1] [0.5 0.5] [2 0] [2 1]]))
(def graph
(let [[a b c d] points]
(sut/edges->graph [[a b] [b c] [c d] [d a] [a c]])))
(def graph-e
(let [[a b c d e] points]
(sut/edges->graph [[a b] [b c] [c d] [d a] [a e] [e c]])))
(deftest edge-uniqueness
(is (= 10 (count (lg/edges graph))))
(is (= 5 (count (sut/unique-edges (lg/edges graph))))))
(deftest planarity
(is (every? (fn [[p q]] (sut/planar-edge? graph p q)) (lg/edges graph))
"every existing edge in a planar graph are planar with that graph")
(let [[a b c d e f g] points]
(is (not (sut/planar-edge? graph b d)) "crosses a-c")
(is (not (sut/planar-edge? graph d b)) "crosses a-c, reflexive")
(is (not (sut/planar-edge? graph a f)) "coincident to a-b")
(is (not (sut/planar-edge? graph a g)) "crosses b-c")
(is (not (sut/planar-edge? graph d f)) "crosses b-c")
(is (not (sut/planar-edge? graph d g)) "coincident to d-c")
(is (sut/planar-edge? graph b f) "coincident to a-b, but *only* at b")
(is (sut/planar-edge? graph b g))
(is (sut/planar-edge? graph c f))
(is (sut/planar-edge? graph c g) "coincident to c-d, but *only* at c")
(t/testing "given a new, central point e on a-c"
(is (sut/planar-edge? graph b e))
(is (sut/planar-edge? graph d e))
(is (not (sut/planar-edge? graph a e)) "coincident with a-c")
(is (not (sut/planar-edge? graph c e)) "coincident with a-c")
(is (not (sut/planar-edge? graph e f)) "crosses b-c")
(is (not (sut/planar-edge? graph e g)) "crosses b-c"))
(t/testing "with edges a-e and e-c instead of a-c"
(is (sut/planar-edge? graph-e e b))
(is (sut/planar-edge? graph-e e d))
(is (not (sut/planar-edge? graph-e a c)) "coincident with a-e and e-c")
(is (not (sut/planar-edge? graph-e b d)) "intersects a-e and e-c at e"))))
(comment (t/run-tests))
|
3a019a81fd64a981e31d695e7304b571c0dc25c16c2c2b33bc3b3ce259e54ef2
|
UnBParadigmas2022-1/2022.1_G3_Funcional_ProjetoPacman
|
Djikstra.hs
|
module Algorithms.Djikstra (djikstraPath) where
import Data.Heap ( insert, null, view, empty, MinPrioHeap )
import Data.List ( (\\) )
import Graphics.Gloss ( Point )
import Game.Map
( freeAdjsPoints, generateNext, mapaAtual, point2Index )
import Types ( Mapa, MultValor )
calcCaminho :: Mapa -> [MultValor] -> Point -> MinPrioHeap Float MultValor -> (MultValor, MinPrioHeap Float MultValor)
calcCaminho mapa visitados posInicial heap = do
let Just ((pesoSelecionado, (selecionado, paiSelecionado)), heapPop) = view heap
let adjsLivres = (freeAdjsPoints selecionado) \\ map (\(filho, _) -> filho) visitados
let novaHeap = foldl (\heap adj -> insereHeap (adj, selecionado) ((mapa !! point2Index adj) + pesoSelecionado) heap) heapPop adjsLivres
((selecionado, paiSelecionado), novaHeap)
djikstra :: Point -> Point -> [MultValor] -> MinPrioHeap Float MultValor -> [MultValor]
djikstra posInicial posFinal visitados heap
| selecionado == posFinal = novosVisitados
| not isVazia = djikstra selecionado posFinal novosVisitados novaHeap
| otherwise = error "Caminho nao encontrado"
where
isVazia = Data.Heap.null heap
((selecionado, paiSelecionado), novaHeap) = calcCaminho mapaAtual visitados posInicial heap
novosVisitados
| not (selecionado `elem` map (\(filho, _) -> filho) visitados) = visitados ++ [(selecionado, paiSelecionado)]
| otherwise = visitados
insereHeap :: MultValor -> Float -> MinPrioHeap Float MultValor -> MinPrioHeap Float MultValor
insereHeap (posicao, pai) peso heap =
Data.Heap.insert (peso, (posicao, pai)) heap
menorCaminho :: Point -> Point -> [MultValor]
menorCaminho posInicial posFinal
| posInicial == posFinal = [(posFinal, posFinal)]
| otherwise = caminho
where
heap = insereHeap (posInicial, posInicial) 0 (empty :: MinPrioHeap Float MultValor)
caminho = djikstra posInicial posFinal [] heap
djikstraPath :: Point -> Point -> Point
djikstraPath posInicial posFinal = generateNext (reverse caminho) posInicial
where
caminho = menorCaminho posInicial posFinal
| null |
https://raw.githubusercontent.com/UnBParadigmas2022-1/2022.1_G3_Funcional_ProjetoPacman/ea190f2b1c38591c7c4becfc550eeaf1ec1b94c3/app/Algorithms/Djikstra.hs
|
haskell
|
module Algorithms.Djikstra (djikstraPath) where
import Data.Heap ( insert, null, view, empty, MinPrioHeap )
import Data.List ( (\\) )
import Graphics.Gloss ( Point )
import Game.Map
( freeAdjsPoints, generateNext, mapaAtual, point2Index )
import Types ( Mapa, MultValor )
calcCaminho :: Mapa -> [MultValor] -> Point -> MinPrioHeap Float MultValor -> (MultValor, MinPrioHeap Float MultValor)
calcCaminho mapa visitados posInicial heap = do
let Just ((pesoSelecionado, (selecionado, paiSelecionado)), heapPop) = view heap
let adjsLivres = (freeAdjsPoints selecionado) \\ map (\(filho, _) -> filho) visitados
let novaHeap = foldl (\heap adj -> insereHeap (adj, selecionado) ((mapa !! point2Index adj) + pesoSelecionado) heap) heapPop adjsLivres
((selecionado, paiSelecionado), novaHeap)
djikstra :: Point -> Point -> [MultValor] -> MinPrioHeap Float MultValor -> [MultValor]
djikstra posInicial posFinal visitados heap
| selecionado == posFinal = novosVisitados
| not isVazia = djikstra selecionado posFinal novosVisitados novaHeap
| otherwise = error "Caminho nao encontrado"
where
isVazia = Data.Heap.null heap
((selecionado, paiSelecionado), novaHeap) = calcCaminho mapaAtual visitados posInicial heap
novosVisitados
| not (selecionado `elem` map (\(filho, _) -> filho) visitados) = visitados ++ [(selecionado, paiSelecionado)]
| otherwise = visitados
insereHeap :: MultValor -> Float -> MinPrioHeap Float MultValor -> MinPrioHeap Float MultValor
insereHeap (posicao, pai) peso heap =
Data.Heap.insert (peso, (posicao, pai)) heap
menorCaminho :: Point -> Point -> [MultValor]
menorCaminho posInicial posFinal
| posInicial == posFinal = [(posFinal, posFinal)]
| otherwise = caminho
where
heap = insereHeap (posInicial, posInicial) 0 (empty :: MinPrioHeap Float MultValor)
caminho = djikstra posInicial posFinal [] heap
djikstraPath :: Point -> Point -> Point
djikstraPath posInicial posFinal = generateNext (reverse caminho) posInicial
where
caminho = menorCaminho posInicial posFinal
|
|
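
Djikstra.hs above depends on Game.Map for adjacency and edge weights, so it cannot run on its own. The following self-contained sketch (the names Graph and shortestDist are invented for illustration) shows the same MinPrioHeap-driven loop over an explicit adjacency map, assuming only the heap and containers packages.

module DijkstraSketch where
import Data.Heap (MinPrioHeap, empty, insert, view)
import qualified Data.Map.Strict as M
-- Hypothetical graph representation: node -> [(neighbour, edge weight)].
type Graph = M.Map Int [(Int, Float)]
-- Distance from start to goal, driven by a MinPrioHeap the same way as the
-- djikstra loop above: pop the cheapest frontier node, then push its
-- unvisited neighbours with the accumulated cost.
shortestDist :: Graph -> Int -> Int -> Maybe Float
shortestDist g start goal = go (insert (0, start) (empty :: MinPrioHeap Float Int)) []
  where
    go heap visited = case view heap of
      Nothing -> Nothing
      Just ((cost, node), rest)
        | node == goal        -> Just cost
        | node `elem` visited -> go rest visited
        | otherwise ->
            let adjs  = M.findWithDefault [] node g
                heap' = foldl (\h (n, w) -> insert (cost + w, n) h) rest adjs
            in go heap' (node : visited)
-- Example: shortestDist (M.fromList [(1,[(2,1),(3,4)]),(2,[(3,1)]),(3,[])]) 1 3 == Just 2.0
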
548419b3c31a8a8e3d8c734c1d654fdbd0f84f8291263632fac0e395c0a030e0
|
semaj/hask-raft
|
Server.hs
|
{-# LANGUAGE BangPatterns #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
module Server where
import Message
import Utils
import qualified Data.HashMap.Strict as HM
import qualified Data.HashSet as HS
import Data.List
import Data.Aeson
import Data.Maybe
import Data.Time
import System.Random
import Debug.Trace
data ServerState = Follower | Candidate | Leader deriving (Show, Eq)
-- (!)s just force a strict datatype, as Haskell is a lazy language by default
data Server = Server {
sState :: !ServerState,
sid :: !String, -- ID
others :: ![String], -- The other server IDs
store :: HM.HashMap String String, -- The hashmap of keys & values
sendMe :: ![Message], -- Messages to send this round
messQ :: HM.HashMap String Message, -- Server -> Next message to send (not heartbeat!)
timeQ :: HM.HashMap String UTCTime, -- Server -> Last time we sent a message (not heartbeat!)
pendingQ :: [Message], -- Followers/Candidates store this so when a leader is elected, flush
currentTerm :: Int,
votedFor :: !String, -- Our leader
slog :: ![Command], -- Log of commands to execute
commitIndex :: Int,
lastApplied :: Int,
nextIndices :: HM.HashMap String Int,
matchIndices :: HM.HashMap String Int,
votes :: HS.HashSet String, -- Set filled with servers that voted for us
timeout :: Int, -- When we will timeout to candidate state (ms)
lastHB :: UTCTime, -- Last time we sent a heartbeat
clock :: UTCTime -- last time we received a raft message OR started an election
} deriving (Show)
initServer :: String -> [String] -> UTCTime -> Int -> Server
initServer myID otherIDs time timeout = Server { sid = myID,
others = otherIDs,
sState = Follower,
store = HM.empty,
messQ = HM.empty,
timeQ = HM.fromList $ map (\x -> (x, time)) otherIDs,
pendingQ = [],
sendMe = [],
currentTerm = 0,
votedFor = "FFFF",
slog = [],
commitIndex = -1,
lastApplied = -1,
nextIndices = HM.fromList $ map (\x -> (x, 0)) otherIDs,
matchIndices = HM.fromList $ map (\x -> (x, (-1))) otherIDs,
lastHB = time,
votes = HS.empty,
timeout = timeout,
clock = time }
-- The main function for transitioning servers
step :: String -> UTCTime -> Server -> Server
step newMid now s@Server{..}
| sState == Follower = followerExecute s
| sState == Candidate = checkVotes $ serverSend now $ candidatePrepare newMid $ s
| sState == Leader = sendHBs now $ serverSend now $ leaderPrepare newMid $ leaderExecute s
-- Check if the candidate has enough votes. If so, transition to Leader.
checkVotes :: Server -> Server
checkVotes s@Server{..}
  -- | HS.size votes >= majority = trace (sid ++ " to lead w/ " ++ (show $ HS.size votes) ++ " votes, term " ++ (show currentTerm)) $
| HS.size votes >= majority = s { sState = Leader,
votedFor = sid,
messQ = HM.empty,
timeQ = HM.fromList $ map (\x -> (x, clock)) others,
nextIndices = HM.map (const $ commitIndex + 1) nextIndices,
matchIndices = HM.map (const commitIndex) matchIndices,
sendMe = map (\srvr -> leaderAE
commitIndex
currentTerm
sid
("init" ++ srvr)
slog
(srvr, (commitIndex + 1))) others,
votes = HS.empty }
| otherwise = s
-- Produce a RequestVote for a given Candidate's information
candidateRV :: Int -> String -> String -> [Command] -> String -> Message
candidateRV currentTerm src baseMid slog dst = Message src dst "FFFF" RAFT (baseMid ++ dst) Nothing Nothing rv
where lastLogIndex = getLastLogIndex slog
lastLogTerm = getLastLogTerm slog
rv = Just $ RV currentTerm src lastLogIndex lastLogTerm
-- Add missing messages (RVs) to our message Q so that they're ready
candidatePrepare :: String -> Server -> Server
candidatePrepare newMid s@Server{..} = s { messQ = newMessQ }
where recipients = filter (\ srvr -> (not $ HS.member srvr votes) && (not $ HM.member srvr messQ)) others
newRVs = map (candidateRV currentTerm sid newMid slog) recipients
newMessQ = zipAddAllM recipients newRVs messQ
-- Given a server, send all the things in our messQ if they've timed out in the timeQ
serverSend :: UTCTime -> Server -> Server
serverSend now s@Server{..} = s { sendMe = sendMe ++ resendMessages, timeQ = newTimeQ }
where resendMe = getNeedResending now timeQ
resendMessages = catMaybes $ map (\ srvr -> HM.lookup srvr messQ) resendMe
newTimeQ = zipAddAllT resendMe (replicate (length resendMe) now) timeQ
-- If enough time has elapsed, send blank, special heartbeat messages
sendHBs :: UTCTime -> Server -> Server
sendHBs now s@Server{..}
| timedOut lastHB now heartbeatRate = s { sendMe = push hb sendMe, lastHB = now }
| otherwise = s
where ae = Just $ AE (-5) sid (-5) (-5) [] (-5)
hb = Message sid "FFFF" sid RAFT "HEARTBEAT" Nothing Nothing ae
-- Prepare AEs that need to be send, constructing appropriate list of commands
leaderPrepare :: String -> Server -> Server
leaderPrepare newMid s@Server{..} = s { messQ = filteredMessQ }
where newAEs = map (\ srvr -> leaderAE commitIndex currentTerm sid newMid slog (srvr, (HM.!) nextIndices srvr)) others
newMessQ = zipAddAllM others newAEs messQ
filteredMessQ = HM.filter noHeartbeat newMessQ
-- Returns false if the entries we would send are empty
noHeartbeat :: Message -> Bool
noHeartbeat (Message _ _ _ _ _ _ _ (Just (AE _ _ _ _ [] _))) = False
noHeartbeat _ = True
leaderAE :: Int -> Int -> String -> String -> [Command] -> (String, Int) -> Message
leaderAE commitIndex currentTerm src baseMid slog (dst, nextIndex) = message
where entries = getNextCommands slog nextIndex
prevLogIndex = getPrevLogIndex nextIndex
prevLogTerm = getPrevLogTerm slog nextIndex
ae = Just $ AE currentTerm src prevLogIndex prevLogTerm entries commitIndex
message = Message src dst src RAFT (baseMid ++ dst) Nothing Nothing ae
-- Execute the commands in our slog that have been replicated sufficiently
leaderExecute :: Server -> Server
leaderExecute s@Server{..}
| commitIndex == toBeCommitted = s
| otherwise = executedServer { commitIndex = toBeCommitted, lastApplied = toBeCommitted }
  where toBeCommitted = (length slog) - 1
toBeExecuted = take (toBeCommitted - commitIndex) $ drop (commitIndex + 1) slog
executedServer = execute s toBeExecuted
-- Execute the commands in our slog if the leader has told us they've been committed
followerExecute :: Server -> Server
followerExecute s@Server{..}
| commitIndex == lastApplied = s
| otherwise = executed { lastApplied = commitIndex }
where toBeExecuted = drop (lastApplied + 1) $ take (commitIndex + 1) slog
executed = (execute s toBeExecuted) { sendMe = sendMe }
-- Actually run the commands on our Store hashmap
execute :: Server -> [Command] -> Server
execute s [] = s
execute s@Server{..} (Command{..}:cs)
| ctype == CGET = execute s { sendMe = push (message (Just ckey) get) sendMe } cs
| ctype == CPUT = execute s { sendMe = push (message (Just ckey) (Just cvalue)) sendMe, store = newStore } cs
where get = HM.lookup ckey store
newStore = HM.insert ckey cvalue store
message k v = Message sid creator sid (if isNothing v then FAIL else OK) cmid k v Nothing
-- If we aren't a leader and we've timed out, transition to Candidate state
maybeToCandidate :: UTCTime -> Int -> Server -> Server
maybeToCandidate now newTimeout s
| (sState s) == Leader = s
  -- | timedOut (clock s) now (timeout s) = trace ((sid s) ++ " timed out (current leader " ++ (votedFor s) ++ "), moving to term " ++ (show $ currentTerm s + 1)) candidate
| timedOut (clock s) now (timeout s) = candidate
| otherwise = s
where candidate = s { sState = Candidate,
timeout = newTimeout,
messQ = HM.empty,
sendMe = [],
votedFor = " FFFF " , not sure this is necessary yet TODO
clock = now,
votes = HS.empty,
currentTerm = (currentTerm s) + 1 }
-- If the message is nothing and we've expired, transition to Candidate
-- If not, respond to the message
receiveMessage :: Server -> UTCTime -> Int -> Maybe Message -> Server
receiveMessage s time newTimeout Nothing = maybeToCandidate time newTimeout s
receiveMessage s time newTimeout (Just m@Message{..})
| messType == GET = maybeToCandidate time newTimeout $ respondGet s m
| messType == PUT = maybeToCandidate time newTimeout $ respondPut s m
| messType == RAFT = maybeToCandidate time newTimeout $ respondRaft time s m
| otherwise = maybeToCandidate time newTimeout s
clearPendingQ :: Server -> Server
clearPendingQ s@Server{..}
| length pendingQ == 0 = s
| otherwise = clearPendingQ $ responded { pendingQ = tail pendingQ }
where pending = head pendingQ
responded = if (messType pending) == GET then respondGet s pending else respondPut s pending
-- If we aren't the leader, redirect to it. If we are, push this to our log.
respondGet :: Server -> Message -> Server
respondGet s@Server{..} m@Message{..}
| sState == Leader = s { slog = push command slog }
| sState == Candidate = s { pendingQ = push m pendingQ }
| otherwise = s { sendMe = push redirect sendMe }
where command = Command CGET currentTerm src mid (fromJust key) ""
redirect = Message sid src votedFor REDIRECT mid Nothing Nothing Nothing
-- If we aren't the leader, redirect. If we are, push to log
respondPut :: Server -> Message -> Server
respondPut s@Server{..} m@Message{..}
| sState == Leader = s { slog = push command slog }
| sState == Candidate = s { pendingQ = push m pendingQ }
| otherwise = s { sendMe = push redirect sendMe }
where command = Command CPUT currentTerm src mid (fromJust key) (fromJust value)
redirect = Message sid src votedFor REDIRECT mid Nothing Nothing Nothing
-- Respond to raft message - delegates based on current state
respondRaft :: UTCTime -> Server -> Message -> Server
respondRaft now s@Server{..} m@Message{..}
| sState == Follower = respondFollower now s m $ fromJust rmess
| sState == Candidate = respondCandidate s m $ fromJust rmess
| otherwise = respondLeader s m $ fromJust rmess
-- Get a RequestVote Response, given a follower's information
followerRVR :: String -> Int -> String -> String -> Int -> String -> Bool -> Message
followerRVR candidate term mid votedFor currentTerm src success = message
where realTerm = if success then term else currentTerm
realLeader = if success then candidate else votedFor
rvr = Just $ RVR realTerm success
message = Message src candidate realLeader RAFT mid Nothing Nothing rvr
-- Respond to a message. (as a follower)
respondFollower :: UTCTime -> Server -> Message -> RMessage -> Server
respondFollower _ s@Server{..} m@Message{..} r@RV{..} -- Respond to RV
-- | term < currentTerm = trace (sid ++ " rejecting " ++ candidateId ++ " for term. mine: " ++ (show currentTerm) ++ ", theirs: " ++ (show term)) reject
| term < currentTerm = reject
-- | upToDate slog lastLogTerm lastLogIndex = trace (sid ++ " granting " ++ candidateId ++ " for term " ++ show term) grant
| upToDate slog lastLogTerm lastLogIndex = grant
-- | otherwise = trace (sid ++ " rejecting " ++ candidateId ++ " for up-to-dateness, their term: " ++ (show term) ++ ", my term: " ++ show currentTerm ++ ", am I timed out? " ++ (show $ clock)) reject { currentTerm = term } -- should we update the term anyway?
| otherwise = reject { currentTerm = term } -- should we update the term anyway?
where baseMessage = followerRVR candidateId term mid votedFor currentTerm sid -- needs success (curried)
grant = s { sendMe = push (baseMessage True) sendMe, votedFor = candidateId, currentTerm = term }
reject = s { sendMe = push (baseMessage False) sendMe }
respondFollower now s@Server{..} m@Message{..} r@AE{..} -- Respond to AE
| mid == "HEARTBEAT" = s { clock = now }
| term < currentTerm = reject
| prevLogIndex <= 0 = succeed
| (length slog - 1 < prevLogIndex) = inconsistent
| (cterm $ (slog!!prevLogIndex)) /= prevLogTerm = inconsistent { slog = deleteSlog }
| otherwise = succeed
where mReject = Message sid src votedFor RAFT mid Nothing Nothing $ Just $ AER currentTerm (-1) False
reject = s { sendMe = push mReject sendMe, clock = now }
mIncons = Message sid src src RAFT mid Nothing Nothing $ Just $ AER term (-1) False
inconsistent = s { votedFor = src, currentTerm = term, sendMe = push mIncons sendMe, clock = now }
deleteSlog = cleanSlog slog prevLogIndex
addSlog = union slog entries
newCommitIndex = getNewCommitIndex leaderCommit commitIndex prevLogIndex (length entries)
mSucceed = Message sid src src RAFT mid Nothing Nothing $ Just $ AER term (length addSlog - 1) True
succeed = s { slog = addSlog,
commitIndex = newCommitIndex,
currentTerm = term,
sendMe = push mSucceed sendMe,
clock = now }
respondFollower _ s _ r = s
-- Respond to messages as a Leader
respondLeader :: Server -> Message -> RMessage -> Server
respondLeader s@Server{..} m@Message{..} r@AE{..} -- Respond to another AE
| term > currentTerm = s { sState = Follower, currentTerm = term, votedFor = src }
| otherwise = s
respondLeader s@Server{..} m@Message{..} r@AER{..} -- Respond to AE responses
| success == False = s { nextIndices = HM.adjust (\x -> if x <= 0 then 0 else x - 1) src nextIndices,
messQ = newMessQ }
| success == True = s { nextIndices = HM.insert src newNextIndex nextIndices,
matchIndices = HM.insert src newMatchIndex matchIndices,
messQ = newMessQ }
where newMessQ = HM.delete src messQ
newNextIndex = if lastIndex >= length slog then length slog - 1 else lastIndex + 1
newMatchIndex = if lastIndex >= length slog then length slog - 1 else lastIndex
respondLeader s@Server{..} m@Message{..} _ = s
-- Respond to messages as a Candidate
respondCandidate :: Server -> Message -> RMessage -> Server
respondCandidate s@Server{..} m@Message{..} r@RVR{..} -- respond to RequestVote responses
| voteGranted == True = s { votes = HS.insert src votes, messQ = HM.delete src messQ }
| otherwise = s
respondCandidate s@Server{..} m@Message{..} r@AE{..} -- respond to AE responses
| term >= currentTerm = clearPendingQ $ s { sState = Follower, currentTerm = term, votedFor = src }
| otherwise = s
respondCandidate s@Server{..} m@Message{..} r@RV{..} -- respond to other RVs
  -- | term > currentTerm && upToDate slog lastLogTerm lastLogIndex = trace (sid ++ " Cgranting " ++ candidateId ++ " for term " ++ show term ++ ", my term was " ++ (show currentTerm)) grant { sState = Follower }
| term > currentTerm && upToDate slog lastLogTerm lastLogIndex = grant { sState = Follower }
| otherwise = reject
where baseMessage = followerRVR candidateId term mid votedFor currentTerm sid -- needs success (curried)
grant = s { sendMe = push (baseMessage True) sendMe,
votedFor = candidateId,
currentTerm = term,
votes = HS.empty }
reject = s { sendMe = push (baseMessage False) sendMe }
respondCandidate s _ r = s
| null |
https://raw.githubusercontent.com/semaj/hask-raft/c5bd63e30d504db757fccdc0e586ca1a843cac2b/Server.hs
|
haskell
|
# LANGUAGE BangPatterns #
# LANGUAGE OverloadedStrings #
ID
The other server IDs
The hashmap of keys & values
Messages to send this round
Server -> Next message to send (not heartbeat!)
Server -> Last time we sent a message (not heartbeat!)
Followers/Candidates store this so when a leader is elected, flush
Our leader
Log of commands to execute
Set filled with servers that voted for us
When we will timeout to candidate state (ms)
Last time we sent a heartbeat
last time we received a raft message OR started an election
The main function for transitioning servers
Check if the candidate has enough votes. If so, transition to Leader.
Add missing messages (RVs) to our message Q so that they're ready
Given a server, send all the things in our messQ if they've timed out in the timeQ
If enough time has elapsed, send blank, special heartbeat messages
Prepare AEs that need to be send, constructing appropriate list of commands
Returns false if the entries we would send are empty
Execute the commands in our slog that have been replicated sufficiently
Execute the commands in our slog if the leader has told us they've been committed
If we aren't a leader and we've timed out, transition to Candidate state
If the message is nothing and we've expired, transition to Candidate
If not, respond to the message
If we aren't the leader, redirect to it. If we are, push this to our log.
If we aren't the leader, redirect. If we are, push to log
Respond to raft message - delegates based on current state
Get a RequestVote Response, given a follower's information
Respond to a message. (as a follower)
| term < currentTerm = trace (sid ++ " rejecting " ++ candidateId ++ " for term. mine: " ++ (show currentTerm) ++ ", theirs: " ++ (show term)) reject
| upToDate slog lastLogTerm lastLogIndex = trace (sid ++ " granting " ++ candidateId ++ " for term " ++ show term) grant
| otherwise = trace (sid ++ " rejecting " ++ candidateId ++ " for up-to-dateness, their term: " ++ (show term) ++ ", my term: " ++ show currentTerm ++ ", am I timed out? " ++ (show $ clock)) reject { currentTerm = term } -- should we update the term anyway?
should we update the term anyway?
needs success (curried)
Respond to AE
Respond to messages as a Leader
Respond to AE responses
Respond to messages as a Candidate
respond to RequestVote responses
respond to AE responses
respond to other RVs
needs success (curried)
|
{-# LANGUAGE RecordWildCards #-}
module Server where
import Message
import Utils
import qualified Data.HashMap.Strict as HM
import qualified Data.HashSet as HS
import Data.List
import Data.Aeson
import Data.Maybe
import Data.Time
import System.Random
import Debug.Trace
data ServerState = Follower | Candidate | Leader deriving (Show, Eq)
-- (!)s just force a strict datatype, as Haskell is a lazy language by default
data Server = Server {
sState :: !ServerState,
currentTerm :: Int,
commitIndex :: Int,
lastApplied :: Int,
nextIndices :: HM.HashMap String Int,
matchIndices :: HM.HashMap String Int,
} deriving (Show)
initServer :: String -> [String] -> UTCTime -> Int -> Server
initServer myID otherIDs time timeout = Server { sid = myID,
others = otherIDs,
sState = Follower,
store = HM.empty,
messQ = HM.empty,
timeQ = HM.fromList $ map (\x -> (x, time)) otherIDs,
pendingQ = [],
sendMe = [],
currentTerm = 0,
votedFor = "FFFF",
slog = [],
commitIndex = -1,
lastApplied = -1,
nextIndices = HM.fromList $ map (\x -> (x, 0)) otherIDs,
matchIndices = HM.fromList $ map (\x -> (x, (-1))) otherIDs,
lastHB = time,
votes = HS.empty,
timeout = timeout,
clock = time }
step :: String -> UTCTime -> Server -> Server
step newMid now s@Server{..}
| sState == Follower = followerExecute s
| sState == Candidate = checkVotes $ serverSend now $ candidatePrepare newMid $ s
| sState == Leader = sendHBs now $ serverSend now $ leaderPrepare newMid $ leaderExecute s
checkVotes :: Server -> Server
checkVotes s@Server{..}
  -- | HS.size votes >= majority = trace (sid ++ " to lead w/ " ++ (show $ HS.size votes) ++ " votes, term " ++ (show currentTerm)) $
| HS.size votes >= majority = s { sState = Leader,
votedFor = sid,
messQ = HM.empty,
timeQ = HM.fromList $ map (\x -> (x, clock)) others,
nextIndices = HM.map (const $ commitIndex + 1) nextIndices,
matchIndices = HM.map (const commitIndex) matchIndices,
sendMe = map (\srvr -> leaderAE
commitIndex
currentTerm
sid
("init" ++ srvr)
slog
(srvr, (commitIndex + 1))) others,
votes = HS.empty }
| otherwise = s
-- Produce a RequestVote for a given Candidate's information
candidateRV :: Int -> String -> String -> [Command] -> String -> Message
candidateRV currentTerm src baseMid slog dst = Message src dst "FFFF" RAFT (baseMid ++ dst) Nothing Nothing rv
where lastLogIndex = getLastLogIndex slog
lastLogTerm = getLastLogTerm slog
rv = Just $ RV currentTerm src lastLogIndex lastLogTerm
candidatePrepare :: String -> Server -> Server
candidatePrepare newMid s@Server{..} = s { messQ = newMessQ }
where recipients = filter (\ srvr -> (not $ HS.member srvr votes) && (not $ HM.member srvr messQ)) others
newRVs = map (candidateRV currentTerm sid newMid slog) recipients
newMessQ = zipAddAllM recipients newRVs messQ
serverSend :: UTCTime -> Server -> Server
serverSend now s@Server{..} = s { sendMe = sendMe ++ resendMessages, timeQ = newTimeQ }
where resendMe = getNeedResending now timeQ
resendMessages = catMaybes $ map (\ srvr -> HM.lookup srvr messQ) resendMe
newTimeQ = zipAddAllT resendMe (replicate (length resendMe) now) timeQ
sendHBs :: UTCTime -> Server -> Server
sendHBs now s@Server{..}
| timedOut lastHB now heartbeatRate = s { sendMe = push hb sendMe, lastHB = now }
| otherwise = s
where ae = Just $ AE (-5) sid (-5) (-5) [] (-5)
hb = Message sid "FFFF" sid RAFT "HEARTBEAT" Nothing Nothing ae
leaderPrepare :: String -> Server -> Server
leaderPrepare newMid s@Server{..} = s { messQ = filteredMessQ }
where newAEs = map (\ srvr -> leaderAE commitIndex currentTerm sid newMid slog (srvr, (HM.!) nextIndices srvr)) others
newMessQ = zipAddAllM others newAEs messQ
filteredMessQ = HM.filter noHeartbeat newMessQ
noHeartbeat :: Message -> Bool
noHeartbeat (Message _ _ _ _ _ _ _ (Just (AE _ _ _ _ [] _))) = False
noHeartbeat _ = True
leaderAE :: Int -> Int -> String -> String -> [Command] -> (String, Int) -> Message
leaderAE commitIndex currentTerm src baseMid slog (dst, nextIndex) = message
where entries = getNextCommands slog nextIndex
prevLogIndex = getPrevLogIndex nextIndex
prevLogTerm = getPrevLogTerm slog nextIndex
ae = Just $ AE currentTerm src prevLogIndex prevLogTerm entries commitIndex
message = Message src dst src RAFT (baseMid ++ dst) Nothing Nothing ae
leaderExecute :: Server -> Server
leaderExecute s@Server{..}
| commitIndex == toBeCommitted = s
| otherwise = executedServer { commitIndex = toBeCommitted, lastApplied = toBeCommitted }
  where toBeCommitted = (length slog) - 1
toBeExecuted = take (toBeCommitted - commitIndex) $ drop (commitIndex + 1) slog
executedServer = execute s toBeExecuted
followerExecute :: Server -> Server
followerExecute s@Server{..}
| commitIndex == lastApplied = s
| otherwise = executed { lastApplied = commitIndex }
where toBeExecuted = drop (lastApplied + 1) $ take (commitIndex + 1) slog
executed = (execute s toBeExecuted) { sendMe = sendMe }
-- Actually run the commands on our Store hashmap
execute :: Server -> [Command] -> Server
execute s [] = s
execute s@Server{..} (Command{..}:cs)
| ctype == CGET = execute s { sendMe = push (message (Just ckey) get) sendMe } cs
| ctype == CPUT = execute s { sendMe = push (message (Just ckey) (Just cvalue)) sendMe, store = newStore } cs
where get = HM.lookup ckey store
newStore = HM.insert ckey cvalue store
message k v = Message sid creator sid (if isNothing v then FAIL else OK) cmid k v Nothing
maybeToCandidate :: UTCTime -> Int -> Server -> Server
maybeToCandidate now newTimeout s
| (sState s) == Leader = s
  -- | timedOut (clock s) now (timeout s) = trace ((sid s) ++ " timed out (current leader " ++ (votedFor s) ++ "), moving to term " ++ (show $ currentTerm s + 1)) candidate
| timedOut (clock s) now (timeout s) = candidate
| otherwise = s
where candidate = s { sState = Candidate,
timeout = newTimeout,
messQ = HM.empty,
sendMe = [],
votedFor = " FFFF " , not sure this is necessary yet TODO
clock = now,
votes = HS.empty,
currentTerm = (currentTerm s) + 1 }
receiveMessage :: Server -> UTCTime -> Int -> Maybe Message -> Server
receiveMessage s time newTimeout Nothing = maybeToCandidate time newTimeout s
receiveMessage s time newTimeout (Just m@Message{..})
| messType == GET = maybeToCandidate time newTimeout $ respondGet s m
| messType == PUT = maybeToCandidate time newTimeout $ respondPut s m
| messType == RAFT = maybeToCandidate time newTimeout $ respondRaft time s m
| otherwise = maybeToCandidate time newTimeout s
clearPendingQ :: Server -> Server
clearPendingQ s@Server{..}
| length pendingQ == 0 = s
| otherwise = clearPendingQ $ responded { pendingQ = tail pendingQ }
where pending = head pendingQ
responded = if (messType pending) == GET then respondGet s pending else respondPut s pending
respondGet :: Server -> Message -> Server
respondGet s@Server{..} m@Message{..}
| sState == Leader = s { slog = push command slog }
| sState == Candidate = s { pendingQ = push m pendingQ }
| otherwise = s { sendMe = push redirect sendMe }
where command = Command CGET currentTerm src mid (fromJust key) ""
redirect = Message sid src votedFor REDIRECT mid Nothing Nothing Nothing
respondPut :: Server -> Message -> Server
respondPut s@Server{..} m@Message{..}
| sState == Leader = s { slog = push command slog }
| sState == Candidate = s { pendingQ = push m pendingQ }
| otherwise = s { sendMe = push redirect sendMe }
where command = Command CPUT currentTerm src mid (fromJust key) (fromJust value)
redirect = Message sid src votedFor REDIRECT mid Nothing Nothing Nothing
respondRaft :: UTCTime -> Server -> Message -> Server
respondRaft now s@Server{..} m@Message{..}
| sState == Follower = respondFollower now s m $ fromJust rmess
| sState == Candidate = respondCandidate s m $ fromJust rmess
| otherwise = respondLeader s m $ fromJust rmess
followerRVR :: String -> Int -> String -> String -> Int -> String -> Bool -> Message
followerRVR candidate term mid votedFor currentTerm src success = message
where realTerm = if success then term else currentTerm
realLeader = if success then candidate else votedFor
rvr = Just $ RVR realTerm success
message = Message src candidate realLeader RAFT mid Nothing Nothing rvr
respondFollower :: UTCTime -> Server -> Message -> RMessage -> Server
-- Respond to RV
| term < currentTerm = reject
| upToDate slog lastLogTerm lastLogIndex = grant
grant = s { sendMe = push (baseMessage True) sendMe, votedFor = candidateId, currentTerm = term }
reject = s { sendMe = push (baseMessage False) sendMe }
| mid == "HEARTBEAT" = s { clock = now }
| term < currentTerm = reject
| prevLogIndex <= 0 = succeed
| (length slog - 1 < prevLogIndex) = inconsistent
| (cterm $ (slog!!prevLogIndex)) /= prevLogTerm = inconsistent { slog = deleteSlog }
| otherwise = succeed
where mReject = Message sid src votedFor RAFT mid Nothing Nothing $ Just $ AER currentTerm (-1) False
reject = s { sendMe = push mReject sendMe, clock = now }
mIncons = Message sid src src RAFT mid Nothing Nothing $ Just $ AER term (-1) False
inconsistent = s { votedFor = src, currentTerm = term, sendMe = push mIncons sendMe, clock = now }
deleteSlog = cleanSlog slog prevLogIndex
addSlog = union slog entries
newCommitIndex = getNewCommitIndex leaderCommit commitIndex prevLogIndex (length entries)
mSucceed = Message sid src src RAFT mid Nothing Nothing $ Just $ AER term (length addSlog - 1) True
succeed = s { slog = addSlog,
commitIndex = newCommitIndex,
currentTerm = term,
sendMe = push mSucceed sendMe,
clock = now }
respondFollower _ s _ r = s
respondLeader :: Server -> Message -> RMessage -> Server
-- Respond to another AE
| term > currentTerm = s { sState = Follower, currentTerm = term, votedFor = src }
| otherwise = s
| success == False = s { nextIndices = HM.adjust (\x -> if x <= 0 then 0 else x - 1) src nextIndices,
messQ = newMessQ }
| success == True = s { nextIndices = HM.insert src newNextIndex nextIndices,
matchIndices = HM.insert src newMatchIndex matchIndices,
messQ = newMessQ }
where newMessQ = HM.delete src messQ
newNextIndex = if lastIndex >= length slog then length slog - 1 else lastIndex + 1
newMatchIndex = if lastIndex >= length slog then length slog - 1 else lastIndex
respondLeader s@Server{..} m@Message{..} _ = s
respondCandidate :: Server -> Message -> RMessage -> Server
| voteGranted == True = s { votes = HS.insert src votes, messQ = HM.delete src messQ }
| otherwise = s
| term >= currentTerm = clearPendingQ $ s { sState = Follower, currentTerm = term, votedFor = src }
| otherwise = s
  -- | term > currentTerm && upToDate slog lastLogTerm lastLogIndex = trace (sid ++ " Cgranting " ++ candidateId ++ " for term " ++ show term ++ ", my term was " ++ (show currentTerm)) grant { sState = Follower }
| term > currentTerm && upToDate slog lastLogTerm lastLogIndex = grant { sState = Follower }
| otherwise = reject
grant = s { sendMe = push (baseMessage True) sendMe,
votedFor = candidateId,
currentTerm = term,
votes = HS.empty }
reject = s { sendMe = push (baseMessage False) sendMe }
respondCandidate s _ r = s
|
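
Server.hs above folds the whole Raft role machine into one record, which makes the election-timeout rule easy to miss. The sketch below isolates just the transition that maybeToCandidate performs; Node, Role, timedOutSince and maybePromote are simplified stand-ins invented here (not the project's Utils.timedOut): a non-leader that has not heard from a leader within its timeout becomes a candidate and bumps its term.

module RaftTimeoutSketch where
import Data.Time (UTCTime, diffUTCTime)
data Role = Follower | Candidate | Leader deriving (Show, Eq)
data Node = Node
  { role              :: Role
  , term              :: Int
  , lastHeard         :: UTCTime  -- analogous to the clock field above
  , electionTimeoutMs :: Int
  } deriving Show
-- Has the election timer expired? (a hedged reconstruction of the timeout test)
timedOutSince :: UTCTime -> UTCTime -> Int -> Bool
timedOutSince before now ms =
  realToFrac (diffUTCTime now before) * 1000 >= fromIntegral ms
-- Leaders never time out; everyone else becomes a candidate, bumps the term
-- and picks a fresh randomized timeout, mirroring maybeToCandidate above.
maybePromote :: UTCTime -> Int -> Node -> Node
maybePromote now newTimeout n
  | role n == Leader = n
  | timedOutSince (lastHeard n) now (electionTimeoutMs n) =
      n { role = Candidate, term = term n + 1, lastHeard = now, electionTimeoutMs = newTimeout }
  | otherwise = n
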
c23d20875f23b036c08707059f9a24d1589a69fa004631161324178413833366
|
haskell-github/github
|
CommitComments.hs
|
module CommitComments where
import qualified Github.Repos.Commits as Github
import Data.List
import Data.Maybe (maybe)
main = do
possibleComments <- Github.commitCommentsFor "thoughtbot" "paperclip" "41f685f6e01396936bb8cd98e7cca517e2c7d96b"
case possibleComments of
(Left error) -> putStrLn $ "Error: " ++ (show error)
(Right comments) -> putStrLn $ intercalate "\n\n" $ map formatComment comments
formatComment :: Github.Comment -> String
formatComment comment =
"Author: " ++ (formatAuthor $ Github.commentUser comment) ++
"\nUpdated: " ++ (show $ Github.commentUpdatedAt comment) ++
(maybe "" ("\nURL: "++) $ Github.commentHtmlUrl comment) ++
"\n\n" ++ (Github.commentBody comment)
formatAuthor :: Github.Owner -> String
formatAuthor user =
(Github.githubOwnerLogin user) ++ " (" ++ (Github.githubOwnerUrl user) ++ ")"
| null |
https://raw.githubusercontent.com/haskell-github/github/81d9b658c33a706f18418211a78d2690752518a4/samples/Repos/Commits/CommitComments.hs
|
haskell
|
module CommitComments where
import qualified Github.Repos.Commits as Github
import Data.List
import Data.Maybe (maybe)
main = do
possibleComments <- Github.commitCommentsFor "thoughtbot" "paperclip" "41f685f6e01396936bb8cd98e7cca517e2c7d96b"
case possibleComments of
(Left error) -> putStrLn $ "Error: " ++ (show error)
(Right comments) -> putStrLn $ intercalate "\n\n" $ map formatComment comments
formatComment :: Github.Comment -> String
formatComment comment =
"Author: " ++ (formatAuthor $ Github.commentUser comment) ++
"\nUpdated: " ++ (show $ Github.commentUpdatedAt comment) ++
(maybe "" ("\nURL: "++) $ Github.commentHtmlUrl comment) ++
"\n\n" ++ (Github.commentBody comment)
formatAuthor :: Github.Owner -> String
formatAuthor user =
(Github.githubOwnerLogin user) ++ " (" ++ (Github.githubOwnerUrl user) ++ ")"
|
|
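
CommitComments.hs above prints every comment on one commit. A small hedged variation, reusing only the calls already shown there (Github.commitCommentsFor and formatComment, assuming the sample is built as the CommitComments module), that reports the number of comments and prints just the first one:

module CommitCommentsSummary where
import qualified Github.Repos.Commits as Github
import CommitComments (formatComment)
-- Fetch the comments for the same commit, report how many there are,
-- then print the first one using the sample's formatter.
main :: IO ()
main = do
  possibleComments <- Github.commitCommentsFor "thoughtbot" "paperclip"
                        "41f685f6e01396936bb8cd98e7cca517e2c7d96b"
  case possibleComments of
    Left err -> putStrLn $ "Error: " ++ show err
    Right [] -> putStrLn "No comments on this commit."
    Right comments@(c:_) -> do
      putStrLn $ "Total comments: " ++ show (length comments)
      putStrLn $ formatComment c
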
d73421118dd029f041b8a871e19b84b7d68c41ebf5c5ae145e29994e3d14e6ad
|
sbcl/sbcl
|
compiler-slow.pure.lisp
|
(with-test (:name (compile eval the type-error))
(checked-compile-and-assert (:optimize :safe)
'(lambda (v)
(list (the fixnum (the (real 0) (eval v)))))
((0.1) (condition 'type-error))
((-1) (condition 'type-error))))
(defun pick-acceptable-default (specifier)
(let ((parse (sb-kernel:specifier-type specifier)))
; (format t "~&testcase: ~s~%" specifier)
(typecase parse
(sb-kernel:character-set-type #\a)
(sb-kernel:numeric-type
(cond ((eq (sb-kernel:numeric-type-class parse) 'float)
(ecase (sb-kernel:numeric-type-complexp parse)
(:real
(ecase (sb-kernel:numeric-type-format parse)
(single-float 1009f0)
(double-float pi)))
(:complex
(ecase (sb-kernel:numeric-type-format parse)
(single-float #c(101f0 -1f0))
(double-float #c(2d0 3.5d0))))))
(t
1)))
(t
(cond ((equal specifier '(or (eql 1.0d0) (eql 10.0d0))) ; KLUDGE
1.0d0)
((equal specifier '(member 1 2 10))
2)
((equal specifier '(complex (member 10.0 20.0)))
(complex 10.0 10.0))
(t
'whatever))))))
(with-test (:name :array-type-predicates)
(dolist (et (list* '(integer -1 200) '(integer -256 1)
'(integer 0 128)
'(integer 0 (128))
'(double-float 0d0 (1d0))
'(single-float (0s0) (1s0))
'(or (eql 1d0) (eql 10d0))
'(member 1 2 10)
'(complex (member 10 20))
'(complex (member 10d0 20d0))
'(complex (member 10s0 20s0))
'(or integer double-float)
'(mod 1)
'(member #\a #\b)
'(eql #\a)
#+sb-unicode 'extended-char
#+sb-unicode '(eql #\cyrillic_small_letter_yu)
(map 'list 'sb-vm:saetp-specifier
sb-vm:*specialized-array-element-type-properties*)))
(when et
(let* ((v (make-array 3 :element-type et
                          ;; Pick an initial element because of the (ELT, v 0)
:initial-element (pick-acceptable-default et))))
(checked-compile-and-assert ()
`(lambda ()
(list (if (typep ,v '(simple-array ,et (*)))
:good
',et)
(if (typep (elt ,v 0) '(simple-array ,et (*)))
',et
:good)))
(() '(:good :good)))))))
(with-test (:name (compile equal equalp :transforms))
(let* ((s "foo")
(bit-vector #*11001100)
(values `(nil 1 2 "test"
;; Floats duplicated here to ensure we get newly created instances
(read-from-string "1.1") (read-from-string "1.2d0")
(read-from-string "1.1") (read-from-string "1.2d0")
1.1 1.2d0 '("foo" "bar" "test")
#(1 2 3 4) #*101010 (make-broadcast-stream) #p"/tmp/file"
,s (copy-seq ,s) ,bit-vector (copy-seq ,bit-vector)
,(make-hash-table) #\a #\b #\A #\C
,(make-random-state) 1/2 2/3)))
(dolist (predicate '(equal equalp))
;; Test all permutations of different types
(loop for x in values
do (loop for y in values
do (checked-compile-and-assert (:optimize nil)
`(lambda (x y)
(,predicate (the ,(type-of x) x)
(the ,(type-of y) y)))
((x y) (funcall predicate x y)))))
(checked-compile-and-assert ()
`(lambda (x y)
(,predicate (the (cons (or simple-bit-vector simple-base-string))
x)
(the (cons (or (and bit-vector (not simple-array))
(simple-array character (*))))
y)))
(((list (string 'list)) (list "LIST")) t)))))
(with-test (:name (sb-c::mask-signed-field :randomized))
(let (result)
(dotimes (i 1000)
(let* ((ool (checked-compile '(lambda (s i)
(sb-c::mask-signed-field s i))))
(size (random (* sb-vm:n-word-bits 2)))
(constant (checked-compile `(lambda (i)
(sb-c::mask-signed-field ,size i))))
(arg (- (random (* most-positive-fixnum 8)) (* most-positive-fixnum 4)))
(declared (checked-compile `(lambda (i)
(declare (type (integer ,(- (abs arg)) ,(abs arg)) i))
(sb-c::mask-signed-field ,size i))))
(ool-answer (funcall ool size arg))
(constant-answer (funcall constant arg))
(declared-answer (funcall declared arg)))
(unless (= ool-answer constant-answer declared-answer)
(push (list size arg ool-answer constant-answer declared-answer) result))))
(assert (null result))))
(with-test (:name (multiple-value-call :type-checking-rest))
(checked-compile-and-assert (:allow-warnings t
:optimize :safe)
`(lambda (list)
(multiple-value-call
(lambda (&optional a &rest r)
(declare ((satisfies eval) r)
(ignore r))
(list a))
(values-list list)))
(('(1 list 2)) '(1))
(('(1)) (condition 'type-error))))
(with-test (:name (multiple-value-call :type-checking-rest.2))
(checked-compile-and-assert (:allow-warnings t
:optimize :safe)
`(lambda (list)
(multiple-value-call
(lambda (&optional a &rest r)
(declare (null r)
(ignore r))
(list a))
(values-list list)))
(('(1 list 2)) (condition 'type-error))
(('(1)) '(1))))
(with-test (:name (multiple-value-call :type-checking-rest :type-derivation))
(checked-compile-and-assert (:allow-warnings t
:optimize :safe)
`(lambda (list)
(multiple-value-call
(lambda (&optional a &rest r)
(declare (cons r)
(ignore r))
(list a))
(values-list list)))
(('(1 2)) '(1))
(('(1)) (condition 'type-error))))
(declaim (maybe-inline inline-recursive))
(defun inline-recursive (x)
(declare (muffle-conditions compiler-note
style-warning))
(if (zerop x)
x
(inline-recursive (1- x))))
(declaim (inline inline-recursive))
(with-test (:name :reanalyze-functionals-when-inlining)
(checked-compile-and-assert
()
`(lambda (x)
(inline-recursive x)
(inline-recursive x))
((5) 0)))
(with-test (:name :interval-div-zero)
(checked-compile-and-assert (:optimize :safe)
`(lambda (x y)
(truncate (the (integer 0 0) x)
(the (rational (1) (2)) y)))
((0 3/2) (values 0 0))))
(with-test (:name :float-quotient-rounding-errors)
(checked-compile-and-assert (:optimize :safe)
`(lambda ()
(floor -114658225103614 84619.58))
(() (values -1354984705 8473228.0)))
(checked-compile-and-assert (:optimize :safe)
`(lambda ()
(floor -302254842 50510.5))
(() (eval '(floor -302254842 50510.5))))
(checked-compile-and-assert (:optimize :safe)
`(lambda ()
(ceiling 114658225103614 84619.58))
(() (values 1354984705 -8473228.0)))
(checked-compile-and-assert (:optimize :safe)
`(lambda ()
(ceiling 285493348393 94189.93))
(() (values 3031039 0.0))))
(with-test (:name :check-function-designator-cast-key-lambda-var)
(checked-compile-and-assert
(:optimize '(:speed 3 :space 0))
`(lambda (p1 p4)
(declare (vector p1)
((member ,#'car "x" cdr) p4))
(stable-sort p1 #'<= :key p4))
(((vector '(2) '(3) '(1)) #'car) #((1) (2) (3)) :test #'equalp)))
(with-test (:name :functional-may-escape-p
            ;; INVALID-UNWIND-ERROR crashes fatally on ppc32. Not sure as of when.
:skipped-on :ppc32)
(checked-compile-and-assert
(:optimize :safe)
'(lambda ()
(let (x)
(block nil
(flet ((x () (let (*)
(return 33))))
(setf x #'x)))
(funcall x)))
(() (condition 'control-error))))
(with-test (:name :lvar-fun-type-on-literal-funs)
(checked-compile-and-assert
()
`(lambda (p)
(declare (type (or null string) p))
(locally (declare (optimize (space 0)))
(stable-sort p ,#'string<)))
(((copy-seq "acb")) "abc" :test #'equal)))
(with-test (:name :ir2-optimize-jumps-multiway-branch-if-eq-delete-branch)
(checked-compile-and-assert
()
`(lambda (a)
(declare (type (integer -345 1) a))
(case (ldb (byte 24 5) a)
((4 47 61 17 10 39) 1)
((2 7 55) A)
((42 48 16 33 40 20) A)
((60 54 28) 3)
((15 1 44 29 57 41 52) 32771)
((46 64 3 18 36 49 37) 1)
(t A)))
((-5) -5)))
| null |
https://raw.githubusercontent.com/sbcl/sbcl/db5f383e5fc0fbe0155dc557ba6ac5ed77fd8a8f/tests/compiler-slow.pure.lisp
|
lisp
|
(format t "~&testcase: ~s~%" specifier)
KLUDGE
Floats duplicated here to ensure we get newly created instances
Test all permutations of different types
|
(with-test (:name (compile eval the type-error))
(checked-compile-and-assert (:optimize :safe)
'(lambda (v)
(list (the fixnum (the (real 0) (eval v)))))
((0.1) (condition 'type-error))
((-1) (condition 'type-error))))
(defun pick-acceptable-default (specifier)
(let ((parse (sb-kernel:specifier-type specifier)))
(typecase parse
(sb-kernel:character-set-type #\a)
(sb-kernel:numeric-type
(cond ((eq (sb-kernel:numeric-type-class parse) 'float)
(ecase (sb-kernel:numeric-type-complexp parse)
(:real
(ecase (sb-kernel:numeric-type-format parse)
(single-float 1009f0)
(double-float pi)))
(:complex
(ecase (sb-kernel:numeric-type-format parse)
(single-float #c(101f0 -1f0))
(double-float #c(2d0 3.5d0))))))
(t
1)))
(t
1.0d0)
((equal specifier '(member 1 2 10))
2)
((equal specifier '(complex (member 10.0 20.0)))
(complex 10.0 10.0))
(t
'whatever))))))
(with-test (:name :array-type-predicates)
(dolist (et (list* '(integer -1 200) '(integer -256 1)
'(integer 0 128)
'(integer 0 (128))
'(double-float 0d0 (1d0))
'(single-float (0s0) (1s0))
'(or (eql 1d0) (eql 10d0))
'(member 1 2 10)
'(complex (member 10 20))
'(complex (member 10d0 20d0))
'(complex (member 10s0 20s0))
'(or integer double-float)
'(mod 1)
'(member #\a #\b)
'(eql #\a)
#+sb-unicode 'extended-char
#+sb-unicode '(eql #\cyrillic_small_letter_yu)
(map 'list 'sb-vm:saetp-specifier
sb-vm:*specialized-array-element-type-properties*)))
(when et
(let* ((v (make-array 3 :element-type et
Pick an initial element because of the (ELT v 0)
:initial-element (pick-acceptable-default et))))
(checked-compile-and-assert ()
`(lambda ()
(list (if (typep ,v '(simple-array ,et (*)))
:good
',et)
(if (typep (elt ,v 0) '(simple-array ,et (*)))
',et
:good)))
(() '(:good :good)))))))
(with-test (:name (compile equal equalp :transforms))
(let* ((s "foo")
(bit-vector #*11001100)
(values `(nil 1 2 "test"
(read-from-string "1.1") (read-from-string "1.2d0")
(read-from-string "1.1") (read-from-string "1.2d0")
1.1 1.2d0 '("foo" "bar" "test")
#(1 2 3 4) #*101010 (make-broadcast-stream) #p"/tmp/file"
,s (copy-seq ,s) ,bit-vector (copy-seq ,bit-vector)
,(make-hash-table) #\a #\b #\A #\C
,(make-random-state) 1/2 2/3)))
(dolist (predicate '(equal equalp))
(loop for x in values
do (loop for y in values
do (checked-compile-and-assert (:optimize nil)
`(lambda (x y)
(,predicate (the ,(type-of x) x)
(the ,(type-of y) y)))
((x y) (funcall predicate x y)))))
(checked-compile-and-assert ()
`(lambda (x y)
(,predicate (the (cons (or simple-bit-vector simple-base-string))
x)
(the (cons (or (and bit-vector (not simple-array))
(simple-array character (*))))
y)))
(((list (string 'list)) (list "LIST")) t)))))
(with-test (:name (sb-c::mask-signed-field :randomized))
(let (result)
(dotimes (i 1000)
(let* ((ool (checked-compile '(lambda (s i)
(sb-c::mask-signed-field s i))))
(size (random (* sb-vm:n-word-bits 2)))
(constant (checked-compile `(lambda (i)
(sb-c::mask-signed-field ,size i))))
(arg (- (random (* most-positive-fixnum 8)) (* most-positive-fixnum 4)))
(declared (checked-compile `(lambda (i)
(declare (type (integer ,(- (abs arg)) ,(abs arg)) i))
(sb-c::mask-signed-field ,size i))))
(ool-answer (funcall ool size arg))
(constant-answer (funcall constant arg))
(declared-answer (funcall declared arg)))
(unless (= ool-answer constant-answer declared-answer)
(push (list size arg ool-answer constant-answer declared-answer) result))))
(assert (null result))))
(with-test (:name (multiple-value-call :type-checking-rest))
(checked-compile-and-assert (:allow-warnings t
:optimize :safe)
`(lambda (list)
(multiple-value-call
(lambda (&optional a &rest r)
(declare ((satisfies eval) r)
(ignore r))
(list a))
(values-list list)))
(('(1 list 2)) '(1))
(('(1)) (condition 'type-error))))
(with-test (:name (multiple-value-call :type-checking-rest.2))
(checked-compile-and-assert (:allow-warnings t
:optimize :safe)
`(lambda (list)
(multiple-value-call
(lambda (&optional a &rest r)
(declare (null r)
(ignore r))
(list a))
(values-list list)))
(('(1 list 2)) (condition 'type-error))
(('(1)) '(1))))
(with-test (:name (multiple-value-call :type-checking-rest :type-derivation))
(checked-compile-and-assert (:allow-warnings t
:optimize :safe)
`(lambda (list)
(multiple-value-call
(lambda (&optional a &rest r)
(declare (cons r)
(ignore r))
(list a))
(values-list list)))
(('(1 2)) '(1))
(('(1)) (condition 'type-error))))
(declaim (maybe-inline inline-recursive))
(defun inline-recursive (x)
(declare (muffle-conditions compiler-note
style-warning))
(if (zerop x)
x
(inline-recursive (1- x))))
(declaim (inline inline-recursive))
(with-test (:name :reanalyze-functionals-when-inlining)
(checked-compile-and-assert
()
`(lambda (x)
(inline-recursive x)
(inline-recursive x))
((5) 0)))
(with-test (:name :interval-div-zero)
(checked-compile-and-assert (:optimize :safe)
`(lambda (x y)
(truncate (the (integer 0 0) x)
(the (rational (1) (2)) y)))
((0 3/2) (values 0 0))))
(with-test (:name :float-quotient-rounding-errors)
(checked-compile-and-assert (:optimize :safe)
`(lambda ()
(floor -114658225103614 84619.58))
(() (values -1354984705 8473228.0)))
(checked-compile-and-assert (:optimize :safe)
`(lambda ()
(floor -302254842 50510.5))
(() (eval '(floor -302254842 50510.5))))
(checked-compile-and-assert (:optimize :safe)
`(lambda ()
(ceiling 114658225103614 84619.58))
(() (values 1354984705 -8473228.0)))
(checked-compile-and-assert (:optimize :safe)
`(lambda ()
(ceiling 285493348393 94189.93))
(() (values 3031039 0.0))))
(with-test (:name :check-function-designator-cast-key-lambda-var)
(checked-compile-and-assert
(:optimize '(:speed 3 :space 0))
`(lambda (p1 p4)
(declare (vector p1)
((member ,#'car "x" cdr) p4))
(stable-sort p1 #'<= :key p4))
(((vector '(2) '(3) '(1)) #'car) #((1) (2) (3)) :test #'equalp)))
(with-test (:name :functional-may-escape-p
INVALID-UNWIND-ERROR crashes fatally on ppc32. Not sure as of when.
:skipped-on :ppc32)
(checked-compile-and-assert
(:optimize :safe)
'(lambda ()
(let (x)
(block nil
(flet ((x () (let (*)
(return 33))))
(setf x #'x)))
(funcall x)))
(() (condition 'control-error))))
(with-test (:name :lvar-fun-type-on-literal-funs)
(checked-compile-and-assert
()
`(lambda (p)
(declare (type (or null string) p))
(locally (declare (optimize (space 0)))
(stable-sort p ,#'string<)))
(((copy-seq "acb")) "abc" :test #'equal)))
(with-test (:name :ir2-optimize-jumps-multiway-branch-if-eq-delete-branch)
(checked-compile-and-assert
()
`(lambda (a)
(declare (type (integer -345 1) a))
(case (ldb (byte 24 5) a)
((4 47 61 17 10 39) 1)
((2 7 55) A)
((42 48 16 33 40 20) A)
((60 54 28) 3)
((15 1 44 29 57 41 52) 32771)
((46 64 3 18 36 49 37) 1)
(t A)))
((-5) -5)))
|
bda930969b94aba42663deffc39263f8ae053914825ab6ad40eb6de2149de779
|
cirodrig/triolet
|
Parser.hs
|
{-# OPTIONS #-}
-----------------------------------------------------------------------------
-- |
-- Module : Language.Python.Version3.Parser
-- Copyright : (c) 2009
-- License : BSD-style
-- Maintainer :
-- Stability : experimental
-- Portability : ghc
--
-- A parser for Python version 3.x programs. Parsers are provided for
-- modules, statements, and expressions. The parsers produce comment tokens
-- in addition to the abstract syntax tree.
--
-- See:
--
-- * <> for an overview of the language.
--
-- * <> for the full grammar.
--
-- * <> for a description of
-- the various Python top-levels, which correspond to the parsers provided here.
-----------------------------------------------------------------------------
module Language.Python.Version3.Parser (
-- * Parsing modules
parseModule,
-- * Parsing statements
parseStmt,
-- * Parsing expressions
parseExpr) where
import Language.Python.Version3.Parser.Parser (parseFileInput, parseSingleInput, parseEval)
import Language.Python.Version3.Parser.Lexer (initStartCodeStack)
import Language.Python.Common.AST (ModuleSpan, StatementSpan, ExprSpan)
import Language.Python.Common.Token (Token)
import Language.Python.Common.SrcLocation (initialSrcLocation)
import Language.Python.Common.ParserMonad (execParser, execParserKeepComments, ParseError, initialState)
-- | Parse a whole Python source file. Return comments in addition to the parsed module.
parseModule :: String -- ^ The input stream (python module source code).
-> String -- ^ The name of the python source (filename or input device).
-> Either ParseError (ModuleSpan, [Token]) -- ^ An error or the abstract syntax tree (AST) of the python module and comment tokens.
parseModule input srcName =
execParserKeepComments parseFileInput state
where
initLoc = initialSrcLocation srcName
state = initialState initLoc input initStartCodeStack
-- | Parse one compound statement, or a sequence of simple statements. Generally used for interactive input, such as from the command line of an interpreter. Return comments in addition to the parsed statements.
parseStmt :: String -- ^ The input stream (python statement source code).
-> String -- ^ The name of the python source (filename or input device).
-> Either ParseError ([StatementSpan], [Token]) -- ^ An error or maybe the abstract syntax tree (AST) of zero or more python statements, plus comments.
parseStmt input srcName =
execParserKeepComments parseSingleInput state
where
initLoc = initialSrcLocation srcName
state = initialState initLoc input initStartCodeStack
-- | Parse an expression. Generally used as input for the \'eval\' primitive. Return comments in addition to the parsed expression.
parseExpr :: String -- ^ The input stream (python statement source code).
-> String -- ^ The name of the python source (filename or input device).
-> Either ParseError (ExprSpan, [Token]) -- ^ An error or maybe the abstract syntax tree (AST) of the python expression, plus comment tokens.
parseExpr input srcName =
execParserKeepComments parseEval state
where
initLoc = initialSrcLocation srcName
state = initialState initLoc input initStartCodeStack
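-- A minimal usage sketch of the parsers above: parse a tiny module and report
-- how many comment tokens were produced. The input string and the source name
-- "<example>" are illustrative values only, and the error case is reported
-- generically instead of relying on any particular Show instance.
exampleParse :: IO ()
exampleParse =
  case parseModule "x = 1\n" "<example>" of
    Left _err -> putStrLn "parse error"
    Right (_ast, tokens) -> putStrLn ("comment tokens: " ++ show (length tokens))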
| null |
https://raw.githubusercontent.com/cirodrig/triolet/e515a1dc0d6b3e546320eac7b71fb36cea5b53d0/src/program/Language/Python/Version3/Parser.hs
|
haskell
|
# OPTIONS #
---------------------------------------------------------------------------
|
License : BSD-style
Stability : experimental
modules, statements, and expressions. The parsers produce comment tokens
in addition to the abstract syntax tree.
See:
* <> for an overview of the language.
* <> for the full grammar.
* <> for a description of
the various Python top-levels, which correspond to the parsers provided here.
---------------------------------------------------------------------------
* Parsing modules
* Parsing statements
* Parsing expressions
| Parse a whole Python source file. Return comments in addition to the parsed module.
^ The input stream (python module source code).
^ The name of the python source (filename or input device).
^ An error or the abstract syntax tree (AST) of the python module and comment tokens.
^ The input stream (python statement source code).
^ The name of the python source (filename or input device).
| Parse an expression. Generally used as input for the \'eval\' primitive. Return comments in addition to the parsed expression.
^ The input stream (python statement source code).
^ The name of the python source (filename or input device).
^ An error or maybe the abstract syntax tree (AST) of the python expression, plus comment tokens.
|
Module : Language.Python.Version3.Parser
Copyright : (c) 2009
Maintainer :
Portability : ghc
A parser for Python version 3.x programs. Parsers are provided for
module Language.Python.Version3.Parser (
parseModule,
parseStmt,
parseExpr) where
import Language.Python.Version3.Parser.Parser (parseFileInput, parseSingleInput, parseEval)
import Language.Python.Version3.Parser.Lexer (initStartCodeStack)
import Language.Python.Common.AST (ModuleSpan, StatementSpan, ExprSpan)
import Language.Python.Common.Token (Token)
import Language.Python.Common.SrcLocation (initialSrcLocation)
import Language.Python.Common.ParserMonad (execParser, execParserKeepComments, ParseError, initialState)
parseModule input srcName =
execParserKeepComments parseFileInput state
where
initLoc = initialSrcLocation srcName
state = initialState initLoc input initStartCodeStack
| Parse one compound statement, or a sequence of simple statements. Generally used for interactive input, such as from the command line of an interpreter. Return comments in addition to the parsed statements.
^ An error or maybe the abstract syntax tree (AST) of zero or more python statements, plus comments.
parseStmt input srcName =
execParserKeepComments parseSingleInput state
where
initLoc = initialSrcLocation srcName
state = initialState initLoc input initStartCodeStack
parseExpr input srcName =
execParserKeepComments parseEval state
where
initLoc = initialSrcLocation srcName
state = initialState initLoc input initStartCodeStack
|
052d0336966a864e4092bbb1465a759a01dba11cd5b7274e754cbf122ae30d6a
|
input-output-hk/plutus
|
Pairs.hs
|
module Benchmarks.Pairs (makeBenchmarks) where
import Common
import Generators
import PlutusCore
import Criterion.Main
import System.Random (StdGen)
-- The pair projection operations should be constant time, but we check that by
-- giving it a list of pairs whose components are of increasing size.
benchPairOp :: StdGen -> DefaultFun -> Benchmark
benchPairOp gen fun =
createOneTermBuiltinBench fun [integer, bytestring] pairs
where pairs = zip ints bytestrings
(ints, _) = makeSizedIntegers gen [1..100]
bytestrings = makeSizedByteStrings seedA [1..100]
makeBenchmarks :: StdGen -> [Benchmark]
makeBenchmarks gen = benchPairOp gen <$> [FstPair, SndPair]
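-- A possible standalone driver for the benchmarks above, shown only as a
-- sketch: in the real cost-model suite the benchmark groups are collected and
-- run by a separate main. It assumes 'newStdGen' is additionally imported
-- from System.Random alongside 'StdGen'.
--
-- > main :: IO ()
-- > main = do
-- >   gen <- newStdGen
-- >   defaultMain (makeBenchmarks gen)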
| null |
https://raw.githubusercontent.com/input-output-hk/plutus/1f31e640e8a258185db01fa899da63f9018c0e85/plutus-core/cost-model/budgeting-bench/Benchmarks/Pairs.hs
|
haskell
|
The pair projection operations should be constant time, but we check that by
giving it a list of pairs whose components are of increasing size.
|
module Benchmarks.Pairs (makeBenchmarks) where
import Common
import Generators
import PlutusCore
import Criterion.Main
import System.Random (StdGen)
benchPairOp :: StdGen -> DefaultFun -> Benchmark
benchPairOp gen fun =
createOneTermBuiltinBench fun [integer, bytestring] pairs
where pairs = zip ints bytestrings
(ints, _) = makeSizedIntegers gen [1..100]
bytestrings = makeSizedByteStrings seedA [1..100]
makeBenchmarks :: StdGen -> [Benchmark]
makeBenchmarks gen = benchPairOp gen <$> [FstPair, SndPair]
|
bf01dae148e8b81cbb66bead36f2147371e55b127ea39f22fc11ee9a5c9e37d0
|
modular-macros/ocaml-macros
|
attr_helper.ml
|
(**************************************************************************)
(* *)
(* OCaml *)
(* *)
(* , Jane Street Europe *)
(* *)
(* Copyright 2015 Institut National de Recherche en Informatique et *)
(* en Automatique. *)
(* *)
(* All rights reserved. This file is distributed under the terms of *)
(* the GNU Lesser General Public License version 2.1, with the *)
(* special exception on linking described in the file LICENSE. *)
(* *)
(**************************************************************************)
open Asttypes
open Parsetree
type error =
| Multiple_attributes of string
| No_payload_expected of string
exception Error of Location.t * error
let get_no_payload_attribute alt_names attrs =
match List.filter (fun (n, _) -> List.mem n.txt alt_names) attrs with
| [] -> None
| [ (name, PStr []) ] -> Some name
| [ (name, _) ] ->
raise (Error (name.loc, No_payload_expected name.txt))
| _ :: (name, _) :: _ ->
raise (Error (name.loc, Multiple_attributes name.txt))
let has_no_payload_attribute alt_names attrs =
match get_no_payload_attribute alt_names attrs with
| None -> false
| Some _ -> true
open Format
let report_error ppf = function
| Multiple_attributes name ->
fprintf ppf "Too many `%s' attributes" name
| No_payload_expected name ->
fprintf ppf "Attribute `%s' does not accept a payload" name
let () =
Location.register_error_of_exn
(function
| Error (loc, err) ->
Some (Location.error_of_printer loc report_error err)
| _ ->
None
)
| null |
https://raw.githubusercontent.com/modular-macros/ocaml-macros/05372c7248b5a7b1aa507b3c581f710380f17fcd/parsing/attr_helper.ml
|
ocaml
|
************************************************************************
OCaml
en Automatique.
All rights reserved. This file is distributed under the terms of
special exception on linking described in the file LICENSE.
************************************************************************
|
, Jane Street Europe
Copyright 2015 Institut National de Recherche en Informatique et
the GNU Lesser General Public License version 2.1, with the
open Asttypes
open Parsetree
type error =
| Multiple_attributes of string
| No_payload_expected of string
exception Error of Location.t * error
let get_no_payload_attribute alt_names attrs =
match List.filter (fun (n, _) -> List.mem n.txt alt_names) attrs with
| [] -> None
| [ (name, PStr []) ] -> Some name
| [ (name, _) ] ->
raise (Error (name.loc, No_payload_expected name.txt))
| _ :: (name, _) :: _ ->
raise (Error (name.loc, Multiple_attributes name.txt))
let has_no_payload_attribute alt_names attrs =
match get_no_payload_attribute alt_names attrs with
| None -> false
| Some _ -> true
open Format
let report_error ppf = function
| Multiple_attributes name ->
fprintf ppf "Too many `%s' attributes" name
| No_payload_expected name ->
fprintf ppf "Attribute `%s' does not accept a payload" name
let () =
Location.register_error_of_exn
(function
| Error (loc, err) ->
Some (Location.error_of_printer loc report_error err)
| _ ->
None
)
|
53afbc0da059d9c904468e09a23381cf76a56141c64aba5914fcc063e05c9120
|
bmeurer/ocamljit2
|
equations.mli
|
(***********************************************************************)
(* *)
(* Objective Caml *)
(* *)
(* , projet Cristal, INRIA Rocquencourt *)
(* *)
(* Copyright 1996 Institut National de Recherche en Informatique et *)
(* en Automatique. All rights reserved. This file is distributed *)
(* under the terms of the Q Public License version 1.0. *)
(* *)
(***********************************************************************)
(* $Id$ *)
open Terms
type rule =
{ number: int;
numvars: int;
lhs: term;
rhs: term }
val mk_rule: int -> term -> term -> rule
val check_rules: rule list -> int
val pretty_rule: rule -> unit
val pretty_rules: rule list -> unit
val reduce: term -> term -> term -> term
val reducible: term -> term -> bool
val mreduce: rule list -> term -> term
val mrewrite1: rule list -> term -> term
val mrewrite1_sons: rule list -> term list -> term list
val mrewrite_all: rule list -> term -> term
| null |
https://raw.githubusercontent.com/bmeurer/ocamljit2/ef06db5c688c1160acc1de1f63c29473bcd0055c/testsuite/tests/misc-kb/equations.mli
|
ocaml
|
*********************************************************************
Objective Caml
*********************************************************************
|
, projet Cristal, INRIA Rocquencourt
Copyright 1996 Institut National de Recherche en Informatique et
en Automatique. All rights reserved. This file is distributed
under the terms of the Q Public License version 1.0.
$Id$
open Terms
type rule =
{ number: int;
numvars: int;
lhs: term;
rhs: term }
val mk_rule: int -> term -> term -> rule
val check_rules: rule list -> int
val pretty_rule: rule -> unit
val pretty_rules: rule list -> unit
val reduce: term -> term -> term -> term
val reducible: term -> term -> bool
val mreduce: rule list -> term -> term
val mrewrite1: rule list -> term -> term
val mrewrite1_sons: rule list -> term list -> term list
val mrewrite_all: rule list -> term -> term
|
d90801bdb6bb0222326ba6db9c3f7b9b11acee59e3fa35cd8bda22528c4d2322
|
green-labs/gosura
|
node.clj
|
(ns gosura.helpers.node
(:require [camel-snake-kebab.core :as csk]
[com.walmartlabs.lacinia.schema :as schema]))
(defn tag-with-subtype
[{:keys [subtype] :as row} subtype->node-type]
(let [node-type (get subtype->node-type subtype)]
(-> row
(assoc :node-type node-type)
(schema/tag-with-type (csk/->PascalCaseKeyword node-type)))))
| null |
https://raw.githubusercontent.com/green-labs/gosura/94f9dc493424668b7c5c8d497049d86d1b752305/src/gosura/helpers/node.clj
|
clojure
|
(ns gosura.helpers.node
(:require [camel-snake-kebab.core :as csk]
[com.walmartlabs.lacinia.schema :as schema]))
(defn tag-with-subtype
[{:keys [subtype] :as row} subtype->node-type]
(let [node-type (get subtype->node-type subtype)]
(-> row
(assoc :node-type node-type)
(schema/tag-with-type (csk/->PascalCaseKeyword node-type)))))
|
|
3589174318d3d9a75a9949789f4128c7e1e54130eff40ce574bb0a0544f4079e
|
exercism/common-lisp
|
pythagorean-triplet.lisp
|
(defpackage :pythagorean-triplet
(:use :cl)
(:export :triplets-with-sum))
(in-package :pythagorean-triplet)
(defun triplets-with-sum (n))
| null |
https://raw.githubusercontent.com/exercism/common-lisp/196a0e72bd57b42e16ff8a70ba1e67d20350b725/exercises/practice/pythagorean-triplet/pythagorean-triplet.lisp
|
lisp
|
(defpackage :pythagorean-triplet
(:use :cl)
(:export :triplets-with-sum))
(in-package :pythagorean-triplet)
(defun triplets-with-sum (n))
|
|
0946ca33c7a3fe61e07c5c93f466cc79cd0cbb93ede4e8ee467fa1d3d3549d45
|
exoscale/clojure-kubernetes-client
|
v1_subject.clj
|
(ns clojure-kubernetes-client.specs.v1-subject
(:require [clojure.spec.alpha :as s]
[spec-tools.data-spec :as ds]
)
(:import (java.io File)))
(declare v1-subject-data v1-subject)
(def v1-subject-data
{
(ds/opt :apiGroup) string?
(ds/req :kind) string?
(ds/req :name) string?
(ds/opt :namespace) string?
})
(def v1-subject
(ds/spec
{:name ::v1-subject
:spec v1-subject-data}))
| null |
https://raw.githubusercontent.com/exoscale/clojure-kubernetes-client/79d84417f28d048c5ac015c17e3926c73e6ac668/src/clojure_kubernetes_client/specs/v1_subject.clj
|
clojure
|
(ns clojure-kubernetes-client.specs.v1-subject
(:require [clojure.spec.alpha :as s]
[spec-tools.data-spec :as ds]
)
(:import (java.io File)))
(declare v1-subject-data v1-subject)
(def v1-subject-data
{
(ds/opt :apiGroup) string?
(ds/req :kind) string?
(ds/req :name) string?
(ds/opt :namespace) string?
})
(def v1-subject
(ds/spec
{:name ::v1-subject
:spec v1-subject-data}))
|
|
91a833099baef9f32e05a348085ab18b431c786bc0f719977ba928ff82d36db2
|
mfikes/fifth-postulate
|
ns199.cljs
|
(ns fifth-postulate.ns199)
(defn solve-for01 [xs v]
(for [ndx0 (range 0 (- (count xs) 3))
ndx1 (range (inc ndx0) (- (count xs) 2))
ndx2 (range (inc ndx1) (- (count xs) 1))
ndx3 (range (inc ndx2) (count xs))
:when (= v (+ (xs ndx0) (xs ndx1) (xs ndx2) (xs ndx3)))]
(list (xs ndx0) (xs ndx1) (xs ndx2) (xs ndx3))))
(defn solve-for02 [xs v]
(for [ndx0 (range 0 (- (count xs) 3))
ndx1 (range (inc ndx0) (- (count xs) 2))
ndx2 (range (inc ndx1) (- (count xs) 1))
ndx3 (range (inc ndx2) (count xs))
:when (= v (+ (xs ndx0) (xs ndx1) (xs ndx2) (xs ndx3)))]
(list (xs ndx0) (xs ndx1) (xs ndx2) (xs ndx3))))
(defn solve-for03 [xs v]
(for [ndx0 (range 0 (- (count xs) 3))
ndx1 (range (inc ndx0) (- (count xs) 2))
ndx2 (range (inc ndx1) (- (count xs) 1))
ndx3 (range (inc ndx2) (count xs))
:when (= v (+ (xs ndx0) (xs ndx1) (xs ndx2) (xs ndx3)))]
(list (xs ndx0) (xs ndx1) (xs ndx2) (xs ndx3))))
(defn solve-for04 [xs v]
(for [ndx0 (range 0 (- (count xs) 3))
ndx1 (range (inc ndx0) (- (count xs) 2))
ndx2 (range (inc ndx1) (- (count xs) 1))
ndx3 (range (inc ndx2) (count xs))
:when (= v (+ (xs ndx0) (xs ndx1) (xs ndx2) (xs ndx3)))]
(list (xs ndx0) (xs ndx1) (xs ndx2) (xs ndx3))))
(defn solve-for05 [xs v]
(for [ndx0 (range 0 (- (count xs) 3))
ndx1 (range (inc ndx0) (- (count xs) 2))
ndx2 (range (inc ndx1) (- (count xs) 1))
ndx3 (range (inc ndx2) (count xs))
:when (= v (+ (xs ndx0) (xs ndx1) (xs ndx2) (xs ndx3)))]
(list (xs ndx0) (xs ndx1) (xs ndx2) (xs ndx3))))
(defn solve-for06 [xs v]
(for [ndx0 (range 0 (- (count xs) 3))
ndx1 (range (inc ndx0) (- (count xs) 2))
ndx2 (range (inc ndx1) (- (count xs) 1))
ndx3 (range (inc ndx2) (count xs))
:when (= v (+ (xs ndx0) (xs ndx1) (xs ndx2) (xs ndx3)))]
(list (xs ndx0) (xs ndx1) (xs ndx2) (xs ndx3))))
(defn solve-for07 [xs v]
(for [ndx0 (range 0 (- (count xs) 3))
ndx1 (range (inc ndx0) (- (count xs) 2))
ndx2 (range (inc ndx1) (- (count xs) 1))
ndx3 (range (inc ndx2) (count xs))
:when (= v (+ (xs ndx0) (xs ndx1) (xs ndx2) (xs ndx3)))]
(list (xs ndx0) (xs ndx1) (xs ndx2) (xs ndx3))))
(defn solve-for08 [xs v]
(for [ndx0 (range 0 (- (count xs) 3))
ndx1 (range (inc ndx0) (- (count xs) 2))
ndx2 (range (inc ndx1) (- (count xs) 1))
ndx3 (range (inc ndx2) (count xs))
:when (= v (+ (xs ndx0) (xs ndx1) (xs ndx2) (xs ndx3)))]
(list (xs ndx0) (xs ndx1) (xs ndx2) (xs ndx3))))
(defn solve-for09 [xs v]
(for [ndx0 (range 0 (- (count xs) 3))
ndx1 (range (inc ndx0) (- (count xs) 2))
ndx2 (range (inc ndx1) (- (count xs) 1))
ndx3 (range (inc ndx2) (count xs))
:when (= v (+ (xs ndx0) (xs ndx1) (xs ndx2) (xs ndx3)))]
(list (xs ndx0) (xs ndx1) (xs ndx2) (xs ndx3))))
(defn solve-for10 [xs v]
(for [ndx0 (range 0 (- (count xs) 3))
ndx1 (range (inc ndx0) (- (count xs) 2))
ndx2 (range (inc ndx1) (- (count xs) 1))
ndx3 (range (inc ndx2) (count xs))
:when (= v (+ (xs ndx0) (xs ndx1) (xs ndx2) (xs ndx3)))]
(list (xs ndx0) (xs ndx1) (xs ndx2) (xs ndx3))))
(defn solve-for11 [xs v]
(for [ndx0 (range 0 (- (count xs) 3))
ndx1 (range (inc ndx0) (- (count xs) 2))
ndx2 (range (inc ndx1) (- (count xs) 1))
ndx3 (range (inc ndx2) (count xs))
:when (= v (+ (xs ndx0) (xs ndx1) (xs ndx2) (xs ndx3)))]
(list (xs ndx0) (xs ndx1) (xs ndx2) (xs ndx3))))
(defn solve-for12 [xs v]
(for [ndx0 (range 0 (- (count xs) 3))
ndx1 (range (inc ndx0) (- (count xs) 2))
ndx2 (range (inc ndx1) (- (count xs) 1))
ndx3 (range (inc ndx2) (count xs))
:when (= v (+ (xs ndx0) (xs ndx1) (xs ndx2) (xs ndx3)))]
(list (xs ndx0) (xs ndx1) (xs ndx2) (xs ndx3))))
(defn solve-for13 [xs v]
(for [ndx0 (range 0 (- (count xs) 3))
ndx1 (range (inc ndx0) (- (count xs) 2))
ndx2 (range (inc ndx1) (- (count xs) 1))
ndx3 (range (inc ndx2) (count xs))
:when (= v (+ (xs ndx0) (xs ndx1) (xs ndx2) (xs ndx3)))]
(list (xs ndx0) (xs ndx1) (xs ndx2) (xs ndx3))))
(defn solve-for14 [xs v]
(for [ndx0 (range 0 (- (count xs) 3))
ndx1 (range (inc ndx0) (- (count xs) 2))
ndx2 (range (inc ndx1) (- (count xs) 1))
ndx3 (range (inc ndx2) (count xs))
:when (= v (+ (xs ndx0) (xs ndx1) (xs ndx2) (xs ndx3)))]
(list (xs ndx0) (xs ndx1) (xs ndx2) (xs ndx3))))
(defn solve-for15 [xs v]
(for [ndx0 (range 0 (- (count xs) 3))
ndx1 (range (inc ndx0) (- (count xs) 2))
ndx2 (range (inc ndx1) (- (count xs) 1))
ndx3 (range (inc ndx2) (count xs))
:when (= v (+ (xs ndx0) (xs ndx1) (xs ndx2) (xs ndx3)))]
(list (xs ndx0) (xs ndx1) (xs ndx2) (xs ndx3))))
(defn solve-for16 [xs v]
(for [ndx0 (range 0 (- (count xs) 3))
ndx1 (range (inc ndx0) (- (count xs) 2))
ndx2 (range (inc ndx1) (- (count xs) 1))
ndx3 (range (inc ndx2) (count xs))
:when (= v (+ (xs ndx0) (xs ndx1) (xs ndx2) (xs ndx3)))]
(list (xs ndx0) (xs ndx1) (xs ndx2) (xs ndx3))))
(defn solve-for17 [xs v]
(for [ndx0 (range 0 (- (count xs) 3))
ndx1 (range (inc ndx0) (- (count xs) 2))
ndx2 (range (inc ndx1) (- (count xs) 1))
ndx3 (range (inc ndx2) (count xs))
:when (= v (+ (xs ndx0) (xs ndx1) (xs ndx2) (xs ndx3)))]
(list (xs ndx0) (xs ndx1) (xs ndx2) (xs ndx3))))
(defn solve-for18 [xs v]
(for [ndx0 (range 0 (- (count xs) 3))
ndx1 (range (inc ndx0) (- (count xs) 2))
ndx2 (range (inc ndx1) (- (count xs) 1))
ndx3 (range (inc ndx2) (count xs))
:when (= v (+ (xs ndx0) (xs ndx1) (xs ndx2) (xs ndx3)))]
(list (xs ndx0) (xs ndx1) (xs ndx2) (xs ndx3))))
(defn solve-for19 [xs v]
(for [ndx0 (range 0 (- (count xs) 3))
ndx1 (range (inc ndx0) (- (count xs) 2))
ndx2 (range (inc ndx1) (- (count xs) 1))
ndx3 (range (inc ndx2) (count xs))
:when (= v (+ (xs ndx0) (xs ndx1) (xs ndx2) (xs ndx3)))]
(list (xs ndx0) (xs ndx1) (xs ndx2) (xs ndx3))))
| null |
https://raw.githubusercontent.com/mfikes/fifth-postulate/22cfd5f8c2b4a2dead1c15a96295bfeb4dba235e/src/fifth_postulate/ns199.cljs
|
clojure
|
(ns fifth-postulate.ns199)
(defn solve-for01 [xs v]
(for [ndx0 (range 0 (- (count xs) 3))
ndx1 (range (inc ndx0) (- (count xs) 2))
ndx2 (range (inc ndx1) (- (count xs) 1))
ndx3 (range (inc ndx2) (count xs))
:when (= v (+ (xs ndx0) (xs ndx1) (xs ndx2) (xs ndx3)))]
(list (xs ndx0) (xs ndx1) (xs ndx2) (xs ndx3))))
(defn solve-for02 [xs v]
(for [ndx0 (range 0 (- (count xs) 3))
ndx1 (range (inc ndx0) (- (count xs) 2))
ndx2 (range (inc ndx1) (- (count xs) 1))
ndx3 (range (inc ndx2) (count xs))
:when (= v (+ (xs ndx0) (xs ndx1) (xs ndx2) (xs ndx3)))]
(list (xs ndx0) (xs ndx1) (xs ndx2) (xs ndx3))))
(defn solve-for03 [xs v]
(for [ndx0 (range 0 (- (count xs) 3))
ndx1 (range (inc ndx0) (- (count xs) 2))
ndx2 (range (inc ndx1) (- (count xs) 1))
ndx3 (range (inc ndx2) (count xs))
:when (= v (+ (xs ndx0) (xs ndx1) (xs ndx2) (xs ndx3)))]
(list (xs ndx0) (xs ndx1) (xs ndx2) (xs ndx3))))
(defn solve-for04 [xs v]
(for [ndx0 (range 0 (- (count xs) 3))
ndx1 (range (inc ndx0) (- (count xs) 2))
ndx2 (range (inc ndx1) (- (count xs) 1))
ndx3 (range (inc ndx2) (count xs))
:when (= v (+ (xs ndx0) (xs ndx1) (xs ndx2) (xs ndx3)))]
(list (xs ndx0) (xs ndx1) (xs ndx2) (xs ndx3))))
(defn solve-for05 [xs v]
(for [ndx0 (range 0 (- (count xs) 3))
ndx1 (range (inc ndx0) (- (count xs) 2))
ndx2 (range (inc ndx1) (- (count xs) 1))
ndx3 (range (inc ndx2) (count xs))
:when (= v (+ (xs ndx0) (xs ndx1) (xs ndx2) (xs ndx3)))]
(list (xs ndx0) (xs ndx1) (xs ndx2) (xs ndx3))))
(defn solve-for06 [xs v]
(for [ndx0 (range 0 (- (count xs) 3))
ndx1 (range (inc ndx0) (- (count xs) 2))
ndx2 (range (inc ndx1) (- (count xs) 1))
ndx3 (range (inc ndx2) (count xs))
:when (= v (+ (xs ndx0) (xs ndx1) (xs ndx2) (xs ndx3)))]
(list (xs ndx0) (xs ndx1) (xs ndx2) (xs ndx3))))
(defn solve-for07 [xs v]
(for [ndx0 (range 0 (- (count xs) 3))
ndx1 (range (inc ndx0) (- (count xs) 2))
ndx2 (range (inc ndx1) (- (count xs) 1))
ndx3 (range (inc ndx2) (count xs))
:when (= v (+ (xs ndx0) (xs ndx1) (xs ndx2) (xs ndx3)))]
(list (xs ndx0) (xs ndx1) (xs ndx2) (xs ndx3))))
(defn solve-for08 [xs v]
(for [ndx0 (range 0 (- (count xs) 3))
ndx1 (range (inc ndx0) (- (count xs) 2))
ndx2 (range (inc ndx1) (- (count xs) 1))
ndx3 (range (inc ndx2) (count xs))
:when (= v (+ (xs ndx0) (xs ndx1) (xs ndx2) (xs ndx3)))]
(list (xs ndx0) (xs ndx1) (xs ndx2) (xs ndx3))))
(defn solve-for09 [xs v]
(for [ndx0 (range 0 (- (count xs) 3))
ndx1 (range (inc ndx0) (- (count xs) 2))
ndx2 (range (inc ndx1) (- (count xs) 1))
ndx3 (range (inc ndx2) (count xs))
:when (= v (+ (xs ndx0) (xs ndx1) (xs ndx2) (xs ndx3)))]
(list (xs ndx0) (xs ndx1) (xs ndx2) (xs ndx3))))
(defn solve-for10 [xs v]
(for [ndx0 (range 0 (- (count xs) 3))
ndx1 (range (inc ndx0) (- (count xs) 2))
ndx2 (range (inc ndx1) (- (count xs) 1))
ndx3 (range (inc ndx2) (count xs))
:when (= v (+ (xs ndx0) (xs ndx1) (xs ndx2) (xs ndx3)))]
(list (xs ndx0) (xs ndx1) (xs ndx2) (xs ndx3))))
(defn solve-for11 [xs v]
(for [ndx0 (range 0 (- (count xs) 3))
ndx1 (range (inc ndx0) (- (count xs) 2))
ndx2 (range (inc ndx1) (- (count xs) 1))
ndx3 (range (inc ndx2) (count xs))
:when (= v (+ (xs ndx0) (xs ndx1) (xs ndx2) (xs ndx3)))]
(list (xs ndx0) (xs ndx1) (xs ndx2) (xs ndx3))))
(defn solve-for12 [xs v]
(for [ndx0 (range 0 (- (count xs) 3))
ndx1 (range (inc ndx0) (- (count xs) 2))
ndx2 (range (inc ndx1) (- (count xs) 1))
ndx3 (range (inc ndx2) (count xs))
:when (= v (+ (xs ndx0) (xs ndx1) (xs ndx2) (xs ndx3)))]
(list (xs ndx0) (xs ndx1) (xs ndx2) (xs ndx3))))
(defn solve-for13 [xs v]
(for [ndx0 (range 0 (- (count xs) 3))
ndx1 (range (inc ndx0) (- (count xs) 2))
ndx2 (range (inc ndx1) (- (count xs) 1))
ndx3 (range (inc ndx2) (count xs))
:when (= v (+ (xs ndx0) (xs ndx1) (xs ndx2) (xs ndx3)))]
(list (xs ndx0) (xs ndx1) (xs ndx2) (xs ndx3))))
(defn solve-for14 [xs v]
(for [ndx0 (range 0 (- (count xs) 3))
ndx1 (range (inc ndx0) (- (count xs) 2))
ndx2 (range (inc ndx1) (- (count xs) 1))
ndx3 (range (inc ndx2) (count xs))
:when (= v (+ (xs ndx0) (xs ndx1) (xs ndx2) (xs ndx3)))]
(list (xs ndx0) (xs ndx1) (xs ndx2) (xs ndx3))))
(defn solve-for15 [xs v]
(for [ndx0 (range 0 (- (count xs) 3))
ndx1 (range (inc ndx0) (- (count xs) 2))
ndx2 (range (inc ndx1) (- (count xs) 1))
ndx3 (range (inc ndx2) (count xs))
:when (= v (+ (xs ndx0) (xs ndx1) (xs ndx2) (xs ndx3)))]
(list (xs ndx0) (xs ndx1) (xs ndx2) (xs ndx3))))
(defn solve-for16 [xs v]
(for [ndx0 (range 0 (- (count xs) 3))
ndx1 (range (inc ndx0) (- (count xs) 2))
ndx2 (range (inc ndx1) (- (count xs) 1))
ndx3 (range (inc ndx2) (count xs))
:when (= v (+ (xs ndx0) (xs ndx1) (xs ndx2) (xs ndx3)))]
(list (xs ndx0) (xs ndx1) (xs ndx2) (xs ndx3))))
(defn solve-for17 [xs v]
(for [ndx0 (range 0 (- (count xs) 3))
ndx1 (range (inc ndx0) (- (count xs) 2))
ndx2 (range (inc ndx1) (- (count xs) 1))
ndx3 (range (inc ndx2) (count xs))
:when (= v (+ (xs ndx0) (xs ndx1) (xs ndx2) (xs ndx3)))]
(list (xs ndx0) (xs ndx1) (xs ndx2) (xs ndx3))))
(defn solve-for18 [xs v]
(for [ndx0 (range 0 (- (count xs) 3))
ndx1 (range (inc ndx0) (- (count xs) 2))
ndx2 (range (inc ndx1) (- (count xs) 1))
ndx3 (range (inc ndx2) (count xs))
:when (= v (+ (xs ndx0) (xs ndx1) (xs ndx2) (xs ndx3)))]
(list (xs ndx0) (xs ndx1) (xs ndx2) (xs ndx3))))
(defn solve-for19 [xs v]
(for [ndx0 (range 0 (- (count xs) 3))
ndx1 (range (inc ndx0) (- (count xs) 2))
ndx2 (range (inc ndx1) (- (count xs) 1))
ndx3 (range (inc ndx2) (count xs))
:when (= v (+ (xs ndx0) (xs ndx1) (xs ndx2) (xs ndx3)))]
(list (xs ndx0) (xs ndx1) (xs ndx2) (xs ndx3))))
|
|
c8436843442b0f3754011676b77d2951abe2a52a650f5bd3f270311bd7a3242e
|
anik545/OwlPPL
|
test_empirical.ml
|
open Ppl
open Core
let todo () = ()
let test_disc =
Samples.(add_sample (add_sample (add_sample (add_sample empty 1) 2) 3) 1)
let test_cont = CSamples.from_dist (normal 0. 1.)
let d_pdf () =
let f = Samples.to_pdf test_disc in
let g = Samples.get_prob test_disc in
Alcotest.(check (list (float 0.000001)))
"check pdf function is correct"
(List.map ~f [ 1; 2; 3; 4 ] @ List.map ~f:g [ 1; 2; 3; 4 ])
([ 0.5; 0.25; 0.25; 0. ] @ [ 0.5; 0.25; 0.25; 0. ])
let d_supp () =
let l = Samples.support test_disc in
Alcotest.(check (list int)) "check support function is correct" l [ 1; 2; 3 ]
let c_pdf () =
let f = CSamples.to_pdf test_cont in
Alcotest.(check pass) "check pdf function is correct" (f 0.) 0.3
let c_cdf () =
let g = CSamples.to_cdf test_cont in
Alcotest.(check pass) "check cdf function is correct" (g 0.) 1.
let c_more_samples () =
let test_cont = CSamples.add_sample test_cont 10. in
let test_cont = CSamples.add_sample test_cont 20. in
let test_cont = CSamples.add_sample test_cont 30. in
(* let g = CSamples.to_cdf test_cont in *)
let v = CSamples.values test_cont in
let v', _ = CSamples.to_pdf_arr test_cont in
let open Float in
Alcotest.(check bool)
"check array out contains values" true
(Array.mem ~equal v 10. && Array.mem ~equal v' 10.)
let tests : unit Alcotest.test list =
[
( "discrete",
[
("from dist", `Quick, todo);
("pdf", `Quick, d_pdf);
(* ("cdf", `Quick, todo); *)
("support", `Quick, d_supp);
] );
( "continuous",
[
("from dist", `Quick, todo);
("pdf", `Quick, c_pdf);
("cdf", `Quick, c_cdf);
("support", `Quick, todo);
(* ("adding samples", `Quick, c_more_samples); *)
] );
]
| null |
https://raw.githubusercontent.com/anik545/OwlPPL/ad650219769d5f32564cc771d63c9a52289043a5/ppl/test/unit_tests/test_empirical.ml
|
ocaml
|
let g = CSamples.to_cdf test_cont in
("cdf", `Quick, todo);
("adding samples", `Quick, c_more_samples);
|
open Ppl
open Core
let todo () = ()
let test_disc =
Samples.(add_sample (add_sample (add_sample (add_sample empty 1) 2) 3) 1)
let test_cont = CSamples.from_dist (normal 0. 1.)
let d_pdf () =
let f = Samples.to_pdf test_disc in
let g = Samples.get_prob test_disc in
Alcotest.(check (list (float 0.000001)))
"check pdf function is correct"
(List.map ~f [ 1; 2; 3; 4 ] @ List.map ~f:g [ 1; 2; 3; 4 ])
([ 0.5; 0.25; 0.25; 0. ] @ [ 0.5; 0.25; 0.25; 0. ])
let d_supp () =
let l = Samples.support test_disc in
Alcotest.(check (list int)) "check support function is correct" l [ 1; 2; 3 ]
let c_pdf () =
let f = CSamples.to_pdf test_cont in
Alcotest.(check pass) "check pdf function is correct" (f 0.) 0.3
let c_cdf () =
let g = CSamples.to_cdf test_cont in
Alcotest.(check pass) "check cdf function is correct" (g 0.) 1.
let c_more_samples () =
let test_cont = CSamples.add_sample test_cont 10. in
let test_cont = CSamples.add_sample test_cont 20. in
let test_cont = CSamples.add_sample test_cont 30. in
let v = CSamples.values test_cont in
let v', _ = CSamples.to_pdf_arr test_cont in
let open Float in
Alcotest.(check bool)
"check array out contains values" true
(Array.mem ~equal v 10. && Array.mem ~equal v' 10.)
let tests : unit Alcotest.test list =
[
( "discrete",
[
("from dist", `Quick, todo);
("pdf", `Quick, d_pdf);
("support", `Quick, d_supp);
] );
( "continuous",
[
("from dist", `Quick, todo);
("pdf", `Quick, c_pdf);
("cdf", `Quick, c_cdf);
("support", `Quick, todo);
] );
]
|
7247ecaca570f6986585c6466fcab387e024f6a4bc69ec8e7e66f196d0372234
|
microsoft/SLAyer
|
CounterExample.mli
|
(* Copyright (c) Microsoft Corporation. All rights reserved. *)
(** Generation of counter-example trace for sdvdefect viewer *)
val disprove : Analysis.t -> bool
| null |
https://raw.githubusercontent.com/microsoft/SLAyer/6f46f6999c18f415bc368b43b5ba3eb54f0b1c04/src/CounterExample.mli
|
ocaml
|
Copyright (c) Microsoft Corporation. All rights reserved.
* Generation of counter-example trace for sdvdefect viewer
val disprove : Analysis.t -> bool
|
|
9b3c035822ecf751637c04b9373256751263b3a5aa82681e8c0c7fd00cf151f4
|
tweag/webauthn
|
WebAuthn.hs
|
{-# OPTIONS_GHC -Wno-missing-import-lists #-}
-- | Stability: provisional
--
-- This is the main module of the library. It re-exports the most commonly
-- needed modules and should be the only module considered stable. The
-- following sections give an overview of how a web application can use the
-- exported symbols to make use of the WebAuthn standard.
--
-- == WebAuthn basics
--
-- For a web application the [WebAuthn standard](/TR/webauthn-2/)
-- allows the creation and use of public key-based credentials for the purpose
-- of strongly authenticating users.
--
-- The WebAuthn standard usually involves three different devices, each of which
-- can be [WebAuthn conformant](/TR/webauthn-2/#sctn-conformance):
--
-- * A [Relying Party](-2/#webauthn-relying-party),
-- aka a __website__ like @github.com@ or @google.com@
-- * A [User Agent](-2/#conforming-user-agent),
-- aka usually a __browser__ like or Chrome
-- * A [Authenticator](-2/#authenticator),
-- aka something like a __security key__ like a Yubikey or a fingerprint sensor
--
-- This library implements the server side of [Relying Party conformance](/TR/webauthn-2/#sctn-conforming-relying-parties)
-- and is therefore intended to be used by a website's server.
--
-- == Ceremonies
-- In order for a Relying Party to be conformant, it needs to implement two
-- [Relying Party Operations](-2/#sctn-rp-operations),
-- aka [ceremonies](-2/#ceremony):
--
-- * The [Registration Ceremony](-2/#registration-ceremony),
-- where a [public key credential](-2/#public-key-credential)
-- is created and associated with the user's account. The public key credential
-- may be [attested](-2/#sctn-attestation) via
-- one of the supported
-- [attestation statement formats](-2/#attestation-statement-format).
-- Afterwards the user can be authenticated by an Authentication Ceremony.
-- * The [Authentication Ceremony](-2/#authentication-ceremony),
-- where it is [asserted](-2/#assertion) that
-- the user controls the [credential private key](-2/#credential-private-key)
-- of a previously-registered [public key credential](-2/#public-key-credential).
--
-- Both Ceremonies have the same general flow:
--
-- 1. The user interacts with the website, triggering a registration or authentication
-- ceremony via some Relying Party website script, which sends a request to the Relying
-- Party server to request the credential creation/request options. Depending
-- on the scenario this request may include a username and more.
-- #step-2#
-- 2. The Relying Party creates an [PublicKeyCredentialCreationOptions](/TR/webauthn-2/#dictdef-publickeycredentialcreationoptions)
-- or [PublicKeyCredentialRequestOptions](-2/#dictdef-publickeycredentialrequestoptions) object respectively,
-- which encodes the parameters for the [ceremony](-2/#ceremony),
-- and sends it back as the response. This notably includes a
-- [Cryptographic Challenge](-2/#sctn-cryptographic-challenges),
-- generated by the server.
-- 3. Using the response as an argument, the script calls the
-- [@navigator.credentials.create()@](-credential-management/#dom-credentialscontainer-create)
-- or [@navigator.credentials.get()@](-credential-management/#dom-credentialscontainer-get)
-- functions of the browser respectively. This typically causes the browser
-- to request some gesture by the user. The result of these functions is
-- then sent to the Relying Party server with another request.
-- #step-4#
-- 4. The Relying Party verifies the request according to
-- [§ 7.1 Registering a New Credential](-2/#sctn-registering-a-new-credential) or
-- [§ 7.2 Verifying an Authentication Assertion](-2/#sctn-verifying-assertion) respectively.
-- The response indicates the result of this verification. For successful
-- registration ceremonies, the server stores the resulting public key and
-- some additional information in its database. For successful authentication
-- ceremonies, the server may want to update the relevant database entry.
-- 5. The script handles the received response accordingly, displaying errors
-- as needed.
--
-- This library only implements the server side of these steps, since the browser
-- script is usually very specific to the website and use case, but also fairly simple.
-- The example server in the source of this implementation shows how
-- [a potential implementation]().
-- See also these [Sample API Usage Scenarios](-2/#sctn-sample-scenarios).
--
-- == WebAuthn Security considerations
--
-- [Chapter 13](-2/#sctn-security-considerations)
-- details the security considerations of WebAuthn. It is highly recommended to
-- read at least the
-- [relying party section](-2/#sctn-security-considerations-rp)
-- of these considerations before implementing a relying party.
--
-- == Library
--
-- The two ceremonies described above are very similar in many ways. Because of
-- this, the library has many functions and types that are parametrized by
-- 'CeremonyKind', which allows improved type safety.
--
-- The library consists of the following main parts
module Crypto.WebAuthn
( -- * Model Types
-- | A set of types representing credential options ('CredentialOptions')
-- and their resulting credentials responses ('Credential'), used in
-- [step 2](#step-2) and [step 4](#step-4#) respectively.
module Crypto.WebAuthn.Model,
-- * WebAuthn Encoding
-- | Includes everything needed to encode\/decode WebAuthn types between
-- serializations and types defined in "Crypto.WebAuthn.Model".
-- Most notably this includes encoding and decoding functions for messages
-- exchanged with the
-- [webauthn-json](/github/webauthn-json) JavaScript
-- library: Encoding 'CredentialOptions' to intermediate JSON-serializable
-- types using 'wjEncodeCredentialOptionsRegistration' and
-- 'wjEncodeCredentialOptionsAuthentication', which can be used for [step
-- 2](#step-2). Also decoding 'Credential's from intermediate
-- JSON-deserializable types using 'wjDecodeCredentialRegistration' and
-- 'wjDecodeCredentialAuthentication', which can be used for [step
-- 4](#step-4).
module Crypto.WebAuthn.Encoding,
-- * Attestation Statement Formats
-- | In case of a [registration ceremony](-2/#registration),
-- there is the possibility for the Relying Party to request an
-- [attestation](-2/#sctn-attestation),
-- which if returned may allow the Relying Party to make a trust decision
-- ('rrAttestationStatement') with the authenticator model used.
--
-- This module contains the 'allSupportedFormats' value, which contains
-- implementations of all standard attestation statement formats supported
-- by this library. It can be manually passed to the
-- 'wjDecodeCredentialRegistration'' to enable only specific formats or add
-- support for additional ones.
module Crypto.WebAuthn.AttestationStatementFormat,
-- * Operations
-- | Functions for verifying resulting credential responses, needed in [step 4](#step-4).
-- This is the main functionality implemented by the library. This module
-- exports these two main symbols:
--
-- * 'verifyRegistrationResponse': Verifies a 'Credential' response for
-- registration.
-- * 'verifyAuthenticationResponse': Verifies a 'Credential' response for
-- authentication.
module Crypto.WebAuthn.Operation,
-- * Metadata
-- | A function for decoding a [FIDO Alliance Metadata Service](/)
-- BLOB in order to be able to enforce a set of requirements on the authenticator
-- used, e.g. to only allow authenticators that have been
-- [certified](/certification/functional-certification/).
--
-- Notably this library does not define any functions for fetching the
-- metadata, which is left to the user of the library. See the
-- [@MetadataFetch@]()
-- module in the example server for a potential implementation.
--
-- Currently the only function exported from this module is
--
-- * 'metadataBlobToRegistry': and verifies a Metadata BLOB to a
-- 'MetadataServiceRegistry', which can be passed to 'verifyRegistrationResponse'
module Crypto.WebAuthn.Metadata,
)
where
import Crypto.WebAuthn.AttestationStatementFormat
import Crypto.WebAuthn.Encoding
import Crypto.WebAuthn.Metadata
import Crypto.WebAuthn.Model
import Crypto.WebAuthn.Operation
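-- A minimal sketch of one concrete server-side detail from the ceremony
-- description above: generating the random challenge that goes into the
-- options of step 2. Using the cryptonite package's 'getRandomBytes' and a
-- 16-byte length are assumptions made purely for illustration; the library's
-- own option builders may be the more appropriate place to obtain a
-- challenge value.
import Crypto.Random (getRandomBytes)
import Data.ByteString (ByteString)

exampleChallengeBytes :: IO ByteString
exampleChallengeBytes = getRandomBytes 16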
| null |
https://raw.githubusercontent.com/tweag/webauthn/349a2b408a79107d9f07c017b72b03c9c306e5fa/src/Crypto/WebAuthn.hs
|
haskell
|
| Stability: provisional
This is the main module of the library. It re-exports the most commonly
needed modules and should be the only module considered stable. The
following sections give an overview of how a web application can use the
allows the creation and use of public key-based credentials for the purpose
of strongly authenticating users.
* A [Relying Party](-2/#webauthn-relying-party),
* A [User Agent](-2/#conforming-user-agent),
* A [Authenticator](-2/#authenticator),
and is therefore intended to be used by a website's server.
== Ceremonies
[Relying Party Operations](-2/#sctn-rp-operations),
aka [ceremonies](-2/#ceremony):
* The [Registration Ceremony](-2/#registration-ceremony),
where a [public key credential](-2/#public-key-credential)
is created and associated with the user's account. The public key credential
may be [attested](-2/#sctn-attestation) via
one of the supported
[attestation statement formats](-2/#attestation-statement-format).
Afterwards the user can be authenticated by an Authentication Ceremony.
* The [Authentication Ceremony](-2/#authentication-ceremony),
where it is [asserted](-2/#assertion) that
the user controls the [credential private key](-2/#credential-private-key)
of a previously-registered [public key credential](-2/#public-key-credential).
Both Ceremonies have the same general flow:
on the scenario this request may include a username and more.
#step-2#
or [PublicKeyCredentialRequestOptions](-2/#dictdef-publickeycredentialrequestoptions) object respectively,
which encodes the parameters for the [ceremony](-2/#ceremony),
and sends it back as the response. This notably includes a
[Cryptographic Challenge](-2/#sctn-cryptographic-challenges),
generated by the server.
[@navigator.credentials.create()@](-credential-management/#dom-credentialscontainer-create)
or [@navigator.credentials.get()@](-credential-management/#dom-credentialscontainer-get)
functions of the browser respectively. This typically causes the browser
to request some gesture by the user. The result of these functions is
[§ 7.1 Registering a New Credential](-2/#sctn-registering-a-new-credential) or
[§ 7.2 Verifying an Authentication Assertion](-2/#sctn-verifying-assertion) respectively.
The response indicates the result of this verification. For successful
registration ceremonies, the server stores the resulting public key and
some additional information in its database. For successful authentication
ceremonies, the server may want to update the relevant database entry.
as needed.
This library only implements the server side of these steps, since the browser
script is usually very specific to the website and use case, but also fairly simple.
The example server in the source of this implementation shows how
[a potential implementation]().
See also these [Sample API Usage Scenarios](-2/#sctn-sample-scenarios).
[Chapter 13](-2/#sctn-security-considerations)
read at least the
[relying party section](-2/#sctn-security-considerations-rp)
of these considerations before implementing a relying party.
== Library
this, the library has many functions and types that are parametrized by
The library consists of the following main parts
* Model Types
and their resulting credentials responses ('Credential'), used in
* WebAuthn Encoding
Most notably this includes encoding and decoding functions for messages
exchanged with the
types using 'wjEncodeCredentialOptionsRegistration' and
'wjEncodeCredentialOptionsAuthentication', which can be used for [step
JSON-deserializable types using 'wjDecodeCredentialRegistration' and
'wjDecodeCredentialAuthentication', which can be used for [step
* Attestation Statement Formats
| In case of a [registration ceremony](-2/#registration),
[attestation](-2/#sctn-attestation),
('rrAttestationStatement') with the authenticator model used.
This module contains the 'allSupportedFormats' value, which contains
implementations of all standard attestation statement formats supported
by this library. It can be manually passed to the
'wjDecodeCredentialRegistration'' to enable only specific formats or add
support for additional ones.
* Operations
This is the main functionality implemented by the library. This module
* 'verifyRegistrationResponse': Verifies a 'Credential' response for
registration.
* 'verifyAuthenticationResponse': Verifies a 'Credential' response for
authentication.
* Metadata
| A function for decoding a [FIDO Alliance Metadata Service](/)
used, e.g. to only allow authenticators that have been
Notably this library does not define any functions for fetching the
metadata, which is left to the user of the library. See the
[@MetadataFetch@]()
module in the example server for a potential implementation.
Currently the only function exported from this module is
'MetadataServiceRegistry', which can be passed to 'verifyRegistrationResponse'
|
# OPTIONS_GHC -Wno - missing - import - lists #
exported symbols to make use of the WebAuthn standard .
= = WebAuthn basics
For a web application the [ WebAuthn standard]( / TR / webauthn-2/ )
The WebAuthn standard usually involves three different devices , each of which
can be [ WebAuthn conformant]( / TR / webauthn-2/#sctn - conformance ):
aka a _ _ website _ _ like @github.com@ or @google.com@
aka usually a _ _ browser _ _ like or Chrome
aka something like a _ _ security key _ _ like a Yubikey or a fingerprint sensor
This library implements the server side of [ Relying Party conformance]( / TR / webauthn-2/#sctn - conforming - relying - parties )
In order for a Relying Party to be conformant , it needs to implement two
1 . The user interacts with the website , triggering a registration or authentication
ceremony via some Relying Party website script , which sends a request to the Relying
Party server to request the credential creation / request options . Depending
2 . The Relying Party creates an [ PublicKeyCredentialCreationOptions]( / TR / webauthn-2/#dictdef - publickeycredentialcreationoptions )
3 . Using the response as an argument , the script calls the
then sent to the Relying Party server with another request .
# step-4 #
4 . The Relying Party verifies the request according to
5 . The script handles the received response accordingly , displaying errors
= = WebAuthn Security considerations
details the security considerations of WebAuthn . It is highly recommend to
The two ceremonies described above are very similar in many ways . Because of
' CeremonyKind ' , which allows improved type safety .
module Crypto.WebAuthn
| A set of types representing credential options ( ' CredentialOptions ' )
[ step 2](#step-2 ) and [ step 4](#step-4 # ) respectively .
module Crypto.WebAuthn.Model,
| Includes everything needed to encode\/decode WebAuthn types between
serializations and types defined in " Crypto . WebAuthn . Model " .
[ webauthn - json]( / github / webauthn - json ) JavaScript
library : Encoding ' CredentialOptions ' to intermediate JSON - serializable
2](#step-2 ) . Also decoding ' Credential 's from intermediate
4](#step-4 ) .
module Crypto.WebAuthn.Encoding,
there is the possibility for the Relying Party to request an
which if returned may allow the Relying Party to make a trust decision
module Crypto.WebAuthn.AttestationStatementFormat,
| Functions for verifying resulting credential responses , needed in [ step 4](#step-4 ) .
exports these two main symbols :
module Crypto.WebAuthn.Operation,
BLOB in order to be able to enforce a set of requirements on the authenticator
[ certified]( / certification / functional - certification/ ) .
* ' metadataBlobToRegistry ' : and verifies a Metadata BLOB to a
module Crypto.WebAuthn.Metadata,
)
where
import Crypto.WebAuthn.AttestationStatementFormat
import Crypto.WebAuthn.Encoding
import Crypto.WebAuthn.Metadata
import Crypto.WebAuthn.Model
import Crypto.WebAuthn.Operation
|
099ac6de6f3e42995157459132840668ebc5af9aea3db65f9a27f884daee4572
|
ktakashi/sagittarius-scheme
|
document.scm
|
;;; -*- mode: scheme; coding: utf-8 -*-
;;;
;;; text/markdown/parser/document.scm - Document parser
;;;
;;; Copyright (c) 2022 <>
;;;
;;; Redistribution and use in source and binary forms, with or without
;;; modification, are permitted provided that the following conditions
;;; are met:
;;;
;;; 1. Redistributions of source code must retain the above copyright
;;; notice, this list of conditions and the following disclaimer.
;;;
;;; 2. Redistributions in binary form must reproduce the above copyright
;;; notice, this list of conditions and the following disclaimer in the
;;; documentation and/or other materials provided with the distribution.
;;;
;;; THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
" AS IS " AND ANY EXPRESS OR IMPLIED WARRANTIES , INCLUDING , BUT NOT
;;; LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
;;; A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
;;; OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
;;; SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED
;;; TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
;;; PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
;;; LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
;;; NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
;;; SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
;;;
#!nounbound
(library (text markdown parser document)
(export make-document-parser document-parser?
document-parser:parse)
(import (rnrs)
(core misc)
(srfi :117 list-queues)
(text markdown parser blocks)
(text markdown parser factories)
(text markdown parser inlines)
(text markdown parser link-reference)
(text markdown parser nodes)
(text markdown parser parsing)
(text markdown parser source)
(util port))
(define-vector-type open-block-parser
(make-open-block-parser block-parser source-index)
open-block-parser?
(block-parser open-block-parser-block-parser)
(source-index open-block-parser-source-index
open-block-parser-source-index-set!))
(define-record-type document-parser
(fields block-parser-factories
inline-parser-factory
inline-content-parser-factories
delimiter-processors
reference-processors
document-block-parser
open-block-parsers
(mutable state)
(mutable column-in-tab?)
(mutable next-non-space-column)
block-parsers ;; for inline parser
definitions
)
(protocol
(lambda (p)
(lambda (block-parser-factories
inline-parser-factory
inline-content-parser-factories
delimiter-processors
reference-processors)
(let* ((document-block-parser (make-document-block-parser))
(r (p block-parser-factories inline-parser-factory
inline-content-parser-factories
delimiter-processors reference-processors
document-block-parser
(list-queue (make-open-block-parser
document-block-parser 0))
#f ;; state
#f
0
(list-queue)
(make-reference-definitions)))
(state (make-parser-state
(block-parser-block document-block-parser)
#f ;; line
-1 ;; line-index
0 ;; index
0 ;; column
0 ;; next-non-space-index
0 ;; indent
#f ;; blank
(lambda () (document-parser:active-block-parser r)))))
(document-parser-state-set! r state)
r)))))
(define (document-parser:parse document-parser input-port)
(port-for-each (lambda (line)
(document-parser:parse-line document-parser line))
(lambda () (get-line input-port)))
(document-parser:finalize document-parser))
;; private APIs
(define (document-parser:parse-line document-parser line)
(define open-block-parsers
(document-parser-open-block-parsers document-parser))
(define (check-open-block-parser document-parser open-block-parsers)
(if (= (list-queue-length open-block-parsers) 1)
1 ;; document-block-parser would always match so skip ;)
(let loop ((matches 1)
(i 1)
(obp* (cdr (list-queue-list open-block-parsers))))
(if (null? obp*)
matches
(let ((bp (open-block-parser-block-parser (car obp*)))
(state (document-parser-state document-parser)))
(document-parser:find-next-non-space! document-parser)
(cond ((block-parser:try-continue bp state) =>
(lambda (bc)
(open-block-parser-source-index-set!
(car obp*)
(parser-state-index state))
(cond ((block-continue-finalize? bc)
(document-parser:add-source-location!
document-parser)
(document-parser:close-block-parsers!
document-parser
(- (list-queue-length open-block-parsers) i))
#f)
(else
(cond ((not (= (block-continue-index bc) -1))
(document-parser:set-new-index!
document-parser
(block-continue-index bc)))
((not (= (block-continue-column bc) -1))
(document-parser:set-new-column!
document-parser
(block-continue-column bc))))
(loop (+ matches 1) (+ i 1) (cdr obp*))))))
(else matches)))))))
(define (check-new-blocks document-parser block-parser matches)
(define state (document-parser-state document-parser))
(define (prepare-active-block-parser! document-parser)
(let* ((obp (document-parser:deactivate-block-parser! document-parser))
(old (open-block-parser-block-parser obp)))
(when (definition-parser? old)
(document-parser:add-definition-from! document-parser old))
(block-parser:close-block! old)
(let ((block (block-parser-block old)))
(markdown-node:unlink! block)
block)))
(define (get-replaced-source-locs document-parser block-start)
(and (block-start-replace-active-block-parser? block-start)
(let ((replaced-block
(prepare-active-block-parser! document-parser)))
(markdown-node:source-locations replaced-block))))
(define (find-block-start document-parser block-parser)
(let ((mbp (make-matched-block-parser block-parser)))
(let loop ((factories
(document-parser-block-parser-factories document-parser)))
(cond ((null? factories) #f)
(((car factories) (document-parser-state document-parser) mbp))
(else (loop (cdr factories)))))))
(let loop ((unmatched (- (list-queue-length open-block-parsers) matches))
(block-parser block-parser)
(last-index (parser-state-index state))
(started-new-block? #f)
(try-block-starts?
(or (paragraph-node? (block-parser-block block-parser))
(block-parser-container? block-parser))))
(document-parser:find-next-non-space! document-parser)
(cond ((not try-block-starts?)
(values last-index started-new-block? unmatched block-parser))
((or (parser-state-blank? state)
(and (< (parser-state-indent state)
+parsing-code-block-indent+)
(source-line:letter?
(parser-state-line state)
(parser-state-next-non-space-index state))))
(document-parser:set-new-index! document-parser
(parser-state-next-non-space-index state))
(values last-index started-new-block? unmatched block-parser))
((find-block-start document-parser block-parser) =>
(lambda (block-start)
(let ((source-index (parser-state-index state)))
;; okay, from now on always new block started
(when (positive? unmatched)
;; close open block here as we are handling a new block
(document-parser:close-block-parsers!
document-parser unmatched))
(cond ((block-start-new-index block-start) =>
(lambda (i)
(document-parser:set-new-index! document-parser i)))
((block-start-new-column block-start) =>
(lambda (c)
(document-parser:set-new-column! document-parser c))))
(let ((replaced-source-locs
(get-replaced-source-locs document-parser block-start)))
(let lp2 ((new-bp* (block-start-parsers block-start))
(block-parser block-parser)
(try-block-starts? try-block-starts?))
(if (null? new-bp*)
(loop 0
block-parser
(parser-state-index state)
#t
try-block-starts?)
(let ((new-block-parser (car new-bp*)))
(document-parser:add-child! document-parser
(make-open-block-parser new-block-parser
source-index))
(markdown-node:source-locations-set!
(block-parser-block new-block-parser)
replaced-source-locs)
(lp2 (cdr new-bp*)
new-block-parser
(block-parser-container? new-block-parser))))))
)))
(else
(document-parser:set-new-index! document-parser
(parser-state-next-non-space-index state))
(values last-index started-new-block? unmatched block-parser)))))
(define state (document-parser-state document-parser))
(document-parser:set-line! document-parser line)
(let ((matches (check-open-block-parser document-parser open-block-parsers)))
(when matches
(let-values (((last-index started-new-block? unmatched block-parser)
(check-new-blocks document-parser
(open-block-parser-block-parser
(list-ref (list-queue-list open-block-parsers)
(- matches 1)))
matches)))
(cond ((and (not started-new-block?)
(not (parser-state-blank? state))
(block-parser-allow-lazy-continuation-line?
(document-parser:active-block-parser document-parser)))
(let ((obp (list-ref (list-queue-list open-block-parsers)
(- (list-queue-length open-block-parsers) 1))))
(open-block-parser-source-index-set! obp last-index)
(document-parser:add-line! document-parser)))
(else
(when (> unmatched 0)
(document-parser:close-block-parsers!
document-parser unmatched))
(cond ((not (block-parser-container? block-parser))
(document-parser:add-line! document-parser))
((not (parser-state-blank? state))
(let ((pb (make-paragraph-parser
(parser-state-document state))))
(document-parser:add-child! document-parser
(make-open-block-parser pb last-index))
(document-parser:add-line! document-parser)))
(else
(document-parser:add-source-location! document-parser)))
))))))
(define (document-parser:finalize document-parser)
(define open-block-parsers
(document-parser-open-block-parsers document-parser))
(define (process-inlines document-parser)
(define definitions (document-parser-definitions document-parser))
(define processors (document-parser-delimiter-processors document-parser))
(define references (document-parser-reference-processors document-parser))
(define content-parsers
(document-parser-inline-content-parser-factories document-parser))
(define context (make-inline-parser-context content-parsers
processors
definitions
references))
(define inline-parser
((document-parser-inline-parser-factory document-parser) context))
(list-queue-for-each
(lambda (bp) (block-parser:parse-inlines! bp inline-parser))
(document-parser-block-parsers document-parser)))
(document-parser:close-block-parsers! document-parser
(list-queue-length open-block-parsers))
(process-inlines document-parser)
(block-parser-block (document-parser-document-block-parser document-parser)))
;; private
(define (document-parser:find-next-non-space! document-parser)
(define state (document-parser-state document-parser))
(define line (parser-state-line state))
(define len (source-line:length line))
(define (finish document-parser state i cols)
(parser-state-next-non-space-index-set! state i)
(document-parser-next-non-space-column-set! document-parser cols)
(parser-state-indent-set! state (- cols (parser-state-column state))))
(parser-state-blank?-set! state #t)
(let loop ((i (parser-state-index state)) (cols (parser-state-column state)))
(if (< i len)
(let ((c (source-line:char-at line i)))
(case c
((#\space) (loop (+ i 1) (+ cols 1)))
((#\tab)
(loop (+ i 1) (+ cols (parsing:columns->next-tab-stop cols))))
(else (parser-state-blank?-set! state #f)
(finish document-parser state i cols))))
(finish document-parser state i cols))))
(define (document-parser:set-line! document-parser line)
(define state (document-parser-state document-parser))
(let ((line-index (+ (parser-state-line-index state) 1)))
(parser-state-line-index-set! state line-index)
(parser-state-index-set! state 0)
(parser-state-column-set! state 0)
(document-parser-column-in-tab?-set! document-parser #f)
    ;; TODO replace \x0; to \xFFFD;?
(let ((loc (source-location:of line-index 0 (string-length line))))
(parser-state-line-set! state (source-line:of line loc)))))
(define (document-parser:add-line! document-parser)
(define state (document-parser-state document-parser))
(define index (parser-state-index state))
(define line (parser-state-line state))
(define column-in-tab?
(document-parser-column-in-tab? document-parser))
(define (get-content)
(cond (column-in-tab?
(let* ((after-tab (+ index 1))
(rest (source-line:substring line after-tab))
(space (parsing:columns->next-tab-stop
(parser-state-column state))))
(string-append (make-string space #\space)
(source-line-content rest))))
((zero? index) (source-line-content line))
(else (source-line-content (source-line:substring line index)))))
(let* ((content (get-content))
(loc (source-location:of (parser-state-line-index state) index
(string-length content))))
(block-parser:add-line!
(document-parser:active-block-parser document-parser)
(source-line:of content loc))
(document-parser:add-source-location! document-parser)))
(define (document-parser:add-child! document-parser open-block-parser)
(define block-parser (open-block-parser-block-parser open-block-parser))
(define block (block-parser-block block-parser))
(do ()
((block-parser:can-contain?
(document-parser:active-block-parser document-parser) block))
(document-parser:close-block-parsers! document-parser 1))
(markdown-node:append-child!
(block-parser-block
(document-parser:active-block-parser document-parser))
block)
(document-parser:activate-block-parser! document-parser open-block-parser))
(define (document-parser:set-new-index! document-parser index)
(define state (document-parser-state document-parser))
(define next-non-space (parser-state-next-non-space-index state))
(when (>= index next-non-space)
(parser-state-index-set! state next-non-space)
(let ((col (document-parser-next-non-space-column document-parser)))
(parser-state-column-set! state col)))
(do ((len (source-line:length (parser-state-line state))))
((or (>= (parser-state-index state) index)
(= (parser-state-index state) len)))
(document-parser:advance! document-parser))
(document-parser-column-in-tab?-set! document-parser #f))
(define (document-parser:set-new-column! document-parser column)
(define state (document-parser-state document-parser))
(define next-non-space-column
(document-parser-next-non-space-column document-parser))
(when (>= column next-non-space-column)
(parser-state-index-set! state (parser-state-next-non-space-index state))
(parser-state-column-set! state next-non-space-column))
(do ((len (source-line:length (parser-state-line state))))
((or (>= (parser-state-column state) column)
(= (parser-state-index state) len)))
(document-parser:advance! document-parser))
(cond ((> (parser-state-column state) column)
(parser-state-index-set! state (- (parser-state-index state) 1))
(parser-state-column-set! state column)
(document-parser-column-in-tab?-set! document-parser #t))
(else
(document-parser-column-in-tab?-set! document-parser #f))))
(define (document-parser:advance! document-parser)
(define state (document-parser-state document-parser))
(define line (parser-state-line state))
(define index (parser-state-index state))
(define column (parser-state-column state))
(let ((c (source-line:char-at line index)))
(parser-state-index-set! state (+ index 1))
(if (eqv? c #\tab)
(parser-state-column-set! state
(+ column (parsing:columns->next-tab-stop column)))
(parser-state-column-set! state (+ column 1)))))
(define (document-parser:active-block-parser document-parser)
(define open-block-parsers
(document-parser-open-block-parsers document-parser))
(let-values (((first last) (list-queue-first-last open-block-parsers)))
(open-block-parser-block-parser (if (null? last) first (car last)))))
(define (document-parser:activate-block-parser! document-parser
open-block-parser)
(define open-block-parsers
(document-parser-open-block-parsers document-parser))
(list-queue-add-back! open-block-parsers open-block-parser))
(define (document-parser:deactivate-block-parser! document-parser)
(define open-block-parsers
(document-parser-open-block-parsers document-parser))
(list-queue-remove-back! open-block-parsers))
(define (document-parser:add-source-location! document-parser)
(define open-block-parsers
(document-parser-open-block-parsers document-parser))
(define state (document-parser-state document-parser))
(do ((obp (cdr (list-queue-list open-block-parsers)) (cdr obp)))
((null? obp))
(let* ((block-index (open-block-parser-source-index (car obp)))
(len (- (source-line:length (parser-state-line state)) block-index)))
(unless (zero? len)
(block-parser:add-source-location!
(open-block-parser-block-parser (car obp))
(source-location:of (parser-state-line-index state)
block-index len))))))
(define (document-parser:close-block-parsers! document-parser size)
(define (finalize document-parser bp)
(when (definition-parser? bp)
(document-parser:add-definition-from! document-parser bp))
(block-parser:close-block! bp))
(define all-block-parsers (document-parser-block-parsers document-parser))
(do ((i 0 (+ i 1)))
((= i size))
(let* ((obp (document-parser:deactivate-block-parser! document-parser))
(bp (open-block-parser-block-parser obp)))
(finalize document-parser bp)
(list-queue-add-back! all-block-parsers bp))))
(define (document-parser:add-definition-from! document-parser old)
(define definition* (definition-parser:definitions old))
(define definitions (document-parser-definitions document-parser))
;; (define definitions (document-parser-d
(do ((d* definition* (cdr d*)))
((null? d*))
;; We can't add definition to the block as it's not a defined node.
(reference-definitions:add! definitions (car d*))))
)
| null |
https://raw.githubusercontent.com/ktakashi/sagittarius-scheme/7aa51b5eb02e520b0902350251c4f5c3b3395990/sitelib/text/markdown/parser/document.scm
|
scheme
|
coding : utf-8 -*-
text/markdown/parser/document.scm - Document parser
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions
are met:
1. Redistributions of source code must retain the above copyright
notice, this list of conditions and the following disclaimer.
2. Redistributions in binary form must reproduce the above copyright
notice, this list of conditions and the following disclaimer in the
documentation and/or other materials provided with the distribution.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
for inline parser
state
line
line-index
index
column
next-non-space-index
indent
blank
private APIs
document-block-parser would always match so skip ;)
okay, from now on always new block started
close open block here as we are handling a new block
private
to \xFFFD ; ?
(define definitions (document-parser-d
We can't add definition to the block as it's not a defined node.
|
#!nounbound
(library (text markdown parser document)
(export make-document-parser document-parser?
document-parser:parse)
(import (rnrs)
(core misc)
(srfi :117 list-queues)
(text markdown parser blocks)
(text markdown parser factories)
(text markdown parser inlines)
(text markdown parser link-reference)
(text markdown parser nodes)
(text markdown parser parsing)
(text markdown parser source)
(util port))
(define-vector-type open-block-parser
(make-open-block-parser block-parser source-index)
open-block-parser?
(block-parser open-block-parser-block-parser)
(source-index open-block-parser-source-index
open-block-parser-source-index-set!))
(define-record-type document-parser
(fields block-parser-factories
inline-parser-factory
inline-content-parser-factories
delimiter-processors
reference-processors
document-block-parser
open-block-parsers
(mutable state)
(mutable column-in-tab?)
(mutable next-non-space-column)
definitions
)
(protocol
(lambda (p)
(lambda (block-parser-factories
inline-parser-factory
inline-content-parser-factories
delimiter-processors
reference-processors)
(let* ((document-block-parser (make-document-block-parser))
(r (p block-parser-factories inline-parser-factory
inline-content-parser-factories
delimiter-processors reference-processors
document-block-parser
(list-queue (make-open-block-parser
document-block-parser 0))
#f
0
(list-queue)
(make-reference-definitions)))
(state (make-parser-state
(block-parser-block document-block-parser)
(lambda () (document-parser:active-block-parser r)))))
(document-parser-state-set! r state)
r)))))
(define (document-parser:parse document-parser input-port)
(port-for-each (lambda (line)
(document-parser:parse-line document-parser line))
(lambda () (get-line input-port)))
(document-parser:finalize document-parser))
(define (document-parser:parse-line document-parser line)
(define open-block-parsers
(document-parser-open-block-parsers document-parser))
(define (check-open-block-parser document-parser open-block-parsers)
(if (= (list-queue-length open-block-parsers) 1)
(let loop ((matches 1)
(i 1)
(obp* (cdr (list-queue-list open-block-parsers))))
(if (null? obp*)
matches
(let ((bp (open-block-parser-block-parser (car obp*)))
(state (document-parser-state document-parser)))
(document-parser:find-next-non-space! document-parser)
(cond ((block-parser:try-continue bp state) =>
(lambda (bc)
(open-block-parser-source-index-set!
(car obp*)
(parser-state-index state))
(cond ((block-continue-finalize? bc)
(document-parser:add-source-location!
document-parser)
(document-parser:close-block-parsers!
document-parser
(- (list-queue-length open-block-parsers) i))
#f)
(else
(cond ((not (= (block-continue-index bc) -1))
(document-parser:set-new-index!
document-parser
(block-continue-index bc)))
((not (= (block-continue-column bc) -1))
(document-parser:set-new-column!
document-parser
(block-continue-column bc))))
(loop (+ matches 1) (+ i 1) (cdr obp*))))))
(else matches)))))))
(define (check-new-blocks document-parser block-parser matches)
(define state (document-parser-state document-parser))
(define (prepare-active-block-parser! document-parser)
(let* ((obp (document-parser:deactivate-block-parser! document-parser))
(old (open-block-parser-block-parser obp)))
(when (definition-parser? old)
(document-parser:add-definition-from! document-parser old))
(block-parser:close-block! old)
(let ((block (block-parser-block old)))
(markdown-node:unlink! block)
block)))
(define (get-replaced-source-locs document-parser block-start)
(and (block-start-replace-active-block-parser? block-start)
(let ((replaced-block
(prepare-active-block-parser! document-parser)))
(markdown-node:source-locations replaced-block))))
(define (find-block-start document-parser block-parser)
(let ((mbp (make-matched-block-parser block-parser)))
(let loop ((factories
(document-parser-block-parser-factories document-parser)))
(cond ((null? factories) #f)
(((car factories) (document-parser-state document-parser) mbp))
(else (loop (cdr factories)))))))
(let loop ((unmatched (- (list-queue-length open-block-parsers) matches))
(block-parser block-parser)
(last-index (parser-state-index state))
(started-new-block? #f)
(try-block-starts?
(or (paragraph-node? (block-parser-block block-parser))
(block-parser-container? block-parser))))
(document-parser:find-next-non-space! document-parser)
(cond ((not try-block-starts?)
(values last-index started-new-block? unmatched block-parser))
((or (parser-state-blank? state)
(and (< (parser-state-indent state)
+parsing-code-block-indent+)
(source-line:letter?
(parser-state-line state)
(parser-state-next-non-space-index state))))
(document-parser:set-new-index! document-parser
(parser-state-next-non-space-index state))
(values last-index started-new-block? unmatched block-parser))
((find-block-start document-parser block-parser) =>
(lambda (block-start)
(let ((source-index (parser-state-index state)))
(when (positive? unmatched)
(document-parser:close-block-parsers!
document-parser unmatched))
(cond ((block-start-new-index block-start) =>
(lambda (i)
(document-parser:set-new-index! document-parser i)))
((block-start-new-column block-start) =>
(lambda (c)
(document-parser:set-new-column! document-parser c))))
(let ((replaced-source-locs
(get-replaced-source-locs document-parser block-start)))
(let lp2 ((new-bp* (block-start-parsers block-start))
(block-parser block-parser)
(try-block-starts? try-block-starts?))
(if (null? new-bp*)
(loop 0
block-parser
(parser-state-index state)
#t
try-block-starts?)
(let ((new-block-parser (car new-bp*)))
(document-parser:add-child! document-parser
(make-open-block-parser new-block-parser
source-index))
(markdown-node:source-locations-set!
(block-parser-block new-block-parser)
replaced-source-locs)
(lp2 (cdr new-bp*)
new-block-parser
(block-parser-container? new-block-parser))))))
)))
(else
(document-parser:set-new-index! document-parser
(parser-state-next-non-space-index state))
(values last-index started-new-block? unmatched block-parser)))))
(define state (document-parser-state document-parser))
(document-parser:set-line! document-parser line)
(let ((matches (check-open-block-parser document-parser open-block-parsers)))
(when matches
(let-values (((last-index started-new-block? unmatched block-parser)
(check-new-blocks document-parser
(open-block-parser-block-parser
(list-ref (list-queue-list open-block-parsers)
(- matches 1)))
matches)))
(cond ((and (not started-new-block?)
(not (parser-state-blank? state))
(block-parser-allow-lazy-continuation-line?
(document-parser:active-block-parser document-parser)))
(let ((obp (list-ref (list-queue-list open-block-parsers)
(- (list-queue-length open-block-parsers) 1))))
(open-block-parser-source-index-set! obp last-index)
(document-parser:add-line! document-parser)))
(else
(when (> unmatched 0)
(document-parser:close-block-parsers!
document-parser unmatched))
(cond ((not (block-parser-container? block-parser))
(document-parser:add-line! document-parser))
((not (parser-state-blank? state))
(let ((pb (make-paragraph-parser
(parser-state-document state))))
(document-parser:add-child! document-parser
(make-open-block-parser pb last-index))
(document-parser:add-line! document-parser)))
(else
(document-parser:add-source-location! document-parser)))
))))))
(define (document-parser:finalize document-parser)
(define open-block-parsers
(document-parser-open-block-parsers document-parser))
(define (process-inlines document-parser)
(define definitions (document-parser-definitions document-parser))
(define processors (document-parser-delimiter-processors document-parser))
(define references (document-parser-reference-processors document-parser))
(define content-parsers
(document-parser-inline-content-parser-factories document-parser))
(define context (make-inline-parser-context content-parsers
processors
definitions
references))
(define inline-parser
((document-parser-inline-parser-factory document-parser) context))
(list-queue-for-each
(lambda (bp) (block-parser:parse-inlines! bp inline-parser))
(document-parser-block-parsers document-parser)))
(document-parser:close-block-parsers! document-parser
(list-queue-length open-block-parsers))
(process-inlines document-parser)
(block-parser-block (document-parser-document-block-parser document-parser)))
(define (document-parser:find-next-non-space! document-parser)
(define state (document-parser-state document-parser))
(define line (parser-state-line state))
(define len (source-line:length line))
(define (finish document-parser state i cols)
(parser-state-next-non-space-index-set! state i)
(document-parser-next-non-space-column-set! document-parser cols)
(parser-state-indent-set! state (- cols (parser-state-column state))))
(parser-state-blank?-set! state #t)
(let loop ((i (parser-state-index state)) (cols (parser-state-column state)))
(if (< i len)
(let ((c (source-line:char-at line i)))
(case c
((#\space) (loop (+ i 1) (+ cols 1)))
((#\tab)
(loop (+ i 1) (+ cols (parsing:columns->next-tab-stop cols))))
(else (parser-state-blank?-set! state #f)
(finish document-parser state i cols))))
(finish document-parser state i cols))))
(define (document-parser:set-line! document-parser line)
(define state (document-parser-state document-parser))
(let ((line-index (+ (parser-state-line-index state) 1)))
(parser-state-line-index-set! state line-index)
(parser-state-index-set! state 0)
(parser-state-column-set! state 0)
(document-parser-column-in-tab?-set! document-parser #f)
(let ((loc (source-location:of line-index 0 (string-length line))))
(parser-state-line-set! state (source-line:of line loc)))))
(define (document-parser:add-line! document-parser)
(define state (document-parser-state document-parser))
(define index (parser-state-index state))
(define line (parser-state-line state))
(define column-in-tab?
(document-parser-column-in-tab? document-parser))
(define (get-content)
(cond (column-in-tab?
(let* ((after-tab (+ index 1))
(rest (source-line:substring line after-tab))
(space (parsing:columns->next-tab-stop
(parser-state-column state))))
(string-append (make-string space #\space)
(source-line-content rest))))
((zero? index) (source-line-content line))
(else (source-line-content (source-line:substring line index)))))
(let* ((content (get-content))
(loc (source-location:of (parser-state-line-index state) index
(string-length content))))
(block-parser:add-line!
(document-parser:active-block-parser document-parser)
(source-line:of content loc))
(document-parser:add-source-location! document-parser)))
(define (document-parser:add-child! document-parser open-block-parser)
(define block-parser (open-block-parser-block-parser open-block-parser))
(define block (block-parser-block block-parser))
(do ()
((block-parser:can-contain?
(document-parser:active-block-parser document-parser) block))
(document-parser:close-block-parsers! document-parser 1))
(markdown-node:append-child!
(block-parser-block
(document-parser:active-block-parser document-parser))
block)
(document-parser:activate-block-parser! document-parser open-block-parser))
(define (document-parser:set-new-index! document-parser index)
(define state (document-parser-state document-parser))
(define next-non-space (parser-state-next-non-space-index state))
(when (>= index next-non-space)
(parser-state-index-set! state next-non-space)
(let ((col (document-parser-next-non-space-column document-parser)))
(parser-state-column-set! state col)))
(do ((len (source-line:length (parser-state-line state))))
((or (>= (parser-state-index state) index)
(= (parser-state-index state) len)))
(document-parser:advance! document-parser))
(document-parser-column-in-tab?-set! document-parser #f))
(define (document-parser:set-new-column! document-parser column)
(define state (document-parser-state document-parser))
(define next-non-space-column
(document-parser-next-non-space-column document-parser))
(when (>= column next-non-space-column)
(parser-state-index-set! state (parser-state-next-non-space-index state))
(parser-state-column-set! state next-non-space-column))
(do ((len (source-line:length (parser-state-line state))))
((or (>= (parser-state-column state) column)
(= (parser-state-index state) len)))
(document-parser:advance! document-parser))
(cond ((> (parser-state-column state) column)
(parser-state-index-set! state (- (parser-state-index state) 1))
(parser-state-column-set! state column)
(document-parser-column-in-tab?-set! document-parser #t))
(else
(document-parser-column-in-tab?-set! document-parser #f))))
(define (document-parser:advance! document-parser)
(define state (document-parser-state document-parser))
(define line (parser-state-line state))
(define index (parser-state-index state))
(define column (parser-state-column state))
(let ((c (source-line:char-at line index)))
(parser-state-index-set! state (+ index 1))
(if (eqv? c #\tab)
(parser-state-column-set! state
(+ column (parsing:columns->next-tab-stop column)))
(parser-state-column-set! state (+ column 1)))))
(define (document-parser:active-block-parser document-parser)
(define open-block-parsers
(document-parser-open-block-parsers document-parser))
(let-values (((first last) (list-queue-first-last open-block-parsers)))
(open-block-parser-block-parser (if (null? last) first (car last)))))
(define (document-parser:activate-block-parser! document-parser
open-block-parser)
(define open-block-parsers
(document-parser-open-block-parsers document-parser))
(list-queue-add-back! open-block-parsers open-block-parser))
(define (document-parser:deactivate-block-parser! document-parser)
(define open-block-parsers
(document-parser-open-block-parsers document-parser))
(list-queue-remove-back! open-block-parsers))
(define (document-parser:add-source-location! document-parser)
(define open-block-parsers
(document-parser-open-block-parsers document-parser))
(define state (document-parser-state document-parser))
(do ((obp (cdr (list-queue-list open-block-parsers)) (cdr obp)))
((null? obp))
(let* ((block-index (open-block-parser-source-index (car obp)))
(len (- (source-line:length (parser-state-line state)) block-index)))
(unless (zero? len)
(block-parser:add-source-location!
(open-block-parser-block-parser (car obp))
(source-location:of (parser-state-line-index state)
block-index len))))))
(define (document-parser:close-block-parsers! document-parser size)
(define (finalize document-parser bp)
(when (definition-parser? bp)
(document-parser:add-definition-from! document-parser bp))
(block-parser:close-block! bp))
(define all-block-parsers (document-parser-block-parsers document-parser))
(do ((i 0 (+ i 1)))
((= i size))
(let* ((obp (document-parser:deactivate-block-parser! document-parser))
(bp (open-block-parser-block-parser obp)))
(finalize document-parser bp)
(list-queue-add-back! all-block-parsers bp))))
(define (document-parser:add-definition-from! document-parser old)
(define definition* (definition-parser:definitions old))
(define definitions (document-parser-definitions document-parser))
(do ((d* definition* (cdr d*)))
((null? d*))
(reference-definitions:add! definitions (car d*))))
)
|
91c6703e1041d7bd1f874f9b54ff70a2090874e808aa6347c6179e17544e2aff
|
fyquah/hardcaml_zprize
|
top.mli
|
(** Instantiate the parallel NTT cores along with the load and store state machines.
    Exposes AXI streaming interfaces. *)
open! Base
open Hardcaml
module Make (Config : Top_config.S) : sig
module Four_step : module type of Hardcaml_ntt.Four_step.Make (Config)
module Axi_stream = Four_step.Axi_stream
module Gf = Four_step.Gf
module Load_sm : module type of Load_sm.Make (Config)
module Store_sm : module type of Store_sm.Make (Config)
module I : sig
type 'a t =
{ clock : 'a
; clear : 'a
; start : 'a (** Begin processing a pass. *)
; first_4step_pass : 'a
      (** If high we are performing the first pass of the 4 step algorithm. The
          core will apply the twiddle correction factors and also stream in/out
          data in the appropriate way. *)
; data_in : 'a Axi_stream.Source.t
; data_out_dest : 'a Axi_stream.Dest.t
}
[@@deriving sexp_of, hardcaml]
end
module O : sig
type 'a t =
{ data_out : 'a Axi_stream.Source.t
; data_in_dest : 'a Axi_stream.Dest.t
; done_ : 'a (** Low while a pass is running. *)
}
[@@deriving sexp_of, hardcaml]
end
val create : build_mode:Build_mode.t -> Scope.t -> Signal.t Interface.Create_fn(I)(O).t
val hierarchy
: build_mode:Build_mode.t
-> Scope.t
-> Signal.t Interface.Create_fn(I)(O).t
end
| null |
https://raw.githubusercontent.com/fyquah/hardcaml_zprize/553b1be10ae9b977decbca850df6ee2d0595e7ff/zprize/ntt/hardcaml/src/top.mli
|
ocaml
|
* Begin processing a pass.
* Low while a pass is running.
|
(** Instantiate the parallel NTT cores along with the load and store state machines.
    Exposes AXI streaming interfaces. *)
open! Base
open Hardcaml
module Make (Config : Top_config.S) : sig
module Four_step : module type of Hardcaml_ntt.Four_step.Make (Config)
module Axi_stream = Four_step.Axi_stream
module Gf = Four_step.Gf
module Load_sm : module type of Load_sm.Make (Config)
module Store_sm : module type of Store_sm.Make (Config)
module I : sig
type 'a t =
{ clock : 'a
; clear : 'a
; first_4step_pass : 'a
      (** If high we are performing the first pass of the 4 step algorithm. The
          core will apply the twiddle correction factors and also stream in/out
          data in the appropriate way. *)
; data_in : 'a Axi_stream.Source.t
; data_out_dest : 'a Axi_stream.Dest.t
}
[@@deriving sexp_of, hardcaml]
end
module O : sig
type 'a t =
{ data_out : 'a Axi_stream.Source.t
; data_in_dest : 'a Axi_stream.Dest.t
}
[@@deriving sexp_of, hardcaml]
end
val create : build_mode:Build_mode.t -> Scope.t -> Signal.t Interface.Create_fn(I)(O).t
val hierarchy
: build_mode:Build_mode.t
-> Scope.t
-> Signal.t Interface.Create_fn(I)(O).t
end
|
934f01e2d0962eecd7c356f27a3adaa7c4d26635774a8cb1b73808870fb949d7
|
mflatt/not-a-box
|
wrap.rkt
|
#lang racket/base
(require '#%linklet
(for-syntax racket/base))
(provide unwrap unwrap-list
wrap-pair? wrap-null? wrap-car wrap-cdr wrap-list?
wrap-eq? wrap-equal?
in-wrap-list)
;; ----------------------------------------
;; Get `correlated?` and `correlated-e` reflectively, so that this
;; module can be flattened by the expander to a linklet that doesn't
;; refer to syntax primitives
(define kernel-table (primitive-table '#%kernel))
(define correlated?
(or (and kernel-table
(hash-ref kernel-table 'syntax?))
(lambda (x) #f)))
(define correlated-e
(or (and kernel-table
(hash-ref kernel-table 'syntax-e))
(lambda (x) x)))
;; ----------------------------------------
(define (unwrap v)
(if (correlated? v)
(correlated-e v)
v))
(define (unwrap-list v)
(cond
[(null? v) null]
[(pair? v)
(let ([r (unwrap-list (cdr v))])
(if (eq? r (cdr v))
v
(cons (car v) r)))]
[(correlated? v) (unwrap-list (correlated-e v))]
[else v]))
(define (wrap-car v)
(if (correlated? v)
(car (correlated-e v))
(car v)))
(define (wrap-cdr v)
(if (correlated? v)
(cdr (correlated-e v))
(cdr v)))
(define (wrap-pair? v)
(pair? (unwrap v)))
(define (wrap-null? v)
(null? (unwrap v)))
(define (wrap-list? v)
(cond
[(null? v) #t]
[(correlated? v) (wrap-list? (correlated-e v))]
[(pair? v) (wrap-list? (cdr v))]
[else #f]))
(define (wrap-eq? a b)
(eq? (unwrap a) (unwrap b)))
(define (wrap-equal? a b)
(let ([b (unwrap b)])
(or (and (not (pair? a))
(equal? a b))
(and (pair? a)
(pair? b)
(wrap-equal? (car a) (car b))
             (wrap-equal? (cdr a) (cdr b))))))
(define-sequence-syntax in-wrap-list
(lambda (stx) (raise-argument-error "allowed only in `for` forms" stx))
(lambda (stx)
(syntax-case stx ()
[[(id) (_ lst-expr)]
(for-clause-syntax-protect
#'[(id)
(:do-in
;;outer bindings
([(lst) lst-expr])
;; outer check
(void)
;; loop bindings
([lst lst])
;; pos check
(not (wrap-null? lst))
;; inner bindings
([(id) (if (wrap-pair? lst) (wrap-car lst) lst)]
[(rest) (if (wrap-pair? lst) (wrap-cdr lst) null)])
;; pre guard
#t
;; post guard
#t
;; loop args
(rest))])]
[_ #f])))
| null |
https://raw.githubusercontent.com/mflatt/not-a-box/b6c1af4fb0eb877610a3a20b5265a8c8d2dd28e9/schemify/wrap.rkt
|
racket
|
----------------------------------------
Get `correlated?` and `correlated-e` reflectively, so that this
module can be flattened by the expander to a linklet that doesn't
refer to syntax primitives
----------------------------------------
outer bindings
outer check
loop bindings
pos check
inner bindings
pre guard
post guard
loop args
|
#lang racket/base
(require '#%linklet
(for-syntax racket/base))
(provide unwrap unwrap-list
wrap-pair? wrap-null? wrap-car wrap-cdr wrap-list?
wrap-eq? wrap-equal?
in-wrap-list)
(define kernel-table (primitive-table '#%kernel))
(define correlated?
(or (and kernel-table
(hash-ref kernel-table 'syntax?))
(lambda (x) #f)))
(define correlated-e
(or (and kernel-table
(hash-ref kernel-table 'syntax-e))
(lambda (x) x)))
(define (unwrap v)
(if (correlated? v)
(correlated-e v)
v))
(define (unwrap-list v)
(cond
[(null? v) null]
[(pair? v)
(let ([r (unwrap-list (cdr v))])
(if (eq? r (cdr v))
v
(cons (car v) r)))]
[(correlated? v) (unwrap-list (correlated-e v))]
[else v]))
(define (wrap-car v)
(if (correlated? v)
(car (correlated-e v))
(car v)))
(define (wrap-cdr v)
(if (correlated? v)
(cdr (correlated-e v))
(cdr v)))
(define (wrap-pair? v)
(pair? (unwrap v)))
(define (wrap-null? v)
(null? (unwrap v)))
(define (wrap-list? v)
(cond
[(null? v) #t]
[(correlated? v) (wrap-list? (correlated-e v))]
[(pair? v) (wrap-list? (cdr v))]
[else #f]))
(define (wrap-eq? a b)
(eq? (unwrap a) (unwrap b)))
(define (wrap-equal? a b)
(let ([b (unwrap b)])
(or (and (not (pair? a))
(equal? a b))
(and (pair? a)
(pair? b)
(wrap-equal? (car a) (car b))
             (wrap-equal? (cdr a) (cdr b))))))
(define-sequence-syntax in-wrap-list
(lambda (stx) (raise-argument-error "allowed only in `for` forms" stx))
(lambda (stx)
(syntax-case stx ()
[[(id) (_ lst-expr)]
(for-clause-syntax-protect
#'[(id)
(:do-in
([(lst) lst-expr])
(void)
([lst lst])
(not (wrap-null? lst))
([(id) (if (wrap-pair? lst) (wrap-car lst) lst)]
[(rest) (if (wrap-pair? lst) (wrap-cdr lst) null)])
#t
#t
(rest))])]
[_ #f])))
|
3b4bc3b013b198ff90bc21f765d44c1936a69261229cac380e73bbefda873c0b
|
haskell-mafia/projector
|
Backend.hs
|
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
module Projector.Html.Backend (
Backend (..)
, checkModule
, runPredicates
) where
import qualified Data.Map.Strict as M
import P
import Projector.Core
import Projector.Html.Data.Backend
import Projector.Html.Data.Module
import Projector.Html.Data.Prim
-- -----------------------------------------------------------------------------
-- Per-backend warnings and linting
checkModule :: Backend a e -> Module HtmlType PrimT b -> Either [e] ()
checkModule b m =
case predModule (predicates b) m of
PredOk ->
pure ()
PredError es ->
Left es
predModule :: [Predicate e] -> Module HtmlType PrimT b -> PredResult [e]
predModule preds =
fmap fold . predResults . fmap (runPredicates preds . meExpr) . M.elems . moduleExprs
runPredicates :: [Predicate e] -> HtmlExpr a -> PredResult [e]
runPredicates preds expr =
predResults . with preds $ \pred ->
case pred of
ExprPredicate p ->
foldrExpr (runPredicate . p) (const id) PredOk expr
PatPredicate p ->
foldrExpr (const id) (runPredicate . p) PredOk expr
runPredicate :: PredResult e -> PredResult e -> PredResult e
runPredicate r k =
case r of
PredError _ ->
r
PredOk ->
k
{-# INLINE runPredicate #-}
predResults :: [PredResult e] -> PredResult [e]
predResults =
foldr (withPredError (:) (:[]) id) PredOk
withPredError :: (a -> b -> c) -> (a -> c) -> (b -> c) -> PredResult a -> PredResult b -> PredResult c
withPredError bin un1 un2 r1 r2 =
case (r1, r2) of
(PredError a, PredError b) ->
PredError (bin a b)
(PredError a, PredOk) ->
PredError (un1 a)
(PredOk, PredError b) ->
PredError (un2 b)
(PredOk, PredOk) ->
PredOk
| null |
https://raw.githubusercontent.com/haskell-mafia/projector/6af7c7f1e8a428b14c2c5a508f7d4a3ac2decd52/projector-html/src/Projector/Html/Backend.hs
|
haskell
|
# LANGUAGE OverloadedStrings #
-----------------------------------------------------------------------------
Per-backend warnings and linting
|
{-# LANGUAGE NoImplicitPrelude #-}
module Projector.Html.Backend (
Backend (..)
, checkModule
, runPredicates
) where
import qualified Data.Map.Strict as M
import P
import Projector.Core
import Projector.Html.Data.Backend
import Projector.Html.Data.Module
import Projector.Html.Data.Prim
checkModule :: Backend a e -> Module HtmlType PrimT b -> Either [e] ()
checkModule b m =
case predModule (predicates b) m of
PredOk ->
pure ()
PredError es ->
Left es
predModule :: [Predicate e] -> Module HtmlType PrimT b -> PredResult [e]
predModule preds =
fmap fold . predResults . fmap (runPredicates preds . meExpr) . M.elems . moduleExprs
runPredicates :: [Predicate e] -> HtmlExpr a -> PredResult [e]
runPredicates preds expr =
predResults . with preds $ \pred ->
case pred of
ExprPredicate p ->
foldrExpr (runPredicate . p) (const id) PredOk expr
PatPredicate p ->
foldrExpr (const id) (runPredicate . p) PredOk expr
runPredicate :: PredResult e -> PredResult e -> PredResult e
runPredicate r k =
case r of
PredError _ ->
r
PredOk ->
k
{-# INLINE runPredicate #-}
predResults :: [PredResult e] -> PredResult [e]
predResults =
foldr (withPredError (:) (:[]) id) PredOk
withPredError :: (a -> b -> c) -> (a -> c) -> (b -> c) -> PredResult a -> PredResult b -> PredResult c
withPredError bin un1 un2 r1 r2 =
case (r1, r2) of
(PredError a, PredError b) ->
PredError (bin a b)
(PredError a, PredOk) ->
PredError (un1 a)
(PredOk, PredError b) ->
PredError (un2 b)
(PredOk, PredOk) ->
PredOk
|
f1c6caa6a03bc87d9305787ca384719e75d6017abd5f0c9d0be107c69e2edc7c
|
ulisses/Static-Code-Analyzer
|
Ops.hs
|
{-# LANGUAGE DeriveDataTypeable #-}
-----------------------------------------------------------------------------
-- |
-- Module : Language.C.Syntax.Ops
-- Copyright : (c) 2008
-- License : BSD-style
-- Maintainer :
-- Stability : experimental
-- Portability : ghc
--
-- Unary, binary and assignment operators. Exported via AST.
-----------------------------------------------------------------------------
module Language.C.Syntax.Ops (
-- * Assignment operators
CAssignOp(..),
-- * Unary operators
CUnaryOp(..),
-- * Binary operators
CBinaryOp(..),
)
where
import Data.Generics
-- | C assignment operators ( )
data CAssignOp = CAssignOp
| CMulAssOp
| CDivAssOp
| CRmdAssOp -- ^ remainder and assignment
| CAddAssOp
| CSubAssOp
| CShlAssOp
| CShrAssOp
| CAndAssOp
| CXorAssOp
| COrAssOp
deriving (Eq, Ord,Data,Typeable)
instance Show CAssignOp where
show CAssignOp = "="
show CMulAssOp = "*="
show CDivAssOp = "/="
show CRmdAssOp = "%="
show CAddAssOp = "+="
show CSubAssOp = "-="
show CShlAssOp = "<<="
show CShrAssOp = ">>="
show CAndAssOp = "&="
show CXorAssOp = "^="
show COrAssOp = "|="
-- | C binary operators (K&R A7.6-15)
--
data CBinaryOp = CMulOp
| CDivOp
| CRmdOp -- ^ remainder of division
| CAddOp
| CSubOp
| CShlOp -- ^ shift left
| CShrOp -- ^ shift right
| CLeOp -- ^ less
| CGrOp -- ^ greater
| CLeqOp -- ^ less or equal
| CGeqOp -- ^ greater or equal
| CEqOp -- ^ equal
| CNeqOp -- ^ not equal
| CAndOp -- ^ bitwise and
| CXorOp -- ^ exclusive bitwise or
| COrOp -- ^ inclusive bitwise or
| CLndOp -- ^ logical and
| CLorOp -- ^ logical or
deriving (Eq,Ord,Data,Typeable)
instance Show CBinaryOp where
show CMulOp = "*"
show CDivOp = "/"
show CRmdOp = "%"
show CAddOp = "+"
show CSubOp = "-"
show CShlOp = "<<"
show CShrOp = ">>"
show CLeOp = "<"
show CGrOp = ">"
show CLeqOp = "<="
show CGeqOp = ">="
show CEqOp = "=="
show CNeqOp = "!="
show CAndOp = "&"
show CXorOp = "^"
show COrOp = "|"
show CLndOp = "&&"
show CLorOp = "||"
-- | C unary operator (K&R A7.3-4)
--
data CUnaryOp = CPreIncOp -- ^ prefix increment operator
| CPreDecOp -- ^ prefix decrement operator
| CPostIncOp -- ^ postfix increment operator
| CPostDecOp -- ^ postfix decrement operator
| CAdrOp -- ^ address operator
| CIndOp -- ^ indirection operator
| CPlusOp -- ^ prefix plus
| CMinOp -- ^ prefix minus
| CCompOp -- ^ one's complement
| CNegOp -- ^ logical negation
deriving (Eq,Ord,Data,Typeable)
instance Show CUnaryOp where
show CPreIncOp = "++"
show CPreDecOp = "--"
show CPostIncOp = "++"
show CPostDecOp = "--"
show CAdrOp = "&"
show CIndOp = "*"
show CPlusOp = "+"
show CMinOp = "-"
show CCompOp = "~"
show CNegOp = "!"
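-- Illustrative sketch, not part of the original module: the Show instances
-- above render every constructor as its C spelling, so a pretty-printer can
-- splice operators directly into output.
opSpellings :: [String]
opSpellings = [show CShlAssOp, show CNeqOp, show CPostIncOp]
-- opSpellings == ["<<=", "!=", "++"]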
| null |
https://raw.githubusercontent.com/ulisses/Static-Code-Analyzer/4c3f6423d43e1bccb9d1cf04e74ae60d9170186f/Analyzer/language-c-0.3.2.1/src/Language/C/Syntax/Ops.hs
|
haskell
|
# LANGUAGE DeriveDataTypeable #
---------------------------------------------------------------------------
|
Module : Language.C.Syntax.Ops
License : BSD-style
Maintainer :
Stability : experimental
---------------------------------------------------------------------------
* Assignment operators
* Unary operators
* Binary operators
^ remainder and assignment
^ remainder of division
^ shift left
^ shift right
^ less
^ greater
^ less or equal
^ greater or equal
^ equal
^ not equal
^ bitwise and
^ exclusive bitwise or
^ inclusive bitwise or
^ logical and
^ logical or
^ prefix increment operator
^ prefix decrement operator
^ postfix increment operator
^ postfix decrement operator
^ address operator
^ indirection operator
^ prefix plus
^ prefix minus
^ one's complement
^ logical negation
|
module Language.C.Syntax.Ops (
CAssignOp(..),
CUnaryOp(..),
CBinaryOp(..),
)
where
import Data.Generics
-- | C assignment operators ( )
data CAssignOp = CAssignOp
| CMulAssOp
| CDivAssOp
| CAddAssOp
| CSubAssOp
| CShlAssOp
| CShrAssOp
| CAndAssOp
| CXorAssOp
| COrAssOp
deriving (Eq, Ord,Data,Typeable)
instance Show CAssignOp where
show CAssignOp = "="
show CMulAssOp = "*="
show CDivAssOp = "/="
show CRmdAssOp = "%="
show CAddAssOp = "+="
show CSubAssOp = "-="
show CShlAssOp = "<<="
show CShrAssOp = ">>="
show CAndAssOp = "&="
show CXorAssOp = "^="
show COrAssOp = "|="
-- | C binary operators (K&R A7.6-15)
data CBinaryOp = CMulOp
| CDivOp
| CAddOp
| CSubOp
deriving (Eq,Ord,Data,Typeable)
instance Show CBinaryOp where
show CMulOp = "*"
show CDivOp = "/"
show CRmdOp = "%"
show CAddOp = "+"
show CSubOp = "-"
show CShlOp = "<<"
show CShrOp = ">>"
show CLeOp = "<"
show CGrOp = ">"
show CLeqOp = "<="
show CGeqOp = ">="
show CEqOp = "=="
show CNeqOp = "!="
show CAndOp = "&"
show CXorOp = "^"
show COrOp = "|"
show CLndOp = "&&"
show CLorOp = "||"
-- | C unary operator (K&R A7.3-4)
deriving (Eq,Ord,Data,Typeable)
instance Show CUnaryOp where
show CPreIncOp = "++"
show CPreDecOp = "--"
show CPostIncOp = "++"
show CPostDecOp = "--"
show CAdrOp = "&"
show CIndOp = "*"
show CPlusOp = "+"
show CMinOp = "-"
show CCompOp = "~"
show CNegOp = "!"
|
1276413ae89fd3709cce99c26e60e9fd6e411e8ccb717cdace602133d55d7e77
|
alexander-yakushev/ns-graph
|
ns_graph.clj
|
(ns leiningen.ns-graph
"Leiningen plugin."
(:require [ns-graph.core :as core]
[ns-graph.plugin :as plugin]
[leiningen.core.main :as main])
(:import clojure.lang.ExceptionInfo))
(defn ns-graph [project & _]
(let [opts (merge {:name (str (:name project))
:source-paths (concat (:source-paths project)
(:java-source-paths project))}
(:ns-graph project))]
(try (plugin/validate-depgraph-options opts)
(catch ExceptionInfo e
(main/abort (.getMessage e))))
(main/info "Drawing namespace graph...")
(core/depgraph* opts)))
| null |
https://raw.githubusercontent.com/alexander-yakushev/ns-graph/608515958701edcd50c9a101d198bf38a16a2339/src/leiningen/ns_graph.clj
|
clojure
|
(ns leiningen.ns-graph
"Leiningen plugin."
(:require [ns-graph.core :as core]
[ns-graph.plugin :as plugin]
[leiningen.core.main :as main])
(:import clojure.lang.ExceptionInfo))
(defn ns-graph [project & _]
(let [opts (merge {:name (str (:name project))
:source-paths (concat (:source-paths project)
(:java-source-paths project))}
(:ns-graph project))]
(try (plugin/validate-depgraph-options opts)
(catch ExceptionInfo e
(main/abort (.getMessage e))))
(main/info "Drawing namespace graph...")
(core/depgraph* opts)))
|
|
80a07072aa8dfcc3020f500e666549bfcec60bc5d1fe12c3959fc9f40bf79354
|
hadolint/hadolint
|
DL3007Spec.hs
|
module Hadolint.Rule.DL3007Spec (spec) where
import Data.Default
import Helpers
import Test.Hspec
spec :: SpecWith ()
spec = do
let ?config = def
describe "DL3007 - Using latest is prone to errors if the image will ever up\
\date. Pin the version explicitly to a release tag." $ do
it "explicit latest" $ ruleCatches "DL3007" "FROM debian:latest"
it "explicit latest with name" $
ruleCatches "DL3007" "FROM debian:latest AS builder"
it "explicit tagged" $ ruleCatchesNot "DL3007" "FROM debian:jessie"
it "explicit SHA" $
ruleCatchesNot
"DL3007"
"FROM hub.docker.io/debian@sha256:\
\7959ed6f7e35f8b1aaa06d1d8259d4ee25aa85a086d5c125480c333183f9deeb"
it "explicit tagged and SHA" $
ruleCatchesNot
"DL3007"
"FROM hub.docker.io/debian:latest@sha256:\
\7959ed6f7e35f8b1aaa06d1d8259d4ee25aa85a086d5c125480c333183f9deeb"
it "explicit tagged with name" $
ruleCatchesNot "DL3007" "FROM debian:jessie AS builder"
| null |
https://raw.githubusercontent.com/hadolint/hadolint/43d2bfe9f71dea9ddd203d5bdbd2cc1fb512e4dd/test/Hadolint/Rule/DL3007Spec.hs
|
haskell
|
module Hadolint.Rule.DL3007Spec (spec) where
import Data.Default
import Helpers
import Test.Hspec
spec :: SpecWith ()
spec = do
let ?config = def
describe "DL3007 - Using latest is prone to errors if the image will ever up\
\date. Pin the version explicitly to a release tag." $ do
it "explicit latest" $ ruleCatches "DL3007" "FROM debian:latest"
it "explicit latest with name" $
ruleCatches "DL3007" "FROM debian:latest AS builder"
it "explicit tagged" $ ruleCatchesNot "DL3007" "FROM debian:jessie"
it "explicit SHA" $
ruleCatchesNot
"DL3007"
"FROM hub.docker.io/debian@sha256:\
\7959ed6f7e35f8b1aaa06d1d8259d4ee25aa85a086d5c125480c333183f9deeb"
it "explicit tagged and SHA" $
ruleCatchesNot
"DL3007"
"FROM hub.docker.io/debian:latest@sha256:\
\7959ed6f7e35f8b1aaa06d1d8259d4ee25aa85a086d5c125480c333183f9deeb"
it "explicit tagged with name" $
ruleCatchesNot "DL3007" "FROM debian:jessie AS builder"
|
|
53ddee1b3982a1588b2ae8e860a65ad92e089f3ac6b1a1365da9f1a9946b717b
|
dannypsnl/k
|
bool.rkt
|
#lang k/base
(provide (data-out Bool)
not)
(data Bool : Type
[true : Bool]
[false : Bool])
(def (not [b : Bool]) : Bool
[false => true]
[true => false])
(data True? : (-> Bool Type)
[true? : (True? true)])
| null |
https://raw.githubusercontent.com/dannypsnl/k/2b5f5066806a5bbd0733b781a2ed5fce6956a4f5/k-lib/k/data/bool.rkt
|
racket
|
#lang k/base
(provide (data-out Bool)
not)
(data Bool : Type
[true : Bool]
[false : Bool])
(def (not [b : Bool]) : Bool
[false => true]
[true => false])
(data True? : (-> Bool Type)
[true? : (True? true)])
|
|
e4a87f359341dc3e980e762e89fbd6618e7466b20db20ef25aff47489eb18f57
|
fragnix/fragnix
|
Data.Text.Internal.Encoding.Utf32.hs
|
{-# LANGUAGE Haskell98 #-}
{-# LINE 1 "Data/Text/Internal/Encoding/Utf32.hs" #-}
-- |
-- Module : Data.Text.Internal.Encoding.Utf32
-- Copyright : (c) 2008, 2009,
--             (c) 2009, 2010,
--             (c) 2009
--
-- License : BSD-style
-- Maintainer :
-- Stability : experimental
-- Portability : portable
--
-- /Warning/: this is an internal module, and does not have a stable
-- API or name. Functions in this module may not check or enforce
-- preconditions expected by public modules. Use at your own risk!
--
-- Basic UTF-32 validation.
module Data.Text.Internal.Encoding.Utf32
(
validate
) where
import Data.Word (Word32)
validate :: Word32 -> Bool
validate x1 = x1 < 0xD800 || (x1 > 0xDFFF && x1 <= 0x10FFFF)
{-# INLINE validate #-}
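-- Illustrative sketch, not part of the original module: 'validate' accepts
-- exactly the Unicode scalar values and rejects surrogates and anything
-- beyond U+10FFFF.
validateExamples :: [(Word32, Bool)]
validateExamples =
  [ (0x0041,   True)   -- 'A'
  , (0xD7FF,   True)   -- last code point before the surrogate block
  , (0xD800,   False)  -- surrogates are not valid UTF-32 code units
  , (0xDFFF,   False)
  , (0xE000,   True)
  , (0x10FFFF, True)   -- highest Unicode scalar value
  , (0x110000, False)  -- beyond the Unicode range
  ]
-- all (\(w, expected) -> validate w == expected) validateExamples == True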
| null |
https://raw.githubusercontent.com/fragnix/fragnix/b9969e9c6366e2917a782f3ac4e77cce0835448b/tests/packages/scotty/Data.Text.Internal.Encoding.Utf32.hs
|
haskell
|
|
Module : Data.Text.Internal.Encoding.Utf32
License : BSD-style
Maintainer :
Stability : experimental
Portability : portable
/Warning/: this is an internal module, and does not have a stable
API or name. Functions in this module may not check or enforce
preconditions expected by public modules. Use at your own risk!
Basic UTF-32 validation.
|
{-# LANGUAGE Haskell98 #-}
{-# LINE 1 "Data/Text/Internal/Encoding/Utf32.hs" #-}
module Data.Text.Internal.Encoding.Utf32
(
validate
) where
import Data.Word (Word32)
validate :: Word32 -> Bool
validate x1 = x1 < 0xD800 || (x1 > 0xDFFF && x1 <= 0x10FFFF)
{-# INLINE validate #-}
|
a99f6fe3a2e38b8215499920f858b3ac2719a09a6a5983286e4ceb59735f74ac
|
cobbpg/lafonten
|
SimpleDistanceField.hs
|
{-# LANGUAGE ParallelListComp, OverloadedStrings, TypeOperators, DataKinds #-}
module LambdaCube.Font.SimpleDistanceField
( fontRenderer
, sampleDistance
) where
import Data.ByteString (ByteString)
import Data.Vect
import qualified Data.Trie as T
import qualified Data.Vector.Storable as SV
import Graphics.Text.TrueType
import LambdaCube.GL
import LambdaCube.GL.Mesh
import LambdaCube.Font.Atlas
import LambdaCube.Font.Common
type OutlineVertices = [([Vec2], [Vec2], [Vec2])]
type FillTriangles = [Vec2]
type OutlineTriangles = [(Vec2, Float)]
-- | A font renderer that uses a basic distance field to create curves that remain sharp even when magnified. The type of the
-- exported is @Float Red@.
fontRenderer :: FontRenderer
fontRenderer = FontRenderer pipeline clearSurface bakeGlyph
-- | A fragment shader snippet to sample the distance field. Takes the texture slot name and the uv coordinates as parameters.
sampleDistance :: ByteString -> Exp F V2F -> Exp F Float
sampleDistance slotName uv = texture' (Sampler LinearFilter Repeat tex) uv
where
tex = TextureSlot slotName (Texture2D (Float Red) n1)
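-- Illustrative sketch, not part of the original module: a fragment shader on
-- the user side can feed the sampled distance straight into its output; the
-- slot name "atlas" is an assumption and must match the texture slot bound by
-- the application. Real text rendering would typically threshold the distance
-- around 0.5 instead of writing it out verbatim.
distanceDebugFragmentShader uv = FragmentOut (sampleDistance "atlas" uv :. ZT)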
pipeline :: FontAtlasOptions -> GPOutput SingleOutput
pipeline options = makeSamplerOut [PrjFrameBuffer "" tix0 bakePipeline]
where
makeSamplerOut = SamplerOut "atlas" . Sampler LinearFilter Repeat . Texture (Texture2D (Float Red) n1) textureSize NoMip
size = atlasSize options
textureSize = V2 (fromIntegral size) (fromIntegral size)
bakeGlyph :: FontAtlas -> OutlineCurves -> Vec2 -> Vec2 -> IO ()
bakeGlyph atlas @ FontAtlas { atlasFont = font, atlasOptions = options } curves bakeOffset atlasOffset = do
outlineOuterMesh <- compileMesh (makeOutlineMesh outlineOuterTriangles)
outlineInnerMesh <- compileMesh (makeOutlineMesh outlineInnerTriangles)
fillMesh <- compileMesh (makeFillMesh fillTriangles)
outlineOuterObject <- addMesh renderer "outlineOuterStream" outlineOuterMesh []
outlineInnerObject <- addMesh renderer "outlineInnerStream" outlineInnerMesh []
fillObject <- addMesh renderer "fillStream" fillMesh []
uniformM33F "charToAtlasMatrix" uniformMap charToAtlasMatrix
render renderer
removeObject renderer outlineOuterObject
removeObject renderer outlineInnerObject
removeObject renderer fillObject
where
outlineVertices = letterOutlineVertices outlineThickness curves
fillTriangles = letterFillTriangles outlineVertices
outlineOuterTriangles = letterOutlineOuterTriangles outlineVertices
outlineInnerTriangles = letterOutlineInnerTriangles outlineVertices
outlineThickness = padding * emSize / letterScale
renderer = atlasRenderer atlas
uniformMap = uniformSetter renderer
padding = fromIntegral (atlasLetterPadding options)
letterScale = fromIntegral (atlasLetterScale options)
emSize = fromIntegral (unitsPerEm font)
charToAtlasMatrix = V3 (V3 scale 0 0) (V3 0 scale 0) (V3 xt yt 1)
texelScale = 2 / fromIntegral (atlasSize options)
scale = texelScale * letterScale / emSize
padOffset = padding * texelScale
Vec2 xt yt = Vec2 padOffset padOffset &- bakeOffset &* scale &+ atlasOffset
clearSurface :: Renderer -> IO ()
clearSurface renderer = do
uniformM33F "charToAtlasMatrix" uniforms (V3 (V3 1 0 0) (V3 0 1 0) (V3 0 0 1))
clearMeshInner <- compileMesh blackQuad
clearMeshOuter <- compileMesh blackQuad
clearObjectInner <- addMesh renderer "outlineInnerStream" clearMeshInner []
clearObjectOuter <- addMesh renderer "outlineOuterStream" clearMeshOuter []
render renderer
removeObject renderer clearObjectInner
removeObject renderer clearObjectOuter
where
uniforms = uniformSetter renderer
blackQuad = Mesh
{ mAttributes = T.fromList
[ ("position", A_V2F (SV.fromList [V2 (-1) (-1), V2 1 (-1), V2 1 1, V2 1 1, V2 (-1) 1, V2 (-1) (-1)]))
, ("distance", A_Float (SV.fromList [0, 0, 0, 0, 0, 0]))
]
, mPrimitive = P_Triangles
, mGPUData = Nothing
}
bakePipeline :: Exp Obj (FrameBuffer 1 Float)
bakePipeline = (outlineInner . outlineOuter . fill) emptyBuffer
where
outlineOuter = Accumulate (outlineFragmentCtx Max) PassAll outlineFragmentShader (outlineFragmentStream "outlineOuterStream")
outlineInner = Accumulate (outlineFragmentCtx Min) PassAll outlineFragmentShader (outlineFragmentStream "outlineInnerStream")
fill = Accumulate fillFragmentCtx PassAll fillFragmentShader fillFragmentStream
emptyBuffer = FrameBuffer (UnclearedImage n1 :. ZT)
rasterCtx = TriangleCtx CullNone PolygonFill NoOffset LastVertex
outlineVertices slotName = Fetch slotName Triangles (IV2F "position", IFloat "distance")
outlineFragmentStream = Rasterize rasterCtx . Transform outlineVertexShader . outlineVertices
outlineFragmentCtx op = AccumulationContext Nothing (ColorOp (outlineBlending op) True :. ZT)
outlineBlending op = Blend (op, op) ((One, One), (One, One)) zero'
outlineVertexShader attr = VertexOut (transformVertex pos) (floatV 1) ZT (Smooth dist :. ZT)
where
(pos, dist) = untup2 attr
outlineFragmentShader distance = FragmentOut (distance :. ZT)
fillVertices = Fetch "fillStream" Triangles (IV2F "position")
fillFragmentCtx = AccumulationContext Nothing (ColorOp fillBlending True :. ZT)
fillBlending = Blend (FuncSubtract, FuncSubtract) ((One, One), (One, One)) zero'
fillFragmentStream = Rasterize rasterCtx fillPrimitiveStream
fillPrimitiveStream = Transform fillVertexShader fillVertices
fillVertexShader pos = VertexOut (transformVertex pos) (floatV 1) ZT ZT
fillFragmentShader _ = FragmentOut (floatF 1 :. ZT)
transformVertex vertex = v3v4 (charToAtlas @*. v2v3 vertex)
where
charToAtlas = Uni (IM33F "charToAtlasMatrix") :: Exp V M33F
letterOutlineVertices :: Float -> OutlineCurves -> OutlineVertices
letterOutlineVertices outlineThickness = map (offsetEdges . removeCollapsedEdges . duplicateLast)
where
miterLimit = 4
duplicateLast xs = last xs : xs
removeCollapsedEdges (v1:v2:vs) =
if lensqr (v1 &- v2) > outlineThickness * 0.001
then v1 : removeCollapsedEdges (v2:vs)
else removeCollapsedEdges (v1:vs)
removeCollapsedEdges vs = vs
offsetEdges vertices = (innerVertices, vertices, outerVertices)
where
outerVertices = [v &- o &* outlineThickness | v <- vertices | o <- cycle offsets]
innerVertices = [v &+ o &* outlineThickness | v <- vertices | o <- cycle offsets]
edges = [v2 &- v1 | v1 <- vertices | v2 <- tail vertices]
edgeNormals = map (normalize . turn) edges
vertexNormals = [normalize (n1 &+ n2) | n1 <- last edgeNormals : edgeNormals | n2 <- edgeNormals]
offsets = [nv &* min miterLimit (recip (nv &. ne)) | ne <- edgeNormals | nv <- vertexNormals]
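-- Note: in 'letterOutlineVertices' above, each offset is the averaged vertex normal
-- scaled by 1/(nv . ne), the miter length that keeps the shifted contour at unit
-- distance from both incident edges; the factor is clamped by miterLimit so it
-- cannot blow up at very sharp corners.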
letterFillTriangles :: OutlineVertices -> FillTriangles
letterFillTriangles = concatMap makePoly
where
makePoly (_, v:vs, _) = concat (zipWith makeTri (init vs) (tail vs))
where
makeTri v1 v2 = [v, v1, v2]
letterOutlineOuterTriangles :: OutlineVertices -> OutlineTriangles
letterOutlineOuterTriangles = concatMap makeQuads
where
makeQuads (i1:i2:is, m1:m2:ms, o1:o2:os) = (m1, m) : (o1, o) : (o2, o) : (o2, o) : (m2, m) : (m1, m) : makeQuads (i2:is, m2:ms, o2:os)
makeQuads _ = []
m = 0.5
o = 0
letterOutlineInnerTriangles :: OutlineVertices -> OutlineTriangles
letterOutlineInnerTriangles = concatMap makeQuads
where
makeQuads (i1:i2:is, m1:m2:ms, o1:o2:os) = (i1, i) : (m1, m) : (m2, m) : (m2, m) : (i2, i) : (i1, i) : makeQuads (i2:is, m2:ms, o2:os)
makeQuads _ = []
i = 1
m = 0.5
makeFillMesh :: FillTriangles -> Mesh
makeFillMesh fillTriangles = Mesh
{ mAttributes = T.fromList
[ ("position", A_V2F $ SV.fromList [V2 x y | Vec2 x y <- fillTriangles])
]
, mPrimitive = P_Triangles
, mGPUData = Nothing
}
makeOutlineMesh :: OutlineTriangles -> Mesh
makeOutlineMesh outlineTriangles = Mesh
{ mAttributes = T.fromList
[ ("position", A_V2F $ SV.fromList [V2 x y | (Vec2 x y, _) <- outlineTriangles])
, ("distance", A_Float $ SV.fromList [d | (_, d) <- outlineTriangles])
]
, mPrimitive = P_Triangles
, mGPUData = Nothing
}
| null |
https://raw.githubusercontent.com/cobbpg/lafonten/2d95172085d44ce3eedef5e50e7d4df693f4562a/src/lib/LambdaCube/Font/SimpleDistanceField.hs
|
haskell
|
| A font renderer that uses a basic distance field to create curves that remain sharp even when magnified. The type of the
| A fragment shader snippet to sample the distance field. Takes the texture slot name and the uv coordinates as parameters.
|
# LANGUAGE ParallelListComp , OverloadedStrings , TypeOperators , DataKinds #
module LambdaCube.Font.SimpleDistanceField
( fontRenderer
, sampleDistance
) where
import Data.ByteString (ByteString)
import Data.Vect
import qualified Data.Trie as T
import qualified Data.Vector.Storable as SV
import Graphics.Text.TrueType
import LambdaCube.GL
import LambdaCube.GL.Mesh
import LambdaCube.Font.Atlas
import LambdaCube.Font.Common
type OutlineVertices = [([Vec2], [Vec2], [Vec2])]
type FillTriangles = [Vec2]
type OutlineTriangles = [(Vec2, Float)]
-- exported is @Float Red@.
fontRenderer :: FontRenderer
fontRenderer = FontRenderer pipeline clearSurface bakeGlyph
sampleDistance :: ByteString -> Exp F V2F -> Exp F Float
sampleDistance slotName uv = texture' (Sampler LinearFilter Repeat tex) uv
where
tex = TextureSlot slotName (Texture2D (Float Red) n1)
pipeline :: FontAtlasOptions -> GPOutput SingleOutput
pipeline options = makeSamplerOut [PrjFrameBuffer "" tix0 bakePipeline]
where
makeSamplerOut = SamplerOut "atlas" . Sampler LinearFilter Repeat . Texture (Texture2D (Float Red) n1) textureSize NoMip
size = atlasSize options
textureSize = V2 (fromIntegral size) (fromIntegral size)
bakeGlyph :: FontAtlas -> OutlineCurves -> Vec2 -> Vec2 -> IO ()
bakeGlyph atlas @ FontAtlas { atlasFont = font, atlasOptions = options } curves bakeOffset atlasOffset = do
outlineOuterMesh <- compileMesh (makeOutlineMesh outlineOuterTriangles)
outlineInnerMesh <- compileMesh (makeOutlineMesh outlineInnerTriangles)
fillMesh <- compileMesh (makeFillMesh fillTriangles)
outlineOuterObject <- addMesh renderer "outlineOuterStream" outlineOuterMesh []
outlineInnerObject <- addMesh renderer "outlineInnerStream" outlineInnerMesh []
fillObject <- addMesh renderer "fillStream" fillMesh []
uniformM33F "charToAtlasMatrix" uniformMap charToAtlasMatrix
render renderer
removeObject renderer outlineOuterObject
removeObject renderer outlineInnerObject
removeObject renderer fillObject
where
outlineVertices = letterOutlineVertices outlineThickness curves
fillTriangles = letterFillTriangles outlineVertices
outlineOuterTriangles = letterOutlineOuterTriangles outlineVertices
outlineInnerTriangles = letterOutlineInnerTriangles outlineVertices
outlineThickness = padding * emSize / letterScale
renderer = atlasRenderer atlas
uniformMap = uniformSetter renderer
padding = fromIntegral (atlasLetterPadding options)
letterScale = fromIntegral (atlasLetterScale options)
emSize = fromIntegral (unitsPerEm font)
charToAtlasMatrix = V3 (V3 scale 0 0) (V3 0 scale 0) (V3 xt yt 1)
texelScale = 2 / fromIntegral (atlasSize options)
scale = texelScale * letterScale / emSize
padOffset = padding * texelScale
Vec2 xt yt = Vec2 padOffset padOffset &- bakeOffset &* scale &+ atlasOffset
clearSurface :: Renderer -> IO ()
clearSurface renderer = do
uniformM33F "charToAtlasMatrix" uniforms (V3 (V3 1 0 0) (V3 0 1 0) (V3 0 0 1))
clearMeshInner <- compileMesh blackQuad
clearMeshOuter <- compileMesh blackQuad
clearObjectInner <- addMesh renderer "outlineInnerStream" clearMeshInner []
clearObjectOuter <- addMesh renderer "outlineOuterStream" clearMeshOuter []
render renderer
removeObject renderer clearObjectInner
removeObject renderer clearObjectOuter
where
uniforms = uniformSetter renderer
blackQuad = Mesh
{ mAttributes = T.fromList
[ ("position", A_V2F (SV.fromList [V2 (-1) (-1), V2 1 (-1), V2 1 1, V2 1 1, V2 (-1) 1, V2 (-1) (-1)]))
, ("distance", A_Float (SV.fromList [0, 0, 0, 0, 0, 0]))
]
, mPrimitive = P_Triangles
, mGPUData = Nothing
}
bakePipeline :: Exp Obj (FrameBuffer 1 Float)
bakePipeline = (outlineInner . outlineOuter . fill) emptyBuffer
where
outlineOuter = Accumulate (outlineFragmentCtx Max) PassAll outlineFragmentShader (outlineFragmentStream "outlineOuterStream")
outlineInner = Accumulate (outlineFragmentCtx Min) PassAll outlineFragmentShader (outlineFragmentStream "outlineInnerStream")
fill = Accumulate fillFragmentCtx PassAll fillFragmentShader fillFragmentStream
emptyBuffer = FrameBuffer (UnclearedImage n1 :. ZT)
rasterCtx = TriangleCtx CullNone PolygonFill NoOffset LastVertex
outlineVertices slotName = Fetch slotName Triangles (IV2F "position", IFloat "distance")
outlineFragmentStream = Rasterize rasterCtx . Transform outlineVertexShader . outlineVertices
outlineFragmentCtx op = AccumulationContext Nothing (ColorOp (outlineBlending op) True :. ZT)
outlineBlending op = Blend (op, op) ((One, One), (One, One)) zero'
outlineVertexShader attr = VertexOut (transformVertex pos) (floatV 1) ZT (Smooth dist :. ZT)
where
(pos, dist) = untup2 attr
outlineFragmentShader distance = FragmentOut (distance :. ZT)
fillVertices = Fetch "fillStream" Triangles (IV2F "position")
fillFragmentCtx = AccumulationContext Nothing (ColorOp fillBlending True :. ZT)
fillBlending = Blend (FuncSubtract, FuncSubtract) ((One, One), (One, One)) zero'
fillFragmentStream = Rasterize rasterCtx fillPrimitiveStream
fillPrimitiveStream = Transform fillVertexShader fillVertices
fillVertexShader pos = VertexOut (transformVertex pos) (floatV 1) ZT ZT
fillFragmentShader _ = FragmentOut (floatF 1 :. ZT)
transformVertex vertex = v3v4 (charToAtlas @*. v2v3 vertex)
where
charToAtlas = Uni (IM33F "charToAtlasMatrix") :: Exp V M33F
letterOutlineVertices :: Float -> OutlineCurves -> OutlineVertices
letterOutlineVertices outlineThickness = map (offsetEdges . removeCollapsedEdges . duplicateLast)
where
miterLimit = 4
duplicateLast xs = last xs : xs
removeCollapsedEdges (v1:v2:vs) =
if lensqr (v1 &- v2) > outlineThickness * 0.001
then v1 : removeCollapsedEdges (v2:vs)
else removeCollapsedEdges (v1:vs)
removeCollapsedEdges vs = vs
offsetEdges vertices = (innerVertices, vertices, outerVertices)
where
outerVertices = [v &- o &* outlineThickness | v <- vertices | o <- cycle offsets]
innerVertices = [v &+ o &* outlineThickness | v <- vertices | o <- cycle offsets]
edges = [v2 &- v1 | v1 <- vertices | v2 <- tail vertices]
edgeNormals = map (normalize . turn) edges
vertexNormals = [normalize (n1 &+ n2) | n1 <- last edgeNormals : edgeNormals | n2 <- edgeNormals]
offsets = [nv &* min miterLimit (recip (nv &. ne)) | ne <- edgeNormals | nv <- vertexNormals]
letterFillTriangles :: OutlineVertices -> FillTriangles
letterFillTriangles = concatMap makePoly
where
makePoly (_, v:vs, _) = concat (zipWith makeTri (init vs) (tail vs))
where
makeTri v1 v2 = [v, v1, v2]
letterOutlineOuterTriangles :: OutlineVertices -> OutlineTriangles
letterOutlineOuterTriangles = concatMap makeQuads
where
makeQuads (i1:i2:is, m1:m2:ms, o1:o2:os) = (m1, m) : (o1, o) : (o2, o) : (o2, o) : (m2, m) : (m1, m) : makeQuads (i2:is, m2:ms, o2:os)
makeQuads _ = []
m = 0.5
o = 0
letterOutlineInnerTriangles :: OutlineVertices -> OutlineTriangles
letterOutlineInnerTriangles = concatMap makeQuads
where
makeQuads (i1:i2:is, m1:m2:ms, o1:o2:os) = (i1, i) : (m1, m) : (m2, m) : (m2, m) : (i2, i) : (i1, i) : makeQuads (i2:is, m2:ms, o2:os)
makeQuads _ = []
i = 1
m = 0.5
makeFillMesh :: FillTriangles -> Mesh
makeFillMesh fillTriangles = Mesh
{ mAttributes = T.fromList
[ ("position", A_V2F $ SV.fromList [V2 x y | Vec2 x y <- fillTriangles])
]
, mPrimitive = P_Triangles
, mGPUData = Nothing
}
makeOutlineMesh :: OutlineTriangles -> Mesh
makeOutlineMesh outlineTriangles = Mesh
{ mAttributes = T.fromList
[ ("position", A_V2F $ SV.fromList [V2 x y | (Vec2 x y, _) <- outlineTriangles])
, ("distance", A_Float $ SV.fromList [d | (_, d) <- outlineTriangles])
]
, mPrimitive = P_Triangles
, mGPUData = Nothing
}
|
c652bc2b56f86977a46f3a5261b186f063603ec6b149ca8bd3a43315c1f3bbcb
|
dgiot/dgiot
|
td_sup.erl
|
%%--------------------------------------------------------------------
%% Copyright (c) 2020-2021 DGIOT Technologies Co., Ltd. All Rights Reserved.
%%
%% Licensed under the Apache License, Version 2.0 (the "License");
%% you may not use this file except in compliance with the License.
%% You may obtain a copy of the License at
%%
%% -2.0
%%
%% Unless required by applicable law or agreed to in writing, software
%% distributed under the License is distributed on an "AS IS" BASIS,
%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
%% See the License for the specific language governing permissions and
%% limitations under the License.
%%--------------------------------------------------------------------
-module(td_sup).
-behaviour(supervisor).
-export([start_link/1, init/1]).
start_link(Name) ->
supervisor:start_link({local, Name}, ?MODULE, []).
init([]) ->
Child = [{td_worker, {td_worker, start_link, []}, transient, 5000, worker, [td_worker]}],
{ok, {{simple_one_for_one, 5, 10}, Child}}.
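%% Note: with the simple_one_for_one strategy no td_worker is started when this
%% supervisor boots; workers are spawned on demand with something like
%% supervisor:start_child(Name, ExtraArgs) (call site not shown here), where the
%% ExtraArgs list is appended to the empty argument list of td_worker:start_link.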
| null |
https://raw.githubusercontent.com/dgiot/dgiot/c9f2f78af71692ba532e4806621b611db2afe0c9/apps/dgiot_tdengine/src/pools/td_sup.erl
|
erlang
|
--------------------------------------------------------------------
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
-2.0
Unless required by applicable law or agreed to in writing, software
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
--------------------------------------------------------------------
|
Copyright ( c ) 2020 - 2021 DGIOT Technologies Co. , Ltd. All Rights Reserved .
Licensed under the Apache License , Version 2.0 ( the " License " ) ;
distributed under the License is distributed on an " AS IS " BASIS ,
-module(td_sup).
-behaviour(supervisor).
-export([start_link/1, init/1]).
start_link(Name) ->
supervisor:start_link({local, Name}, ?MODULE, []).
init([]) ->
Child = [{td_worker, {td_worker, start_link, []}, transient, 5000, worker, [td_worker]}],
{ok, {{simple_one_for_one, 5, 10}, Child}}.
|
bb7a063b46bff6b878a1a07715c6d51709b5723651f76fd9094dd52fc196ae3a
|
flavioc/cl-hurd
|
dir-unlink.lisp
|
(in-package :hurd-translator)
(def-fs-interface :dir-unlink ((port port)
(name :string))
(with-lookup protid port
(let* ((node (get-node protid))
(user (get-user protid))
(found-node (directory-lookup *translator*
node
user
name)))
(cond
(found-node
(let ((err (remove-directory-entry *translator*
node
user
name)))
(cond
((eq err t) t)
((eq err nil) :not-permitted)
(t err))))
(t :no-such-file)))))
| null |
https://raw.githubusercontent.com/flavioc/cl-hurd/982232f47d1a0ff4df5fde2edad03b9df871470a/translator/interfaces/dir-unlink.lisp
|
lisp
|
(in-package :hurd-translator)
(def-fs-interface :dir-unlink ((port port)
(name :string))
(with-lookup protid port
(let* ((node (get-node protid))
(user (get-user protid))
(found-node (directory-lookup *translator*
node
user
name)))
(cond
(found-node
(let ((err (remove-directory-entry *translator*
node
user
name)))
(cond
((eq err t) t)
((eq err nil) :not-permitted)
(t err))))
(t :no-such-file)))))
|
|
01d8989357dc66e57e80fc9bd503be59b41db721a5f37e57fbde0d5a82de35ec
|
ocurrent/ocaml-docs-ci
|
storage.ml
|
module Base = struct
type repository = HtmlRaw of Epoch.t | Linked of Epoch.t | Compile | Prep
let generation_folder stage generation = Fpath.(v ("epoch-" ^ Epoch.digest stage generation))
let folder = function
| HtmlRaw generation -> Fpath.(generation_folder `Html generation / "html-raw")
| Linked generation -> Fpath.(generation_folder `Linked generation / "linked")
| Compile -> Fpath.v "compile"
| Prep -> Fpath.v "prep"
end
type repository =
| HtmlRaw of (Epoch.t * Package.Blessing.t)
| Linked of (Epoch.t * Package.Blessing.t)
| Compile of Package.Blessing.t
| Prep
let to_base_repo = function
| HtmlRaw (t, _) -> Base.HtmlRaw t
| Linked (t, _) -> Linked t
| Compile _ -> Compile
| Prep -> Prep
let base_folder ~blessed ~prep package =
let universes = if prep then "universes" else "u" in
let universe = Package.universe package |> Package.Universe.hash in
let opam = Package.opam package in
let name = OpamPackage.name_to_string opam in
let version = OpamPackage.version_to_string opam in
if blessed then Fpath.(v "p" / name / version)
else Fpath.(v universes / universe / name / version)
let folder repository package =
let blessed =
match repository with HtmlRaw (_, b) | Linked (_, b) | Compile b -> b | Prep -> Universe
in
let blessed = blessed = Blessed in
Fpath.(
Base.folder (to_base_repo repository) // base_folder ~blessed ~prep:(repository = Prep) package)
let for_all packages command =
let data =
let pp_package f (repository, package) =
let dir = folder repository package |> Fpath.to_string in
let id = Package.id package in
Fmt.pf f "%s,%s,%s" dir id (Package.opam package |> OpamPackage.to_string)
in
Fmt.(to_to_string (list ~sep:(const string " ") pp_package) packages)
in
Fmt.str "for DATA in %s; do IFS=\",\"; set -- $DATA; %s done" data command
type id_hash = { id : string; hash : string } [@@deriving yojson]
module Tar = struct
let hash_command ?(extra_files = []) ~prefix () =
match extra_files with
| [] ->
Fmt.str
"HASH=$((sha256sum $1/content.tar | cut -d \" \" -f 1) || echo -n 'empty'); printf \
\"%s:$2:$HASH\\n\";"
prefix
| extra_files ->
Fmt.str
"HASH=$((sha256sum $1/content.tar %s | sort | sha256sum | cut -d \" \" -f 1) || echo -n \
'empty'); printf \"%s:$2:$HASH\\n\";"
(List.map (fun f -> "\"$1/" ^ f ^ "\"") extra_files |> String.concat " ")
prefix
end
let hash_command ~prefix =
Fmt.str
"HASH=$(find $1 -type f -exec sha256sum {} \\; | sort | sha256sum); printf \"%s:$2:$HASH\\n\";"
prefix
let parse_hash ~prefix line =
match String.split_on_char ':' line with
| [ prev; id; hash ] when Astring.String.is_suffix ~affix:prefix prev -> Some { id; hash }
| _ -> None
| null |
https://raw.githubusercontent.com/ocurrent/ocaml-docs-ci/cb5d4a54a7fd9883aec066b4bd1fcb50ca42e7bc/src/lib/storage.ml
|
ocaml
|
module Base = struct
type repository = HtmlRaw of Epoch.t | Linked of Epoch.t | Compile | Prep
let generation_folder stage generation = Fpath.(v ("epoch-" ^ Epoch.digest stage generation))
let folder = function
| HtmlRaw generation -> Fpath.(generation_folder `Html generation / "html-raw")
| Linked generation -> Fpath.(generation_folder `Linked generation / "linked")
| Compile -> Fpath.v "compile"
| Prep -> Fpath.v "prep"
end
type repository =
| HtmlRaw of (Epoch.t * Package.Blessing.t)
| Linked of (Epoch.t * Package.Blessing.t)
| Compile of Package.Blessing.t
| Prep
let to_base_repo = function
| HtmlRaw (t, _) -> Base.HtmlRaw t
| Linked (t, _) -> Linked t
| Compile _ -> Compile
| Prep -> Prep
let base_folder ~blessed ~prep package =
let universes = if prep then "universes" else "u" in
let universe = Package.universe package |> Package.Universe.hash in
let opam = Package.opam package in
let name = OpamPackage.name_to_string opam in
let version = OpamPackage.version_to_string opam in
if blessed then Fpath.(v "p" / name / version)
else Fpath.(v universes / universe / name / version)
let folder repository package =
let blessed =
match repository with HtmlRaw (_, b) | Linked (_, b) | Compile b -> b | Prep -> Universe
in
let blessed = blessed = Blessed in
Fpath.(
Base.folder (to_base_repo repository) // base_folder ~blessed ~prep:(repository = Prep) package)
let for_all packages command =
let data =
let pp_package f (repository, package) =
let dir = folder repository package |> Fpath.to_string in
let id = Package.id package in
Fmt.pf f "%s,%s,%s" dir id (Package.opam package |> OpamPackage.to_string)
in
Fmt.(to_to_string (list ~sep:(const string " ") pp_package) packages)
in
Fmt.str "for DATA in %s; do IFS=\",\"; set -- $DATA; %s done" data command
type id_hash = { id : string; hash : string } [@@deriving yojson]
module Tar = struct
let hash_command ?(extra_files = []) ~prefix () =
match extra_files with
| [] ->
Fmt.str
"HASH=$((sha256sum $1/content.tar | cut -d \" \" -f 1) || echo -n 'empty'); printf \
\"%s:$2:$HASH\\n\";"
prefix
| extra_files ->
Fmt.str
"HASH=$((sha256sum $1/content.tar %s | sort | sha256sum | cut -d \" \" -f 1) || echo -n \
'empty'); printf \"%s:$2:$HASH\\n\";"
(List.map (fun f -> "\"$1/" ^ f ^ "\"") extra_files |> String.concat " ")
prefix
end
let hash_command ~prefix =
Fmt.str
"HASH=$(find $1 -type f -exec sha256sum {} \\; | sort | sha256sum); printf \"%s:$2:$HASH\\n\";"
prefix
let parse_hash ~prefix line =
match String.split_on_char ':' line with
| [ prev; id; hash ] when Astring.String.is_suffix ~affix:prefix prev -> Some { id; hash }
| _ -> None
|
|
daa1798aa5f512fbd3178def72e4292f41a7e89768e59cfef3fbc284277c356c
|
gdamore/tree-sitter-d
|
shebang.scm
|
==================
Shebang
==================
#!/bin/sh
---
(source_file (shebang))
===================
Shebang + Directive
===================
#!/bin/sh
# something
# line 3
---
(source_file (shebang) (directive) (directive))
| null |
https://raw.githubusercontent.com/gdamore/tree-sitter-d/d7b3cc1f9f1f59518457696a8547e0e1bb1a64b4/test/corpus/shebang.scm
|
scheme
|
==================
Shebang
==================
#!/bin/sh
---
(source_file (shebang))
===================
Shebang + Directive
===================
#!/bin/sh
# something
# line 3
---
(source_file (shebang) (directive) (directive))
|
|
d552a3a5c173f29b017de7da0788e794cafcd05ff1aabeae20c80ddcfa47ef16
|
felipereigosa/mock-mechanics
|
core.clj
|
(ns mockmechanics.core
(:require [mockmechanics.library.vector :as vector]))
(load "world")
(load "analytic-geometry")
(load "camera")
(load "transforms")
(load "window")
(load "picture")
(load "debug")
(load "synthesizer")
(load "miscellaneous")
(load "output")
(load "parts")
(load "collision")
(load "weld-optimization")
(load "mechanical-tree")
(load "gears")
(load "undo")
(load "persistence")
(load "cables")
(load "forces")
(load "modes")
(load "replayer")
(load "commands")
(load "track-loop")
(load "hints")
(load "bone-animation")
(load "input-indicator")
(do
1
(defn create-world []
(delete-temp-files!)
(-> (create-base-world)
(merge (read-string (str "{" (slurp "settings.clj") "}")))
(assoc-in [:num-lines] 1)
(assoc-in [:background-meshes :grid] (create-grid-mesh 24 0.5))
(assoc-in [:info] (create-info))
(assoc-in [:parts] {})
(assoc-in [:parts :ground-part] (create-ground-part))
(assoc-in [:other-ground]
(create-cube-mesh [0 -0.1 0] [1 0 0 0] [12 0.2 12]
(make-color 40 40 40)))
(assoc-in [:graph-box] {:x 343 :y 530
:w 685 :h 150
:buffer (new-image 685 150)
})
(assoc-in [:motherboard-box] {:x 343 :y 530
:w 685 :h 150
:buffer (new-image 685 150)})
(assoc-in [:property-box]
(create-picture "property-menu" 240 340 -1 100))
(assoc-in [:layer-box]
(create-picture "layer-menu" 240 340 -1 200))
(create-layer-info)
(assoc-in [:command] "")
(assoc-in [:bindings] (get-bindings))
(assoc-in [:current-color] :red)
(assoc-in [:action-menu]
(create-picture "action-menu" 240 340 40 -1))
(assoc-in [:mode-menu]
(create-picture "mode-menu" 240 340 40 -1))
(assoc-in [:color-palette]
(create-picture "colors" 340 585 -1 40))
(assoc-in [:add-menu]
(create-picture "add-menu" 726 675 -1 50))
(assoc-in [:add-type] :block)
(assoc-in [:edit-menu]
(create-picture "edit-menu" 210 575 -1 50))
(assoc-in [:edit-subcommand] :move)
(assoc-in [:use-weld-groups] true)
(assoc-in [:graph-snap-value] 0.05)
(assoc-in [:graph-menu]
(create-picture "graph-menu" 210 575 -1 30))
(assoc-in [:graph-subcommand] :move)
(assoc-in [:motherboard-menu]
(create-picture "motherboard-menu" 210 575 -1 30))
(assoc-in [:motherboard-subcommand] :move)
(assoc-in [:selected-property] 0)
(create-physics-world)
(reset-undo! [:parts :gears])
(assoc-in [:mode] :simulation)
(assoc-in [:track-head-model]
(create-cube-mesh [0 -10000 0] [1 0 0 0] 0.2 :white))
(assoc-in [:cage] (create-wireframe-cube [0 0 0] [1 0 0 0]
[1 1 1] :white))
(reset-avatar)
(load-avatar-meshes)
(place-elements)
(create-weld-groups)
(create-update-cube)
))
(reset-world!)
)
(defn update-world [world elapsed]
(let [world (run-animation world elapsed)
world (if-let [dx (:camera-rotating world)]
(rotate-camera world dx 0)
world)
world (input-indicator-update world elapsed)]
(if (in? (:mode world) [:simulation :graph :motherboard
:property :avatar])
(let [world (-> world
(set-probe-values)
(apply-forces elapsed)
(run-chips elapsed)
(compute-transforms (if (:use-weld-groups world)
:weld-groups
:parts))
(update-motherboards)
(enforce-gears))]
(recompute-body-transforms! world)
(step-simulation! (:planet world) elapsed)
(if (= (:mode world) :avatar)
(avatar-mode-update world elapsed)
world))
world)))
(defn draw-3d! [world]
(doseq [mesh (vals (:background-meshes world))]
(draw-mesh! world mesh))
(if (> (get-in world [:camera :x-angle]) 0)
(draw-mesh! world (:other-ground world)))
(draw-spheres! world)
(if (:use-weld-groups world)
(do
(doseq [group (vals (:weld-groups world))]
(draw-mesh! world group))
(draw-textured-parts! world))
(doseq [[name part] (:parts world)]
(if (or (= name :ground-part)
(not (in? (:layer part) (:visible-layers world)))
(= (:type part) :cable))
nil
(draw-part! world part))))
(draw-track-head! world)
(draw-selection! world)
(draw-buttons! world)
(draw-lamps! world)
(draw-displays! world)
(draw-cables! world)
(if-let [fun (get-function (:mode world) :draw-3d)]
(fun world))
(if (:draw-update-cube world)
(draw-update-cube! world))
)
(defn show-buttons? [world]
(or
(= (:show-buttons world) :always)
(and
(= (:show-buttons world) :no-sim)
(not (= (:mode world) :simulation)))))
(do
1
(defn draw-2d! [world]
(clear!)
(when (show-buttons? world)
(let [{:keys [image x y w h]} (:action-menu world)]
(fill-rect! (make-color 70 70 70) x y (+ 20 w) (+ 20 h))
(draw-image! image x y))
(let [{:keys [image x y w h]} (:mode-menu world)]
(fill-rect! (make-color 70 70 70) x y (+ 20 w) (+ 20 h))
(draw-image! image x y))
(if-let [fun (get-function (:mode world) :draw)]
(fun world)))
(draw-buffer! world)
(draw-hint! world)
(draw-input-indicator! world)
)
(redraw!))
(defn mouse-scrolled [world event]
(let [world (-> world
(input-indicator-mouse-scrolled event)
(redraw))]
(cond
(and (= (:mode world) :graph)
(inside-box? (:graph-box world) (:x event) (:y event)))
(graph-mode-scrolled world event)
:else
(let [amount (+ 1 (* (:amount event) -0.05))]
(zoom-camera world amount)))))
(defn action-menu-pressed [world x y]
(if-let [region (get-region-at (:action-menu world) x y)]
(let [world (case region
:new (-> world
(new-file)
(tree-changed))
:view (view-all-parts world)
:save (save-machine-version world)
:open (open-machine-version world)
:undo (undo! world)
:redo (redo! world)
:cancel (cancel-action world))]
(show-hint world :action region))
world))
(defn mode-menu-pressed [world x y]
(if-let [region (get-region-at (:mode-menu world) x y)]
(-> world
(change-mode region)
(show-hint :mode region))
world))
(defn mouse-pressed [world event]
(let [x (:x event)
y (:y event)
world (-> world
(assoc-in [:press-time] (get-current-time))
(assoc-in [:press-point] [x y])
(input-indicator-mouse-pressed event)
(redraw))
world (replay-pressed world event)]
(cond
(and
(show-buttons? world)
(inside-box? (:action-menu world) x y))
(action-menu-pressed world x y)
(and
(show-buttons? world)
(inside-box? (:mode-menu world) x y))
(mode-menu-pressed world x y)
(and
(in? (:button event) [:middle :right])
(not (and (= (:mode world) :graph)
(inside-box? (:graph-box world) x y))))
(assoc-in world [:last-point] [x y])
:else
(mode-mouse-pressed world event))))
(defn mouse-moved [world event]
(let [world (replay-moved world event)]
(if (nil? (:last-point world))
(mode-mouse-moved world event)
(if (:shift-pressed world)
(mouse-pan world event)
(mouse-rotate world event)))))
(defn mouse-released [world event]
(let [elapsed (- (get-current-time) (:press-time world))
world (if (and (< elapsed 200)
(= (:button event) :right)
(< (vector/distance (:press-point world)
[(:x event) (:y event)]) 10))
(assoc-in world [:animation]
(create-pivot-animation world event))
world)
world (-> world
(dissoc-in [:press-point])
(input-indicator-mouse-released event)
(redraw))
world (if (= (:mode world) :avatar)
(mode-mouse-released world event)
world)
world (replay-released world event)]
(if (not-nil? (:last-point world))
(dissoc-in world [:last-point])
(mode-mouse-released world event))))
(defn window-changed [world event]
(let [{:keys [width height]} event]
(-> world
(recompute-viewport width height)
(place-elements))))
(defn window-focused [world focused]
(if focused
(-> world
update-scripts
redraw)
world))
(defn keep-active? [world]
(or (not (empty? (:spheres world)))
;; (:forces-active? @world)
(spheres-moving? world)
(not (nil? (:mouse-force world)))
(:camera-rotating world)))
| null |
https://raw.githubusercontent.com/felipereigosa/mock-mechanics/9ec779f2458b08809d6bf76910675266be0674de/src/mockmechanics/core.clj
|
clojure
|
(:forces-active? @world)
|
(ns mockmechanics.core
(:require [mockmechanics.library.vector :as vector]))
(load "world")
(load "analytic-geometry")
(load "camera")
(load "transforms")
(load "window")
(load "picture")
(load "debug")
(load "synthesizer")
(load "miscellaneous")
(load "output")
(load "parts")
(load "collision")
(load "weld-optimization")
(load "mechanical-tree")
(load "gears")
(load "undo")
(load "persistence")
(load "cables")
(load "forces")
(load "modes")
(load "replayer")
(load "commands")
(load "track-loop")
(load "hints")
(load "bone-animation")
(load "input-indicator")
(do
1
(defn create-world []
(delete-temp-files!)
(-> (create-base-world)
(merge (read-string (str "{" (slurp "settings.clj") "}")))
(assoc-in [:num-lines] 1)
(assoc-in [:background-meshes :grid] (create-grid-mesh 24 0.5))
(assoc-in [:info] (create-info))
(assoc-in [:parts] {})
(assoc-in [:parts :ground-part] (create-ground-part))
(assoc-in [:other-ground]
(create-cube-mesh [0 -0.1 0] [1 0 0 0] [12 0.2 12]
(make-color 40 40 40)))
(assoc-in [:graph-box] {:x 343 :y 530
:w 685 :h 150
:buffer (new-image 685 150)
})
(assoc-in [:motherboard-box] {:x 343 :y 530
:w 685 :h 150
:buffer (new-image 685 150)})
(assoc-in [:property-box]
(create-picture "property-menu" 240 340 -1 100))
(assoc-in [:layer-box]
(create-picture "layer-menu" 240 340 -1 200))
(create-layer-info)
(assoc-in [:command] "")
(assoc-in [:bindings] (get-bindings))
(assoc-in [:current-color] :red)
(assoc-in [:action-menu]
(create-picture "action-menu" 240 340 40 -1))
(assoc-in [:mode-menu]
(create-picture "mode-menu" 240 340 40 -1))
(assoc-in [:color-palette]
(create-picture "colors" 340 585 -1 40))
(assoc-in [:add-menu]
(create-picture "add-menu" 726 675 -1 50))
(assoc-in [:add-type] :block)
(assoc-in [:edit-menu]
(create-picture "edit-menu" 210 575 -1 50))
(assoc-in [:edit-subcommand] :move)
(assoc-in [:use-weld-groups] true)
(assoc-in [:graph-snap-value] 0.05)
(assoc-in [:graph-menu]
(create-picture "graph-menu" 210 575 -1 30))
(assoc-in [:graph-subcommand] :move)
(assoc-in [:motherboard-menu]
(create-picture "motherboard-menu" 210 575 -1 30))
(assoc-in [:motherboard-subcommand] :move)
(assoc-in [:selected-property] 0)
(create-physics-world)
(reset-undo! [:parts :gears])
(assoc-in [:mode] :simulation)
(assoc-in [:track-head-model]
(create-cube-mesh [0 -10000 0] [1 0 0 0] 0.2 :white))
(assoc-in [:cage] (create-wireframe-cube [0 0 0] [1 0 0 0]
[1 1 1] :white))
(reset-avatar)
(load-avatar-meshes)
(place-elements)
(create-weld-groups)
(create-update-cube)
))
(reset-world!)
)
(defn update-world [world elapsed]
(let [world (run-animation world elapsed)
world (if-let [dx (:camera-rotating world)]
(rotate-camera world dx 0)
world)
world (input-indicator-update world elapsed)]
(if (in? (:mode world) [:simulation :graph :motherboard
:property :avatar])
(let [world (-> world
(set-probe-values)
(apply-forces elapsed)
(run-chips elapsed)
(compute-transforms (if (:use-weld-groups world)
:weld-groups
:parts))
(update-motherboards)
(enforce-gears))]
(recompute-body-transforms! world)
(step-simulation! (:planet world) elapsed)
(if (= (:mode world) :avatar)
(avatar-mode-update world elapsed)
world))
world)))
(defn draw-3d! [world]
(doseq [mesh (vals (:background-meshes world))]
(draw-mesh! world mesh))
(if (> (get-in world [:camera :x-angle]) 0)
(draw-mesh! world (:other-ground world)))
(draw-spheres! world)
(if (:use-weld-groups world)
(do
(doseq [group (vals (:weld-groups world))]
(draw-mesh! world group))
(draw-textured-parts! world))
(doseq [[name part] (:parts world)]
(if (or (= name :ground-part)
(not (in? (:layer part) (:visible-layers world)))
(= (:type part) :cable))
nil
(draw-part! world part))))
(draw-track-head! world)
(draw-selection! world)
(draw-buttons! world)
(draw-lamps! world)
(draw-displays! world)
(draw-cables! world)
(if-let [fun (get-function (:mode world) :draw-3d)]
(fun world))
(if (:draw-update-cube world)
(draw-update-cube! world))
)
(defn show-buttons? [world]
(or
(= (:show-buttons world) :always)
(and
(= (:show-buttons world) :no-sim)
(not (= (:mode world) :simulation)))))
(do
1
(defn draw-2d! [world]
(clear!)
(when (show-buttons? world)
(let [{:keys [image x y w h]} (:action-menu world)]
(fill-rect! (make-color 70 70 70) x y (+ 20 w) (+ 20 h))
(draw-image! image x y))
(let [{:keys [image x y w h]} (:mode-menu world)]
(fill-rect! (make-color 70 70 70) x y (+ 20 w) (+ 20 h))
(draw-image! image x y))
(if-let [fun (get-function (:mode world) :draw)]
(fun world)))
(draw-buffer! world)
(draw-hint! world)
(draw-input-indicator! world)
)
(redraw!))
(defn mouse-scrolled [world event]
(let [world (-> world
(input-indicator-mouse-scrolled event)
(redraw))]
(cond
(and (= (:mode world) :graph)
(inside-box? (:graph-box world) (:x event) (:y event)))
(graph-mode-scrolled world event)
:else
(let [amount (+ 1 (* (:amount event) -0.05))]
(zoom-camera world amount)))))
(defn action-menu-pressed [world x y]
(if-let [region (get-region-at (:action-menu world) x y)]
(let [world (case region
:new (-> world
(new-file)
(tree-changed))
:view (view-all-parts world)
:save (save-machine-version world)
:open (open-machine-version world)
:undo (undo! world)
:redo (redo! world)
:cancel (cancel-action world))]
(show-hint world :action region))
world))
(defn mode-menu-pressed [world x y]
(if-let [region (get-region-at (:mode-menu world) x y)]
(-> world
(change-mode region)
(show-hint :mode region))
world))
(defn mouse-pressed [world event]
(let [x (:x event)
y (:y event)
world (-> world
(assoc-in [:press-time] (get-current-time))
(assoc-in [:press-point] [x y])
(input-indicator-mouse-pressed event)
(redraw))
world (replay-pressed world event)]
(cond
(and
(show-buttons? world)
(inside-box? (:action-menu world) x y))
(action-menu-pressed world x y)
(and
(show-buttons? world)
(inside-box? (:mode-menu world) x y))
(mode-menu-pressed world x y)
(and
(in? (:button event) [:middle :right])
(not (and (= (:mode world) :graph)
(inside-box? (:graph-box world) x y))))
(assoc-in world [:last-point] [x y])
:else
(mode-mouse-pressed world event))))
(defn mouse-moved [world event]
(let [world (replay-moved world event)]
(if (nil? (:last-point world))
(mode-mouse-moved world event)
(if (:shift-pressed world)
(mouse-pan world event)
(mouse-rotate world event)))))
(defn mouse-released [world event]
(let [elapsed (- (get-current-time) (:press-time world))
world (if (and (< elapsed 200)
(= (:button event) :right)
(< (vector/distance (:press-point world)
[(:x event) (:y event)]) 10))
(assoc-in world [:animation]
(create-pivot-animation world event))
world)
world (-> world
(dissoc-in [:press-point])
(input-indicator-mouse-released event)
(redraw))
world (if (= (:mode world) :avatar)
(mode-mouse-released world event)
world)
world (replay-released world event)]
(if (not-nil? (:last-point world))
(dissoc-in world [:last-point])
(mode-mouse-released world event))))
(defn window-changed [world event]
(let [{:keys [width height]} event]
(-> world
(recompute-viewport width height)
(place-elements))))
(defn window-focused [world focused]
(if focused
(-> world
update-scripts
redraw)
world))
(defn keep-active? [world]
(or (not (empty? (:spheres world)))
(spheres-moving? world)
(not (nil? (:mouse-force world)))
(:camera-rotating world)))
|
14e40f6bb759d6316862b4569627f9b081998912bd72c5a0d4864a81de73d8e0
|
cedlemo/OCaml-GI-ctypes-bindings-generator
|
Fixed.ml
|
open Ctypes
open Foreign
type t = unit ptr
let t_typ : t typ = ptr void
let create =
foreign "gtk_fixed_new" (void @-> returning (ptr Widget.t_typ))
let move =
foreign "gtk_fixed_move" (t_typ @-> ptr Widget.t_typ @-> int32_t @-> int32_t @-> returning (void))
let put =
foreign "gtk_fixed_put" (t_typ @-> ptr Widget.t_typ @-> int32_t @-> int32_t @-> returning (void))
| null |
https://raw.githubusercontent.com/cedlemo/OCaml-GI-ctypes-bindings-generator/21a4d449f9dbd6785131979b91aa76877bad2615/tools/Gtk3/Fixed.ml
|
ocaml
|
open Ctypes
open Foreign
type t = unit ptr
let t_typ : t typ = ptr void
let create =
foreign "gtk_fixed_new" (void @-> returning (ptr Widget.t_typ))
let move =
foreign "gtk_fixed_move" (t_typ @-> ptr Widget.t_typ @-> int32_t @-> int32_t @-> returning (void))
let put =
foreign "gtk_fixed_put" (t_typ @-> ptr Widget.t_typ @-> int32_t @-> int32_t @-> returning (void))
|
|
7cb58a6968a6e6e85790a67781045c78f9c0042cd1710e1b834ffb786082808b
|
neonsquare/mel-base
|
message-cache.lisp
|
;;; -*- Mode: LISP; Syntax: COMMON-LISP; Package: CL-USER; Base: 10 -*-
;;; Copyright (c) 2004, < >.
;;; All rights reserved.
;;; Redistribution and use in source and binary forms, with or without
;;; modification, are permitted provided that the following conditions
;;; are met:
;;; * Redistributions of source code must retain the above copyright
;;; notice, this list of conditions and the following disclaimer.
;;; * Redistributions in binary form must reproduce the above
;;; copyright notice, this list of conditions and the following
;;; disclaimer in the documentation and/or other materials
;;; provided with the distribution.
;;; THIS SOFTWARE IS PROVIDED BY THE AUTHOR 'AS IS' AND ANY EXPRESSED
;;; OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
;;; WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
;;; ARE DISCLAIMED. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY
;;; DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
;;; DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE
;;; GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
;;; INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
;;; WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
;;; NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
;;; SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
(in-package :mel.internal)
(defclass message-cache-mixin ()
((message-cache :accessor message-cache)))
(defclass hash-table-message-cache-mixin (message-cache-mixin) ())
(defclass equal-message-cache-mixin (hash-table-message-cache-mixin) ())
(defclass eql-message-cache-mixin (hash-table-message-cache-mixin) ())
(defclass eq-message-cache-mixin (hash-table-message-cache-mixin) ())
(defmethod allocate-message-cache ((folder eq-message-cache-mixin))
(make-hash-table :test 'eq))
(defmethod allocate-message-cache ((folder eql-message-cache-mixin))
(make-hash-table :test 'eql))
(defmethod allocate-message-cache ((folder equal-message-cache-mixin))
(make-hash-table :test 'equal))
(defmethod initialize-instance :after ((object message-cache-mixin) &key)
(setf (message-cache object) (allocate-message-cache object)))
(defmethod map-message-cache (fn (folder hash-table-message-cache-mixin))
(maphash fn (message-cache folder)))
(defmethod uid= ((folder eq-message-cache-mixin) message1 message2)
(eq (uid message1) (uid message2)))
(defmethod uid= ((folder eql-message-cache-mixin) message1 message2)
(eql (uid message1) (uid message2)))
(defmethod uid= ((folder equal-message-cache-mixin) message1 message2)
(equal (uid message1) (uid message2)))
;; receiver-protocol
(defgeneric FIND-MESSAGE (folder uid &key if-does-not-exist))
(defmethod find-message ((folder hash-table-message-cache-mixin) uid &key (if-does-not-exist :error))
"This method provides caching of message objects. New message
objects only get consed when a message with the given uid
does not yet exist in the message-cache."
(declare (ignore if-does-not-exist))
(or (gethash uid (message-cache folder))
(setf (gethash uid (message-cache folder)) (call-next-method))))
;; folder-protocol
(defmethod close-folder :after ((folder hash-table-message-cache-mixin))
(clrhash (message-cache folder)))
| null |
https://raw.githubusercontent.com/neonsquare/mel-base/7edc8fb94f30d29637bae0831c55825b0021e0f8/protocols/message-cache.lisp
|
lisp
|
Syntax : COMMON - LISP ; Package : CL - USER ; Base : 10 -*-
All rights reserved.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions
are met:
* Redistributions of source code must retain the above copyright
notice, this list of conditions and the following disclaimer.
* Redistributions in binary form must reproduce the above
copyright notice, this list of conditions and the following
disclaimer in the documentation and/or other materials
provided with the distribution.
THIS SOFTWARE IS PROVIDED BY THE AUTHOR 'AS IS' AND ANY EXPRESSED
OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
ARE DISCLAIMED. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY
DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE
GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
receiver-protocol
folder-protocol
|
Copyright ( c ) 2004 , < > .
DIRECT , INDIRECT , INCIDENTAL , SPECIAL , EXEMPLARY , OR CONSEQUENTIAL
INTERRUPTION ) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY ,
(in-package :mel.internal)
(defclass message-cache-mixin ()
((message-cache :accessor message-cache)))
(defclass hash-table-message-cache-mixin (message-cache-mixin) ())
(defclass equal-message-cache-mixin (hash-table-message-cache-mixin) ())
(defclass eql-message-cache-mixin (hash-table-message-cache-mixin) ())
(defclass eq-message-cache-mixin (hash-table-message-cache-mixin) ())
(defmethod allocate-message-cache ((folder eq-message-cache-mixin))
(make-hash-table :test 'eq))
(defmethod allocate-message-cache ((folder eql-message-cache-mixin))
(make-hash-table :test 'eql))
(defmethod allocate-message-cache ((folder equal-message-cache-mixin))
(make-hash-table :test 'equal))
(defmethod initialize-instance :after ((object message-cache-mixin) &key)
(setf (message-cache object) (allocate-message-cache object)))
(defmethod map-message-cache (fn (folder hash-table-message-cache-mixin))
(maphash fn (message-cache folder)))
(defmethod uid= ((folder eq-message-cache-mixin) message1 message2)
(eq (uid message1) (uid message2)))
(defmethod uid= ((folder eql-message-cache-mixin) message1 message2)
(eql (uid message1) (uid message2)))
(defmethod uid= ((folder equal-message-cache-mixin) message1 message2)
(equal (uid message1) (uid message2)))
(defgeneric FIND-MESSAGE (folder uid &key if-does-not-exist))
(defmethod find-message ((folder hash-table-message-cache-mixin) uid &key (if-does-not-exist :error))
"This method provides caching of message objects. New message
objects only get consed when a message with the given uid
does not yet exist in the message-cache."
(declare (ignore if-does-not-exist))
(or (gethash uid (message-cache folder))
(setf (gethash uid (message-cache folder)) (call-next-method))))
(defmethod close-folder :after ((folder hash-table-message-cache-mixin))
(clrhash (message-cache folder)))
|
297762e57ec1c5d9366265582ae689ae7c57963c618dbc9a1001291bfddc4919
|
fosskers/aura
|
Test.hs
|
{-# LANGUAGE OverloadedStrings #-}
module Main where
import Linux.Arch.Aur
import Network.HTTP.Client
import Network.HTTP.Client.TLS
import Test.Tasty
import Test.Tasty.HUnit
---
suite :: Manager -> TestTree
suite m = testGroup "RPC Calls"
[ testCase "info on existing package" $ infoTest m
, testCase "info on nonexistant package" $ infoTest' m
, testCase "search" $ searchTest m
]
infoTest :: Manager -> Assertion
infoTest m = info m ["aura", "aura-bin"] >>= \x -> (length <$> x) @?= Right 2
infoTest' :: Manager -> Assertion
infoTest' m = info m ["aura1234567"] >>= \x -> (null <$> x) @?= Right True
searchTest :: Manager -> Assertion
searchTest m = search m "aura" >>= assertBool "Good search" . not . null
main :: IO ()
main = do
m <- newManager tlsManagerSettings
defaultMain $ suite m
| null |
https://raw.githubusercontent.com/fosskers/aura/08cd46eaa598094f7395455d66690d3d8c59e965/haskell/aur/tests/Test.hs
|
haskell
|
# LANGUAGE OverloadedStrings #
-
|
module Main where
import Linux.Arch.Aur
import Network.HTTP.Client
import Network.HTTP.Client.TLS
import Test.Tasty
import Test.Tasty.HUnit
suite :: Manager -> TestTree
suite m = testGroup "RPC Calls"
[ testCase "info on existing package" $ infoTest m
, testCase "info on nonexistant package" $ infoTest' m
, testCase "search" $ searchTest m
]
infoTest :: Manager -> Assertion
infoTest m = info m ["aura", "aura-bin"] >>= \x -> (length <$> x) @?= Right 2
infoTest' :: Manager -> Assertion
infoTest' m = info m ["aura1234567"] >>= \x -> (null <$> x) @?= Right True
searchTest :: Manager -> Assertion
searchTest m = search m "aura" >>= assertBool "Good search" . not . null
main :: IO ()
main = do
m <- newManager tlsManagerSettings
defaultMain $ suite m
|
69ed2417a210978bac4673f276c64d27ca72c86089e5b04ac6266dbfbb7a00ee
|
purcell/adventofcode2016
|
Day23.hs
|
module Main where
import AdventOfCode
import Control.Monad.State
import Data.Maybe (fromMaybe)
import Data.Map (Map)
import Data.Foldable (toList)
import qualified Data.Map as M
import Data.Sequence (Seq)
import qualified Data.Sequence as Sq
newtype Reg =
Reg Char
deriving (Eq, Show)
data Value
= LitVal !Int
| RegVal !Reg
deriving (Eq, Show)
data Instr
= Copy !Value
!Reg
| JumpNotZero !Value
!Value
| Inc !Reg
| Dec !Reg
| Toggle !Value
| Mult !Value
!Value
!Reg
| NoOp
deriving (Eq, Show)
parseInstr :: Parser Instr
parseInstr =
(Dec <$> (string "dec " *> reg)) <|> (Inc <$> (string "inc " *> reg)) <|>
(JumpNotZero <$> (string "jnz " *> val) <*> (string " " *> val)) <|>
(Toggle <$> (string "tgl " *> val)) <|>
(Copy <$> (string "cpy " *> val) <*> (string " " *> reg))
where
reg = Reg <$> letter
val = (RegVal <$> reg) <|> (LitVal <$> num)
num = do
sign <- option '+' (char '-')
digits <- many1 digit
return $
(if sign == '-'
then -1
else 1) *
read digits
data SimState = SimState
{ stPos :: Int
, stRegs :: Map Char Int
, stInstrs :: Seq Instr
}
instance Show SimState where
show (SimState pos regs instrs) =
unlines (show regs : toList (Sq.mapWithIndex showins instrs))
where
showins n i =
show n ++
(if n == pos
then ">> "
else " ") ++
show i
type Sim = (State SimState)
run :: Sim ()
run = do
ran <- runNext
when ran run
runNext :: Sim Bool
runNext = do
pos <- gets stPos
nextInstrs <- Sq.take 6 . Sq.drop pos <$> gets stInstrs
if Sq.null nextInstrs
then return False
else do
step <-
if nextInstrs == multPattern
then runMult
else runInstr (nextInstrs `Sq.index` 0)
modify
(\st ->
st
{ stPos = stPos st + step
})
return True
modifyReg :: Reg -> (Int -> Int) -> Sim ()
modifyReg r@(Reg n) f = do
prev <- getReg r
modify
(\st ->
let regs = stRegs st
in st
{ stRegs = M.insert n (f prev) regs
})
getReg :: Reg -> Sim Int
getReg (Reg n) = (fromMaybe 0 . M.lookup n) <$> gets stRegs
getVal :: Value -> Sim Int
getVal (LitVal v) = return v
getVal (RegVal r) = getReg r
runMult :: Sim Int
runMult = do
void $ runInstr (Mult (RegVal (Reg 'b')) (RegVal (Reg 'd')) (Reg 'a'))
void $ runInstr (Copy (LitVal 0) (Reg 'c'))
void $ runInstr (Copy (LitVal 0) (Reg 'd'))
return 6
multPattern :: Seq Instr
multPattern =
Sq.fromList
[ Copy (RegVal (Reg 'b')) (Reg 'c')
, Inc (Reg 'a')
, Dec (Reg 'c')
, JumpNotZero (RegVal (Reg 'c')) (LitVal (-2))
, Dec (Reg 'd')
, JumpNotZero (RegVal (Reg 'd')) (LitVal (-5))
]
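-- Note: 'multPattern' is the assembunny idiom for multiplication: each pass of the
-- outer loop (dec d / jnz d -5) copies 'b' into 'c' and counts it back down while
-- incrementing 'a', so the block as a whole adds b * d to 'a'.  'runMult' above
-- short-circuits it with a single 'Mult' that stores b * d in 'a', zeroes 'c' and
-- 'd', and returns 6 so execution resumes after the six recognised instructions.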
runInstr :: Instr -> Sim Int
runInstr (Copy val reg) = (getVal val >>= modifyReg reg . const) >> return 1
runInstr (JumpNotZero val1 val2) = do
v <- getVal val1
v2 <- getVal val2
return $
if v /= 0
then v2
else 1
runInstr (Inc reg) = modifyReg reg (+ 1) >> return 1
runInstr (Dec reg) = modifyReg reg (\i -> i - 1) >> return 1
runInstr (Toggle val) = do
offset <- getVal val
pos <- gets stPos
toggleInstrAt (pos + offset)
return 1
runInstr NoOp = return 1
runInstr (Mult val1 val2 reg) = do
v1 <- getVal val1
v2 <- getVal val2
modifyReg reg (const (v1 * v2))
return 1
toggleInstrAt :: Int -> Sim ()
toggleInstrAt pos =
modify
(\s ->
s
{ stInstrs = Sq.adjust toggleInstr pos (stInstrs s)
})
toggleInstr :: Instr -> Instr
toggleInstr NoOp = error "toggling no-op"
toggleInstr (Copy val reg) = JumpNotZero val (RegVal reg)
toggleInstr (JumpNotZero val1 (RegVal val2)) = Copy val1 val2
toggleInstr (Inc reg) = Dec reg
toggleInstr (Dec reg) = Inc reg
toggleInstr (Toggle (RegVal reg)) = Inc reg
toggleInstr _ = NoOp
finalA :: Map Char Int -> [Instr] -> Int
finalA regs instrs =
(M.! 'a') $ stRegs $ execState run (SimState 0 regs (Sq.fromList instrs))
partA :: [Instr] -> Int
partA = finalA (M.singleton 'a' 7)
partB :: [Instr] -> Int
partB = finalA (M.singleton 'a' 12)
main :: IO ()
main = runDay day23
day23 :: Day [Instr]
day23 =
Day
23
(many (parseInstr <* newline))
(return . show . partA)
(return . show . partB)
| null |
https://raw.githubusercontent.com/purcell/adventofcode2016/081f30de4ea6b939e6c3736d83836f4dd72ab9a2/src/Day23.hs
|
haskell
|
module Main where
import AdventOfCode
import Control.Monad.State
import Data.Maybe (fromMaybe)
import Data.Map (Map)
import Data.Foldable (toList)
import qualified Data.Map as M
import Data.Sequence (Seq)
import qualified Data.Sequence as Sq
newtype Reg =
Reg Char
deriving (Eq, Show)
data Value
= LitVal !Int
| RegVal !Reg
deriving (Eq, Show)
data Instr
= Copy !Value
!Reg
| JumpNotZero !Value
!Value
| Inc !Reg
| Dec !Reg
| Toggle !Value
| Mult !Value
!Value
!Reg
| NoOp
deriving (Eq, Show)
parseInstr :: Parser Instr
parseInstr =
(Dec <$> (string "dec " *> reg)) <|> (Inc <$> (string "inc " *> reg)) <|>
(JumpNotZero <$> (string "jnz " *> val) <*> (string " " *> val)) <|>
(Toggle <$> (string "tgl " *> val)) <|>
(Copy <$> (string "cpy " *> val) <*> (string " " *> reg))
where
reg = Reg <$> letter
val = (RegVal <$> reg) <|> (LitVal <$> num)
num = do
sign <- option '+' (char '-')
digits <- many1 digit
return $
(if sign == '-'
then -1
else 1) *
read digits
data SimState = SimState
{ stPos :: Int
, stRegs :: Map Char Int
, stInstrs :: Seq Instr
}
instance Show SimState where
show (SimState pos regs instrs) =
unlines (show regs : toList (Sq.mapWithIndex showins instrs))
where
showins n i =
show n ++
(if n == pos
then ">> "
else " ") ++
show i
type Sim = (State SimState)
run :: Sim ()
run = do
ran <- runNext
when ran run
runNext :: Sim Bool
runNext = do
pos <- gets stPos
nextInstrs <- Sq.take 6 . Sq.drop pos <$> gets stInstrs
if Sq.null nextInstrs
then return False
else do
step <-
if nextInstrs == multPattern
then runMult
else runInstr (nextInstrs `Sq.index` 0)
modify
(\st ->
st
{ stPos = stPos st + step
})
return True
modifyReg :: Reg -> (Int -> Int) -> Sim ()
modifyReg r@(Reg n) f = do
prev <- getReg r
modify
(\st ->
let regs = stRegs st
in st
{ stRegs = M.insert n (f prev) regs
})
getReg :: Reg -> Sim Int
getReg (Reg n) = (fromMaybe 0 . M.lookup n) <$> gets stRegs
getVal :: Value -> Sim Int
getVal (LitVal v) = return v
getVal (RegVal r) = getReg r
runMult :: Sim Int
runMult = do
void $ runInstr (Mult (RegVal (Reg 'b')) (RegVal (Reg 'd')) (Reg 'a'))
void $ runInstr (Copy (LitVal 0) (Reg 'c'))
void $ runInstr (Copy (LitVal 0) (Reg 'd'))
return 6
multPattern :: Seq Instr
multPattern =
Sq.fromList
[ Copy (RegVal (Reg 'b')) (Reg 'c')
, Inc (Reg 'a')
, Dec (Reg 'c')
, JumpNotZero (RegVal (Reg 'c')) (LitVal (-2))
, Dec (Reg 'd')
, JumpNotZero (RegVal (Reg 'd')) (LitVal (-5))
]
runInstr :: Instr -> Sim Int
runInstr (Copy val reg) = (getVal val >>= modifyReg reg . const) >> return 1
runInstr (JumpNotZero val1 val2) = do
v <- getVal val1
v2 <- getVal val2
return $
if v /= 0
then v2
else 1
runInstr (Inc reg) = modifyReg reg (+ 1) >> return 1
runInstr (Dec reg) = modifyReg reg (\i -> i - 1) >> return 1
runInstr (Toggle val) = do
offset <- getVal val
pos <- gets stPos
toggleInstrAt (pos + offset)
return 1
runInstr NoOp = return 1
runInstr (Mult val1 val2 reg) = do
v1 <- getVal val1
v2 <- getVal val2
modifyReg reg (const (v1 * v2))
return 1
toggleInstrAt :: Int -> Sim ()
toggleInstrAt pos =
modify
(\s ->
s
{ stInstrs = Sq.adjust toggleInstr pos (stInstrs s)
})
toggleInstr :: Instr -> Instr
toggleInstr NoOp = error "toggling no-op"
toggleInstr (Copy val reg) = JumpNotZero val (RegVal reg)
toggleInstr (JumpNotZero val1 (RegVal val2)) = Copy val1 val2
toggleInstr (Inc reg) = Dec reg
toggleInstr (Dec reg) = Inc reg
toggleInstr (Toggle (RegVal reg)) = Inc reg
toggleInstr _ = NoOp
finalA :: Map Char Int -> [Instr] -> Int
finalA regs instrs =
(M.! 'a') $ stRegs $ execState run (SimState 0 regs (Sq.fromList instrs))
partA :: [Instr] -> Int
partA = finalA (M.singleton 'a' 7)
partB :: [Instr] -> Int
partB = finalA (M.singleton 'a' 12)
main :: IO ()
main = runDay day23
day23 :: Day [Instr]
day23 =
Day
23
(many (parseInstr <* newline))
(return . show . partA)
(return . show . partB)
|
|
ab595f9a054d0425a47a4e52b3dca01d7514dfd6e78dad22c77575fd0494b356
|
hexlet-basics/exercises-clojure
|
test.clj
|
(ns cond-test
(:require [test-helper :refer [assert-solution]]
[index :refer [programmer-level]]))
(assert-solution
[[3] [18] [40]]
["junior" "middle" "senior"]
programmer-level)
| null |
https://raw.githubusercontent.com/hexlet-basics/exercises-clojure/ede14102d01f9ef736e0af811cd92f5b22a83bc2/modules/20-logic/40-cond/test.clj
|
clojure
|
(ns cond-test
(:require [test-helper :refer [assert-solution]]
[index :refer [programmer-level]]))
(assert-solution
[[3] [18] [40]]
["junior" "middle" "senior"]
programmer-level)
|
|
4799d17717c220dd7e83a85d9d022941b069af4f63d07e1868ebf577a23b0758
|
duo-lang/duo-lang
|
Minimize.hs
|
module TypeAutomata.Minimize ( minimize ) where
import Data.Graph.Inductive.Graph ( lab, lpre, nodes, Graph(labEdges), Node )
import Data.List (nub, partition, sort)
import Data.Maybe (fromMaybe, catMaybes, fromJust)
import Data.Set (Set)
import Data.Set qualified as S
import qualified Data.Map as M
import Syntax.RST.Types ( Polarity(..) )
import TypeAutomata.Definition
getAlphabet :: TypeGr -> [EdgeLabelNormal]
getAlphabet gr = nub $ map (\(_,_,b) -> b) (labEdges gr)
-- map a pair of a node and an edge label to the node's predecessors along the label
type Preds = M.Map (Node,EdgeLabelNormal) (Set Node)
-- find all predecessors with connecting edge labelled by specified label
predsWith :: Preds -> [Node] -> EdgeLabelNormal -> [Node]
predsWith preds ns x = S.toList $ S.unions $ (\n -> fromMaybe S.empty $ M.lookup (n,x) preds) <$> ns
predsMap :: TypeGr -> Preds
predsMap gr =
let alph = getAlphabet gr
ns = nodes gr
preds :: M.Map Node [(Node,EdgeLabelNormal)]
preds = M.fromList $ fmap(\n -> (n, lpre gr n)) ns
getPred :: Node -> EdgeLabelNormal -> Set Node
getPred n l = S.fromList $ map fst . filter ((== l) . snd) $ fromMaybe [] $ M.lookup n preds
addCharNode :: EdgeLabelNormal -> Preds -> Node -> Preds
addCharNode a m n = M.insert (n,a) (getPred n a) m
addChar :: Preds -> EdgeLabelNormal -> Preds
addChar m a = foldl (addCharNode a) m ns
in foldl addChar M.empty alph
-- an implementation of Hopcroft's minimisation algorithm
-- with simplifications found in
-- "Re-describing an algorithm by Hopcroft" (Knuutila, 2001)
--
-- the original
--
-- Q/ρ ← {F, Q ∖ F}
-- L ← {F}
-- while there exists A ∈ L do
--   L ← L ∖ {A}
--   for each x ∈ Σ do
--     let X = δ⁻¹_x(A)
--     for each Y ∈ Q/ρ s.t. (Y′ = Y ∩ X ≠ ∅) ∧ (Y″ = Y ∖ X ≠ ∅) do
--       Q/ρ ← (Q/ρ ∖ {Y}) ∪ {Y′, Y″}
--       if Y ∈ L then
--         L ← (L ∖ {Y}) ∪ {Y′, Y″}
--       else
--         L ← L ∪ {min(Y′, Y″)}
--       end
--     end
--   end
-- end
--
-- becomes the following variant (since L ⊆ Q/ρ is a loop invariant)
--
-- Let R = Q/ρ ∖ L
--
-- R ← {Q ∖ F}
-- L ← {F}
-- while there exists A ∈ L do
--   L ← L ∖ {A}
--   R ← R ∪ {A}
--   for each x ∈ Σ do
--     let X = δ⁻¹_x(A)
--     for each Y ∈ R s.t. (Y′ = Y ∩ X ≠ ∅) ∧ (Y″ = Y ∖ X ≠ ∅) do
--       R ← (R ∖ {Y}) ∪ {max(Y′, Y″)}
--       L ← L ∪ {min(Y′, Y″)}
--     end
--     for each Y ∈ L s.t. (Y′ = Y ∩ X ≠ ∅) ∧ (Y″ = Y ∖ X ≠ ∅) do
--       L ← (L ∖ {Y}) ∪ {Y′, Y″}
--     end
--   end
-- end
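--
-- (In the implementation below, the third argument of 'minimize'' is the waiting
-- list L and the fourth accumulates the already-processed classes R;
-- 'refineAllLetters' performs the per-letter splitting for one popped class.)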
type EquivalenceClass = [Node]
minimize' :: Preds -> [EdgeLabelNormal] -> [EquivalenceClass] -> [EquivalenceClass] -> [EquivalenceClass]
minimize' _preds _alph [] rs = rs
minimize' preds alph (w:ws) rs = minimize' preds alph ws' rs'
where
(ws',rs') = refineAllLetters alph (ws, w:rs)
refineAllLetters :: [EdgeLabelNormal] -> ([EquivalenceClass], [EquivalenceClass]) -> ([EquivalenceClass], [EquivalenceClass])
refineAllLetters [] acc = acc
refineAllLetters (a:alph) (ws,rs) = let pre = sort $ predsWith preds w a
ws'' = refineWaiting pre ws
(ws',rs') = refineRest pre rs ([],[])
in refineAllLetters alph (ws' ++ ws'', rs')
refineRest :: [Node] -> [EquivalenceClass] -> ([EquivalenceClass], [EquivalenceClass]) -> ([EquivalenceClass], [EquivalenceClass])
refineRest _pre [] acc = acc
refineRest pre (r:rs) (ws',rs') = let (r1, r2, n1, n2) = splitSorted pre r
-- take the smaller one as r1'
(r1', r2') = if n1 < n2 then (r1, r2) else (r2, r1)
-- r1' might be empty. If so, don't add it
ws'' = if null r1' then ws' else r1':ws'
rs'' = r2' : rs'
in refineRest pre rs (ws'',rs'')
refineWaiting :: [Node] -> [EquivalenceClass] -> [EquivalenceClass]
refineWaiting pre ls = concatMap (splitWaiting pre) ls
splitWaiting :: [Node] -> EquivalenceClass -> [EquivalenceClass]
splitWaiting pre l = let (l1,l2,_,_) = splitSorted pre l
in if null l1 || null l2 then [l] else [l1, l2]
splitSorted :: (Ord a) => [a] -> [a] -> ([a], [a], Int, Int)
splitSorted splitter splittee = (reverse inter, reverse diff, ni, nd)
where
(inter, diff, ni, nd) = go splitter splittee ([], [], 0, 0)
go :: (Ord a) => [a] -> [a] -> ([a], [a], Int, Int) -> ([a], [a], Int, Int)
go [] ps (is, ds, ni, nd) = (is, reverse ps ++ ds, ni, nd + length ps)
go _ss [] (is, ds, ni, nd) = (is, ds, ni, nd)
go (s:ss) (p:ps) acc@(is, ds, ni, nd) =
case compare s p of
LT -> go ss (p:ps) acc
EQ -> go ss ps (p:is, ds, ni+1, nd)
GT -> go (s:ss) ps (is, p:ds, ni, nd+1)
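-- Worked example for 'splitSorted' (both inputs assumed sorted ascending):
--   splitSorted [2,4] [1,2,3,4,5] == ([2,4], [1,3,5], 2, 3)
-- i.e. the elements shared with the splitter, the rest, and their respective counts.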
-- partition list by equivalence (given as a function)
myGroupBy :: (a -> a -> Bool) -> [a] -> [[a]]
myGroupBy _ [] = []
myGroupBy p (x:xs) = let (xs1,xs2) = partition (p x) xs in (x:xs1) : myGroupBy p xs2
flowNeighbors :: TypeAutCore EdgeLabelNormal -> Node -> Set Node
flowNeighbors aut i =
S.fromList $ [n | (j,n) <- aut.ta_flowEdges, i == j] ++ [n | (n,j) <- aut.ta_flowEdges, i == j]
-- nodes are considered equal if they have the same label and the same neighbors along flow edges
equalNodes :: TypeAutCore EdgeLabelNormal -> Node -> Node -> Bool
equalNodes aut i j =
(lab aut.ta_gr i == lab aut.ta_gr j) && flowNeighbors aut i == flowNeighbors aut j
-- We don't have a direct notion for accepting states, so we unroll the definition of the
-- minimisation algorithm once
initialSplit :: TypeAutCore EdgeLabelNormal -> ([EquivalenceClass], [EquivalenceClass])
initialSplit aut = (rest,catMaybes [posMin,negMin])
where
distGroups :: [EquivalenceClass]
distGroups = myGroupBy (equalNodes aut) (nodes aut.ta_gr)
(posMin,negMin,rest) = getMins $ sort <$> distGroups
getMins :: [EquivalenceClass]
-> (Maybe EquivalenceClass, Maybe EquivalenceClass, [EquivalenceClass])
getMins [] = (Nothing, Nothing, [])
getMins ([] : _iss) = error "Minimize: Empty equivalence class should not exist"
getMins (eq@(nd : _) : iss) =
let l = fromJust $ lab aut.ta_gr nd
pol = getLabelPol l
(p,n,iss') = getMins iss
(p',n',iss'') = case (pol, p, n) of
(Pos, Nothing, _) -> (Just eq, n, iss')
(Pos, Just ns, _) ->
if length ns > length eq
then (Just eq, n, ns : iss')
else (Just ns, n, eq : iss')
(Neg, _, Nothing) -> (p, Just eq, iss')
(Neg, _, Just ns) ->
if length ns > length eq
then (p, Just eq, ns : iss')
else (p, Just ns, eq : iss')
in (p', n', iss'')
getLabelPol :: NodeLabel -> Polarity
getLabelPol nl@MkNodeLabel{} = nl.nl_pol
getLabelPol nl@MkPrimitiveNodeLabel{} = nl.pl_pol
-- generate a function that maps each node to the representative of its respective equivalence class
genMinimizeFun :: TypeAutCore EdgeLabelNormal -> (Node -> Node)
genMinimizeFun aut = getNewNode
where
preds = predsMap aut.ta_gr
alph = getAlphabet aut.ta_gr
(ls,ps) = initialSplit aut
nodeSets = minimize' preds alph ls ps
getNewNode n = head $ head $ filter (n `elem`) nodeSets
minimize :: TypeAutDet pol -> TypeAutDet pol
minimize aut = aut'
where
ta_core' = removeRedundantEdgesCore aut.ta_core
fun = genMinimizeFun ta_core'
aut' = mapTypeAut fun aut
| null |
https://raw.githubusercontent.com/duo-lang/duo-lang/62305c16e219477d6e33287a7752c258b5342c3e/src/TypeAutomata/Minimize.hs
|
haskell
|
map a pair of a node and an edge label to the node's predecessors along the label
find all predecessors with connecting edge labelled by specified label
with simplifications found in
L ← {F}
while there exists A ∈ L do
L ← L ∖ {A}
for each x ∈ Σ do
let X = δ⁻¹_x(A)
if Y ∈ L then
else
L ← L ∪ {min(Y', Y'')}
end
end
end
becomes the following variant (since L ⊆ Q/ρ is a loop invariant)
Let R = Q/ρ ∖ L
L ← {F}
while there exists A ∈ L do
L ← L ∖ {A}
R ← R ∪ {A}
for each x ∈ Σ do
let X = δ⁻¹_x(A)
R ← (R ∖ {Y}) ∪ {max(Y', Y'')}
L ← L ∪ {min(Y', Y'')}
end
end
end
end
take the smaller one as r1'
r1' might be empty. If so, don't add it
partition list by equivalence (given as a function)
nodes are considered equal if they have the same label and the same neighbors along flow edges
We don't have a direct notion for accepting states, so we unroll the definition of the
minimisation algorithm once
generate a function that maps each node to the representative of its respective equivalence class
|
module TypeAutomata.Minimize ( minimize ) where
import Data.Graph.Inductive.Graph ( lab, lpre, nodes, Graph(labEdges), Node )
import Data.List (nub, partition, sort)
import Data.Maybe (fromMaybe, catMaybes, fromJust)
import Data.Set (Set)
import Data.Set qualified as S
import qualified Data.Map as M
import Syntax.RST.Types ( Polarity(..) )
import TypeAutomata.Definition
getAlphabet :: TypeGr -> [EdgeLabelNormal]
getAlphabet gr = nub $ map (\(_,_,b) -> b) (labEdges gr)
type Preds = M.Map (Node,EdgeLabelNormal) (Set Node)
predsWith :: Preds -> [Node] -> EdgeLabelNormal -> [Node]
predsWith preds ns x = S.toList $ S.unions $ (\n -> fromMaybe S.empty $ M.lookup (n,x) preds) <$> ns
predsMap :: TypeGr -> Preds
predsMap gr =
let alph = getAlphabet gr
ns = nodes gr
preds :: M.Map Node [(Node,EdgeLabelNormal)]
preds = M.fromList $ fmap(\n -> (n, lpre gr n)) ns
getPred :: Node -> EdgeLabelNormal -> Set Node
getPred n l = S.fromList $ map fst . filter ((== l) . snd) $ fromMaybe [] $ M.lookup n preds
addCharNode :: EdgeLabelNormal -> Preds -> Node -> Preds
addCharNode a m n = M.insert (n,a) (getPred n a) m
addChar :: Preds -> EdgeLabelNormal -> Preds
addChar m a = foldl (addCharNode a) m ns
in foldl addChar M.empty alph
-- an implementation of Hopcroft's minimisation algorithm
-- Re-describing an algorithm by Hopcroft (Knuutila, 2001)
-- the original
-- Q/ρ ← {F, Q ∖ F}
-- let X = δ⁻¹_x(A)
-- for each Y ∈ Q/ρ s.t. (Y' = Y ∩ X ≠ ∅) ∧ (Y'' = Y ∖ X ≠ ∅) do
-- Q/ρ ← (Q/ρ ∖ {Y}) ∪ {Y', Y''}
-- L ← (L ∖ {Y}) ∪ {Y', Y''}
-- R ← {Q ∖ F}
-- let X = δ⁻¹_x(A)
-- for each Y ∈ R s.t. (Y' = Y ∩ X ≠ ∅) ∧ (Y'' = Y ∖ X ≠ ∅) do
-- for each Y ∈ L s.t. (Y' = Y ∩ X ≠ ∅) ∧ (Y'' = Y ∖ X ≠ ∅) do
-- L ← (L ∖ {Y}) ∪ {Y', Y''}
type EquivalenceClass = [Node]
minimize' :: Preds -> [EdgeLabelNormal] -> [EquivalenceClass] -> [EquivalenceClass] -> [EquivalenceClass]
minimize' _preds _alph [] rs = rs
minimize' preds alph (w:ws) rs = minimize' preds alph ws' rs'
where
(ws',rs') = refineAllLetters alph (ws, w:rs)
refineAllLetters :: [EdgeLabelNormal] -> ([EquivalenceClass], [EquivalenceClass]) -> ([EquivalenceClass], [EquivalenceClass])
refineAllLetters [] acc = acc
refineAllLetters (a:alph) (ws,rs) = let pre = sort $ predsWith preds w a
ws'' = refineWaiting pre ws
(ws',rs') = refineRest pre rs ([],[])
in refineAllLetters alph (ws' ++ ws'', rs')
refineRest :: [Node] -> [EquivalenceClass] -> ([EquivalenceClass], [EquivalenceClass]) -> ([EquivalenceClass], [EquivalenceClass])
refineRest _pre [] acc = acc
refineRest pre (r:rs) (ws',rs') = let (r1, r2, n1, n2) = splitSorted pre r
(r1', r2') = if n1 < n2 then (r1, r2) else (r2, r1)
ws'' = if null r1' then ws' else r1':ws'
rs'' = r2' : rs'
in refineRest pre rs (ws'',rs'')
refineWaiting :: [Node] -> [EquivalenceClass] -> [EquivalenceClass]
refineWaiting pre ls = concatMap (splitWaiting pre) ls
splitWaiting :: [Node] -> EquivalenceClass -> [EquivalenceClass]
splitWaiting pre l = let (l1,l2,_,_) = splitSorted pre l
in if null l1 || null l2 then [l] else [l1, l2]
splitSorted :: (Ord a) => [a] -> [a] -> ([a], [a], Int, Int)
splitSorted splitter splittee = (reverse inter, reverse diff, ni, nd)
where
(inter, diff, ni, nd) = go splitter splittee ([], [], 0, 0)
go :: (Ord a) => [a] -> [a] -> ([a], [a], Int, Int) -> ([a], [a], Int, Int)
go [] ps (is, ds, ni, nd) = (is, reverse ps ++ ds, ni, nd + length ps)
go _ss [] (is, ds, ni, nd) = (is, ds, ni, nd)
go (s:ss) (p:ps) acc@(is, ds, ni, nd) =
case compare s p of
LT -> go ss (p:ps) acc
EQ -> go ss ps (p:is, ds, ni+1, nd)
GT -> go (s:ss) ps (is, p:ds, ni, nd+1)
myGroupBy :: (a -> a -> Bool) -> [a] -> [[a]]
myGroupBy _ [] = []
myGroupBy p (x:xs) = let (xs1,xs2) = partition (p x) xs in (x:xs1) : myGroupBy p xs2
flowNeighbors :: TypeAutCore EdgeLabelNormal -> Node -> Set Node
flowNeighbors aut i =
S.fromList $ [n | (j,n) <- aut.ta_flowEdges, i == j] ++ [n | (n,j) <- aut.ta_flowEdges, i == j]
equalNodes :: TypeAutCore EdgeLabelNormal -> Node -> Node -> Bool
equalNodes aut i j =
(lab aut.ta_gr i == lab aut.ta_gr j) && flowNeighbors aut i == flowNeighbors aut j
initialSplit :: TypeAutCore EdgeLabelNormal -> ([EquivalenceClass], [EquivalenceClass])
initialSplit aut = (rest,catMaybes [posMin,negMin])
where
distGroups :: [EquivalenceClass]
distGroups = myGroupBy (equalNodes aut) (nodes aut.ta_gr)
(posMin,negMin,rest) = getMins $ sort <$> distGroups
getMins :: [EquivalenceClass]
-> (Maybe EquivalenceClass, Maybe EquivalenceClass, [EquivalenceClass])
getMins [] = (Nothing, Nothing, [])
getMins ([] : _iss) = error "Minimize: Empty equivalence class should not exist"
getMins (eq@(nd : _) : iss) =
let l = fromJust $ lab aut.ta_gr nd
pol = getLabelPol l
(p,n,iss') = getMins iss
(p',n',iss'') = case (pol, p, n) of
(Pos, Nothing, _) -> (Just eq, n, iss')
(Pos, Just ns, _) ->
if length ns > length eq
then (Just eq, n, ns : iss')
else (Just ns, n, eq : iss')
(Neg, _, Nothing) -> (p, Just eq, iss')
(Neg, _, Just ns) ->
if length ns > length eq
then (p, Just eq, ns : iss')
else (p, Just ns, eq : iss')
in (p', n', iss'')
getLabelPol :: NodeLabel -> Polarity
getLabelPol nl@MkNodeLabel{} = nl.nl_pol
getLabelPol nl@MkPrimitiveNodeLabel{} = nl.pl_pol
genMinimizeFun :: TypeAutCore EdgeLabelNormal -> (Node -> Node)
genMinimizeFun aut = getNewNode
where
preds = predsMap aut.ta_gr
alph = getAlphabet aut.ta_gr
(ls,ps) = initialSplit aut
nodeSets = minimize' preds alph ls ps
getNewNode n = head $ head $ filter (n `elem`) nodeSets
minimize :: TypeAutDet pol -> TypeAutDet pol
minimize aut = aut'
where
ta_core' = removeRedundantEdgesCore aut.ta_core
fun = genMinimizeFun ta_core'
aut' = mapTypeAut fun aut
|
20f4dbf106e852742b3d03346d64604820f17c176b484ba9d937b3a273f2a2f8
|
wdebeaum/step
|
whiz.lisp
|
;;;;
;;;; W::whiz
;;;;
(define-words :pos W::V :TEMPL AGENT-AFFECTED-XP-NP-TEMPL
:words (
(W::whiz
(wordfeats (W::morph (:forms (-vb) :past W::whizzed :ing W::whizzing)))
(SENSES
((meta-data :origin "verbnet-1.5" :entry-date 20051219 :change-date nil :comments nil :vn ("run-51.3.2"))
(LF-PARENT ONT::move-rapidly)
(TEMPL agent-templ) ; like stroll,walk
)
)
)
))
| null |
https://raw.githubusercontent.com/wdebeaum/step/f38c07d9cd3a58d0e0183159d4445de9a0eafe26/src/LexiconManager/Data/new/whiz.lisp
|
lisp
|
W::whiz
like stroll,walk
|
(define-words :pos W::V :TEMPL AGENT-AFFECTED-XP-NP-TEMPL
:words (
(W::whiz
(wordfeats (W::morph (:forms (-vb) :past W::whizzed :ing W::whizzing)))
(SENSES
((meta-data :origin "verbnet-1.5" :entry-date 20051219 :change-date nil :comments nil :vn ("run-51.3.2"))
(LF-PARENT ONT::move-rapidly)
)
)
)
))
|
6e9c060c0c2863905cecf3420e3e356a43a0a934cbd736e0e49efe64ae2d9bbc
|
ermine/xmpp
|
lwt_test.ml
|
open StanzaError
module PlainSocket =
struct
open Lwt
type 'a z = 'a Lwt.t
type fd = Lwt_unix.file_descr
type socket = fd
let get_fd fd = fd
let open_connection sockaddr =
let fd = Lwt_unix.socket Unix.PF_INET Unix.SOCK_STREAM 0 in
Lwt_unix.connect fd sockaddr >>= fun () ->
return fd
let read fd buf start len =
Lwt_unix.read fd buf start len >>=
(fun size ->
if size > 0 then
print_string "IN: "; print_endline (String.sub buf start size);
return size
)
let write fd str =
print_string "OUT: ";
print_endline str;
let len = String.length str in
let rec aux_send start =
Lwt_unix.write fd str start (len - start) >>= fun sent ->
if sent = 0 then
return ()
else
aux_send (start + sent)
in
aux_send 0
let close fd =
Lwt_unix.close fd
end
module TLSSocket =
struct
open Lwt
type 'a z = 'a Lwt.t
type fd = Lwt_unix.file_descr
type socket = {
fd : fd;
socket : Lwt_ssl.socket;
}
let read s buf start len =
Lwt_ssl.read s.socket buf start len >>=
(fun size ->
if size > 0 then
print_string "IN: "; print_endline (String.sub buf start size);
return size
)
let write s str =
print_string "OUT: ";
print_endline str;
let len = String.length str in
let rec aux_send start =
Lwt_ssl.write s.socket str start (len - start) >>= fun sent ->
if sent = 0 then
return ()
else
aux_send (start + sent)
in
aux_send 0
let switch fd =
Ssl.init ();
let ctx = Ssl.create_context Ssl.TLSv1 Ssl.Client_context in
Lwt_ssl.ssl_connect fd ctx >>= fun socket ->
return {
fd;
socket;
}
let close s =
Lwt_ssl.close s.socket
end
module ID =
struct
type t = string
let compare = Pervasives.compare
end
module IDCallback = Map.Make(ID)
module XMPPClient = XMPP.Make (Lwt) (Xmlstream.XmlStream) (IDCallback)
open XMPPClient
module Version = XEP_version.Make (XMPPClient)
let message_callback t stanza =
(match stanza.content.body with
| None -> ()
| Some v -> print_endline v);
send_message t ?jid_to:stanza.jid_from
?id:stanza.id
?kind:stanza.content.message_type
?lang:stanza.lang
?body:stanza.content.body ()
let message_error t ?id ?jid_from ?jid_to ?lang error =
print_endline ("message error: " ^ error.err_text);
return ()
let presence_callback t stanza =
(match stanza.content.presence_type with
| None -> print_endline "available"
| Some _ -> print_endline "something"
); return ()
let presence_error t ?id ?jid_from ?jid_to ?lang error =
print_endline ("presence error: " ^ error.err_text);
return ()
let session t =
register_iq_request_handler t Version.ns_version
(fun ev _jid_from _jid_to _lang () ->
match ev with
| IQGet _el ->
let el = Version.encode {Version.name = "xmpptest";
Version.version = "2.0";
Version.os = Sys.os_type} in
return (IQResult (Some el))
| IQSet _el ->
fail BadRequest
);
register_stanza_handler t (ns_client, "message")
(parse_message ~callback:message_callback ~callback_error:message_error);
register_stanza_handler t (ns_client, "presence")
(parse_presence ~callback:presence_callback ~callback_error:presence_error);
return ()
let _ =
let server = Sys.argv.(1)
and username = Sys.argv.(2)
and password = Sys.argv.(3)
and resource = "xmpp3.0"
and port =
if Array.length Sys.argv < 5 then 5222 else int_of_string Sys.argv.(4) in
let myjid = JID.make_jid username server resource in
let inet_addr =
try Unix.inet_addr_of_string server
with Failure("inet_addr_of_string") ->
(Unix.gethostbyname server).Unix.h_addr_list.(0) in
let sockaddr = Unix.ADDR_INET (inet_addr, port) in
Lwt_main.run (
PlainSocket.open_connection sockaddr >>= fun socket_data ->
let module Socket_module = struct type t = PlainSocket.socket
let socket = socket_data
include PlainSocket
end in
let make_tls () =
TLSSocket.switch (PlainSocket.get_fd socket_data) >>= fun socket_data ->
let module TLS_module = struct type t = TLSSocket.socket
let socket = socket_data
include TLSSocket
end in
return (module TLS_module : XMPPClient.Socket)
in
XMPPClient.setup_session
~user_data:()
~myjid
~plain_socket:(module Socket_module : XMPPClient.Socket)
~tls_socket:make_tls
~password session >>=
(fun session_data -> XMPPClient.parse session_data >>= fun () ->
let module S = (val session_data.socket : Socket) in
S.close S.socket
)
)
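(* Taken together, the handlers above make this a small echo client:
   message_callback prints an incoming body and sends the same body back to
   its sender, presence_callback just logs presence updates, and the IQ
   handler answers software-version queries with name "xmpptest" / "2.0". *)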
| null |
https://raw.githubusercontent.com/ermine/xmpp/85f31c5aaac6e3b5042694ff33ac8afdfb845c06/tests/lwt_test.ml
|
ocaml
|
open StanzaError
module PlainSocket =
struct
open Lwt
type 'a z = 'a Lwt.t
type fd = Lwt_unix.file_descr
type socket = fd
let get_fd fd = fd
let open_connection sockaddr =
let fd = Lwt_unix.socket Unix.PF_INET Unix.SOCK_STREAM 0 in
Lwt_unix.connect fd sockaddr >>= fun () ->
return fd
let read fd buf start len =
Lwt_unix.read fd buf start len >>=
(fun size ->
if size > 0 then
print_string "IN: "; print_endline (String.sub buf start size);
return size
)
let write fd str =
print_string "OUT: ";
print_endline str;
let len = String.length str in
let rec aux_send start =
Lwt_unix.write fd str start (len - start) >>= fun sent ->
if sent = 0 then
return ()
else
aux_send (start + sent)
in
aux_send 0
let close fd =
Lwt_unix.close fd
end
module TLSSocket =
struct
open Lwt
type 'a z = 'a Lwt.t
type fd = Lwt_unix.file_descr
type socket = {
fd : fd;
socket : Lwt_ssl.socket;
}
let read s buf start len =
Lwt_ssl.read s.socket buf start len >>=
(fun size ->
if size > 0 then
print_string "IN: "; print_endline (String.sub buf start size);
return size
)
let write s str =
print_string "OUT: ";
print_endline str;
let len = String.length str in
let rec aux_send start =
Lwt_ssl.write s.socket str start (len - start) >>= fun sent ->
if sent = 0 then
return ()
else
aux_send (start + sent)
in
aux_send 0
let switch fd =
Ssl.init ();
let ctx = Ssl.create_context Ssl.TLSv1 Ssl.Client_context in
Lwt_ssl.ssl_connect fd ctx >>= fun socket ->
return {
fd;
socket;
}
let close s =
Lwt_ssl.close s.socket
end
module ID =
struct
type t = string
let compare = Pervasives.compare
end
module IDCallback = Map.Make(ID)
module XMPPClient = XMPP.Make (Lwt) (Xmlstream.XmlStream) (IDCallback)
open XMPPClient
module Version = XEP_version.Make (XMPPClient)
let message_callback t stanza =
(match stanza.content.body with
| None -> ()
| Some v -> print_endline v);
send_message t ?jid_to:stanza.jid_from
?id:stanza.id
?kind:stanza.content.message_type
?lang:stanza.lang
?body:stanza.content.body ()
let message_error t ?id ?jid_from ?jid_to ?lang error =
print_endline ("message error: " ^ error.err_text);
return ()
let presence_callback t stanza =
(match stanza.content.presence_type with
| None -> print_endline "available"
| Some _ -> print_endline "something"
); return ()
let presence_error t ?id ?jid_from ?jid_to ?lang error =
print_endline ("presence error: " ^ error.err_text);
return ()
let session t =
register_iq_request_handler t Version.ns_version
(fun ev _jid_from _jid_to _lang () ->
match ev with
| IQGet _el ->
let el = Version.encode {Version.name = "xmpptest";
Version.version = "2.0";
Version.os = Sys.os_type} in
return (IQResult (Some el))
| IQSet _el ->
fail BadRequest
);
register_stanza_handler t (ns_client, "message")
(parse_message ~callback:message_callback ~callback_error:message_error);
register_stanza_handler t (ns_client, "presence")
(parse_presence ~callback:presence_callback ~callback_error:presence_error);
return ()
let _ =
let server = Sys.argv.(1)
and username = Sys.argv.(2)
and password = Sys.argv.(3)
and resource = "xmpp3.0"
and port =
if Array.length Sys.argv < 5 then 5222 else int_of_string Sys.argv.(4) in
let myjid = JID.make_jid username server resource in
let inet_addr =
try Unix.inet_addr_of_string server
with Failure("inet_addr_of_string") ->
(Unix.gethostbyname server).Unix.h_addr_list.(0) in
let sockaddr = Unix.ADDR_INET (inet_addr, port) in
Lwt_main.run (
PlainSocket.open_connection sockaddr >>= fun socket_data ->
let module Socket_module = struct type t = PlainSocket.socket
let socket = socket_data
include PlainSocket
end in
let make_tls () =
TLSSocket.switch (PlainSocket.get_fd socket_data) >>= fun socket_data ->
let module TLS_module = struct type t = TLSSocket.socket
let socket = socket_data
include TLSSocket
end in
return (module TLS_module : XMPPClient.Socket)
in
XMPPClient.setup_session
~user_data:()
~myjid
~plain_socket:(module Socket_module : XMPPClient.Socket)
~tls_socket:make_tls
~password session >>=
(fun session_data -> XMPPClient.parse session_data >>= fun () ->
let module S = (val session_data.socket : Socket) in
S.close S.socket
)
)
|
|
3d6d1de425c3cb1d2ae2c13a559c4fa851865e7bedd083da69d5ee76ebe5acc3
|
Clojure2D/clojure2d-examples
|
bvh.clj
|
(ns rt4.the-next-week.ch04b.bvh
(:require [rt4.the-next-week.ch04b.hittable :as hittable]
[rt4.the-next-week.ch04b.aabb :as aabb]
[rt4.the-next-week.ch04b.interval :as interval]
[fastmath.core :as m]))
(set! *warn-on-reflection* true)
(set! *unchecked-math* :warn-on-boxed)
(m/use-primitive-operators)
(defrecord BVHNode [left right bbox]
hittable/HittableProto
(hit [_ r ray-t]
(when (aabb/hit bbox r ray-t)
(let [rec-left (and left (hittable/hit left r ray-t))
rec-right (and right (hittable/hit right r (interval/interval (:mn ray-t)
(or (:t rec-left)
(:mx ray-t)))))]
(or rec-right rec-left)))))
(defmacro ^:private compare-hittables
[selector]
`(fn [a# b#]
(< (double (:mn (~selector (:bbox a#))))
(double (:mn (~selector (:bbox b#)))))))
(def comparators [(compare-hittables :x)
(compare-hittables :y)
(compare-hittables :z)])
(defn- build-tree
[[a b :as objects]]
(let [comparator (rand-nth comparators)
object-span (count objects)
[left right] (condp = object-span
1 [nil a]
2 (if (comparator a b) [a b] [b a])
(let [sorted-objects (sort comparator objects)
mid (/ object-span 2)]
(map build-tree (split-at mid sorted-objects))))]
(->BVHNode left right (if left (aabb/merge-boxes (:bbox left) (:bbox right)) (:bbox right)))))
(defn bvh-node
([hittable-list] (build-tree (:objects hittable-list))))
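;; Construction note: build-tree picks a random axis comparator per call,
;; sorts the objects along that axis and recurses on the two halves; a single
;; object becomes a leaf with a nil left child, which the hit method above
;; short-circuits, and bounding boxes are only merged when a left child exists.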
| null |
https://raw.githubusercontent.com/Clojure2D/clojure2d-examples/ead92d6f17744b91070e6308157364ad4eab8a1b/src/rt4/the_next_week/ch04b/bvh.clj
|
clojure
|
(ns rt4.the-next-week.ch04b.bvh
(:require [rt4.the-next-week.ch04b.hittable :as hittable]
[rt4.the-next-week.ch04b.aabb :as aabb]
[rt4.the-next-week.ch04b.interval :as interval]
[fastmath.core :as m]))
(set! *warn-on-reflection* true)
(set! *unchecked-math* :warn-on-boxed)
(m/use-primitive-operators)
(defrecord BVHNode [left right bbox]
hittable/HittableProto
(hit [_ r ray-t]
(when (aabb/hit bbox r ray-t)
(let [rec-left (and left (hittable/hit left r ray-t))
rec-right (and right (hittable/hit right r (interval/interval (:mn ray-t)
(or (:t rec-left)
(:mx ray-t)))))]
(or rec-right rec-left)))))
(defmacro ^:private compare-hittables
[selector]
`(fn [a# b#]
(< (double (:mn (~selector (:bbox a#))))
(double (:mn (~selector (:bbox b#)))))))
(def comparators [(compare-hittables :x)
(compare-hittables :y)
(compare-hittables :z)])
(defn- build-tree
[[a b :as objects]]
(let [comparator (rand-nth comparators)
object-span (count objects)
[left right] (condp = object-span
1 [nil a]
2 (if (comparator a b) [a b] [b a])
(let [sorted-objects (sort comparator objects)
mid (/ object-span 2)]
(map build-tree (split-at mid sorted-objects))))]
(->BVHNode left right (if left (aabb/merge-boxes (:bbox left) (:bbox right)) (:bbox right)))))
(defn bvh-node
([hittable-list] (build-tree (:objects hittable-list))))
|
|
0bd85f1d8fbab8fa812605b4044202c42a6e62e065ecf355f77ba078da49c876
|
KaroshiBee/weevil
|
mdb_stepper_config.ml
|
module Client_context = Tezos_client_base.Client_context
module Client_context_unix = Tezos_client_base_unix.Client_context_unix
open Mdb_import.Tez
(* TODO make sure to link with correct protocol modules and mockup maker *)
type t = {
chain_id:Chain_id.t;
rpc_context:Env.Updater.rpc_context;
unix_mockup:Client_context_unix.unix_mockup;
}
let chain_id t = t.chain_id
let mock_context t = t.unix_mockup
let make_alpha_context t =
let open Lwt_result_syntax in
let timestamp = t.rpc_context.block_header.timestamp in
let level = Int32.succ t.rpc_context.block_header.level in (* `Successor_level is safer? *)
let* (alpha_context, _, _) =
Ctxt.prepare
~level
~predecessor_timestamp:timestamp
~timestamp
~fitness:[] (* TODO not sure about this fitness *)
t.rpc_context.context
|> Lwt.map Env.wrap_tzresult
in
return alpha_context
(* NOTE we are in Tezos_base.TzPervasives.tzresult Lwt.t because of Tezos_mockup lib calls *)
let setup_mockup_rpc_client_config ~base_dir cctxt protocol_hash =
let open Lwt_result_syntax in
let in_memory_mockup (protocol : Protocol_hash.t option) =
match protocol with
| None -> Tezos_mockup.Persistence.default_mockup_context cctxt
| Some protocol_hash ->
let*! () = Logs_lwt.debug (fun m -> m "making with a protocol hash") in
Tezos_mockup.Persistence.init_mockup_context_by_protocol_hash
~cctxt
~protocol_hash
~constants_overrides_json:None
~bootstrap_accounts_json:None
in
let* b = Tezos_mockup.Persistence.classify_base_dir base_dir in
let* (mockup_env, {chain = chain_id; rpc_context; protocol_data}), mem_only =
match b with
| Tezos_mockup.Persistence.Base_dir_is_empty
| Tezos_mockup.Persistence.Base_dir_is_file
| Tezos_mockup.Persistence.Base_dir_is_nonempty
| Tezos_mockup.Persistence.Base_dir_does_not_exist ->
let mem_only = true in
let* res = in_memory_mockup protocol_hash in
return (res, mem_only)
| Tezos_mockup.Persistence.Base_dir_is_mockup ->
let mem_only = false in
let* res =
Tezos_mockup.Persistence.get_mockup_context_from_disk
~base_dir
~protocol_hash
cctxt
in
return (res, mem_only)
in
let unix_mockup = new Client_context_unix.unix_mockup
~base_dir
~mem_only
~mockup_env
~chain_id
~rpc_context
~protocol_data
in
return {
chain_id;
rpc_context;
unix_mockup;
}
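(* Note on the classification above: unless base_dir already classifies as
   Base_dir_is_mockup, the mockup context is built purely in memory
   (mem_only = true) via in_memory_mockup; only an existing mockup directory
   is re-read from disk through get_mockup_context_from_disk. *)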
| null |
https://raw.githubusercontent.com/KaroshiBee/weevil/ddb8d0fa8618db3ed2a7eaf4f635200e7a1cc8b6/bin/weevil_mdb_011/src/mdb_stepper_config.ml
|
ocaml
|
`Successor_level is safer?
TODO not sure about this fitness
|
module Client_context = Tezos_client_base.Client_context
module Client_context_unix = Tezos_client_base_unix.Client_context_unix
open Mdb_import.Tez
(* TODO make sure to link with correct protocol modules and mockup maker *)
type t = {
chain_id:Chain_id.t;
rpc_context:Env.Updater.rpc_context;
unix_mockup:Client_context_unix.unix_mockup;
}
let chain_id t = t.chain_id
let mock_context t = t.unix_mockup
let make_alpha_context t =
let open Lwt_result_syntax in
let timestamp = t.rpc_context.block_header.timestamp in
let* (alpha_context, _, _) =
Ctxt.prepare
~level
~predecessor_timestamp:timestamp
~timestamp
t.rpc_context.context
|> Lwt.map Env.wrap_tzresult
in
return alpha_context
(* NOTE we are in Tezos_base.TzPervasives.tzresult Lwt.t because of Tezos_mockup lib calls *)
let setup_mockup_rpc_client_config ~base_dir cctxt protocol_hash =
let open Lwt_result_syntax in
let in_memory_mockup (protocol : Protocol_hash.t option) =
match protocol with
| None -> Tezos_mockup.Persistence.default_mockup_context cctxt
| Some protocol_hash ->
let*! () = Logs_lwt.debug (fun m -> m "making with a protocol hash") in
Tezos_mockup.Persistence.init_mockup_context_by_protocol_hash
~cctxt
~protocol_hash
~constants_overrides_json:None
~bootstrap_accounts_json:None
in
let* b = Tezos_mockup.Persistence.classify_base_dir base_dir in
let* (mockup_env, {chain = chain_id; rpc_context; protocol_data}), mem_only =
match b with
| Tezos_mockup.Persistence.Base_dir_is_empty
| Tezos_mockup.Persistence.Base_dir_is_file
| Tezos_mockup.Persistence.Base_dir_is_nonempty
| Tezos_mockup.Persistence.Base_dir_does_not_exist ->
let mem_only = true in
let* res = in_memory_mockup protocol_hash in
return (res, mem_only)
| Tezos_mockup.Persistence.Base_dir_is_mockup ->
let mem_only = false in
let* res =
Tezos_mockup.Persistence.get_mockup_context_from_disk
~base_dir
~protocol_hash
cctxt
in
return (res, mem_only)
in
let unix_mockup = new Client_context_unix.unix_mockup
~base_dir
~mem_only
~mockup_env
~chain_id
~rpc_context
~protocol_data
in
return {
chain_id;
rpc_context;
unix_mockup;
}
|
94d1791d85c5a31c9d197c45d2de5de512bde56bf95867ce0c8699c2a86e6aef
|
michiakig/LispInSmallPieces
|
chap8d.scm
|
;;; $Id: chap8d.scm,v 4.1 2006/11/24 18:41:05 queinnec Exp $
;;;(((((((((((((((((((((((((((((((( L i S P ))))))))))))))))))))))))))))))))
;;; This file is part of the files that accompany the book:
;;; LISP Implantation Semantique Programmation (InterEditions, France)
;;; By Christian Queinnec < >
;;; Newest version may be retrieved from:
;;; (IP 128.93.2.54) ftp.inria.fr:INRIA/Projects/icsla/Books/LiSP*.tar.gz
;;; Check the README file before using this file.
;;;(((((((((((((((((((((((((((((((( L i S P ))))))))))))))))))))))))))))))))
;;; variant of chap8c.scm for the bytecode compiler chap7g.scm
(define (meaning e r tail?)
(if (atom? e)
(if (symbol? e) (meaning-reference e r tail?)
(meaning-quotation e r tail?) )
(case (car e)
((quote) (meaning-quotation (cadr e) r tail?))
((lambda) (meaning-abstraction (cadr e) (cddr e) r tail?))
((if) (meaning-alternative (cadr e) (caddr e) (cadddr e) r tail?))
((begin) (meaning-sequence (cdr e) r tail?))
((set!) (meaning-assignment (cadr e) (caddr e) r tail?))
((bind-exit) (meaning-bind-exit (caadr e) (cddr e) r tail?))
((dynamic) (meaning-dynamic-reference (cadr e) r tail?))
((dynamic-let) (meaning-dynamic-let (car (cadr e))
(cadr (cadr e))
(cddr e) r tail? ))
((monitor) (meaning-monitor (cadr e) (cddr e) r tail?))
((eval) (meaning-eval (cadr e) r tail?))
(else (meaning-application (car e) (cdr e) r tail?)) ) ) )
(define (meaning-eval e r tail?)
(let ((m (meaning e r #f)))
(EVAL/CE m r) ) )
(define (EVAL/CE m r)
(append (PRESERVE-ENV) (CONSTANT r) (PUSH-VALUE)
m (COMPILE-RUN) (RESTORE-ENV) ) )
(define (COMPILE-RUN) (list 255))
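;;; Opcode 255, emitted by COMPILE-RUN, is presumably dispatched at run time
;;; to compile-and-run below, which compiles its argument within the captured
;;; environment and jumps to the entry point returned by compile-on-the-fly.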
(define (compile-and-run v r tail?)
(unless tail? (stack-push *pc*))
(set! *pc* (compile-on-the-fly v r)) )
;;; Compile program v within environment r, install resulting code and
;;; return its entry point.
(define (compile-on-the-fly v r)
(set! g.current '())
(for-each g.current-extend! sg.current.names)
(set! *quotations* (vector->list *constants*))
(set! *dynamic-variables* *dynamic-variables*)
(let ((code (apply vector (append (meaning v r #f) (RETURN)))))
(set! sg.current.names (map car (reverse g.current)))
(let ((v (make-vector (length sg.current.names)
undefined-value )))
(vector-copy! sg.current v 0 (vector-length sg.current))
(set! sg.current v) )
(set! *constants* (apply vector *quotations*))
(set! *dynamic-variables* *dynamic-variables*)
(install-code! code) ) )
;;; Exercice: Share the compilation.
(define (prepare e)
(eval/ce `(lambda () ,e)) )
;;; Exercice: eval/at with eval/ce without clash.
(define (eval/at e)
(let ((g (gensym)))
(eval/ce `(lambda (,g) (eval/ce ,g))) ) )
;;; end of chap8d.scm
| null |
https://raw.githubusercontent.com/michiakig/LispInSmallPieces/0a2762d539a5f4c7488fffe95722790ac475c2ea/src/chap8d.scm
|
scheme
|
(((((((((((((((((((((((((((((((( L i S P ))))))))))))))))))))))))))))))))
This file is part of the files that accompany the book:
Newest version may be retrieved from:
Check the README file before using this file.
(((((((((((((((((((((((((((((((( L i S P ))))))))))))))))))))))))))))))))
variant of chap8c.scm for the bytecode compiler chap7g.scm
Compile program v within environment r, install resulting code and
return its entry point.
Exercice: Share the compilation.
end of chap8d.scm
|
;;; $Id: chap8d.scm,v 4.1 2006/11/24 18:41:05 queinnec Exp $
;;; LISP Implantation Semantique Programmation (InterEditions, France)
;;; By Christian Queinnec < >
;;; (IP 128.93.2.54) ftp.inria.fr:INRIA/Projects/icsla/Books/LiSP*.tar.gz
(define (meaning e r tail?)
(if (atom? e)
(if (symbol? e) (meaning-reference e r tail?)
(meaning-quotation e r tail?) )
(case (car e)
((quote) (meaning-quotation (cadr e) r tail?))
((lambda) (meaning-abstraction (cadr e) (cddr e) r tail?))
((if) (meaning-alternative (cadr e) (caddr e) (cadddr e) r tail?))
((begin) (meaning-sequence (cdr e) r tail?))
((set!) (meaning-assignment (cadr e) (caddr e) r tail?))
((bind-exit) (meaning-bind-exit (caadr e) (cddr e) r tail?))
((dynamic) (meaning-dynamic-reference (cadr e) r tail?))
((dynamic-let) (meaning-dynamic-let (car (cadr e))
(cadr (cadr e))
(cddr e) r tail? ))
((monitor) (meaning-monitor (cadr e) (cddr e) r tail?))
((eval) (meaning-eval (cadr e) r tail?))
(else (meaning-application (car e) (cdr e) r tail?)) ) ) )
(define (meaning-eval e r tail?)
(let ((m (meaning e r #f)))
(EVAL/CE m r) ) )
(define (EVAL/CE m r)
(append (PRESERVE-ENV) (CONSTANT r) (PUSH-VALUE)
m (COMPILE-RUN) (RESTORE-ENV) ) )
(define (COMPILE-RUN) (list 255))
(define (compile-and-run v r tail?)
(unless tail? (stack-push *pc*))
(set! *pc* (compile-on-the-fly v r)) )
(define (compile-on-the-fly v r)
(set! g.current '())
(for-each g.current-extend! sg.current.names)
(set! *quotations* (vector->list *constants*))
(set! *dynamic-variables* *dynamic-variables*)
(let ((code (apply vector (append (meaning v r #f) (RETURN)))))
(set! sg.current.names (map car (reverse g.current)))
(let ((v (make-vector (length sg.current.names)
undefined-value )))
(vector-copy! sg.current v 0 (vector-length sg.current))
(set! sg.current v) )
(set! *constants* (apply vector *quotations*))
(set! *dynamic-variables* *dynamic-variables*)
(install-code! code) ) )
(define (prepare e)
(eval/ce `(lambda () ,e)) )
;;; Exercice: eval/at with eval/ce without clash.
(define (eval/at e)
(let ((g (gensym)))
(eval/ce `(lambda (,g) (eval/ce ,g))) ) )
|
5c6b48a4a1b9f1b49e6f7b3c73bc27dfa7f6659653e12c60085c81eb29ace06b
|
martinsumner/leveled
|
perf_SUITE.erl
|
-module(perf_SUITE).
-include_lib("common_test/include/ct.hrl").
-include("include/leveled.hrl").
-export([all/0]).
-export([bigpcl_bucketlist/1
]).
all() -> [bigpcl_bucketlist].
bigpcl_bucketlist(_Config) ->
%%
%% In OTP 22+ there appear to be issues with anonymous functions which
%% have a reference to loop state, requiring a copy of all the loop state
%% to be made when returning the function.
%% This test creates a large loop state on the leveled_penciller to prove
%% this.
%% The problem can be resolved simply by renaming the element of the loop
%% state using within the anonymous function.
RootPath = testutil:reset_filestructure(),
BucketCount = 500,
ObjectCount = 100,
StartOpts1 = [{root_path, RootPath},
{max_journalsize, 50000000},
{cache_size, 4000},
{max_pencillercachesize, 128000},
{max_sstslots, 256},
{sync_strategy, testutil:sync_strategy()},
{compression_point, on_compact}],
{ok, Bookie1} = leveled_bookie:book_start(StartOpts1),
BucketList =
lists:map(fun(I) -> list_to_binary(integer_to_list(I)) end,
lists:seq(1, BucketCount)),
MapFun =
fun(B) ->
testutil:generate_objects(ObjectCount, 1, [],
leveled_rand:rand_bytes(100),
fun() -> [] end,
B)
end,
ObjLofL = lists:map(MapFun, BucketList),
lists:foreach(fun(ObjL) -> testutil:riakload(Bookie1, ObjL) end, ObjLofL),
BucketFold =
fun(B, _K, _V, Acc) ->
case sets:is_element(B, Acc) of
true ->
Acc;
false ->
sets:add_element(B, Acc)
end
end,
FBAccT = {BucketFold, sets:new()},
{async, BucketFolder1} =
leveled_bookie:book_headfold(Bookie1,
?RIAK_TAG,
{bucket_list, BucketList},
FBAccT,
false, false, false),
{FoldTime1, BucketList1} = timer:tc(BucketFolder1, []),
true = BucketCount == sets:size(BucketList1),
ok = leveled_bookie:book_close(Bookie1),
{ok, Bookie2} = leveled_bookie:book_start(StartOpts1),
{async, BucketFolder2} =
leveled_bookie:book_headfold(Bookie2,
?RIAK_TAG,
{bucket_list, BucketList},
FBAccT,
false, false, false),
{FoldTime2, BucketList2} = timer:tc(BucketFolder2, []),
true = BucketCount == sets:size(BucketList2),
io:format("Fold pre-close ~w ms post-close ~w ms~n",
[FoldTime1 div 1000, FoldTime2 div 1000]),
true = FoldTime1 < 10 * FoldTime2,
%% The fold in-memory should be the same order of magnitude of response
%% time as the fold post-persistence
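%% (the assertion true = FoldTime1 < 10 * FoldTime2 above encodes this:
%% the pre-close fold may be at most ten times slower than the fold against
%% the restarted, persisted store)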
ok = leveled_bookie:book_destroy(Bookie2).
| null |
https://raw.githubusercontent.com/martinsumner/leveled/ed0301e2cfc767b6b65947e8283235b0b25ebf02/test/end_to_end/perf_SUITE.erl
|
erlang
|
have a reference to loop state, requiring a copy of all the loop state
to be made when returning the function.
This test creates a large loop state on the leveled_penciller to prove
this.
The problem can be resolved simply by renaming the element of the loop
state using within the anonymous function.
The fold in-memory should be the same order of magnitude of response
time as the fold post-persistence
|
-module(perf_SUITE).
-include_lib("common_test/include/ct.hrl").
-include("include/leveled.hrl").
-export([all/0]).
-export([bigpcl_bucketlist/1
]).
all() -> [bigpcl_bucketlist].
bigpcl_bucketlist(_Config) ->
%% In OTP 22+ there appear to be issues with anonymous functions which
RootPath = testutil:reset_filestructure(),
BucketCount = 500,
ObjectCount = 100,
StartOpts1 = [{root_path, RootPath},
{max_journalsize, 50000000},
{cache_size, 4000},
{max_pencillercachesize, 128000},
{max_sstslots, 256},
{sync_strategy, testutil:sync_strategy()},
{compression_point, on_compact}],
{ok, Bookie1} = leveled_bookie:book_start(StartOpts1),
BucketList =
lists:map(fun(I) -> list_to_binary(integer_to_list(I)) end,
lists:seq(1, BucketCount)),
MapFun =
fun(B) ->
testutil:generate_objects(ObjectCount, 1, [],
leveled_rand:rand_bytes(100),
fun() -> [] end,
B)
end,
ObjLofL = lists:map(MapFun, BucketList),
lists:foreach(fun(ObjL) -> testutil:riakload(Bookie1, ObjL) end, ObjLofL),
BucketFold =
fun(B, _K, _V, Acc) ->
case sets:is_element(B, Acc) of
true ->
Acc;
false ->
sets:add_element(B, Acc)
end
end,
FBAccT = {BucketFold, sets:new()},
{async, BucketFolder1} =
leveled_bookie:book_headfold(Bookie1,
?RIAK_TAG,
{bucket_list, BucketList},
FBAccT,
false, false, false),
{FoldTime1, BucketList1} = timer:tc(BucketFolder1, []),
true = BucketCount == sets:size(BucketList1),
ok = leveled_bookie:book_close(Bookie1),
{ok, Bookie2} = leveled_bookie:book_start(StartOpts1),
{async, BucketFolder2} =
leveled_bookie:book_headfold(Bookie2,
?RIAK_TAG,
{bucket_list, BucketList},
FBAccT,
false, false, false),
{FoldTime2, BucketList2} = timer:tc(BucketFolder2, []),
true = BucketCount == sets:size(BucketList2),
io:format("Fold pre-close ~w ms post-close ~w ms~n",
[FoldTime1 div 1000, FoldTime2 div 1000]),
true = FoldTime1 < 10 * FoldTime2,
ok = leveled_bookie:book_destroy(Bookie2).
|