_id (string, 64 chars) | repository (string, 6-84 chars) | name (string, 4-110 chars) | content (string, 0-248k chars) | license (null) | download_url (string, 89-454 chars) | language (7 classes) | comments (string, 0-74.6k chars) | code (string, 0-248k chars) |
---|---|---|---|---|---|---|---|---|
c4b21139973db689a2855866e0239914b23b6fb6e26ffcbf6698202e323823b6
|
2600hz-archive/whistle
|
rabbit_exchange_type.erl
|
%% The contents of this file are subject to the Mozilla Public License
%% Version 1.1 (the "License"); you may not use this file except in
%% compliance with the License. You may obtain a copy of the License
%% at /
%%
%% Software distributed under the License is distributed on an "AS IS"
%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
%% the License for the specific language governing rights and
%% limitations under the License.
%%
%% The Original Code is RabbitMQ.
%%
%% The Initial Developer of the Original Code is VMware, Inc.
%% Copyright (c) 2007-2011 VMware, Inc. All rights reserved.
%%
-module(rabbit_exchange_type).
-export([behaviour_info/1]).
behaviour_info(callbacks) ->
[
{description, 0},
{route, 2},
%% called BEFORE declaration, to check args etc; may exit with #amqp_error{}
{validate, 1},
%% called after declaration when previously absent
{create, 2},
%% called when recovering
{recover, 2},
%% called after exchange deletion.
{delete, 3},
%% called after a binding has been added
{add_binding, 3},
%% called after bindings have been deleted.
{remove_bindings, 3},
%% called when comparing exchanges for equivalence - should return ok or
%% exit with #amqp_error{}
{assert_args_equivalence, 2}
];
behaviour_info(_Other) ->
undefined.
| null |
https://raw.githubusercontent.com/2600hz-archive/whistle/1a256604f0d037fac409ad5a55b6b17e545dcbf9/lib/rabbitmq_server-2.4.1/src/rabbit_exchange_type.erl
|
erlang
|
Version 1.1 (the "License"); you may not use this file except in
compliance with the License. You may obtain a copy of the License
at /
basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
the License for the specific language governing rights and
limitations under the License.
called after declaration when previously absent
called when recovering
called after exchange deletion.
called after a binding has been added
called after bindings have been deleted.
called when comparing exchanges for equivalence - should return ok or
exit with #amqp_error{}
|
The contents of this file are subject to the Mozilla Public License
Software distributed under the License is distributed on an "AS IS"
The Original Code is RabbitMQ.
The Initial Developer of the Original Code is VMware, Inc.
Copyright (c) 2007-2011 VMware, Inc. All rights reserved.
-module(rabbit_exchange_type).
-export([behaviour_info/1]).
behaviour_info(callbacks) ->
[
{description, 0},
{route, 2},
called BEFORE declaration, to check args etc; may exit with #amqp_error{}
{validate, 1},
{create, 2},
{recover, 2},
{delete, 3},
{add_binding, 3},
{remove_bindings, 3},
{assert_args_equivalence, 2}
];
behaviour_info(_Other) ->
undefined.
|
1ca40ff9b0622dd8b930afa84e235dd10f96e356ed32217479b12593a0aa0efe
|
aradarbel10/MyOwnTT
|
semantics.ml
|
open Common
module Syn = Syntax
type lvl = Lvl of int
(** Values use levels (counting the environment from the left) which gives us weakening for free. *)
(** We separate out of values a distinct subdomain [neut] for neutral values,
those which get "stuck" on variables. Intuitively, when the variables would
be expanded in the future, we'd be able to perform more β-reductions on neutral
terms. Non-neutral values are already fully β-reduced. *)
type value =
| Pi of name * value * closure
| Lam of name * value * closure
| Sig of tele
| Rcd of (name * value) list
| Prod of value list
| Tup of value list
| Uni
| Bool
| True
| False
| Nat
| NatZ
| NatS of value
| Neut of head * spine * value
and head =
| Var of lvl
| Glue of lvl * value Lazy.t (** Glued Evaluation
Allows us to unfold top-level definitions lazily, leads to reduced term sizes.
[Glue] stores along with the [neut] itself another lazy version of the same
value in which all top level definitions are unfolded. *)
and elim =
| Proj of name
| ProjAt of int
| App of {arg : value; base : value}
(** [base] is the type of [arg] (base of the pi's type-family),
used later in the type-directed conversion. *)
| BoolInd of {motive : value; tcase : value; fcase : value}
| NatInd of {motive : value; zcase : value; scase : value}
and spine = elim list
(** We use a spine based representation of neutral terms where the head is the variable
    it got stuck on, and it can be easily accessed in constant time. Example in pseudo-notation:
    A spine [[App "y", Fst, IfThenElse 1 -1, Snd]] with head [Var "x"] represents the expression
    `snd (if (fst (x y)) then 1 else -1)` *)
and closure =
| C of {bdr : Syn.term binder; env : env}
and tele =
| T of {bdrs : (name * Syn.term) list; env : env}
and env = (name * value Lazy.t) list
(*
| Emp
| Local of env * name * value
| Toplevel of env * name * value (*TODO need Lazy.t here?*)
*)
exception OutOfBounds of string
let rec atIdx (env : env) (Idx i : Syn.idx) : value =
match env with
| [] -> raise (OutOfBounds ("idx" ^ string_of_int i))
| (_, v) :: env' ->
if i == 0
then Lazy.force v
else atIdx env' (Idx (i - 1))
(* TODO still need this? ideally just use scn.hi always *)
let rec aux (env : env) : int =
match env with
| [] -> 0
| _ :: env' -> 1 + aux env'
in Lvl (aux env)
(* TODO still need this? ideally store names separately in scene *)
let rec names (env : env) : name list =
match env with
| [] -> []
| (x, _) :: env' -> x :: names env'
(** We use this helper function to propagate projections (lazily!) through
[Glue] into the unfolded version of the value. *)
let head_map (f : value -> value) (hd : head) : head =
match hd with
| Var _ -> hd
| Glue (i, unfd) -> Glue (i, Lazy.map f unfd)
let inc (Lvl l : lvl) : lvl = Lvl (l + 1)
let nextvar (siz : lvl) (typ : value) : value = Neut (Var siz, [], typ)
let var (i : lvl) (typ : value) : value = Neut (Var i, [], typ)
let rec force_head (vl : value) : value =
match vl with
| Neut (Glue (_, unfd), _, _) -> force_head (Lazy.force unfd)
| _ -> vl
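(* Editor's illustration, not part of semantics.ml: the spine representation
   documented above keeps the variable a neutral term is stuck on at the head
   and accumulates its eliminators in a list, so the head is reachable in
   constant time. A self-contained sketch with simplified stand-in types (the
   primed names are hypothetical; the real [elim] above also carries motives
   and argument types): *)
type head' = Var' of string
type elim' = App' of string | Fst' | Snd' | IfThenElse' of int * int
type neut' = head' * elim' list
(* `snd (if (fst (x y)) then 1 else -1)` from the comment above, with the
   eliminators listed innermost-first: *)
let example_neut : neut' =
  (Var' "x", [App' "y"; Fst'; IfThenElse' (1, -1); Snd'])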
| null |
https://raw.githubusercontent.com/aradarbel10/MyOwnTT/cd3ac399fc09510f569e1fff25cfda6982f58d7b/lib/semantics.ml
|
ocaml
|
* Values use levels (counting the environment from the left) which gives us weakening for free.
* We separate out of values a distinct subdomain [neut] for neutral values,
those which get "stuck" on variables. Intuitively, when the variables would
be expanded in the future, we'd be able to perform more β-reductions on neutral
terms. Non-neutral values are already fully β-reduced.
* Glued Evaluation
Allows us to unfold top-level definitions lazily, leads to reduced term sizes.
[Glue] stores along with the [neut] itself another lazy version of the same
value in which all top level definitions are unfolded.
* [base] is the type of [arg] (base of the pi's type-family),
used later in the type-directed conversion.
TODO need Lazy.t here?
* We use this helper function to propagate projections (lazily!) through
[Glue] into the unfolded version of the value.
|
open Common
module Syn = Syntax
type lvl = Lvl of int
type value =
| Pi of name * value * closure
| Lam of name * value * closure
| Sig of tele
| Rcd of (name * value) list
| Prod of value list
| Tup of value list
| Uni
| Bool
| True
| False
| Nat
| NatZ
| NatS of value
| Neut of head * spine * value
and head =
| Var of lvl
| Glue of lvl * value Lazy.t
and elim =
| Proj of name
| ProjAt of int
| App of {arg : value; base : value}
| BoolInd of {motive : value; tcase : value; fcase : value}
| NatInd of {motive : value; zcase : value; scase : value}
and spine = elim list
(** We use a spine based representation of neutral terms where the head is the variable
    it got stuck on, and it can be easily accessed in constant time. Example in pseudo-notation:
    A spine [[App "y", Fst, IfThenElse 1 -1, Snd]] with head [Var "x"] represents the expression
    `snd (if (fst (x y)) then 1 else -1)` *)
and closure =
| C of {bdr : Syn.term binder; env : env}
and tele =
| T of {bdrs : (name * Syn.term) list; env : env}
and env = (name * value Lazy.t) list
(*
| Emp
| Local of env * name * value
| Toplevel of env * name * value (*TODO need Lazy.t here?*)
*)
exception OutOfBounds of string
let rec atIdx (env : env) (Idx i : Syn.idx) : value =
match env with
| [] -> raise (OutOfBounds ("idx" ^ string_of_int i))
| (_, v) :: env' ->
if i == 0
then Lazy.force v
else atIdx env' (Idx (i - 1))
(* TODO still need this? ideally just use scn.hi always *)
let rec aux (env : env) : int =
match env with
| [] -> 0
| _ :: env' -> 1 + aux env'
in Lvl (aux env)
(* TODO still need this? ideally store names separately in scene *)
let rec names (env : env) : name list =
match env with
| [] -> []
| (x, _) :: env' -> x :: names env'
let head_map (f : value -> value) (hd : head) : head =
match hd with
| Var _ -> hd
| Glue (i, unfd) -> Glue (i, Lazy.map f unfd)
let inc (Lvl l : lvl) : lvl = Lvl (l + 1)
let nextvar (siz : lvl) (typ : value) : value = Neut (Var siz, [], typ)
let var (i : lvl) (typ : value) : value = Neut (Var i, [], typ)
let rec force_head (vl : value) : value =
match vl with
| Neut (Glue (_, unfd), _, _) -> force_head (Lazy.force unfd)
| _ -> vl
|
87418626e761eeac79e533c76c72301df598df2da935d24d729934964ac025f5
|
kowey/GenI
|
Internal.hs
|
-- GenI surface realiser
-- Copyright (C) 2005-2009 and
--
-- This program is free software; you can redistribute it and/or
-- modify it under the terms of the GNU General Public License
-- as published by the Free Software Foundation; either version 2
-- of the License, or (at your option) any later version.
--
-- This program is distributed in the hope that it will be useful,
-- but WITHOUT ANY WARRANTY; without even the implied warranty of
-- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-- GNU General Public License for more details.
--
-- You should have received a copy of the GNU General Public License
-- along with this program; if not, write to the Free Software
-- Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE MultiParamTypeClasses #-}
{-# LANGUAGE OverloadedStrings #-}
-- | Internals of lexical entry manipulation
module NLP.GenI.Lexicon.Internal where
-- import Debug.Trace -- for test stuff
import Data.Binary
import Data.FullList
import Data.Function
import Data.Generics (Data)
import Data.List (sortBy)
import Data.Text (Text)
import qualified Data.Text as T
import Data.Typeable (Typeable)
import NLP.GenI.FeatureStructure
import NLP.GenI.GeniShow
import NLP.GenI.GeniVal
import NLP.GenI.Polarity.Types (SemPols)
import NLP.GenI.Pretty
import NLP.GenI.Semantics
import Control.DeepSeq
--instance Show (IO()) where
-- show _ = ""
-- | Collection of lexical entries
type Lexicon = [LexEntry]
-- | Lexical entry
data LexEntry = LexEntry
{ iword :: FullList Text -- ^ normally just a singleton,
-- useful for merging synonyms
, ifamname :: Text -- ^ tree family to anchor to
, iparams :: [GeniVal] -- ^ parameters (deprecated; use the interface)
, iinterface :: Flist GeniVal -- ^ features to unify with tree schema interface
, ifilters :: Flist GeniVal -- ^ features to pick out family members we want
, iequations :: Flist GeniVal -- ^ path equations
, isemantics :: Sem -- ^ lexical semantics
, isempols :: [SemPols] -- ^ polarities (must be same length as 'isemantics')
}
deriving (Eq, Data, Typeable)
-- | See also 'mkFullLexEntry'
-- This version comes with some sensible defaults.
mkLexEntry :: FullList Text -- ^ word
-> Text -- ^ family name
-> [GeniVal] -- ^ parameters list (deprecated)
-> Flist GeniVal -- ^ interface (use instead of params)
-> Flist GeniVal -- ^ filters
-> Flist GeniVal -- ^ equations
-> Sem -- ^ semantics
-> LexEntry
mkLexEntry word famname params interface filters equations sem =
mkFullLexEntry word famname params interface filters equations
sem (map noSemPols sem)
where
noSemPols l = replicate (length (lArgs l)) 0
-- | Variant of 'mkLexEntry' but with more control
mkFullLexEntry :: FullList Text -- ^ word
-> Text -- ^ family name
-> [GeniVal] -- ^ parameters list (deprecated)
-> Flist GeniVal -- ^ interface (use instead of params)
-> Flist GeniVal -- ^ filters
-> Flist GeniVal -- ^ equations
-> Sem -- ^ semantics
-> [SemPols] -- ^ semantic polarities
-> LexEntry
mkFullLexEntry word famname params interface filters equations sem sempols =
LexEntry
(sortNub word)
famname
params
(sortFlist interface)
(sortFlist filters)
(sortFlist equations)
sem2
sempols2
where
(sem2, sempols2) = unzip $ sortBy (compareOnLiteral `on` fst) (zip sem sempols)
instance DescendGeniVal LexEntry where
descendGeniVal s i =
i { iinterface = descendGeniVal s (iinterface i)
, iequations = descendGeniVal s (iequations i)
, isemantics = descendGeniVal s (isemantics i)
, iparams = descendGeniVal s (iparams i) }
instance Collectable LexEntry where
collect l = (collect $ iinterface l) . (collect $ iparams l) .
(collect $ ifilters l) . (collect $ iequations l) .
(collect $ isemantics l)
-- ----------------------------------------------------------------------
-- lexicon semantics
-- ----------------------------------------------------------------------
-- | An annotated GeniVal. This is for a rather old, obscure
-- variant on the polarity filtering optimisation. To account
-- for zero literal semantics, we annotate each value in the
-- semantics with a positive/negative marker. These markers
-- are then counted up to determine whether we need to insert
-- more literals into the semantics or not. See the manual
-- on polarity filtering for more details
type PolValue = (GeniVal, Int)
-- | Separate an input lexical semantics into the actual semantics
-- and the semantic polarity entries (which aren't used very much
-- in practice, being a sort of experimental feature to solve an
-- obscure-ish technical problem)
fromLexSem :: [Literal PolValue] -> (Sem, [SemPols])
fromLexSem = unzip . map fromLexLiteral
-- | Note that by convention we ignore the polarity associated
-- with the predicate itself
fromLexLiteral :: Literal PolValue -> (Literal GeniVal, SemPols)
fromLexLiteral (Literal h pr vs) =
(lit, pols)
where
lit = Literal (fst h) (fst pr) (map fst vs)
pols = snd h : map snd vs
-- ----------------------------------------------------------------------
-- converting to text
-- ----------------------------------------------------------------------
-- TODO: does not support semantic polarities yet
instance GeniShow LexEntry where
geniShowText l = T.intercalate "\n"
[ T.unwords
[ geniShowText . mkGConst $ iword l
, ifamname l
, paramT
]
, geniKeyword "equations" $ geniShowText (iequations l)
, geniKeyword "filters" $ geniShowText (ifilters l)
, geniKeyword "semantics" $ geniShowText (isemantics l)
]
where
paramT = parens . T.unwords . concat $
[ map geniShowText (iparams l)
, ["!"]
, map geniShowText (iinterface l)
]
instance GeniShow [LexEntry] where
geniShowText = T.intercalate "\n\n" . map geniShowText
instance Pretty LexEntry where
pretty = geniShowText
-- ----------------------------------------------------------------------
--
-- ----------------------------------------------------------------------
{-!
deriving instance Binary LexEntry
deriving instance NFData LexEntry
!-}
-- GENERATED START
instance Binary LexEntry where
put (LexEntry x1 x2 x3 x4 x5 x6 x7 x8)
= do put x1
put x2
put x3
put x4
put x5
put x6
put x7
put x8
get
= do x1 <- get
x2 <- get
x3 <- get
x4 <- get
x5 <- get
x6 <- get
x7 <- get
x8 <- get
return (LexEntry x1 x2 x3 x4 x5 x6 x7 x8)
instance NFData LexEntry where
rnf (LexEntry x1 x2 x3 x4 x5 x6 x7 x8)
= rnf x1 `seq`
rnf x2 `seq`
rnf x3 `seq`
rnf x4 `seq` rnf x5 `seq` rnf x6 `seq` rnf x7 `seq` rnf x8 `seq` ()
-- GENERATED STOP
| null |
https://raw.githubusercontent.com/kowey/GenI/570a6ef70e61a7cb01fe0fc29732cd9c1c8f2d7a/src/NLP/GenI/Lexicon/Internal.hs
|
haskell
|
GenI surface realiser
This program is free software; you can redistribute it and/or
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
along with this program; if not, write to the Free Software
# LANGUAGE DeriveDataTypeable #
| Internals of lexical entry manipulation
import Debug.Trace -- for test stuff
instance Show (IO()) where
show _ = ""
| Collection of lexical entries
| Lexical entry
^ normally just a singleton,
useful for merging synonyms
^ tree family to anchor to
^ features to unify with tree schema interface
^ features to pick out family members we want
^ path equations
^ lexical semantics
^ polarities (must be same length as 'isemantics')
| See also 'mkFullLexEntry'
This version comes with some sensible defaults.
^ word
^ family name
^ parameters list (deprecated)
^ interface (use instead of params)
^ filters
^ equations
^ semantics
| Variant of 'mkLexEntry' but with more control
^ word
^ family name
^ parameters list (deprecated)
^ interface (use instead of params)
^ filters
^ equations
^ semantics
^ semantic polarities
----------------------------------------------------------------------
lexicon semantics
----------------------------------------------------------------------
variant on the polarity filtering optimisation. To account
semantics with a positive/negative marker. These markers
are then counted up to determine whether we need to insert
more literals into the semantics or not. See the manual
on polarity filtering for more details
| Separate an input lexical semantics into the actual semantics
and the semantic polarity entries (which aren't used very much
in practice, being a sort of experimental feature to solve an
obscure-ish technical problem)
| Note that by convention we ignore the polarity associated
with the predicate itself
----------------------------------------------------------------------
converting to text
----------------------------------------------------------------------
----------------------------------------------------------------------
----------------------------------------------------------------------
GENERATED START
GENERATED STOP
|
Copyright (C) 2005-2009 and
modify it under the terms of the GNU General Public License
as published by the Free Software Foundation; either version 2
of the License, or (at your option) any later version.
You should have received a copy of the GNU General Public License
Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE MultiParamTypeClasses #-}
{-# LANGUAGE OverloadedStrings #-}
module NLP.GenI.Lexicon.Internal where
import Data.Binary
import Data.FullList
import Data.Function
import Data.Generics (Data)
import Data.List (sortBy)
import Data.Text (Text)
import qualified Data.Text as T
import Data.Typeable (Typeable)
import NLP.GenI.FeatureStructure
import NLP.GenI.GeniShow
import NLP.GenI.GeniVal
import NLP.GenI.Polarity.Types (SemPols)
import NLP.GenI.Pretty
import NLP.GenI.Semantics
import Control.DeepSeq
type Lexicon = [LexEntry]
data LexEntry = LexEntry
^ parameters (deprecated; use the interface)
}
deriving (Eq, Data, Typeable)
-> LexEntry
mkLexEntry word famname params interface filters equations sem =
mkFullLexEntry word famname params interface filters equations
sem (map noSemPols sem)
where
noSemPols l = replicate (length (lArgs l)) 0
-> LexEntry
mkFullLexEntry word famname params interface filters equations sem sempols =
LexEntry
(sortNub word)
famname
params
(sortFlist interface)
(sortFlist filters)
(sortFlist equations)
sem2
sempols2
where
(sem2, sempols2) = unzip $ sortBy (compareOnLiteral `on` fst) (zip sem sempols)
instance DescendGeniVal LexEntry where
descendGeniVal s i =
i { iinterface = descendGeniVal s (iinterface i)
, iequations = descendGeniVal s (iequations i)
, isemantics = descendGeniVal s (isemantics i)
, iparams = descendGeniVal s (iparams i) }
instance Collectable LexEntry where
collect l = (collect $ iinterface l) . (collect $ iparams l) .
(collect $ ifilters l) . (collect $ iequations l) .
(collect $ isemantics l)
| An annotated GeniVal. This is for a rather old, obscure
for zero literal semantics, we annotate each value in the
type PolValue = (GeniVal, Int)
fromLexSem :: [Literal PolValue] -> (Sem, [SemPols])
fromLexSem = unzip . map fromLexLiteral
fromLexLiteral :: Literal PolValue -> (Literal GeniVal, SemPols)
fromLexLiteral (Literal h pr vs) =
(lit, pols)
where
lit = Literal (fst h) (fst pr) (map fst vs)
pols = snd h : map snd vs
TODO: does not support semantic polarities yet
instance GeniShow LexEntry where
geniShowText l = T.intercalate "\n"
[ T.unwords
[ geniShowText . mkGConst $ iword l
, ifamname l
, paramT
]
, geniKeyword "equations" $ geniShowText (iequations l)
, geniKeyword "filters" $ geniShowText (ifilters l)
, geniKeyword "semantics" $ geniShowText (isemantics l)
]
where
paramT = parens . T.unwords . concat $
[ map geniShowText (iparams l)
, ["!"]
, map geniShowText (iinterface l)
]
instance GeniShow [LexEntry] where
geniShowText = T.intercalate "\n\n" . map geniShowText
instance Pretty LexEntry where
pretty = geniShowText
{-!
deriving instance Binary LexEntry
deriving instance NFData LexEntry
!-}
instance Binary LexEntry where
put (LexEntry x1 x2 x3 x4 x5 x6 x7 x8)
= do put x1
put x2
put x3
put x4
put x5
put x6
put x7
put x8
get
= do x1 <- get
x2 <- get
x3 <- get
x4 <- get
x5 <- get
x6 <- get
x7 <- get
x8 <- get
return (LexEntry x1 x2 x3 x4 x5 x6 x7 x8)
instance NFData LexEntry where
rnf (LexEntry x1 x2 x3 x4 x5 x6 x7 x8)
= rnf x1 `seq`
rnf x2 `seq`
rnf x3 `seq`
rnf x4 `seq` rnf x5 `seq` rnf x6 `seq` rnf x7 `seq` rnf x8 `seq` ()
|
d3d9ec15617096bb20341728d229a0763e965af1beb25496fb0dd892ed77da38
|
camlp5/camlp5
|
pa_macro_gram.ml
|
(* camlp5r *)
(* pa_macro_gram.ml,v *)
(* Copyright (c) INRIA 2007-2017 *)
(* #load "pa_macro.cmo" *)
(* #load "pa_extend.cmo" *)
(* #load "q_MLast.cmo" *)
open Pa_macro;;
open Pa_extend;;
Grammar.Unsafe.clear_entry rule_list;;
Grammar.Unsafe.clear_entry level_list;;
let must_flatten_opts ll =
List.fold_right
(fun levs acc ->
match levs with
None -> acc
| Some v -> v @ acc)
ll []
;;
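(* Editor's illustration, not part of pa_macro_gram.ml: [must_flatten_opts]
   merges the level lists coming out of optional IFDEF branches, dropping the
   branches that produced [None]. A quick sanity check of that behaviour: *)
let () =
  assert (must_flatten_opts [Some [1; 2]; None; Some [3]] = [1; 2; 3]);
  assert (must_flatten_opts [None; None] = ([] : int list))
;;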
Grammar.safe_extend
(let _ = (dexpr : 'dexpr Grammar.Entry.e)
and _ = (rule : 'rule Grammar.Entry.e)
and _ = (rule_list : 'rule_list Grammar.Entry.e)
and _ = (level : 'level Grammar.Entry.e)
and _ = (level_list : 'level_list Grammar.Entry.e) in
let grammar_entry_create s =
Grammar.create_local_entry (Grammar.of_entry dexpr) s
in
let rule_or_ifdef0 : 'rule_or_ifdef0 Grammar.Entry.e =
grammar_entry_create "rule_or_ifdef0"
and rule_or_ifdef : 'rule_or_ifdef Grammar.Entry.e =
grammar_entry_create "rule_or_ifdef"
and rule_or_ifdef_list : 'rule_or_ifdef_list Grammar.Entry.e =
grammar_entry_create "rule_or_ifdef_list"
and else_rule_or_ifdef : 'else_rule_or_ifdef Grammar.Entry.e =
grammar_entry_create "else_rule_or_ifdef"
and level_or_ifdef_opt : 'level_or_ifdef_opt Grammar.Entry.e =
grammar_entry_create "level_or_ifdef_opt"
and levels_or_ifdef_opt : 'levels_or_ifdef_opt Grammar.Entry.e =
grammar_entry_create "levels_or_ifdef_opt"
and else_levels_or_ifdef_opt : 'else_levels_or_ifdef_opt Grammar.Entry.e =
grammar_entry_create "else_levels_or_ifdef_opt"
in
[Grammar.extension (rule_list : 'rule_list Grammar.Entry.e) None
[None, None,
[Grammar.production
(Grammar.r_next
(Grammar.r_next
(Grammar.r_next Grammar.r_stop (Grammar.s_token ("", "[")))
(Grammar.s_list1sep
(Grammar.s_nterm
(rule_or_ifdef0 : 'rule_or_ifdef0 Grammar.Entry.e))
(Grammar.s_token ("", "|")) false))
(Grammar.s_token ("", "]")),
"194fe98d",
(fun _ (rules : 'rule_or_ifdef0 list) _ (loc : Ploc.t) ->
({au_loc = loc; au_rules = List.concat rules} : 'rule_list)));
Grammar.production
(Grammar.r_next
(Grammar.r_next Grammar.r_stop (Grammar.s_token ("", "[")))
(Grammar.s_token ("", "]")),
"194fe98d",
(fun _ _ (loc : Ploc.t) ->
({au_loc = loc; au_rules = []} : 'rule_list)))]];
Grammar.extension (rule_or_ifdef0 : 'rule_or_ifdef0 Grammar.Entry.e) None
[None, None,
[Grammar.production
(Grammar.r_next
(Grammar.r_next
(Grammar.r_next
(Grammar.r_next
(Grammar.r_next
(Grammar.r_next Grammar.r_stop
(Grammar.s_token ("", "IFDEF")))
(Grammar.s_nterm (dexpr : 'dexpr Grammar.Entry.e)))
(Grammar.s_token ("", "THEN")))
(Grammar.s_nterm
(rule_or_ifdef_list :
'rule_or_ifdef_list Grammar.Entry.e)))
(Grammar.s_nterm
(else_rule_or_ifdef :
'else_rule_or_ifdef Grammar.Entry.e)))
(Grammar.s_token ("", "END")),
"194fe98d",
(fun _ (e2 : 'else_rule_or_ifdef) (e1 : 'rule_or_ifdef_list) _
(e : 'dexpr) _ (loc : Ploc.t) ->
(if e then e1 else e2 : 'rule_or_ifdef0)))];
None, None,
[Grammar.production
(Grammar.r_next Grammar.r_stop
(Grammar.s_nterm (rule : 'rule Grammar.Entry.e)),
"194fe98d",
(fun (r : 'rule) (loc : Ploc.t) -> ([r] : 'rule_or_ifdef0)))]];
Grammar.extension (rule_or_ifdef : 'rule_or_ifdef Grammar.Entry.e) None
[None, None,
[Grammar.production
(Grammar.r_next
(Grammar.r_next
(Grammar.r_next
(Grammar.r_next
(Grammar.r_next
(Grammar.r_next Grammar.r_stop
(Grammar.s_token ("", "IFDEF")))
(Grammar.s_nterm (dexpr : 'dexpr Grammar.Entry.e)))
(Grammar.s_token ("", "THEN")))
(Grammar.s_nterm
(rule_or_ifdef_list :
'rule_or_ifdef_list Grammar.Entry.e)))
(Grammar.s_nterm
(else_rule_or_ifdef :
'else_rule_or_ifdef Grammar.Entry.e)))
(Grammar.s_token ("", "END")),
"194fe98d",
(fun _ (e2 : 'else_rule_or_ifdef) (e1 : 'rule_or_ifdef_list) _
(e : 'dexpr) _ (loc : Ploc.t) ->
(if e then e1 else e2 : 'rule_or_ifdef)))];
None, None,
[Grammar.production
(Grammar.r_next
(Grammar.r_next Grammar.r_stop (Grammar.s_token ("", "|")))
(Grammar.s_nterm (rule : 'rule Grammar.Entry.e)),
"194fe98d",
(fun (r : 'rule) _ (loc : Ploc.t) -> ([r] : 'rule_or_ifdef)))]];
Grammar.extension
(rule_or_ifdef_list : 'rule_or_ifdef_list Grammar.Entry.e) None
[None, None,
[Grammar.production
(Grammar.r_next Grammar.r_stop
(Grammar.s_list0
(Grammar.s_nterm
(rule_or_ifdef : 'rule_or_ifdef Grammar.Entry.e))),
"194fe98d",
(fun (l : 'rule_or_ifdef list) (loc : Ploc.t) ->
(List.concat l : 'rule_or_ifdef_list)))]];
Grammar.extension
(else_rule_or_ifdef : 'else_rule_or_ifdef Grammar.Entry.e) None
[None, None,
[Grammar.production
(Grammar.r_next
(Grammar.r_next Grammar.r_stop (Grammar.s_token ("", "ELSE")))
(Grammar.s_nterm
(rule_or_ifdef_list : 'rule_or_ifdef_list Grammar.Entry.e)),
"194fe98d",
(fun (e : 'rule_or_ifdef_list) _ (loc : Ploc.t) ->
(e : 'else_rule_or_ifdef)));
Grammar.production
(Grammar.r_next
(Grammar.r_next
(Grammar.r_next
(Grammar.r_next
(Grammar.r_next Grammar.r_stop
(Grammar.s_token ("", "ELSIFNDEF")))
(Grammar.s_nterm (dexpr : 'dexpr Grammar.Entry.e)))
(Grammar.s_token ("", "THEN")))
(Grammar.s_nterm
(rule_or_ifdef_list :
'rule_or_ifdef_list Grammar.Entry.e)))
Grammar.s_self,
"194fe98d",
(fun (e2 : 'else_rule_or_ifdef) (e1 : 'rule_or_ifdef_list) _
(e : 'dexpr) _ (loc : Ploc.t) ->
(if not e then e1 else e2 : 'else_rule_or_ifdef)));
Grammar.production
(Grammar.r_next
(Grammar.r_next
(Grammar.r_next
(Grammar.r_next
(Grammar.r_next Grammar.r_stop
(Grammar.s_token ("", "ELSIFDEF")))
(Grammar.s_nterm (dexpr : 'dexpr Grammar.Entry.e)))
(Grammar.s_token ("", "THEN")))
(Grammar.s_nterm
(rule_or_ifdef_list :
'rule_or_ifdef_list Grammar.Entry.e)))
Grammar.s_self,
"194fe98d",
(fun (e2 : 'else_rule_or_ifdef) (e1 : 'rule_or_ifdef_list) _
(e : 'dexpr) _ (loc : Ploc.t) ->
(if e then e1 else e2 : 'else_rule_or_ifdef)))]];
Grammar.extension (level_list : 'level_list Grammar.Entry.e) None
[None, None,
[Grammar.production
(Grammar.r_next
(Grammar.r_next
(Grammar.r_next Grammar.r_stop (Grammar.s_token ("", "[")))
(Grammar.s_list1sep
(Grammar.s_nterm
(level_or_ifdef_opt :
'level_or_ifdef_opt Grammar.Entry.e))
(Grammar.s_token ("", "|")) false))
(Grammar.s_token ("", "]")),
"194fe98d",
(fun _ (ll : 'level_or_ifdef_opt list) _ (loc : Ploc.t) ->
(must_flatten_opts ll : 'level_list)))]];
Grammar.extension
(level_or_ifdef_opt : 'level_or_ifdef_opt Grammar.Entry.e) None
[None, None,
[Grammar.production
(Grammar.r_next
(Grammar.r_next
(Grammar.r_next
(Grammar.r_next
(Grammar.r_next
(Grammar.r_next Grammar.r_stop
(Grammar.s_token ("", "IFDEF")))
(Grammar.s_nterm (dexpr : 'dexpr Grammar.Entry.e)))
(Grammar.s_token ("", "THEN")))
(Grammar.s_nterm
(levels_or_ifdef_opt :
'levels_or_ifdef_opt Grammar.Entry.e)))
(Grammar.s_nterm
(else_levels_or_ifdef_opt :
'else_levels_or_ifdef_opt Grammar.Entry.e)))
(Grammar.s_token ("", "END")),
"194fe98d",
(fun _ (e2 : 'else_levels_or_ifdef_opt) (e1 : 'levels_or_ifdef_opt)
_ (e : 'dexpr) _ (loc : Ploc.t) ->
(if e then e1 else e2 : 'level_or_ifdef_opt)))];
None, None,
[Grammar.production
(Grammar.r_next Grammar.r_stop
(Grammar.s_nterm (level : 'level Grammar.Entry.e)),
"194fe98d",
(fun (l : 'level) (loc : Ploc.t) ->
(Some [l] : 'level_or_ifdef_opt)))];
None, None,
[Grammar.production
(Grammar.r_stop, "194fe98d",
(fun (loc : Ploc.t) -> (None : 'level_or_ifdef_opt)))]];
Grammar.extension
(levels_or_ifdef_opt : 'levels_or_ifdef_opt Grammar.Entry.e) None
[None, None,
[Grammar.production
(Grammar.r_next
(Grammar.r_next
(Grammar.r_next
(Grammar.r_next
(Grammar.r_next
(Grammar.r_next Grammar.r_stop
(Grammar.s_token ("", "IFDEF")))
(Grammar.s_nterm (dexpr : 'dexpr Grammar.Entry.e)))
(Grammar.s_token ("", "THEN")))
Grammar.s_self)
(Grammar.s_nterm
(else_levels_or_ifdef_opt :
'else_levels_or_ifdef_opt Grammar.Entry.e)))
(Grammar.s_token ("", "END")),
"194fe98d",
(fun _ (e2 : 'else_levels_or_ifdef_opt) (e1 : 'levels_or_ifdef_opt)
_ (e : 'dexpr) _ (loc : Ploc.t) ->
(if e then e1 else e2 : 'levels_or_ifdef_opt)))];
None, None,
[Grammar.production
(Grammar.r_next Grammar.r_stop
(Grammar.s_list1sep
(Grammar.s_nterm (level : 'level Grammar.Entry.e))
(Grammar.s_token ("", "|")) false),
"194fe98d",
(fun (ll : 'level list) (loc : Ploc.t) ->
(Some ll : 'levels_or_ifdef_opt)))];
None, None,
[Grammar.production
(Grammar.r_stop, "194fe98d",
(fun (loc : Ploc.t) -> (None : 'levels_or_ifdef_opt)))]];
Grammar.extension
(else_levels_or_ifdef_opt : 'else_levels_or_ifdef_opt Grammar.Entry.e)
None
[None, None,
[Grammar.production
(Grammar.r_next
(Grammar.r_next Grammar.r_stop (Grammar.s_token ("", "ELSE")))
(Grammar.s_nterm
(levels_or_ifdef_opt : 'levels_or_ifdef_opt Grammar.Entry.e)),
"194fe98d",
(fun (e : 'levels_or_ifdef_opt) _ (loc : Ploc.t) ->
(e : 'else_levels_or_ifdef_opt)));
Grammar.production
(Grammar.r_next
(Grammar.r_next
(Grammar.r_next
(Grammar.r_next
(Grammar.r_next Grammar.r_stop
(Grammar.s_token ("", "ELSIFNDEF")))
(Grammar.s_nterm (dexpr : 'dexpr Grammar.Entry.e)))
(Grammar.s_token ("", "THEN")))
(Grammar.s_nterm
(levels_or_ifdef_opt :
'levels_or_ifdef_opt Grammar.Entry.e)))
Grammar.s_self,
"194fe98d",
(fun (e2 : 'else_levels_or_ifdef_opt) (e1 : 'levels_or_ifdef_opt) _
(e : 'dexpr) _ (loc : Ploc.t) ->
(if not e then e1 else e2 : 'else_levels_or_ifdef_opt)));
Grammar.production
(Grammar.r_next
(Grammar.r_next
(Grammar.r_next
(Grammar.r_next
(Grammar.r_next Grammar.r_stop
(Grammar.s_token ("", "ELSIFDEF")))
(Grammar.s_nterm (dexpr : 'dexpr Grammar.Entry.e)))
(Grammar.s_token ("", "THEN")))
(Grammar.s_nterm
(levels_or_ifdef_opt :
'levels_or_ifdef_opt Grammar.Entry.e)))
Grammar.s_self,
"194fe98d",
(fun (e2 : 'else_levels_or_ifdef_opt) (e1 : 'levels_or_ifdef_opt) _
(e : 'dexpr) _ (loc : Ploc.t) ->
(if e then e1 else e2 : 'else_levels_or_ifdef_opt)))]]]);;
| null |
https://raw.githubusercontent.com/camlp5/camlp5/15e03f56f55b2856dafe7dd3ca232799069f5dda/ocaml_src/meta/pa_macro_gram.ml
|
ocaml
|
camlp5r
pa_macro_gram.ml,v
#load "pa_macro.cmo"
#load "pa_extend.cmo"
#load "q_MLast.cmo"
|
Copyright (c) INRIA 2007-2017
open Pa_macro;;
open Pa_extend;;
Grammar.Unsafe.clear_entry rule_list;;
Grammar.Unsafe.clear_entry level_list;;
let must_flatten_opts ll =
List.fold_right
(fun levs acc ->
match levs with
None -> acc
| Some v -> v @ acc)
ll []
;;
Grammar.safe_extend
(let _ = (dexpr : 'dexpr Grammar.Entry.e)
and _ = (rule : 'rule Grammar.Entry.e)
and _ = (rule_list : 'rule_list Grammar.Entry.e)
and _ = (level : 'level Grammar.Entry.e)
and _ = (level_list : 'level_list Grammar.Entry.e) in
let grammar_entry_create s =
Grammar.create_local_entry (Grammar.of_entry dexpr) s
in
let rule_or_ifdef0 : 'rule_or_ifdef0 Grammar.Entry.e =
grammar_entry_create "rule_or_ifdef0"
and rule_or_ifdef : 'rule_or_ifdef Grammar.Entry.e =
grammar_entry_create "rule_or_ifdef"
and rule_or_ifdef_list : 'rule_or_ifdef_list Grammar.Entry.e =
grammar_entry_create "rule_or_ifdef_list"
and else_rule_or_ifdef : 'else_rule_or_ifdef Grammar.Entry.e =
grammar_entry_create "else_rule_or_ifdef"
and level_or_ifdef_opt : 'level_or_ifdef_opt Grammar.Entry.e =
grammar_entry_create "level_or_ifdef_opt"
and levels_or_ifdef_opt : 'levels_or_ifdef_opt Grammar.Entry.e =
grammar_entry_create "levels_or_ifdef_opt"
and else_levels_or_ifdef_opt : 'else_levels_or_ifdef_opt Grammar.Entry.e =
grammar_entry_create "else_levels_or_ifdef_opt"
in
[Grammar.extension (rule_list : 'rule_list Grammar.Entry.e) None
[None, None,
[Grammar.production
(Grammar.r_next
(Grammar.r_next
(Grammar.r_next Grammar.r_stop (Grammar.s_token ("", "[")))
(Grammar.s_list1sep
(Grammar.s_nterm
(rule_or_ifdef0 : 'rule_or_ifdef0 Grammar.Entry.e))
(Grammar.s_token ("", "|")) false))
(Grammar.s_token ("", "]")),
"194fe98d",
(fun _ (rules : 'rule_or_ifdef0 list) _ (loc : Ploc.t) ->
({au_loc = loc; au_rules = List.concat rules} : 'rule_list)));
Grammar.production
(Grammar.r_next
(Grammar.r_next Grammar.r_stop (Grammar.s_token ("", "[")))
(Grammar.s_token ("", "]")),
"194fe98d",
(fun _ _ (loc : Ploc.t) ->
({au_loc = loc; au_rules = []} : 'rule_list)))]];
Grammar.extension (rule_or_ifdef0 : 'rule_or_ifdef0 Grammar.Entry.e) None
[None, None,
[Grammar.production
(Grammar.r_next
(Grammar.r_next
(Grammar.r_next
(Grammar.r_next
(Grammar.r_next
(Grammar.r_next Grammar.r_stop
(Grammar.s_token ("", "IFDEF")))
(Grammar.s_nterm (dexpr : 'dexpr Grammar.Entry.e)))
(Grammar.s_token ("", "THEN")))
(Grammar.s_nterm
(rule_or_ifdef_list :
'rule_or_ifdef_list Grammar.Entry.e)))
(Grammar.s_nterm
(else_rule_or_ifdef :
'else_rule_or_ifdef Grammar.Entry.e)))
(Grammar.s_token ("", "END")),
"194fe98d",
(fun _ (e2 : 'else_rule_or_ifdef) (e1 : 'rule_or_ifdef_list) _
(e : 'dexpr) _ (loc : Ploc.t) ->
(if e then e1 else e2 : 'rule_or_ifdef0)))];
None, None,
[Grammar.production
(Grammar.r_next Grammar.r_stop
(Grammar.s_nterm (rule : 'rule Grammar.Entry.e)),
"194fe98d",
(fun (r : 'rule) (loc : Ploc.t) -> ([r] : 'rule_or_ifdef0)))]];
Grammar.extension (rule_or_ifdef : 'rule_or_ifdef Grammar.Entry.e) None
[None, None,
[Grammar.production
(Grammar.r_next
(Grammar.r_next
(Grammar.r_next
(Grammar.r_next
(Grammar.r_next
(Grammar.r_next Grammar.r_stop
(Grammar.s_token ("", "IFDEF")))
(Grammar.s_nterm (dexpr : 'dexpr Grammar.Entry.e)))
(Grammar.s_token ("", "THEN")))
(Grammar.s_nterm
(rule_or_ifdef_list :
'rule_or_ifdef_list Grammar.Entry.e)))
(Grammar.s_nterm
(else_rule_or_ifdef :
'else_rule_or_ifdef Grammar.Entry.e)))
(Grammar.s_token ("", "END")),
"194fe98d",
(fun _ (e2 : 'else_rule_or_ifdef) (e1 : 'rule_or_ifdef_list) _
(e : 'dexpr) _ (loc : Ploc.t) ->
(if e then e1 else e2 : 'rule_or_ifdef)))];
None, None,
[Grammar.production
(Grammar.r_next
(Grammar.r_next Grammar.r_stop (Grammar.s_token ("", "|")))
(Grammar.s_nterm (rule : 'rule Grammar.Entry.e)),
"194fe98d",
(fun (r : 'rule) _ (loc : Ploc.t) -> ([r] : 'rule_or_ifdef)))]];
Grammar.extension
(rule_or_ifdef_list : 'rule_or_ifdef_list Grammar.Entry.e) None
[None, None,
[Grammar.production
(Grammar.r_next Grammar.r_stop
(Grammar.s_list0
(Grammar.s_nterm
(rule_or_ifdef : 'rule_or_ifdef Grammar.Entry.e))),
"194fe98d",
(fun (l : 'rule_or_ifdef list) (loc : Ploc.t) ->
(List.concat l : 'rule_or_ifdef_list)))]];
Grammar.extension
(else_rule_or_ifdef : 'else_rule_or_ifdef Grammar.Entry.e) None
[None, None,
[Grammar.production
(Grammar.r_next
(Grammar.r_next Grammar.r_stop (Grammar.s_token ("", "ELSE")))
(Grammar.s_nterm
(rule_or_ifdef_list : 'rule_or_ifdef_list Grammar.Entry.e)),
"194fe98d",
(fun (e : 'rule_or_ifdef_list) _ (loc : Ploc.t) ->
(e : 'else_rule_or_ifdef)));
Grammar.production
(Grammar.r_next
(Grammar.r_next
(Grammar.r_next
(Grammar.r_next
(Grammar.r_next Grammar.r_stop
(Grammar.s_token ("", "ELSIFNDEF")))
(Grammar.s_nterm (dexpr : 'dexpr Grammar.Entry.e)))
(Grammar.s_token ("", "THEN")))
(Grammar.s_nterm
(rule_or_ifdef_list :
'rule_or_ifdef_list Grammar.Entry.e)))
Grammar.s_self,
"194fe98d",
(fun (e2 : 'else_rule_or_ifdef) (e1 : 'rule_or_ifdef_list) _
(e : 'dexpr) _ (loc : Ploc.t) ->
(if not e then e1 else e2 : 'else_rule_or_ifdef)));
Grammar.production
(Grammar.r_next
(Grammar.r_next
(Grammar.r_next
(Grammar.r_next
(Grammar.r_next Grammar.r_stop
(Grammar.s_token ("", "ELSIFDEF")))
(Grammar.s_nterm (dexpr : 'dexpr Grammar.Entry.e)))
(Grammar.s_token ("", "THEN")))
(Grammar.s_nterm
(rule_or_ifdef_list :
'rule_or_ifdef_list Grammar.Entry.e)))
Grammar.s_self,
"194fe98d",
(fun (e2 : 'else_rule_or_ifdef) (e1 : 'rule_or_ifdef_list) _
(e : 'dexpr) _ (loc : Ploc.t) ->
(if e then e1 else e2 : 'else_rule_or_ifdef)))]];
Grammar.extension (level_list : 'level_list Grammar.Entry.e) None
[None, None,
[Grammar.production
(Grammar.r_next
(Grammar.r_next
(Grammar.r_next Grammar.r_stop (Grammar.s_token ("", "[")))
(Grammar.s_list1sep
(Grammar.s_nterm
(level_or_ifdef_opt :
'level_or_ifdef_opt Grammar.Entry.e))
(Grammar.s_token ("", "|")) false))
(Grammar.s_token ("", "]")),
"194fe98d",
(fun _ (ll : 'level_or_ifdef_opt list) _ (loc : Ploc.t) ->
(must_flatten_opts ll : 'level_list)))]];
Grammar.extension
(level_or_ifdef_opt : 'level_or_ifdef_opt Grammar.Entry.e) None
[None, None,
[Grammar.production
(Grammar.r_next
(Grammar.r_next
(Grammar.r_next
(Grammar.r_next
(Grammar.r_next
(Grammar.r_next Grammar.r_stop
(Grammar.s_token ("", "IFDEF")))
(Grammar.s_nterm (dexpr : 'dexpr Grammar.Entry.e)))
(Grammar.s_token ("", "THEN")))
(Grammar.s_nterm
(levels_or_ifdef_opt :
'levels_or_ifdef_opt Grammar.Entry.e)))
(Grammar.s_nterm
(else_levels_or_ifdef_opt :
'else_levels_or_ifdef_opt Grammar.Entry.e)))
(Grammar.s_token ("", "END")),
"194fe98d",
(fun _ (e2 : 'else_levels_or_ifdef_opt) (e1 : 'levels_or_ifdef_opt)
_ (e : 'dexpr) _ (loc : Ploc.t) ->
(if e then e1 else e2 : 'level_or_ifdef_opt)))];
None, None,
[Grammar.production
(Grammar.r_next Grammar.r_stop
(Grammar.s_nterm (level : 'level Grammar.Entry.e)),
"194fe98d",
(fun (l : 'level) (loc : Ploc.t) ->
(Some [l] : 'level_or_ifdef_opt)))];
None, None,
[Grammar.production
(Grammar.r_stop, "194fe98d",
(fun (loc : Ploc.t) -> (None : 'level_or_ifdef_opt)))]];
Grammar.extension
(levels_or_ifdef_opt : 'levels_or_ifdef_opt Grammar.Entry.e) None
[None, None,
[Grammar.production
(Grammar.r_next
(Grammar.r_next
(Grammar.r_next
(Grammar.r_next
(Grammar.r_next
(Grammar.r_next Grammar.r_stop
(Grammar.s_token ("", "IFDEF")))
(Grammar.s_nterm (dexpr : 'dexpr Grammar.Entry.e)))
(Grammar.s_token ("", "THEN")))
Grammar.s_self)
(Grammar.s_nterm
(else_levels_or_ifdef_opt :
'else_levels_or_ifdef_opt Grammar.Entry.e)))
(Grammar.s_token ("", "END")),
"194fe98d",
(fun _ (e2 : 'else_levels_or_ifdef_opt) (e1 : 'levels_or_ifdef_opt)
_ (e : 'dexpr) _ (loc : Ploc.t) ->
(if e then e1 else e2 : 'levels_or_ifdef_opt)))];
None, None,
[Grammar.production
(Grammar.r_next Grammar.r_stop
(Grammar.s_list1sep
(Grammar.s_nterm (level : 'level Grammar.Entry.e))
(Grammar.s_token ("", "|")) false),
"194fe98d",
(fun (ll : 'level list) (loc : Ploc.t) ->
(Some ll : 'levels_or_ifdef_opt)))];
None, None,
[Grammar.production
(Grammar.r_stop, "194fe98d",
(fun (loc : Ploc.t) -> (None : 'levels_or_ifdef_opt)))]];
Grammar.extension
(else_levels_or_ifdef_opt : 'else_levels_or_ifdef_opt Grammar.Entry.e)
None
[None, None,
[Grammar.production
(Grammar.r_next
(Grammar.r_next Grammar.r_stop (Grammar.s_token ("", "ELSE")))
(Grammar.s_nterm
(levels_or_ifdef_opt : 'levels_or_ifdef_opt Grammar.Entry.e)),
"194fe98d",
(fun (e : 'levels_or_ifdef_opt) _ (loc : Ploc.t) ->
(e : 'else_levels_or_ifdef_opt)));
Grammar.production
(Grammar.r_next
(Grammar.r_next
(Grammar.r_next
(Grammar.r_next
(Grammar.r_next Grammar.r_stop
(Grammar.s_token ("", "ELSIFNDEF")))
(Grammar.s_nterm (dexpr : 'dexpr Grammar.Entry.e)))
(Grammar.s_token ("", "THEN")))
(Grammar.s_nterm
(levels_or_ifdef_opt :
'levels_or_ifdef_opt Grammar.Entry.e)))
Grammar.s_self,
"194fe98d",
(fun (e2 : 'else_levels_or_ifdef_opt) (e1 : 'levels_or_ifdef_opt) _
(e : 'dexpr) _ (loc : Ploc.t) ->
(if not e then e1 else e2 : 'else_levels_or_ifdef_opt)));
Grammar.production
(Grammar.r_next
(Grammar.r_next
(Grammar.r_next
(Grammar.r_next
(Grammar.r_next Grammar.r_stop
(Grammar.s_token ("", "ELSIFDEF")))
(Grammar.s_nterm (dexpr : 'dexpr Grammar.Entry.e)))
(Grammar.s_token ("", "THEN")))
(Grammar.s_nterm
(levels_or_ifdef_opt :
'levels_or_ifdef_opt Grammar.Entry.e)))
Grammar.s_self,
"194fe98d",
(fun (e2 : 'else_levels_or_ifdef_opt) (e1 : 'levels_or_ifdef_opt) _
(e : 'dexpr) _ (loc : Ploc.t) ->
(if e then e1 else e2 : 'else_levels_or_ifdef_opt)))]]]);;
|
551162ed0f1c1c62776613c76cbcbe149ac6b8cd201fa45c6bfd2a75ba7abba2
|
alexandroid000/improv
|
InertiaStamped.hs
|
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE TemplateHaskell #-}
module Ros.Geometry_msgs.InertiaStamped where
import qualified Prelude as P
import Prelude ((.), (+), (*))
import qualified Data.Typeable as T
import Control.Applicative
import Ros.Internal.RosBinary
import Ros.Internal.Msg.MsgInfo
import qualified GHC.Generics as G
import qualified Data.Default.Generics as D
import Ros.Internal.Msg.HeaderSupport
import qualified Ros.Geometry_msgs.Inertia as Inertia
import qualified Ros.Std_msgs.Header as Header
import Lens.Family.TH (makeLenses)
import Lens.Family (view, set)
data InertiaStamped = InertiaStamped { _header :: Header.Header
, _inertia :: Inertia.Inertia
} deriving (P.Show, P.Eq, P.Ord, T.Typeable, G.Generic)
$(makeLenses ''InertiaStamped)
instance RosBinary InertiaStamped where
put obj' = put (_header obj') *> put (_inertia obj')
get = InertiaStamped <$> get <*> get
putMsg = putStampedMsg
instance HasHeader InertiaStamped where
getSequence = view (header . Header.seq)
getFrame = view (header . Header.frame_id)
getStamp = view (header . Header.stamp)
setSequence = set (header . Header.seq)
instance MsgInfo InertiaStamped where
sourceMD5 _ = "ddee48caeab5a966c5e8d166654a9ac7"
msgTypeName _ = "geometry_msgs/InertiaStamped"
instance D.Default InertiaStamped
| null |
https://raw.githubusercontent.com/alexandroid000/improv/ef0f4a6a5f99a9c7ff3d25f50529417aba9f757c/roshask/msgs/Geometry_msgs/Ros/Geometry_msgs/InertiaStamped.hs
|
haskell
|
# LANGUAGE OverloadedStrings #
# LANGUAGE DeriveDataTypeable #
|
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE TemplateHaskell #-}
module Ros.Geometry_msgs.InertiaStamped where
import qualified Prelude as P
import Prelude ((.), (+), (*))
import qualified Data.Typeable as T
import Control.Applicative
import Ros.Internal.RosBinary
import Ros.Internal.Msg.MsgInfo
import qualified GHC.Generics as G
import qualified Data.Default.Generics as D
import Ros.Internal.Msg.HeaderSupport
import qualified Ros.Geometry_msgs.Inertia as Inertia
import qualified Ros.Std_msgs.Header as Header
import Lens.Family.TH (makeLenses)
import Lens.Family (view, set)
data InertiaStamped = InertiaStamped { _header :: Header.Header
, _inertia :: Inertia.Inertia
} deriving (P.Show, P.Eq, P.Ord, T.Typeable, G.Generic)
$(makeLenses ''InertiaStamped)
instance RosBinary InertiaStamped where
put obj' = put (_header obj') *> put (_inertia obj')
get = InertiaStamped <$> get <*> get
putMsg = putStampedMsg
instance HasHeader InertiaStamped where
getSequence = view (header . Header.seq)
getFrame = view (header . Header.frame_id)
getStamp = view (header . Header.stamp)
setSequence = set (header . Header.seq)
instance MsgInfo InertiaStamped where
sourceMD5 _ = "ddee48caeab5a966c5e8d166654a9ac7"
msgTypeName _ = "geometry_msgs/InertiaStamped"
instance D.Default InertiaStamped
|
9403d14b8ced6abea1916008ec94bad64b48eaaad81bd967ed41d6765ffbf9e1
|
madjestic/Haskell-OpenGL-Tutorial
|
Rendering.hs
|
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE InstanceSigs #-}
module NGL.Rendering where
import Graphics.Rendering.OpenGL as GL hiding (Color, Constant)
import Graphics.UI.GLFW as GLFW
import Control.Monad
import Control.Applicative
import System.Exit ( exitWith, ExitCode(..) )
import Foreign.Marshal.Array
import Foreign.Ptr
import Foreign.Storable
import NGL.LoadShaders
import NGL.Shape
import Graphics.GLUtil
data Descriptor = Descriptor VertexArrayObject ArrayIndex NumArrayIndices
draw :: GLFW.Window -> Drawable -> IO ()
draw win xs = do
descriptor <- initResources xs
onDisplay win descriptor
bufferOffset :: Integral a => a -> Ptr b
bufferOffset = plusPtr nullPtr . fromIntegral
loadTex :: FilePath -> IO TextureObject
loadTex f = do t <- either error id <$> readTexture f
textureFilter Texture2D $= ((Linear', Nothing), Linear')
texture2DWrap $= (Repeated, ClampToEdge)
return t
initResources :: ([Vertex4 Float],[TexCoord2 Float],String) -> IO Descriptor
initResources (vs, uv, tex) = do
triangles <- genObjectName
bindVertexArrayObject $= Just triangles
--
-- Declaring VBO: vertices
--
let vertices = vs
numVertices = length vertices
vertexBuffer <- genObjectName
bindBuffer ArrayBuffer $= Just vertexBuffer
withArray vs $ \ptr -> do
let size = fromIntegral (numVertices * sizeOf (head vs))
bufferData ArrayBuffer $= (size, ptr, StaticDraw)
let firstIndex = 0
vPosition = AttribLocation 0
vertexAttribPointer vPosition $=
(ToFloat, VertexArrayDescriptor 4 Float 0 (bufferOffset firstIndex))
vertexAttribArray vPosition $= Enabled
--
-- Declaring VBO: UVs
--
let uv = toUV Planar
textureBuffer <- genObjectName
bindBuffer ArrayBuffer $= Just textureBuffer
withArray uv $ \ptr -> do
let size = fromIntegral (numVertices * sizeOf (head uv))
bufferData ArrayBuffer $= (size, ptr, StaticDraw)
let firstIndex = 0
uvCoords = AttribLocation 2
vertexAttribPointer uvCoords $=
(ToFloat, VertexArrayDescriptor 2 Float 0 (bufferOffset firstIndex))
vertexAttribArray uvCoords $= Enabled
tx <- loadTex tex
texture Texture2D $= Enabled
activeTexture $= TextureUnit 0
textureBinding Texture2D $= Just tx
program <- loadShaders [
ShaderInfo VertexShader (FileSource "Shaders/shader.vert"),
ShaderInfo FragmentShader (FileSource "Shaders/shader.frag")]
currentProgram $= Just program
return $ Descriptor triangles firstIndex (fromIntegral numVertices)
keyPressed :: GLFW.KeyCallback
keyPressed win GLFW.Key'Escape _ GLFW.KeyState'Pressed _ = shutdown win
keyPressed _ _ _ _ _ = return ()
shutdown :: GLFW.WindowCloseCallback
shutdown win = do
GLFW.destroyWindow win
GLFW.terminate
_ <- exitWith ExitSuccess
return ()
resizeWindow :: GLFW.WindowSizeCallback
resizeWindow win w h =
do
GL.viewport $= (GL.Position 0 0, GL.Size (fromIntegral w) (fromIntegral h))
GL.matrixMode $= GL.Projection
GL.loadIdentity
GL.ortho2D 0 (realToFrac w) (realToFrac h) 0
createWindow :: String -> (Int, Int) -> IO GLFW.Window
createWindow title (sizex,sizey) = do
GLFW.init
GLFW.defaultWindowHints
GLFW.windowHint (GLFW.WindowHint'Resizable False)
Just win <- GLFW.createWindow sizex sizey title Nothing Nothing
GLFW.makeContextCurrent (Just win)
GLFW.setWindowSizeCallback win (Just resizeWindow)
GLFW.setKeyCallback win (Just keyPressed)
GLFW.setWindowCloseCallback win (Just shutdown)
return win
closeWindow :: GLFW.Window -> IO ()
closeWindow win = do
GLFW.destroyWindow win
GLFW.terminate
onDisplay :: GLFW.Window -> Descriptor -> IO ()
onDisplay win descriptor@(Descriptor triangles firstIndex numVertices) = do
GL.clearColor $= Color4 0 0 0 1
GL.clear [ColorBuffer]
bindVertexArrayObject $= Just triangles
drawArrays Triangles firstIndex numVertices
GLFW.swapBuffers win
forever $ do
GLFW.pollEvents
onDisplay win descriptor
| null |
https://raw.githubusercontent.com/madjestic/Haskell-OpenGL-Tutorial/9f685ddde9d6c5d2cc9c2c62f214ca0d43e717c7/tutorial10/NGL/Rendering.hs
|
haskell
|
Declaring VBO: vertices
Declaring VBO: UVs
|
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE InstanceSigs #-}
module NGL.Rendering where
import Graphics.Rendering.OpenGL as GL hiding (Color, Constant)
import Graphics.UI.GLFW as GLFW
import Control.Monad
import Control.Applicative
import System.Exit ( exitWith, ExitCode(..) )
import Foreign.Marshal.Array
import Foreign.Ptr
import Foreign.Storable
import NGL.LoadShaders
import NGL.Shape
import Graphics.GLUtil
data Descriptor = Descriptor VertexArrayObject ArrayIndex NumArrayIndices
draw :: GLFW.Window -> Drawable -> IO ()
draw win xs = do
descriptor <- initResources xs
onDisplay win descriptor
bufferOffset :: Integral a => a -> Ptr b
bufferOffset = plusPtr nullPtr . fromIntegral
loadTex :: FilePath -> IO TextureObject
loadTex f = do t <- either error id <$> readTexture f
textureFilter Texture2D $= ((Linear', Nothing), Linear')
texture2DWrap $= (Repeated, ClampToEdge)
return t
initResources :: ([Vertex4 Float],[TexCoord2 Float],String) -> IO Descriptor
initResources (vs, uv, tex) = do
triangles <- genObjectName
bindVertexArrayObject $= Just triangles
let vertices = vs
numVertices = length vertices
vertexBuffer <- genObjectName
bindBuffer ArrayBuffer $= Just vertexBuffer
withArray vs $ \ptr -> do
let size = fromIntegral (numVertices * sizeOf (head vs))
bufferData ArrayBuffer $= (size, ptr, StaticDraw)
let firstIndex = 0
vPosition = AttribLocation 0
vertexAttribPointer vPosition $=
(ToFloat, VertexArrayDescriptor 4 Float 0 (bufferOffset firstIndex))
vertexAttribArray vPosition $= Enabled
let uv = toUV Planar
textureBuffer <- genObjectName
bindBuffer ArrayBuffer $= Just textureBuffer
withArray uv $ \ptr -> do
let size = fromIntegral (numVertices * sizeOf (head uv))
bufferData ArrayBuffer $= (size, ptr, StaticDraw)
let firstIndex = 0
uvCoords = AttribLocation 2
vertexAttribPointer uvCoords $=
(ToFloat, VertexArrayDescriptor 2 Float 0 (bufferOffset firstIndex))
vertexAttribArray uvCoords $= Enabled
tx <- loadTex tex
texture Texture2D $= Enabled
activeTexture $= TextureUnit 0
textureBinding Texture2D $= Just tx
program <- loadShaders [
ShaderInfo VertexShader (FileSource "Shaders/shader.vert"),
ShaderInfo FragmentShader (FileSource "Shaders/shader.frag")]
currentProgram $= Just program
return $ Descriptor triangles firstIndex (fromIntegral numVertices)
keyPressed :: GLFW.KeyCallback
keyPressed win GLFW.Key'Escape _ GLFW.KeyState'Pressed _ = shutdown win
keyPressed _ _ _ _ _ = return ()
shutdown :: GLFW.WindowCloseCallback
shutdown win = do
GLFW.destroyWindow win
GLFW.terminate
_ <- exitWith ExitSuccess
return ()
resizeWindow :: GLFW.WindowSizeCallback
resizeWindow win w h =
do
GL.viewport $= (GL.Position 0 0, GL.Size (fromIntegral w) (fromIntegral h))
GL.matrixMode $= GL.Projection
GL.loadIdentity
GL.ortho2D 0 (realToFrac w) (realToFrac h) 0
createWindow :: String -> (Int, Int) -> IO GLFW.Window
createWindow title (sizex,sizey) = do
GLFW.init
GLFW.defaultWindowHints
GLFW.windowHint (GLFW.WindowHint'Resizable False)
Just win <- GLFW.createWindow sizex sizey title Nothing Nothing
GLFW.makeContextCurrent (Just win)
GLFW.setWindowSizeCallback win (Just resizeWindow)
GLFW.setKeyCallback win (Just keyPressed)
GLFW.setWindowCloseCallback win (Just shutdown)
return win
closeWindow :: GLFW.Window -> IO ()
closeWindow win = do
GLFW.destroyWindow win
GLFW.terminate
onDisplay :: GLFW.Window -> Descriptor -> IO ()
onDisplay win descriptor@(Descriptor triangles firstIndex numVertices) = do
GL.clearColor $= Color4 0 0 0 1
GL.clear [ColorBuffer]
bindVertexArrayObject $= Just triangles
drawArrays Triangles firstIndex numVertices
GLFW.swapBuffers win
forever $ do
GLFW.pollEvents
onDisplay win descriptor
|
1b6fdb65af7757893ff05893bc999fad8e90e1826e3b549cf6e6da99d02cfa7e
|
mirage/ptt
|
hm.ml
|
open Rresult
open Ptt_tuyau.Lwt_backend
open Lwt.Infix
let src = Logs.Src.create "ptt.hm"
module Log : Logs.LOG = (val Logs.src_log src)
module Make
(Random : Mirage_random.S)
(Time : Mirage_time.S)
(Mclock : Mirage_clock.MCLOCK)
(Pclock : Mirage_clock.PCLOCK)
(Resolver : Ptt.Sigs.RESOLVER with type +'a io = 'a Lwt.t)
(Stack : Tcpip.Stack.V4V6)
(DNS : Dns_client_mirage.S
with type Transport.stack = Stack.t
and type 'a Transport.io = 'a Lwt.t) =
struct
include Ptt_tuyau.Client (Stack)
module Random = struct
type g = Random.g
type +'a io = 'a Lwt.t
let generate ?g buf =
let len = Bytes.length buf in
let raw = Random.generate ?g len in
Cstruct.blit_to_bytes raw 0 buf 0 len
; Lwt.return ()
end
module Flow = Rdwr.Make (Stack.TCP)
module Verifier =
Ptt.Relay.Make (Lwt_scheduler) (Lwt_io) (Flow) (Resolver) (Random)
module Server = Ptt_tuyau.Server (Time) (Stack)
include Ptt_transmit.Make (Pclock) (Stack) (Verifier.Md)
module Lwt_scheduler = Uspf.Sigs.Make (Lwt)
module Uspf_dns = struct
type t = DNS.t
type backend = Lwt_scheduler.t
type error =
[ `Msg of string
| `No_data of [ `raw ] Domain_name.t * Dns.Soa.t
| `No_domain of [ `raw ] Domain_name.t * Dns.Soa.t ]
let getrrecord dns key domain_name =
Lwt_scheduler.inj @@ DNS.get_resource_record dns key domain_name
end
let smtp_verifier_service ~pool ?stop ~port stack resolver conf_server =
Server.init ~port stack >>= fun service ->
let handler pool flow =
let ip, port = Stack.TCP.dst flow in
let v = Flow.make flow in
Lwt.catch
(fun () ->
Lwt_pool.use pool @@ fun (encoder, decoder, queue) ->
Verifier.accept ~encoder:(Fun.const encoder)
~decoder:(Fun.const decoder) ~queue:(Fun.const queue) ~ipaddr:ip v
resolver conf_server
>|= R.reword_error (R.msgf "%a" Verifier.pp_error)
>>= fun res ->
Stack.TCP.close flow >>= fun () -> Lwt.return res)
(function
| Failure err -> Lwt.return (R.error_msg err)
| exn -> Lwt.return (Error (`Exn exn)))
>>= function
| Ok () ->
Log.info (fun m -> m "<%a:%d> submitted a message" Ipaddr.pp ip port)
; Lwt.return ()
| Error (`Msg err) ->
Log.err (fun m -> m "<%a:%d> %s" Ipaddr.pp ip port err)
; Lwt.return ()
| Error (`Exn exn) ->
Log.err (fun m ->
m "<%a:%d> raised an unknown exception: %s" Ipaddr.pp ip port
(Printexc.to_string exn))
; Lwt.return () in
let (`Initialized fiber) =
Server.serve_when_ready ?stop ~handler:(handler pool) service in
fiber
let state =
let open Uspf.Sigs in
let open Lwt_scheduler in
{
return= (fun x -> inj (Lwt.return x))
; bind= (fun x f -> inj (prj x >>= fun x -> prj (f x)))
}
let stream_of_list lst =
let lst = ref lst in
fun () ->
match !lst with
| [] -> Lwt.return_none
| str :: rest ->
lst := rest
; Lwt.return_some (str, 0, String.length str)
let stream_of_field (field_name : Mrmime.Field_name.t) unstrctrd =
stream_of_list
[
(field_name :> string); ": "; Unstrctrd.to_utf_8_string unstrctrd; "\r\n"
]
let concat_stream a b =
let current = ref a in
let rec next () =
let v = !current () in
v >>= function
| Some _ -> v
| None ->
if !current == b then Lwt.return_none
else (
current := b
; next ()) in
next
let smtp_logic ~pool ~info ~tls stack resolver messaged map dns =
let rec go () =
Verifier.Md.await messaged >>= fun () ->
Verifier.Md.pop messaged >>= function
| None -> Lwt.pause () >>= go
| Some (key, queue, consumer) ->
Log.debug (fun m -> m "Got an email.")
; let verify_and_transmit () =
Verifier.resolve_recipients ~domain:info.Ptt.SSMTP.domain resolver
map
(List.map fst (Ptt.Messaged.recipients key))
>>= fun recipients ->
let sender, _ = Ptt.Messaged.from key in
let ctx =
Uspf.empty |> Uspf.with_ip (Ptt.Messaged.ipaddr key) |> fun ctx ->
Option.fold ~none:ctx
~some:(fun sender -> Uspf.with_sender (`MAILFROM sender) ctx)
sender in
Uspf.get ~ctx state dns (module Uspf_dns) |> Lwt_scheduler.prj
>>= function
| Error (`Msg err) ->
Log.err (fun m -> m "Got an error from the SPF verifier: %s." err)
; (* TODO(dinosaure): save this result into the incoming email. *)
transmit ~pool ~info ~tls stack (key, queue, consumer)
recipients
| Ok record ->
Uspf.check ~ctx state dns (module Uspf_dns) record
|> Lwt_scheduler.prj
>>= fun res ->
let receiver =
`Domain (Domain_name.to_strings info.Ptt.SSMTP.domain) in
let field_name, unstrctrd = Uspf.to_field ~ctx ~receiver res in
let stream = stream_of_field field_name unstrctrd in
let consumer = concat_stream stream consumer in
transmit ~pool ~info ~tls stack (key, queue, consumer) recipients
in
Lwt.async verify_and_transmit
; Lwt.pause () >>= go in
go ()
let fiber ?(limit = 20) ?stop ?locals ~port ~tls stack resolver info dns =
let conf_server = Verifier.create ~info in
let messaged = Verifier.messaged conf_server in
let pool0 =
Lwt_pool.create limit @@ fun () ->
let encoder = Bytes.create Colombe.Encoder.io_buffer_size in
let decoder = Bytes.create Colombe.Decoder.io_buffer_size in
let queue = Ke.Rke.create ~capacity:0x1000 Bigarray.char in
Lwt.return (encoder, decoder, queue) in
let pool1 =
Lwt_pool.create limit @@ fun () ->
let encoder = Bytes.create Colombe.Encoder.io_buffer_size in
let decoder = Bytes.create Colombe.Decoder.io_buffer_size in
let queue = Ke.Rke.create ~capacity:0x1000 Bigarray.char in
Lwt.return (encoder, decoder, queue) in
Lwt.join
[
smtp_verifier_service ~pool:pool0 ?stop ~port stack resolver conf_server
; smtp_logic ~pool:pool1 ~info ~tls stack resolver messaged locals dns
]
end
| null |
https://raw.githubusercontent.com/mirage/ptt/92a2e6e9ecb0daec4eeea286de606aa3a0e2e1e4/lib/hm.ml
|
ocaml
|
TODO(dinosaure): save this result into the incoming email.
|
open Rresult
open Ptt_tuyau.Lwt_backend
open Lwt.Infix
let src = Logs.Src.create "ptt.hm"
module Log : Logs.LOG = (val Logs.src_log src)
module Make
(Random : Mirage_random.S)
(Time : Mirage_time.S)
(Mclock : Mirage_clock.MCLOCK)
(Pclock : Mirage_clock.PCLOCK)
(Resolver : Ptt.Sigs.RESOLVER with type +'a io = 'a Lwt.t)
(Stack : Tcpip.Stack.V4V6)
(DNS : Dns_client_mirage.S
with type Transport.stack = Stack.t
and type 'a Transport.io = 'a Lwt.t) =
struct
include Ptt_tuyau.Client (Stack)
module Random = struct
type g = Random.g
type +'a io = 'a Lwt.t
let generate ?g buf =
let len = Bytes.length buf in
let raw = Random.generate ?g len in
Cstruct.blit_to_bytes raw 0 buf 0 len
; Lwt.return ()
end
module Flow = Rdwr.Make (Stack.TCP)
module Verifier =
Ptt.Relay.Make (Lwt_scheduler) (Lwt_io) (Flow) (Resolver) (Random)
module Server = Ptt_tuyau.Server (Time) (Stack)
include Ptt_transmit.Make (Pclock) (Stack) (Verifier.Md)
module Lwt_scheduler = Uspf.Sigs.Make (Lwt)
module Uspf_dns = struct
type t = DNS.t
type backend = Lwt_scheduler.t
type error =
[ `Msg of string
| `No_data of [ `raw ] Domain_name.t * Dns.Soa.t
| `No_domain of [ `raw ] Domain_name.t * Dns.Soa.t ]
let getrrecord dns key domain_name =
Lwt_scheduler.inj @@ DNS.get_resource_record dns key domain_name
end
let smtp_verifier_service ~pool ?stop ~port stack resolver conf_server =
Server.init ~port stack >>= fun service ->
let handler pool flow =
let ip, port = Stack.TCP.dst flow in
let v = Flow.make flow in
Lwt.catch
(fun () ->
Lwt_pool.use pool @@ fun (encoder, decoder, queue) ->
Verifier.accept ~encoder:(Fun.const encoder)
~decoder:(Fun.const decoder) ~queue:(Fun.const queue) ~ipaddr:ip v
resolver conf_server
>|= R.reword_error (R.msgf "%a" Verifier.pp_error)
>>= fun res ->
Stack.TCP.close flow >>= fun () -> Lwt.return res)
(function
| Failure err -> Lwt.return (R.error_msg err)
| exn -> Lwt.return (Error (`Exn exn)))
>>= function
| Ok () ->
Log.info (fun m -> m "<%a:%d> submitted a message" Ipaddr.pp ip port)
; Lwt.return ()
| Error (`Msg err) ->
Log.err (fun m -> m "<%a:%d> %s" Ipaddr.pp ip port err)
; Lwt.return ()
| Error (`Exn exn) ->
Log.err (fun m ->
m "<%a:%d> raised an unknown exception: %s" Ipaddr.pp ip port
(Printexc.to_string exn))
; Lwt.return () in
let (`Initialized fiber) =
Server.serve_when_ready ?stop ~handler:(handler pool) service in
fiber
let state =
let open Uspf.Sigs in
let open Lwt_scheduler in
{
return= (fun x -> inj (Lwt.return x))
; bind= (fun x f -> inj (prj x >>= fun x -> prj (f x)))
}
let stream_of_list lst =
let lst = ref lst in
fun () ->
match !lst with
| [] -> Lwt.return_none
| str :: rest ->
lst := rest
; Lwt.return_some (str, 0, String.length str)
let stream_of_field (field_name : Mrmime.Field_name.t) unstrctrd =
stream_of_list
[
(field_name :> string); ": "; Unstrctrd.to_utf_8_string unstrctrd; "\r\n"
]
let concat_stream a b =
let current = ref a in
let rec next () =
let v = !current () in
v >>= function
| Some _ -> v
| None ->
if !current == b then Lwt.return_none
else (
current := b
; next ()) in
next
let smtp_logic ~pool ~info ~tls stack resolver messaged map dns =
let rec go () =
Verifier.Md.await messaged >>= fun () ->
Verifier.Md.pop messaged >>= function
| None -> Lwt.pause () >>= go
| Some (key, queue, consumer) ->
Log.debug (fun m -> m "Got an email.")
; let verify_and_transmit () =
Verifier.resolve_recipients ~domain:info.Ptt.SSMTP.domain resolver
map
(List.map fst (Ptt.Messaged.recipients key))
>>= fun recipients ->
let sender, _ = Ptt.Messaged.from key in
let ctx =
Uspf.empty |> Uspf.with_ip (Ptt.Messaged.ipaddr key) |> fun ctx ->
Option.fold ~none:ctx
~some:(fun sender -> Uspf.with_sender (`MAILFROM sender) ctx)
sender in
Uspf.get ~ctx state dns (module Uspf_dns) |> Lwt_scheduler.prj
>>= function
| Error (`Msg err) ->
          Log.err (fun m -> m "Got an error from the SPF verifier: %s." err)
          ; transmit ~pool ~info ~tls stack (key, queue, consumer)
recipients
| Ok record ->
Uspf.check ~ctx state dns (module Uspf_dns) record
|> Lwt_scheduler.prj
>>= fun res ->
let receiver =
`Domain (Domain_name.to_strings info.Ptt.SSMTP.domain) in
let field_name, unstrctrd = Uspf.to_field ~ctx ~receiver res in
let stream = stream_of_field field_name unstrctrd in
let consumer = concat_stream stream consumer in
transmit ~pool ~info ~tls stack (key, queue, consumer) recipients
in
Lwt.async verify_and_transmit
; Lwt.pause () >>= go in
go ()
let fiber ?(limit = 20) ?stop ?locals ~port ~tls stack resolver info dns =
let conf_server = Verifier.create ~info in
let messaged = Verifier.messaged conf_server in
let pool0 =
Lwt_pool.create limit @@ fun () ->
let encoder = Bytes.create Colombe.Encoder.io_buffer_size in
let decoder = Bytes.create Colombe.Decoder.io_buffer_size in
let queue = Ke.Rke.create ~capacity:0x1000 Bigarray.char in
Lwt.return (encoder, decoder, queue) in
let pool1 =
Lwt_pool.create limit @@ fun () ->
let encoder = Bytes.create Colombe.Encoder.io_buffer_size in
let decoder = Bytes.create Colombe.Decoder.io_buffer_size in
let queue = Ke.Rke.create ~capacity:0x1000 Bigarray.char in
Lwt.return (encoder, decoder, queue) in
Lwt.join
[
smtp_verifier_service ~pool:pool0 ?stop ~port stack resolver conf_server
; smtp_logic ~pool:pool1 ~info ~tls stack resolver messaged locals dns
]
end
|
d55951a3a3eb0c535fcddd36c3cc86e1e2777c549977cadd1251615f673509b9
|
trevorbernard/dinghy
|
core.clj
|
(ns dinghy.core
(:require [clojure.tools.logging :as log]
[clojure.tools.cli :refer [cli]])
(:gen-class))
(defn- parse-args
[args]
(cli args
["-h" "--help" "Show available parameters" :flag true]
["-c" "--cluster" "Comma separated list of nodes"]
["-p" "--port" "Listen on this port" :parse-fn #(Integer/parseInt %) :default 8121]
["-t" "--transaction-log" "Path to transaction log" :default "raftlog"]))
(defn -main [& args]
(let [[options args banner] (parse-args args)
{:keys [help cluster transaction-log]} options]
(when help
(println banner)
(System/exit 0))))
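;; Illustrative sketch: what `parse-args` yields for a sample argument vector;
;; the exact banner text comes from clojure.tools.cli, and the sample values
;; below are made up.
(comment
  (let [[options _args _banner] (parse-args ["-p" "9000" "-c" "n1,n2"])]
    ;; options is roughly {:port 9000, :cluster "n1,n2",
    ;;                     :transaction-log "raftlog", :help false}
    options))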
| null |
https://raw.githubusercontent.com/trevorbernard/dinghy/c35ab75ff6e6e569c85651a74c4435d86eee8775/src/dinghy/core.clj
|
clojure
|
(ns dinghy.core
(:require [clojure.tools.logging :as log]
[clojure.tools.cli :refer [cli]])
(:gen-class))
(defn- parse-args
[args]
(cli args
["-h" "--help" "Show available parameters" :flag true]
["-c" "--cluster" "Comma separated list of nodes"]
["-p" "--port" "Listen on this port" :parse-fn #(Integer/parseInt %) :default 8121]
["-t" "--transaction-log" "Path to transaction log" :default "raftlog"]))
(defn -main [& args]
(let [[options args banner] (parse-args args)
{:keys [help cluster transaction-log]} options]
(when help
(println banner)
(System/exit 0))))
|
|
4961cc818db0d701ed76a3d7f854a4faa59f70bb0a1165124e3b48c5990e2780
|
RDTK/generator
|
options.lisp
|
;;;; options.lisp --- Option info classes use in the commandline-options module.
;;;;
;;;; Copyright (C) 2017, 2018, 2019 Jan Moringen
;;;;
;;;; Author: < >
(cl:in-package #:build-generator.commandline-options)
;;; `option-info'
(defclass option-info ()
((option :initarg :option
:reader option)
(designators :initarg :designators
:type list
:reader designators)
(argument-name :initarg :argument-name
:type (or null string)
:reader argument-name)
(mandatory? :initarg :mandatory?
:type boolean
:reader mandatory?
:initform nil)))
;;; `named-without-argument-option-info'
(defclass named-without-argument-option-info (option-info)
())
(defmethod option-value ((info named-without-argument-option-info)
(index integer)
(designator string)
(included-value t)
(maybe-value t))
(if included-value
(error 'option-does-not-accept-argument-error :option info)
(values t 1)))
(defmethod option-synopsis ((info named-without-argument-option-info)
(stream t)
&key long?)
(let+ (((&accessors-r/o designators) info))
(format stream "~{~A~^,~}"
(if long? designators (list (first designators))))))
;;; `named-with-argument-option-info'
(defclass named-with-argument-option-info (option-info)
())
(defmethod option-value ((info named-with-argument-option-info)
(index integer)
(designator string)
(included-value t)
(maybe-value t))
(cond
(included-value
(values included-value 1))
((or (not maybe-value) (named-option-designator? maybe-value))
(error 'mandatory-argument-not-supplied-error
:option info
:argument-name (argument-name info)))
(t
(values maybe-value 2))))
(defmethod option-synopsis ((info named-with-argument-option-info)
(stream t)
&key long?)
(let+ (((&accessors-r/o option designators argument-name) info)
(type (configuration.options:option-type option))
((&values default default?)
(configuration.options:option-default
option :if-does-not-exist nil))
(default (when default?
(configuration.options:value->string
option default))))
(format stream "~{~A~^,~}=~A"
(if long? designators (list (first designators)))
argument-name)
    (when long?
      (format stream " (~:[~;default: ~A, ~]~
                      type: ~A)"
              (and default? (not (eq (option-multiplicity info) '*))) default
              type))))
;;; `positional-option-info'
(defclass positional-option-info (option-info)
())
(defmethod option-value ((info positional-option-info)
(index integer)
(designator string)
(included-value t)
(maybe-value t))
(values designator 1))
(defmethod option-synopsis ((info positional-option-info)
(stream t)
&key long?)
(let+ (((&accessors-r/o
option argument-name mandatory? (multiplicity option-multiplicity))
info)
(type (configuration.options:option-type option)))
(format stream "~:[[~A~@[*~]]~;~A~@[*~]~]"
mandatory? argument-name (eq multiplicity '*))
(when long?
(format stream " (type: ~A)" type))))
| null |
https://raw.githubusercontent.com/RDTK/generator/8d9e6e47776f2ccb7b5ed934337d2db50ecbe2f5/src/commandline-options/options.lisp
|
lisp
|
options.lisp --- Option info classes use in the commandline-options module.
`option-info'
`named-without-argument-option-info'
`named-with-argument-option-info'
`positional-option-info'
|
Copyright ( C ) 2017 , 2018 , 2019 Jan Moringen
Author : < >
(cl:in-package #:build-generator.commandline-options)
(defclass option-info ()
((option :initarg :option
:reader option)
(designators :initarg :designators
:type list
:reader designators)
(argument-name :initarg :argument-name
:type (or null string)
:reader argument-name)
(mandatory? :initarg :mandatory?
:type boolean
:reader mandatory?
:initform nil)))
(defclass named-without-argument-option-info (option-info)
())
(defmethod option-value ((info named-without-argument-option-info)
(index integer)
(designator string)
(included-value t)
(maybe-value t))
(if included-value
(error 'option-does-not-accept-argument-error :option info)
(values t 1)))
(defmethod option-synopsis ((info named-without-argument-option-info)
(stream t)
&key long?)
(let+ (((&accessors-r/o designators) info))
(format stream "~{~A~^,~}"
(if long? designators (list (first designators))))))
(defclass named-with-argument-option-info (option-info)
())
(defmethod option-value ((info named-with-argument-option-info)
(index integer)
(designator string)
(included-value t)
(maybe-value t))
(cond
(included-value
(values included-value 1))
((or (not maybe-value) (named-option-designator? maybe-value))
(error 'mandatory-argument-not-supplied-error
:option info
:argument-name (argument-name info)))
(t
(values maybe-value 2))))
(defmethod option-synopsis ((info named-with-argument-option-info)
(stream t)
&key long?)
(let+ (((&accessors-r/o option designators argument-name) info)
(type (configuration.options:option-type option))
((&values default default?)
(configuration.options:option-default
option :if-does-not-exist nil))
(default (when default?
(configuration.options:value->string
option default))))
(format stream "~{~A~^,~}=~A"
(if long? designators (list (first designators)))
argument-name)
    (when long?
      (format stream " (~:[~;default: ~A, ~]~
                      type: ~A)"
              (and default? (not (eq (option-multiplicity info) '*))) default
              type))))
(defclass positional-option-info (option-info)
())
(defmethod option-value ((info positional-option-info)
(index integer)
(designator string)
(included-value t)
(maybe-value t))
(values designator 1))
(defmethod option-synopsis ((info positional-option-info)
(stream t)
&key long?)
(let+ (((&accessors-r/o
option argument-name mandatory? (multiplicity option-multiplicity))
info)
(type (configuration.options:option-type option)))
(format stream "~:[[~A~@[*~]]~;~A~@[*~]~]"
mandatory? argument-name (eq multiplicity '*))
(when long?
(format stream " (type: ~A)" type))))
|
b82c07ec1779a7e5d4ee5875455fab56d20446f4867efe721ea71a3d13b0279a
|
sondresl/AdventOfCode
|
Day02.hs
|
{-# LANGUAGE ViewPatterns #-}
module Day02 where
import Lib (tuple, intoEndo)
import Data.Monoid (Endo(..), Dual(..), appEndo)
import Linear (V3(..), _xy, _xz)
import Control.Lens (productOf, each)
encode :: (String, String) -> V3 Int -> V3 Int
encode (str, read -> v) =
case str of
"up" -> (+ V3 0 0 (-v))
"down" -> (+ V3 0 0 v)
"forward" -> \(V3 x y z) -> V3 (x + v) (y + v * z) z
main :: IO ()
main = do
input <- intoEndo (encode . tuple . words) . lines <$> readFile "../data/day02.in"
let run f = productOf (f . each) . (`appEndo` V3 0 0 0)
print $ run _xz input
print $ run _xy input
-- 2039256
-- 1856459736
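-- Illustrative sketch: `productOf (f . each)` multiplies the two components
-- selected by the lens, e.g. the x (distance) and z (depth) fields of the
-- final position for part 1. The small vector below is made up.
exampleProduct :: Int
exampleProduct = productOf (_xz . each) (V3 2 7 5)  -- 2 * 5 == 10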
| null |
https://raw.githubusercontent.com/sondresl/AdventOfCode/ab30e8be110e424728511de51e389b1fc6a2f61a/2021/Haskell/src/Day02.hs
|
haskell
|
# LANGUAGE ViewPatterns #
module Day02 where
import Lib (tuple, intoEndo)
import Data.Monoid (Endo(..), Dual(..), appEndo)
import Linear (V3(..), _xy, _xz)
import Control.Lens (productOf, each)
encode :: (String, String) -> V3 Int -> V3 Int
encode (str, read -> v) =
case str of
"up" -> (+ V3 0 0 (-v))
"down" -> (+ V3 0 0 v)
"forward" -> \(V3 x y z) -> V3 (x + v) (y + v * z) z
main :: IO ()
main = do
input <- intoEndo (encode . tuple . words) . lines <$> readFile "../data/day02.in"
let run f = productOf (f . each) . (`appEndo` V3 0 0 0)
print $ run _xz input
print $ run _xy input
2039256
1856459736
|
|
b2c064a28296ad7597cb5b22ad62a983d0fffd424cecc82f2a5739f658658e04
|
ocaml-gospel/gospel
|
t6.mli
|
(**************************************************************************)
(*                                                                        *)
(*  GOSPEL -- A Specification Language for OCaml                          *)
(*                                                                        *)
(*  Copyright (c) 2018- The VOCaL Project                                 *)
(*                                                                        *)
(*  This software is free software, distributed under the MIT license     *)
(*  (as described in file LICENSE enclosed).                              *)
(**************************************************************************)
(*@ function p (x:integer):integer = x *)

(*@ requires x > 0
    variant x = 0
    ensures x = 2
    ensures x > 2
    ensures x > 1 *)

(* ERROR: the term in the variant clause should be of type integer *)

(* {gospel_expected|
   [125] File "t6.mli", line 13, characters 12-17:
   13 |     variant x = 0
                    ^^^^^
   Error: A term was expected.
   |gospel_expected} *)
| null |
https://raw.githubusercontent.com/ocaml-gospel/gospel/79841c510baeb396d9a695ae33b290899188380b/test/negative/t6.mli
|
ocaml
|
************************************************************************
(as described in file LICENSE enclosed).
************************************************************************
@ function p (x:integer):integer = x
ERROR: the term in the variant clause should be of type integer
|
GOSPEL -- A Specification Language for OCaml
Copyright ( c ) 2018- The VOCaL Project
This software is free software , distributed under the MIT license
(*@ requires x > 0
    variant x = 0
    ensures x = 2
    ensures x > 2
    ensures x > 1 *)

(* {gospel_expected|
   [125] File "t6.mli", line 13, characters 12-17:
   13 |     variant x = 0
                    ^^^^^
   Error: A term was expected.
   |gospel_expected} *)
|
c77ba16fb7e1bd1275a454318e2f6aee7263f50c50899c36a87355ad8224d62e
|
RyanGlScott/text-show
|
TH.hs
|
{-# LANGUAGE TemplateHaskell #-}
{-# OPTIONS_GHC -fno-warn-orphans #-}

{-|
Module: TextShow.TH
Copyright: (C) 2014-2017 Ryan Scott
License: BSD-style (see the file LICENSE)
Maintainer: Ryan Scott
Stability: Provisional
Portability: GHC

Functions to mechanically derive 'TextShow', 'TextShow1', or 'TextShow2' instances,
or to splice @show@-related expressions into Haskell source code. You need to enable
the @TemplateHaskell@ language extension in order to use this module.

/Since: 2/
-}
module TextShow.TH (module TextShow.TH.Internal) where
import TextShow.Instances ()
import TextShow.TH.Internal
-------------------------------------------------------------------------------
$(deriveTextShow ''GenTextMethods)
$(deriveTextShow ''Options)
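-- Illustrative sketch: deriving a 'TextShow' instance for a made-up user type
-- via the re-exported Template Haskell deriver.
data Color = Red | Green | Blue
$(deriveTextShow ''Color)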
| null |
https://raw.githubusercontent.com/RyanGlScott/text-show/5ea297d0c7ae2d043f000c791cc12ac53f469944/src/TextShow/TH.hs
|
haskell
|
-----------------------------------------------------------------------------
|
{-# LANGUAGE TemplateHaskell #-}
{-# OPTIONS_GHC -fno-warn-orphans #-}

{-|
Module: TextShow.TH
Copyright: (C) 2014-2017 Ryan Scott
License: BSD-style (see the file LICENSE)
Maintainer: Ryan Scott
Stability: Provisional
Portability: GHC

Functions to mechanically derive 'TextShow', 'TextShow1', or 'TextShow2' instances,
or to splice @show@-related expressions into Haskell source code. You need to enable
the @TemplateHaskell@ language extension in order to use this module.

/Since: 2/
-}
module TextShow.TH (module TextShow.TH.Internal) where
import TextShow.Instances ()
import TextShow.TH.Internal
$(deriveTextShow ''GenTextMethods)
$(deriveTextShow ''Options)
|
086787dc83ec168ee0cfc10e56fd34e33571ae83d8547aec54683bdfcddbab95
|
ucsd-progsys/liquidhaskell
|
UnboundFunInSpec1.hs
|
{-@ LIQUID "--expect-error-containing=Illegal type specification for `UnboundFunInSpec1.foo`" @-}
module UnboundFunInSpec1 where
{-@ foo :: xs:_ -> {v:_ | this = rubbish } @-}
foo _ _ = 0
| null |
https://raw.githubusercontent.com/ucsd-progsys/liquidhaskell/f46dbafd6ce1f61af5b56f31924c21639c982a8a/tests/errors/UnboundFunInSpec1.hs
|
haskell
|
@ LIQUID "--expect-error-containing=Illegal type specification for `UnboundFunInSpec1.foo`" @
@ foo :: xs:_ -> {v:_ | this = rubbish } @
|
module UnboundFunInSpec1 where
foo _ _ = 0
|
51c96f917a33fa23a9511f73ab93a834d0c330007109d2113f3574632f4bb16a
|
con-kitty/categorifier
|
Instances.hs
|
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE MultiParamTypeClasses #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE UndecidableInstances #-}
{-# OPTIONS_GHC -Wno-orphans #-}

-- | Orphan instances for the plugin test categories. This also re-exports the types for the
-- categories to make using it less fraught. I.e., if you need this, you should get the types from
-- here rather than from "Categorifier.Test.Hask", etc.
module Categorifier.Test.ConCatExtensions.Instances (Hask (..), Term (..), TotOrd (..)) where
import Categorifier.ConCatExtensions
( ApplicativeCat (..),
BindableCat (..),
FixedCat (..),
FloatingCat' (..),
FloatingPointClassifyCat (..),
FloatingPointConvertCat (..),
IntegralCat' (..),
LaxMonoidalFunctorCat (..),
MonadCat (..),
NumCat' (..),
OrdCat' (..),
PowICat (..),
RealToFracCat (..),
SemigroupCat (..),
TranscendentalCat (..),
TraversableCat' (..),
)
import Categorifier.Test.ConCat.Instances (Hask (..), Term (..), unaryZero)
import Categorifier.Test.TotOrd (TotOrd (..))
import qualified ConCat.Category as ConCat
import qualified GHC.Float
import qualified GHC.Real
-- Term
instance Ord a => OrdCat' Term a where
compareK = ZeroId
instance PowICat Term a where
powIK _ = ZeroId
instance IntegralCat' Term a where
evenK = ZeroId
oddK = ZeroId
quotK = ZeroId
remK = ZeroId
instance FloatingCat' Term m where
powK = ZeroId
instance TranscendentalCat Term a where
tanK = ZeroId
asinK = ZeroId
acosK = ZeroId
atanK = ZeroId
sinhK = ZeroId
coshK = ZeroId
tanhK = ZeroId
asinhK = ZeroId
acoshK = ZeroId
atanhK = ZeroId
instance FloatingPointConvertCat Term where
floatToDoubleK = ZeroId
doubleToFloatK = ZeroId
instance FloatingPointClassifyCat Term a where
isNegativeZeroK = ZeroId
isInfiniteK = ZeroId
isFiniteK = ZeroId
isNaNK = ZeroId
isDenormalK = ZeroId
instance RealToFracCat Term a b where
realToFracK = ZeroId
instance LaxMonoidalFunctorCat Term m where
liftA2K = unaryZero
instance ApplicativeCat Term m where
apK = ZeroId
instance Functor m => MonadCat Term m where
joinK = ZeroId
mmapK = unaryZero
instance Functor m => BindableCat Term m where
bindK = ZeroId
instance TraversableCat' Term t f where
traverseK = unaryZero
instance NumCat' Term a where
absK = ZeroId
signumK = ZeroId
instance SemigroupCat Term m where
appendK = ZeroId
instance FixedCat Term where
fixK = unaryZero
-- Hask
instance Ord a => OrdCat' Hask a where
compareK = Hask compareK
instance Num a => PowICat Hask a where
powIK i = Hask (powIK i)
instance LaxMonoidalFunctorCat (->) m => LaxMonoidalFunctorCat Hask m where
liftA2K (Hask f) = Hask $ liftA2K f
instance ApplicativeCat (->) m => ApplicativeCat Hask m where
apK = Hask apK
instance MonadCat (->) m => MonadCat Hask m where
joinK = Hask joinK
mmapK (Hask fn) = Hask $ mmapK fn
instance BindableCat (->) m => BindableCat Hask m where
bindK = Hask bindK
instance TraversableCat' (->) t f => TraversableCat' Hask t f where
traverseK (Hask fn) = Hask (traverseK fn)
instance NumCat' (->) m => NumCat' Hask m where
absK = Hask absK
signumK = Hask signumK
instance Integral a => IntegralCat' Hask a where
evenK = Hask evenK
oddK = Hask oddK
quotK = Hask quotK
remK = Hask remK
instance FloatingCat' (->) m => FloatingCat' Hask m where
powK = Hask powK
instance (Floating a, TranscendentalCat (->) a) => TranscendentalCat Hask a where
tanK = Hask tanK
asinK = Hask asinK
acosK = Hask acosK
atanK = Hask atanK
sinhK = Hask sinhK
coshK = Hask coshK
tanhK = Hask tanhK
asinhK = Hask asinhK
acoshK = Hask acoshK
atanhK = Hask atanhK
instance SemigroupCat (->) m => SemigroupCat Hask m where
appendK = Hask appendK
instance FixedCat Hask where
fixK (Hask f) = Hask (fixK f)
instance (Real a, Fractional b) => RealToFracCat Hask a b where
realToFracK = Hask realToFracK
instance FloatingPointConvertCat Hask where
floatToDoubleK = Hask floatToDoubleK
doubleToFloatK = Hask doubleToFloatK
instance FloatingPointClassifyCat Hask Double where
isNegativeZeroK = Hask isNegativeZeroK
isInfiniteK = Hask isInfiniteK
isFiniteK = Hask isFiniteK
isNaNK = Hask isNaNK
isDenormalK = Hask isDenormalK
instance FloatingPointClassifyCat Hask Float where
isNegativeZeroK = Hask isNegativeZeroK
isInfiniteK = Hask isInfiniteK
isFiniteK = Hask isFiniteK
isNaNK = Hask isNaNK
isDenormalK = Hask isDenormalK
-- TotOrd
instance Applicative f => LaxMonoidalFunctorCat TotOrd f where
liftA2K (TotOrd fn) = TotOrd $ liftA2K fn
instance Monad f => MonadCat TotOrd f where
joinK = TotOrd joinK
mmapK (TotOrd fn) = TotOrd $ mmapK fn
instance (Floating a, Ord a) => FloatingCat' TotOrd a where
powK = TotOrd powK
instance (Floating a, Ord a) => TranscendentalCat TotOrd a where
tanK = TotOrd tanK
asinK = TotOrd asinK
acosK = TotOrd acosK
atanK = TotOrd atanK
sinhK = TotOrd sinhK
coshK = TotOrd coshK
tanhK = TotOrd tanhK
asinhK = TotOrd asinhK
acoshK = TotOrd acoshK
atanhK = TotOrd atanhK
instance FloatingPointConvertCat TotOrd where
floatToDoubleK = TotOrd $ ConCat.Constrained GHC.Float.float2Double
doubleToFloatK = TotOrd $ ConCat.Constrained GHC.Float.double2Float
instance
(Num a, FloatingPointClassifyCat (->) a) =>
FloatingPointClassifyCat TotOrd a
where
isNegativeZeroK = TotOrd $ ConCat.Constrained isNegativeZeroK
isInfiniteK = TotOrd $ ConCat.Constrained isInfiniteK
isFiniteK = TotOrd $ ConCat.Constrained isFiniteK
isNaNK = TotOrd $ ConCat.Constrained isNaNK
isDenormalK = TotOrd $ ConCat.Constrained isDenormalK
instance (Real a, Fractional b) => RealToFracCat TotOrd a b where
realToFracK = TotOrd $ ConCat.Constrained GHC.Real.realToFrac
instance Num a => NumCat' TotOrd a where
absK = TotOrd $ ConCat.Constrained abs
signumK = TotOrd $ ConCat.Constrained signum
instance Integral a => IntegralCat' TotOrd a where
evenK = TotOrd evenK
oddK = TotOrd oddK
quotK = TotOrd quotK
remK = TotOrd remK
instance Semigroup m => SemigroupCat TotOrd m where
appendK = TotOrd . ConCat.Constrained $ uncurry (<>)
-- | This should live in "Categorifier.ConCatExtensions", but can't until
-- @`ConCat.TracedCat` `ConCat.Constrained`@ is moved upstream.
instance
(FixedCat k, ConCat.OpSat (ConCat.Prod k) con) =>
FixedCat (ConCat.Constrained con k)
where
fixK (ConCat.Constrained fn) = ConCat.Constrained $ fixK fn
instance FixedCat TotOrd where
fixK (TotOrd fn) = TotOrd $ fixK fn
| null |
https://raw.githubusercontent.com/con-kitty/categorifier/d8dc1106c4600c2168889519d2c3f843db2e9410/integrations/concat-extensions/integration-test/Categorifier/Test/ConCatExtensions/Instances.hs
|
haskell
|
| Orphan instances for the plugin test categories. This also re-exports the types for the
categories to make using it less fraught. I.e., if you need this, you should get the types from
Term
TotOrd
|
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE MultiParamTypeClasses #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE UndecidableInstances #-}
{-# OPTIONS_GHC -Wno-orphans #-}
-- here rather than from "Categorifier.Test.Hask", etc.
module Categorifier.Test.ConCatExtensions.Instances (Hask (..), Term (..), TotOrd (..)) where
import Categorifier.ConCatExtensions
( ApplicativeCat (..),
BindableCat (..),
FixedCat (..),
FloatingCat' (..),
FloatingPointClassifyCat (..),
FloatingPointConvertCat (..),
IntegralCat' (..),
LaxMonoidalFunctorCat (..),
MonadCat (..),
NumCat' (..),
OrdCat' (..),
PowICat (..),
RealToFracCat (..),
SemigroupCat (..),
TranscendentalCat (..),
TraversableCat' (..),
)
import Categorifier.Test.ConCat.Instances (Hask (..), Term (..), unaryZero)
import Categorifier.Test.TotOrd (TotOrd (..))
import qualified ConCat.Category as ConCat
import qualified GHC.Float
import qualified GHC.Real
instance Ord a => OrdCat' Term a where
compareK = ZeroId
instance PowICat Term a where
powIK _ = ZeroId
instance IntegralCat' Term a where
evenK = ZeroId
oddK = ZeroId
quotK = ZeroId
remK = ZeroId
instance FloatingCat' Term m where
powK = ZeroId
instance TranscendentalCat Term a where
tanK = ZeroId
asinK = ZeroId
acosK = ZeroId
atanK = ZeroId
sinhK = ZeroId
coshK = ZeroId
tanhK = ZeroId
asinhK = ZeroId
acoshK = ZeroId
atanhK = ZeroId
instance FloatingPointConvertCat Term where
floatToDoubleK = ZeroId
doubleToFloatK = ZeroId
instance FloatingPointClassifyCat Term a where
isNegativeZeroK = ZeroId
isInfiniteK = ZeroId
isFiniteK = ZeroId
isNaNK = ZeroId
isDenormalK = ZeroId
instance RealToFracCat Term a b where
realToFracK = ZeroId
instance LaxMonoidalFunctorCat Term m where
liftA2K = unaryZero
instance ApplicativeCat Term m where
apK = ZeroId
instance Functor m => MonadCat Term m where
joinK = ZeroId
mmapK = unaryZero
instance Functor m => BindableCat Term m where
bindK = ZeroId
instance TraversableCat' Term t f where
traverseK = unaryZero
instance NumCat' Term a where
absK = ZeroId
signumK = ZeroId
instance SemigroupCat Term m where
appendK = ZeroId
instance FixedCat Term where
fixK = unaryZero
-- Hask
instance Ord a => OrdCat' Hask a where
compareK = Hask compareK
instance Num a => PowICat Hask a where
powIK i = Hask (powIK i)
instance LaxMonoidalFunctorCat (->) m => LaxMonoidalFunctorCat Hask m where
liftA2K (Hask f) = Hask $ liftA2K f
instance ApplicativeCat (->) m => ApplicativeCat Hask m where
apK = Hask apK
instance MonadCat (->) m => MonadCat Hask m where
joinK = Hask joinK
mmapK (Hask fn) = Hask $ mmapK fn
instance BindableCat (->) m => BindableCat Hask m where
bindK = Hask bindK
instance TraversableCat' (->) t f => TraversableCat' Hask t f where
traverseK (Hask fn) = Hask (traverseK fn)
instance NumCat' (->) m => NumCat' Hask m where
absK = Hask absK
signumK = Hask signumK
instance Integral a => IntegralCat' Hask a where
evenK = Hask evenK
oddK = Hask oddK
quotK = Hask quotK
remK = Hask remK
instance FloatingCat' (->) m => FloatingCat' Hask m where
powK = Hask powK
instance (Floating a, TranscendentalCat (->) a) => TranscendentalCat Hask a where
tanK = Hask tanK
asinK = Hask asinK
acosK = Hask acosK
atanK = Hask atanK
sinhK = Hask sinhK
coshK = Hask coshK
tanhK = Hask tanhK
asinhK = Hask asinhK
acoshK = Hask acoshK
atanhK = Hask atanhK
instance SemigroupCat (->) m => SemigroupCat Hask m where
appendK = Hask appendK
instance FixedCat Hask where
fixK (Hask f) = Hask (fixK f)
instance (Real a, Fractional b) => RealToFracCat Hask a b where
realToFracK = Hask realToFracK
instance FloatingPointConvertCat Hask where
floatToDoubleK = Hask floatToDoubleK
doubleToFloatK = Hask doubleToFloatK
instance FloatingPointClassifyCat Hask Double where
isNegativeZeroK = Hask isNegativeZeroK
isInfiniteK = Hask isInfiniteK
isFiniteK = Hask isFiniteK
isNaNK = Hask isNaNK
isDenormalK = Hask isDenormalK
instance FloatingPointClassifyCat Hask Float where
isNegativeZeroK = Hask isNegativeZeroK
isInfiniteK = Hask isInfiniteK
isFiniteK = Hask isFiniteK
isNaNK = Hask isNaNK
isDenormalK = Hask isDenormalK
instance Applicative f => LaxMonoidalFunctorCat TotOrd f where
liftA2K (TotOrd fn) = TotOrd $ liftA2K fn
instance Monad f => MonadCat TotOrd f where
joinK = TotOrd joinK
mmapK (TotOrd fn) = TotOrd $ mmapK fn
instance (Floating a, Ord a) => FloatingCat' TotOrd a where
powK = TotOrd powK
instance (Floating a, Ord a) => TranscendentalCat TotOrd a where
tanK = TotOrd tanK
asinK = TotOrd asinK
acosK = TotOrd acosK
atanK = TotOrd atanK
sinhK = TotOrd sinhK
coshK = TotOrd coshK
tanhK = TotOrd tanhK
asinhK = TotOrd asinhK
acoshK = TotOrd acoshK
atanhK = TotOrd atanhK
instance FloatingPointConvertCat TotOrd where
floatToDoubleK = TotOrd $ ConCat.Constrained GHC.Float.float2Double
doubleToFloatK = TotOrd $ ConCat.Constrained GHC.Float.double2Float
instance
(Num a, FloatingPointClassifyCat (->) a) =>
FloatingPointClassifyCat TotOrd a
where
isNegativeZeroK = TotOrd $ ConCat.Constrained isNegativeZeroK
isInfiniteK = TotOrd $ ConCat.Constrained isInfiniteK
isFiniteK = TotOrd $ ConCat.Constrained isFiniteK
isNaNK = TotOrd $ ConCat.Constrained isNaNK
isDenormalK = TotOrd $ ConCat.Constrained isDenormalK
instance (Real a, Fractional b) => RealToFracCat TotOrd a b where
realToFracK = TotOrd $ ConCat.Constrained GHC.Real.realToFrac
instance Num a => NumCat' TotOrd a where
absK = TotOrd $ ConCat.Constrained abs
signumK = TotOrd $ ConCat.Constrained signum
instance Integral a => IntegralCat' TotOrd a where
evenK = TotOrd evenK
oddK = TotOrd oddK
quotK = TotOrd quotK
remK = TotOrd remK
instance Semigroup m => SemigroupCat TotOrd m where
appendK = TotOrd . ConCat.Constrained $ uncurry (<>)
-- | This should live in "Categorifier.ConCatExtensions", but can't until
-- @`ConCat.TracedCat` `ConCat.Constrained`@ is moved upstream.
instance
(FixedCat k, ConCat.OpSat (ConCat.Prod k) con) =>
FixedCat (ConCat.Constrained con k)
where
fixK (ConCat.Constrained fn) = ConCat.Constrained $ fixK fn
instance FixedCat TotOrd where
fixK (TotOrd fn) = TotOrd $ fixK fn
|
2abd2bd3fd56d918b551be457f8feb4fbfa555f0d9018e95bd10013b56aa36d8
|
zk/clojuredocs
|
mail.clj
|
(ns clojuredocs.mail
(:require [clj-http.client :as client]
[clojuredocs.util :as util]
[clojuredocs.config :as config]
[somnium.congomongo :as mon]))
(defn migrate-account-content [migration-key]
(format
"Hey There,
You're receiving this message because somebody (probably you) requested that we migrate your ClojureDocs account. You can do this by visiting the following link:
%s
If you didn't request this email, you can safely ignore it.
Thanks!"
(config/url "/migrate-account/migrate/" migration-key)))
(defn migration-request [to-email migration-key]
(let [{:keys [endpoint api-key from]} config/mailgun-config]
{:method :post
:url endpoint
:basic-auth ["api" api-key]
:form-params {:from from
:to to-email
:subject "Migrate Your ClojureDocs Account"
:text (migrate-account-content migration-key)}}))
(defn send-email [payload]
(let [res (client/request payload)]
(mon/insert! :events
{:tag "email-sent"
:payload (assoc payload :basic-auth "REDACTED")
:response res})))
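;; Illustrative sketch: the clj-http request map that `migration-request`
;; builds. The endpoint, api-key and from address come from
;; `config/mailgun-config`; the concrete values shown here are made up.
(comment
  (migration-request "user@example.com" "abc123")
  ;; => {:method :post
  ;;     :url "<mailgun endpoint>"
  ;;     :basic-auth ["api" "<api-key>"]
  ;;     :form-params {:from "<from address>"
  ;;                   :to "user@example.com"
  ;;                   :subject "Migrate Your ClojureDocs Account"
  ;;                   :text "..."}}
  )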
| null |
https://raw.githubusercontent.com/zk/clojuredocs/28f5ee500f4349039ee81c70d7ac40acbb19e5d8/src/clj/clojuredocs/mail.clj
|
clojure
|
(ns clojuredocs.mail
(:require [clj-http.client :as client]
[clojuredocs.util :as util]
[clojuredocs.config :as config]
[somnium.congomongo :as mon]))
(defn migrate-account-content [migration-key]
(format
"Hey There,
You're receiving this message because somebody (probably you) requested that we migrate your ClojureDocs account. You can do this by visiting the following link:
%s
If you didn't request this email, you can safely ignore it.
Thanks!"
(config/url "/migrate-account/migrate/" migration-key)))
(defn migration-request [to-email migration-key]
(let [{:keys [endpoint api-key from]} config/mailgun-config]
{:method :post
:url endpoint
:basic-auth ["api" api-key]
:form-params {:from from
:to to-email
:subject "Migrate Your ClojureDocs Account"
:text (migrate-account-content migration-key)}}))
(defn send-email [payload]
(let [res (client/request payload)]
(mon/insert! :events
{:tag "email-sent"
:payload (assoc payload :basic-auth "REDACTED")
:response res})))
|
|
7b58c1f8ddd62d3a9613ef634731db67ffbc819c4241b9c35276197b6cbd6489
|
mattdw/stemmers
|
porter.clj
|
;; An implementation of the algorithm, detailed at
;; </~martin/PorterStemmer/>
(ns stemmers.porter
(:require [clojure.string :as str]))
(def stem
^String
(let [c "[^aeiou]"
v "[aeiouy]"
cs (str c "[^aeiouy]*")
vs (str v "[aeiou]*")
mgr0 (re-pattern (str "^(" cs ")?" vs cs))
meq1 (re-pattern (str "^(" cs ")?" vs cs "(" vs ")?$"))
mgr1 (re-pattern (str "^(" cs ")?" vs cs vs cs))
s_v (re-pattern (str "^(" cs ")?" vs))
step1a-re1 #"^(.+?)(ss|i)es$"
step1a-re2 #"^(.+?)([^s])s$"
step1a (fn [w]
(cond
(re-find step1a-re1 w) (str/replace w step1a-re1 "$1$2")
(re-find step1a-re2 w) (str/replace w step1a-re2 "$1$2")
:else w))
step1b-re1 #"^(.+?)eed$"
step1b-re2 #"^(.+?)(ed|ing)$"
step1b-stem1 #"(at|bl|iz)$"
step1b-stem2 #"([^aeiouylsz])\1$"
step1b-stem3 (re-pattern (str "^" cs v "[^aeiouwxy]$"))
step1b (fn [w]
(let [groups1 (re-find step1b-re1 w)
groups2 (re-find step1b-re2 w)]
(cond
groups1 (if (re-find mgr0 (groups1 1))
(apply str (butlast w))
w)
groups2 (let [stem (groups2 1)]
(if (re-find s_v stem)
(cond
(re-find step1b-stem1 stem) (str stem "e")
(re-find step1b-stem2 stem) (apply str (butlast stem))
(re-find step1b-stem3 stem) (str stem "e")
:else stem)
w))
:else w)))
step1c-re1 #"^(.+?)y$"
step1c (fn [w]
(if-let [[_ stem & _] (re-find step1c-re1 w)]
(if (re-find s_v stem) (str stem "i") w)
w))
apply-suffix-map (fn [suffix-map ^String w]
(if-let [[stem suffix]
(first (for [key (keys suffix-map)
:when (.endsWith w key)]
[(.substring w 0 (- (count w) (count key))) key]))]
(if (re-find mgr0 stem)
(str stem (suffix-map suffix))
w)
w))
step2-map {"ational" "ate"
"tional" "tion"
"enci" "ence"
"anci" "ance"
"izer" "ize"
"bli" "ble"
"alli" "al"
"entli" "ent"
"eli" "e"
"ousli" "ous"
"ization" "ize"
"ation" "ate"
"ator" "ate"
"alism" "al"
"iveness" "ive"
"fulness" "ful"
"ousness" "ous"
"aliti" "al"
"iviti" "ive"
"biliti" "ble"
"logi" "log"}
step2 (partial apply-suffix-map step2-map)
step3-map {"icate" "ic"
"ative" ""
"alize" "al"
"iciti" "ic"
"ical" "ic"
"ful" ""
"ness" ""}
step3 (partial apply-suffix-map step3-map)
step4-suffixes1 ["al" "ance" "ence" "er" "ic" "able" "ible" "ant" "ement"
"ment" "ent" "ou" "ism" "ate" "iti" "ous" "ive" "ize"]
step4-re1 #"^(.+?)(s|t)(ion)$"
step4 (fn [^String w]
(if-let [stem (first (for [suffix step4-suffixes1
:when (.endsWith w suffix)]
(.substring w 0 (- (count w) (count suffix)))))]
(if (re-find mgr1 stem) stem w)
(if-let [groups (re-find step4-re1 w)]
(let [stem (str (groups 1) (groups 2))]
(if (re-find mgr1 stem) stem w))
w)))
step5-re1 #"^(.+?)e$"
step5-re2 (re-pattern (str "^" cs v "[^aeiouwxy]$"))
step5 (fn [w]
(if-let [[_ stem & _] (re-find step5-re1 w)]
(if (or (re-find mgr1 stem)
(and (re-find meq1 stem) (not (re-find step5-re2 stem))))
stem
w)
w))
step6-re #"ll$"
step6 (fn [w]
(if (and (re-find step6-re w) (re-find mgr1 w))
(apply str (butlast w))
w))
step-y1 (fn [^String w]
(let [firstch (.substring w 0 1)
firstch-y? (= firstch "y")]
[firstch-y? (if firstch-y?
(str "Y" (.substring w 1))
w)]))
step-y2 (fn [firstch-y? ^String w]
(if firstch-y?
(str (.toLowerCase ^String (.substring w 0 1))
(.substring w 1))
w))]
(fn [word]
(if (< (count word) 3)
word
(let [[starts-with-y? w] (step-y1 word)]
(->> w
step1a step1b step1c step2 step3 step4 step5 step6
(step-y2 starts-with-y?)))))))
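;; Illustrative sketch: a couple of inputs and the stems the rules above
;; should produce ("sses" -> "ss" in step 1a; "ing" removal plus consonant
;; undoubling in step 1b).
(comment
  (stem "caresses") ;; => "caress"
  (stem "running")  ;; => "run"
  )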
| null |
https://raw.githubusercontent.com/mattdw/stemmers/7a29b412352ebb604058b357ba332a8b53d0565f/src/stemmers/porter.clj
|
clojure
|
</~martin/PorterStemmer/>
|
;; An implementation of the algorithm, detailed at
(ns stemmers.porter
(:require [clojure.string :as str]))
(def stem
^String
(let [c "[^aeiou]"
v "[aeiouy]"
cs (str c "[^aeiouy]*")
vs (str v "[aeiou]*")
mgr0 (re-pattern (str "^(" cs ")?" vs cs))
meq1 (re-pattern (str "^(" cs ")?" vs cs "(" vs ")?$"))
mgr1 (re-pattern (str "^(" cs ")?" vs cs vs cs))
s_v (re-pattern (str "^(" cs ")?" vs))
step1a-re1 #"^(.+?)(ss|i)es$"
step1a-re2 #"^(.+?)([^s])s$"
step1a (fn [w]
(cond
(re-find step1a-re1 w) (str/replace w step1a-re1 "$1$2")
(re-find step1a-re2 w) (str/replace w step1a-re2 "$1$2")
:else w))
step1b-re1 #"^(.+?)eed$"
step1b-re2 #"^(.+?)(ed|ing)$"
step1b-stem1 #"(at|bl|iz)$"
step1b-stem2 #"([^aeiouylsz])\1$"
step1b-stem3 (re-pattern (str "^" cs v "[^aeiouwxy]$"))
step1b (fn [w]
(let [groups1 (re-find step1b-re1 w)
groups2 (re-find step1b-re2 w)]
(cond
groups1 (if (re-find mgr0 (groups1 1))
(apply str (butlast w))
w)
groups2 (let [stem (groups2 1)]
(if (re-find s_v stem)
(cond
(re-find step1b-stem1 stem) (str stem "e")
(re-find step1b-stem2 stem) (apply str (butlast stem))
(re-find step1b-stem3 stem) (str stem "e")
:else stem)
w))
:else w)))
step1c-re1 #"^(.+?)y$"
step1c (fn [w]
(if-let [[_ stem & _] (re-find step1c-re1 w)]
(if (re-find s_v stem) (str stem "i") w)
w))
apply-suffix-map (fn [suffix-map ^String w]
(if-let [[stem suffix]
(first (for [key (keys suffix-map)
:when (.endsWith w key)]
[(.substring w 0 (- (count w) (count key))) key]))]
(if (re-find mgr0 stem)
(str stem (suffix-map suffix))
w)
w))
step2-map {"ational" "ate"
"tional" "tion"
"enci" "ence"
"anci" "ance"
"izer" "ize"
"bli" "ble"
"alli" "al"
"entli" "ent"
"eli" "e"
"ousli" "ous"
"ization" "ize"
"ation" "ate"
"ator" "ate"
"alism" "al"
"iveness" "ive"
"fulness" "ful"
"ousness" "ous"
"aliti" "al"
"iviti" "ive"
"biliti" "ble"
"logi" "log"}
step2 (partial apply-suffix-map step2-map)
step3-map {"icate" "ic"
"ative" ""
"alize" "al"
"iciti" "ic"
"ical" "ic"
"ful" ""
"ness" ""}
step3 (partial apply-suffix-map step3-map)
step4-suffixes1 ["al" "ance" "ence" "er" "ic" "able" "ible" "ant" "ement"
"ment" "ent" "ou" "ism" "ate" "iti" "ous" "ive" "ize"]
step4-re1 #"^(.+?)(s|t)(ion)$"
step4 (fn [^String w]
(if-let [stem (first (for [suffix step4-suffixes1
:when (.endsWith w suffix)]
(.substring w 0 (- (count w) (count suffix)))))]
(if (re-find mgr1 stem) stem w)
(if-let [groups (re-find step4-re1 w)]
(let [stem (str (groups 1) (groups 2))]
(if (re-find mgr1 stem) stem w))
w)))
step5-re1 #"^(.+?)e$"
step5-re2 (re-pattern (str "^" cs v "[^aeiouwxy]$"))
step5 (fn [w]
(if-let [[_ stem & _] (re-find step5-re1 w)]
(if (or (re-find mgr1 stem)
(and (re-find meq1 stem) (not (re-find step5-re2 stem))))
stem
w)
w))
step6-re #"ll$"
step6 (fn [w]
(if (and (re-find step6-re w) (re-find mgr1 w))
(apply str (butlast w))
w))
step-y1 (fn [^String w]
(let [firstch (.substring w 0 1)
firstch-y? (= firstch "y")]
[firstch-y? (if firstch-y?
(str "Y" (.substring w 1))
w)]))
step-y2 (fn [firstch-y? ^String w]
(if firstch-y?
(str (.toLowerCase ^String (.substring w 0 1))
(.substring w 1))
w))]
(fn [word]
(if (< (count word) 3)
word
(let [[starts-with-y? w] (step-y1 word)]
(->> w
step1a step1b step1c step2 step3 step4 step5 step6
(step-y2 starts-with-y?)))))))
|
1cb9ccf20c98630fc825fd34d5f981f68740ef12222589b3d93984d9dff15509
|
jordanthayer/ocaml-search
|
meta_run.ml
|
(** - Runs experiments on algorithms across all domains.
    Whenever a new domain is added, meta_run will need to be updated so that
    algorithms are checked against the new domain as well.
    Jordan - August 2009 *)

let drops_duplicates string =
  false
  (*Str.string_match (Str.regexp "dd") (Str.last_chars string 2) 0*)
let do_basic_batch ?(time_limit = (Some Experiments.default_time_limit))
?(node_limit = (Some Experiments.default_node_limit)) ?(overwrite = false)
alg =
Notify.start_metatime();
  (*Msa_runs.do_basic_batch
    ~time_limit:time_limit ~node_limit:node_limit alg;*)
Tiles_runs.do_basic_batch ~models:["korf"]
~time_limit:time_limit ~node_limit:node_limit alg;
  (*Tpl_runs_reg.do_basic_batch
    ~time_limit:time_limit ~node_limit:node_limit alg;*)
Grid_runs.do_basic_batch
~overwrite:overwrite ~time_limit:time_limit ~node_limit:node_limit alg;
Drn_runs.do_basic_batch
~time_limit:time_limit ~node_limit:node_limit alg;
Vacuum_runs.do_basic_batch
~time_limit:time_limit ~node_limit:node_limit alg;
Pancake_runs.do_basic_batch
~time_limit:time_limit ~node_limit:node_limit alg;
Tsp_runs.do_basic_batch
~time_limit:time_limit ~node_limit:node_limit alg;
  (*; Synth_runs.do_basic_batch
    ~time_limit:time_limit ~node_limit:node_limit alg;
    Rucksack_runs.do_basic_batch
    ~node_limit:node_limit ~time_limit:time_limit alg*)
Notify.send_metarun_completed_mail "Basic" alg
let do_wted_batch ?(time_limit = (Some Experiments.default_time_limit))
?(node_limit = (Some Experiments.default_node_limit))
?(weights = Experiments.low_res_weights) ?(overwrite = false) alg =
Notify.start_metatime();
  (*Msa_runs.do_wted_batches
    ~time_limit:time_limit ~node_limit:node_limit ~weights:weights alg;*)
Tiles_runs.do_wted_batch ~models:["korf"] ~overwrite:overwrite
~time_limit:time_limit ~node_limit:node_limit ~weights:weights alg;
Grid_runs.do_wted_batches ~time_limit:time_limit ~node_limit:node_limit
~weights:weights ~overwrite:overwrite alg;
Drn_runs.do_wted_batches ~overwrite:overwrite
~time_limit:time_limit ~node_limit:node_limit ~weights:weights alg;
  if not (drops_duplicates alg)
  then ((*Tpl_runs_reg.do_wted_batch
          ~time_limit:time_limit ~node_limit:node_limit ~weights:weights alg;*)
Vacuum_runs.do_wted_batches ~overwrite:overwrite
~time_limit:time_limit ~node_limit:node_limit alg;
Pancake_runs.do_wted_batches ~overwrite:overwrite
~time_limit:time_limit ~node_limit:node_limit alg;
  Tsp_runs.do_wted_batches ~overwrite:overwrite
    ~time_limit:time_limit ~node_limit:node_limit alg
    (*;
     Synth_runs.do_wted_batches ~overwrite:overwrite
     ~time_limit:time_limit ~node_limit:node_limit alg;
     Rucksack_runs.do_wted_batches
     ~node_limit:node_limit ~time_limit:time_limit alg*));
Notify.send_metarun_completed_mail "Weighted" alg
let do_beam_batch ?(time_limit = (Some Experiments.default_time_limit))
?(node_limit = (Some Experiments.default_node_limit))
?(beam_widths = Experiments.full_beams) ?(overwrite = false) alg =
Notify.start_metatime();
  (*Msa_runs.do_beam_batches
    ~time_limit:time_limit ~node_limit:node_limit
    ~beam_widths:beam_widths alg;*)
Tiles_runs.do_beam_batch
~time_limit:time_limit ~node_limit:node_limit ~beam_widths:beam_widths alg;
Grid_runs.do_beam_batches
~time_limit:time_limit ~node_limit:node_limit ~beam_widths:beam_widths
~overwrite:overwrite alg;
Drn_runs.do_beam_batches
~time_limit:time_limit ~node_limit:node_limit ~beam_widths:beam_widths alg;
Tpl_runs_reg.do_beam_batch
~time_limit:time_limit ~node_limit:node_limit ~beam_widths:beam_widths alg;
Tsp_runs.do_beam_batches
~time_limit:time_limit ~node_limit:node_limit ~beam_widths:beam_widths alg;
Vacuum_runs.do_beam_batches
~time_limit:time_limit ~node_limit:node_limit alg;
  Pancake_runs.do_beam_batches
    ~time_limit:time_limit ~node_limit:node_limit alg
    (*;
     Synth_runs.do_beam_batches
     ~time_limit:time_limit ~node_limit:node_limit alg;
     Rucksack_runs.do_beam_batches
     ~node_limit:node_limit ~time_limit:time_limit alg*)
Notify.send_metarun_completed_mail "Beam" alg
let do_optimistic_batch
?(time_limit = (Some Experiments.default_time_limit))
?(node_limit = (Some Experiments.default_node_limit))
?(weights = Experiments.low_res_weights)
?(optimisms = Experiments.optimisms) ?(overwrite = false) alg =
Notify.start_metatime();
Grid_runs.do_optimistic_batches ~time_limit:time_limit ~node_limit:node_limit
~weights:weights ~opt:optimisms ~overwrite:overwrite alg;
  (*Msa_runs.do_optimistic_batches ~time_limit:time_limit ~node_limit:node_limit
    ~weights:weights ~opt:optimisms alg;*)
Drn_runs.do_optimistic_batches ~time_limit:time_limit ~node_limit:node_limit
~weights:weights ~opt:optimisms alg;
Tiles_runs.do_optimistic_batches ~time_limit:time_limit ~node_limit:node_limit
~weights:weights ~opt:optimisms alg;
  (*Synth_runs.do_optimistic_batches
    ~time_limit:time_limit ~node_limit:node_limit alg;*)
  if not (drops_duplicates alg)
  then ((*Tpl_runs_reg.do_optimistic_batches ~time_limit:time_limit ~node_limit:node_limit
          ~weights:weights ~opt:optimisms alg;*)
Tsp_runs.do_optimistic_batches ~time_limit:time_limit ~node_limit:node_limit
~weights:weights ~opt:optimisms alg;
Vacuum_runs.do_optimistic_batches
~time_limit:time_limit ~node_limit:node_limit alg;
Pancake_runs.do_optimistic_batches
~time_limit:time_limit ~node_limit:node_limit alg
(*;
Rucksack_runs.do_optimistic_batches
~node_limit:node_limit ~time_limit:time_limit alg*));
Notify.send_metarun_completed_mail "Optimistic" alg
EOF
| null |
https://raw.githubusercontent.com/jordanthayer/ocaml-search/57cfc85417aa97ee5d8fbcdb84c333aae148175f/experiments/meta_run.ml
|
ocaml
|
;
Rucksack_runs.do_optimistic_batches
~node_limit:node_limit ~time_limit:time_limit alg
|
(** - Runs experiments on algorithms across all domains.
    Whenever a new domain is added, meta_run will need to be updated so that
    algorithms are checked against the new domain as well.
    Jordan - August 2009 *)

let drops_duplicates string =
  false
  (*Str.string_match (Str.regexp "dd") (Str.last_chars string 2) 0*)
let do_basic_batch ?(time_limit = (Some Experiments.default_time_limit))
?(node_limit = (Some Experiments.default_node_limit)) ?(overwrite = false)
alg =
Notify.start_metatime();
  (*Msa_runs.do_basic_batch
    ~time_limit:time_limit ~node_limit:node_limit alg;*)
Tiles_runs.do_basic_batch ~models:["korf"]
~time_limit:time_limit ~node_limit:node_limit alg;
  (*Tpl_runs_reg.do_basic_batch
    ~time_limit:time_limit ~node_limit:node_limit alg;*)
Grid_runs.do_basic_batch
~overwrite:overwrite ~time_limit:time_limit ~node_limit:node_limit alg;
Drn_runs.do_basic_batch
~time_limit:time_limit ~node_limit:node_limit alg;
Vacuum_runs.do_basic_batch
~time_limit:time_limit ~node_limit:node_limit alg;
Pancake_runs.do_basic_batch
~time_limit:time_limit ~node_limit:node_limit alg;
Tsp_runs.do_basic_batch
~time_limit:time_limit ~node_limit:node_limit alg;
  (*; Synth_runs.do_basic_batch
    ~time_limit:time_limit ~node_limit:node_limit alg;
    Rucksack_runs.do_basic_batch
    ~node_limit:node_limit ~time_limit:time_limit alg*)
Notify.send_metarun_completed_mail "Basic" alg
let do_wted_batch ?(time_limit = (Some Experiments.default_time_limit))
?(node_limit = (Some Experiments.default_node_limit))
?(weights = Experiments.low_res_weights) ?(overwrite = false) alg =
Notify.start_metatime();
  (*Msa_runs.do_wted_batches
    ~time_limit:time_limit ~node_limit:node_limit ~weights:weights alg;*)
Tiles_runs.do_wted_batch ~models:["korf"] ~overwrite:overwrite
~time_limit:time_limit ~node_limit:node_limit ~weights:weights alg;
Grid_runs.do_wted_batches ~time_limit:time_limit ~node_limit:node_limit
~weights:weights ~overwrite:overwrite alg;
Drn_runs.do_wted_batches ~overwrite:overwrite
~time_limit:time_limit ~node_limit:node_limit ~weights:weights alg;
  if not (drops_duplicates alg)
  then ((*Tpl_runs_reg.do_wted_batch
          ~time_limit:time_limit ~node_limit:node_limit ~weights:weights alg;*)
Vacuum_runs.do_wted_batches ~overwrite:overwrite
~time_limit:time_limit ~node_limit:node_limit alg;
Pancake_runs.do_wted_batches ~overwrite:overwrite
~time_limit:time_limit ~node_limit:node_limit alg;
  Tsp_runs.do_wted_batches ~overwrite:overwrite
    ~time_limit:time_limit ~node_limit:node_limit alg
    (*;
     Synth_runs.do_wted_batches ~overwrite:overwrite
     ~time_limit:time_limit ~node_limit:node_limit alg;
     Rucksack_runs.do_wted_batches
     ~node_limit:node_limit ~time_limit:time_limit alg*));
Notify.send_metarun_completed_mail "Weighted" alg
let do_beam_batch ?(time_limit = (Some Experiments.default_time_limit))
?(node_limit = (Some Experiments.default_node_limit))
?(beam_widths = Experiments.full_beams) ?(overwrite = false) alg =
Notify.start_metatime();
  (*Msa_runs.do_beam_batches
    ~time_limit:time_limit ~node_limit:node_limit
    ~beam_widths:beam_widths alg;*)
Tiles_runs.do_beam_batch
~time_limit:time_limit ~node_limit:node_limit ~beam_widths:beam_widths alg;
Grid_runs.do_beam_batches
~time_limit:time_limit ~node_limit:node_limit ~beam_widths:beam_widths
~overwrite:overwrite alg;
Drn_runs.do_beam_batches
~time_limit:time_limit ~node_limit:node_limit ~beam_widths:beam_widths alg;
Tpl_runs_reg.do_beam_batch
~time_limit:time_limit ~node_limit:node_limit ~beam_widths:beam_widths alg;
Tsp_runs.do_beam_batches
~time_limit:time_limit ~node_limit:node_limit ~beam_widths:beam_widths alg;
Vacuum_runs.do_beam_batches
~time_limit:time_limit ~node_limit:node_limit alg;
  Pancake_runs.do_beam_batches
    ~time_limit:time_limit ~node_limit:node_limit alg
    (*;
     Synth_runs.do_beam_batches
     ~time_limit:time_limit ~node_limit:node_limit alg;
     Rucksack_runs.do_beam_batches
     ~node_limit:node_limit ~time_limit:time_limit alg*)
Notify.send_metarun_completed_mail "Beam" alg
let do_optimistic_batch
?(time_limit = (Some Experiments.default_time_limit))
?(node_limit = (Some Experiments.default_node_limit))
?(weights = Experiments.low_res_weights)
?(optimisms = Experiments.optimisms) ?(overwrite = false) alg =
Notify.start_metatime();
Grid_runs.do_optimistic_batches ~time_limit:time_limit ~node_limit:node_limit
~weights:weights ~opt:optimisms ~overwrite:overwrite alg;
  (*Msa_runs.do_optimistic_batches ~time_limit:time_limit ~node_limit:node_limit
    ~weights:weights ~opt:optimisms alg;*)
Drn_runs.do_optimistic_batches ~time_limit:time_limit ~node_limit:node_limit
~weights:weights ~opt:optimisms alg;
Tiles_runs.do_optimistic_batches ~time_limit:time_limit ~node_limit:node_limit
~weights:weights ~opt:optimisms alg;
  (*Synth_runs.do_optimistic_batches
    ~time_limit:time_limit ~node_limit:node_limit alg;*)
  if not (drops_duplicates alg)
  (*Tpl_runs_reg.do_optimistic_batches ~time_limit:time_limit ~node_limit:node_limit
    ~weights:weights ~opt:optimisms alg;*)
Tsp_runs.do_optimistic_batches ~time_limit:time_limit ~node_limit:node_limit
~weights:weights ~opt:optimisms alg;
Vacuum_runs.do_optimistic_batches
~time_limit:time_limit ~node_limit:node_limit alg;
Pancake_runs.do_optimistic_batches
~time_limit:time_limit ~node_limit:node_limit alg
Notify.send_metarun_completed_mail "Optimistic" alg
EOF
|
f03222419d6d7b1a03e5729fd2238a5f0d7ce7d49ac550a545d6a51285f1ac36
|
funcool/rumext
|
core.cljs
|
(ns rumext.examples.core
(:require
;; [clojure.string :as str]
;; [goog.dom :as dom]
[rumext.alpha :as mf]
;; [rumext.examples.util :as util]
[rumext.examples.binary-clock :as binary-clock]
[rumext.examples.timer-reactive :as timer-reactive]
[rumext.examples.local-state :as local-state]
[rumext.examples.refs :as refs]
[rumext.examples.controls :as controls]
;; [rumext.examples.errors :as errors]
[rumext.examples.board :as board]
;; [rumext.examples.portals :as portals]
))
(enable-console-print!)
(binary-clock/mount!)
(timer-reactive/mount!)
(local-state/mount!)
(refs/mount!)
(controls/mount!)
(board/mount!)
(defn main
[& args]
(js/console.log "main" args))
| null |
https://raw.githubusercontent.com/funcool/rumext/530a7ef960a6802961356ee6bf5bddea92cdbada/examples/rumext/examples/core.cljs
|
clojure
|
[clojure.string :as str]
[goog.dom :as dom]
[rumext.examples.util :as util]
[rumext.examples.errors :as errors]
[rumext.examples.portals :as portals]
|
(ns rumext.examples.core
(:require
[rumext.alpha :as mf]
[rumext.examples.binary-clock :as binary-clock]
[rumext.examples.timer-reactive :as timer-reactive]
[rumext.examples.local-state :as local-state]
[rumext.examples.refs :as refs]
[rumext.examples.controls :as controls]
[rumext.examples.board :as board]
))
(enable-console-print!)
(binary-clock/mount!)
(timer-reactive/mount!)
(local-state/mount!)
(refs/mount!)
(controls/mount!)
(board/mount!)
(defn main
[& args]
(js/console.log "main" args))
|
1d1d75c2425ec85ccf03fd4c3bd6b757e13533f9f23f3db047a7cd8d346d77cf
|
kowainik/tomland
|
Integer.hs
|
module Test.Toml.Parser.Integer
( integerSpecs
) where
import Test.Hspec (Spec, context, describe, it)
import Test.Toml.Parser.Common (integerFailOn, parseInteger)
integerSpecs :: Spec
integerSpecs = describe "integerP" $ do
context "when the integer is in decimal representation" $ do
it "can parse positive integer numbers" $ do
parseInteger "10" 10
parseInteger "+3" 3
parseInteger "0" 0
it "can parse negative integer numbers" $
parseInteger "-123" (-123)
it "can parse sign-prefixed zero as an unprefixed zero" $ do
parseInteger "+0" 0
parseInteger "-0" 0
it "can parse both the minimum and maximum numbers in the 64 bit range" $ do
parseInteger "-9223372036854775808" (-9223372036854775808)
parseInteger "9223372036854775807" 9223372036854775807
it "can parse numbers with underscores between digits" $ do
parseInteger "1_000" 1000
parseInteger "5_349_221" 5349221
parseInteger "1_2_3_4_5" 12345
it "does not parse incorrect underscores" $ do
integerFailOn "1_2_3_"
integerFailOn "13_"
integerFailOn "_123_"
integerFailOn "_13"
integerFailOn "_"
it "does not parse numbers with leading zeros" $ do
integerFailOn "0123"
integerFailOn "00123"
integerFailOn "-023"
integerFailOn "-0023"
context "when the integer is in binary representation" $ do
it "can parse numbers prefixed with `0b`" $ do
parseInteger "0b1101" 13
parseInteger "0b0" 0
it "does not parse numbers prefixed with `0B`" $
parseInteger "0B1101" 0
it "can parse numbers with leading zeros after the prefix" $ do
parseInteger "0b000" 0
parseInteger "0b00011" 3
it "does not parse negative numbers" $
parseInteger "-0b101" 0
it "does not parse numbers with non-valid binary digits" $
parseInteger "0b123" 1
context "when the integer is in octal representation" $ do
it "can parse numbers prefixed with `0o`" $ do
parseInteger "0o567" 0o567
parseInteger "0o0" 0
it "does not parse numbers prefixed with `0O`" $
parseInteger "0O567" 0
it "can parse numbers with leading zeros after the prefix" $ do
parseInteger "0o000000" 0
parseInteger "0o000567" 0o567
it "does not parse negative numbers" $
parseInteger "-0o123" 0
it "does not parse numbers with non-valid octal digits" $
parseInteger "0o789" 0o7
context "when the integer is in hexadecimal representation" $ do
it "can parse numbers prefixed with `0x`" $ do
parseInteger "0x12af" 0x12af
parseInteger "0x0" 0
it "does not parse numbers prefixed with `0X`" $
parseInteger "0Xfff" 0
it "can parse numbers with leading zeros after the prefix" $ do
parseInteger "0x00000" 0
parseInteger "0x012af" 0x12af
it "does not parse negative numbers" $
parseInteger "-0xfff" 0
it "does not parse numbers with non-valid hexadecimal digits" $
parseInteger "0xfgh" 0xf
it "can parse numbers when hex digits are lowercase" $
parseInteger "0xabcdef" 0xabcdef
it "can parse numbers when hex digits are uppercase" $
parseInteger "0xABCDEF" 0xABCDEF
it "can parse numbers when hex digits are in both lowercase and uppercase" $ do
parseInteger "0xAbCdEf" 0xAbCdEf
parseInteger "0xaBcDeF" 0xaBcDeF
context "when there is underscore in hexadecimal, octal and binary representation" $ do
it "can parse numbers with underscore in hexadecimal representation" $ do
parseInteger "0xAb_Cd_Ef" 0xabcdef
parseInteger "0xA_bcd_ef" 0xabcdef
parseInteger "0x123_abc" 0x123abc
parseInteger "0xa_b_c_1_2_3" 0xabc123
it "can't parse when underscore is between hexadecimal prefix and suffix" $ do
integerFailOn "0x_Abab_ca"
integerFailOn "0x_ababbac"
it "can parse numbers with underscore in octal representation" $ do
parseInteger "0o12_34_56" 0o123456
parseInteger "0o1_2345_6" 0o123456
parseInteger "0o76_54_21" 0o765421
parseInteger "0o4_5_3_2_6" 0o45326
it "can't parse when underscore is between octal prefix and suffix" $ do
integerFailOn "0o_123_4567"
integerFailOn "0o_1234567"
it "can parse numbers with underscore in binary representation" $ do
parseInteger "0b10_101_0" 42
parseInteger "0b10_10_10" 42
parseInteger "0b1_0_1" 5
parseInteger "0b1_0" 2
it "can't parse numbers when underscore is between binary prefix and suffix" $ do
integerFailOn "0b_10101_0"
integerFailOn "0b_101010"
it "doesn't parse underscore not followed by any numbers" $ do
integerFailOn "0b_"
integerFailOn "0o_"
integerFailOn "0x_"
it "doesn't parse when number is ending with underscore" $ do
integerFailOn "0b101_110_"
integerFailOn "0b10101_"
integerFailOn "0x1_23_daf_"
integerFailOn "0x1214adf_"
integerFailOn "0o1_15_41_"
integerFailOn "0o1215147_"
| null |
https://raw.githubusercontent.com/kowainik/tomland/2b4bcc465b79873a61bccfc7131d423a9a0aec1d/test/Test/Toml/Parser/Integer.hs
|
haskell
|
module Test.Toml.Parser.Integer
( integerSpecs
) where
import Test.Hspec (Spec, context, describe, it)
import Test.Toml.Parser.Common (integerFailOn, parseInteger)
integerSpecs :: Spec
integerSpecs = describe "integerP" $ do
context "when the integer is in decimal representation" $ do
it "can parse positive integer numbers" $ do
parseInteger "10" 10
parseInteger "+3" 3
parseInteger "0" 0
it "can parse negative integer numbers" $
parseInteger "-123" (-123)
it "can parse sign-prefixed zero as an unprefixed zero" $ do
parseInteger "+0" 0
parseInteger "-0" 0
it "can parse both the minimum and maximum numbers in the 64 bit range" $ do
parseInteger "-9223372036854775808" (-9223372036854775808)
parseInteger "9223372036854775807" 9223372036854775807
it "can parse numbers with underscores between digits" $ do
parseInteger "1_000" 1000
parseInteger "5_349_221" 5349221
parseInteger "1_2_3_4_5" 12345
it "does not parse incorrect underscores" $ do
integerFailOn "1_2_3_"
integerFailOn "13_"
integerFailOn "_123_"
integerFailOn "_13"
integerFailOn "_"
it "does not parse numbers with leading zeros" $ do
integerFailOn "0123"
integerFailOn "00123"
integerFailOn "-023"
integerFailOn "-0023"
context "when the integer is in binary representation" $ do
it "can parse numbers prefixed with `0b`" $ do
parseInteger "0b1101" 13
parseInteger "0b0" 0
it "does not parse numbers prefixed with `0B`" $
parseInteger "0B1101" 0
it "can parse numbers with leading zeros after the prefix" $ do
parseInteger "0b000" 0
parseInteger "0b00011" 3
it "does not parse negative numbers" $
parseInteger "-0b101" 0
it "does not parse numbers with non-valid binary digits" $
parseInteger "0b123" 1
context "when the integer is in octal representation" $ do
it "can parse numbers prefixed with `0o`" $ do
parseInteger "0o567" 0o567
parseInteger "0o0" 0
it "does not parse numbers prefixed with `0O`" $
parseInteger "0O567" 0
it "can parse numbers with leading zeros after the prefix" $ do
parseInteger "0o000000" 0
parseInteger "0o000567" 0o567
it "does not parse negative numbers" $
parseInteger "-0o123" 0
it "does not parse numbers with non-valid octal digits" $
parseInteger "0o789" 0o7
context "when the integer is in hexadecimal representation" $ do
it "can parse numbers prefixed with `0x`" $ do
parseInteger "0x12af" 0x12af
parseInteger "0x0" 0
it "does not parse numbers prefixed with `0X`" $
parseInteger "0Xfff" 0
it "can parse numbers with leading zeros after the prefix" $ do
parseInteger "0x00000" 0
parseInteger "0x012af" 0x12af
it "does not parse negative numbers" $
parseInteger "-0xfff" 0
it "does not parse numbers with non-valid hexadecimal digits" $
parseInteger "0xfgh" 0xf
it "can parse numbers when hex digits are lowercase" $
parseInteger "0xabcdef" 0xabcdef
it "can parse numbers when hex digits are uppercase" $
parseInteger "0xABCDEF" 0xABCDEF
it "can parse numbers when hex digits are in both lowercase and uppercase" $ do
parseInteger "0xAbCdEf" 0xAbCdEf
parseInteger "0xaBcDeF" 0xaBcDeF
context "when there is underscore in hexadecimal, octal and binary representation" $ do
it "can parse numbers with underscore in hexadecimal representation" $ do
parseInteger "0xAb_Cd_Ef" 0xabcdef
parseInteger "0xA_bcd_ef" 0xabcdef
parseInteger "0x123_abc" 0x123abc
parseInteger "0xa_b_c_1_2_3" 0xabc123
it "can't parse when underscore is between hexadecimal prefix and suffix" $ do
integerFailOn "0x_Abab_ca"
integerFailOn "0x_ababbac"
it "can parse numbers with underscore in octal representation" $ do
parseInteger "0o12_34_56" 0o123456
parseInteger "0o1_2345_6" 0o123456
parseInteger "0o76_54_21" 0o765421
parseInteger "0o4_5_3_2_6" 0o45326
it "can't parse when underscore is between octal prefix and suffix" $ do
integerFailOn "0o_123_4567"
integerFailOn "0o_1234567"
it "can parse numbers with underscore in binary representation" $ do
parseInteger "0b10_101_0" 42
parseInteger "0b10_10_10" 42
parseInteger "0b1_0_1" 5
parseInteger "0b1_0" 2
it "can't parse numbers when underscore is between binary prefix and suffix" $ do
integerFailOn "0b_10101_0"
integerFailOn "0b_101010"
it "doesn't parse underscore not followed by any numbers" $ do
integerFailOn "0b_"
integerFailOn "0o_"
integerFailOn "0x_"
it "doesn't parse when number is ending with underscore" $ do
integerFailOn "0b101_110_"
integerFailOn "0b10101_"
integerFailOn "0x1_23_daf_"
integerFailOn "0x1214adf_"
integerFailOn "0o1_15_41_"
integerFailOn "0o1215147_"
|
|
467b0e311fae2b275216c1c91884f22599e32fa5c753499c56496b9e5040e323
|
andersfugmann/ppx_protocol_conv
|
test_arrays.ml
|
open Sexplib.Std
module Make(Driver: Testable.Driver) = struct
module M = Testable.Make(Driver)
module EmptyArray : M.Testable = struct
let name = "SingleElem"
type t = int array
[@@deriving protocol ~driver:(module Driver), sexp]
let t = [||]
end
module Singleton : M.Testable = struct
let name = "SingleElem"
type t = int array
[@@deriving protocol ~driver:(module Driver), sexp]
let t = [|2|]
end
module LongArray : M.Testable = struct
let name = "Longarray"
type t = int array
[@@deriving protocol ~driver:(module Driver), sexp]
let t = [|4; 2; 3; 1|]
end
module EmptyInsideRec : M.Testable = struct
let name = "EmptyInsideRec"
type v = int [@key "A"]
and t = { a : string;
b : v array; [@key "V"]
c : string;
}
[@@deriving protocol ~driver:(module Driver), sexp]
let t = { a= "a"; b = [||]; c = "c" }
end
module SingleInsideRec : M.Testable = struct
let name = "SingleInsideRec"
type v = int [@key "A"]
and t = { a : string;
b : v array; [@key "V"]
c : string;
}
[@@deriving protocol ~driver:(module Driver), sexp]
let t = { a= "a"; b = [|2|]; c = "c" }
end
module MultiInsideRec : M.Testable = struct
let name = "MultiInsideRec"
type v = int [@key "A"]
and t = { a : string;
b : v array; [@key "V"]
c : string;
}
[@@deriving protocol ~driver:(module Driver), sexp]
let t = { a= "a"; b = [|4; 2; 3; 1|]; c = "c" }
end
module ArrayOfArrays : M.Testable = struct
let name = "ArrayOfArrays"
type v = int array
and t = { a : v array; }
[@@deriving protocol ~driver:(module Driver), sexp]
let t = { a = [| [|2;3|]; [|4;5|] |] }
end
module ArrayOfArrays2 : M.Testable = struct
let name = "ArrayOfArrays2"
type t = int array array array
[@@deriving protocol ~driver:(module Driver), sexp]
let t = [| [||]; [| [||]; [|2|]; [|3;4|]; |]; [| [||] |]; [| [|2|] |]; |]
end
let unittest = __MODULE__, [
M.test (module EmptyArray);
M.test (module Singleton);
M.test (module LongArray);
M.test (module EmptyInsideRec);
M.test (module SingleInsideRec);
M.test (module MultiInsideRec);
M.test (module ArrayOfArrays);
M.test (module ArrayOfArrays2);
]
end
| null |
https://raw.githubusercontent.com/andersfugmann/ppx_protocol_conv/e93eb01ca8ba8c7dd734070316cd281a199dee0d/test/test_arrays.ml
|
ocaml
|
open Sexplib.Std
module Make(Driver: Testable.Driver) = struct
module M = Testable.Make(Driver)
module EmptyArray : M.Testable = struct
let name = "SingleElem"
type t = int array
[@@deriving protocol ~driver:(module Driver), sexp]
let t = [||]
end
module Singleton : M.Testable = struct
let name = "SingleElem"
type t = int array
[@@deriving protocol ~driver:(module Driver), sexp]
let t = [|2|]
end
module LongArray : M.Testable = struct
let name = "Longarray"
type t = int array
[@@deriving protocol ~driver:(module Driver), sexp]
let t = [|4; 2; 3; 1|]
end
module EmptyInsideRec : M.Testable = struct
let name = "EmptyInsideRec"
type v = int [@key "A"]
and t = { a : string;
b : v array; [@key "V"]
c : string;
}
[@@deriving protocol ~driver:(module Driver), sexp]
let t = { a= "a"; b = [||]; c = "c" }
end
module SingleInsideRec : M.Testable = struct
let name = "SingleInsideRec"
type v = int [@key "A"]
and t = { a : string;
b : v array; [@key "V"]
c : string;
}
[@@deriving protocol ~driver:(module Driver), sexp]
let t = { a= "a"; b = [|2|]; c = "c" }
end
module MultiInsideRec : M.Testable = struct
let name = "MultiInsideRec"
type v = int [@key "A"]
and t = { a : string;
b : v array; [@key "V"]
c : string;
}
[@@deriving protocol ~driver:(module Driver), sexp]
let t = { a= "a"; b = [|4; 2; 3; 1|]; c = "c" }
end
module ArrayOfArrays : M.Testable = struct
let name = "ArrayOfArrays"
type v = int array
and t = { a : v array; }
[@@deriving protocol ~driver:(module Driver), sexp]
let t = { a = [| [|2;3|]; [|4;5|] |] }
end
module ArrayOfArrays2 : M.Testable = struct
let name = "ArrayOfArrays2"
type t = int array array array
[@@deriving protocol ~driver:(module Driver), sexp]
let t = [| [||]; [| [||]; [|2|]; [|3;4|]; |]; [| [||] |]; [| [|2|] |]; |]
end
let unittest = __MODULE__, [
M.test (module EmptyArray);
M.test (module Singleton);
M.test (module LongArray);
M.test (module EmptyInsideRec);
M.test (module SingleInsideRec);
M.test (module MultiInsideRec);
M.test (module ArrayOfArrays);
M.test (module ArrayOfArrays2);
]
end
|
|
b413198daf1e8fae159717fa32d3006550f780952b3b1475683214875de90a9c
|
gotthardp/lorawan-server
|
lorawan_connector_http.erl
|
%
% Copyright (c) 2016-2019 < >
% All rights reserved.
% Distributed under the terms of the MIT License. See the LICENSE file.
%
-module(lorawan_connector_http).
-behaviour(gen_server).
-export([start_connector/1, stop_connector/1]).
-export([start_link/1]).
-export([init/1, handle_call/3, handle_cast/2, handle_info/2, terminate/2, code_change/3]).
-include("lorawan_db.hrl").
-record(state, {conn, pid, mref, ready, streams, prefix, publish_uplinks, publish_events, auth, nc}).
start_connector(#connector{connid=Id, received=Received}=Connector) ->
case lorawan_connector:pattern_for_cowboy(Received) of
undefined ->
ok;
error ->
lorawan_connector:raise_failed(Id, {badarg, Received});
Pattern ->
lorawan_http_registry:update({http, Id},
#{routes => [{Pattern, lorawan_connector_http_in, [Connector]}]})
end,
lorawan_connector_sup:start_child(Id, ?MODULE, [Connector]).
stop_connector(Id) ->
lorawan_http_registry:delete({http, Id}),
lorawan_connector_sup:stop_child(Id).
start_link(Connector) ->
gen_server:start_link(?MODULE, [Connector], []).
init([#connector{connid=Id, app=App,
publish_uplinks=PubUp, publish_events=PubEv, name=UserName, pass=Password}=Conn]) ->
ok = pg2:join({backend, App}, self()),
try
{ok, ensure_gun(
#state{conn=Conn,
publish_uplinks=lorawan_connector:prepare_filling(PubUp),
publish_events=lorawan_connector:prepare_filling(PubEv),
auth=lorawan_connector:prepare_filling([UserName, Password]),
nc=1})}
catch
_:Error ->
lorawan_connector:raise_failed(Id, Error),
{stop, shutdown}
end.
handle_call(_Request, _From, State) ->
{reply, {error, unknownmsg}, State}.
handle_cast(_Msg, State) ->
{noreply, State}.
handle_info(nodes_changed, State) ->
% nothing to do here
{noreply, State};
handle_info({uplink, _Node, _Vars0}, #state{publish_uplinks=PatPub}=State)
when PatPub == undefined; PatPub == ?EMPTY_PATTERN ->
{noreply, State};
handle_info({uplink, _Node, Vars0}, #state{conn=Conn}=State) ->
case ensure_connected(ensure_gun(State)) of
{ok, State2} ->
{noreply, handle_uplinks(Vars0, State2)};
{error, State2} ->
lager:warning("Connector ~p not connected, uplink lost", [Conn#connector.connid]),
{noreply, State2}
end;
handle_info({event, _Node, _Vars0}, #state{publish_events=PatPub}=State)
when PatPub == undefined; PatPub == ?EMPTY_PATTERN ->
{noreply, State};
handle_info({event, _Node, Vars0}, #state{conn=Conn}=State) ->
case ensure_connected(ensure_gun(State)) of
{ok, State2} ->
{noreply, handle_event(Vars0, State2)};
{error, State2} ->
lager:warning("Connector ~p not connected, event lost", [Conn#connector.connid]),
{noreply, State2}
end;
handle_info({gun_up, C, _Proto}, State=#state{pid=C}) ->
{noreply, State#state{ready=true}};
handle_info({gun_down, C, _Proto, _Reason, Killed, Unprocessed},
State=#state{pid=C, streams=Streams}) ->
{noreply, State#state{ready=false, streams=remove_list(remove_list(Streams, Killed), Unprocessed)}};
handle_info({gun_response, C, StreamRef, Fin, 401, Headers},
State=#state{pid=C, streams=Streams}) ->
State3 =
case proplists:get_value(<<"www-authenticate">>, Headers) of
undefined ->
lager:warning("HTTP request failed: 401"),
State;
WWWAuthenticate ->
{URI, Auth, Headers, Body} = maps:get(StreamRef, Streams),
case handle_authenticate([digest, basic], URI, Auth, Body,
cow_http_hd:parse_www_authenticate(WWWAuthenticate), State) of
{[], State2} ->
lager:warning("Authentication failed: ~p", [WWWAuthenticate]),
State2;
{Auth2, State2} ->
do_publish({URI, authenticated, Headers++Auth2, Body}, State2)
end
end,
{noreply, fin_stream(StreamRef, Fin, State3)};
handle_info({gun_response, C, StreamRef, Fin, Status, Headers},
State=#state{pid = C, streams = Streams, conn = #connector{uri = Uri}}) ->
if
Status < 300 ->
ok;
Status < 400 ->
case proplists:get_value(<<"location">>, Headers) of
undefined ->
lager:warning("Bad HTTP redirection: location header missing");
URI2 ->
{_, Auth, ReqHeaders, Body} = maps:get(StreamRef, Streams),
do_publish({URI2, Auth, ReqHeaders, Body}, State)
end;
true ->
{Path, _, _, _} = maps:get(StreamRef, Streams),
lager:debug("HTTP request failed: ~p, ~p", [Status, {Uri, Path}]),
lorawan_utils:throw_warning(connector_http, {http_error, {Status, Uri, Path}})
end,
{noreply, fin_stream(StreamRef, Fin, State)};
handle_info({gun_data, C, StreamRef, Fin, _Data}, State=#state{pid=C}) ->
{noreply, fin_stream(StreamRef, Fin, State)};
handle_info({'DOWN', _MRef, process, C, Reason}, #state{conn=Conn, pid=C}=State) ->
lager:warning("Connector ~s failed: ~p", [Conn#connector.connid, Reason]),
{noreply, State#state{pid=undefined}};
handle_info({status, From}, #state{conn=#connector{uri= <<"http:">>}, pid=undefined}=State) ->
From ! {status, []},
{noreply, State};
handle_info({status, From}, #state{conn=#connector{connid=Id, app=App, uri=Uri}}=State) ->
From ! {status, [
set_status(State,
#{module => <<"http">>, pid => lorawan_connector:pid_to_binary(self()),
connid => Id, app => App, uri => Uri})]},
{noreply, State};
handle_info(Unknown, State) ->
lager:debug("Unknown message: ~p", [Unknown]),
{noreply, State}.
terminate(normal, #state{conn=#connector{connid=ConnId}, pid=C}) ->
lager:debug("Connector ~s terminated: normal", [ConnId]),
disconnect(C);
terminate(Reason, #state{conn=#connector{connid=ConnId}, pid=C}) ->
lager:warning("Connector ~s terminated: ~p", [ConnId, Reason]),
disconnect(C).
code_change(_OldVsn, State, _Extra) ->
{ok, State}.
ensure_gun(#state{pid=Pid}=State) when is_pid(Pid) ->
% is running
State;
ensure_gun(#state{conn=#connector{uri= <<"http:">>}, pid=undefined}=State) ->
% should not be running
State;
ensure_gun(#state{conn=#connector{connid=ConnId, uri=Uri}, pid=undefined}=State) ->
lager:debug("Connecting ~s to ~s", [ConnId, Uri]),
{ConnPid, Prefix} =
case http_uri:parse(binary_to_list(Uri), [{scheme_defaults, [{http, 80}, {https, 443}]}]) of
{ok, {http, _UserInfo, HostName, Port, Path, _Query}} ->
{ok, Pid} = gun:open(HostName, Port),
{Pid, Path};
{ok, {https, _UserInfo, HostName, Port, Path, _Query}} ->
Opts = application:get_env(lorawan_server, ssl_options, []),
{ok, Pid} = gun:open(HostName, Port, #{transport=>ssl, transport_opts=>Opts}),
{Pid, Path}
end,
MRef = monitor(process, ConnPid),
State#state{pid=ConnPid, mref=MRef, ready=false, streams=#{}, prefix=Prefix}.
ensure_connected(#state{ready=true}=State) ->
{ok, State};
ensure_connected(#state{pid=undefined, ready=false}=State) ->
{error, State};
ensure_connected(#state{conn=Conn, pid=ConnPid, mref=MRef, ready=false}=State) ->
case gun:await_up(ConnPid, MRef) of
{ok, _Protocol} ->
{ok, State#state{ready=true}};
{error, Reason} ->
lager:debug("~s failed to connect: ~p", [Conn#connector.connid, Reason]),
{error, State}
end.
disconnect(undefined) ->
ok;
disconnect(ConnPid) ->
gun:close(ConnPid).
handle_uplinks(Vars0, State) when is_list(Vars0) ->
lists:foldl(
fun(V0, S) -> handle_uplink(V0, S) end,
State, Vars0);
handle_uplinks(Vars0, State) ->
handle_uplink(Vars0, State).
handle_uplink(Vars0, #state{conn=#connector{format=Format}, publish_uplinks=Publish}=State) ->
{ContentType, Body} = encode_uplink(Format, Vars0),
send_publish(lorawan_admin:build(Vars0), Publish, ContentType, Body, State).
handle_event(Vars0, #state{publish_events=Publish}=State) ->
Vars = lorawan_admin:build(Vars0),
send_publish(Vars, Publish, <<"application/json">>, jsx:encode(Vars), State).
send_publish(Vars, Publish, ContentType, Body, #state{conn=Conn, prefix=Prefix, auth=AuthP}=State) ->
URI = binary:list_to_bin([Prefix | lorawan_connector:fill_pattern(Publish, Vars)]),
[User, Pass] = lorawan_connector:fill_pattern(AuthP, Vars),
case Conn of
#connector{auth = <<"token">>} ->
do_publish({URI, authenticated, [{<<"content-type">>, ContentType}, {User, Pass}], Body}, State);
#connector{} ->
do_publish({URI, [User, Pass], [{<<"content-type">>, ContentType}], Body}, State)
end.
do_publish({URI, _Auth, Headers, Body}=Msg, State=#state{pid=C, streams=Streams}) ->
StreamRef = gun:post(C, URI, Headers, Body),
State#state{streams=maps:put(StreamRef, Msg, Streams)}.
fin_stream(StreamRef, fin, State=#state{streams=Streams}) ->
State#state{streams=maps:remove(StreamRef, Streams)};
fin_stream(_StreamRef, nofin, State) ->
State.
remove_list(Map, List) ->
lists:foldl(
fun(Item, Map2) -> maps:remove(Item, Map2) end,
Map, List).
encode_uplink(<<"raw">>, Vars) ->
{<<"application/octet-stream">>, maps:get(data, Vars, <<>>)};
encode_uplink(<<"json">>, Vars) ->
{<<"application/json">>, jsx:encode(lorawan_admin:build(Vars))};
encode_uplink(<<"www-form">>, Vars) ->
{<<"application/x-www-form-urlencoded">>, lorawan_connector:form_encode(Vars)}.
handle_authenticate(_, _, authenticated, _, _, State) ->
{[], State};
handle_authenticate([Scheme | Rest], URI, Auth, Body, WWWAuthenticate, State) ->
case proplists:get_value(Scheme, WWWAuthenticate) of
undefined ->
handle_authenticate(Rest, URI, Auth, Body, WWWAuthenticate, State);
Value ->
handle_authenticate0(Scheme, Value, URI, Auth, Body, State)
end;
handle_authenticate([], _, _, _, _, State) ->
{[], State}.
handle_authenticate0(_, _, _URI, [Name, Pass], _, State)
when Name == undefined; Pass == undefined ->
lager:error("No credentials for HTTP authentication"),
{[], State};
handle_authenticate0(basic, _, _, [Name, Pass], _, State) ->
Cred = base64:encode(<<Name/binary, $:, Pass/binary>>),
{[lorawan_http_digest:authorization_header(basic, Cred)], State};
handle_authenticate0(digest, Value, URI, [Name, Pass], Body, State=#state{nc=Nc0}) ->
Realm = proplists:get_value(<<"realm">>, Value, <<>>),
Nonce = proplists:get_value(<<"nonce">>, Value, <<>>),
Opaque = proplists:get_value(<<"opaque">>, Value, <<>>),
case proplists:get_value(<<"qop">>, Value) of
undefined ->
Response = lorawan_http_digest:response(<<"POST">>, URI, Body, {Name, Realm, Pass}, Nonce),
{[lorawan_http_digest:authorization_header(digest, [{<<"username">>, Name}, {<<"realm">>, Realm},
{<<"nonce">>, Nonce}, {<<"uri">>, URI}, {<<"algorithm">>, <<"MD5">>},
{<<"response">>, Response}, {<<"opaque">>, Opaque}])], State};
Qop0 ->
[Qop|_] = binary:split(Qop0, [<<",">>], [global]),
Nc = lorawan_http_digest:nc(Nc0),
CNonce = lorawan_http_digest:nonce(4),
Response = lorawan_http_digest:response(<<"POST">>, URI, Body, {Name, Realm, Pass}, Nonce, Nc, CNonce, Qop),
{[lorawan_http_digest:authorization_header(digest, [{<<"username">>, Name}, {<<"realm">>, Realm},
{<<"nonce">>, Nonce}, {<<"uri">>, URI}, {<<"algorithm">>, <<"MD5">>},
{<<"response">>, Response}, {<<"opaque">>, Opaque}, {<<"qop">>, Qop},
{<<"nc">>, Nc}, {<<"cnonce">>, CNonce}])], State#state{nc=Nc0+1}}
end.
set_status(#state{pid=Pid, ready=true}, Map) when is_pid(Pid) ->
Map#{status => <<"connected">>};
set_status(#state{pid=Pid, ready=false}, Map) when is_pid(Pid) ->
Map#{status => <<"connecting">>};
set_status(#state{pid=undefined}, Map) ->
Map#{status => <<"disconnected">>}.
% end of file
| null |
https://raw.githubusercontent.com/gotthardp/lorawan-server/1a5c0f2f4d8238a54a326c57a5eb0e6015821b1d/src/lorawan_connector_http.erl
|
erlang
|
All rights reserved.
nothing to do here
is running
should not be running
end of file
|
-module(lorawan_connector_http).
-behaviour(gen_server).
-export([start_connector/1, stop_connector/1]).
-export([start_link/1]).
-export([init/1, handle_call/3, handle_cast/2, handle_info/2, terminate/2, code_change/3]).
-include("lorawan_db.hrl").
-record(state, {conn, pid, mref, ready, streams, prefix, publish_uplinks, publish_events, auth, nc}).
start_connector(#connector{connid=Id, received=Received}=Connector) ->
case lorawan_connector:pattern_for_cowboy(Received) of
undefined ->
ok;
error ->
lorawan_connector:raise_failed(Id, {badarg, Received});
Pattern ->
lorawan_http_registry:update({http, Id},
#{routes => [{Pattern, lorawan_connector_http_in, [Connector]}]})
end,
lorawan_connector_sup:start_child(Id, ?MODULE, [Connector]).
stop_connector(Id) ->
lorawan_http_registry:delete({http, Id}),
lorawan_connector_sup:stop_child(Id).
start_link(Connector) ->
gen_server:start_link(?MODULE, [Connector], []).
init([#connector{connid=Id, app=App,
publish_uplinks=PubUp, publish_events=PubEv, name=UserName, pass=Password}=Conn]) ->
ok = pg2:join({backend, App}, self()),
try
{ok, ensure_gun(
#state{conn=Conn,
publish_uplinks=lorawan_connector:prepare_filling(PubUp),
publish_events=lorawan_connector:prepare_filling(PubEv),
auth=lorawan_connector:prepare_filling([UserName, Password]),
nc=1})}
catch
_:Error ->
lorawan_connector:raise_failed(Id, Error),
{stop, shutdown}
end.
handle_call(_Request, _From, State) ->
{reply, {error, unknownmsg}, State}.
handle_cast(_Msg, State) ->
{noreply, State}.
handle_info(nodes_changed, State) ->
{noreply, State};
handle_info({uplink, _Node, _Vars0}, #state{publish_uplinks=PatPub}=State)
when PatPub == undefined; PatPub == ?EMPTY_PATTERN ->
{noreply, State};
handle_info({uplink, _Node, Vars0}, #state{conn=Conn}=State) ->
case ensure_connected(ensure_gun(State)) of
{ok, State2} ->
{noreply, handle_uplinks(Vars0, State2)};
{error, State2} ->
lager:warning("Connector ~p not connected, uplink lost", [Conn#connector.connid]),
{noreply, State2}
end;
handle_info({event, _Node, _Vars0}, #state{publish_events=PatPub}=State)
when PatPub == undefined; PatPub == ?EMPTY_PATTERN ->
{noreply, State};
handle_info({event, _Node, Vars0}, #state{conn=Conn}=State) ->
case ensure_connected(ensure_gun(State)) of
{ok, State2} ->
{noreply, handle_event(Vars0, State2)};
{error, State2} ->
lager:warning("Connector ~p not connected, event lost", [Conn#connector.connid]),
{noreply, State2}
end;
handle_info({gun_up, C, _Proto}, State=#state{pid=C}) ->
{noreply, State#state{ready=true}};
handle_info({gun_down, C, _Proto, _Reason, Killed, Unprocessed},
State=#state{pid=C, streams=Streams}) ->
{noreply, State#state{ready=false, streams=remove_list(remove_list(Streams, Killed), Unprocessed)}};
handle_info({gun_response, C, StreamRef, Fin, 401, Headers},
State=#state{pid=C, streams=Streams}) ->
State3 =
case proplists:get_value(<<"www-authenticate">>, Headers) of
undefined ->
lager:warning("HTTP request failed: 401"),
State;
WWWAuthenticate ->
{URI, Auth, Headers, Body} = maps:get(StreamRef, Streams),
case handle_authenticate([digest, basic], URI, Auth, Body,
cow_http_hd:parse_www_authenticate(WWWAuthenticate), State) of
{[], State2} ->
lager:warning("Authentication failed: ~p", [WWWAuthenticate]),
State2;
{Auth2, State2} ->
do_publish({URI, authenticated, Headers++Auth2, Body}, State2)
end
end,
{noreply, fin_stream(StreamRef, Fin, State3)};
handle_info({gun_response, C, StreamRef, Fin, Status, Headers},
State=#state{pid = C, streams = Streams, conn = #connector{uri = Uri}}) ->
if
Status < 300 ->
ok;
Status < 400 ->
case proplists:get_value(<<"location">>, Headers) of
undefined ->
lager:warning("Bad HTTP redirection: location header missing");
URI2 ->
{_, Auth, ReqHeaders, Body} = maps:get(StreamRef, Streams),
do_publish({URI2, Auth, ReqHeaders, Body}, State)
end;
true ->
{Path, _, _, _} = maps:get(StreamRef, Streams),
lager:debug("HTTP request failed: ~p, ~p", [Status, {Uri, Path}]),
lorawan_utils:throw_warning(connector_http, {http_error, {Status, Uri, Path}})
end,
{noreply, fin_stream(StreamRef, Fin, State)};
handle_info({gun_data, C, StreamRef, Fin, _Data}, State=#state{pid=C}) ->
{noreply, fin_stream(StreamRef, Fin, State)};
handle_info({'DOWN', _MRef, process, C, Reason}, #state{conn=Conn, pid=C}=State) ->
lager:warning("Connector ~s failed: ~p", [Conn#connector.connid, Reason]),
{noreply, State#state{pid=undefined}};
handle_info({status, From}, #state{conn=#connector{uri= <<"http:">>}, pid=undefined}=State) ->
From ! {status, []},
{noreply, State};
handle_info({status, From}, #state{conn=#connector{connid=Id, app=App, uri=Uri}}=State) ->
From ! {status, [
set_status(State,
#{module => <<"http">>, pid => lorawan_connector:pid_to_binary(self()),
connid => Id, app => App, uri => Uri})]},
{noreply, State};
handle_info(Unknown, State) ->
lager:debug("Unknown message: ~p", [Unknown]),
{noreply, State}.
terminate(normal, #state{conn=#connector{connid=ConnId}, pid=C}) ->
lager:debug("Connector ~s terminated: normal", [ConnId]),
disconnect(C);
terminate(Reason, #state{conn=#connector{connid=ConnId}, pid=C}) ->
lager:warning("Connector ~s terminated: ~p", [ConnId, Reason]),
disconnect(C).
code_change(_OldVsn, State, _Extra) ->
{ok, State}.
ensure_gun(#state{pid=Pid}=State) when is_pid(Pid) ->
State;
ensure_gun(#state{conn=#connector{uri= <<"http:">>}, pid=undefined}=State) ->
State;
ensure_gun(#state{conn=#connector{connid=ConnId, uri=Uri}, pid=undefined}=State) ->
lager:debug("Connecting ~s to ~s", [ConnId, Uri]),
{ConnPid, Prefix} =
case http_uri:parse(binary_to_list(Uri), [{scheme_defaults, [{http, 80}, {https, 443}]}]) of
{ok, {http, _UserInfo, HostName, Port, Path, _Query}} ->
{ok, Pid} = gun:open(HostName, Port),
{Pid, Path};
{ok, {https, _UserInfo, HostName, Port, Path, _Query}} ->
Opts = application:get_env(lorawan_server, ssl_options, []),
{ok, Pid} = gun:open(HostName, Port, #{transport=>ssl, transport_opts=>Opts}),
{Pid, Path}
end,
MRef = monitor(process, ConnPid),
State#state{pid=ConnPid, mref=MRef, ready=false, streams=#{}, prefix=Prefix}.
ensure_connected(#state{ready=true}=State) ->
{ok, State};
ensure_connected(#state{pid=undefined, ready=false}=State) ->
{error, State};
ensure_connected(#state{conn=Conn, pid=ConnPid, mref=MRef, ready=false}=State) ->
case gun:await_up(ConnPid, MRef) of
{ok, _Protocol} ->
{ok, State#state{ready=true}};
{error, Reason} ->
lager:debug("~s failed to connect: ~p", [Conn#connector.connid, Reason]),
{error, State}
end.
disconnect(undefined) ->
ok;
disconnect(ConnPid) ->
gun:close(ConnPid).
handle_uplinks(Vars0, State) when is_list(Vars0) ->
lists:foldl(
fun(V0, S) -> handle_uplink(V0, S) end,
State, Vars0);
handle_uplinks(Vars0, State) ->
handle_uplink(Vars0, State).
handle_uplink(Vars0, #state{conn=#connector{format=Format}, publish_uplinks=Publish}=State) ->
{ContentType, Body} = encode_uplink(Format, Vars0),
send_publish(lorawan_admin:build(Vars0), Publish, ContentType, Body, State).
handle_event(Vars0, #state{publish_events=Publish}=State) ->
Vars = lorawan_admin:build(Vars0),
send_publish(Vars, Publish, <<"application/json">>, jsx:encode(Vars), State).
send_publish(Vars, Publish, ContentType, Body, #state{conn=Conn, prefix=Prefix, auth=AuthP}=State) ->
URI = binary:list_to_bin([Prefix | lorawan_connector:fill_pattern(Publish, Vars)]),
[User, Pass] = lorawan_connector:fill_pattern(AuthP, Vars),
case Conn of
#connector{auth = <<"token">>} ->
do_publish({URI, authenticated, [{<<"content-type">>, ContentType}, {User, Pass}], Body}, State);
#connector{} ->
do_publish({URI, [User, Pass], [{<<"content-type">>, ContentType}], Body}, State)
end.
do_publish({URI, _Auth, Headers, Body}=Msg, State=#state{pid=C, streams=Streams}) ->
StreamRef = gun:post(C, URI, Headers, Body),
State#state{streams=maps:put(StreamRef, Msg, Streams)}.
fin_stream(StreamRef, fin, State=#state{streams=Streams}) ->
State#state{streams=maps:remove(StreamRef, Streams)};
fin_stream(_StreamRef, nofin, State) ->
State.
remove_list(Map, List) ->
lists:foldl(
fun(Item, Map2) -> maps:remove(Item, Map2) end,
Map, List).
encode_uplink(<<"raw">>, Vars) ->
{<<"application/octet-stream">>, maps:get(data, Vars, <<>>)};
encode_uplink(<<"json">>, Vars) ->
{<<"application/json">>, jsx:encode(lorawan_admin:build(Vars))};
encode_uplink(<<"www-form">>, Vars) ->
{<<"application/x-www-form-urlencoded">>, lorawan_connector:form_encode(Vars)}.
handle_authenticate(_, _, authenticated, _, _, State) ->
{[], State};
handle_authenticate([Scheme | Rest], URI, Auth, Body, WWWAuthenticate, State) ->
case proplists:get_value(Scheme, WWWAuthenticate) of
undefined ->
handle_authenticate(Rest, URI, Auth, Body, WWWAuthenticate, State);
Value ->
handle_authenticate0(Scheme, Value, URI, Auth, Body, State)
end;
handle_authenticate([], _, _, _, _, State) ->
{[], State}.
handle_authenticate0(_, _, _URI, [Name, Pass], _, State)
when Name == undefined; Pass == undefined ->
lager:error("No credentials for HTTP authentication"),
{[], State};
handle_authenticate0(basic, _, _, [Name, Pass], _, State) ->
Cred = base64:encode(<<Name/binary, $:, Pass/binary>>),
{[lorawan_http_digest:authorization_header(basic, Cred)], State};
handle_authenticate0(digest, Value, URI, [Name, Pass], Body, State=#state{nc=Nc0}) ->
Realm = proplists:get_value(<<"realm">>, Value, <<>>),
Nonce = proplists:get_value(<<"nonce">>, Value, <<>>),
Opaque = proplists:get_value(<<"opaque">>, Value, <<>>),
case proplists:get_value(<<"qop">>, Value) of
undefined ->
Response = lorawan_http_digest:response(<<"POST">>, URI, Body, {Name, Realm, Pass}, Nonce),
{[lorawan_http_digest:authorization_header(digest, [{<<"username">>, Name}, {<<"realm">>, Realm},
{<<"nonce">>, Nonce}, {<<"uri">>, URI}, {<<"algorithm">>, <<"MD5">>},
{<<"response">>, Response}, {<<"opaque">>, Opaque}])], State};
Qop0 ->
[Qop|_] = binary:split(Qop0, [<<",">>], [global]),
Nc = lorawan_http_digest:nc(Nc0),
CNonce = lorawan_http_digest:nonce(4),
Response = lorawan_http_digest:response(<<"POST">>, URI, Body, {Name, Realm, Pass}, Nonce, Nc, CNonce, Qop),
{[lorawan_http_digest:authorization_header(digest, [{<<"username">>, Name}, {<<"realm">>, Realm},
{<<"nonce">>, Nonce}, {<<"uri">>, URI}, {<<"algorithm">>, <<"MD5">>},
{<<"response">>, Response}, {<<"opaque">>, Opaque}, {<<"qop">>, Qop},
{<<"nc">>, Nc}, {<<"cnonce">>, CNonce}])], State#state{nc=Nc0+1}}
end.
set_status(#state{pid=Pid, ready=true}, Map) when is_pid(Pid) ->
Map#{status => <<"connected">>};
set_status(#state{pid=Pid, ready=false}, Map) when is_pid(Pid) ->
Map#{status => <<"connecting">>};
set_status(#state{pid=undefined}, Map) ->
Map#{status => <<"disconnected">>}.
|
80eabd24ea5cb02d225d8e605f2c088f803837e540436e8d4a843f31a803ace5
|
froggey/Mezzano
|
thread-pool.lisp
|
;;;; A thread pool
(defpackage :mezzano.sync.thread-pool
(:use :cl)
(:local-nicknames (:sup :mezzano.supervisor))
(:export #:*default-keepalive-time*
#:thread-pool
#:work-item
#:make-thread-pool
#:thread-pool-add
#:thread-pool-add-many
#:thread-pool-cancel-item
#:thread-pool-flush
#:thread-pool-shutdown
;; Catch tag that can be used as a throw target to
;; leave the current task.
#:terminate-work))
(in-package :mezzano.sync.thread-pool)
(defparameter *default-keepalive-time* 60
"Default value for the idle worker thread keepalive time.")
(defclass thread-pool ()
((%name :initarg :name :reader thread-pool-name)
(%initial-bindings :initarg :initial-bindings :reader thread-pool-initial-bindings)
(%lock :reader thread-pool-lock)
(%cvar :reader thread-pool-cvar)
(%pending :initform '() :accessor thread-pool-pending)
(%working-threads :initform '() :accessor thread-pool-working-threads)
(%idle-threads :initform '() :accessor thread-pool-idle-threads)
(%n-total-threads :initform 0 :accessor thread-pool-n-total-threads)
(%n-blocked-threads :initform 0 :accessor thread-pool-n-blocked-threads)
(%shutdown :initform nil :accessor thread-pool-shutdown-p)
(%keepalive-time :initarg :keepalive-time :accessor thread-pool-keepalive-time))
(:default-initargs :name nil :initial-bindings '()))
(defmethod initialize-instance :after ((instance thread-pool) &key)
(setf (slot-value instance '%lock) (sup:make-mutex instance)
(slot-value instance '%cvar) (sup:make-condition-variable instance)))
(defclass work-item ()
((%name :initarg :name :reader work-item-name)
(%function :initarg :function :reader work-item-function)
(%thread-pool :initarg :thread-pool :reader work-item-thread-pool)))
(defun make-thread-pool (&key name initial-bindings (keepalive-time *default-keepalive-time*))
"Create a new thread-pool."
(check-type keepalive-time (rational 0))
(make-instance 'thread-pool
:name name
:initial-bindings initial-bindings
:keepalive-time keepalive-time))
(defmethod sup:thread-pool-block ((thread-pool thread-pool) blocking-function &rest arguments)
(declare (dynamic-extent arguments))
(when (and (eql blocking-function 'mezzano.supervisor:acquire-mutex)
(eql (first arguments) (thread-pool-lock thread-pool)))
;; Don't suspend when acquiring the thread-pool lock, this causes
;; recursive locking on it.
(return-from sup:thread-pool-block
(apply blocking-function arguments)))
(unwind-protect
(progn
(sup:with-mutex ((thread-pool-lock thread-pool))
(incf (thread-pool-n-blocked-threads thread-pool)))
(apply blocking-function arguments))
(sup:with-mutex ((thread-pool-lock thread-pool))
(decf (thread-pool-n-blocked-threads thread-pool)))))
(defun thread-pool-n-concurrent-threads (thread-pool)
"Return the number of threads in the pool that are not blocked."
(- (thread-pool-n-total-threads thread-pool)
(thread-pool-n-blocked-threads thread-pool)))
(defun thread-pool-main (thread-pool)
(let* ((self (sup:current-thread))
(thread-name (sup:thread-name self)))
(loop
(let ((work nil))
(sup:with-mutex ((thread-pool-lock thread-pool))
;; Move from active to idle.
(setf (thread-pool-working-threads thread-pool)
(remove self (thread-pool-working-threads thread-pool)))
(push self (thread-pool-idle-threads thread-pool))
(setf (third thread-name) nil)
(let ((start-idle-time (get-internal-run-time)))
(flet ((exit-while-idle ()
(setf (thread-pool-idle-threads thread-pool)
(remove self (thread-pool-idle-threads thread-pool)))
(decf (thread-pool-n-total-threads thread-pool))
(return-from thread-pool-main)))
(loop
(when (thread-pool-shutdown-p thread-pool)
(exit-while-idle))
(when (not (endp (thread-pool-pending thread-pool)))
(setf work (pop (thread-pool-pending thread-pool)))
(setf (third thread-name) work)
;; Back to active from idle.
(setf (thread-pool-idle-threads thread-pool)
(remove self (thread-pool-idle-threads thread-pool)))
(push self (thread-pool-working-threads thread-pool))
(return))
;; If there is no work available and there are more
;; unblocked threads than cores, then terminate this thread.
(when (> (thread-pool-n-concurrent-threads thread-pool)
(sup:logical-core-count))
(exit-while-idle))
(let* ((end-idle-time (+ start-idle-time (* (thread-pool-keepalive-time thread-pool) internal-time-units-per-second)))
(idle-time-remaining (- end-idle-time (get-internal-run-time))))
(when (minusp idle-time-remaining)
(exit-while-idle))
(sup:condition-wait (thread-pool-cvar thread-pool)
(thread-pool-lock thread-pool)
(/ idle-time-remaining internal-time-units-per-second)))))))
(setf (sup:thread-thread-pool self) thread-pool)
(mezzano.internals::unwind-protect-unwind-only
(catch 'terminate-work
(funcall (work-item-function work)))
;; Getting here means an unwind occurred in the work item and
;; this thread is terminating in the active state. Clean up.
(setf (thread-pool-working-threads thread-pool)
(remove self (thread-pool-working-threads thread-pool)))
(decf (thread-pool-n-total-threads thread-pool)))
(setf (sup:thread-thread-pool self) nil)))))
(defun thread-pool-add (function thread-pool &key name priority bindings)
"Add a work item to the thread-pool.
Functions are called concurrently and in FIFO order.
A work item is returned, which can be passed to THREAD-POOL-CANCEL-ITEM
to attempt to cancel the work.
BINDINGS is a list of (SYMBOL VALUE) pairs which specify special bindings
that should be active when FUNCTION is called. These override the
thread pool's initial-bindings."
;; TODO
(check-type function function)
(let ((work (make-instance 'work-item
:function (if bindings
(let ((vars (mapcar #'first bindings))
(vals (mapcar #'second bindings)))
(lambda ()
(progv vars vals
(funcall function))))
function)
:name name
:thread-pool thread-pool)))
(sup:with-mutex ((thread-pool-lock thread-pool) :resignal-errors t)
(when (thread-pool-shutdown-p thread-pool)
(error "Attempted to add work item to shut down thread pool ~S" thread-pool))
(setf (thread-pool-pending thread-pool) (append (thread-pool-pending thread-pool) (list work)))
(when (and (endp (thread-pool-idle-threads thread-pool))
(< (thread-pool-n-concurrent-threads thread-pool)
(sup:logical-core-count)))
;; There are no idle threads and there are more logical cores than
;; currently running threads. Create a new thread for this work item.
;; Push it on the active list to make the logic in T-P-MAIN work out.
(push (sup:make-thread (lambda () (thread-pool-main thread-pool))
:name `(thread-pool-worker ,thread-pool nil)
:initial-bindings (thread-pool-initial-bindings thread-pool))
(thread-pool-working-threads thread-pool))
(incf (thread-pool-n-total-threads thread-pool)))
(sup:condition-notify (thread-pool-cvar thread-pool)))
work))
(defun thread-pool-add-many (function values thread-pool &key name priority bindings)
"Add many work items to the pool.
A work item is created for each element of VALUES and FUNCTION is called
in the pool with that element.
Returns a list of the work items added."
(loop
for value in values
collect (thread-pool-add
(let ((value value))
(lambda () (funcall function value)))
thread-pool
:name name
:priority priority
:bindings bindings)))
(defun thread-pool-cancel-item (item)
"Cancel a work item, removing it from its thread-pool.
Returns true if the item was successfully cancelled,
false if the item had finished or is currently running on a worker thread."
(let ((thread-pool (work-item-thread-pool item)))
(sup:with-mutex ((thread-pool-lock thread-pool))
(cond ((find item (thread-pool-pending thread-pool))
(setf (thread-pool-pending thread-pool) (remove item (thread-pool-pending thread-pool)))
t)
(t
nil)))))
(defun thread-pool-flush (thread-pool)
"Cancel all outstanding work on THREAD-POOL.
Returns a list of all cancelled items.
Does not cancel work in progress."
(sup:with-mutex ((thread-pool-lock thread-pool))
(prog1
(thread-pool-pending thread-pool)
(setf (thread-pool-pending thread-pool) '()))))
(defun thread-pool-shutdown (thread-pool &key abort)
"Shutdown THREAD-POOL.
This cancels all outstanding work on THREAD-POOL
and notifies the worker threads that they should
exit once their active work is complete.
Once a thread pool has been shut down, no further work
can be added.
If ABORT is true then worker threads will be terminated
via TERMINATE-THREAD."
(sup:with-mutex ((thread-pool-lock thread-pool))
(setf (thread-pool-shutdown-p thread-pool) t)
(setf (thread-pool-pending thread-pool) '())
(when abort
(dolist (thread (thread-pool-working-threads thread-pool))
(sup:terminate-thread thread))
(dolist (thread (thread-pool-idle-threads thread-pool))
(sup:terminate-thread thread)))
(sup:condition-notify (thread-pool-cvar thread-pool) t))
(values))
| null |
https://raw.githubusercontent.com/froggey/Mezzano/9d34948fee1cfd54875ae909f12b3367a888f3cf/system/thread-pool.lisp
|
lisp
|
A thread pool
Catch tag that can be used as a throw target to
leave the current task.
Don't suspend when acquiring the thread-pool lock, this causes
recursive locking on it.
Move from active to idle.
Back to active from idle.
If there is no work available and there are more
unblocked threads than cores, then terminate this thread.
Getting here means an unwind occurred in the work item and
this thread is terminating in the active state. Clean up.
There are no idle threads and there are more logical cores than
currently running threads. Create a new thread for this work item.
Push it on the active list to make the logic in T-P-MAIN work out.
|
(defpackage :mezzano.sync.thread-pool
(:use :cl)
(:local-nicknames (:sup :mezzano.supervisor))
(:export #:*default-keepalive-time*
#:thread-pool
#:work-item
#:make-thread-pool
#:thread-pool-add
#:thread-pool-add-many
#:thread-pool-cancel-item
#:thread-pool-flush
#:thread-pool-shutdown
#:terminate-work))
(in-package :mezzano.sync.thread-pool)
(defparameter *default-keepalive-time* 60
"Default value for the idle worker thread keepalive time.")
(defclass thread-pool ()
((%name :initarg :name :reader thread-pool-name)
(%initial-bindings :initarg :initial-bindings :reader thread-pool-initial-bindings)
(%lock :reader thread-pool-lock)
(%cvar :reader thread-pool-cvar)
(%pending :initform '() :accessor thread-pool-pending)
(%working-threads :initform '() :accessor thread-pool-working-threads)
(%idle-threads :initform '() :accessor thread-pool-idle-threads)
(%n-total-threads :initform 0 :accessor thread-pool-n-total-threads)
(%n-blocked-threads :initform 0 :accessor thread-pool-n-blocked-threads)
(%shutdown :initform nil :accessor thread-pool-shutdown-p)
(%keepalive-time :initarg :keepalive-time :accessor thread-pool-keepalive-time))
(:default-initargs :name nil :initial-bindings '()))
(defmethod initialize-instance :after ((instance thread-pool) &key)
(setf (slot-value instance '%lock) (sup:make-mutex instance)
(slot-value instance '%cvar) (sup:make-condition-variable instance)))
(defclass work-item ()
((%name :initarg :name :reader work-item-name)
(%function :initarg :function :reader work-item-function)
(%thread-pool :initarg :thread-pool :reader work-item-thread-pool)))
(defun make-thread-pool (&key name initial-bindings (keepalive-time *default-keepalive-time*))
"Create a new thread-pool."
(check-type keepalive-time (rational 0))
(make-instance 'thread-pool
:name name
:initial-bindings initial-bindings
:keepalive-time keepalive-time))
(defmethod sup:thread-pool-block ((thread-pool thread-pool) blocking-function &rest arguments)
(declare (dynamic-extent arguments))
(when (and (eql blocking-function 'mezzano.supervisor:acquire-mutex)
(eql (first arguments) (thread-pool-lock thread-pool)))
(return-from sup:thread-pool-block
(apply blocking-function arguments)))
(unwind-protect
(progn
(sup:with-mutex ((thread-pool-lock thread-pool))
(incf (thread-pool-n-blocked-threads thread-pool)))
(apply blocking-function arguments))
(sup:with-mutex ((thread-pool-lock thread-pool))
(decf (thread-pool-n-blocked-threads thread-pool)))))
(defun thread-pool-n-concurrent-threads (thread-pool)
"Return the number of threads in the pool that are not blocked."
(- (thread-pool-n-total-threads thread-pool)
(thread-pool-n-blocked-threads thread-pool)))
(defun thread-pool-main (thread-pool)
(let* ((self (sup:current-thread))
(thread-name (sup:thread-name self)))
(loop
(let ((work nil))
(sup:with-mutex ((thread-pool-lock thread-pool))
(setf (thread-pool-working-threads thread-pool)
(remove self (thread-pool-working-threads thread-pool)))
(push self (thread-pool-idle-threads thread-pool))
(setf (third thread-name) nil)
(let ((start-idle-time (get-internal-run-time)))
(flet ((exit-while-idle ()
(setf (thread-pool-idle-threads thread-pool)
(remove self (thread-pool-idle-threads thread-pool)))
(decf (thread-pool-n-total-threads thread-pool))
(return-from thread-pool-main)))
(loop
(when (thread-pool-shutdown-p thread-pool)
(exit-while-idle))
(when (not (endp (thread-pool-pending thread-pool)))
(setf work (pop (thread-pool-pending thread-pool)))
(setf (third thread-name) work)
(setf (thread-pool-idle-threads thread-pool)
(remove self (thread-pool-idle-threads thread-pool)))
(push self (thread-pool-working-threads thread-pool))
(return))
(when (> (thread-pool-n-concurrent-threads thread-pool)
(sup:logical-core-count))
(exit-while-idle))
(let* ((end-idle-time (+ start-idle-time (* (thread-pool-keepalive-time thread-pool) internal-time-units-per-second)))
(idle-time-remaining (- end-idle-time (get-internal-run-time))))
(when (minusp idle-time-remaining)
(exit-while-idle))
(sup:condition-wait (thread-pool-cvar thread-pool)
(thread-pool-lock thread-pool)
(/ idle-time-remaining internal-time-units-per-second)))))))
(setf (sup:thread-thread-pool self) thread-pool)
(mezzano.internals::unwind-protect-unwind-only
(catch 'terminate-work
(funcall (work-item-function work)))
(setf (thread-pool-working-threads thread-pool)
(remove self (thread-pool-working-threads thread-pool)))
(decf (thread-pool-n-total-threads thread-pool)))
(setf (sup:thread-thread-pool self) nil)))))
(defun thread-pool-add (function thread-pool &key name priority bindings)
"Add a work item to the thread-pool.
Functions are called concurrently and in FIFO order.
A work item is returned, which can be passed to THREAD-POOL-CANCEL-ITEM
to attempt to cancel the work.
BINDINGS is a list of (SYMBOL VALUE) pairs which specify special bindings
that should be active when FUNCTION is called. These override the
thread pool's initial-bindings."
;; TODO
(check-type function function)
(let ((work (make-instance 'work-item
:function (if bindings
(let ((vars (mapcar #'first bindings))
(vals (mapcar #'second bindings)))
(lambda ()
(progv vars vals
(funcall function))))
function)
:name name
:thread-pool thread-pool)))
(sup:with-mutex ((thread-pool-lock thread-pool) :resignal-errors t)
(when (thread-pool-shutdown-p thread-pool)
(error "Attempted to add work item to shut down thread pool ~S" thread-pool))
(setf (thread-pool-pending thread-pool) (append (thread-pool-pending thread-pool) (list work)))
(when (and (endp (thread-pool-idle-threads thread-pool))
(< (thread-pool-n-concurrent-threads thread-pool)
(sup:logical-core-count)))
(push (sup:make-thread (lambda () (thread-pool-main thread-pool))
:name `(thread-pool-worker ,thread-pool nil)
:initial-bindings (thread-pool-initial-bindings thread-pool))
(thread-pool-working-threads thread-pool))
(incf (thread-pool-n-total-threads thread-pool)))
(sup:condition-notify (thread-pool-cvar thread-pool)))
work))
(defun thread-pool-add-many (function values thread-pool &key name priority bindings)
"Add many work items to the pool.
A work item is created for each element of VALUES and FUNCTION is called
in the pool with that element.
Returns a list of the work items added."
(loop
for value in values
collect (thread-pool-add
(let ((value value))
(lambda () (funcall function value)))
thread-pool
:name name
:priority priority
:bindings bindings)))
(defun thread-pool-cancel-item (item)
"Cancel a work item, removing it from its thread-pool.
Returns true if the item was successfully cancelled,
false if the item had finished or is currently running on a worker thread."
(let ((thread-pool (work-item-thread-pool item)))
(sup:with-mutex ((thread-pool-lock thread-pool))
(cond ((find item (thread-pool-pending thread-pool))
(setf (thread-pool-pending thread-pool) (remove item (thread-pool-pending thread-pool)))
t)
(t
nil)))))
(defun thread-pool-flush (thread-pool)
"Cancel all outstanding work on THREAD-POOL.
Returns a list of all cancelled items.
Does not cancel work in progress."
(sup:with-mutex ((thread-pool-lock thread-pool))
(prog1
(thread-pool-pending thread-pool)
(setf (thread-pool-pending thread-pool) '()))))
(defun thread-pool-shutdown (thread-pool &key abort)
"Shutdown THREAD-POOL.
This cancels all outstanding work on THREAD-POOL
and notifies the worker threads that they should
exit once their active work is complete.
Once a thread pool has been shut down, no further work
can be added.
If ABORT is true then worker threads will be terminated
via TERMINATE-THREAD."
(sup:with-mutex ((thread-pool-lock thread-pool))
(setf (thread-pool-shutdown-p thread-pool) t)
(setf (thread-pool-pending thread-pool) '())
(when abort
(dolist (thread (thread-pool-working-threads thread-pool))
(sup:terminate-thread thread))
(dolist (thread (thread-pool-idle-threads thread-pool))
(sup:terminate-thread thread)))
(sup:condition-notify (thread-pool-cvar thread-pool) t))
(values))
|
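A minimal usage sketch of the thread-pool API defined in the record above, in Common Lisp, assuming it is loaded inside a Mezzano image; the demo package, the work functions, and the printed output are illustrative and not part of the library.
;; Usage sketch: only the exported names (make-thread-pool, thread-pool-add,
;; thread-pool-add-many, thread-pool-shutdown) and the terminate-work catch
;; tag documented above are relied on.
(defpackage :thread-pool-demo
  (:use :cl)
  (:local-nicknames (:tp :mezzano.sync.thread-pool)))
(in-package :thread-pool-demo)
(defun demo ()
  (let ((pool (tp:make-thread-pool :name "demo-pool" :keepalive-time 10)))
    ;; Queue a batch of independent items; they run concurrently, in FIFO
    ;; order, with at most one unblocked worker per logical core.
    (tp:thread-pool-add-many
     (lambda (x) (format t "~D squared is ~D~%" x (* x x)))
     '(1 2 3 4 5)
     pool)
    ;; A single named item; calling (throw 'terminate-work nil) inside the
    ;; function would abandon just this item.
    (tp:thread-pool-add (lambda () (format t "one-off item~%")) pool
                        :name "one-off")
    ;; Once no further work will be added: pending items are cancelled and
    ;; workers exit after finishing their current item.
    (tp:thread-pool-shutdown pool)))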
5ef9f4e9d91dcbac40b743fc72d8bbf1a29445232bf1f88505d83a8279b27914
|
shirok/Gauche
|
future.scm
|
;;;
;;; control.future - future implementation
;;;
;;;   Copyright (c) 2020-2022  < >
;;;
;;; Redistribution and use in source and binary forms, with or without
;;; modification, are permitted provided that the following conditions
;;; are met:
;;;
;;; 1. Redistributions of source code must retain the above copyright
;;; notice, this list of conditions and the following disclaimer.
;;;
;;; 2. Redistributions in binary form must reproduce the above copyright
;;; notice, this list of conditions and the following disclaimer in the
;;; documentation and/or other materials provided with the distribution.
;;;
;;; 3. Neither the name of the authors nor the names of its contributors
;;; may be used to endorse or promote products derived from this
;;; software without specific prior written permission.
;;;
;;; THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
;;; "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
;;; LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
;;; A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
;;; OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
;;; SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED
;;; TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
;;; PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
;;; LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
;;; NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
;;; SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
;;;
;; This is a provisional implementation; we'll use implicit thread pool
;; to avoid thread creation overhead eventually.
;; Guile and Racket use 'touch' to retrieve the result of a future, but
;; that name seems too generic. We adopt 'future-get'.
;; If a future already finished computation, 'future-get' returns immediately
;; with the result value. Otherwise, it blocks until the result is available,
;; unless timeout is specified. Subsequent 'future-get' returns the same
;; result.
;;
;; If the concurrent computation raises an exception, it is caught, and
;; re-raised at the first 'future-get' call. It is undefined if future-get is
;; called again on such a future---it depends on the behavior of thread-join!,
;; but the behavior of calling thread-join! again on exceptionally terminated
;; thread isn't defined either. Currently, the second call of future-get
;; won't raise an exception and returns #<undef>, but do not count on
;; the behavior.
(define-module control.future
(use gauche.threads)
(export <future> future? future make-future future-done? future-get))
(select-module control.future)
(define-class <future> ()
;; all slots must be private
((%thread :init-keyword :thread)))
(define-syntax future
(syntax-rules ()
[(_ expr)
(make <future>
:thread (thread-start! (make-thread (lambda () (values->list expr)))))]))
(define (make-future thunk)
(future (thunk)))
(define (future? obj) (is-a? obj <future>))
(define (future-done? future)
(assume-type future <future>)
(eq? (thread-state (~ future'%thread)) 'terminated))
(define (future-get future :optional (timeout #f) (timeout-val #f))
(assume-type future <future>)
(guard (e [(<uncaught-exception> e) (raise (~ e'reason))])
(apply values (thread-join! (~ future'%thread) timeout timeout-val))))
| null |
https://raw.githubusercontent.com/shirok/Gauche/b773899dbe0b2955e1c4f1daa066da874070c1e4/lib/control/future.scm
|
scheme
|
control.future - future implementation
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions
are met:
1. Redistributions of source code must retain the above copyright
notice, this list of conditions and the following disclaimer.
2. Redistributions in binary form must reproduce the above copyright
notice, this list of conditions and the following disclaimer in the
documentation and/or other materials provided with the distribution.
3. Neither the name of the authors nor the names of its contributors
may be used to endorse or promote products derived from this
software without specific prior written permission.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
This is a provisional implementation; we'll use implicit thread pool
to avoid thread creation overhead eventually.
Guile and Racket use 'touch' to retrieve the result of a future, but
that name seems too generic. We adopt 'future-get'.
If a future already finished computation, 'future-get' returns immediately
with the result value. Otherwise, it blocks until the result is available,
unless timeout is specified. Subsequent 'future-get' returns the same
result.
If the concurrent computation raises an exception, it is caught, and
called again on such a future---it depends on the behavior of thread-join!,
but the behavior of calling thread-join! again on exceptionally terminated
won't raise an exception and returns #<undef>, but do not count on
the behavior.
all slots must be private
|
Copyright ( c ) 2020 - 2022 < >
" AS IS " AND ANY EXPRESS OR IMPLIED WARRANTIES , INCLUDING , BUT NOT
SPECIAL , EXEMPLARY , OR CONSEQUENTIAL DAMAGES ( INCLUDING , BUT NOT LIMITED
LIABILITY , WHETHER IN CONTRACT , STRICT LIABILITY , OR TORT ( INCLUDING
re - raised when first ' future - get ' . It is undefined if future - get is
thread is n't defined either . Currently , the second call of future - get
(define-module control.future
(use gauche.threads)
(export <future> future? future make-future future-done? future-get))
(select-module control.future)
(define-class <future> ()
((%thread :init-keyword :thread)))
(define-syntax future
(syntax-rules ()
[(_ expr)
(make <future>
:thread (thread-start! (make-thread (lambda () (values->list expr)))))]))
(define (make-future thunk)
(future (thunk)))
(define (future? obj) (is-a? obj <future>))
(define (future-done? future)
(assume-type future <future>)
(eq? (thread-state (~ future'%thread)) 'terminated))
(define (future-get future :optional (timeout #f) (timeout-val #f))
(assume-type future <future>)
(guard (e [(<uncaught-exception> e) (raise (~ e'reason))])
(apply values (thread-join! (~ future'%thread) timeout timeout-val))))
|
b7f504706429e206d19d5eeb4aab4e370302354859d90debe19fd2ea3181d992
|
mwand/eopl3
|
checker.scm
|
(module checker (lib "eopl.ss" "eopl")
(require "drscheme-init.scm")
(require "lang.scm")
(require "static-data-structures.scm")
(require "expand-type.scm")
(provide type-of)
;; check-equal-type! : Type * Type * Exp -> Unspecified
;; Page: 242
(define check-equal-type!
(lambda (ty1 ty2 exp)
(when (not (equal? ty1 ty2))
(report-unequal-types ty1 ty2 exp))))
;; report-unequal-types : Type * Type * Exp -> Unspecified
;; Page: 243
(define report-unequal-types
(lambda (ty1 ty2 exp)
(eopl:error 'check-equal-type!
"Types didn't match: ~s != ~a in~%~a"
(type-to-external-form ty1)
(type-to-external-form ty2)
exp)))
;;;;;;;;;;;;;;;; The Type Checker ;;;;;;;;;;;;;;;;
;; moved to check-modules.scm
;; type-of-program : Program -> Type
;; Page: 244
;; (define type-of-program
;;   (lambda (pgm)
;;     (cases program pgm
;;       (a-program (exp1)
;;         (type-of exp1 (init-tenv))))))
;; type-of : Exp * Tenv -> Type
;; Page: 244--246.  See also page 285.
(define type-of
(lambda (exp tenv)
(cases expression exp
(const-exp (num) (int-type))
(diff-exp (exp1 exp2)
(let ((type1 (type-of exp1 tenv))
(type2 (type-of exp2 tenv)))
(check-equal-type! type1 (int-type) exp1)
(check-equal-type! type2 (int-type) exp2)
(int-type)))
(zero?-exp (exp1)
(let ((type1 (type-of exp1 tenv)))
(check-equal-type! type1 (int-type) exp1)
(bool-type)))
(if-exp (exp1 exp2 exp3)
(let ((ty1 (type-of exp1 tenv))
(ty2 (type-of exp2 tenv))
(ty3 (type-of exp3 tenv)))
(check-equal-type! ty1 (bool-type) exp1)
(check-equal-type! ty2 ty3 exp)
ty2))
(var-exp (var) (apply-tenv tenv var))
;; lookup-qualified-var-in-tenv defined on page 285.
(qualified-var-exp (m-name var-name)
(lookup-qualified-var-in-tenv m-name var-name tenv))
(let-exp (var exp1 body)
(let ((rhs-type (type-of exp1 tenv)))
(type-of body (extend-tenv var rhs-type tenv))))
(proc-exp (bvar bvar-type body)
(let ((expanded-bvar-type
(expand-type bvar-type tenv)))
(let ((result-type
(type-of body
(extend-tenv
bvar
expanded-bvar-type
tenv))))
(proc-type expanded-bvar-type result-type))))
(call-exp (rator rand)
(let ((rator-type (type-of rator tenv))
(rand-type (type-of rand tenv)))
(cases type rator-type
(proc-type (arg-type result-type)
(begin
(check-equal-type! arg-type rand-type rand)
result-type))
(else
(eopl:error 'type-of
"Rator not a proc type:~%~s~%had rator type ~s"
rator (type-to-external-form rator-type))))))
(letrec-exp (proc-result-type proc-name
bvar bvar-type
proc-body
letrec-body)
(let ((tenv-for-letrec-body
(extend-tenv
proc-name
(expand-type
(proc-type bvar-type proc-result-type)
tenv)
tenv)))
(let ((proc-result-type
(expand-type proc-result-type tenv))
(proc-body-type
(type-of proc-body
(extend-tenv
bvar
(expand-type bvar-type tenv)
tenv-for-letrec-body))))
(check-equal-type!
proc-body-type proc-result-type proc-body)
(type-of letrec-body tenv-for-letrec-body))))
)))
;; type environments are now in static-data-structures.scm.
)
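;; A small illustrative sketch (not part of the original source; it assumes the
;; expression and type constructors from lang.scm / static-data-structures.scm,
;; including an empty-tenv constructor):
;;
;;   (type-of
;;     (proc-exp 'x (int-type) (diff-exp (var-exp 'x) (const-exp 1)))
;;     (empty-tenv))
;;   ;; => a type equivalent to (proc-type (int-type) (int-type))
;;
;; whereas an if-expression whose branches have different types would make
;; check-equal-type! call report-unequal-types and abort with eopl:error.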
| null |
https://raw.githubusercontent.com/mwand/eopl3/b50e015be7f021d94c1af5f0e3a05d40dd2b0cbf/chapter8/abstract-types-lang/checker.scm
|
scheme
|
check-equal-type! : Type * Type * Exp -> Unspecified
report-unequal-types : Type * Type * Exp -> Unspecified
; ; ; ; ; ; ; ; ; ; ; ; ; ; ;
moved to check-modules.scm
type-of-program : Program -> Type
(define type-of-program
(a-program (exp1)
(type-of exp1 (init-tenv))))))
type-of : Exp * Tenv -> Type
|
(module checker (lib "eopl.ss" "eopl")
(require "drscheme-init.scm")
(require "lang.scm")
(require "static-data-structures.scm")
(require "expand-type.scm")
(provide type-of)
Page : 242
(define check-equal-type!
(lambda (ty1 ty2 exp)
(when (not (equal? ty1 ty2))
(report-unequal-types ty1 ty2 exp))))
Page : 243
(define report-unequal-types
(lambda (ty1 ty2 exp)
(eopl:error 'check-equal-type!
"Types didn't match: ~s != ~a in~%~a"
(type-to-external-form ty1)
(type-to-external-form ty2)
exp)))
Page : 244
( lambda ( pgm )
( cases program pgm
Page 244 - -246 . See also page 285 .
(define type-of
(lambda (exp tenv)
(cases expression exp
(const-exp (num) (int-type))
(diff-exp (exp1 exp2)
(let ((type1 (type-of exp1 tenv))
(type2 (type-of exp2 tenv)))
(check-equal-type! type1 (int-type) exp1)
(check-equal-type! type2 (int-type) exp2)
(int-type)))
(zero?-exp (exp1)
(let ((type1 (type-of exp1 tenv)))
(check-equal-type! type1 (int-type) exp1)
(bool-type)))
(if-exp (exp1 exp2 exp3)
(let ((ty1 (type-of exp1 tenv))
(ty2 (type-of exp2 tenv))
(ty3 (type-of exp3 tenv)))
(check-equal-type! ty1 (bool-type) exp1)
(check-equal-type! ty2 ty3 exp)
ty2))
(var-exp (var) (apply-tenv tenv var))
lookup - qualified - var - in - tenv defined on page 285 .
(qualified-var-exp (m-name var-name)
(lookup-qualified-var-in-tenv m-name var-name tenv))
(let-exp (var exp1 body)
(let ((rhs-type (type-of exp1 tenv)))
(type-of body (extend-tenv var rhs-type tenv))))
(proc-exp (bvar bvar-type body)
(let ((expanded-bvar-type
(expand-type bvar-type tenv)))
(let ((result-type
(type-of body
(extend-tenv
bvar
expanded-bvar-type
tenv))))
(proc-type expanded-bvar-type result-type))))
(call-exp (rator rand)
(let ((rator-type (type-of rator tenv))
(rand-type (type-of rand tenv)))
(cases type rator-type
(proc-type (arg-type result-type)
(begin
(check-equal-type! arg-type rand-type rand)
result-type))
(else
(eopl:error 'type-of
"Rator not a proc type:~%~s~%had rator type ~s"
rator (type-to-external-form rator-type))))))
(letrec-exp (proc-result-type proc-name
bvar bvar-type
proc-body
letrec-body)
(let ((tenv-for-letrec-body
(extend-tenv
proc-name
(expand-type
(proc-type bvar-type proc-result-type)
tenv)
tenv)))
(let ((proc-result-type
(expand-type proc-result-type tenv))
(proc-body-type
(type-of proc-body
(extend-tenv
bvar
(expand-type bvar-type tenv)
tenv-for-letrec-body))))
(check-equal-type!
proc-body-type proc-result-type proc-body)
(type-of letrec-body tenv-for-letrec-body))))
)))
type environments are now in static-data-structures.scm .
)
|
1ede4024606b3435616da6f5ccd75f5c36ebbb5220ec723342032436dba31bc8
|
binsec/haunted
|
region_bitvector.mli
|
(**************************************************************************)
(*  This file is part of BINSEC.                                          *)
(*                                                                        *)
(*  Copyright (C) 2016-2019                                               *)
(*    CEA (Commissariat à l'énergie atomique et aux énergies              *)
(*         alternatives)                                                  *)
(*                                                                        *)
(*  you can redistribute it and/or modify it under the terms of the GNU   *)
(*  Lesser General Public License as published by the Free Software       *)
(*  Foundation, version 2.1.                                              *)
(*                                                                        *)
(*  It is distributed in the hope that it will be useful,                 *)
(*  but WITHOUT ANY WARRANTY; without even the implied warranty of        *)
(*  MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the         *)
(*  GNU Lesser General Public License for more details.                   *)
(*                                                                        *)
(*  See the GNU Lesser General Public License version 2.1                 *)
(*  for more details (enclosed in the file licenses/LGPLv2.1).            *)
(*                                                                        *)
(**************************************************************************)
(** Bitvectors with low-level regions *)
type 'a symbol
module rec SubSymb : sig
type t = (int SymbMap.t) symbol
val compare : t -> t -> int
end
and SymbMap : Map.S with type key = (int SymbMap.t) symbol
type t =
[ Smt_bitvectors.basic_value
| `Symb of (int SymbMap.t) symbol
| `SymbSmt of Smt_bitvectors.smtBvExprAlt ]
val region_equal : Dba.region -> Dba.region -> bool
val equal : t -> t -> bool
(** {2 Construction functions} *)
val create_constant : Bigint.t -> int -> t
(** [create_constant v sz] creates a bitvector of value [bv] with size
[sz] in region [`Constant]
*)
val zeros : int -> t
(** [zeros size] creates a bitvector of value 0 with size [size] in region
    [`Constant] *)
val undefined : int -> t
(** [undefined n] creates an undefined value of bitsize [n] *)
(** {2 Pretty-printers} *)
val pp : Format.formatter -> t -> unit
val to_string : t -> string
(** {2 Accessors} *)
val region_of : t -> Dba.region
val value_of : t -> Bigint.t
val bitvector_of : t -> Bitvector.t
val size_of : t -> int
(* FIXME *)
(** {2 Constructors} *)
val append : t -> t -> t
val non_deterministic: Dba.region -> int -> t
val restrict : t -> int -> int -> t
val succ : t -> t
include Sigs.Arithmetic with type t := t
val lshift : t -> t -> t
val rshiftS : t -> t -> t
val rshiftU : t -> t -> t
val rotate_left : t -> t -> t
val rotate_right : t -> t -> t
val extension : t -> int -> t
val signed_extension : t -> int -> t
val eq : t -> t -> t
val diff : t -> t -> t
val leqU : t -> t -> t
val leqS : t -> t -> t
val ltU : t -> t -> t
val ltS : t -> t -> t
val gtU : t -> t -> t
val gtS : t -> t -> t
val geqU : t -> t -> t
val geqS : t -> t -> t
val lognot : t -> t
val logxor : t -> t -> t
val logor : t -> t -> t
val logand : t -> t -> t
val is_zero : t -> bool
val display_statistics : Format.formatter -> unit -> unit
val get_value :
Smt_bitvectors.smtBvExprAlt -> int ->
Smt_bitvectors.smtBvExprAlt list -> Dba_types.Caddress.Set.t -> t
val get_expr :
Smt_bitvectors.smtBvExprAlt -> int ->
Smt_bitvectors.smtBvExprAlt list -> Dba_types.Caddress.Set.t -> Dba.Expr.t
val get_byte_region_at : Bigint.t -> t
(** [get_byte_region_at addr] returns the value read at the lone byte cell
    [addr].
    @throws [Invalid_address msg] if [addr] is out of range
*)
val default_get_byte_region_at : Bigint.t -> t
(** [default_get_byte_region_at addr] is [get_byte_region_at addr] but catches
    the possible exception and returns an undefined byte instead.
*)
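(* A minimal usage sketch (illustrative only, not part of this interface;
   it assumes Bigint exposes [big_int_of_int]):

     let byte = create_constant (Bigint.big_int_of_int 255) 8 in
     assert (size_of byte = 8);
     Format.printf "byte = %a@." pp byte;
     assert (is_zero (zeros 8))
*)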
| null |
https://raw.githubusercontent.com/binsec/haunted/7ffc5f4072950fe138f53fe953ace98fff181c73/src/static/types/region_bitvector.mli
|
ocaml
|
************************************************************************
alternatives)
you can redistribute it and/or modify it under the terms of the GNU
It is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU Lesser General Public License for more details.
************************************************************************
* Bitvectors with low-level regions
* {2 Construction functions}
* [create_constant v sz] creates a bitvector of value [bv] with size
[sz] in region [`Constant]
* [undefined n] creates an undefined value of bitsize [n]
* {2 Accessors}
* {2 Constructors}
|
This file is part of BINSEC .
Copyright ( C ) 2016 - 2019
CEA ( Commissariat à l'énergie atomique et aux énergies
Lesser General Public License as published by the Free Software
Foundation , version 2.1 .
See the GNU Lesser General Public License version 2.1
for more details ( enclosed in the file licenses / LGPLv2.1 ) .
type 'a symbol
module rec SubSymb : sig
type t = (int SymbMap.t) symbol
val compare : t -> t -> int
end
and SymbMap : Map.S with type key = (int SymbMap.t) symbol
type t =
[ Smt_bitvectors.basic_value
| `Symb of (int SymbMap.t) symbol
| `SymbSmt of Smt_bitvectors.smtBvExprAlt ]
val region_equal : Dba.region -> Dba.region -> bool
val equal : t -> t -> bool
val create_constant : Bigint.t -> int -> t
val zeros : int -> t
* [ zeros size ] creates a bitvector of value 0 with size [ size ] in region
[ ` Constant ]
[`Constant]*)
val undefined : int -> t
* { 2 Pretty - printers }
val pp : Format.formatter -> t -> unit
val to_string : t -> string
val region_of : t -> Dba.region
val value_of : t -> Bigint.t
val bitvector_of : t -> Bitvector.t
val size_of : t -> int
FIXME
val append : t -> t -> t
val non_deterministic: Dba.region -> int -> t
val restrict : t -> int -> int -> t
val succ : t -> t
include Sigs.Arithmetic with type t := t
val lshift : t -> t -> t
val rshiftS : t -> t -> t
val rshiftU : t -> t -> t
val rotate_left : t -> t -> t
val rotate_right : t -> t -> t
val extension : t -> int -> t
val signed_extension : t -> int -> t
val eq : t -> t -> t
val diff : t -> t -> t
val leqU : t -> t -> t
val leqS : t -> t -> t
val ltU : t -> t -> t
val ltS : t -> t -> t
val gtU : t -> t -> t
val gtS : t -> t -> t
val geqU : t -> t -> t
val geqS : t -> t -> t
val lognot : t -> t
val logxor : t -> t -> t
val logor : t -> t -> t
val logand : t -> t -> t
val is_zero : t -> bool
val display_statistics : Format.formatter -> unit -> unit
val get_value :
Smt_bitvectors.smtBvExprAlt -> int ->
Smt_bitvectors.smtBvExprAlt list -> Dba_types.Caddress.Set.t -> t
val get_expr :
Smt_bitvectors.smtBvExprAlt -> int ->
Smt_bitvectors.smtBvExprAlt list -> Dba_types.Caddress.Set.t -> Dba.Expr.t
val get_byte_region_at : Bigint.t -> t
* [ get_byte_region_at addr ] returns the value read at the lone byte cell
[ addr ] .
@throws [ Invalid_address msg ] if [ addr ] is out of range
[addr].
@throws [Invalid_address msg] if [addr] is out of range
*)
val default_get_byte_region_at : Bigint.t -> t
* [ default_get_byte_region_at addr ] is [ get_byte_region_at addr ] but catches
the possible exception and returns an undefined byte instead .
the possible exception and returns an undefined byte instead.
*)
|
52988025c3183818fa91f8ea223836c7d561fbd208a32854652f11ec637490e1
|
nominolo/lambdachine
|
V76.hs
|
{-# LANGUAGE CPP #-}
{-# LANGUAGE NamedFieldPuns #-}
{-# LANGUAGE PatternGuards #-}
module Ghc.Api.V76
( module Ghc.Api.V76
, Phase(..)
, ModDetails
)
where
import HscTypes
import DriverPhases
--import HscMain
import Ghc.Api.V76Hsc
import ErrUtils
import Panic
import DynFlags
import Module
import SysTools
import Outputable
import FastString
import LlvmCodeGen ( llvmFixupAsm )
import HeaderInfo
import Platform
import ParserCoreUtils ( getCoreModuleName )
import StringBuffer ( hGetStringBuffer )
import SrcLoc
import Finder
import Util
import Packages
import Exception
import Config
import StaticFlags
import System.IO
import System.Directory
import Control.Monad ( when, unless )
import Data.Maybe
import System.FilePath
import Data.IORef
import Data.List
compileOneShot :: HscEnv -> FileHooks
-> Phase -> [(FilePath, Maybe Phase)] -> IO ()
compileOneShot hsc_env hooks stop_phase srcs = do
outputFiles <- mapM (compileFile hsc_env hooks stop_phase) srcs
return ()
#include "ghcplatform.h"
compileFile :: HscEnv -> FileHooks -> Phase -> (FilePath, Maybe Phase)
-> IO FilePath
compileFile hsc_env hooks stop_phase (src, mb_phase) = do
exists <- doesFileExist src
when (not exists) $
ghcError (CmdLineError ("does not exist: " ++ src))
let
dflags = hsc_dflags hsc_env
split = dopt Opt_SplitObjs dflags
mb_o_file = outputFile dflags
ghc_link = ghcLink dflags -- Set by -c or -no-link
-- When linking, the -o argument refers to the linker's output.
-- otherwise, we use it as the name for the pipeline's output.
output
| StopLn <- stop_phase, not (isNoLink ghc_link) = Persistent
-- -o foo applies to linker
| Just o_file <- mb_o_file = SpecificFile o_file
-- -o foo applies to the file we are compiling now
| otherwise = Persistent
stop_phase' = case stop_phase of
As | split -> SplitAs
_ -> stop_phase
( _, out_file) <- runPipeline stop_phase' hsc_env
(src, mb_phase) Nothing output
Nothing {- no ModLocation -} Nothing {- no stub -} hooks
return out_file
data PipelineOutput
= Temporary
-- ^ Output should be to a temporary file: we're going to
-- run more compilation steps on this output later.
| Persistent
-- ^ We want a persistent file, i.e. a file in the current directory
-- derived from the input filename, but with the appropriate extension.
-- eg. in "ghc -c Foo.hs" the output goes into ./Foo.o.
| SpecificFile FilePath
-- ^ The output must go into the specified file.
runPipeline :: Phase -- ^ When to stop
-> HscEnv -- ^ Compilation environment
-> (FilePath,Maybe Phase) -- ^ Input filename (and maybe -x suffix)
-> Maybe FilePath -- ^ original basename (if different from input name)
-> PipelineOutput -- ^ Output filename
-> Maybe ModLocation -- ^ A ModLocation, if this is a Haskell module
-> Maybe FilePath -- ^ stub object, if we have one
-> FileHooks
-> IO (DynFlags, FilePath) -- ^ (final flags, output filename)
runPipeline stop_phase hsc_env0 (input_fn, mb_phase)
mb_basename output maybe_loc maybe_stub_o hooks = do
let dflags0 = hsc_dflags hsc_env0
(input_basename, suffix) = splitExtension input_fn
suffix' = drop 1 suffix -- strip off the .
basename | Just b <- mb_basename = b
| otherwise = input_basename
-- Decide where dump files should go based on the pipeline output
dflags = dflags0 { dumpPrefix = Just (basename ++ ".") }
hsc_env = hsc_env0 {hsc_dflags = dflags}
-- If we were given a -x flag, then use that phase to start from
start_phase = fromMaybe (startPhase suffix') mb_phase
-- We want to catch cases of "you can't get there from here" before
-- we start the pipeline, because otherwise it will just run off the
-- end.
--
-- There is a partial ordering on phases, where A < B iff A occurs
-- before B in a normal compilation pipeline.
when (not (start_phase `happensBefore` stop_phase)) $
ghcError (UsageError
("cannot compile this file to desired target: "
++ input_fn))
-- this is a function which will be used to calculate output file names
-- as we go along (we partially apply it to some of its inputs here)
let get_output_fn = getOutputFilename stop_phase output basename
-- Execute the pipeline...
let env = PipeEnv{ stop_phase,
src_basename = basename,
src_suffix = suffix',
output_spec = output }
state = PipeState{ hsc_env, maybe_loc, maybe_stub_o = maybe_stub_o }
(state', output_fn) <- unP (pipeLoop start_phase input_fn hooks) env state
let PipeState{ hsc_env=hsc_env', maybe_loc } = state'
dflags' = hsc_dflags hsc_env'
-- Sometimes, a compilation phase doesn't actually generate any output
-- (eg. the CPP phase when -fcpp is not turned on).  If we end on this
-- stage, but we wanted to keep the output, then we have to explicitly
-- copy the file, remembering to prepend a {-# LINE #-} pragma so that
-- further compilation stages can tell what the original filename was.
case output of
Temporary ->
return (dflags', output_fn)
_other ->
do final_fn <- get_output_fn dflags' stop_phase maybe_loc
when (final_fn /= output_fn) $ do
let msg = ("Copying `" ++ output_fn ++"' to `" ++ final_fn ++ "'")
line_prag = Just ("{-# LINE 1 \"" ++ input_fn ++ "\" #-}\n")
copyWithHeader dflags msg line_prag output_fn final_fn
return (dflags', final_fn)
-- -----------------------------------------------------------------------------
-- The pipeline uses a monad to carry around various bits of information
-- PipeEnv: invariant information passed down
data PipeEnv = PipeEnv {
stop_phase :: Phase, -- ^ Stop just before this phase
src_basename :: String, -- ^ basename of original input source
src_suffix :: String, -- ^ its extension
output_spec :: PipelineOutput -- ^ says where to put the pipeline output
}
-- PipeState: information that might change during a pipeline run
data PipeState = PipeState {
hsc_env :: HscEnv,
-- ^ only the DynFlags change in the HscEnv.  The DynFlags change
-- at various points, for example when we read the OPTIONS_GHC
-- pragmas in the Hsc phase.
maybe_loc :: Maybe ModLocation,
-- ^ the ModLocation.  This is discovered during compilation,
-- in the Hsc phase where we read the module header.
maybe_stub_o :: Maybe FilePath
-- ^ the stub object.  This is set by the Hsc phase if a stub
-- object was created. The stub object will be joined with
-- the main compilation object using "ld -r" at the end.
}
newtype CompPipeline a = P { unP :: PipeEnv -> PipeState -> IO (PipeState, a) }
instance Monad CompPipeline where
return a = P $ \_env state -> return (state, a)
P m >>= k = P $ \env state -> do (state',a) <- m env state
unP (k a) env state'
setDynFlags :: DynFlags -> CompPipeline ()
setDynFlags dflags = P $ \_env state ->
return (state{hsc_env= (hsc_env state){ hsc_dflags = dflags }}, ())
setModLocation :: ModLocation -> CompPipeline ()
setModLocation loc = P $ \_env state ->
return (state{ maybe_loc = Just loc }, ())
setStubO :: FilePath -> CompPipeline ()
setStubO stub_o = P $ \_env state ->
return (state{ maybe_stub_o = Just stub_o }, ())
io :: IO a -> CompPipeline a
io m = P $ \_env state -> do a <- m; return (state, a)
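-- A minimal sketch (not part of the original driver) of how a phase uses the
-- plumbing above: read the pipeline environment, fetch the current DynFlags,
-- lift an IO action, and return a value.  The function name is hypothetical
-- and nothing below calls it.
examplePipelineStep :: CompPipeline FilePath
examplePipelineStep = do
  PipeEnv{src_basename, src_suffix} <- getPipeEnv
  dflags <- getDynFlags
  let fn = src_basename <.> src_suffix
  io $ debugTraceMsg dflags 4 (ptext (sLit "pipeline input:") <+> text fn)
  return fn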
------------------------------------------------------------------------
-- In each phase, we need to know into what filename to generate the
-- output. All the logic about which filenames we generate output
-- into is embodied in the following function.
getOutputFilename
:: Phase -> PipelineOutput -> String
-> DynFlags -> Phase{-next phase-} -> Maybe ModLocation -> IO FilePath
getOutputFilename stop_phase output basename
= func
where
func dflags next_phase maybe_location
| is_last_phase, Persistent <- output = persistent_fn
| is_last_phase, SpecificFile f <- output = return f
| keep_this_output = persistent_fn
| otherwise = newTempName dflags suffix
where
hcsuf = hcSuf dflags
odir = objectDir dflags
osuf = objectSuf dflags
keep_hc = dopt Opt_KeepHcFiles dflags
keep_s = dopt Opt_KeepSFiles dflags
keep_bc = dopt Opt_KeepLlvmFiles dflags
myPhaseInputExt HCc = hcsuf
myPhaseInputExt MergeStub = osuf
myPhaseInputExt StopLn = osuf
myPhaseInputExt other = phaseInputExt other
is_last_phase = next_phase `eqPhase` stop_phase
-- sometimes, we keep output from intermediate stages
keep_this_output =
case next_phase of
As | keep_s -> True
LlvmOpt | keep_bc -> True
HCc | keep_hc -> True
_other -> False
suffix = myPhaseInputExt next_phase
-- persistent object files get put in odir
persistent_fn
| StopLn <- next_phase = return odir_persistent
| otherwise = return persistent
persistent = basename <.> suffix
odir_persistent
| Just loc <- maybe_location = ml_obj_file loc
| Just d <- odir = d </> persistent
| otherwise = persistent
phaseOutputFilename :: Phase{-next phase-} -> CompPipeline FilePath
phaseOutputFilename next_phase = do
PipeEnv{stop_phase, src_basename, output_spec} <- getPipeEnv
PipeState{maybe_loc, hsc_env} <- getPipeState
let dflags = hsc_dflags hsc_env
io $ getOutputFilename stop_phase output_spec
src_basename dflags next_phase maybe_loc
-- | pipeLoop runs phases until we reach the stop phase
pipeLoop :: Phase -> FilePath -> FileHooks -> CompPipeline FilePath
pipeLoop phase input_fn hooks = do
PipeEnv{stop_phase} <- getPipeEnv
PipeState{hsc_env} <- getPipeState
case () of
_ | phase `eqPhase` stop_phase -- All done
-> return input_fn
| not (phase `happensBefore` stop_phase)
-- Something has gone wrong. We'll try to cover all the cases when
-- this could happen, so if we reach here it is a panic.
-- eg. it might happen if the -C flag is used on a source file that
-- has {-# OPTIONS -fasm #-}.
-> panic ("pipeLoop: at phase " ++ show phase ++
" but I wanted to stop at phase " ++ show stop_phase)
| otherwise
-> do io $ debugTraceMsg (hsc_dflags hsc_env) 4
(ptext (sLit "Running phase") <+> ppr phase)
dflags <- getDynFlags
(next_phase, output_fn) <- runPhase phase input_fn dflags hooks
pipeLoop next_phase output_fn hooks
getPipeEnv :: CompPipeline PipeEnv
getPipeEnv = P $ \env state -> return (state, env)
getPipeState :: CompPipeline PipeState
getPipeState = P $ \_env state -> return (state, state)
instance HasDynFlags CompPipeline where
getDynFlags = P $ \_env state -> return (state, hsc_dflags (hsc_env state))
-- | Each phase in the pipeline returns the next phase to execute, and the
-- name of the file in which the output was placed.
--
-- We must do things dynamically this way, because we often don't know
-- what the rest of the phases will be until part-way through the
-- compilation: for example, an {-# OPTIONS -fasm #-} at the beginning
-- of a source file can change the latter stages of the pipeline from
-- taking the via-C route to using the native code generator.
--
runPhase :: Phase -- ^ Run this phase
-> FilePath -- ^ name of the input file
-> DynFlags -- ^ for convenience, we pass the current dflags in
-> FileHooks
-> CompPipeline (Phase, -- next phase to run
FilePath) -- output filename
-- Invariant: the output filename always contains the output
--  Interesting case: Hsc when there is no recompilation to do
-- Then the output filename is still a .o file
-------------------------------------------------------------------------------
-- Unlit phase
runPhase (Unlit sf) input_fn dflags hooks
= do
output_fn <- phaseOutputFilename (Cpp sf)
let unlit_flags = getOpts dflags opt_L
flags = map SysTools.Option unlit_flags ++
[ -- The -h option passes the file name for unlit to
-- put in a #line directive
SysTools.Option "-h"
, SysTools.Option $ escape $ normalise input_fn
, SysTools.FileOption "" input_fn
, SysTools.FileOption "" output_fn
]
io $ SysTools.runUnlit dflags flags
return (Cpp sf, output_fn)
where
-- escape the characters \, ", and ', but don't try to escape
-- Unicode or anything else (so we don't use Util.charToC
-- here). If we get this wrong, then in
-- Coverage.addTicksToBinds where we check that the filename in
-- a SrcLoc is the same as the source filename, the two will
-- look bogusly different. See test:
-- libraries/hpc/tests/function/subdir/tough2.lhs
escape ('\\':cs) = '\\':'\\': escape cs
escape ('\"':cs) = '\\':'\"': escape cs
escape ('\'':cs) = '\\':'\'': escape cs
escape (c:cs) = c : escape cs
escape [] = []
-------------------------------------------------------------------------------
-- Cpp phase : (a) gets OPTIONS out of file
--             (b) runs cpp if necessary
runPhase (Cpp sf) input_fn dflags0 hooks
= do
src_opts <- io $ getOptionsFromFile dflags0 input_fn
(dflags1, unhandled_flags, warns)
<- io $ parseDynamicFilePragma dflags0 src_opts
setDynFlags dflags1
io $ checkProcessArgsResult dflags1 unhandled_flags
if not (xopt Opt_Cpp dflags1) then do
-- we have to be careful to emit warnings only once.
unless (dopt Opt_Pp dflags1) $ io $ handleFlagWarnings dflags1 warns
-- no need to preprocess CPP, just pass input file along
-- to the next phase of the pipeline.
return (HsPp sf, input_fn)
else do
output_fn <- phaseOutputFilename (HsPp sf)
io $ doCpp dflags1 True {- raw -} False {- no CC opts -} input_fn output_fn
-- re-read the pragmas now that we've preprocessed the file
-- See #2464,#3457
src_opts <- io $ getOptionsFromFile dflags0 output_fn
(dflags2, unhandled_flags, warns)
<- io $ parseDynamicFilePragma dflags0 src_opts
io $ checkProcessArgsResult dflags2 unhandled_flags
unless (dopt Opt_Pp dflags2) $ io $ handleFlagWarnings dflags2 warns
-- the HsPp pass below will emit warnings
setDynFlags dflags2
return (HsPp sf, output_fn)
-------------------------------------------------------------------------------
-- HsPp phase
runPhase (HsPp sf) input_fn dflags hooks
= do
if not (dopt Opt_Pp dflags) then
-- no need to preprocess, just pass input file along
-- to the next phase of the pipeline.
return (Hsc sf, input_fn)
else do
let hspp_opts = getOpts dflags opt_F
PipeEnv{src_basename, src_suffix} <- getPipeEnv
let orig_fn = src_basename <.> src_suffix
output_fn <- phaseOutputFilename (Hsc sf)
io $ SysTools.runPp dflags
( [ SysTools.Option orig_fn
, SysTools.Option input_fn
, SysTools.FileOption "" output_fn
] ++
map SysTools.Option hspp_opts
)
-- re-read pragmas now that we've parsed the file (see #3674)
src_opts <- io $ getOptionsFromFile dflags output_fn
(dflags1, unhandled_flags, warns)
<- io $ parseDynamicFilePragma dflags src_opts
setDynFlags dflags1
io $ checkProcessArgsResult dflags1 unhandled_flags
io $ handleFlagWarnings dflags1 warns
return (Hsc sf, output_fn)
-----------------------------------------------------------------------------
-- Hsc phase
-- Compilation of a single module, in "legacy" mode (_not_ under
-- the direction of the compilation manager).
runPhase (Hsc src_flavour) input_fn dflags0 hooks
 = do  -- normal Hsc mode, not mkdependHS
PipeEnv{ stop_phase=stop,
src_basename=basename,
src_suffix=suff } <- getPipeEnv
-- we add the current directory (i.e. the directory in which
-- the .hs files resides) to the include path, since this is
-- what gcc does, and it's probably what you want.
let current_dir = takeDirectory basename
paths = includePaths dflags0
dflags = dflags0 { includePaths = current_dir : paths }
setDynFlags dflags
-- gather the imports and module name
(hspp_buf,mod_name,imps,src_imps) <- io $
case src_flavour of
ExtCoreFile -> do  -- no explicit imports in ExtCore input.
m <- getCoreModuleName input_fn
return (Nothing, mkModuleName m, [], [])
_ -> do
buf <- hGetStringBuffer input_fn
(src_imps,imps,L _ mod_name) <- getImports dflags buf input_fn (basename <.> suff)
return (Just buf, mod_name, imps, src_imps)
-- Build a ModLocation to pass to hscMain.
-- The source filename is rather irrelevant by now, but it's used
-- by hscMain for messages. hscMain also needs
-- the .hi and .o filenames, and this is as good a way
-- as any to generate them, and better than most. (e.g. takes
-- into account the -osuf flags)
location1 <- io $ mkHomeModLocation2 dflags mod_name basename suff
-- Boot-ify it if necessary
let location2 | isHsBoot src_flavour = addBootSuffixLocn location1
| otherwise = location1
-- Take -ohi into account if present
-- This can't be done in mkHomeModuleLocation because
-- it only applies to the module being compiles
let ohi = outputHi dflags
location3 | Just fn <- ohi = location2{ ml_hi_file = fn }
| otherwise = location2
-- Take -o into account if present
-- Very like -ohi, but we must *only* do this if we aren't linking
-- (If we're linking then the -o applies to the linked thing, not to
-- the object file for one module.)
-- Note the nasty duplication with the same computation in compileFile above
let expl_o_file = outputFile dflags
location4 | Just ofile <- expl_o_file
, isNoLink (ghcLink dflags)
= location3 { ml_obj_file = ofile }
| otherwise = location3
o_file = ml_obj_file location4 -- The real object file
setModLocation location4
-- Figure out if the source has changed, for recompilation avoidance.
--
-- Setting source_unchanged to True means that M.o seems
-- to be up to date wrt M.hs; so no need to recompile unless imports have
-- changed (which the compiler itself figures out).
-- Setting source_unchanged to False tells the compiler that M.o is out of
-- date wrt M.hs (or M.o doesn't exist) so we must recompile regardless.
src_timestamp <- io $ getModificationUTCTime (basename <.> suff)
let hsc_lang = hscTarget dflags
source_unchanged <- io $
if not (isStopLn stop)
-- SourceModified unconditionally if
-- (a) recompilation checker is off, or
-- (b) we aren't going all the way to .o file (e.g. ghc -S)
then return SourceModified
-- Otherwise look at file modification dates
else do o_file_exists <- doesFileExist o_file
if not o_file_exists
then return SourceModified -- Need to recompile
else do t2 <- getModificationUTCTime o_file
if t2 > src_timestamp
then return SourceUnmodified
else return SourceModified
-- get the DynFlags
let next_phase =
hookPostBackendPhase hooks hscPostBackendPhase
dflags src_flavour hsc_lang
output_fn <- phaseOutputFilename next_phase
let dflags' = dflags { hscTarget = hsc_lang,
hscOutName = output_fn,
extCoreName = basename ++ ".hcr" }
setDynFlags dflags'
PipeState{hsc_env=hsc_env'} <- getPipeState
-- Tell the finder cache about this module
mod <- io $ addHomeModuleToFinder hsc_env' mod_name location4
-- Make the ModSummary to hand to hscMain
let
mod_summary = ModSummary { ms_mod = mod,
ms_hsc_src = src_flavour,
ms_hspp_file = input_fn,
ms_hspp_opts = dflags,
ms_hspp_buf = hspp_buf,
ms_location = location4,
ms_hs_date = src_timestamp,
ms_obj_date = Nothing,
ms_textual_imps = imps,
ms_srcimps = src_imps }
-- run the compiler!
result <- io $ hscCompileOneShot hooks hsc_env'
mod_summary source_unchanged
Nothing -- No iface
Nothing -- No "module i of n" progress info
case result of
HscNoRecomp
-> do io $ touchObjectFile dflags' o_file
-- The .o file must have a later modification date
-- than the source file (else we wouldn't be in HscNoRecomp)
-- but we touch it anyway, to keep 'make' happy (we think).
return (StopLn, o_file)
(HscRecomp hasStub _)
-> do case hasStub of
Nothing -> return ()
Just stub_c ->
do stub_o <- io $ compileStub hsc_env' hooks stub_c
setStubO stub_o
-- In the case of hs-boot files, generate a dummy .o-boot
-- stamp file for the benefit of Make
when (isHsBoot src_flavour) $
io $ touchObjectFile dflags' o_file
return (next_phase, output_fn)
-----------------------------------------------------------------------------
-- Cmm phase
runPhase CmmCpp input_fn dflags hooks
= do
output_fn <- phaseOutputFilename Cmm
io $ doCpp dflags False {- not raw -} True {- include CC opts -}
input_fn output_fn
return (Cmm, output_fn)
runPhase Cmm input_fn dflags hooks
= do
PipeEnv{src_basename} <- getPipeEnv
let hsc_lang = hscTarget dflags
let next_phase =
hookPostBackendPhase hooks hscPostBackendPhase
dflags HsSrcFile hsc_lang
output_fn <- phaseOutputFilename next_phase
let dflags' = dflags { hscTarget = hsc_lang,
hscOutName = output_fn,
extCoreName = src_basename ++ ".hcr" }
setDynFlags dflags'
PipeState{hsc_env} <- getPipeState
io $ hscCompileCmmFile hsc_env input_fn
return (next_phase, output_fn)
-----------------------------------------------------------------------------
-- Cc phase
-- we don't support preprocessing .c files (with -E) now. Doing so introduces
-- way too many hacks, and I can't say I've ever used it anyway.
runPhase cc_phase input_fn dflags hooks
| any (cc_phase `eqPhase`) [Cc, Ccpp, HCc, Cobjc, Cobjcpp]
= do
let platform = targetPlatform dflags
cc_opts = getOpts dflags opt_c
hcc = cc_phase `eqPhase` HCc
let cmdline_include_paths = includePaths dflags
-- HC files have the dependent packages stamped into them
pkgs <- if hcc then io $ getHCFilePackages input_fn else return []
-- add package include paths even if we're just compiling .c
-- files; this is the Value Add(TM) that using ghc instead of
-- gcc gives you :)
pkg_include_dirs <- io $ getPackageIncludePath dflags pkgs
let include_paths = foldr (\ x xs -> "-I" : x : xs) []
(cmdline_include_paths ++ pkg_include_dirs)
let gcc_extra_viac_flags = extraGccViaCFlags dflags
let pic_c_flags = picCCOpts dflags
let verbFlags = getVerbFlags dflags
-- cc-options are not passed when compiling .hc files. Our
-- hc code doesn't #include any header files anyway, so these
-- options aren't necessary.
pkg_extra_cc_opts <- io $
if cc_phase `eqPhase` HCc
then return []
else getPackageExtraCcOpts dflags pkgs
framework_paths <-
case platformOS platform of
OSDarwin ->
do pkgFrameworkPaths <- io $ getPackageFrameworkPath dflags pkgs
let cmdlineFrameworkPaths = frameworkPaths dflags
return $ map ("-F"++)
(cmdlineFrameworkPaths ++ pkgFrameworkPaths)
_ ->
return []
let split_objs = dopt Opt_SplitObjs dflags
split_opt | hcc && split_objs = [ "-DUSE_SPLIT_MARKERS" ]
| otherwise = [ ]
let cc_opt | optLevel dflags >= 2 = "-O2"
| otherwise = "-O"
-- Decide next phase
let next_phase = As
output_fn <- phaseOutputFilename next_phase
let
more_hcc_opts =
-- on x86 the floating point regs have greater precision
-- than a double, which leads to unpredictable results.
-- By default, we turn this off with -ffloat-store unless
-- the user specified -fexcess-precision.
(if platformArch platform == ArchX86 &&
not (dopt Opt_ExcessPrecision dflags)
then [ "-ffloat-store" ]
else []) ++
-- gcc's -fstrict-aliasing allows two accesses to memory
-- to be considered non-aliasing if they have different types.
-- This interacts badly with the C code we generate, which is
-- very weakly typed, being derived from C--.
["-fno-strict-aliasing"]
let gcc_lang_opt | cc_phase `eqPhase` Ccpp = "c++"
| cc_phase `eqPhase` Cobjc = "objective-c"
| cc_phase `eqPhase` Cobjcpp = "objective-c++"
| otherwise = "c"
io $ SysTools.runCc dflags (
-- force the C compiler to interpret this file as C when
-- compiling .hc files, by adding the -x c option.
-- Also useful for plain .c files, just in case GHC saw a
-- -x c option.
[ SysTools.Option "-x", SysTools.Option gcc_lang_opt
, SysTools.FileOption "" input_fn
, SysTools.Option "-o"
, SysTools.FileOption "" output_fn
]
++ map SysTools.Option (
pic_c_flags
-- Stub files generated for foreign exports reference the runIO_closure
-- and runNonIO_closure symbols, which are defined in the base package.
-- These symbols are imported into the stub.c file via RtsAPI.h, and the
-- way we do the import depends on whether we're currently compiling
-- the base package or not.
++ (if platformOS platform == OSMinGW32 &&
thisPackage dflags == basePackageId
then [ "-DCOMPILING_BASE_PACKAGE" ]
else [])
-- We only support SparcV9 and better because V8 lacks an atomic CAS
-- instruction. Note that the user can still override this
-- (e.g., -mcpu=ultrasparc) as GCC picks the "best" -mcpu flag
-- regardless of the ordering.
--
-- This is a temporary hack.
++ (if platformArch platform == ArchSPARC
then ["-mcpu=v9"]
else [])
-- GCC 4.6+ doesn't like -Wimplicit when compiling C++.
++ (if (cc_phase /= Ccpp && cc_phase /= Cobjcpp)
then ["-Wimplicit"]
else [])
++ (if hcc
then gcc_extra_viac_flags ++ more_hcc_opts
else [])
++ verbFlags
++ [ "-S", cc_opt ]
++ [ "-D__GLASGOW_HASKELL__="++cProjectVersionInt ]
++ framework_paths
++ cc_opts
++ split_opt
++ include_paths
++ pkg_extra_cc_opts
))
return (next_phase, output_fn)
-----------------------------------------------------------------------------
-- Splitting phase
runPhase Splitter input_fn dflags hooks
= do -- tmp_pfx is the prefix used for the split .s files
split_s_prefix <- io $ SysTools.newTempName dflags "split"
let n_files_fn = split_s_prefix
io $ SysTools.runSplit dflags
[ SysTools.FileOption "" input_fn
, SysTools.FileOption "" split_s_prefix
, SysTools.FileOption "" n_files_fn
]
-- Save the number of split files for future references
s <- io $ readFile n_files_fn
let n_files = read s :: Int
dflags' = dflags { splitInfo = Just (split_s_prefix, n_files) }
setDynFlags dflags'
-- Remember to delete all these files
io $ addFilesToClean dflags' [ split_s_prefix ++ "__" ++ show n ++ ".s"
| n <- [1..n_files]]
return (SplitAs,
"**splitmarker**")  -- we don't use the filename in SplitAs
-----------------------------------------------------------------------------
-- As, SplitAs phase : Assembler
-- This is for calling the assembler on a regular assembly file (not split).
runPhase As input_fn dflags hooks
= do
-- LLVM from version 3.0 onwards doesn't support the OS X system
-- assembler, so we use clang as the assembler instead. (#5636)
let whichAsProg | hscTarget dflags == HscLlvm &&
platformOS (targetPlatform dflags) == OSDarwin
= do
llvmVer <- io $ figureLlvmVersion dflags
return $ case llvmVer of
-- using cGccLinkerOpts here but not clear if
-- opt_c isn't a better choice
Just n | n >= 30 ->
(SysTools.runClang, cGccLinkerOpts)
_ -> (SysTools.runAs, getOpts dflags opt_a)
| otherwise
= return (SysTools.runAs, getOpts dflags opt_a)
(as_prog, as_opts) <- whichAsProg
let cmdline_include_paths = includePaths dflags
next_phase <- maybeMergeStub
output_fn <- phaseOutputFilename next_phase
-- we create directories for the object file, because it
-- might be a hierarchical module.
io $ createDirectoryIfMissing True (takeDirectory output_fn)
io $ as_prog dflags
(map SysTools.Option as_opts
++ [ SysTools.Option ("-I" ++ p) | p <- cmdline_include_paths ]
-- We only support SparcV9 and better because V8 lacks an atomic CAS
-- instruction so we have to make sure that the assembler accepts the
-- instruction set. Note that the user can still override this
-- (e.g., -mcpu=ultrasparc). GCC picks the "best" -mcpu flag
-- regardless of the ordering.
--
-- This is a temporary hack.
++ (if platformArch (targetPlatform dflags) == ArchSPARC
then [SysTools.Option "-mcpu=v9"]
else [])
++ [ SysTools.Option "-c"
, SysTools.FileOption "" input_fn
, SysTools.Option "-o"
, SysTools.FileOption "" output_fn
])
return (next_phase, output_fn)
-- This is for calling the assembler on a split assembly file (so a collection
-- of assembly files)
runPhase SplitAs _input_fn dflags hooks
= do
-- we'll handle the stub_o file in this phase, so don't MergeStub,
-- just jump straight to StopLn afterwards.
let next_phase = StopLn
output_fn <- phaseOutputFilename next_phase
let base_o = dropExtension output_fn
osuf = objectSuf dflags
split_odir = base_o ++ "_" ++ osuf ++ "_split"
io $ createDirectoryIfMissing True split_odir
-- remove M_split/ *.o, because we're going to archive M_split/ *.o
-- later and we don't want to pick up any old objects.
fs <- io $ getDirectoryContents split_odir
io $ mapM_ removeFile $
map (split_odir </>) $ filter (osuf `isSuffixOf`) fs
let as_opts = getOpts dflags opt_a
let (split_s_prefix, n) = case splitInfo dflags of
Nothing -> panic "No split info"
Just x -> x
let split_s n = split_s_prefix ++ "__" ++ show n <.> "s"
split_obj :: Int -> FilePath
split_obj n = split_odir </>
takeFileName base_o ++ "__" ++ show n <.> osuf
let assemble_file n
= SysTools.runAs dflags
(map SysTools.Option as_opts ++
-- We only support SparcV9 and better because V8 lacks an atomic CAS
-- instruction so we have to make sure that the assembler accepts the
-- instruction set. Note that the user can still override this
-- (e.g., -mcpu=ultrasparc). GCC picks the "best" -mcpu flag
-- regardless of the ordering.
--
-- This is a temporary hack.
(if platformArch (targetPlatform dflags) == ArchSPARC
then [SysTools.Option "-mcpu=v9"]
else []) ++
[ SysTools.Option "-c"
, SysTools.Option "-o"
, SysTools.FileOption "" (split_obj n)
, SysTools.FileOption "" (split_s n)
])
io $ mapM_ assemble_file [1..n]
-- Note [pipeline-split-init]
-- If we have a stub file, it may contain constructor
-- functions for initialisation of this module. We can't
-- simply leave the stub as a separate object file, because it
-- will never be linked in: nothing refers to it. We need to
-- ensure that if we ever refer to the data in this module
-- that needs initialisation, then we also pull in the
-- initialisation routine.
--
-- To that end, we make a DANGEROUS ASSUMPTION here: the data
-- that needs to be initialised is all in the FIRST split
-- object.  See Note [codegen-split-init].
PipeState{maybe_stub_o} <- getPipeState
case maybe_stub_o of
Nothing -> return ()
Just stub_o -> io $ do
tmp_split_1 <- newTempName dflags osuf
let split_1 = split_obj 1
copyFile split_1 tmp_split_1
removeFile split_1
joinObjectFiles dflags [tmp_split_1, stub_o] split_1
-- join them into a single .o file
io $ joinObjectFiles dflags (map split_obj [1..n]) output_fn
return (next_phase, output_fn)
-----------------------------------------------------------------------------
-- LlvmOpt phase
runPhase LlvmOpt input_fn dflags hooks
= do
ver <- io $ readIORef (llvmVersion dflags)
let lo_opts = getOpts dflags opt_lo
opt_lvl = max 0 (min 2 $ optLevel dflags)
-- don't specify anything if user has specified commands. We do this
-- for opt but not llc since opt is very specifically for optimisation
-- passes only, so if the user is passing us extra options we assume
-- they know what they are doing and don't get in the way.
optFlag = if null lo_opts
then [SysTools.Option (llvmOpts !! opt_lvl)]
else []
tbaa | ver < 29                 = "" -- no tbaa in 2.8 and earlier
| dopt Opt_LlvmTBAA dflags = "--enable-tbaa=true"
| otherwise = "--enable-tbaa=false"
output_fn <- phaseOutputFilename LlvmLlc
io $ SysTools.runLlvmOpt dflags
([ SysTools.FileOption "" input_fn,
SysTools.Option "-o",
SysTools.FileOption "" output_fn]
++ optFlag
++ [SysTools.Option tbaa]
++ map SysTools.Option lo_opts)
return (LlvmLlc, output_fn)
where
-- we always (unless -optlo specified) run Opt since we rely on it to
-- fix up some pretty big deficiencies in the code we generate
llvmOpts = ["-mem2reg", "-O1", "-O2"]
-----------------------------------------------------------------------------
-- LlvmLlc phase
runPhase LlvmLlc input_fn dflags hooks
= do
ver <- io $ readIORef (llvmVersion dflags)
let lc_opts = getOpts dflags opt_lc
opt_lvl = max 0 (min 2 $ optLevel dflags)
rmodel | opt_PIC = "pic"
| not opt_Static = "dynamic-no-pic"
| otherwise = "static"
tbaa | ver < 29                 = "" -- no tbaa in 2.8 and earlier
| dopt Opt_LlvmTBAA dflags = "--enable-tbaa=true"
| otherwise = "--enable-tbaa=false"
-- hidden debugging flag '-dno-llvm-mangler' to skip mangling
let next_phase = case dopt Opt_NoLlvmMangler dflags of
False -> LlvmMangle
True | dopt Opt_SplitObjs dflags -> Splitter
True -> As
output_fn <- phaseOutputFilename next_phase
io $ SysTools.runLlvmLlc dflags
([ SysTools.Option (llvmOpts !! opt_lvl),
SysTools.Option $ "-relocation-model=" ++ rmodel,
SysTools.FileOption "" input_fn,
SysTools.Option "-o", SysTools.FileOption "" output_fn]
++ map SysTools.Option lc_opts
++ [SysTools.Option tbaa]
++ map SysTools.Option fpOpts
++ map SysTools.Option abiOpts)
return (next_phase, output_fn)
where
-- Bug in LLVM at O3 on OSX.
llvmOpts = if platformOS (targetPlatform dflags) == OSDarwin
then ["-O1", "-O2", "-O2"]
else ["-O1", "-O2", "-O3"]
-- On ARMv7 using LLVM, LLVM fails to allocate floating point registers
-- while compiling GHC source code. It's probably due to the fact that it
-- does not enable VFP by default. Let's do this manually here
fpOpts = case platformArch (targetPlatform dflags) of
ArchARM ARMv7 ext _ -> if (elem VFPv3 ext)
then ["-mattr=+v7,+vfp3"]
else if (elem VFPv3D16 ext)
then ["-mattr=+v7,+vfp3,+d16"]
else []
_ -> []
-- On Ubuntu/Debian with ARM hard float ABI, LLVM's llc still
-- compiles into soft-float ABI. We need to explicitly set abi
-- to hard
abiOpts = case platformArch (targetPlatform dflags) of
ArchARM ARMv7 _ HARD -> ["-float-abi=hard"]
ArchARM ARMv7 _ _ -> []
_ -> []
-----------------------------------------------------------------------------
-- LlvmMangle phase
runPhase LlvmMangle input_fn dflags hooks
= do
let next_phase = if dopt Opt_SplitObjs dflags then Splitter else As
output_fn <- phaseOutputFilename next_phase
io $ llvmFixupAsm dflags input_fn output_fn
return (next_phase, output_fn)
-----------------------------------------------------------------------------
-- merge in stub objects
runPhase MergeStub input_fn dflags hooks
= do
PipeState{maybe_stub_o} <- getPipeState
output_fn <- phaseOutputFilename StopLn
case maybe_stub_o of
Nothing ->
panic "runPhase(MergeStub): no stub"
Just stub_o -> do
io $ joinObjectFiles dflags [input_fn, stub_o] output_fn
return (StopLn, output_fn)
-- warning suppression
runPhase other _input_fn _dflags _hooks =
panic ("runPhase: don't know how to run phase " ++ show other)
------------------------------------------------------------------------
maybeMergeStub :: CompPipeline Phase
maybeMergeStub = do
PipeState{maybe_stub_o} <- getPipeState
if isJust maybe_stub_o then return MergeStub else return StopLn
------------------------------------------------------------------------
-- stub .h and .c files (for foreign export support)
-- The _stub.c file is derived from the haskell source file, possibly taking
-- into account the -stubdir option.
--
-- The object file created by compiling the _stub.c file is put into a
-- temporary file, which will be later combined with the main .o file
-- (see the MergeStubs phase).
compileStub :: HscEnv -> FileHooks -> FilePath -> IO FilePath
compileStub hsc_env hooks stub_c = do
(_, stub_o) <- runPipeline StopLn hsc_env (stub_c,Nothing) Nothing
Temporary Nothing {- no ModLocation -} Nothing hooks
return stub_o
------------------------------------------------------------------------
-- Look for the /* GHC_PACKAGES ... */ comment at the top of a .hc file
getHCFilePackages :: FilePath -> IO [PackageId]
getHCFilePackages filename =
Exception.bracket (openFile filename ReadMode) hClose $ \h -> do
l <- hGetLine h
case l of
'/':'*':' ':'G':'H':'C':'_':'P':'A':'C':'K':'A':'G':'E':'S':rest ->
return (map stringToPackageId (words rest))
_other ->
return []
------------------------------------------------------------------------
doCpp :: DynFlags -> Bool -> Bool -> FilePath -> FilePath -> IO ()
doCpp dflags raw include_cc_opts input_fn output_fn = do
let hscpp_opts = getOpts dflags opt_P
let cmdline_include_paths = includePaths dflags
pkg_include_dirs <- getPackageIncludePath dflags []
let include_paths = foldr (\ x xs -> "-I" : x : xs) []
(cmdline_include_paths ++ pkg_include_dirs)
let verbFlags = getVerbFlags dflags
let cc_opts
| include_cc_opts = getOpts dflags opt_c
| otherwise = []
let cpp_prog args | raw = SysTools.runCpp dflags args
| otherwise = SysTools.runCc dflags (SysTools.Option "-E" : args)
let target_defs =
[ "-D" ++ HOST_OS ++ "_BUILD_OS=1",
"-D" ++ HOST_ARCH ++ "_BUILD_ARCH=1",
"-D" ++ TARGET_OS ++ "_HOST_OS=1",
"-D" ++ TARGET_ARCH ++ "_HOST_ARCH=1" ]
-- remember, in code we *compile*, the HOST is the same as our TARGET,
-- and BUILD is the same as our HOST.
cpp_prog ( map SysTools.Option verbFlags
++ map SysTools.Option include_paths
++ map SysTools.Option hsSourceCppOpts
++ map SysTools.Option target_defs
++ map SysTools.Option hscpp_opts
++ map SysTools.Option cc_opts
++ [ SysTools.Option "-x"
, SysTools.Option "c"
, SysTools.Option input_fn
-- We hackily use Option instead of FileOption here, so that the file
-- name is not back-slashed on Windows.  cpp is capable of
-- dealing with / in filenames, so it works fine. Furthermore
-- if we put in backslashes, cpp outputs #line directives
-- with *double* backslashes. And that in turn means that
-- our error messages get double backslashes in them.
-- In due course we should arrange that the lexer deals
-- with these \\ escapes properly.
, SysTools.Option "-o"
, SysTools.FileOption "" output_fn
])
hsSourceCppOpts :: [String]
-- Default CPP defines in Haskell source
hsSourceCppOpts =
[ "-D__GLASGOW_HASKELL__="++cProjectVersionInt ]
------------------------------------------------------------------------
-- | What phase to run after one of the backend code generators has run
hscPostBackendPhase :: DynFlags -> HscSource -> HscTarget -> Phase
hscPostBackendPhase _ HsBootFile _ = StopLn
hscPostBackendPhase dflags _ hsc_lang =
case hsc_lang of
HscC -> HCc
HscAsm | dopt Opt_SplitObjs dflags -> Splitter
| otherwise -> As
HscLlvm -> LlvmOpt
HscNothing -> StopLn
HscInterpreted -> StopLn
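-- For instance (illustrative, matching the cases above): with -fasm and
-- -split-objs off, hscPostBackendPhase dflags HsSrcFile HscAsm is As; with
-- -fllvm it is LlvmOpt; and a .hs-boot file always maps straight to StopLn.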
touchObjectFile :: DynFlags -> FilePath -> IO ()
touchObjectFile dflags path = do
createDirectoryIfMissing True $ takeDirectory path
SysTools.touch dflags "Touching object file" path
haveRtsOptsFlags :: DynFlags -> Bool
haveRtsOptsFlags dflags =
isJust (rtsOpts dflags) || case rtsOptsEnabled dflags of
RtsOptsSafeOnly -> False
_ -> True
-- ---------------------------------------------------------------------------
-- join object files into a single relocatable object file, using ld -r
joinObjectFiles :: DynFlags -> [FilePath] -> FilePath -> IO ()
joinObjectFiles dflags o_files output_fn = do
let ldIsGnuLd = cLdIsGNULd == "YES"
ld_r args = SysTools.runLink dflags ([
SysTools.Option "-nostdlib",
SysTools.Option "-nodefaultlibs",
SysTools.Option "-Wl,-r"
]
-- gcc on sparc sets -Wl,--relax implicitly, but
-- -r and --relax are incompatible for ld, so
-- disable --relax explicitly.
++ (if platformArch (targetPlatform dflags) == ArchSPARC
&& ldIsGnuLd
then [SysTools.Option "-Wl,-no-relax"]
else [])
++ [
SysTools.Option ld_build_id,
-- SysTools.Option ld_x_flag,
SysTools.Option "-o",
SysTools.FileOption "" output_fn ]
++ args)
-- Do *not* add the -x flag to ld, because we want to keep those
-- local symbols around for the benefit of external tools. e.g.
-- the 'perf report' output is much less useful if all the local
-- symbols have been stripped out.
--
-- ld_x_flag | null cLD_X = ""
-- | otherwise = "-Wl,-x"
-- suppress the generation of the .note.gnu.build-id section,
-- which we don't need and sometimes causes ld to emit a
-- warning:
ld_build_id | cLdHasBuildId == "YES" = "-Wl,--build-id=none"
| otherwise = ""
if ldIsGnuLd
then do
script <- newTempName dflags "ldscript"
writeFile script $ "INPUT(" ++ unwords o_files ++ ")"
ld_r [SysTools.FileOption "" script]
else do
ld_r (map (SysTools.FileOption "") o_files)
| null |
https://raw.githubusercontent.com/nominolo/lambdachine/49d97cf7a367a650ab421f7aa19feb90bfe14731/compiler/Ghc/Api/V76.hs
|
haskell
|
import HscMain
Set by -c or -no-link
When linking, the -o argument refers to the linker's output.
otherwise, we use it as the name for the pipeline's output.
-o foo applies to linker
-o foo applies to the file we are compiling now
^ Output should be to a temporary file: we're going to
run more compilation steps on this output later.
^ We want a persistent file, i.e. a file in the current directory
derived from the input filename, but with the appropriate extension.
^ The output must go into the specified file.
^ When to stop
^ Compilation environment
^ Input filename (and maybe -x suffix)
^ original basename (if different from input name)
^ Output filename
^ (final flags, output filename)
strip off the .
Decide where dump files should go based on the pipeline output
If we were given a -x flag, then use that phase to start from
We want to catch cases of "you can't get there from here" before
we start the pipeline, because otherwise it will just run off the
end.
There is a partial ordering on phases, where A < B iff A occurs
before B in a normal compilation pipeline.
this is a function which will be used to calculate output file names
as we go along (we partially apply it to some of its inputs here)
Execute the pipeline...
Sometimes, a compilation phase doesn't actually generate any output
stage, but we wanted to keep the output, then we have to explicitly
copy the file, remembering to prepend a {-# LINE #-} pragma so that
further compilation stages can tell what the original filename was.
-----------------------------------------------------------------------------
The pipeline uses a monad to carry around various bits of information
^ Stop just before this phase
^ basename of original input source
^ its extension
^ says where to put the pipeline output
at various points, for example when we read the OPTIONS_GHC
object was created. The stub object will be joined with
the main compilation object using "ld -r" at the end.
----------------------------------------------------------------------
In each phase, we need to know into what filename to generate the
output. All the logic about which filenames we generate output
into is embodied in the following function.
next phase
sometimes, we keep output from intermediate stages
persistent object files get put in odir
next phase
| pipeLoop runs phases until we reach the stop phase
All done
Something has gone wrong. We'll try to cover all the cases when
this could happen, so if we reach here it is a panic.
eg. it might happen if the -C flag is used on a source file that
has {-# OPTIONS -fasm #-}.
| Each phase in the pipeline returns the next phase to execute, and the
name of the file in which the output was placed.
We must do things dynamically this way, because we often don't know
what the rest of the phases will be until part-way through the
compilation: for example, an {-# OPTIONS -fasm #-} at the beginning
of a source file can change the latter stages of the pipeline from
taking the via-C route to using the native code generator.
^ Run this phase
^ name of the input file
next phase to run
output filename
Invariant: the output filename always contains the output
Then the output filename is still a .o file
-----------------------------------------------------------------------------
The -h option passes the file name for unlit to
escape the characters \, ", and ', but don't try to escape
here). If we get this wrong, then in
Coverage.addTicksToBinds where we check that the filename in
look bogusly different. See test:
libraries/hpc/tests/function/subdir/tough2.lhs
-----------------------------------------------------------------------------
we have to be careful to emit warnings only once.
to the next phase of the pipeline.
re-read the pragmas now that we've preprocessed the file
-----------------------------------------------------------------------------
no need to preprocess, just pass input file along
to the next phase of the pipeline.
---------------------------------------------------------------------------
Compilation of a single module, in "legacy" mode (_not_ under
the direction of the compilation manager).
we add the current directory (i.e. the directory in which
the .hs files resides) to the include path, since this is
what gcc does, and it's probably what you want.
gather the imports and module name
The source filename is rather irrelevant by now, but it's used
by hscMain for messages. hscMain also needs
as any to generate them, and better than most. (e.g. takes
into accout the -osuf flags)
Boot-ify it if necessary
Take -ohi into account if present
This can't be done in mkHomeModuleLocation because
it only applies to the module being compiles
Take -o into account if present
Very like -ohi, but we must *only* do this if we aren't linking
(If we're linking then the -o applies to the linked thing, not to
Note the nasty duplication with the same computation in compileFile above
The real object file
Figure out if the source has changed, for recompilation avoidance.
Setting source_unchanged to True means that M.o seems
changed (which the compiler itself figures out).
Setting source_unchanged to False tells the compiler that M.o is out of
(a) recompilation checker is off, or
(b) we aren't going all the way to .o file (e.g. ghc -S)
Otherwise look at file modification dates
Need to recompile
Tell the finder cache about this module
run the compiler!
No iface
No "module i of n" progress info
The .o file must have a later modification date
but we touch it anyway, to keep 'make' happy (we think).
In the case of hs-boot files, generate a dummy .o-boot
stamp file for the benefit of Make
---------------------------------------------------------------------------
---------------------------------------------------------------------------
Cc phase
we don't support preprocessing .c files (with -E) now. Doing so introduces
way too many hacks, and I can't say I've ever used it anyway.
add package include paths even if we're just compiling .c
gcc gives you :)
cc-options are not passed when compiling .hc files. Our
hc code doesn't not #include any header files anyway, so these
options aren't necessary.
Decide next phase
on x86 the floating point regs have greater precision
than a double, which leads to unpredictable results.
By default, we turn this off with -ffloat-store unless
the user specified -fexcess-precision.
to be considered non-aliasing if they have different types.
This interacts badly with the C code we generate, which is
very weakly typed, being derived from C--.
force the C compiler to interpret this file as C when
compiling .hc files, by adding the -x c option.
-x c option.
Stub files generated for foreign exports references the runIO_closure
and runNonIO_closure symbols, which are defined in the base package.
way we do the import depends on whether we're currently compiling
the base package or not.
instruction. Note that the user can still override this
regardless of the ordering.
This is a temporary hack.
---------------------------------------------------------------------------
Splitting phase
tmp_pfx is the prefix used for the split .s files
Save the number of split files for future references
Remember to delete all these files
---------------------------------------------------------------------------
As, SpitAs phase : Assembler
This is for calling the assembler on a regular assembly file (not split).
using cGccLinkerOpts here but not clear if
opt_c isn't a better choice
we create directories for the object file, because it
might be a hierarchical module.
instruction so we have to make sure that the assembler accepts the
instruction set. Note that the user can still override this
regardless of the ordering.
This is a temporary hack.
This is for calling the assembler on a split assembly file (so a collection
of assembly files)
just jump straight to StopLn afterwards.
remove M_split/ *.o, because we're going to archive M_split/ *.o
later and we don't want to pick up any old objects.
instruction so we have to make sure that the assembler accepts the
instruction set. Note that the user can still override this
regardless of the ordering.
This is a temporary hack.
Note [pipeline-split-init]
If we have a stub file, it may contain constructor
functions for initialisation of this module. We can't
simply leave the stub as a separate object file, because it
will never be linked in: nothing refers to it. We need to
ensure that if we ever refer to the data in this module
that needs initialisation, then we also pull in the
initialisation routine.
To that end, we make a DANGEROUS ASSUMPTION here: the data
join them into a single .o file
---------------------------------------------------------------------------
LlvmOpt phase
don't specify anything if user has specified commands. We do this
passes only, so if the user is passing us extra options we assume
they know what they are doing and don't get in the way.
we always (unless -optlo specified) run Opt since we rely on it to
fix up some pretty big deficiencies in the code we generate
---------------------------------------------------------------------------
hidden debugging flag '-dno-llvm-mangler' to skip mangling
to hard
---------------------------------------------------------------------------
---------------------------------------------------------------------------
merge in stub objects
warning suppression
----------------------------------------------------------------------
----------------------------------------------------------------------
The _stub.c file is derived from the haskell source file, possibly taking
The object file created by compiling the _stub.c file is put into a
temporary file, which will be later combined with the main .o file
----------------------------------------------------------------------
Look for the /* GHC_PACKAGES ... */ comment at the top of a .hc file
----------------------------------------------------------------------
and BUILD is the same as our HOST.
dealing with / in filenames, so it works fine. Furthermore
if we put in backslashes, cpp outputs #line directives
with *double* backslashes. And that in turn means that
our error messages get double backslashes in them.
In due course we should arrange that the lexer deals
with these \\ escapes properly.
----------------------------------------------------------------------
---------------------------------------------------------------------------
join object files into a single relocatable object file, using ld -r
gcc on sparc sets -Wl,--relax implicitly , but
-r and --relax are incompatible for ld, so
disable --relax explicitly.
Do *not* add the -x flag to ld, because we want to keep those
local symbols around for the benefit of external tools. e.g.
the 'perf report' output is much less useful if all the local
symbols have been stripped out.
| otherwise = "-Wl,-x"
suppress the generation of the .note.gnu.build-id section,
which we don't need and sometimes causes ld to emit a
warning:
|
# LANGUAGE CPP #
# LANGUAGE NamedFieldPuns #
# LANGUAGE PatternGuards #
module Ghc.Api.V76
( module Ghc.Api.V76
, Phase(..)
, ModDetails
)
where
import HscTypes
import DriverPhases
import Ghc.Api.V76Hsc
import ErrUtils
import Panic
import DynFlags
import Module
import SysTools
import Outputable
import FastString
import LlvmCodeGen ( llvmFixupAsm )
import HeaderInfo
import Platform
import ParserCoreUtils ( getCoreModuleName )
import StringBuffer ( hGetStringBuffer )
import SrcLoc
import Finder
import Util
import Packages
import Exception
import Config
import StaticFlags
import System.IO
import System.Directory
import Control.Monad ( when, unless )
import Data.Maybe
import System.FilePath
import Data.IORef
import Data.List
compileOneShot :: HscEnv -> FileHooks
-> Phase -> [(FilePath, Maybe Phase)] -> IO ()
compileOneShot hsc_env hooks stop_phase srcs = do
outputFiles <- mapM (compileFile hsc_env hooks stop_phase) srcs
return ()
#include "ghcplatform.h"
compileFile :: HscEnv -> FileHooks -> Phase -> (FilePath, Maybe Phase)
-> IO FilePath
compileFile hsc_env hooks stop_phase (src, mb_phase) = do
exists <- doesFileExist src
when (not exists) $
ghcError (CmdLineError ("does not exist: " ++ src))
let
dflags = hsc_dflags hsc_env
split = dopt Opt_SplitObjs dflags
mb_o_file = outputFile dflags
ghc_link = ghcLink dflags
output
| StopLn <- stop_phase, not (isNoLink ghc_link) = Persistent
| Just o_file <- mb_o_file = SpecificFile o_file
| otherwise = Persistent
stop_phase' = case stop_phase of
As | split -> SplitAs
_ -> stop_phase
( _, out_file) <- runPipeline stop_phase' hsc_env
(src, mb_phase) Nothing output
Nothing Nothing hooks
no ModLocation
return out_file
data PipelineOutput
= Temporary
| Persistent
eg . in " ghc -c Foo.hs " the output goes into ./Foo.o .
| SpecificFile FilePath
^ A ModLocation , if this is a Haskell module
^ stub object , if we have one
runPipeline
:: Phase -> HscEnv -> (FilePath, Maybe Phase) -> Maybe FilePath
-> PipelineOutput -> Maybe ModLocation -> Maybe FilePath
-> FileHooks
-> IO (DynFlags, FilePath)
runPipeline stop_phase hsc_env0 (input_fn, mb_phase)
mb_basename output maybe_loc maybe_stub_o hooks = do
let dflags0 = hsc_dflags hsc_env0
(input_basename, suffix) = splitExtension input_fn
suffix' = drop 1 suffix
basename | Just b <- mb_basename = b
| otherwise = input_basename
dflags = dflags0 { dumpPrefix = Just (basename ++ ".") }
hsc_env = hsc_env0 {hsc_dflags = dflags}
start_phase = fromMaybe (startPhase suffix') mb_phase
when (not (start_phase `happensBefore` stop_phase)) $
ghcError (UsageError
("cannot compile this file to desired target: "
++ input_fn))
let get_output_fn = getOutputFilename stop_phase output basename
let env = PipeEnv{ stop_phase,
src_basename = basename,
src_suffix = suffix',
output_spec = output }
state = PipeState{ hsc_env, maybe_loc, maybe_stub_o = maybe_stub_o }
(state', output_fn) <- unP (pipeLoop start_phase input_fn hooks) env state
let PipeState{ hsc_env=hsc_env', maybe_loc } = state'
dflags' = hsc_dflags hsc_env'
( eg . the CPP phase when -fcpp is not turned on ) . If we end on this
case output of
Temporary ->
return (dflags', output_fn)
_other ->
do final_fn <- get_output_fn dflags' stop_phase maybe_loc
when (final_fn /= output_fn) $ do
let msg = ("Copying `" ++ output_fn ++"' to `" ++ final_fn ++ "'")
line_prag = Just ("{-# LINE 1 \"" ++ input_fn ++ "\" #-}\n")
copyWithHeader dflags msg line_prag output_fn final_fn
return (dflags', final_fn)
PipeEnv : invariant information passed down
data PipeEnv = PipeEnv {
}
PipeState : information that might change during a pipeline run
data PipeState = PipeState {
hsc_env :: HscEnv,
^ only the DynFlags change in the HscEnv . The DynFlags change
pragmas in the phase .
maybe_loc :: Maybe ModLocation,
^ the ModLocation . This is discovered during compilation ,
in the Hsc phase where we read the module header .
maybe_stub_o :: Maybe FilePath
^ the stub object . This is set by the Hsc phase if a stub
}
newtype CompPipeline a = P { unP :: PipeEnv -> PipeState -> IO (PipeState, a) }
instance Monad CompPipeline where
return a = P $ \_env state -> return (state, a)
P m >>= k = P $ \env state -> do (state',a) <- m env state
unP (k a) env state'
setDynFlags :: DynFlags -> CompPipeline ()
setDynFlags dflags = P $ \_env state ->
return (state{hsc_env= (hsc_env state){ hsc_dflags = dflags }}, ())
setModLocation :: ModLocation -> CompPipeline ()
setModLocation loc = P $ \_env state ->
return (state{ maybe_loc = Just loc }, ())
setStubO :: FilePath -> CompPipeline ()
setStubO stub_o = P $ \_env state ->
return (state{ maybe_stub_o = Just stub_o }, ())
io :: IO a -> CompPipeline a
io m = P $ \_env state -> do a <- m; return (state, a)
getOutputFilename
:: Phase -> PipelineOutput -> String
-> DynFlags -> Phase -> Maybe ModLocation -> IO FilePath
getOutputFilename stop_phase output basename
= func
where
func dflags next_phase maybe_location
| is_last_phase, Persistent <- output = persistent_fn
| is_last_phase, SpecificFile f <- output = return f
| keep_this_output = persistent_fn
| otherwise = newTempName dflags suffix
where
hcsuf = hcSuf dflags
odir = objectDir dflags
osuf = objectSuf dflags
keep_hc = dopt Opt_KeepHcFiles dflags
keep_s = dopt Opt_KeepSFiles dflags
keep_bc = dopt Opt_KeepLlvmFiles dflags
myPhaseInputExt HCc = hcsuf
myPhaseInputExt MergeStub = osuf
myPhaseInputExt StopLn = osuf
myPhaseInputExt other = phaseInputExt other
is_last_phase = next_phase `eqPhase` stop_phase
keep_this_output =
case next_phase of
As | keep_s -> True
LlvmOpt | keep_bc -> True
HCc | keep_hc -> True
_other -> False
suffix = myPhaseInputExt next_phase
persistent_fn
| StopLn <- next_phase = return odir_persistent
| otherwise = return persistent
persistent = basename <.> suffix
odir_persistent
| Just loc <- maybe_location = ml_obj_file loc
| Just d <- odir = d </> persistent
| otherwise = persistent
phaseOutputFilename next_phase = do
PipeEnv{stop_phase, src_basename, output_spec} <- getPipeEnv
PipeState{maybe_loc, hsc_env} <- getPipeState
let dflags = hsc_dflags hsc_env
io $ getOutputFilename stop_phase output_spec
src_basename dflags next_phase maybe_loc
pipeLoop :: Phase -> FilePath -> FileHooks -> CompPipeline FilePath
pipeLoop phase input_fn hooks = do
PipeEnv{stop_phase} <- getPipeEnv
PipeState{hsc_env} <- getPipeState
case () of
_ | phase `eqPhase` stop_phase
-> return input_fn
| not (phase `happensBefore` stop_phase)
-> panic ("pipeLoop: at phase " ++ show phase ++
" but I wanted to stop at phase " ++ show stop_phase)
| otherwise
-> do io $ debugTraceMsg (hsc_dflags hsc_env) 4
(ptext (sLit "Running phase") <+> ppr phase)
dflags <- getDynFlags
(next_phase, output_fn) <- runPhase phase input_fn dflags hooks
pipeLoop next_phase output_fn hooks
getPipeEnv :: CompPipeline PipeEnv
getPipeEnv = P $ \env state -> return (state, env)
getPipeState :: CompPipeline PipeState
getPipeState = P $ \_env state -> return (state, state)
instance HasDynFlags CompPipeline where
getDynFlags = P $ \_env state -> return (state, hsc_dflags (hsc_env state))
^ for convenience , we pass the current dflags in
runPhase :: Phase -> FilePath -> DynFlags -> FileHooks
-> CompPipeline (Phase, FilePath)
Interesting case : Hsc when there is no recompilation to do
phase
runPhase (Unlit sf) input_fn dflags hooks
= do
output_fn <- phaseOutputFilename (Cpp sf)
let unlit_flags = getOpts dflags opt_L
flags = map SysTools.Option unlit_flags ++
put in a # line directive
[ SysTools.Option "-h"
, SysTools.Option $ escape $ normalise input_fn
, SysTools.FileOption "" input_fn
, SysTools.FileOption "" output_fn
]
io $ SysTools.runUnlit dflags flags
return (Cpp sf, output_fn)
where
Unicode or anything else ( so we do n't use Util.charToC
a SrcLoc is the same as the source filenaame , the two will
escape ('\\':cs) = '\\':'\\': escape cs
escape ('\"':cs) = '\\':'\"': escape cs
escape ('\'':cs) = '\\':'\'': escape cs
escape (c:cs) = c : escape cs
escape [] = []
phase : ( a ) gets OPTIONS out of file
( b ) runs if necessary
runPhase (Cpp sf) input_fn dflags0 hooks
= do
src_opts <- io $ getOptionsFromFile dflags0 input_fn
(dflags1, unhandled_flags, warns)
<- io $ parseDynamicFilePragma dflags0 src_opts
setDynFlags dflags1
io $ checkProcessArgsResult dflags1 unhandled_flags
if not (xopt Opt_Cpp dflags1) then do
unless (dopt Opt_Pp dflags1) $ io $ handleFlagWarnings dflags1 warns
no need to preprocess CPP , just pass input file along
return (HsPp sf, input_fn)
else do
output_fn <- phaseOutputFilename (HsPp sf)
no CC opts
io $ doCpp dflags1 True False input_fn output_fn
See # 2464,#3457
src_opts <- io $ getOptionsFromFile dflags0 output_fn
(dflags2, unhandled_flags, warns)
<- io $ parseDynamicFilePragma dflags0 src_opts
io $ checkProcessArgsResult dflags2 unhandled_flags
unless (dopt Opt_Pp dflags2) $ io $ handleFlagWarnings dflags2 warns
the HsPp pass below will emit warnings
setDynFlags dflags2
return (HsPp sf, output_fn)
phase
runPhase (HsPp sf) input_fn dflags hooks
= do
if not (dopt Opt_Pp dflags) then
return (Hsc sf, input_fn)
else do
let hspp_opts = getOpts dflags opt_F
PipeEnv{src_basename, src_suffix} <- getPipeEnv
let orig_fn = src_basename <.> src_suffix
output_fn <- phaseOutputFilename (Hsc sf)
io $ SysTools.runPp dflags
( [ SysTools.Option orig_fn
, SysTools.Option input_fn
, SysTools.FileOption "" output_fn
] ++
map SysTools.Option hspp_opts
)
re - read pragmas now that we 've parsed the file ( see # 3674 )
src_opts <- io $ getOptionsFromFile dflags output_fn
(dflags1, unhandled_flags, warns)
<- io $ parseDynamicFilePragma dflags src_opts
setDynFlags dflags1
io $ checkProcessArgsResult dflags1 unhandled_flags
io $ handleFlagWarnings dflags1 warns
return (Hsc sf, output_fn)
Hsc phase
runPhase (Hsc src_flavour) input_fn dflags0 hooks
= do
normal Hsc mode , not mkdependHS
PipeEnv{ stop_phase=stop,
src_basename=basename,
src_suffix=suff } <- getPipeEnv
let current_dir = takeDirectory basename
paths = includePaths dflags0
dflags = dflags0 { includePaths = current_dir : paths }
setDynFlags dflags
(hspp_buf,mod_name,imps,src_imps) <- io $
case src_flavour of
no explicit imports in ExtCore input .
ExtCoreFile -> do
m <- getCoreModuleName input_fn
return (Nothing, mkModuleName m, [], [])
_ -> do
buf <- hGetStringBuffer input_fn
(src_imps,imps,L _ mod_name) <- getImports dflags buf input_fn (basename <.> suff)
return (Just buf, mod_name, imps, src_imps)
Build a ModLocation to pass to hscMain .
the .hi and .o filenames , and this is as good a way
location1 <- io $ mkHomeModLocation2 dflags mod_name basename suff
let location2 | isHsBoot src_flavour = addBootSuffixLocn location1
| otherwise = location1
let ohi = outputHi dflags
location3 | Just fn <- ohi = location2{ ml_hi_file = fn }
| otherwise = location2
the object file for one module . )
let expl_o_file = outputFile dflags
location4 | Just ofile <- expl_o_file
, isNoLink (ghcLink dflags)
= location3 { ml_obj_file = ofile }
| otherwise = location3
setModLocation location4
to be up to date wrt M.hs ; so no need to recompile unless imports have
date wrt M.hs ( or M.o does n't exist ) so we must recompile regardless .
let o_file = ml_obj_file location4
src_timestamp <- io $ getModificationUTCTime (basename <.> suff)
let hsc_lang = hscTarget dflags
source_unchanged <- io $
if not (isStopLn stop)
SourceModified unconditionally if
then return SourceModified
else do o_file_exists <- doesFileExist o_file
if not o_file_exists
then return SourceModified
else do t2 <- getModificationUTCTime o_file
if t2 > src_timestamp
then return SourceUnmodified
else return SourceModified
get the DynFlags
let next_phase =
hookPostBackendPhase hooks hscPostBackendPhase
dflags src_flavour hsc_lang
output_fn <- phaseOutputFilename next_phase
let dflags' = dflags { hscTarget = hsc_lang,
hscOutName = output_fn,
extCoreName = basename ++ ".hcr" }
setDynFlags dflags'
PipeState{hsc_env=hsc_env'} <- getPipeState
mod <- io $ addHomeModuleToFinder hsc_env' mod_name location4
Make the ModSummary to hand to hscMain
let
mod_summary = ModSummary { ms_mod = mod,
ms_hsc_src = src_flavour,
ms_hspp_file = input_fn,
ms_hspp_opts = dflags,
ms_hspp_buf = hspp_buf,
ms_location = location4,
ms_hs_date = src_timestamp,
ms_obj_date = Nothing,
ms_textual_imps = imps,
ms_srcimps = src_imps }
result <- io $ hscCompileOneShot hooks hsc_env'
mod_summary source_unchanged
case result of
HscNoRecomp
-> do io $ touchObjectFile dflags' o_file
than the source file ( else we would n't be in HscNoRecomp )
return (StopLn, o_file)
(HscRecomp hasStub _)
-> do case hasStub of
Nothing -> return ()
Just stub_c ->
do stub_o <- io $ compileStub hsc_env' hooks stub_c
setStubO stub_o
when (isHsBoot src_flavour) $
io $ touchObjectFile dflags' o_file
return (next_phase, output_fn)
Cmm phase
runPhase CmmCpp input_fn dflags hooks
= do
output_fn <- phaseOutputFilename Cmm
include CC opts
io $ doCpp dflags False True
input_fn output_fn
return (Cmm, output_fn)
runPhase Cmm input_fn dflags hooks
= do
PipeEnv{src_basename} <- getPipeEnv
let hsc_lang = hscTarget dflags
let next_phase =
hookPostBackendPhase hooks hscPostBackendPhase
dflags HsSrcFile hsc_lang
output_fn <- phaseOutputFilename next_phase
let dflags' = dflags { hscTarget = hsc_lang,
hscOutName = output_fn,
extCoreName = src_basename ++ ".hcr" }
setDynFlags dflags'
PipeState{hsc_env} <- getPipeState
io $ hscCompileCmmFile hsc_env input_fn
return (next_phase, output_fn)
runPhase cc_phase input_fn dflags hooks
| any (cc_phase `eqPhase`) [Cc, Ccpp, HCc, Cobjc, Cobjcpp]
= do
let platform = targetPlatform dflags
cc_opts = getOpts dflags opt_c
hcc = cc_phase `eqPhase` HCc
let cmdline_include_paths = includePaths dflags
HC files have the dependent packages stamped into them
pkgs <- if hcc then io $ getHCFilePackages input_fn else return []
files ; this is the Value Add(TM ) that using ghc instead of
pkg_include_dirs <- io $ getPackageIncludePath dflags pkgs
let include_paths = foldr (\ x xs -> "-I" : x : xs) []
(cmdline_include_paths ++ pkg_include_dirs)
let gcc_extra_viac_flags = extraGccViaCFlags dflags
let pic_c_flags = picCCOpts dflags
let verbFlags = getVerbFlags dflags
pkg_extra_cc_opts <- io $
if cc_phase `eqPhase` HCc
then return []
else getPackageExtraCcOpts dflags pkgs
framework_paths <-
case platformOS platform of
OSDarwin ->
do pkgFrameworkPaths <- io $ getPackageFrameworkPath dflags pkgs
let cmdlineFrameworkPaths = frameworkPaths dflags
return $ map ("-F"++)
(cmdlineFrameworkPaths ++ pkgFrameworkPaths)
_ ->
return []
let split_objs = dopt Opt_SplitObjs dflags
split_opt | hcc && split_objs = [ "-DUSE_SPLIT_MARKERS" ]
| otherwise = [ ]
let cc_opt | optLevel dflags >= 2 = "-O2"
| otherwise = "-O"
let next_phase = As
output_fn <- phaseOutputFilename next_phase
let
more_hcc_opts =
(if platformArch platform == ArchX86 &&
not (dopt Opt_ExcessPrecision dflags)
then [ "-ffloat-store" ]
else []) ++
gcc 's -fstrict - aliasing allows two accesses to memory
["-fno-strict-aliasing"]
let gcc_lang_opt | cc_phase `eqPhase` Ccpp = "c++"
| cc_phase `eqPhase` Cobjc = "objective-c"
| cc_phase `eqPhase` Cobjcpp = "objective-c++"
| otherwise = "c"
io $ SysTools.runCc dflags (
Also useful for plain .c files , just in case GHC saw a
[ SysTools.Option "-x", SysTools.Option gcc_lang_opt
, SysTools.FileOption "" input_fn
, SysTools.Option "-o"
, SysTools.FileOption "" output_fn
]
++ map SysTools.Option (
pic_c_flags
These symbols are imported into the stub.c file via RtsAPI.h , and the
++ (if platformOS platform == OSMinGW32 &&
thisPackage dflags == basePackageId
then [ "-DCOMPILING_BASE_PACKAGE" ]
else [])
We only support SparcV9 and better because V8 lacks an atomic CAS
( e.g. , -mcpu = ultrasparc ) as GCC picks the " best " -mcpu flag
++ (if platformArch platform == ArchSPARC
then ["-mcpu=v9"]
else [])
GCC 4.6 + does n't like -Wimplicit when compiling C++ .
++ (if (cc_phase /= Ccpp && cc_phase /= Cobjcpp)
then ["-Wimplicit"]
else [])
++ (if hcc
then gcc_extra_viac_flags ++ more_hcc_opts
else [])
++ verbFlags
++ [ "-S", cc_opt ]
++ [ "-D__GLASGOW_HASKELL__="++cProjectVersionInt ]
++ framework_paths
++ cc_opts
++ split_opt
++ include_paths
++ pkg_extra_cc_opts
))
return (next_phase, output_fn)
runPhase Splitter input_fn dflags hooks
= do
split_s_prefix <- io $ SysTools.newTempName dflags "split"
let n_files_fn = split_s_prefix
io $ SysTools.runSplit dflags
[ SysTools.FileOption "" input_fn
, SysTools.FileOption "" split_s_prefix
, SysTools.FileOption "" n_files_fn
]
s <- io $ readFile n_files_fn
let n_files = read s :: Int
dflags' = dflags { splitInfo = Just (split_s_prefix, n_files) }
setDynFlags dflags'
io $ addFilesToClean dflags' [ split_s_prefix ++ "__" ++ show n ++ ".s"
| n <- [1..n_files]]
return (SplitAs,
"**splitmangle**")
we do n't use the filename in SplitAs
runPhase As input_fn dflags hooks
= do
LLVM from version 3.0 onwards does n't support the OS X system
assembler , so we use clang as the assembler instead . ( # 5636 )
let whichAsProg | hscTarget dflags == HscLlvm &&
platformOS (targetPlatform dflags) == OSDarwin
= do
llvmVer <- io $ figureLlvmVersion dflags
return $ case llvmVer of
Just n | n >= 30 ->
(SysTools.runClang, cGccLinkerOpts)
_ -> (SysTools.runAs, getOpts dflags opt_a)
| otherwise
= return (SysTools.runAs, getOpts dflags opt_a)
(as_prog, as_opts) <- whichAsProg
let cmdline_include_paths = includePaths dflags
next_phase <- maybeMergeStub
output_fn <- phaseOutputFilename next_phase
io $ createDirectoryIfMissing True (takeDirectory output_fn)
io $ as_prog dflags
(map SysTools.Option as_opts
++ [ SysTools.Option ("-I" ++ p) | p <- cmdline_include_paths ]
We only support SparcV9 and better because V8 lacks an atomic CAS
( e.g. , -mcpu = ultrasparc ) . GCC picks the " best " -mcpu flag
++ (if platformArch (targetPlatform dflags) == ArchSPARC
then [SysTools.Option "-mcpu=v9"]
else [])
++ [ SysTools.Option "-c"
, SysTools.FileOption "" input_fn
, SysTools.Option "-o"
, SysTools.FileOption "" output_fn
])
return (next_phase, output_fn)
runPhase SplitAs _input_fn dflags hooks
= do
we 'll handle the stub_o file in this phase , so do n't MergeStub ,
let next_phase = StopLn
output_fn <- phaseOutputFilename next_phase
let base_o = dropExtension output_fn
osuf = objectSuf dflags
split_odir = base_o ++ "_" ++ osuf ++ "_split"
io $ createDirectoryIfMissing True split_odir
fs <- io $ getDirectoryContents split_odir
io $ mapM_ removeFile $
map (split_odir </>) $ filter (osuf `isSuffixOf`) fs
let as_opts = getOpts dflags opt_a
let (split_s_prefix, n) = case splitInfo dflags of
Nothing -> panic "No split info"
Just x -> x
let split_s n = split_s_prefix ++ "__" ++ show n <.> "s"
split_obj :: Int -> FilePath
split_obj n = split_odir </>
takeFileName base_o ++ "__" ++ show n <.> osuf
let assemble_file n
= SysTools.runAs dflags
(map SysTools.Option as_opts ++
We only support SparcV9 and better because V8 lacks an atomic CAS
( e.g. , -mcpu = ultrasparc ) . GCC picks the " best " -mcpu flag
(if platformArch (targetPlatform dflags) == ArchSPARC
then [SysTools.Option "-mcpu=v9"]
else []) ++
[ SysTools.Option "-c"
, SysTools.Option "-o"
, SysTools.FileOption "" (split_obj n)
, SysTools.FileOption "" (split_s n)
])
io $ mapM_ assemble_file [1..n]
that needs to be initialised is all in the FIRST split
object . See Note [ codegen - split - init ] .
PipeState{maybe_stub_o} <- getPipeState
case maybe_stub_o of
Nothing -> return ()
Just stub_o -> io $ do
tmp_split_1 <- newTempName dflags osuf
let split_1 = split_obj 1
copyFile split_1 tmp_split_1
removeFile split_1
joinObjectFiles dflags [tmp_split_1, stub_o] split_1
io $ joinObjectFiles dflags (map split_obj [1..n]) output_fn
return (next_phase, output_fn)
runPhase LlvmOpt input_fn dflags hooks
= do
ver <- io $ readIORef (llvmVersion dflags)
let lo_opts = getOpts dflags opt_lo
opt_lvl = max 0 (min 2 $ optLevel dflags)
for opt but not llc since opt is very specifically for optimisation
optFlag = if null lo_opts
then [SysTools.Option (llvmOpts !! opt_lvl)]
else []
no tbaa in 2.8 and earlier
tbaa | ver < 29 = ""
| dopt Opt_LlvmTBAA dflags = "--enable-tbaa=true"
| otherwise = "--enable-tbaa=false"
output_fn <- phaseOutputFilename LlvmLlc
io $ SysTools.runLlvmOpt dflags
([ SysTools.FileOption "" input_fn,
SysTools.Option "-o",
SysTools.FileOption "" output_fn]
++ optFlag
++ [SysTools.Option tbaa]
++ map SysTools.Option lo_opts)
return (LlvmLlc, output_fn)
where
llvmOpts = ["-mem2reg", "-O1", "-O2"]
LlvmLlc phase
runPhase LlvmLlc input_fn dflags hooks
= do
ver <- io $ readIORef (llvmVersion dflags)
let lc_opts = getOpts dflags opt_lc
opt_lvl = max 0 (min 2 $ optLevel dflags)
rmodel | opt_PIC = "pic"
| not opt_Static = "dynamic-no-pic"
| otherwise = "static"
no tbaa in 2.8 and earlier
tbaa | ver < 29 = ""
| dopt Opt_LlvmTBAA dflags = "--enable-tbaa=true"
| otherwise = "--enable-tbaa=false"
let next_phase = case dopt Opt_NoLlvmMangler dflags of
False -> LlvmMangle
True | dopt Opt_SplitObjs dflags -> Splitter
True -> As
output_fn <- phaseOutputFilename next_phase
io $ SysTools.runLlvmLlc dflags
([ SysTools.Option (llvmOpts !! opt_lvl),
SysTools.Option $ "-relocation-model=" ++ rmodel,
SysTools.FileOption "" input_fn,
SysTools.Option "-o", SysTools.FileOption "" output_fn]
++ map SysTools.Option lc_opts
++ [SysTools.Option tbaa]
++ map SysTools.Option fpOpts
++ map SysTools.Option abiOpts)
return (next_phase, output_fn)
where
Bug in LLVM at O3 on OSX .
llvmOpts = if platformOS (targetPlatform dflags) == OSDarwin
then ["-O1", "-O2", "-O2"]
else ["-O1", "-O2", "-O3"]
On ARMv7 using LLVM , LLVM fails to allocate floating point registers
while compiling GHC source code . It 's probably due to fact that it
does not enable VFP by default . Let 's do this manually here
fpOpts = case platformArch (targetPlatform dflags) of
ArchARM ARMv7 ext _ -> if (elem VFPv3 ext)
then ["-mattr=+v7,+vfp3"]
else if (elem VFPv3D16 ext)
then ["-mattr=+v7,+vfp3,+d16"]
else []
_ -> []
On Ubuntu / Debian with ARM hard float ABI , LLVM 's llc still
compiles into soft - float ABI . We need to explicitly set abi
abiOpts = case platformArch (targetPlatform dflags) of
ArchARM ARMv7 _ HARD -> ["-float-abi=hard"]
ArchARM ARMv7 _ _ -> []
_ -> []
LlvmMangle phase
runPhase LlvmMangle input_fn dflags hooks
= do
let next_phase = if dopt Opt_SplitObjs dflags then Splitter else As
output_fn <- phaseOutputFilename next_phase
io $ llvmFixupAsm dflags input_fn output_fn
return (next_phase, output_fn)
runPhase MergeStub input_fn dflags hooks
= do
PipeState{maybe_stub_o} <- getPipeState
output_fn <- phaseOutputFilename StopLn
case maybe_stub_o of
Nothing ->
panic "runPhase(MergeStub): no stub"
Just stub_o -> do
io $ joinObjectFiles dflags [input_fn, stub_o] output_fn
return (StopLn, output_fn)
runPhase other _input_fn _dflags _hooks =
panic ("runPhase: don't know how to run phase " ++ show other)
maybeMergeStub :: CompPipeline Phase
maybeMergeStub = do
PipeState{maybe_stub_o} <- getPipeState
if isJust maybe_stub_o then return MergeStub else return StopLn
stub .h and .c files ( for foreign export support )
into account the -stubdir option .
( see the MergeStubs phase ) .
compileStub :: HscEnv -> FileHooks -> FilePath -> IO FilePath
compileStub hsc_env hooks stub_c = do
(_, stub_o) <- runPipeline StopLn hsc_env (stub_c,Nothing) Nothing
Temporary Nothing Nothing hooks
no ModLocation
return stub_o
getHCFilePackages :: FilePath -> IO [PackageId]
getHCFilePackages filename =
Exception.bracket (openFile filename ReadMode) hClose $ \h -> do
l <- hGetLine h
case l of
'/':'*':' ':'G':'H':'C':'_':'P':'A':'C':'K':'A':'G':'E':'S':rest ->
return (map stringToPackageId (words rest))
_other ->
return []
doCpp :: DynFlags -> Bool -> Bool -> FilePath -> FilePath -> IO ()
doCpp dflags raw include_cc_opts input_fn output_fn = do
let hscpp_opts = getOpts dflags opt_P
let cmdline_include_paths = includePaths dflags
pkg_include_dirs <- getPackageIncludePath dflags []
let include_paths = foldr (\ x xs -> "-I" : x : xs) []
(cmdline_include_paths ++ pkg_include_dirs)
let verbFlags = getVerbFlags dflags
let cc_opts
| include_cc_opts = getOpts dflags opt_c
| otherwise = []
let cpp_prog args | raw = SysTools.runCpp dflags args
| otherwise = SysTools.runCc dflags (SysTools.Option "-E" : args)
let target_defs =
[ "-D" ++ HOST_OS ++ "_BUILD_OS=1",
"-D" ++ HOST_ARCH ++ "_BUILD_ARCH=1",
"-D" ++ TARGET_OS ++ "_HOST_OS=1",
"-D" ++ TARGET_ARCH ++ "_HOST_ARCH=1" ]
remember , in code we * compile * , the is the same our TARGET ,
cpp_prog ( map SysTools.Option verbFlags
++ map SysTools.Option include_paths
++ map SysTools.Option hsSourceCppOpts
++ map SysTools.Option target_defs
++ map SysTools.Option hscpp_opts
++ map SysTools.Option cc_opts
++ [ SysTools.Option "-x"
, SysTools.Option "c"
, SysTools.Option input_fn
We hackily use Option instead of FileOption here , so that the file
name is not back - slashed on Windows . cpp is capable of
, SysTools.Option "-o"
, SysTools.FileOption "" output_fn
])
hsSourceCppOpts :: [String]
Default CPP defines in Haskell source
hsSourceCppOpts =
[ "-D__GLASGOW_HASKELL__="++cProjectVersionInt ]
| What phase to run after one of the backend code generators has run
hscPostBackendPhase :: DynFlags -> HscSource -> HscTarget -> Phase
hscPostBackendPhase _ HsBootFile _ = StopLn
hscPostBackendPhase dflags _ hsc_lang =
case hsc_lang of
HscC -> HCc
HscAsm | dopt Opt_SplitObjs dflags -> Splitter
| otherwise -> As
HscLlvm -> LlvmOpt
HscNothing -> StopLn
HscInterpreted -> StopLn
touchObjectFile :: DynFlags -> FilePath -> IO ()
touchObjectFile dflags path = do
createDirectoryIfMissing True $ takeDirectory path
SysTools.touch dflags "Touching object file" path
haveRtsOptsFlags :: DynFlags -> Bool
haveRtsOptsFlags dflags =
isJust (rtsOpts dflags) || case rtsOptsEnabled dflags of
RtsOptsSafeOnly -> False
_ -> True
joinObjectFiles :: DynFlags -> [FilePath] -> FilePath -> IO ()
joinObjectFiles dflags o_files output_fn = do
let ldIsGnuLd = cLdIsGNULd == "YES"
ld_r args = SysTools.runLink dflags ([
SysTools.Option "-nostdlib",
SysTools.Option "-nodefaultlibs",
SysTools.Option "-Wl,-r"
]
++ (if platformArch (targetPlatform dflags) == ArchSPARC
&& ldIsGnuLd
then [SysTools.Option "-Wl,-no-relax"]
else [])
++ [
SysTools.Option ld_build_id,
SysTools . Option ld_x_flag ,
SysTools.Option "-o",
SysTools.FileOption "" output_fn ]
++ args)
ld_x_flag | null = " "
ld_build_id | cLdHasBuildId == "YES" = "-Wl,--build-id=none"
| otherwise = ""
if ldIsGnuLd
then do
script <- newTempName dflags "ldscript"
writeFile script $ "INPUT(" ++ unwords o_files ++ ")"
ld_r [SysTools.FileOption "" script]
else do
ld_r (map (SysTools.FileOption "") o_files)
|
9c5ffffdb57ea8982983ecce7a622ae9775d1de8e04e9808aa0f26180260ae93
|
bmeurer/ocamljit2
|
loadprinter.mli
|
(***********************************************************************)
(* *)
(* Objective Caml *)
(* *)
, projet Cristal , INRIA Rocquencourt
(* *)
Copyright 1997 Institut National de Recherche en Informatique et
en Automatique . All rights reserved . This file is distributed
under the terms of the Q Public License version 1.0 .
(* *)
(***********************************************************************)
$ Id$
(* Loading and installation of user-defined printer functions *)
open Format
val loadfile : formatter -> string -> unit
val install_printer : formatter -> Longident.t -> unit
val remove_printer : Longident.t -> unit
(* Error report *)
type error =
| Load_failure of Dynlink.error
| Unbound_identifier of Longident.t
| Unavailable_module of string * Longident.t
| Wrong_type of Longident.t
| No_active_printer of Longident.t
exception Error of error
val report_error: formatter -> error -> unit
| null |
https://raw.githubusercontent.com/bmeurer/ocamljit2/ef06db5c688c1160acc1de1f63c29473bcd0055c/debugger/loadprinter.mli
|
ocaml
|
*********************************************************************
Objective Caml
*********************************************************************
Loading and installation of user-defined printer functions
Error report
|
, projet Cristal , INRIA Rocquencourt
Copyright 1997 Institut National de Recherche en Informatique et
en Automatique . All rights reserved . This file is distributed
under the terms of the Q Public License version 1.0 .
$ Id$
open Format
val loadfile : formatter -> string -> unit
val install_printer : formatter -> Longident.t -> unit
val remove_printer : Longident.t -> unit
type error =
| Load_failure of Dynlink.error
| Unbound_identifier of Longident.t
| Unavailable_module of string * Longident.t
| Wrong_type of Longident.t
| No_active_printer of Longident.t
exception Error of error
val report_error: formatter -> error -> unit
|
89837fdf0a33c1359d80f4395d1c7323b4cba9cb09a74ef0f47e2ae0ffb466ea
|
Ericson2314/lighthouse
|
HpcParser.hs
|
{-# OPTIONS -fglasgow-exts -cpp #-}
{-# OPTIONS -w #-}
-- The above warning supression flag is a temporary kludge.
-- While working on this module you are encouraged to remove it and fix
-- any warnings in the module. See
-- #Warnings
-- for details
module HpcParser where
import HpcLexer
#if __GLASGOW_HASKELL__ >= 503
import Data.Array
#else
import Array
#endif
#if __GLASGOW_HASKELL__ >= 503
import GHC.Exts
#else
import GlaExts
#endif
-- parser produced by Happy Version 1.16
newtype HappyAbsSyn = HappyAbsSyn HappyAny
#if __GLASGOW_HASKELL__ >= 607
type HappyAny = GHC.Exts.Any
#else
type HappyAny = forall a . a
#endif
happyIn4 :: (Spec) -> (HappyAbsSyn )
happyIn4 x = unsafeCoerce# x
# INLINE happyIn4 #
happyOut4 :: (HappyAbsSyn ) -> (Spec)
happyOut4 x = unsafeCoerce# x
# INLINE happyOut4 #
happyIn5 :: (L (ModuleName,[Tick])) -> (HappyAbsSyn )
happyIn5 x = unsafeCoerce# x
# INLINE happyIn5 #
happyOut5 :: (HappyAbsSyn ) -> (L (ModuleName,[Tick]))
happyOut5 x = unsafeCoerce# x
# INLINE happyOut5 #
happyIn6 :: ((ModuleName,[Tick])) -> (HappyAbsSyn )
happyIn6 x = unsafeCoerce# x
# INLINE happyIn6 #
happyOut6 :: (HappyAbsSyn ) -> ((ModuleName,[Tick]))
happyOut6 x = unsafeCoerce# x
# INLINE happyOut6 #
happyIn7 :: (L Tick) -> (HappyAbsSyn )
happyIn7 x = unsafeCoerce# x
# INLINE happyIn7 #
happyOut7 :: (HappyAbsSyn ) -> (L Tick)
happyOut7 x = unsafeCoerce# x
# INLINE happyOut7 #
happyIn8 :: (Tick) -> (HappyAbsSyn )
happyIn8 x = unsafeCoerce# x
# INLINE happyIn8 #
happyOut8 :: (HappyAbsSyn ) -> (Tick)
happyOut8 x = unsafeCoerce# x
# INLINE happyOut8 #
happyIn9 :: (L ExprTick) -> (HappyAbsSyn )
happyIn9 x = unsafeCoerce# x
# INLINE happyIn9 #
happyOut9 :: (HappyAbsSyn ) -> (L ExprTick)
happyOut9 x = unsafeCoerce# x
# INLINE happyOut9 #
happyIn10 :: (ExprTick) -> (HappyAbsSyn )
happyIn10 x = unsafeCoerce# x
# INLINE happyIn10 #
happyOut10 :: (HappyAbsSyn ) -> (ExprTick)
happyOut10 x = unsafeCoerce# x
# INLINE happyOut10 #
happyIn11 :: (Maybe String) -> (HappyAbsSyn )
happyIn11 x = unsafeCoerce# x
# INLINE happyIn11 #
happyOut11 :: (HappyAbsSyn ) -> (Maybe String)
happyOut11 x = unsafeCoerce# x
# INLINE happyOut11 #
happyIn12 :: (Maybe Qualifier) -> (HappyAbsSyn )
happyIn12 x = unsafeCoerce# x
# INLINE happyIn12 #
happyOut12 :: (HappyAbsSyn ) -> (Maybe Qualifier)
happyOut12 x = unsafeCoerce# x
# INLINE happyOut12 #
happyIn13 :: (Maybe String) -> (HappyAbsSyn )
happyIn13 x = unsafeCoerce# x
# INLINE happyIn13 #
happyOut13 :: (HappyAbsSyn ) -> (Maybe String)
happyOut13 x = unsafeCoerce# x
# INLINE happyOut13 #
happyInTok :: Token -> (HappyAbsSyn )
happyInTok x = unsafeCoerce# x
# INLINE happyInTok #
happyOutTok :: (HappyAbsSyn ) -> Token
happyOutTok x = unsafeCoerce# x
# INLINE happyOutTok #
happyActOffsets :: HappyAddr
happyActOffsets = HappyA# "\x00\x00\x00\x00\x33\x00\x26\x00\x2e\x00\x00\x00\x24\x00\x0a\x00\x00\x00\x00\x00\x23\x00\x25\x00\x0b\x00\x29\x00\x1d\x00\x22\x00\x21\x00\x1f\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x20\x00\x1e\x00\x00\x00\x00\x00\xfb\xff\x1c\x00\x00\x00\x1b\x00\x19\x00\x1a\x00\x18\x00\x0a\x00\x00\x00\xff\xff\x08\x00\x17\x00\x15\x00\x11\x00\x00\x00\x00\x00\x00\x00\x00\x00"#
happyGotoOffsets :: HappyAddr
happyGotoOffsets = HappyA# "\x0c\x00\x16\x00\x04\x00\x00\x00\x14\x00\x00\x00\x13\x00\x10\x00\x00\x00\x00\x00\x00\x00\x00\x00\x0e\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x12\x00\x02\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x0d\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xfc\xff\x0f\x00\x02\x00\x07\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00"#
happyDefActions :: HappyAddr
happyDefActions = HappyA# "\xf4\xff\x00\x00\xfc\xff\x00\x00\xfe\xff\xf5\xff\xf1\xff\xee\xff\xf2\xff\xfd\xff\x00\x00\x00\x00\xec\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xed\xff\xf9\xff\x00\x00\xf3\xff\xf0\xff\x00\x00\x00\x00\xfa\xff\xf8\xff\xf1\xff\x00\x00\xfb\xff\x00\x00\x00\x00\x00\x00\x00\x00\xee\xff\xf9\xff\x00\x00\xec\xff\x00\x00\x00\x00\x00\x00\xf6\xff\xf7\xff\xef\xff"#
happyCheck :: HappyAddr
happyCheck = HappyA# "\xff\xff\x02\x00\x07\x00\x02\x00\x08\x00\x01\x00\x04\x00\x08\x00\x06\x00\x08\x00\x06\x00\x10\x00\x00\x00\x0e\x00\x04\x00\x0e\x00\x09\x00\x05\x00\x03\x00\x09\x00\x07\x00\x03\x00\x02\x00\x09\x00\x08\x00\x11\x00\x07\x00\x05\x00\x11\x00\x0c\x00\xff\xff\xff\xff\xff\xff\x0a\x00\xff\xff\x06\x00\x0f\x00\x0b\x00\xff\xff\x0f\x00\x0d\x00\x10\x00\x0a\x00\x0c\x00\x10\x00\x0f\x00\x05\x00\x01\x00\x0f\x00\x0f\x00\x0d\x00\x10\x00\x10\x00\x02\x00\xff\xff\xff\xff\x12\x00\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff"#
happyTable :: HappyAddr
happyTable = HappyA# "\x00\x00\x1c\x00\x20\x00\x1c\x00\x25\x00\x04\x00\x19\x00\x1d\x00\x1a\x00\x1d\x00\x05\x00\x09\x00\x03\x00\x2a\x00\x0e\x00\x1e\x00\x28\x00\x02\x00\x24\x00\x0f\x00\x07\x00\x14\x00\x09\x00\x11\x00\x0c\x00\x13\x00\x07\x00\x02\x00\x13\x00\x2b\x00\x00\x00\x00\x00\x00\x00\x28\x00\x00\x00\x10\x00\x2c\x00\x22\x00\x00\x00\x27\x00\x24\x00\x23\x00\x19\x00\x16\x00\x1f\x00\x21\x00\x11\x00\x0b\x00\x17\x00\x18\x00\x14\x00\x0c\x00\x09\x00\x07\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00"#
happyReduceArr = array (1, 19) [
(1 , happyReduce_1),
(2 , happyReduce_2),
(3 , happyReduce_3),
(4 , happyReduce_4),
(5 , happyReduce_5),
(6 , happyReduce_6),
(7 , happyReduce_7),
(8 , happyReduce_8),
(9 , happyReduce_9),
(10 , happyReduce_10),
(11 , happyReduce_11),
(12 , happyReduce_12),
(13 , happyReduce_13),
(14 , happyReduce_14),
(15 , happyReduce_15),
(16 , happyReduce_16),
(17 , happyReduce_17),
(18 , happyReduce_18),
(19 , happyReduce_19)
]
happy_n_terms = 19 :: Int
happy_n_nonterms = 10 :: Int
happyReduce_1 = happySpecReduce_2 0# happyReduction_1
happyReduction_1 happy_x_2
happy_x_1
= case happyOut9 happy_x_1 of { happy_var_1 ->
case happyOut5 happy_x_2 of { happy_var_2 ->
happyIn4
(Spec (happy_var_1 []) (happy_var_2 [])
)}}
happyReduce_2 = happySpecReduce_2 1# happyReduction_2
happyReduction_2 happy_x_2
happy_x_1
= case happyOut5 happy_x_1 of { happy_var_1 ->
case happyOut6 happy_x_2 of { happy_var_2 ->
happyIn5
(happy_var_1 . ((:) happy_var_2)
)}}
happyReduce_3 = happySpecReduce_0 1# happyReduction_3
happyReduction_3 = happyIn5
(id
)
happyReduce_4 = happyReduce 5# 2# happyReduction_4
happyReduction_4 (happy_x_5 `HappyStk`
happy_x_4 `HappyStk`
happy_x_3 `HappyStk`
happy_x_2 `HappyStk`
happy_x_1 `HappyStk`
happyRest)
= case happyOutTok happy_x_2 of { (STR happy_var_2) ->
case happyOut7 happy_x_4 of { happy_var_4 ->
happyIn6
((happy_var_2,happy_var_4 [])
) `HappyStk` happyRest}}
happyReduce_5 = happySpecReduce_2 3# happyReduction_5
happyReduction_5 happy_x_2
happy_x_1
= case happyOut7 happy_x_1 of { happy_var_1 ->
case happyOut8 happy_x_2 of { happy_var_2 ->
happyIn7
(happy_var_1 . ((:) happy_var_2)
)}}
happyReduce_6 = happySpecReduce_0 3# happyReduction_6
happyReduction_6 = happyIn7
(id
)
happyReduce_7 = happySpecReduce_1 4# happyReduction_7
happyReduction_7 happy_x_1
= case happyOut10 happy_x_1 of { happy_var_1 ->
happyIn8
(ExprTick happy_var_1
)}
happyReduce_8 = happyReduce 6# 4# happyReduction_8
happyReduction_8 (happy_x_6 `HappyStk`
happy_x_5 `HappyStk`
happy_x_4 `HappyStk`
happy_x_3 `HappyStk`
happy_x_2 `HappyStk`
happy_x_1 `HappyStk`
happyRest)
= case happyOutTok happy_x_3 of { (STR happy_var_3) ->
case happyOut12 happy_x_4 of { happy_var_4 ->
case happyOut13 happy_x_5 of { happy_var_5 ->
happyIn8
(TickFunction happy_var_3 happy_var_4 happy_var_5
) `HappyStk` happyRest}}}
happyReduce_9 = happyReduce 5# 4# happyReduction_9
happyReduction_9 (happy_x_5 `HappyStk`
happy_x_4 `HappyStk`
happy_x_3 `HappyStk`
happy_x_2 `HappyStk`
happy_x_1 `HappyStk`
happyRest)
= case happyOutTok happy_x_2 of { (STR happy_var_2) ->
case happyOut7 happy_x_4 of { happy_var_4 ->
happyIn8
(InsideFunction happy_var_2 (happy_var_4 [])
) `HappyStk` happyRest}}
happyReduce_10 = happySpecReduce_2 5# happyReduction_10
happyReduction_10 happy_x_2
happy_x_1
= case happyOut9 happy_x_1 of { happy_var_1 ->
case happyOut10 happy_x_2 of { happy_var_2 ->
happyIn9
(happy_var_1 . ((:) happy_var_2)
)}}
happyReduce_11 = happySpecReduce_0 5# happyReduction_11
happyReduction_11 = happyIn9
(id
)
happyReduce_12 = happyReduce 5# 6# happyReduction_12
happyReduction_12 (happy_x_5 `HappyStk`
happy_x_4 `HappyStk`
happy_x_3 `HappyStk`
happy_x_2 `HappyStk`
happy_x_1 `HappyStk`
happyRest)
= case happyOut11 happy_x_2 of { happy_var_2 ->
case happyOut12 happy_x_3 of { happy_var_3 ->
case happyOut13 happy_x_4 of { happy_var_4 ->
happyIn10
(TickExpression False happy_var_2 happy_var_3 happy_var_4
) `HappyStk` happyRest}}}
happyReduce_13 = happySpecReduce_1 7# happyReduction_13
happyReduction_13 happy_x_1
= case happyOutTok happy_x_1 of { (STR happy_var_1) ->
happyIn11
(Just happy_var_1
)}
happyReduce_14 = happySpecReduce_0 7# happyReduction_14
happyReduction_14 = happyIn11
(Nothing
)
happyReduce_15 = happySpecReduce_3 8# happyReduction_15
happyReduction_15 happy_x_3
happy_x_2
happy_x_1
= case happyOutTok happy_x_3 of { (INT happy_var_3) ->
happyIn12
(Just (OnLine happy_var_3)
)}
happyReduce_16 = happyReduce 9# 8# happyReduction_16
happyReduction_16 (happy_x_9 `HappyStk`
happy_x_8 `HappyStk`
happy_x_7 `HappyStk`
happy_x_6 `HappyStk`
happy_x_5 `HappyStk`
happy_x_4 `HappyStk`
happy_x_3 `HappyStk`
happy_x_2 `HappyStk`
happy_x_1 `HappyStk`
happyRest)
= case happyOutTok happy_x_3 of { (INT happy_var_3) ->
case happyOutTok happy_x_5 of { (INT happy_var_5) ->
case happyOutTok happy_x_7 of { (INT happy_var_7) ->
case happyOutTok happy_x_9 of { (INT happy_var_9) ->
happyIn12
(Just (AtPosition happy_var_3 happy_var_5 happy_var_7 happy_var_9)
) `HappyStk` happyRest}}}}
happyReduce_17 = happySpecReduce_0 8# happyReduction_17
happyReduction_17 = happyIn12
(Nothing
)
happyReduce_18 = happySpecReduce_1 9# happyReduction_18
happyReduction_18 happy_x_1
= case happyOutTok happy_x_1 of { (CAT happy_var_1) ->
happyIn13
(Just happy_var_1
)}
happyReduce_19 = happySpecReduce_0 9# happyReduction_19
happyReduction_19 = happyIn13
(Nothing
)
happyNewToken action sts stk [] =
happyDoAction 18# notHappyAtAll action sts stk []
happyNewToken action sts stk (tk:tks) =
let cont i = happyDoAction i tk action sts stk tks in
case tk of {
ID "module" -> cont 1#;
ID "tick" -> cont 2#;
ID "expression" -> cont 3#;
ID "on" -> cont 4#;
ID "line" -> cont 5#;
ID "position" -> cont 6#;
ID "function" -> cont 7#;
ID "inside" -> cont 8#;
ID "at" -> cont 9#;
SYM ':' -> cont 10#;
SYM '-' -> cont 11#;
SYM ';' -> cont 12#;
SYM '{' -> cont 13#;
SYM '}' -> cont 14#;
INT happy_dollar_dollar -> cont 15#;
STR happy_dollar_dollar -> cont 16#;
CAT happy_dollar_dollar -> cont 17#;
_ -> happyError' (tk:tks)
}
happyError_ tk tks = happyError' (tk:tks)
newtype HappyIdentity a = HappyIdentity a
happyIdentity = HappyIdentity
happyRunIdentity (HappyIdentity a) = a
instance Monad HappyIdentity where
return = HappyIdentity
(HappyIdentity p) >>= q = q p
happyThen :: () => HappyIdentity a -> (a -> HappyIdentity b) -> HappyIdentity b
happyThen = (>>=)
happyReturn :: () => a -> HappyIdentity a
happyReturn = (return)
happyThen1 m k tks = (>>=) m (\a -> k a tks)
happyReturn1 :: () => a -> b -> HappyIdentity a
happyReturn1 = \a tks -> (return) a
happyError' :: () => [Token] -> HappyIdentity a
happyError' = HappyIdentity . happyError
parser tks = happyRunIdentity happySomeParser where
happySomeParser = happyThen (happyParse 0# tks) (\x -> happyReturn (happyOut4 x))
happySeq = happyDontSeq
type L a = [a] -> [a]
type ModuleName = String
data Spec
= Spec [ExprTick] [(ModuleName,[Tick])]
deriving (Show)
data ExprTick
= TickExpression Bool (Maybe String) (Maybe Qualifier) (Maybe String)
deriving (Show)
data Tick
= ExprTick ExprTick
| TickFunction String (Maybe Qualifier) (Maybe String)
| InsideFunction String [Tick]
deriving (Show)
data Qualifier = OnLine Int
| AtPosition Int Int Int Int
deriving (Show)
hpcParser :: String -> IO Spec
hpcParser filename = do
txt <- readFile filename
let tokens = initLexer txt
return $ parser tokens
happyError e = error $ show (take 10 e)
{-# LINE 1 "GenericTemplate.hs" #-}
{-# LINE 1 "GenericTemplate.hs" #-}
{-# LINE 1 "<built-in>" #-}
{-# LINE 1 "<command line>" #-}
{-# LINE 1 "GenericTemplate.hs" #-}
-- $Id: GenericTemplate.hs,v 1.26 2005/01/14 14:47:22 $
{-# LINE 28 "GenericTemplate.hs" #-}
data Happy_IntList = HappyCons Int# Happy_IntList
{-# LINE 49 "GenericTemplate.hs" #-}
{-# LINE 59 "GenericTemplate.hs" #-}
{-# LINE 68 "GenericTemplate.hs" #-}
infixr 9 `HappyStk`
data HappyStk a = HappyStk a (HappyStk a)
-----------------------------------------------------------------------------
-- starting the parse
happyParse start_state = happyNewToken start_state notHappyAtAll notHappyAtAll
-----------------------------------------------------------------------------
-- Accepting the parse
-- If the current token is 0#, it means we've just accepted a partial
-- parse (a %partial parser). We must ignore the saved token on the top of
-- the stack in this case.
happyAccept 0# tk st sts (_ `HappyStk` ans `HappyStk` _) =
happyReturn1 ans
happyAccept j tk st sts (HappyStk ans _) =
(happyTcHack j (happyTcHack st)) (happyReturn1 ans)
-----------------------------------------------------------------------------
-- Arrays only: do the next action
happyDoAction i tk st
= {- nothing -}
case action of
0# -> {- nothing -}
happyFail i tk st
-1# -> {- nothing -}
happyAccept i tk st
n | (n <# (0# :: Int#)) -> {- nothing -}
(happyReduceArr ! rule) i tk st
where rule = (I# ((negateInt# ((n +# (1# :: Int#))))))
n -> {- nothing -}
happyShift new_state i tk st
where new_state = (n -# (1# :: Int#))
where off = indexShortOffAddr happyActOffsets st
off_i = (off +# i)
check = if (off_i >=# (0# :: Int#))
then (indexShortOffAddr happyCheck off_i ==# i)
else False
action | check = indexShortOffAddr happyTable off_i
| otherwise = indexShortOffAddr happyDefActions st
{-# LINE 127 "GenericTemplate.hs" #-}
indexShortOffAddr (HappyA# arr) off =
#if __GLASGOW_HASKELL__ > 500
narrow16Int# i
#elif __GLASGOW_HASKELL__ == 500
intToInt16# i
#else
(i `iShiftL#` 16#) `iShiftRA#` 16#
#endif
where
#if __GLASGOW_HASKELL__ >= 503
i = word2Int# ((high `uncheckedShiftL#` 8#) `or#` low)
#else
i = word2Int# ((high `shiftL#` 8#) `or#` low)
#endif
high = int2Word# (ord# (indexCharOffAddr# arr (off' +# 1#)))
low = int2Word# (ord# (indexCharOffAddr# arr off'))
off' = off *# 2#
data HappyAddr = HappyA# Addr#
-----------------------------------------------------------------------------
-- HappyState data type (not arrays)
{-# LINE 170 "GenericTemplate.hs" #-}
-----------------------------------------------------------------------------
-- Shifting a token
happyShift new_state 0# tk st sts stk@(x `HappyStk` _) =
let i = (case unsafeCoerce# x of { (I# (i)) -> i }) in
-- trace "shifting the error token" $
happyDoAction i tk new_state (HappyCons (st) (sts)) (stk)
happyShift new_state i tk st sts stk =
happyNewToken new_state (HappyCons (st) (sts)) ((happyInTok (tk))`HappyStk`stk)
-- happyReduce is specialised for the common cases.
happySpecReduce_0 i fn 0# tk st sts stk
= happyFail 0# tk st sts stk
happySpecReduce_0 nt fn j tk st@((action)) sts stk
= happyGoto nt j tk st (HappyCons (st) (sts)) (fn `HappyStk` stk)
happySpecReduce_1 i fn 0# tk st sts stk
= happyFail 0# tk st sts stk
happySpecReduce_1 nt fn j tk _ sts@((HappyCons (st@(action)) (_))) (v1`HappyStk`stk')
= let r = fn v1 in
happySeq r (happyGoto nt j tk st sts (r `HappyStk` stk'))
happySpecReduce_2 i fn 0# tk st sts stk
= happyFail 0# tk st sts stk
happySpecReduce_2 nt fn j tk _ (HappyCons (_) (sts@((HappyCons (st@(action)) (_))))) (v1`HappyStk`v2`HappyStk`stk')
= let r = fn v1 v2 in
happySeq r (happyGoto nt j tk st sts (r `HappyStk` stk'))
happySpecReduce_3 i fn 0# tk st sts stk
= happyFail 0# tk st sts stk
happySpecReduce_3 nt fn j tk _ (HappyCons (_) ((HappyCons (_) (sts@((HappyCons (st@(action)) (_))))))) (v1`HappyStk`v2`HappyStk`v3`HappyStk`stk')
= let r = fn v1 v2 v3 in
happySeq r (happyGoto nt j tk st sts (r `HappyStk` stk'))
happyReduce k i fn 0# tk st sts stk
= happyFail 0# tk st sts stk
happyReduce k nt fn j tk st sts stk
= case happyDrop (k -# (1# :: Int#)) sts of
sts1@((HappyCons (st1@(action)) (_))) ->
let r = fn stk in -- it doesn't hurt to always seq here...
happyDoSeq r (happyGoto nt j tk st1 sts1 r)
happyMonadReduce k nt fn 0# tk st sts stk
= happyFail 0# tk st sts stk
happyMonadReduce k nt fn j tk st sts stk =
happyThen1 (fn stk tk) (\r -> happyGoto nt j tk st1 sts1 (r `HappyStk` drop_stk))
where sts1@((HappyCons (st1@(action)) (_))) = happyDrop k (HappyCons (st) (sts))
drop_stk = happyDropStk k stk
happyMonad2Reduce k nt fn 0# tk st sts stk
= happyFail 0# tk st sts stk
happyMonad2Reduce k nt fn j tk st sts stk =
happyThen1 (fn stk tk) (\r -> happyNewToken new_state sts1 (r `HappyStk` drop_stk))
where sts1@((HappyCons (st1@(action)) (_))) = happyDrop k (HappyCons (st) (sts))
drop_stk = happyDropStk k stk
off = indexShortOffAddr happyGotoOffsets st1
off_i = (off +# nt)
new_state = indexShortOffAddr happyTable off_i
happyDrop 0# l = l
happyDrop n (HappyCons (_) (t)) = happyDrop (n -# (1# :: Int#)) t
happyDropStk 0# l = l
happyDropStk n (x `HappyStk` xs) = happyDropStk (n -# (1#::Int#)) xs
-----------------------------------------------------------------------------
-- Moving to a new state after a reduction
happyGoto nt j tk st =
{- nothing -}
happyDoAction j tk new_state
where off = indexShortOffAddr happyGotoOffsets st
off_i = (off +# nt)
new_state = indexShortOffAddr happyTable off_i
-----------------------------------------------------------------------------
-- Error recovery (0# is the error token)
-- parse error if we are in recovery and we fail again
happyFail 0# tk old_st _ stk =
-- trace "failing" $
happyError_ tk
{- We don't need state discarding for our restricted implementation of
"error".  In fact, it can cause some bogus parses, so I've disabled it
for now --SDM
-- discard a state
happyFail 0# tk old_st (HappyCons ((action)) (sts))
(saved_tok `HappyStk` _ `HappyStk` stk) =
-- trace ("discarding state, depth " ++ show (length stk)) $
happyDoAction 0# tk action sts ((saved_tok`HappyStk`stk))
-}
-- Enter error recovery: generate an error token,
-- save the old token and carry on.
happyFail i tk (action) sts stk =
-- trace "entering error recovery" $
happyDoAction 0# tk action sts ( (unsafeCoerce# (I# (i))) `HappyStk` stk)
-- Internal happy errors:
notHappyAtAll = error "Internal Happy error\n"
-----------------------------------------------------------------------------
-- Hack to get the typechecker to accept our action functions
happyTcHack :: Int# -> a -> a
happyTcHack x y = y
{-# INLINE happyTcHack #-}
-----------------------------------------------------------------------------
-- Seq-ing.  If the --strict flag is given, then Happy emits
-- happySeq = happyDoSeq
-- otherwise it emits
-- happySeq = happyDontSeq
happyDoSeq, happyDontSeq :: a -> b -> b
happyDoSeq a b = a `seq` b
happyDontSeq a b = b
-----------------------------------------------------------------------------
-- Don't inline any functions from the template.  GHC has a nasty habit
-- of deciding to inline happyGoto everywhere, which increases the size of
-- the generated parser quite a bit.
{-# NOINLINE happyDoAction #-}
{-# NOINLINE happyTable #-}
{-# NOINLINE happyCheck #-}
{-# NOINLINE happyActOffsets #-}
{-# NOINLINE happyGotoOffsets #-}
{-# NOINLINE happyDefActions #-}
{-# NOINLINE happyShift #-}
{-# NOINLINE happySpecReduce_0 #-}
{-# NOINLINE happySpecReduce_1 #-}
{-# NOINLINE happySpecReduce_2 #-}
{-# NOINLINE happySpecReduce_3 #-}
{-# NOINLINE happyReduce #-}
{-# NOINLINE happyMonadReduce #-}
{-# NOINLINE happyGoto #-}
{-# NOINLINE happyFail #-}
-- end of Happy Template.
| null |
https://raw.githubusercontent.com/Ericson2314/lighthouse/210078b846ebd6c43b89b5f0f735362a01a9af02/ghc-6.8.2/utils/hpc/HpcParser.hs
|
haskell
|
# OPTIONS -fglasgow-exts -cpp #
The above warning supression flag is a temporary kludge.
While working on this module you are encouraged to remove it and fix
any warnings in the module. See
#Warnings
for details
# LINE 1 "GenericTemplate.hs" #
# LINE 1 "GenericTemplate.hs" #
# LINE 1 "<built-in>" #
# LINE 1 "<command line>" #
# LINE 1 "GenericTemplate.hs" #
# LINE 28 "GenericTemplate.hs" #
# LINE 68 "GenericTemplate.hs" #
---------------------------------------------------------------------------
starting the parse
---------------------------------------------------------------------------
Accepting the parse
If the current token is 0#, it means we've just accepted a partial
parse (a %partial parser). We must ignore the saved token on the top of
the stack in this case.
---------------------------------------------------------------------------
Arrays only: do the next action
nothing
nothing
nothing
nothing
nothing
---------------------------------------------------------------------------
---------------------------------------------------------------------------
Shifting a token
trace "shifting the error token" $
it doesn't hurt to always seq here...
---------------------------------------------------------------------------
Moving to a new state after a reduction
nothing
---------------------------------------------------------------------------
Error recovery (0# is the error token)
parse error if we are in recovery and we fail again
trace "failing" $
SDM
discard a state
trace ( " discarding state , depth " + + show ( length stk ) ) $
SDM
discard a state
trace ("discarding state, depth " ++ show (length stk)) $
Enter error recovery: generate an error token,
save the old token and carry on.
trace "entering error recovery" $
---------------------------------------------------------------------------
Hack to get the typechecker to accept our action functions
---------------------------------------------------------------------------
strict flag is given , then Happy emits
happySeq = happyDoSeq
otherwise it emits
happySeq = happyDontSeq
---------------------------------------------------------------------------
of deciding to inline happyGoto everywhere, which increases the size of
the generated parser quite a bit.
# NOINLINE happySpecReduce_0 #
# NOINLINE happySpecReduce_2 #
end of Happy Template.
|
{-# OPTIONS -w #-}
module HpcParser where
import HpcLexer
#if __GLASGOW_HASKELL__ >= 503
import Data.Array
#else
import Array
#endif
#if __GLASGOW_HASKELL__ >= 503
import GHC.Exts
#else
import GlaExts
#endif
-- parser produced by Happy Version 1.16
newtype HappyAbsSyn = HappyAbsSyn HappyAny
#if __GLASGOW_HASKELL__ >= 607
type HappyAny = GHC.Exts.Any
#else
type HappyAny = forall a . a
#endif
happyIn4 :: (Spec) -> (HappyAbsSyn )
happyIn4 x = unsafeCoerce# x
# INLINE happyIn4 #
happyOut4 :: (HappyAbsSyn ) -> (Spec)
happyOut4 x = unsafeCoerce# x
# INLINE happyOut4 #
happyIn5 :: (L (ModuleName,[Tick])) -> (HappyAbsSyn )
happyIn5 x = unsafeCoerce# x
# INLINE happyIn5 #
happyOut5 :: (HappyAbsSyn ) -> (L (ModuleName,[Tick]))
happyOut5 x = unsafeCoerce# x
# INLINE happyOut5 #
happyIn6 :: ((ModuleName,[Tick])) -> (HappyAbsSyn )
happyIn6 x = unsafeCoerce# x
# INLINE happyIn6 #
happyOut6 :: (HappyAbsSyn ) -> ((ModuleName,[Tick]))
happyOut6 x = unsafeCoerce# x
# INLINE happyOut6 #
happyIn7 :: (L Tick) -> (HappyAbsSyn )
happyIn7 x = unsafeCoerce# x
# INLINE happyIn7 #
happyOut7 :: (HappyAbsSyn ) -> (L Tick)
happyOut7 x = unsafeCoerce# x
# INLINE happyOut7 #
happyIn8 :: (Tick) -> (HappyAbsSyn )
happyIn8 x = unsafeCoerce# x
# INLINE happyIn8 #
happyOut8 :: (HappyAbsSyn ) -> (Tick)
happyOut8 x = unsafeCoerce# x
# INLINE happyOut8 #
happyIn9 :: (L ExprTick) -> (HappyAbsSyn )
happyIn9 x = unsafeCoerce# x
# INLINE happyIn9 #
happyOut9 :: (HappyAbsSyn ) -> (L ExprTick)
happyOut9 x = unsafeCoerce# x
# INLINE happyOut9 #
happyIn10 :: (ExprTick) -> (HappyAbsSyn )
happyIn10 x = unsafeCoerce# x
# INLINE happyIn10 #
happyOut10 :: (HappyAbsSyn ) -> (ExprTick)
happyOut10 x = unsafeCoerce# x
# INLINE happyOut10 #
happyIn11 :: (Maybe String) -> (HappyAbsSyn )
happyIn11 x = unsafeCoerce# x
# INLINE happyIn11 #
happyOut11 :: (HappyAbsSyn ) -> (Maybe String)
happyOut11 x = unsafeCoerce# x
# INLINE happyOut11 #
happyIn12 :: (Maybe Qualifier) -> (HappyAbsSyn )
happyIn12 x = unsafeCoerce# x
# INLINE happyIn12 #
happyOut12 :: (HappyAbsSyn ) -> (Maybe Qualifier)
happyOut12 x = unsafeCoerce# x
# INLINE happyOut12 #
happyIn13 :: (Maybe String) -> (HappyAbsSyn )
happyIn13 x = unsafeCoerce# x
# INLINE happyIn13 #
happyOut13 :: (HappyAbsSyn ) -> (Maybe String)
happyOut13 x = unsafeCoerce# x
# INLINE happyOut13 #
happyInTok :: Token -> (HappyAbsSyn )
happyInTok x = unsafeCoerce# x
# INLINE happyInTok #
happyOutTok :: (HappyAbsSyn ) -> Token
happyOutTok x = unsafeCoerce# x
# INLINE happyOutTok #
happyActOffsets :: HappyAddr
happyActOffsets = HappyA# "\x00\x00\x00\x00\x33\x00\x26\x00\x2e\x00\x00\x00\x24\x00\x0a\x00\x00\x00\x00\x00\x23\x00\x25\x00\x0b\x00\x29\x00\x1d\x00\x22\x00\x21\x00\x1f\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x20\x00\x1e\x00\x00\x00\x00\x00\xfb\xff\x1c\x00\x00\x00\x1b\x00\x19\x00\x1a\x00\x18\x00\x0a\x00\x00\x00\xff\xff\x08\x00\x17\x00\x15\x00\x11\x00\x00\x00\x00\x00\x00\x00\x00\x00"#
happyGotoOffsets :: HappyAddr
happyGotoOffsets = HappyA# "\x0c\x00\x16\x00\x04\x00\x00\x00\x14\x00\x00\x00\x13\x00\x10\x00\x00\x00\x00\x00\x00\x00\x00\x00\x0e\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x12\x00\x02\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x0d\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xfc\xff\x0f\x00\x02\x00\x07\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00"#
happyDefActions :: HappyAddr
happyDefActions = HappyA# "\xf4\xff\x00\x00\xfc\xff\x00\x00\xfe\xff\xf5\xff\xf1\xff\xee\xff\xf2\xff\xfd\xff\x00\x00\x00\x00\xec\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xed\xff\xf9\xff\x00\x00\xf3\xff\xf0\xff\x00\x00\x00\x00\xfa\xff\xf8\xff\xf1\xff\x00\x00\xfb\xff\x00\x00\x00\x00\x00\x00\x00\x00\xee\xff\xf9\xff\x00\x00\xec\xff\x00\x00\x00\x00\x00\x00\xf6\xff\xf7\xff\xef\xff"#
happyCheck :: HappyAddr
happyCheck = HappyA# "\xff\xff\x02\x00\x07\x00\x02\x00\x08\x00\x01\x00\x04\x00\x08\x00\x06\x00\x08\x00\x06\x00\x10\x00\x00\x00\x0e\x00\x04\x00\x0e\x00\x09\x00\x05\x00\x03\x00\x09\x00\x07\x00\x03\x00\x02\x00\x09\x00\x08\x00\x11\x00\x07\x00\x05\x00\x11\x00\x0c\x00\xff\xff\xff\xff\xff\xff\x0a\x00\xff\xff\x06\x00\x0f\x00\x0b\x00\xff\xff\x0f\x00\x0d\x00\x10\x00\x0a\x00\x0c\x00\x10\x00\x0f\x00\x05\x00\x01\x00\x0f\x00\x0f\x00\x0d\x00\x10\x00\x10\x00\x02\x00\xff\xff\xff\xff\x12\x00\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff"#
happyTable :: HappyAddr
happyTable = HappyA# "\x00\x00\x1c\x00\x20\x00\x1c\x00\x25\x00\x04\x00\x19\x00\x1d\x00\x1a\x00\x1d\x00\x05\x00\x09\x00\x03\x00\x2a\x00\x0e\x00\x1e\x00\x28\x00\x02\x00\x24\x00\x0f\x00\x07\x00\x14\x00\x09\x00\x11\x00\x0c\x00\x13\x00\x07\x00\x02\x00\x13\x00\x2b\x00\x00\x00\x00\x00\x00\x00\x28\x00\x00\x00\x10\x00\x2c\x00\x22\x00\x00\x00\x27\x00\x24\x00\x23\x00\x19\x00\x16\x00\x1f\x00\x21\x00\x11\x00\x0b\x00\x17\x00\x18\x00\x14\x00\x0c\x00\x09\x00\x07\x00\x00\x00\x00\x00\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00"#
happyReduceArr = array (1, 19) [
(1 , happyReduce_1),
(2 , happyReduce_2),
(3 , happyReduce_3),
(4 , happyReduce_4),
(5 , happyReduce_5),
(6 , happyReduce_6),
(7 , happyReduce_7),
(8 , happyReduce_8),
(9 , happyReduce_9),
(10 , happyReduce_10),
(11 , happyReduce_11),
(12 , happyReduce_12),
(13 , happyReduce_13),
(14 , happyReduce_14),
(15 , happyReduce_15),
(16 , happyReduce_16),
(17 , happyReduce_17),
(18 , happyReduce_18),
(19 , happyReduce_19)
]
happy_n_terms = 19 :: Int
happy_n_nonterms = 10 :: Int
happyReduce_1 = happySpecReduce_2 0# happyReduction_1
happyReduction_1 happy_x_2
happy_x_1
= case happyOut9 happy_x_1 of { happy_var_1 ->
case happyOut5 happy_x_2 of { happy_var_2 ->
happyIn4
(Spec (happy_var_1 []) (happy_var_2 [])
)}}
happyReduce_2 = happySpecReduce_2 1# happyReduction_2
happyReduction_2 happy_x_2
happy_x_1
= case happyOut5 happy_x_1 of { happy_var_1 ->
case happyOut6 happy_x_2 of { happy_var_2 ->
happyIn5
(happy_var_1 . ((:) happy_var_2)
)}}
happyReduce_3 = happySpecReduce_0 1# happyReduction_3
happyReduction_3 = happyIn5
(id
)
happyReduce_4 = happyReduce 5# 2# happyReduction_4
happyReduction_4 (happy_x_5 `HappyStk`
happy_x_4 `HappyStk`
happy_x_3 `HappyStk`
happy_x_2 `HappyStk`
happy_x_1 `HappyStk`
happyRest)
= case happyOutTok happy_x_2 of { (STR happy_var_2) ->
case happyOut7 happy_x_4 of { happy_var_4 ->
happyIn6
((happy_var_2,happy_var_4 [])
) `HappyStk` happyRest}}
happyReduce_5 = happySpecReduce_2 3# happyReduction_5
happyReduction_5 happy_x_2
happy_x_1
= case happyOut7 happy_x_1 of { happy_var_1 ->
case happyOut8 happy_x_2 of { happy_var_2 ->
happyIn7
(happy_var_1 . ((:) happy_var_2)
)}}
happyReduce_6 = happySpecReduce_0 3# happyReduction_6
happyReduction_6 = happyIn7
(id
)
happyReduce_7 = happySpecReduce_1 4# happyReduction_7
happyReduction_7 happy_x_1
= case happyOut10 happy_x_1 of { happy_var_1 ->
happyIn8
(ExprTick happy_var_1
)}
happyReduce_8 = happyReduce 6# 4# happyReduction_8
happyReduction_8 (happy_x_6 `HappyStk`
happy_x_5 `HappyStk`
happy_x_4 `HappyStk`
happy_x_3 `HappyStk`
happy_x_2 `HappyStk`
happy_x_1 `HappyStk`
happyRest)
= case happyOutTok happy_x_3 of { (STR happy_var_3) ->
case happyOut12 happy_x_4 of { happy_var_4 ->
case happyOut13 happy_x_5 of { happy_var_5 ->
happyIn8
(TickFunction happy_var_3 happy_var_4 happy_var_5
) `HappyStk` happyRest}}}
happyReduce_9 = happyReduce 5# 4# happyReduction_9
happyReduction_9 (happy_x_5 `HappyStk`
happy_x_4 `HappyStk`
happy_x_3 `HappyStk`
happy_x_2 `HappyStk`
happy_x_1 `HappyStk`
happyRest)
= case happyOutTok happy_x_2 of { (STR happy_var_2) ->
case happyOut7 happy_x_4 of { happy_var_4 ->
happyIn8
(InsideFunction happy_var_2 (happy_var_4 [])
) `HappyStk` happyRest}}
happyReduce_10 = happySpecReduce_2 5# happyReduction_10
happyReduction_10 happy_x_2
happy_x_1
= case happyOut9 happy_x_1 of { happy_var_1 ->
case happyOut10 happy_x_2 of { happy_var_2 ->
happyIn9
(happy_var_1 . ((:) happy_var_2)
)}}
happyReduce_11 = happySpecReduce_0 5# happyReduction_11
happyReduction_11 = happyIn9
(id
)
happyReduce_12 = happyReduce 5# 6# happyReduction_12
happyReduction_12 (happy_x_5 `HappyStk`
happy_x_4 `HappyStk`
happy_x_3 `HappyStk`
happy_x_2 `HappyStk`
happy_x_1 `HappyStk`
happyRest)
= case happyOut11 happy_x_2 of { happy_var_2 ->
case happyOut12 happy_x_3 of { happy_var_3 ->
case happyOut13 happy_x_4 of { happy_var_4 ->
happyIn10
(TickExpression False happy_var_2 happy_var_3 happy_var_4
) `HappyStk` happyRest}}}
happyReduce_13 = happySpecReduce_1 7# happyReduction_13
happyReduction_13 happy_x_1
= case happyOutTok happy_x_1 of { (STR happy_var_1) ->
happyIn11
(Just happy_var_1
)}
happyReduce_14 = happySpecReduce_0 7# happyReduction_14
happyReduction_14 = happyIn11
(Nothing
)
happyReduce_15 = happySpecReduce_3 8# happyReduction_15
happyReduction_15 happy_x_3
happy_x_2
happy_x_1
= case happyOutTok happy_x_3 of { (INT happy_var_3) ->
happyIn12
(Just (OnLine happy_var_3)
)}
happyReduce_16 = happyReduce 9# 8# happyReduction_16
happyReduction_16 (happy_x_9 `HappyStk`
happy_x_8 `HappyStk`
happy_x_7 `HappyStk`
happy_x_6 `HappyStk`
happy_x_5 `HappyStk`
happy_x_4 `HappyStk`
happy_x_3 `HappyStk`
happy_x_2 `HappyStk`
happy_x_1 `HappyStk`
happyRest)
= case happyOutTok happy_x_3 of { (INT happy_var_3) ->
case happyOutTok happy_x_5 of { (INT happy_var_5) ->
case happyOutTok happy_x_7 of { (INT happy_var_7) ->
case happyOutTok happy_x_9 of { (INT happy_var_9) ->
happyIn12
(Just (AtPosition happy_var_3 happy_var_5 happy_var_7 happy_var_9)
) `HappyStk` happyRest}}}}
happyReduce_17 = happySpecReduce_0 8# happyReduction_17
happyReduction_17 = happyIn12
(Nothing
)
happyReduce_18 = happySpecReduce_1 9# happyReduction_18
happyReduction_18 happy_x_1
= case happyOutTok happy_x_1 of { (CAT happy_var_1) ->
happyIn13
(Just happy_var_1
)}
happyReduce_19 = happySpecReduce_0 9# happyReduction_19
happyReduction_19 = happyIn13
(Nothing
)
happyNewToken action sts stk [] =
happyDoAction 18# notHappyAtAll action sts stk []
happyNewToken action sts stk (tk:tks) =
let cont i = happyDoAction i tk action sts stk tks in
case tk of {
ID "module" -> cont 1#;
ID "tick" -> cont 2#;
ID "expression" -> cont 3#;
ID "on" -> cont 4#;
ID "line" -> cont 5#;
ID "position" -> cont 6#;
ID "function" -> cont 7#;
ID "inside" -> cont 8#;
ID "at" -> cont 9#;
SYM ':' -> cont 10#;
SYM '-' -> cont 11#;
SYM ';' -> cont 12#;
SYM '{' -> cont 13#;
SYM '}' -> cont 14#;
INT happy_dollar_dollar -> cont 15#;
STR happy_dollar_dollar -> cont 16#;
CAT happy_dollar_dollar -> cont 17#;
_ -> happyError' (tk:tks)
}
happyError_ tk tks = happyError' (tk:tks)
newtype HappyIdentity a = HappyIdentity a
happyIdentity = HappyIdentity
happyRunIdentity (HappyIdentity a) = a
instance Monad HappyIdentity where
return = HappyIdentity
(HappyIdentity p) >>= q = q p
happyThen :: () => HappyIdentity a -> (a -> HappyIdentity b) -> HappyIdentity b
happyThen = (>>=)
happyReturn :: () => a -> HappyIdentity a
happyReturn = (return)
happyThen1 m k tks = (>>=) m (\a -> k a tks)
happyReturn1 :: () => a -> b -> HappyIdentity a
happyReturn1 = \a tks -> (return) a
happyError' :: () => [Token] -> HappyIdentity a
happyError' = HappyIdentity . happyError
parser tks = happyRunIdentity happySomeParser where
happySomeParser = happyThen (happyParse 0# tks) (\x -> happyReturn (happyOut4 x))
happySeq = happyDontSeq
type L a = [a] -> [a]
type ModuleName = String
data Spec
= Spec [ExprTick] [(ModuleName,[Tick])]
deriving (Show)
data ExprTick
= TickExpression Bool (Maybe String) (Maybe Qualifier) (Maybe String)
deriving (Show)
data Tick
= ExprTick ExprTick
| TickFunction String (Maybe Qualifier) (Maybe String)
| InsideFunction String [Tick]
deriving (Show)
data Qualifier = OnLine Int
| AtPosition Int Int Int Int
deriving (Show)
hpcParser :: String -> IO Spec
hpcParser filename = do
txt <- readFile filename
let tokens = initLexer txt
return $ parser tokens
happyError e = error $ show (take 10 e)
data Happy_IntList = HappyCons Int# Happy_IntList
# LINE 49 " GenericTemplate.hs " #
# LINE 59 " GenericTemplate.hs " #
infixr 9 `HappyStk`
data HappyStk a = HappyStk a (HappyStk a)
happyParse start_state = happyNewToken start_state notHappyAtAll notHappyAtAll
happyAccept 0# tk st sts (_ `HappyStk` ans `HappyStk` _) =
happyReturn1 ans
happyAccept j tk st sts (HappyStk ans _) =
(happyTcHack j (happyTcHack st)) (happyReturn1 ans)
happyDoAction i tk st
case action of
happyFail i tk st
happyAccept i tk st
(happyReduceArr ! rule) i tk st
where rule = (I# ((negateInt# ((n +# (1# :: Int#))))))
happyShift new_state i tk st
where new_state = (n -# (1# :: Int#))
where off = indexShortOffAddr happyActOffsets st
off_i = (off +# i)
check = if (off_i >=# (0# :: Int#))
then (indexShortOffAddr happyCheck off_i ==# i)
else False
action | check = indexShortOffAddr happyTable off_i
| otherwise = indexShortOffAddr happyDefActions st
{-# LINE 127 "GenericTemplate.hs" #-}
indexShortOffAddr (HappyA# arr) off =
#if __GLASGOW_HASKELL__ > 500
narrow16Int# i
#elif __GLASGOW_HASKELL__ == 500
intToInt16# i
#else
(i `iShiftL#` 16#) `iShiftRA#` 16#
#endif
where
#if __GLASGOW_HASKELL__ >= 503
i = word2Int# ((high `uncheckedShiftL#` 8#) `or#` low)
#else
i = word2Int# ((high `shiftL#` 8#) `or#` low)
#endif
high = int2Word# (ord# (indexCharOffAddr# arr (off' +# 1#)))
low = int2Word# (ord# (indexCharOffAddr# arr off'))
off' = off *# 2#
data HappyAddr = HappyA# Addr#
-- HappyState data type (not arrays)
{-# LINE 170 "GenericTemplate.hs" #-}
happyShift new_state 0# tk st sts stk@(x `HappyStk` _) =
let i = (case unsafeCoerce# x of { (I# (i)) -> i }) in
happyDoAction i tk new_state (HappyCons (st) (sts)) (stk)
happyShift new_state i tk st sts stk =
happyNewToken new_state (HappyCons (st) (sts)) ((happyInTok (tk))`HappyStk`stk)
-- happyReduce is specialised for the common cases.
happySpecReduce_0 i fn 0# tk st sts stk
= happyFail 0# tk st sts stk
happySpecReduce_0 nt fn j tk st@((action)) sts stk
= happyGoto nt j tk st (HappyCons (st) (sts)) (fn `HappyStk` stk)
happySpecReduce_1 i fn 0# tk st sts stk
= happyFail 0# tk st sts stk
happySpecReduce_1 nt fn j tk _ sts@((HappyCons (st@(action)) (_))) (v1`HappyStk`stk')
= let r = fn v1 in
happySeq r (happyGoto nt j tk st sts (r `HappyStk` stk'))
happySpecReduce_2 i fn 0# tk st sts stk
= happyFail 0# tk st sts stk
happySpecReduce_2 nt fn j tk _ (HappyCons (_) (sts@((HappyCons (st@(action)) (_))))) (v1`HappyStk`v2`HappyStk`stk')
= let r = fn v1 v2 in
happySeq r (happyGoto nt j tk st sts (r `HappyStk` stk'))
happySpecReduce_3 i fn 0# tk st sts stk
= happyFail 0# tk st sts stk
happySpecReduce_3 nt fn j tk _ (HappyCons (_) ((HappyCons (_) (sts@((HappyCons (st@(action)) (_))))))) (v1`HappyStk`v2`HappyStk`v3`HappyStk`stk')
= let r = fn v1 v2 v3 in
happySeq r (happyGoto nt j tk st sts (r `HappyStk` stk'))
happyReduce k i fn 0# tk st sts stk
= happyFail 0# tk st sts stk
happyReduce k nt fn j tk st sts stk
= case happyDrop (k -# (1# :: Int#)) sts of
sts1@((HappyCons (st1@(action)) (_))) ->
happyDoSeq r (happyGoto nt j tk st1 sts1 r)
happyMonadReduce k nt fn 0# tk st sts stk
= happyFail 0# tk st sts stk
happyMonadReduce k nt fn j tk st sts stk =
happyThen1 (fn stk tk) (\r -> happyGoto nt j tk st1 sts1 (r `HappyStk` drop_stk))
where sts1@((HappyCons (st1@(action)) (_))) = happyDrop k (HappyCons (st) (sts))
drop_stk = happyDropStk k stk
happyMonad2Reduce k nt fn 0# tk st sts stk
= happyFail 0# tk st sts stk
happyMonad2Reduce k nt fn j tk st sts stk =
happyThen1 (fn stk tk) (\r -> happyNewToken new_state sts1 (r `HappyStk` drop_stk))
where sts1@((HappyCons (st1@(action)) (_))) = happyDrop k (HappyCons (st) (sts))
drop_stk = happyDropStk k stk
off = indexShortOffAddr happyGotoOffsets st1
off_i = (off +# nt)
new_state = indexShortOffAddr happyTable off_i
happyDrop 0# l = l
happyDrop n (HappyCons (_) (t)) = happyDrop (n -# (1# :: Int#)) t
happyDropStk 0# l = l
happyDropStk n (x `HappyStk` xs) = happyDropStk (n -# (1#::Int#)) xs
happyGoto nt j tk st =
happyDoAction j tk new_state
where off = indexShortOffAddr happyGotoOffsets st
off_i = (off +# nt)
new_state = indexShortOffAddr happyTable off_i
happyFail 0# tk old_st _ stk =
happyError_ tk
{- We don't need state discarding for our restricted implementation of
"error". In fact, it can cause some bogus parses, so I've disabled it
happyFail 0# tk old_st (HappyCons ((action)) (sts))
(saved_tok `HappyStk` _ `HappyStk` stk) =
happyDoAction 0# tk action sts ((saved_tok`HappyStk`stk))
-}
happyFail i tk (action) sts stk =
happyDoAction 0# tk action sts ( (unsafeCoerce# (I# (i))) `HappyStk` stk)
-- Internal happy errors:
notHappyAtAll = error "Internal Happy error\n"
happyTcHack :: Int# -> a -> a
happyTcHack x y = y
{-# INLINE happyTcHack #-}
happyDoSeq, happyDontSeq :: a -> b -> b
happyDoSeq a b = a `seq` b
happyDontSeq a b = b
-- Don't inline any functions from the template.  GHC has a nasty habit
{-# NOINLINE happyDoAction #-}
{-# NOINLINE happyTable #-}
{-# NOINLINE happyCheck #-}
{-# NOINLINE happyActOffsets #-}
{-# NOINLINE happyGotoOffsets #-}
{-# NOINLINE happyDefActions #-}
{-# NOINLINE happyShift #-}
{-# NOINLINE happySpecReduce_1 #-}
{-# NOINLINE happySpecReduce_3 #-}
{-# NOINLINE happyReduce #-}
{-# NOINLINE happyMonadReduce #-}
{-# NOINLINE happyGoto #-}
{-# NOINLINE happyFail #-}
|
bb197b03de6274246e04105eff206d70d5a328446761bae57ef30217badba822
|
statebox/cql
|
Options.hs
|
{-
SPDX-License-Identifier: AGPL-3.0-only
This file is part of `statebox/cql`, the categorical query language.
Copyright (C) 2019 Stichting Statebox <>
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU Affero General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU Affero General Public License for more details.
You should have received a copy of the GNU Affero General Public License
along with this program. If not, see </>.
-}
{-# LANGUAGE EmptyDataDeriving #-}
module Language.CQL.Options where
import Data.Void
import Language.CQL.Common (Err, intercalate, lower)
import Text.Read
data Options = Options {
iOps :: IntOption -> Integer,
bOps :: BoolOption -> Bool,
sOps :: StringOption -> String
-- cOps :: Map CharOption Char -- not needed for now
}
instance Show Options where
show y = intercalate "\n" (map (\x -> show x ++ " = " ++ show (iOps y x)) opsI) ++ "\n" ++
intercalate "\n" (map (\x -> show x ++ " = " ++ show (bOps y x)) opsB) ++ "\n" ++
intercalate "\n" (map (\x -> show x ++ " = " ++ (sOps y x)) opsS)
toOptions :: Options -> [(String, String)] -> Err Options
toOptions o [] = return o
toOptions def ((k,v):l) = do
Options s t u <- toOptions def l
case a of
Left _ -> case b of
Left _ -> do { (o, i) <- c ; return $ Options s t (f o i u) }
Right (o, i) -> return $ Options s (f o i t) u
Right (o, i) -> return $ Options (f o i s) t u
where
a = toIntegerOption (k, v)
b = toBoolOption (k, v)
c = toStringOption (k, v)
f j u m x = if j == x then u else m x
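-- Usage sketch (added for illustration, not part of the upstream file; the
-- key/value pairs below are hypothetical):
--
--   toOptions defaultOptions [("timeout", "60"), ("prover", "e")]
--
-- overrides the Timeout and Prover defaults while leaving every other option
-- at its default value; an unknown key such as ("frobnicate", "yes") is
-- reported as a "No option called ..." error.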
toIntegerOption :: (String, String) -> Err (IntOption, Integer)
toIntegerOption (k, v) = case matches of
[] -> Left $ "No option called " ++ k
(x:_) -> do { a <- parseInt v ; return (x, a) }
where
matches = [ k' | k' <- opsI, lower (show k') == k ]
parseInt :: String -> Err Integer
parseInt x = case readMaybe x of
Nothing -> Left $ "Not an int: " ++ x
Just y -> Right y
toStringOption :: (String, String) -> Err (StringOption, String)
toStringOption (k,v) = case matches of
[] -> Left $ "No option called " ++ k
(x:_) -> return (x, v)
where
matches = [ k' | k' <- opsS, lower (show k') == k ]
toBoolOption :: (String, String) -> Err (BoolOption, Bool)
toBoolOption (k,v) = case matches of
[] -> Left $ "No option called " ++ k
(x:_) -> do { a <- parseBool v ; return (x, a) }
where
matches = [ k' | k' <- opsB, lower (show k') == k ]
parseBool z = case z of
"true" -> Right True
"false" -> Right False
x -> Left $ "Not a bool: " ++ x
-- | Default values for Boolean options.
boolDef :: BoolOption -> Bool
boolDef o = case o of
Program_Allow_Nontermination_Unsafe -> False
Allow_Empty_Sorts_Unsafe -> False
Program_Allow_Nonconfluence_Unsafe -> False
Dont_Validate_Unsafe -> False
Interpret_As_Algebra -> False
Require_Consistency -> True
-- | Default values for Integer options.
intDef :: IntOption -> Integer
intDef o = case o of
Timeout -> 30
-- | Default values for String options.
stringDef :: StringOption -> String
stringDef o = case o of
Prover -> "auto"
-- | Default options.
defaultOptions :: Options
defaultOptions = Options intDef boolDef stringDef
-- | Returns a list of all enums in a given class.
generateEnumValues :: (Enum a) => [a]
generateEnumValues = enumFrom (toEnum 0)
-- | All the Boolean options.
opsB :: [BoolOption]
opsB = generateEnumValues
-- | All the Integer options.
opsI :: [IntOption]
opsI = generateEnumValues
-- | All the String options.
opsS :: [StringOption]
opsS = generateEnumValues
-- comment out options we can't handle yet.
data BoolOption =
Require_Consistency
| Dont_Validate_Unsafe
-- | Always_Reload
| Program_Allow_Nonconfluence_Unsafe
| Interpret_As_Algebra
| Program_Allow_Nontermination_Unsafe
| Allow_Empty_Sorts_Unsafe
-- | Schema_Only
-- | Query_Remove_Redundancy
-- | Import_As_Theory
-- | Import_Joined
-- | Prepend_Entity_On_Ids
-- | Csv_Generate_Ids
-- | Completion_Sort
-- | Completion_Compose
-- | Completion_Filter_Subsumed
-- | Completion_Syntactic_Ac
-- | Eval_Reorder_Joins
-- | Eval_Join_Selectivity
-- | Eval_Use_Indices
-- | Eval_Approx_Sql_Unsafe
-- | Eval_Sql_PersistentIndices
-- | Coproduct_Allow_Collisions
deriving (Eq, Ord, Show, Enum)
data StringOption =
-- Csv_File_Extension
-- | Id_Column_
-- | Jdbc_Default_Class
-- | Jdbc_Default_String
-- | Completion_Precedence
Prover
deriving (Eq, Ord, Show, Enum)
-- | Accessor due to namespace colision.
prover_name :: StringOption
prover_name = Prover -- for name collision
data IntOption =
-- Num_Threads
-- | Random_Seed
Timeout
-- | Varchar_Length
-- | Start_Ids_At
-- | Gui_Max_Graph_Size
-- | Gui_Max_String_Size
-- | Gui_Rows_To_Display
| Eval_Max_Plan_Depth
deriving (Eq, Ord, Show, Enum)
type CharOption = Void
{-
data CharOption =
    Csv_Escape_Char
  | Csv_Quote_Char
  deriving (Eq, Ord, Show, Enum)
-}
| null |
https://raw.githubusercontent.com/statebox/cql/b155e737ef4977ec753e44790f236686ff6a4558/src/Language/CQL/Options.hs
|
haskell
|
# LANGUAGE EmptyDataDeriving #
not needed for now
| Default values for String options.
| Default options.
| Returns a list of all enums in a given class.
| All the String options.
comment out options we can't handle yet.
| Always_Reload
| Schema_Only
| Query_Remove_Redundancy
| Import_As_Theory
| Import_Joined
| Prepend_Entity_On_Ids
| Csv_Generate_Ids
| Completion_Sort
| Completion_Compose
| Completion_Filter_Subsumed
| Completion_Syntactic_Ac
| Eval_Reorder_Joins
| Eval_Join_Selectivity
| Eval_Use_Indices
| Eval_Approx_Sql_Unsafe
| Eval_Sql_PersistentIndices
| Coproduct_Allow_Collisions
Csv_File_Extension
| Id_Column_
| Jdbc_Default_Class
| Jdbc_Default_String
| Completion_Precedence
| Accessor due to namespace colision.
for name collision
Num_Threads
| Random_Seed
| Varchar_Length
| Start_Ids_At
| Gui_Max_Graph_Size
| Gui_Max_String_Size
| Gui_Rows_To_Display
|
{-
SPDX-License-Identifier: AGPL-3.0-only
This file is part of `statebox/cql`, the categorical query language.
Copyright (C) 2019 Stichting Statebox <>
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU Affero General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU Affero General Public License for more details.
You should have received a copy of the GNU Affero General Public License
along with this program. If not, see </>.
-}
module Language.CQL.Options where
import Data.Void
import Language.CQL.Common (Err, intercalate, lower)
import Text.Read
data Options = Options {
iOps :: IntOption -> Integer,
bOps :: BoolOption -> Bool,
sOps :: StringOption -> String
}
instance Show Options where
show y = intercalate "\n" (map (\x -> show x ++ " = " ++ show (iOps y x)) opsI) ++ "\n" ++
intercalate "\n" (map (\x -> show x ++ " = " ++ show (bOps y x)) opsB) ++ "\n" ++
intercalate "\n" (map (\x -> show x ++ " = " ++ (sOps y x)) opsS)
toOptions :: Options -> [(String, String)] -> Err Options
toOptions o [] = return o
toOptions def ((k,v):l) = do
Options s t u <- toOptions def l
case a of
Left _ -> case b of
Left _ -> do { (o, i) <- c ; return $ Options s t (f o i u) }
Right (o, i) -> return $ Options s (f o i t) u
Right (o, i) -> return $ Options (f o i s) t u
where
a = toIntegerOption (k, v)
b = toBoolOption (k, v)
c = toStringOption (k, v)
f j u m x = if j == x then u else m x
toIntegerOption :: (String, String) -> Err (IntOption, Integer)
toIntegerOption (k, v) = case matches of
[] -> Left $ "No option called " ++ k
(x:_) -> do { a <- parseInt v ; return (x, a) }
where
matches = [ k' | k' <- opsI, lower (show k') == k ]
parseInt :: String -> Err Integer
parseInt x = case readMaybe x of
Nothing -> Left $ "Not an int: " ++ x
Just y -> Right y
toStringOption :: (String, String) -> Err (StringOption, String)
toStringOption (k,v) = case matches of
[] -> Left $ "No option called " ++ k
(x:_) -> return (x, v)
where
matches = [ k' | k' <- opsS, lower (show k') == k ]
toBoolOption :: (String, String) -> Err (BoolOption, Bool)
toBoolOption (k,v) = case matches of
[] -> Left $ "No option called " ++ k
(x:_) -> do { a <- parseBool v ; return (x, a) }
where
matches = [ k' | k' <- opsB, lower (show k') == k ]
parseBool z = case z of
"true" -> Right True
"false" -> Right False
x -> Left $ "Not a bool: " ++ x
-- | Default values for Boolean options.
boolDef :: BoolOption -> Bool
boolDef o = case o of
Program_Allow_Nontermination_Unsafe -> False
Allow_Empty_Sorts_Unsafe -> False
Program_Allow_Nonconfluence_Unsafe -> False
Dont_Validate_Unsafe -> False
Interpret_As_Algebra -> False
Require_Consistency -> True
-- | Default values for Integer options.
intDef :: IntOption -> Integer
intDef o = case o of
Timeout -> 30
stringDef :: StringOption -> String
stringDef o = case o of
Prover -> "auto"
defaultOptions :: Options
defaultOptions = Options intDef boolDef stringDef
generateEnumValues :: (Enum a) => [a]
generateEnumValues = enumFrom (toEnum 0)
-- | All the Boolean options.
opsB :: [BoolOption]
opsB = generateEnumValues
-- | All the Integer options.
opsI :: [IntOption]
opsI = generateEnumValues
opsS :: [StringOption]
opsS = generateEnumValues
data BoolOption =
Require_Consistency
| Dont_Validate_Unsafe
| Program_Allow_Nonconfluence_Unsafe
| Interpret_As_Algebra
| Program_Allow_Nontermination_Unsafe
| Allow_Empty_Sorts_Unsafe
deriving (Eq, Ord, Show, Enum)
data StringOption =
Prover
deriving (Eq, Ord, Show, Enum)
prover_name :: StringOption
prover_name = Prover
data IntOption =
Timeout
| Eval_Max_Plan_Depth
deriving (Eq, Ord, Show, Enum)
type CharOption = Void
{-
data CharOption =
    Csv_Escape_Char
  | Csv_Quote_Char
  deriving (Eq, Ord, Show, Enum)
-}
|
ec7c788e7752a06703d96573ffae8c9950695a5438fbc215671031b480632adf
|
billosys/sockets
|
project.clj
|
(defproject systems.billo/sockets "0.1.1"
:description "A Clojure wrapper for the family of Java Socket classes"
:url ""
:license {
:name "Eclipse Public License"
:url "-v10.html"}
:dependencies [
[org.clojure/clojure "1.9.0"]
[org.clojure/core.async "0.4.474"]
[potemkin "0.4.5"]
[systems.billo/inet-address "0.1.1"]]
:profiles {
:docs {
:dependencies [
[systems.billo/superhero-codox-theme "0.3.0"]]
:plugins [
[lein-codox "0.10.4"]
[lein-marginalia "0.9.1"]]
:source-paths ^:replace ["src"]
:codox {
:project {:name "sockets"}
:themes [:superhero]
:output-path "docs/current"
:doc-paths ["resources/docs"]
:metadata {:doc/format :markdown}}}
:dev {
:dependencies [
[clojusc/trifl "0.3.0"]
[org.clojure/tools.namespace "0.2.11"]]
:source-paths [
"dev-resources/src"
"test"
"examples/src"]
:repl-options {
:init-ns sockets.dev
:prompt (fn [ns] (str "\u001B[35m[\u001B[34m"
ns
"\u001B[35m]\u001B[33m λ\u001B[m=> "))
:welcome ~(do
(println (slurp "resources/text/banner.txt"))
(println (slurp "resources/text/loading.txt")))}}
:test {
:plugins [
[jonase/eastwood "0.2.8" :exclusions [org.clojure/clojure]]
[lein-kibit "0.1.6" :exclusions [org.clojure/clojure]]
[lein-ancient "0.6.15"]]}}
:aliases {
"check-deps" ["with-profile" "+test" "ancient" "check" "all"]
"lint" ["with-profile" "+test" "kibit"]
"docs" ["with-profile" "+docs" "do"
["codox"]
["marg" "--dir" "docs/current"
"--file" "marginalia.html"
"--name" "sockets"]]
"build" ["with-profile" "+test" "do"
["check-deps"]
["lint"]
["test"]
["compile"]
["docs"]
["uberjar"]]})
| null |
https://raw.githubusercontent.com/billosys/sockets/21a8949c6a0ed8c52ac590753e95354dc4629777/project.clj
|
clojure
|
(defproject systems.billo/sockets "0.1.1"
:description "A Clojure wrapper for the family of Java Socket classes"
:url ""
:license {
:name "Eclipse Public License"
:url "-v10.html"}
:dependencies [
[org.clojure/clojure "1.9.0"]
[org.clojure/core.async "0.4.474"]
[potemkin "0.4.5"]
[systems.billo/inet-address "0.1.1"]]
:profiles {
:docs {
:dependencies [
[systems.billo/superhero-codox-theme "0.3.0"]]
:plugins [
[lein-codox "0.10.4"]
[lein-marginalia "0.9.1"]]
:source-paths ^:replace ["src"]
:codox {
:project {:name "sockets"}
:themes [:superhero]
:output-path "docs/current"
:doc-paths ["resources/docs"]
:metadata {:doc/format :markdown}}}
:dev {
:dependencies [
[clojusc/trifl "0.3.0"]
[org.clojure/tools.namespace "0.2.11"]]
:source-paths [
"dev-resources/src"
"test"
"examples/src"]
:repl-options {
:init-ns sockets.dev
:prompt (fn [ns] (str "\u001B[35m[\u001B[34m"
ns
"\u001B[35m]\u001B[33m λ\u001B[m=> "))
:welcome ~(do
(println (slurp "resources/text/banner.txt"))
(println (slurp "resources/text/loading.txt")))}}
:test {
:plugins [
[jonase/eastwood "0.2.8" :exclusions [org.clojure/clojure]]
[lein-kibit "0.1.6" :exclusions [org.clojure/clojure]]
[lein-ancient "0.6.15"]]}}
:aliases {
"check-deps" ["with-profile" "+test" "ancient" "check" "all"]
"lint" ["with-profile" "+test" "kibit"]
"docs" ["with-profile" "+docs" "do"
["codox"]
["marg" "--dir" "docs/current"
"--file" "marginalia.html"
"--name" "sockets"]]
"build" ["with-profile" "+test" "do"
["check-deps"]
["lint"]
["test"]
["compile"]
["docs"]
["uberjar"]]})
|
|
6ff7bfd783a3918d8489728b4f140a422149c94838c2696bb6479a258b514f83
|
byteally/webapi
|
ParamSpec.hs
|
{-# OPTIONS_GHC -fno-warn-unused-binds #-}
{-# LANGUAGE MultiParamTypeClasses, TypeFamilies, OverloadedStrings, DataKinds, TypeOperators, TypeSynonymInstances, FlexibleInstances, DeriveGeneric #-}
module WebApi.ParamSpec (spec) where
import WebApi
import GHC.Generics
import Data.Text
import Test.Hspec
data User = User { name :: Text
, age :: Int
} deriving (Show, Eq, Generic)
instance FromParam 'FormParam User
{- TODO: Fix the failing test
-- When Maybe T is configure as param in contract, param overflow in case of Nothing :: Maybe T causes problem. One sol might be forcing a additional constraint like HasKeys when a Maybe is configured in the contract
-}
spec = describe "extra keys should be ignored when using Maybe at top level" $ do
pure () {-
it "should return Nothing" $
fromFormParam [ ("key", "value")
] == (Validation (Right Nothing) :: Validation [ParamErr] (Maybe User))
-}
| null |
https://raw.githubusercontent.com/byteally/webapi/8d712d0ae786475b177beacee32dc40db320af4c/webapi/tests/WebApi/ParamSpec.hs
|
haskell
|
When Maybe T is configure as param in contract , param overflow in case of Nothing : : Maybe T causes problem . One sol might be forcing a additional constraint like when a Maybe is configured in the contract
When Maybe T is configure as param in contract, param overflow in case of Nothing :: Maybe T causes problem. One sol might be forcing a additional constraint like HasKeys when a Maybe is configured in the contract
it "should return Nothing" $
fromFormParam [ ("key", "value")
] == (Validation (Right Nothing) :: Validation [ParamErr] (Maybe User))
|
{-# OPTIONS_GHC -fno-warn-unused-binds #-}
{-# LANGUAGE MultiParamTypeClasses, TypeFamilies, OverloadedStrings, DataKinds, TypeOperators, TypeSynonymInstances, FlexibleInstances, DeriveGeneric #-}
module WebApi.ParamSpec (spec) where
import WebApi
import GHC.Generics
import Data.Text
import Test.Hspec
data User = User { name :: Text
, age :: Int
} deriving (Show, Eq, Generic)
instance FromParam 'FormParam User
{- TODO: Fix the failing test
-}
spec = describe "extra keys should be ignored when using Maybe at top level" $ do
  pure ()
|
c11f6443f455607da6ac79c9ebf14068eca72237209725d26a162f5d26613433
|
tmattio/js-bindings
|
vscode_languageserver_protocol_protocol_show_document.ml
|
[@@@js.dummy "!! This code has been generated by gen_js_api !!"]
[@@@ocaml.warning "-7-32-39"]
[@@@ocaml.warning "-7-11-32-33-39"]
open Es5
open Vscode_jsonrpc
open Vscode_languageserver_types
open Vscode_languageserver_protocol_messages
module ShowDocumentClientCapabilities =
struct
type t = Ojs.t
let rec t_of_js : Ojs.t -> t = fun (x2 : Ojs.t) -> x2
and t_to_js : t -> Ojs.t = fun (x1 : Ojs.t) -> x1
let (get_support : t -> bool) =
fun (x3 : t) ->
Ojs.bool_of_js (Ojs.get_prop_ascii (t_to_js x3) "support")
let (set_support : t -> bool -> unit) =
fun (x4 : t) ->
fun (x5 : bool) ->
Ojs.set_prop_ascii (t_to_js x4) "support" (Ojs.bool_to_js x5)
end
module ShowDocumentParams =
struct
type t = Ojs.t
let rec t_of_js : Ojs.t -> t = fun (x7 : Ojs.t) -> x7
and t_to_js : t -> Ojs.t = fun (x6 : Ojs.t) -> x6
let (get_uri : t -> URI.t) =
fun (x8 : t) -> URI.t_of_js (Ojs.get_prop_ascii (t_to_js x8) "uri")
let (set_uri : t -> URI.t -> unit) =
fun (x9 : t) ->
fun (x10 : URI.t) ->
Ojs.set_prop_ascii (t_to_js x9) "uri" (URI.t_to_js x10)
let (get_external : t -> bool) =
fun (x11 : t) ->
Ojs.bool_of_js (Ojs.get_prop_ascii (t_to_js x11) "external")
let (set_external : t -> bool -> unit) =
fun (x12 : t) ->
fun (x13 : bool) ->
Ojs.set_prop_ascii (t_to_js x12) "external" (Ojs.bool_to_js x13)
let (get_take_focus : t -> bool) =
fun (x14 : t) ->
Ojs.bool_of_js (Ojs.get_prop_ascii (t_to_js x14) "takeFocus")
let (set_take_focus : t -> bool -> unit) =
fun (x15 : t) ->
fun (x16 : bool) ->
Ojs.set_prop_ascii (t_to_js x15) "takeFocus" (Ojs.bool_to_js x16)
let (get_selection : t -> Range.t) =
fun (x17 : t) ->
Range.t_of_js (Ojs.get_prop_ascii (t_to_js x17) "selection")
let (set_selection : t -> Range.t -> unit) =
fun (x18 : t) ->
fun (x19 : Range.t) ->
Ojs.set_prop_ascii (t_to_js x18) "selection" (Range.t_to_js x19)
end
module ShowDocumentResult =
struct
type t = Ojs.t
let rec t_of_js : Ojs.t -> t = fun (x21 : Ojs.t) -> x21
and t_to_js : t -> Ojs.t = fun (x20 : Ojs.t) -> x20
let (get_success : t -> bool) =
fun (x22 : t) ->
Ojs.bool_of_js (Ojs.get_prop_ascii (t_to_js x22) "success")
let (set_success : t -> bool -> unit) =
fun (x23 : t) ->
fun (x24 : bool) ->
Ojs.set_prop_ascii (t_to_js x23) "success" (Ojs.bool_to_js x24)
end
module ShowDocumentRequest =
struct
let (method_ : [ `L_s0_window_showDocument ]) =
let x25 =
Ojs.get_prop_ascii
(Ojs.get_prop_ascii Ojs.global "ShowDocumentRequest") "method" in
match Ojs.string_of_js x25 with
| "window/showDocument" -> `L_s0_window_showDocument
| _ -> assert false
let (type_ :
(ShowDocumentParams.t, ShowDocumentResult.t, unit, unit, unit)
ProtocolRequestType.t)
=
ProtocolRequestType.t_of_js ShowDocumentParams.t_of_js
ShowDocumentResult.t_of_js Ojs.unit_of_js Ojs.unit_of_js
Ojs.unit_of_js
(Ojs.get_prop_ascii
(Ojs.get_prop_ascii Ojs.global "ShowDocumentRequest") "type")
module HandlerSignature =
struct
type t =
(ShowDocumentParams.t, ShowDocumentResult.t, unit) RequestHandler.t
let rec t_of_js : Ojs.t -> t =
fun (x35 : Ojs.t) ->
RequestHandler.t_of_js ShowDocumentParams.t_of_js
ShowDocumentResult.t_of_js Ojs.unit_of_js x35
and t_to_js : t -> Ojs.t =
fun
(x31 :
(ShowDocumentParams.t, ShowDocumentResult.t, unit)
RequestHandler.t)
->
RequestHandler.t_to_js ShowDocumentParams.t_to_js
ShowDocumentResult.t_to_js Ojs.unit_to_js x31
end
module MiddlewareSignature =
struct
type t = Ojs.t
let rec t_of_js : Ojs.t -> t = fun (x40 : Ojs.t) -> x40
and t_to_js : t -> Ojs.t = fun (x39 : Ojs.t) -> x39
let (apply :
t ->
params:ShowDocumentParams.t ->
next:ShowDocumentRequest_HandlerSignature.t ->
(ShowDocumentResult.t, unit) HandlerResult.t)
=
fun (x43 : t) ->
fun ~params:(x41 : ShowDocumentParams.t) ->
fun ~next:(x42 : ShowDocumentRequest_HandlerSignature.t) ->
HandlerResult.t_of_js ShowDocumentResult.t_of_js
Ojs.unit_of_js
(Ojs.apply (t_to_js x43)
[|(ShowDocumentParams.t_to_js x41);(ShowDocumentRequest_HandlerSignature.t_to_js
x42)|])
end
end
| null |
https://raw.githubusercontent.com/tmattio/js-bindings/ca3bd6a12db519c8de7f41b303f14cf70cfd4c5f/lib/vscode-languageserver-protocol/vscode_languageserver_protocol_protocol_show_document.ml
|
ocaml
|
[@@@js.dummy "!! This code has been generated by gen_js_api !!"]
[@@@ocaml.warning "-7-32-39"]
[@@@ocaml.warning "-7-11-32-33-39"]
open Es5
open Vscode_jsonrpc
open Vscode_languageserver_types
open Vscode_languageserver_protocol_messages
module ShowDocumentClientCapabilities =
struct
type t = Ojs.t
let rec t_of_js : Ojs.t -> t = fun (x2 : Ojs.t) -> x2
and t_to_js : t -> Ojs.t = fun (x1 : Ojs.t) -> x1
let (get_support : t -> bool) =
fun (x3 : t) ->
Ojs.bool_of_js (Ojs.get_prop_ascii (t_to_js x3) "support")
let (set_support : t -> bool -> unit) =
fun (x4 : t) ->
fun (x5 : bool) ->
Ojs.set_prop_ascii (t_to_js x4) "support" (Ojs.bool_to_js x5)
end
module ShowDocumentParams =
struct
type t = Ojs.t
let rec t_of_js : Ojs.t -> t = fun (x7 : Ojs.t) -> x7
and t_to_js : t -> Ojs.t = fun (x6 : Ojs.t) -> x6
let (get_uri : t -> URI.t) =
fun (x8 : t) -> URI.t_of_js (Ojs.get_prop_ascii (t_to_js x8) "uri")
let (set_uri : t -> URI.t -> unit) =
fun (x9 : t) ->
fun (x10 : URI.t) ->
Ojs.set_prop_ascii (t_to_js x9) "uri" (URI.t_to_js x10)
let (get_external : t -> bool) =
fun (x11 : t) ->
Ojs.bool_of_js (Ojs.get_prop_ascii (t_to_js x11) "external")
let (set_external : t -> bool -> unit) =
fun (x12 : t) ->
fun (x13 : bool) ->
Ojs.set_prop_ascii (t_to_js x12) "external" (Ojs.bool_to_js x13)
let (get_take_focus : t -> bool) =
fun (x14 : t) ->
Ojs.bool_of_js (Ojs.get_prop_ascii (t_to_js x14) "takeFocus")
let (set_take_focus : t -> bool -> unit) =
fun (x15 : t) ->
fun (x16 : bool) ->
Ojs.set_prop_ascii (t_to_js x15) "takeFocus" (Ojs.bool_to_js x16)
let (get_selection : t -> Range.t) =
fun (x17 : t) ->
Range.t_of_js (Ojs.get_prop_ascii (t_to_js x17) "selection")
let (set_selection : t -> Range.t -> unit) =
fun (x18 : t) ->
fun (x19 : Range.t) ->
Ojs.set_prop_ascii (t_to_js x18) "selection" (Range.t_to_js x19)
end
module ShowDocumentResult =
struct
type t = Ojs.t
let rec t_of_js : Ojs.t -> t = fun (x21 : Ojs.t) -> x21
and t_to_js : t -> Ojs.t = fun (x20 : Ojs.t) -> x20
let (get_success : t -> bool) =
fun (x22 : t) ->
Ojs.bool_of_js (Ojs.get_prop_ascii (t_to_js x22) "success")
let (set_success : t -> bool -> unit) =
fun (x23 : t) ->
fun (x24 : bool) ->
Ojs.set_prop_ascii (t_to_js x23) "success" (Ojs.bool_to_js x24)
end
module ShowDocumentRequest =
struct
let (method_ : [ `L_s0_window_showDocument ]) =
let x25 =
Ojs.get_prop_ascii
(Ojs.get_prop_ascii Ojs.global "ShowDocumentRequest") "method" in
match Ojs.string_of_js x25 with
| "window/showDocument" -> `L_s0_window_showDocument
| _ -> assert false
let (type_ :
(ShowDocumentParams.t, ShowDocumentResult.t, unit, unit, unit)
ProtocolRequestType.t)
=
ProtocolRequestType.t_of_js ShowDocumentParams.t_of_js
ShowDocumentResult.t_of_js Ojs.unit_of_js Ojs.unit_of_js
Ojs.unit_of_js
(Ojs.get_prop_ascii
(Ojs.get_prop_ascii Ojs.global "ShowDocumentRequest") "type")
module HandlerSignature =
struct
type t =
(ShowDocumentParams.t, ShowDocumentResult.t, unit) RequestHandler.t
let rec t_of_js : Ojs.t -> t =
fun (x35 : Ojs.t) ->
RequestHandler.t_of_js ShowDocumentParams.t_of_js
ShowDocumentResult.t_of_js Ojs.unit_of_js x35
and t_to_js : t -> Ojs.t =
fun
(x31 :
(ShowDocumentParams.t, ShowDocumentResult.t, unit)
RequestHandler.t)
->
RequestHandler.t_to_js ShowDocumentParams.t_to_js
ShowDocumentResult.t_to_js Ojs.unit_to_js x31
end
module MiddlewareSignature =
struct
type t = Ojs.t
let rec t_of_js : Ojs.t -> t = fun (x40 : Ojs.t) -> x40
and t_to_js : t -> Ojs.t = fun (x39 : Ojs.t) -> x39
let (apply :
t ->
params:ShowDocumentParams.t ->
next:ShowDocumentRequest_HandlerSignature.t ->
(ShowDocumentResult.t, unit) HandlerResult.t)
=
fun (x43 : t) ->
fun ~params:(x41 : ShowDocumentParams.t) ->
fun ~next:(x42 : ShowDocumentRequest_HandlerSignature.t) ->
HandlerResult.t_of_js ShowDocumentResult.t_of_js
Ojs.unit_of_js
(Ojs.apply (t_to_js x43)
[|(ShowDocumentParams.t_to_js x41);(ShowDocumentRequest_HandlerSignature.t_to_js
x42)|])
end
end
|
|
c82e8ba91f36ff1c3349e9ebeaafb749b758bd9d1426ec8b654e62459dc293de
|
vlaaad/reveal
|
action.clj
|
(ns vlaaad.reveal.action
(:require [clojure.datafy :as d]
[vlaaad.reveal.stream :as stream]
[clojure.spec.alpha :as s]
[clojure.core.specs.alpha :as specs]
[vlaaad.reveal.event :as event]
[lambdaisland.deep-diff2.diff-impl :as diff])
(:import [clojure.lang IDeref]
[java.awt Desktop]
[java.net URI URL]
[java.io File]
[java.util.concurrent Future]))
(defonce ^:private *registry
(atom {}))
(defn register! [id check]
(swap! *registry assoc id check)
id)
(s/def ::id qualified-keyword?)
(s/fdef defaction
:args (s/cat :id ::id
:bindings (s/every ::specs/binding-form :kind vector? :min-count 1 :max-count 2)
:body (s/+ any?))
:ret ::id)
(defmacro defaction [id bindings & body]
(let [name (symbol (str (name id) "-action-body"))]
`(register! ~id (fn ~name ~@(case (count bindings)
1 `((~bindings ~@body)
([~'value ~'annotation] (~name ~'value)))
2 `(([~'value] (~name ~'value nil))
(~bindings ~@body)))))))
(defn collect [annotated-value]
(let [{:keys [value annotation]} annotated-value
actions (->> @*registry
(keep (fn [[id check]]
(try
(when-let [f (check value annotation)]
(let [label (name id)]
{:id id
:label label
:form (stream/horizontal
(stream/raw-string "(" {:fill :util})
(stream/raw-string label {:fill :symbol})
stream/separator
(stream/stream value annotation)
(stream/raw-string ")" {:fill :util}))
:invoke f}))
(catch Exception _)))))
freqs (->> actions (map :label) frequencies)]
(->> actions
(sort-by (juxt :label :id))
(map #(cond-> % (< 1 (freqs (:label %))) (assoc :label (str (symbol (:id %))))))
(into []))))
(defaction ::datafy [x]
(let [d (d/datafy x)]
(when-not (= d x)
(constantly d))))
(defaction ::nav [x {:vlaaad.reveal.nav/keys [coll key val]
:or {key ::not-found
val ::not-found}}]
(let [datafied-coll (d/datafy coll)]
(when (= datafied-coll coll)
(cond
(not= key ::not-found) #(d/nav datafied-coll key x)
(not= val ::not-found) #(d/nav datafied-coll x val)))))
(defaction ::deref [v]
(when (or (instance? IDeref v)
(instance? Future v))
#(deref v)))
(defaction ::meta [v]
(when-let [m (meta v)]
(constantly m)))
(defn- open-uri-result [^URI uri]
(with-meta #(deref (future (.browse (Desktop/getDesktop) uri)))
{:vlaaad.reveal.ui/ignore-action-result true}))
(defaction ::browse:external [v]
(cond
(instance? URI v)
(open-uri-result v)
(instance? URL v)
(recur (.toURI ^URL v))
(and (instance? File v) (.exists ^File v))
(recur (.normalize (.toURI ^File v)))
(and (string? v) (re-matches #"^https?://.+" v))
(recur (URI. v))))
(defaction ::vec [v]
(when (and v (.isArray (class v)))
#(vec v)))
(defaction ::diff [x]
(cond
(and (vector? x) (= 2 (count x)))
#(apply diff/diff x)
(and (map? x) (contains? x :expected) (contains? x :actual))
#(let [{:keys [expected actual]} x]
(if (and (sequential? expected)
(= '= (first expected))
(sequential? actual)
(= 'not (first actual))
(sequential? (second actual))
(= '= (first (second actual)))
(= 3 (bounded-count 4 (second actual))))
(apply diff/diff (drop 1 (second actual)))
(diff/diff expected actual)))))
(defaction ::why-is-this-boolean-red? [x]
(when (and (boolean? x)
(not (or (identical? Boolean/TRUE x)
(identical? Boolean/FALSE x))))
(open-uri-result (URI. "-booleans"))))
(defn execute [id x ann]
(event/daemon-future
(if-let [action (@*registry id)]
(if-let [invoke (action x ann)]
(invoke)
(throw (ex-info "Action unavailable" {:action id :value x :annotation ann})))
(throw (ex-info "Action does not exist" {:action id})))))
| null |
https://raw.githubusercontent.com/vlaaad/reveal/d472efebaafe0ba94034fbe5d2d0a991aae8248e/src/vlaaad/reveal/action.clj
|
clojure
|
(ns vlaaad.reveal.action
(:require [clojure.datafy :as d]
[vlaaad.reveal.stream :as stream]
[clojure.spec.alpha :as s]
[clojure.core.specs.alpha :as specs]
[vlaaad.reveal.event :as event]
[lambdaisland.deep-diff2.diff-impl :as diff])
(:import [clojure.lang IDeref]
[java.awt Desktop]
[java.net URI URL]
[java.io File]
[java.util.concurrent Future]))
(defonce ^:private *registry
(atom {}))
(defn register! [id check]
(swap! *registry assoc id check)
id)
(s/def ::id qualified-keyword?)
(s/fdef defaction
:args (s/cat :id ::id
:bindings (s/every ::specs/binding-form :kind vector? :min-count 1 :max-count 2)
:body (s/+ any?))
:ret ::id)
(defmacro defaction [id bindings & body]
(let [name (symbol (str (name id) "-action-body"))]
`(register! ~id (fn ~name ~@(case (count bindings)
1 `((~bindings ~@body)
([~'value ~'annotation] (~name ~'value)))
2 `(([~'value] (~name ~'value nil))
(~bindings ~@body)))))))
(defn collect [annotated-value]
(let [{:keys [value annotation]} annotated-value
actions (->> @*registry
(keep (fn [[id check]]
(try
(when-let [f (check value annotation)]
(let [label (name id)]
{:id id
:label label
:form (stream/horizontal
(stream/raw-string "(" {:fill :util})
(stream/raw-string label {:fill :symbol})
stream/separator
(stream/stream value annotation)
(stream/raw-string ")" {:fill :util}))
:invoke f}))
(catch Exception _)))))
freqs (->> actions (map :label) frequencies)]
(->> actions
(sort-by (juxt :label :id))
(map #(cond-> % (< 1 (freqs (:label %))) (assoc :label (str (symbol (:id %))))))
(into []))))
(defaction ::datafy [x]
(let [d (d/datafy x)]
(when-not (= d x)
(constantly d))))
(defaction ::nav [x {:vlaaad.reveal.nav/keys [coll key val]
:or {key ::not-found
val ::not-found}}]
(let [datafied-coll (d/datafy coll)]
(when (= datafied-coll coll)
(cond
(not= key ::not-found) #(d/nav datafied-coll key x)
(not= val ::not-found) #(d/nav datafied-coll x val)))))
(defaction ::deref [v]
(when (or (instance? IDeref v)
(instance? Future v))
#(deref v)))
(defaction ::meta [v]
(when-let [m (meta v)]
(constantly m)))
(defn- open-uri-result [^URI uri]
(with-meta #(deref (future (.browse (Desktop/getDesktop) uri)))
{:vlaaad.reveal.ui/ignore-action-result true}))
(defaction ::browse:external [v]
(cond
(instance? URI v)
(open-uri-result v)
(instance? URL v)
(recur (.toURI ^URL v))
(and (instance? File v) (.exists ^File v))
(recur (.normalize (.toURI ^File v)))
(and (string? v) (re-matches #"^https?://.+" v))
(recur (URI. v))))
(defaction ::vec [v]
(when (and v (.isArray (class v)))
#(vec v)))
(defaction ::diff [x]
(cond
(and (vector? x) (= 2 (count x)))
#(apply diff/diff x)
(and (map? x) (contains? x :expected) (contains? x :actual))
#(let [{:keys [expected actual]} x]
(if (and (sequential? expected)
(= '= (first expected))
(sequential? actual)
(= 'not (first actual))
(sequential? (second actual))
(= '= (first (second actual)))
(= 3 (bounded-count 4 (second actual))))
(apply diff/diff (drop 1 (second actual)))
(diff/diff expected actual)))))
(defaction ::why-is-this-boolean-red? [x]
(when (and (boolean? x)
(not (or (identical? Boolean/TRUE x)
(identical? Boolean/FALSE x))))
(open-uri-result (URI. "-booleans"))))
(defn execute [id x ann]
(event/daemon-future
(if-let [action (@*registry id)]
(if-let [invoke (action x ann)]
(invoke)
(throw (ex-info "Action unavailable" {:action id :value x :annotation ann})))
(throw (ex-info "Action does not exist" {:action id})))))
|
|
5959fd58683a3763cffb6c38b3b3f52778b8680f5c23f11fda417386c8569dbc
|
haskell/lsp
|
LocationSpec.hs
|
{-# LANGUAGE OverloadedStrings #-}
module LocationSpec where
import Language.LSP.Types
import Test.Hspec
main :: IO ()
main = hspec spec
spec :: Spec
spec = do
describe "isSubrangeOf" $ do
it "is true if the first range is totally inside the second range" $
isSubrangeOf (mkRange 1 2 1 5) (mkRange 1 1 1 6) `shouldBe` True
it "is true if two ranges equal" $
isSubrangeOf (mkRange 1 2 1 5) (mkRange 1 2 1 5) `shouldBe` True
it "is false if the first range is outside of the second" $
isSubrangeOf (mkRange 1 1 1 5) (mkRange 1 2 1 5) `shouldBe` False
describe "positionInRange" $ do
it "is false if position is after the end of a single line range" $
positionInRange (Position 1 10) (Range (Position 1 1) (Position 1 3)) `shouldBe` False
it "is false if position is before the begining of a single line range" $
positionInRange (Position 1 0) (Range (Position 1 1) (Position 1 6)) `shouldBe` False
it "is true if position is in a single line range" $
positionInRange (Position 1 5) (Range (Position 1 1) (Position 1 6)) `shouldBe` True
it "is false if position is right at the end of the range" $
positionInRange (Position 1 5) (Range (Position 1 1) (Position 1 5)) `shouldBe` False
it "is true if position is in the middle of a multiline range" $
positionInRange (Position 3 5) (Range (Position 1 1) (Position 5 6)) `shouldBe` True
it "is false if position is before the beginning of a multiline range" $
positionInRange (Position 3 5) (Range (Position 3 6) (Position 4 10)) `shouldBe` False
it "is false if position is right at the end of a multiline range" $
positionInRange (Position 4 10) (Range (Position 3 6) (Position 4 10)) `shouldBe` False
| null |
https://raw.githubusercontent.com/haskell/lsp/a41954e356117d8f5cb64b20e1a9d32e8fc6a884/lsp-types/test/LocationSpec.hs
|
haskell
|
# LANGUAGE OverloadedStrings #
|
module LocationSpec where
import Language.LSP.Types
import Test.Hspec
main :: IO ()
main = hspec spec
spec :: Spec
spec = do
describe "isSubrangeOf" $ do
it "is true if the first range is totally inside the second range" $
isSubrangeOf (mkRange 1 2 1 5) (mkRange 1 1 1 6) `shouldBe` True
it "is true if two ranges equal" $
isSubrangeOf (mkRange 1 2 1 5) (mkRange 1 2 1 5) `shouldBe` True
it "is false if the first range is outside of the second" $
isSubrangeOf (mkRange 1 1 1 5) (mkRange 1 2 1 5) `shouldBe` False
describe "positionInRange" $ do
it "is false if position is after the end of a single line range" $
positionInRange (Position 1 10) (Range (Position 1 1) (Position 1 3)) `shouldBe` False
it "is false if position is before the begining of a single line range" $
positionInRange (Position 1 0) (Range (Position 1 1) (Position 1 6)) `shouldBe` False
it "is true if position is in a single line range" $
positionInRange (Position 1 5) (Range (Position 1 1) (Position 1 6)) `shouldBe` True
it "is false if position is right at the end of the range" $
positionInRange (Position 1 5) (Range (Position 1 1) (Position 1 5)) `shouldBe` False
it "is true if position is in the middle of a multiline range" $
positionInRange (Position 3 5) (Range (Position 1 1) (Position 5 6)) `shouldBe` True
it "is false if position is before the beginning of a multiline range" $
positionInRange (Position 3 5) (Range (Position 3 6) (Position 4 10)) `shouldBe` False
it "is false if position is right at the end of a multiline range" $
positionInRange (Position 4 10) (Range (Position 3 6) (Position 4 10)) `shouldBe` False
|
89f79f1c87417a03d04f5246ee355ee7344ed0b59dab1c18790d9688b38fe906
|
shayne-fletcher/zen
|
access.ml
|
let x = [`On; `Off];;
(*
val x : [> `Off | `On ] list = [`On; `Off]
- x is a list of values of a type that at least contain the
constructors `Off and `On
*)
let n = `Number 1;;
(*
val n : [> `Number of int ] = `Number 1
- n is a type that at least has the constructor `Number of int
*)
let f = function | `On -> 1 | `Off -> 0 | `Number n -> n;;
(*
val f : [< `Number of int | `Off | `On ] -> int = <fun>
- The argument to f has a type that at most has the constructors `Off,
`On and `Number
*)
let f x = (x :> [`A | `B]);;
(*
f : [< `A | `B ] -> [ `A | `B ] = <fun>
- We see `x` needs to be coercible to type type [`A | `B]
- So, we read [< `A | `B ] as a type that at most contains the tags
`A and `B
- [ `A ], [ `B ] and [ `A | `B ], are the sub-types of [ `A | `B ]
- [< `A | `B] is the set of sub-types of [`A | `B]
*)
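(* Extra illustration (added, not part of the original file): coercion on a
   concrete value. [`A] is a sub-type of [`A | `B], so widening is accepted,
   while coercing from [`A | `B] back down to [`A] would be rejected. *)
let a = (`A : [`A]);;
let a' = (a :> [`A | `B]);;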
let f x = (x :> [`A | `B] * [`C | `D]);;
(*
val f : [< `A | `B ] * [< `C | `D ] -> [ `A | `B ] * [ `C | `D ] = <fun>
- We see `x` needs to be corecible to [ `A | `B ] * [ `C | D` ]
- This coercion is valid if
- x is a pair where
- The first component is a sub-type of [`A | `B]
- The second component is a sub-type of [`C | `D]
- [< `A | `B] * [< `C | `D] is the set of sub-types of [ `A | `B ] *
[ `C | `D ]
*)
let f x = (x :> [`A] -> [`C | `D]);;
(*
f : ([> `A ] -> [< `C | `D ]) -> [ `A ] -> [ `C | `D ] = <fun>
- We see `x` needs to be coercible to [`A] -> [`C | `D]
- This coercion is valid if
- x is an arrow where
- The argument is of a type that at least contains the tag `A,
that is [ `A ] and [ `A | ... ]
- [> `A] is the set of of super-types of [`A]
- The return value is a sub-type of [`C | `D]
- [> `A ] -> [< `C | `D ] is the set of sub-types of [ `A ] -> [
`C | `D ]
(* -- *)
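(* Editor's sketch, not part of the original file: any arrow that accepts
   at least `A and returns at most `C or `D coerces to [`A] -> [`C | `D]
   (the name _g is illustrative only). *)
let _g : [`A] -> [`C | `D] = f (fun `A -> `C);;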
(*-and-a/*)
module type S = sig
type (+'a, +'b) s
type (-'a, +'b) t
end;;
module M : S = struct
type ('a, 'b) s = 'a * 'b
type ('a, 'b) t = 'a -> 'b
end;;
let f x = (x : ([`A], [`B]) M.s :> ([`A | `C], [`B | `D]) M.s);;
let f x = (x : ([`A | `B], [`C]) M.t :> ([`A], [`C | `D]) M.t);;
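(* Editor's note, not part of the original file: M.t is declared
   contravariant in its first parameter, so the subtyping direction flips
   there (the coercion above shrinks [`A | `B] to [`A]), while the
   covariant second parameter may grow from [`C] to [`C | `D]. *)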
(* -- *)
(*
(*The presence of the constraint disables variance inference*)
type +'a t = {x : 'a} constraint 'a = [< `A | `B];;
let f x = (x : [`A] t :> [`A | `B] t);;
type 'a t = {x : int};;
let f x = (x : [`A] t :> [`B] t);;
*)
module type S = sig
type 'a t constraint 'a = [< `A | `B ]
val init : [`A] t
val f : [`A] t -> [`B] t
end;;
module T : S = struct
type 'a t = {x : int} constraint 'a = [< `A | `B]
let init = {x = 0}
let f x = x
end;;
module M : S = struct
type 'a t = unit constraint 'a = [< `A | `B ]
let init = ()
let f x = x
end;;
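(* Editor's sketch, not part of the original file: the constrained phantom
   parameter lets S encode a one-way `A-to-`B transition; the only way to
   obtain a [`B] T.t is through T.f (the name _b is illustrative only). *)
let (_b : [`B] T.t) = T.f T.init;;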
| null |
https://raw.githubusercontent.com/shayne-fletcher/zen/10a1d0b9bf261bb133918dd62fb1593c3d4d21cb/ocaml/access/access.ml
|
ocaml
|
val x : [> `Off | `On ] list = [`On; `Off]
- x is a list of values of a type that at least contain the
constructors `Off and `On
val n : [> `Number of int ] = `Number 1
- n is a type that at least has the constructor `Number of int
val f : [< `Number of int | `Off | `On ] -> int = <fun>
- The argument to f has a type that at most has the constructors `Off,
`On and `Number
--
-and-a/
--
The presence of the constraint disables variance inference
|
let x = [`On; `Off];;
let n = `Number 1;;
let f = function | `On -> 1 | `Off -> 0 | `Number n -> n;;
let f x = (x :> [`A | `B]);;
let f x = (x :> [`A | `B] * [`C | `D]);;
let f x = (x :> [`A] -> [`C | `D]);;
module type S = sig
type (+'a, +'b) s
type (-'a, +'b) t
end;;
module M : S = struct
type ('a, 'b) s = 'a * 'b
type ('a, 'b) t = 'a -> 'b
end;;
let f x = (x : ([`A], [`B]) M.s :> ([`A | `C], [`B | `D]) M.s);;
let f x = (x : ([`A | `B], [`C]) M.t :> ([`A], [`C | `D]) M.t);;
type +'a t = {x : 'a} constraint 'a = [< `A | `B];;
let f x = (x : [`A] t :> [`A | `B] t);;
type 'a t = {x : int};;
let f x = (x : [`A] t :> [`B] t);;
module type S = sig
type 'a t constraint 'a = [< `A | `B ]
val init : [`A] t
val f : [`A] t -> [`B] t
end;;
module T : S = struct
type 'a t = {x : int} constraint 'a = [< `A | `B]
let init = {x = 0}
let f x = x
end;;
module M : S = struct
type 'a t = unit constraint 'a = [< `A | `B ]
let init = ()
let f x = x
end;;
|
e59a6dc9a4ca9c453e01d94857596a080e6cc495793ada8b55b573eb6bd03a4c
|
dtgoitia/civil-autolisp
|
GetTrustedPaths.lsp
|
(defun c:zz ( / paths )
(DT:GetTrustedPaths)
( DT : StringToList " a " " . " )
( DT : StringToList " ... a.a ... a ... " " . " )
( DT : StringToList " .hi . I am.splitted.and .. very properly.splitted ..... a ... a. " " . " )
)
| null |
https://raw.githubusercontent.com/dtgoitia/civil-autolisp/72d68139d372c84014d160f8e4918f062356349f/Dump%20folder/GetTrustedPaths.lsp
|
lisp
|
(defun c:zz ( / paths )
(DT:GetTrustedPaths)
( DT : StringToList " a " " . " )
( DT : StringToList " ... a.a ... a ... " " . " )
( DT : StringToList " .hi . I am.splitted.and .. very properly.splitted ..... a ... a. " " . " )
)
|
|
01aa0f4715d772f9f42070602cc7e5fbfac9d47a33c64cffb9195d7afb6d23c4
|
backtracking/bibtex2html
|
version.mli
|
(**************************************************************************)
(* bibtex2html - A BibTeX to HTML translator *)
(* Copyright (C) 1997-2014 Jean-Christophe Filliâtre and Claude Marché *)
(* *)
(* This software is free software; you can redistribute it and/or *)
(* modify it under the terms of the GNU General Public *)
(* License version 2, as published by the Free Software Foundation. *)
(* *)
(* This software is distributed in the hope that it will be useful, *)
(* but WITHOUT ANY WARRANTY; without even the implied warranty of *)
(* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. *)
(* *)
(* See the GNU General Public License version 2 for more details *)
(* (enclosed in the file GPL). *)
(**************************************************************************)
(*s Version and compilation date of bibtex2html and bib2bib.
The implementation is automatically generated at compilation. *)
val version : string
val date : string
| null |
https://raw.githubusercontent.com/backtracking/bibtex2html/7c9547da79a13c3accffc9947c846df96a6edd68/version.mli
|
ocaml
|
************************************************************************
bibtex2html - A BibTeX to HTML translator
This software is free software; you can redistribute it and/or
This software is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
(enclosed in the file GPL).
************************************************************************
s Version and compilation date of bibtex2html and bib2bib.
The implementation is automatically generated at compilation.
|
val version : string
val date : string
|
9d34855ea4d21624fea6eb018090c34301ab193cf6d8675c73297cab6cbea301
|
pfdietz/ansi-test
|
listp.lsp
|
;-*- Mode: Lisp -*-
;;;; Author:   Paul Dietz
;;;; Created:  Sat Apr 19 22:03:37 2003
;;;; Contains: Tests of LISTP
(deftest listp-nil
(notnot-mv (listp nil))
t)
(deftest listp-symbol
(listp 'a)
nil)
(deftest listp-singleton-list
(notnot-mv (listp '(a)))
t)
(deftest listp-circular-list
(let ((x (cons nil nil)))
(setf (cdr x) x)
(notnot-mv (listp x)))
t)
(deftest listp-longer-list
(notnot-mv (listp '(a b c d e f g h)))
t)
;; Check that (listp x) == (typep x 'list)
(deftest listp-universe
(check-type-predicate 'listp 'list)
nil)
(deftest listp.order.1
(let ((i 0))
(values (listp (incf i)) i))
nil 1)
(deftest listp.error.1
(signals-error (listp) program-error)
t)
(deftest listp.error.2
(signals-error (listp nil nil) program-error)
t)
| null |
https://raw.githubusercontent.com/pfdietz/ansi-test/3f4b9d31c3408114f0467eaeca4fd13b28e2ce31/cons/listp.lsp
|
lisp
|
-*- Mode: Lisp -*-
|
(deftest listp-nil
(notnot-mv (listp nil))
t)
(deftest listp-symbol
(listp 'a)
nil)
(deftest listp-singleton-list
(notnot-mv (listp '(a)))
t)
(deftest listp-circular-list
(let ((x (cons nil nil)))
(setf (cdr x) x)
(notnot-mv (listp x)))
t)
(deftest listp-longer-list
(notnot-mv (listp '(a b c d e f g h)))
t)
(deftest listp-universe
(check-type-predicate 'listp 'list)
nil)
(deftest listp.order.1
(let ((i 0))
(values (listp (incf i)) i))
nil 1)
(deftest listp.error.1
(signals-error (listp) program-error)
t)
(deftest listp.error.2
(signals-error (listp nil nil) program-error)
t)
|
bfbde7a3cb4cff4f9c09ab36b64d30786616a1d2efb25f55b07c7c2dc4c25483
|
wiseman/turboshrimp
|
navdata_test.clj
|
(ns com.lemondronor.turboshrimp.navdata-test
(:require [clojure.java.io :as io]
[clojure.test :refer :all]
[com.lemondronor.turboshrimp.navdata :as navdata]
[com.lemonodor.xio :as xio])
(:import (java.nio ByteBuffer ByteOrder)))
;; matrix 33 is 9 floats
;; vector 31 is 3 floats
(def b-matrix33 (vec (repeat (* 9 4) 0 )))
(def b-vector31 (vec (repeat (* 3 4) 0 )))
(def b-header [-120 119 102 85])
(def b-state [-48 4 -128 15])
(def b-seqnum [102 3 0 0])
(def b-vision [0 0 0 0])
(def b-demo-option-id [0 0])
(def b-demo-option-size [-108 0])
(def b-demo-control-state [0 0 2 0])
(def b-demo-battery [100 0 0 0])
(def b-demo-pitch [0 96 -122 -60])
(def b-demo-roll [0 -128 53 -59])
(def b-demo-yaw [0 0 87 -61])
(def b-demo-altitude [0 0 0 0])
(def b-demo-velocity-x [0 0 0 0])
(def b-demo-velocity-y [0 0 0 0])
(def b-demo-velocity-z [0 0 0 0])
(def b-demo-num-frames [0 0 0 0])
(def b-demo-detect-camera-rot b-matrix33)
(def b-demo-detect-camera-trans b-vector31)
(def b-demo-detect-tag-index [0 0 0 0])
(def b-demo-detect-camera-type [4 0 0 0])
(def b-demo-drone-camera-rot b-matrix33)
(def b-demo-drone-camera-trans b-vector31)
(def b-demo-option (flatten (conj b-demo-option-id b-demo-option-size
b-demo-control-state b-demo-battery
b-demo-pitch b-demo-roll b-demo-yaw
b-demo-altitude b-demo-velocity-x
b-demo-velocity-y b-demo-velocity-z
b-demo-num-frames
b-demo-detect-camera-rot b-demo-detect-camera-trans
b-demo-detect-tag-index
b-demo-detect-camera-type b-demo-drone-camera-rot
b-demo-drone-camera-trans)))
(def b-vision-detect-option-id [16 0])
(def b-vision-detect-option-size [72 1])
(def b-vision-detect-num-tags-detected [2 0 0 0])
(def b-vision-detect-type [1 0 0 0 2 0 0 0 3 0 0 0 4 0 0 0])
(def b-vision-detect-xc [1 0 0 0 2 0 0 0 3 0 0 0 4 0 0 0])
(def b-vision-detect-yc [1 0 0 0 2 0 0 0 3 0 0 0 4 0 0 0])
(def b-vision-detect-width [1 0 0 0 2 0 0 0 3 0 0 0 4 0 0 0])
(def b-vision-detect-height [1 0 0 0 2 0 0 0 3 0 0 0 4 0 0 0])
(def b-vision-detect-dist [1 0 0 0 2 0 0 0 3 0 0 0 4 0 0 0])
(def b-vision-detect-orient-angle [0 96 -122 -60 0 96 -122 -60 0 96 -122 -60 0 96 -122 -60])
(def b-vision-detect-rotation (flatten (conj b-matrix33 b-matrix33 b-matrix33 b-matrix33)))
(def b-vision-detect-translation (flatten (conj b-vector31 b-vector31 b-vector31 b-vector31)))
(def b-vision-detect-camera-source [1 0 0 0 2 0 0 0 2 0 0 0 2 0 0 0])
(def b-vision-detect-option (flatten (conj b-vision-detect-option-id b-vision-detect-option-size
b-vision-detect-num-tags-detected
b-vision-detect-type b-vision-detect-xc b-vision-detect-yc
b-vision-detect-width b-vision-detect-height b-vision-detect-dist
b-vision-detect-orient-angle b-vision-detect-rotation b-vision-detect-translation
b-vision-detect-camera-source)))
(def b-checksum-option-id [-1 -1])
(def b-checksum-option-size [0x08 0x00])
(def b-checksum-option-checksum [0x08 0x10 0x00 0x00])
(def b-checksum-option
(flatten (conj b-checksum-option-id b-checksum-option-size
b-checksum-option-checksum)))
(def header (map byte [-120 119 102 85]))
(def nav-input
(byte-array (map byte (flatten (conj b-header b-state b-seqnum b-vision
b-demo-option b-vision-detect-option
b-checksum-option)))))
(deftest navdata-unit-tests
(testing "about parse-nav-state"
(testing "parse-nav-state"
(let [state 260048080
result (navdata/parse-nav-state state)
{:keys [flying video vision control altitude-control
user-feedback command-ack camera travelling
usb demo bootstrap motors communication
software battery emergency-landing timer
magneto angles wind ultrasound cutout
pic-version atcodec-thread navdata-thread
video-thread acquisition-thread ctrl-watchdog
adc-watchdog com-watchdog emergency]} result]
(is (= flying :landed))
(is (= video :off))
(is (= vision :off))
(is (= control :euler-angles))
(is (= altitude-control :on))
(is (= user-feedback :off))
(is (= command-ack :received))
(is (= camera :ready))
(is (= travelling :off))
(is (= usb :not-ready))
(is (= demo :on))
(is (= bootstrap :off))
(is (= motors :ok))
(is (= communication :ok))
(is (= software :ok))
(is (= battery :ok))
(is (= emergency-landing :off))
(is (= timer :not-elapsed))
(is (= magneto :ok))
(is (= angles :ok))
(is (= wind :ok))
(is (= ultrasound :ok))
(is (= cutout :ok))
(is (= pic-version :ok))
(is (= atcodec-thread :on))
(is (= navdata-thread :on))
(is (= video-thread :on))
(is (= acquisition-thread :on))
(is (= ctrl-watchdog :ok))
(is (= adc-watchdog :ok))
(is (= com-watchdog :ok))
(is (= emergency :ok)))))
(testing "which-option-type"
(is (= (navdata/which-option-type 0) :demo))
(is (= (navdata/which-option-type 16) :vision-detect))
(is (= (navdata/which-option-type 2342342) nil)))
(testing "about parse-control-state"
(testing "parse-control-state"
(let [bb (doto (gloss.io/to-byte-buffer b-demo-control-state)
(.order ByteOrder/LITTLE_ENDIAN))
control-state (.getInt bb)]
(is (= (navdata/parse-control-state control-state) :landed)))))
(testing "about parse-demo-option"
(testing "parse-demo-option"
(let [bb (doto (gloss.io/to-byte-buffer
;; Skip past the option ID and option size.
(drop 4 b-demo-option))
(.order ByteOrder/LITTLE_ENDIAN))
option (navdata/parse-demo-option bb)]
(is (= (:control-state option) :landed))
(is (= (:battery-percentage option) 100))
(is (= (:theta option) (float -1.075)))
(is (= (:phi option) (float -2.904)))
(is (= (:psi option) (float -0.215)))
(is (= (:altitude option) 0.0))
(is (= (:velocity option) {:x 0.0 :y 0.0 :z 0.0}))
(is (= (:detect-camera-type option) :roundel-under-drone)))))
(testing "about parse-vision-detect-option"
(let [detections
(navdata/parse-vision-detect-option
(doto (gloss.io/to-byte-buffer
;; Skip past the option ID and option size.
(drop 4
b-vision-detect-option))
(.order ByteOrder/LITTLE_ENDIAN)))]
(is (= (count detections) 2))
(testing "first detection"
(let [det (nth detections 0)]
(is (= (:type det) :vertical-deprecated))
(is (= (:xc det) 1))
(is (= (:yc det) 1))
(is (= (:width det) 1))
(is (= (:height det) 1))
(is (= (:dist det) 1))
(is (= (:orientation-angle det) -1075.0))
(is (= (:camera-source det) :vertical))
(is (= (:translation det) {:x 0.0 :y 0.0 :z 0.0}))
(is (= (:rotation det)
{:m11 0.0, :m12 0.0, :m13 0.0,
:m21 0.0, :m22 0.0, :m23 0.0,
:m31 0.0, :m32 0.0, :m33 0.0}))))
(testing "second detection"
(let [det (nth detections 1)]
(is (= (:type det) :horizontal-drone-shell))
(is (= (:xc det) 2))
(is (= (:yc det) 2))
(is (= (:width det) 2))
(is (= (:height det) 2))
(is (= (:dist det) 2))
(is (= (:orientation-angle det) -1075.0))
(is (= (:camera-source det) :vertical-hsync))
(is (= (:translation det) {:x 0.0 :y 0.0 :z 0.0}))
(is (= (:rotation det) {:m11 0.0, :m12 0.0, :m13 0.0,
:m21 0.0, :m22 0.0, :m23 0.0,
:m31 0.0, :m32 0.0, :m33 0.0}))))))
(testing "about parse-navdata"
(testing "parse-navdata"
(testing "hand-crafted input"
(let [navdata (navdata/parse-navdata nav-input)]
(is (= (:header navdata) 0x55667788))
(is (= (:seq-num navdata) 870))
(is (= (:vision-flag navdata) false))
(testing "state"
(let [state (:state navdata)]
(is (= (:battery state) :ok))
(is (= (:flying state) :landed))))
(testing "demo"
(let [demo (:demo navdata)]
(is (= (:control-state demo) :landed))
(is (= (:battery-percentage demo) 100))
(is (= (:theta demo) (float -1.075)))
(is (= (:phi demo) (float -2.904)))
(is (= (:psi demo) (float -0.215)))
(is (= (:altitude demo) 0.0))
(is (= (:velocity demo) {:x 0.0 :y 0.0 :z 0.0})))))))))
(defn test-adc-data-frame-option [navdata]
(testing "adc-data-frame option"
(let [adc (:adc-data-frame navdata)]
(is (= (:version adc) 0))
(is (= (:data-frame adc) (repeat 32 0))))))
(defn test-gps-option [navdata]
(testing "gps option"
(let [gps (:gps navdata)]
(are [x y] (= x y)
(:latitude gps) 34.0905016
(:longitude gps) -118.2766877
(:elevation gps) 122.64
(:hdop gps) 1.0
(:data-available gps) 7
(:zero-validated gps) 1
(:wpt-validated gps) 0
(:lat0 gps) 34.0905016
(:lon0 gps) -118.2766877
(:lat-fuse gps) 34.0904833
(:lon-fuse gps) -118.2766982
(:gps-state gps) 1
(:x-traj gps) 0.0
(:x-ref gps) 0.0
(:y-traj gps) 0.0
(:y-ref gps) 0.0
(:theta-p gps) 0.0
(:phi-p gps) 0.0
(:theta-i gps) 0.0
(:phi-i gps) 0.0
(:theta-d gps) 0.0
(:phi-d gps) 0.0
(:vdop gps) 0.0
(:pdop gps) 0.0
(:speed gps) (float 0.1)
(:last-frame-timestamp gps) 2.409591
(:degree gps) (float 141.01)
(:degree-mag gps) 0.0
(:ehpe gps) (float 8.26)
(:ehve gps) (float 0.42999998)
(:c-n0 gps) 28.0
(:num-satellites gps) 9
(:channels gps) [{:cn0 26, :sat 10}
{:cn0 21, :sat 5}
{:cn0 27, :sat 8}
{:cn0 17, :sat 3}
{:cn0 18, :sat 13}
{:cn0 32, :sat 7}
{:cn0 23, :sat 9}
{:cn0 9, :sat 27}
{:cn0 19, :sat 19}
{:cn0 29, :sat 28}
{:cn0 26, :sat 30}
{:cn0 0, :sat 138}]
(:gps-plugged gps) 1
(:ephemeris-status gps) 73
(:vx-traj gps) 0.0
(:vy-traj gps) 0.0
(:firmware-status gps) 1))))
(defn test-trackers-send-option [navdata]
(testing "trackers-send option"
(let [ts (:trackers-send navdata)]
(is (= (:locked ts) (repeat 30 0)))
(is (= (:point ts) (repeat 30 {:x 0 :y 0}))))))
(defn test-vision-option [navdata]
(testing "vision option"
(let [v (:vision navdata)]
(are [x y] (= x y)
(:state v) 2
(:misc v) 0
(:phi v) {:trim 0.0 :ref-prop 0.0}
(:theta v) {:trim 0.0 :ref-prop 0.0}
(:new-raw-picture v) 0
(:capture v) {:theta (float 0.05190306529402733)
:phi (float 0.009620788507163525)
:psi (float 0.033727407455444336)
:altitude 243
:time 0.362969}
(:body-v v) {:x (float 0.05845191329717636)
:y (float -0.8817280530929565)
:z (float 0.011505687609314919)}
(:delta v) {:phi 0.0
:theta 0.0
:psi 0.0}
(:gold v) {:defined 0
:reset 0
:x 0.0
:y 0.0}))))
(defn test-vision-perf-option [navdata]
(testing "vision-perf option"
(let [v (:vision-perf navdata)]
(are [x y] (= x y)
(:szo v) 0.0
(:corners v) 0.0
(:compute v) 0.0
(:tracking v) 0.0
(:trans v) 0.0
(:update v) 0.0
(:custom v) [0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0
0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0]))))
(defn test-watchdog-option [navdata]
(testing "watchdog option"
(let [w (:watchdog navdata)]
(is (= w 4822)))))
(deftest navdata-specimen-tests
(testing "parse-navdata on specimen"
(let [navdata-bytes (xio/binary-slurp (io/resource "navdata.bin"))]
;;(println "Benchmarking parse-navdata")
;;(criterium/bench (parse-navdata navdata-bytes))
;;(criterium/bench (:gps (parse-navdata navdata-bytes)))
(let [navdata (navdata/parse-navdata navdata-bytes)]
(testing "navdata"
(are [x y] (= x y)
(:header navdata) 0x55667788
(:seq-num navdata) 300711
(:vision-flag navdata) true))
(testing "state"
(let [state (:state navdata)]
(are [x y] (every? (fn [[k v]] (= (x k) v)) y)
state {:flying :landed}
state {:video :off}
state {:vision :off}
state {:altitude-control :on}
state {:command-ack :received}
state {:camera :ready}
state {:travelling :off}
state {:usb :not-ready}
state {:demo :off}
state {:bootstrap :off}
state {:motors :ok}
state {:communication :ok}
state {:software :ok}
state {:bootstrap :off}
state {:battery :ok}
state {:emergency-landing :off}
state {:timer :not-elapsed}
state {:magneto :ok}
state {:angles :ok}
state {:wind :ok}
state {:ultrasound :ok}
state {:cutout :ok}
state {:pic-version :ok}
state {:atcodec-thread :on}
state {:navdata-thread :on}
state {:video-thread :on}
state {:acquisition-thread :on}
state {:ctrl-watchdog :ok}
state {:adc-watchdog :ok}
state {:com-watchdog :problem}
state {:emergency-landing :off})))
(test-adc-data-frame-option navdata)
(testing "time option"
(is (= (:time navdata) 362.979125)))
(testing "raw-measures option"
(let [raw-meas (:raw-measures navdata)]
(is (= (:accelerometers raw-meas)
{:x 2040 :y 2036 :z 2528}))
(is (= (:gyroscopes raw-meas)
{:x -23 :y 15 :z 0}))
(is (= (:gyroscopes-110 raw-meas)
{:x 0 :y 0}))
(is (= (:battery-millivolts raw-meas) 11686))
(is (= (:us-echo raw-meas)
{:start 0 :end 0 :association 3758 :distance 0}))
(is (= (:us-curve raw-meas)
{:time 21423 :value 0 :ref 120}))
(is (= (:echo raw-meas) {:flag-ini 1 :num 1 :sum 3539193}))
(is (= (:alt-temp-raw raw-meas) 243))
(is (= (:gradient raw-meas) 41))))
(testing "phys-measures option"
(let [phys-meas (:phys-measures navdata)]
(is (= (:temperature phys-meas)
{:accelerometer 45.309303283691406 :gyroscope 55738}))
(is (= (:accelerometers phys-meas)
{:x 80.2970962524414
:y -33.318603515625
:z -942.5283203125}))
(is (= (:gyroscopes phys-meas)
{:x -0.11236488074064255
:y 0.06872134655714035
:z 0.06200997903943062}))
(is (= (:alim3v3 phys-meas) 0))
(is (= (:vref-epson phys-meas) 0))
(is (= (:vref-idg phys-meas) 0))))
(testing "wifi option"
(let [wifi (:wifi navdata)]
(is (= (:link-quality wifi) 1.0))))
(testing "altitude option"
(let [alt (:altitude navdata)]
(are [x y] (= x y)
(:vision alt) 243
(:velocity alt) 0.0
(:ref alt) 0
(:raw alt) 243
(:observer alt) {:acceleration 0.0
:altitude 0.0
:x {:x 0.0
:y 0.0
:z 0.0}
:state 0}
(:estimated alt) {:vb {:x 0.0
:y 0.0}
:state 0})))
(testing "demo option"
(let [demo (:demo navdata)]
(are [x y] (= x y)
(:control-state demo) :landed
(:battery-percentage demo) 50
(:theta demo) (float 2.974)
(:phi demo) (float 0.55)
(:psi demo) (float 1.933)
(:altitude demo) 0.0
(:velocity demo) {:x 0.0585307739675045
:y -0.8817979097366333
:z 0.0})))
(testing "euler angles option"
(let [euler (:euler-angles navdata)]
(is (= (:theta euler) 4866.0))
(is (= (:phi euler) 2024.0))))
(testing "games option"
(let [games (:games navdata)]
(is (= games {:counters {:double-tap 0 :finish-line 0}}))))
(test-gps-option navdata)
(testing "gryos offsets option"
(let [gyros (:gyros-offsets navdata)]
(is (= gyros {:x -0.5329172611236572
:y 0.1788240224123001,
:z 0.0}))))
(testing "magneto option"
(let [magneto (:magneto navdata)]
(are [x y] (= x y)
(:mx magneto) 30
(:my magneto) -56
(:mz magneto) 80
(:raw magneto) {:x 189.0 :y -100.8984375 :z -278.4375}
(:rectified magneto) {:x 145.08058166503906
:y -84.93736267089844
:z -287.18157958984375}
(:offset magneto) {:x 29.21237564086914
:y -13.282999038696289
:z 0.0}
(:heading magneto) {:unwrapped (float 0.0)
:gyro-unwrapped (float 4.132266E-4)
:fusion-unwrapped (float 1.9333557)}
(:calibration-ok magneto) 1
(:state magneto) 2
(:radius magneto) (float 387.31146)
(:error magneto) {:mean (float -211.51361)
:variance (float 79.36719)})))
(testing "pressure raw option"
(let [pressure-raw (:pressure-raw navdata)]
(is (= pressure-raw
{:pressure 101586
:temperature 435
:ut 32556
:up 39148}))))
(testing "pwm option"
(let [pwm (:pwm navdata)]
(are [x y] (= x y)
(:motors pwm) [0 0 0 0]
(:sat-motors pwm) [255 255 255 255]
(:gaz-feed-forward pwm) 0.0
(:gaz-altitude pwm) 0.0
(:altitude-integral pwm) 0.0
(:vz-ref pwm) 0.0
(:u-pitch pwm) 0
(:u-roll pwm) 0
(:u-yaw pwm) 0
(:yaw-u-i pwm) 0
(:u-pitch-planif pwm) 0
(:u-roll-planif pwm) 0
(:u-yaw-planif pwm) 0
(:u-gaz-planif pwm) 0
(:motor-currents pwm) [0 0 0 0]
(:altitude-prop pwm) 0.0
(:altitude-der pwm) 0.0)))
(testing "rc references option"
(let [rc-ref (:rc-references navdata)]
(are [x y] (= x y)
(:pitch rc-ref) 0
(:roll rc-ref) 0
(:yaw rc-ref) 0
(:gaz rc-ref) 0
(:az rc-ref) 0)))
(testing "references option"
(let [ref (:references navdata)]
(are [x y] (= x y)
(:theta ref) 0
(:phi ref) 0
(:psi ref) 0
(:theta-i ref) 0
(:phi-i ref) 0
(:pitch ref) 0
(:roll ref) 0
(:yaw ref) 0
(:psi ref) 0
(:vx ref) 0.0
(:vy ref) 0.0
(:theta-mod ref) 0.0
(:phi-mod ref) 0.0
(:k-v-x ref) 0.0
(:k-v-y ref) 0.0
(:k-mode ref) 0.0
(:ui ref) {:time 0.0
:theta 0.0
:phi 0.0
:psi 0.0
:psi-accuracy 0.0
:seq 0})))
(test-trackers-send-option navdata)
(testing "trims option"
(let [trims (:trims navdata)]
(is (= (:angular-rates trims) {:r 0.0}))
(is (= (:euler-angles trims) {:theta (float 3028.916)
:phi (float 1544.3184)}))))
(testing "video-stream option"
(let [video-stream (:video-stream navdata)]
(is (= video-stream
{:at-cmd {:mean-gap 0 :quality 0 :sequence 0 :var-gap 0}
:bitrate {:desired 0 :out 0}
:data [0 0 0 0 0]
:fifo-queue-level 0
:frame {:number 46105 :size 4597}
:quant 0
:tcp-queue-level 0}))))
(test-vision-option navdata)
(testing "vision-detect option"
(let [detections (:vision-detect navdata)]
(is (= (count detections) 0))))
(testing "vision-of option"
(let [v (:vision-of navdata)]
(is (= (:dx v) [0.0 0.0 0.0 0.0 0.0]))
(is (= (:dy v) [0.0 0.0 0.0 0.0 0.0]))))
(test-vision-perf-option navdata)
(testing "vision-raw option"
(let [v (:vision-raw navdata)]
(is (= (:tx v) 1.3266397714614868))
(is (= (:ty v) -0.7230937480926514))
(is (= (:tz v) 0.0))))
(test-watchdog-option navdata)
(testing "windspeed option"
(let [wind-speed (:wind-speed navdata)]
(is (= wind-speed
{:angle (float 0.0)
:compensation {:phi (float 0.0) :theta (float 0.0)}
:debug [(float 0.0)
(float 0.0)
(float 0.0)]
:speed (float 0.0)
:state-x [(float 0.058451913)
(float -0.88172805)
(float 0.0)
(float 0.0)
(float 305.59628)
(float -236.80516)]}))))
))))
(deftest navdata-bytes-seq-tests
(testing "navdata-bytes-seq on specimen"
(let [navdata-bytes (xio/binary-slurp (io/resource "navdata.bin"))
navdata-seq (navdata/navdata-bytes-seq navdata-bytes)]
(is (= (count navdata-seq) 2))
(let [navdata-bb (first navdata-seq)
options-bbs (second navdata-seq)]
(is (instance? ByteBuffer navdata-bb))
(is (= (.remaining navdata-bb) 16))
(is (= (count options-bbs) 29))
(is (every? #(= (count %) 2) options-bbs))
(is (every? #(or (keyword? %) (number? %)) (map first options-bbs)))
(is (every? #(instance? ByteBuffer %) (map second options-bbs)))))))
| null |
https://raw.githubusercontent.com/wiseman/turboshrimp/a1f53003bd4afd084e1c9e8cb80109ba732854bc/test/com/lemondronor/turboshrimp/navdata_test.clj
|
clojure
|
Skip past the option ID and option size.
Skip past the option ID and option size.
(println "Benchmarking parse-navdata")
(criterium/bench (parse-navdata navdata-bytes))
(criterium/bench (:gps (parse-navdata navdata-bytes)))
|
(ns com.lemondronor.turboshrimp.navdata-test
(:require [clojure.java.io :as io]
[clojure.test :refer :all]
[com.lemondronor.turboshrimp.navdata :as navdata]
[com.lemonodor.xio :as xio])
(:import (java.nio ByteBuffer ByteOrder)))
(def b-matrix33 (vec (repeat (* 9 4) 0 )))
(def b-vector31 (vec (repeat (* 3 4) 0 )))
(def b-header [-120 119 102 85])
(def b-state [-48 4 -128 15])
(def b-seqnum [102 3 0 0])
(def b-vision [0 0 0 0])
(def b-demo-option-id [0 0])
(def b-demo-option-size [-108 0])
(def b-demo-control-state [0 0 2 0])
(def b-demo-battery [100 0 0 0])
(def b-demo-pitch [0 96 -122 -60])
(def b-demo-roll [0 -128 53 -59])
(def b-demo-yaw [0 0 87 -61])
(def b-demo-altitude [0 0 0 0])
(def b-demo-velocity-x [0 0 0 0])
(def b-demo-velocity-y [0 0 0 0])
(def b-demo-velocity-z [0 0 0 0])
(def b-demo-num-frames [0 0 0 0])
(def b-demo-detect-camera-rot b-matrix33)
(def b-demo-detect-camera-trans b-vector31)
(def b-demo-detect-tag-index [0 0 0 0])
(def b-demo-detect-camera-type [4 0 0 0])
(def b-demo-drone-camera-rot b-matrix33)
(def b-demo-drone-camera-trans b-vector31)
(def b-demo-option (flatten (conj b-demo-option-id b-demo-option-size
b-demo-control-state b-demo-battery
b-demo-pitch b-demo-roll b-demo-yaw
b-demo-altitude b-demo-velocity-x
b-demo-velocity-y b-demo-velocity-z
b-demo-num-frames
b-demo-detect-camera-rot b-demo-detect-camera-trans
b-demo-detect-tag-index
b-demo-detect-camera-type b-demo-drone-camera-rot
b-demo-drone-camera-trans)))
(def b-vision-detect-option-id [16 0])
(def b-vision-detect-option-size [72 1])
(def b-vision-detect-num-tags-detected [2 0 0 0])
(def b-vision-detect-type [1 0 0 0 2 0 0 0 3 0 0 0 4 0 0 0])
(def b-vision-detect-xc [1 0 0 0 2 0 0 0 3 0 0 0 4 0 0 0])
(def b-vision-detect-yc [1 0 0 0 2 0 0 0 3 0 0 0 4 0 0 0])
(def b-vision-detect-width [1 0 0 0 2 0 0 0 3 0 0 0 4 0 0 0])
(def b-vision-detect-height [1 0 0 0 2 0 0 0 3 0 0 0 4 0 0 0])
(def b-vision-detect-dist [1 0 0 0 2 0 0 0 3 0 0 0 4 0 0 0])
(def b-vision-detect-orient-angle [0 96 -122 -60 0 96 -122 -60 0 96 -122 -60 0 96 -122 -60])
(def b-vision-detect-rotation (flatten (conj b-matrix33 b-matrix33 b-matrix33 b-matrix33)))
(def b-vision-detect-translation (flatten (conj b-vector31 b-vector31 b-vector31 b-vector31)))
(def b-vision-detect-camera-source [1 0 0 0 2 0 0 0 2 0 0 0 2 0 0 0])
(def b-vision-detect-option (flatten (conj b-vision-detect-option-id b-vision-detect-option-size
b-vision-detect-num-tags-detected
b-vision-detect-type b-vision-detect-xc b-vision-detect-yc
b-vision-detect-width b-vision-detect-height b-vision-detect-dist
b-vision-detect-orient-angle b-vision-detect-rotation b-vision-detect-translation
b-vision-detect-camera-source)))
(def b-checksum-option-id [-1 -1])
(def b-checksum-option-size [0x08 0x00])
(def b-checksum-option-checksum [0x08 0x10 0x00 0x00])
(def b-checksum-option
(flatten (conj b-checksum-option-id b-checksum-option-size
b-checksum-option-checksum)))
(def header (map byte [-120 119 102 85]))
(def nav-input
(byte-array (map byte (flatten (conj b-header b-state b-seqnum b-vision
b-demo-option b-vision-detect-option
b-checksum-option)))))
(deftest navdata-unit-tests
(testing "about parse-nav-state"
(testing "parse-nav-state"
(let [state 260048080
result (navdata/parse-nav-state state)
{:keys [flying video vision control altitude-control
user-feedback command-ack camera travelling
usb demo bootstrap motors communication
software battery emergency-landing timer
magneto angles wind ultrasound cutout
pic-version atcodec-thread navdata-thread
video-thread acquisition-thread ctrl-watchdog
adc-watchdog com-watchdog emergency]} result]
(is (= flying :landed))
(is (= video :off))
(is (= vision :off))
(is (= control :euler-angles))
(is (= altitude-control :on))
(is (= user-feedback :off))
(is (= command-ack :received))
(is (= camera :ready))
(is (= travelling :off))
(is (= usb :not-ready))
(is (= demo :on))
(is (= bootstrap :off))
(is (= motors :ok))
(is (= communication :ok))
(is (= software :ok))
(is (= battery :ok))
(is (= emergency-landing :off))
(is (= timer :not-elapsed))
(is (= magneto :ok))
(is (= angles :ok))
(is (= wind :ok))
(is (= ultrasound :ok))
(is (= cutout :ok))
(is (= pic-version :ok))
(is (= atcodec-thread :on))
(is (= navdata-thread :on))
(is (= video-thread :on))
(is (= acquisition-thread :on))
(is (= ctrl-watchdog :ok))
(is (= adc-watchdog :ok))
(is (= com-watchdog :ok))
(is (= emergency :ok)))))
(testing "which-option-type"
(is (= (navdata/which-option-type 0) :demo))
(is (= (navdata/which-option-type 16) :vision-detect))
(is (= (navdata/which-option-type 2342342) nil)))
(testing "about parse-control-state"
(testing "parse-control-state"
(let [bb (doto (gloss.io/to-byte-buffer b-demo-control-state)
(.order ByteOrder/LITTLE_ENDIAN))
control-state (.getInt bb)]
(is (= (navdata/parse-control-state control-state) :landed)))))
(testing "about parse-demo-option"
(testing "parse-demo-option"
(let [bb (doto (gloss.io/to-byte-buffer
(drop 4 b-demo-option))
(.order ByteOrder/LITTLE_ENDIAN))
option (navdata/parse-demo-option bb)]
(is (= (:control-state option) :landed))
(is (= (:battery-percentage option) 100))
(is (= (:theta option) (float -1.075)))
(is (= (:phi option) (float -2.904)))
(is (= (:psi option) (float -0.215)))
(is (= (:altitude option) 0.0))
(is (= (:velocity option) {:x 0.0 :y 0.0 :z 0.0}))
(is (= (:detect-camera-type option) :roundel-under-drone)))))
(testing "about parse-vision-detect-option"
(let [detections
(navdata/parse-vision-detect-option
(doto (gloss.io/to-byte-buffer
(drop 4
b-vision-detect-option))
(.order ByteOrder/LITTLE_ENDIAN)))]
(is (= (count detections) 2))
(testing "first detection"
(let [det (nth detections 0)]
(is (= (:type det) :vertical-deprecated))
(is (= (:xc det) 1))
(is (= (:yc det) 1))
(is (= (:width det) 1))
(is (= (:height det) 1))
(is (= (:dist det) 1))
(is (= (:orientation-angle det) -1075.0))
(is (= (:camera-source det) :vertical))
(is (= (:translation det) {:x 0.0 :y 0.0 :z 0.0}))
(is (= (:rotation det)
{:m11 0.0, :m12 0.0, :m13 0.0,
:m21 0.0, :m22 0.0, :m23 0.0,
:m31 0.0, :m32 0.0, :m33 0.0}))))
(testing "second detection"
(let [det (nth detections 1)]
(is (= (:type det) :horizontal-drone-shell))
(is (= (:xc det) 2))
(is (= (:yc det) 2))
(is (= (:width det) 2))
(is (= (:height det) 2))
(is (= (:dist det) 2))
(is (= (:orientation-angle det) -1075.0))
(is (= (:camera-source det) :vertical-hsync))
(is (= (:translation det) {:x 0.0 :y 0.0 :z 0.0}))
(is (= (:rotation det) {:m11 0.0, :m12 0.0, :m13 0.0,
:m21 0.0, :m22 0.0, :m23 0.0,
:m31 0.0, :m32 0.0, :m33 0.0}))))))
(testing "about parse-navdata"
(testing "parse-navdata"
(testing "hand-crafted input"
(let [navdata (navdata/parse-navdata nav-input)]
(is (= (:header navdata) 0x55667788))
(is (= (:seq-num navdata) 870))
(is (= (:vision-flag navdata) false))
(testing "state"
(let [state (:state navdata)]
(is (= (:battery state) :ok))
(is (= (:flying state) :landed))))
(testing "demo"
(let [demo (:demo navdata)]
(is (= (:control-state demo) :landed))
(is (= (:battery-percentage demo) 100))
(is (= (:theta demo) (float -1.075)))
(is (= (:phi demo) (float -2.904)))
(is (= (:psi demo) (float -0.215)))
(is (= (:altitude demo) 0.0))
(is (= (:velocity demo) {:x 0.0 :y 0.0 :z 0.0})))))))))
(defn test-adc-data-frame-option [navdata]
(testing "adc-data-frame option"
(let [adc (:adc-data-frame navdata)]
(is (= (:version adc) 0))
(is (= (:data-frame adc) (repeat 32 0))))))
(defn test-gps-option [navdata]
(testing "gps option"
(let [gps (:gps navdata)]
(are [x y] (= x y)
(:latitude gps) 34.0905016
(:longitude gps) -118.2766877
(:elevation gps) 122.64
(:hdop gps) 1.0
(:data-available gps) 7
(:zero-validated gps) 1
(:wpt-validated gps) 0
(:lat0 gps) 34.0905016
(:lon0 gps) -118.2766877
(:lat-fuse gps) 34.0904833
(:lon-fuse gps) -118.2766982
(:gps-state gps) 1
(:x-traj gps) 0.0
(:x-ref gps) 0.0
(:y-traj gps) 0.0
(:y-ref gps) 0.0
(:theta-p gps) 0.0
(:phi-p gps) 0.0
(:theta-i gps) 0.0
(:phi-i gps) 0.0
(:theta-d gps) 0.0
(:phi-d gps) 0.0
(:vdop gps) 0.0
(:pdop gps) 0.0
(:speed gps) (float 0.1)
(:last-frame-timestamp gps) 2.409591
(:degree gps) (float 141.01)
(:degree-mag gps) 0.0
(:ehpe gps) (float 8.26)
(:ehve gps) (float 0.42999998)
(:c-n0 gps) 28.0
(:num-satellites gps) 9
(:channels gps) [{:cn0 26, :sat 10}
{:cn0 21, :sat 5}
{:cn0 27, :sat 8}
{:cn0 17, :sat 3}
{:cn0 18, :sat 13}
{:cn0 32, :sat 7}
{:cn0 23, :sat 9}
{:cn0 9, :sat 27}
{:cn0 19, :sat 19}
{:cn0 29, :sat 28}
{:cn0 26, :sat 30}
{:cn0 0, :sat 138}]
(:gps-plugged gps) 1
(:ephemeris-status gps) 73
(:vx-traj gps) 0.0
(:vy-traj gps) 0.0
(:firmware-status gps) 1))))
(defn test-trackers-send-option [navdata]
(testing "trackers-send option"
(let [ts (:trackers-send navdata)]
(is (= (:locked ts) (repeat 30 0)))
(is (= (:point ts) (repeat 30 {:x 0 :y 0}))))))
(defn test-vision-option [navdata]
(testing "vision option"
(let [v (:vision navdata)]
(are [x y] (= x y)
(:state v) 2
(:misc v) 0
(:phi v) {:trim 0.0 :ref-prop 0.0}
(:theta v) {:trim 0.0 :ref-prop 0.0}
(:new-raw-picture v) 0
(:capture v) {:theta (float 0.05190306529402733)
:phi (float 0.009620788507163525)
:psi (float 0.033727407455444336)
:altitude 243
:time 0.362969}
(:body-v v) {:x (float 0.05845191329717636)
:y (float -0.8817280530929565)
:z (float 0.011505687609314919)}
(:delta v) {:phi 0.0
:theta 0.0
:psi 0.0}
(:gold v) {:defined 0
:reset 0
:x 0.0
:y 0.0}))))
(defn test-vision-perf-option [navdata]
(testing "vision-perf option"
(let [v (:vision-perf navdata)]
(are [x y] (= x y)
(:szo v) 0.0
(:corners v) 0.0
(:compute v) 0.0
(:tracking v) 0.0
(:trans v) 0.0
(:update v) 0.0
(:custom v) [0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0
0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0]))))
(defn test-watchdog-option [navdata]
(testing "watchdog option"
(let [w (:watchdog navdata)]
(is (= w 4822)))))
(deftest navdata-specimen-tests
(testing "parse-navdata on specimen"
(let [navdata-bytes (xio/binary-slurp (io/resource "navdata.bin"))]
(let [navdata (navdata/parse-navdata navdata-bytes)]
(testing "navdata"
(are [x y] (= x y)
(:header navdata) 0x55667788
(:seq-num navdata) 300711
(:vision-flag navdata) true))
(testing "state"
(let [state (:state navdata)]
(are [x y] (every? (fn [[k v]] (= (x k) v)) y)
state {:flying :landed}
state {:video :off}
state {:vision :off}
state {:altitude-control :on}
state {:command-ack :received}
state {:camera :ready}
state {:travelling :off}
state {:usb :not-ready}
state {:demo :off}
state {:bootstrap :off}
state {:motors :ok}
state {:communication :ok}
state {:software :ok}
state {:bootstrap :off}
state {:battery :ok}
state {:emergency-landing :off}
state {:timer :not-elapsed}
state {:magneto :ok}
state {:angles :ok}
state {:wind :ok}
state {:ultrasound :ok}
state {:cutout :ok}
state {:pic-version :ok}
state {:atcodec-thread :on}
state {:navdata-thread :on}
state {:video-thread :on}
state {:acquisition-thread :on}
state {:ctrl-watchdog :ok}
state {:adc-watchdog :ok}
state {:com-watchdog :problem}
state {:emergency-landing :off})))
(test-adc-data-frame-option navdata)
(testing "time option"
(is (= (:time navdata) 362.979125)))
(testing "raw-measures option"
(let [raw-meas (:raw-measures navdata)]
(is (= (:accelerometers raw-meas)
{:x 2040 :y 2036 :z 2528}))
(is (= (:gyroscopes raw-meas)
{:x -23 :y 15 :z 0}))
(is (= (:gyroscopes-110 raw-meas)
{:x 0 :y 0}))
(is (= (:battery-millivolts raw-meas) 11686))
(is (= (:us-echo raw-meas)
{:start 0 :end 0 :association 3758 :distance 0}))
(is (= (:us-curve raw-meas)
{:time 21423 :value 0 :ref 120}))
(is (= (:echo raw-meas) {:flag-ini 1 :num 1 :sum 3539193}))
(is (= (:alt-temp-raw raw-meas) 243))
(is (= (:gradient raw-meas) 41))))
(testing "phys-measures option"
(let [phys-meas (:phys-measures navdata)]
(is (= (:temperature phys-meas)
{:accelerometer 45.309303283691406 :gyroscope 55738}))
(is (= (:accelerometers phys-meas)
{:x 80.2970962524414
:y -33.318603515625
:z -942.5283203125}))
(is (= (:gyroscopes phys-meas)
{:x -0.11236488074064255
:y 0.06872134655714035
:z 0.06200997903943062}))
(is (= (:alim3v3 phys-meas) 0))
(is (= (:vref-epson phys-meas) 0))
(is (= (:vref-idg phys-meas) 0))))
(testing "wifi option"
(let [wifi (:wifi navdata)]
(is (= (:link-quality wifi) 1.0))))
(testing "altitude option"
(let [alt (:altitude navdata)]
(are [x y] (= x y)
(:vision alt) 243
(:velocity alt) 0.0
(:ref alt) 0
(:raw alt) 243
(:observer alt) {:acceleration 0.0
:altitude 0.0
:x {:x 0.0
:y 0.0
:z 0.0}
:state 0}
(:estimated alt) {:vb {:x 0.0
:y 0.0}
:state 0})))
(testing "demo option"
(let [demo (:demo navdata)]
(are [x y] (= x y)
(:control-state demo) :landed
(:battery-percentage demo) 50
(:theta demo) (float 2.974)
(:phi demo) (float 0.55)
(:psi demo) (float 1.933)
(:altitude demo) 0.0
(:velocity demo) {:x 0.0585307739675045
:y -0.8817979097366333
:z 0.0})))
(testing "euler angles option"
(let [euler (:euler-angles navdata)]
(is (= (:theta euler) 4866.0))
(is (= (:phi euler) 2024.0))))
(testing "games option"
(let [games (:games navdata)]
(is (= games {:counters {:double-tap 0 :finish-line 0}}))))
(test-gps-option navdata)
(testing "gryos offsets option"
(let [gyros (:gyros-offsets navdata)]
(is (= gyros {:x -0.5329172611236572
:y 0.1788240224123001,
:z 0.0}))))
(testing "magneto option"
(let [magneto (:magneto navdata)]
(are [x y] (= x y)
(:mx magneto) 30
(:my magneto) -56
(:mz magneto) 80
(:raw magneto) {:x 189.0 :y -100.8984375 :z -278.4375}
(:rectified magneto) {:x 145.08058166503906
:y -84.93736267089844
:z -287.18157958984375}
(:offset magneto) {:x 29.21237564086914
:y -13.282999038696289
:z 0.0}
(:heading magneto) {:unwrapped (float 0.0)
:gyro-unwrapped (float 4.132266E-4)
:fusion-unwrapped (float 1.9333557)}
(:calibration-ok magneto) 1
(:state magneto) 2
(:radius magneto) (float 387.31146)
(:error magneto) {:mean (float -211.51361)
:variance (float 79.36719)})))
(testing "pressure raw option"
(let [pressure-raw (:pressure-raw navdata)]
(is (= pressure-raw
{:pressure 101586
:temperature 435
:ut 32556
:up 39148}))))
(testing "pwm option"
(let [pwm (:pwm navdata)]
(are [x y] (= x y)
(:motors pwm) [0 0 0 0]
(:sat-motors pwm) [255 255 255 255]
(:gaz-feed-forward pwm) 0.0
(:gaz-altitude pwm) 0.0
(:altitude-integral pwm) 0.0
(:vz-ref pwm) 0.0
(:u-pitch pwm) 0
(:u-roll pwm) 0
(:u-yaw pwm) 0
(:yaw-u-i pwm) 0
(:u-pitch-planif pwm) 0
(:u-roll-planif pwm) 0
(:u-yaw-planif pwm) 0
(:u-gaz-planif pwm) 0
(:motor-currents pwm) [0 0 0 0]
(:altitude-prop pwm) 0.0
(:altitude-der pwm) 0.0)))
(testing "rc references option"
(let [rc-ref (:rc-references navdata)]
(are [x y] (= x y)
(:pitch rc-ref) 0
(:roll rc-ref) 0
(:yaw rc-ref) 0
(:gaz rc-ref) 0
(:az rc-ref) 0)))
(testing "references option"
(let [ref (:references navdata)]
(are [x y] (= x y)
(:theta ref) 0
(:phi ref) 0
(:psi ref) 0
(:theta-i ref) 0
(:phi-i ref) 0
(:pitch ref) 0
(:roll ref) 0
(:yaw ref) 0
(:psi ref) 0
(:vx ref) 0.0
(:vy ref) 0.0
(:theta-mod ref) 0.0
(:phi-mod ref) 0.0
(:k-v-x ref) 0.0
(:k-v-y ref) 0.0
(:k-mode ref) 0.0
(:ui ref) {:time 0.0
:theta 0.0
:phi 0.0
:psi 0.0
:psi-accuracy 0.0
:seq 0})))
(test-trackers-send-option navdata)
(testing "trims option"
(let [trims (:trims navdata)]
(is (= (:angular-rates trims) {:r 0.0}))
(is (= (:euler-angles trims) {:theta (float 3028.916)
:phi (float 1544.3184)}))))
(testing "video-stream option"
(let [video-stream (:video-stream navdata)]
(is (= video-stream
{:at-cmd {:mean-gap 0 :quality 0 :sequence 0 :var-gap 0}
:bitrate {:desired 0 :out 0}
:data [0 0 0 0 0]
:fifo-queue-level 0
:frame {:number 46105 :size 4597}
:quant 0
:tcp-queue-level 0}))))
(test-vision-option navdata)
(testing "vision-detect option"
(let [detections (:vision-detect navdata)]
(is (= (count detections) 0))))
(testing "vision-of option"
(let [v (:vision-of navdata)]
(is (= (:dx v) [0.0 0.0 0.0 0.0 0.0]))
(is (= (:dy v) [0.0 0.0 0.0 0.0 0.0]))))
(test-vision-perf-option navdata)
(testing "vision-raw option"
(let [v (:vision-raw navdata)]
(is (= (:tx v) 1.3266397714614868))
(is (= (:ty v) -0.7230937480926514))
(is (= (:tz v) 0.0))))
(test-watchdog-option navdata)
(testing "windspeed option"
(let [wind-speed (:wind-speed navdata)]
(is (= wind-speed
{:angle (float 0.0)
:compensation {:phi (float 0.0) :theta (float 0.0)}
:debug [(float 0.0)
(float 0.0)
(float 0.0)]
:speed (float 0.0)
:state-x [(float 0.058451913)
(float -0.88172805)
(float 0.0)
(float 0.0)
(float 305.59628)
(float -236.80516)]}))))
))))
(deftest navdata-bytes-seq-tests
(testing "navdata-bytes-seq on specimen"
(let [navdata-bytes (xio/binary-slurp (io/resource "navdata.bin"))
navdata-seq (navdata/navdata-bytes-seq navdata-bytes)]
(is (= (count navdata-seq) 2))
(let [navdata-bb (first navdata-seq)
options-bbs (second navdata-seq)]
(is (instance? ByteBuffer navdata-bb))
(is (= (.remaining navdata-bb) 16))
(is (= (count options-bbs) 29))
(is (every? #(= (count %) 2) options-bbs))
(is (every? #(or (keyword? %) (number? %)) (map first options-bbs)))
(is (every? #(instance? ByteBuffer %) (map second options-bbs)))))))
|
dbf6111f787891cf162061bdc268dfa41da25ae72a024f160ee7b3a4acc6efb3
|
0day1day/bap
|
ssa.ml
|
(**
   Static Single Assignment form.
   This is the intermediate language where most analysis should be happening.
   @author Ivan Jager
*)
open Big_int_Z
open Big_int_convenience
open BatListFull
open Type
open Var
type var = Var.t
(* type value = *)
(* | Int of big_int * typ *)
(* | Var of var *)
(* | Lab of string *)
type exp =
| Load of exp * exp * exp * typ (** Load(arr, idx, endian, t) *)
| Store of exp * exp * exp * exp * typ (** Store(arr, idx, val, endian, t) *)
| BinOp of binop_type * exp * exp
| UnOp of unop_type * exp
| Var of var
| Lab of string
| Int of big_int * typ
| Cast of cast_type * typ * exp (** Cast to a new type. *)
(* Should SSA have Lets? *)
| Unknown of string * typ
| Ite of exp * exp * exp
| Extract of big_int * big_int * exp
| Concat of exp * exp
| Phi of var list
(** Joins variables that were assigned over different paths *)
type attrs = Type.attributes
type stmt =
| Move of var * exp * attrs (** Assign the exp on the right to the
var on the left *)
| Jmp of exp * attrs (** Jump to a label/address *)
| CJmp of exp * exp * exp * attrs
(** Conditional jump. If e1 is true, jumps to e2, otherwise jumps to e3 *)
| Label of label * attrs (** A label we can jump to *)
| Halt of exp * attrs
| Assert of exp * attrs
| Assume of exp * attrs
| Special of string * defuse * attrs
| Comment of string * attrs (** A comment to be ignored *)
(* | Special of string * attrs (** A "special" statement. (does magic) *) *)
let val_false = Int(zero_big_int, reg_1)
let val_true = Int(unit_big_int, reg_1)
(** If possible, make a label that would be refered to by the given
expression. *)
let lab_of_exp = function
| Lab s -> Some(Name s)
| Int(i, t) ->
Some(Addr(Arithmetic.to_big_int (i,t)))
| _ -> None
(******************************************************************************)
(* Equality of SSA expressions and statements *)
(******************************************************************************)
let full_value_eq v1 v2 = v1 = v2
let quick_value_eq = full_value_eq
let num_exp = function
| Load _ -> 0
| Store _ -> 1
| BinOp _ -> 2
| UnOp _ -> 3
| Var _ -> 4
| Lab _ -> 5
| Int _ -> 6
| Cast _ -> 7
| Unknown _ -> 8
| Ite _ -> 9
| Extract _ -> 10
| Concat _ -> 11
| Phi _ -> 12
(* Returns elist, tlist, btlist, utlist, slist, , , ilist *)
let getargs_exp = function
| Load(e1,e2,e3,t1) -> [e1;e2;e3], [t1], [], [], [], [], [], []
| Store(e1,e2,e3,e4,t1) -> [e1;e2;e3;e4], [t1], [], [], [], [], [], []
| BinOp(bt,e1,e2) -> [e1;e2], [], [bt], [], [], [], [], []
| UnOp(ut,e1) -> [e1], [], [], [ut], [], [], [], []
| Var(v1) -> [], [], [], [], [], [], [v1], []
| Lab(s1) -> [], [], [], [], [s1], [], [], []
| Int(i1,t1) -> [], [t1], [], [], [], [], [], [i1]
| Cast(c1,t1,e1) -> [e1], [t1], [], [], [], [c1], [], []
| Unknown(s1,t1) -> [], [t1], [], [], [s1], [], [], []
| Ite(e1,e2,e3) -> [e1;e2;e3], [], [], [], [], [], [], []
| Extract(i1,i2,e1) -> [e1], [], [], [], [], [], [], [i1;i2]
| Concat(e1,e2) -> [e1;e2], [], [], [], [], [], [], []
| Phi(vl1) -> [], [], [], [], [], [], vl1, []
(** quick_exp_eq e1 e2 returns true if and only if the subexpressions
    in e1 and e2 are *physically* equal. *)
let quick_exp_eq e1 e2 =
if (num_exp e1) <> (num_exp e2) then false else
let l1,l2,l3,l4,l5,l6,l7,l8 = getargs_exp e1 in
let r1,r2,r3,r4,r5,r6,r7,r8 = getargs_exp e2 in
let b1 = List.for_all2 (==) l1 r1 in
let b2 = List.for_all2 (==) l2 r2 in
let b3 = List.for_all2 (==) l3 r3 in
let b4 = List.for_all2 (==) l4 r4 in
let b5 = List.for_all2 (==) l5 r5 in
let b6 = List.for_all2 (==) l6 r6 in
let b7 = List.for_all2 (==) l7 r7 in
let b8 = List.for_all2 (==) l8 r8 in
if b1 & b2 & b3 & b4 & b5 & b6 & b7 & b8 then
true else false
(** full_exp_eq e1 e2 returns true if and only if e1 and e2 are
structurally equivalent. *)
let rec full_exp_eq e1 e2 = e1 = e2
let (===) = full_exp_eq
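(* Editor's note, not part of the original file: quick_exp_eq may answer
   false for two structurally equal but separately allocated expressions
   (for example, two independently built Int (bi1, Reg 1) nodes), whereas
   full_exp_eq, and hence (===), compares them structurally and answers
   true. *)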
let num_stmt = function
| Move _ -> 0
| Jmp _ -> 1
| CJmp _ -> 2
| Label _ -> 3
| Halt _ -> 4
| Assert _ -> 5
| Assume _ -> 6
| Comment _ -> 7
| Special _ -> 8
let getargs_stmt = function
(* value, var, label, attr, string, exp *)
| Move(v,e,a) -> [], [v], [], [a], [], [e]
| CJmp(e1,e2,e3,a) -> [e1;e2;e3], [], [], [a], [], []
| Label(l,a) -> [], [], [l], [a], [], []
| Jmp(e,a)
| Halt(e,a)
| Assert(e,a)
| Assume(e,a) -> [e], [], [], [a], [], []
| Comment(s,a) -> [], [], [], [a], [s], []
| Special(s,{Var.defs; Var.uses},a) -> [], defs@uses, [], [a], [s], []
(** quick_stmt_eq returns true if and only if the subexpressions in e1
and e2 are *physically* equal. *)
let quick_stmt_eq s1 s2 =
if (num_stmt s1) <> (num_stmt s2) then false else
let l1,l2,l3,l4,l5,l6 = getargs_stmt s1 in
let r1,r2,r3,r4,r5,r6 = getargs_stmt s2 in
let b1 = List.for_all2 (==) l1 r1 in
let b2 = List.for_all2 (==) l2 r2 in
let b3 = List.for_all2 (==) l3 r3 in
let b4 = List.for_all2 (==) l4 r4 in
let b5 = List.for_all2 (==) l5 r5 in
let b6 = List.for_all2 (==) l6 r6 in
if b1 & b2 & b3 & b4 & b5 & b6 then
true
else if b1 & b2 & b3 & b4 & b5 then
(* s1 and s2 are not physically equal. But maybe their
subexpressions are physically equal. *)
List.for_all2 quick_exp_eq l6 r6
else
false
(** full_stmt_eq returns true if and only if e1 and e2 are
structurally equivalent. *)
let full_stmt_eq s1 s2 = s1 = s2
(* if (num_stmt s1) <> (num_stmt s2) then false else *)
(* let l1,l2,l3,l4,l5,l6 = getargs_stmt s1 in *)
(* let r1,r2,r3,r4,r5,r6 = getargs_stmt s2 in *)
(* let b1 = List.for_all2 (==) l1 r1 in (\* e must use == *\) *)
(* let b2 = List.for_all2 (=) l2 r2 in *)
(* let b3 = List.for_all2 (=) l3 r3 in *)
(* let b4 = List.for_all2 (=) l4 r4 in *)
(* let b5 = List.for_all2 (=) l5 r5 in *)
(* let b6 = List.for_all2 (==) l6 r6 in *)
(* if b1 & b2 & b3 & b4 & b5 & b6 then *)
(* true *)
(* else if b2 & b3 & b4 & b5 then *)
(* (\* e1 and e2 are not physically equal. But maybe the *)
(* subexpressions are structurally, but not physically, *)
(* equal. *\) *)
(* List.for_all2 full_exp_eq l6 r6 *)
(* && List.for_all2 full_value_eq l1 r1 *)
(* else *)
(* false *)
let get_attrs = function
| Move(_,_,a)
| Jmp(_,a)
| CJmp(_,_,_,a)
| Label(_,a)
| Halt(_,a)
| Assert(_,a)
| Assume(_,a)
| Comment(_,a)
| Special(_,_,a) -> a
let exp_true = Int(bi1, Reg 1)
let exp_false = Int(bi0, Reg 1)
| null |
https://raw.githubusercontent.com/0day1day/bap/eead3c03b5c9fb59d6b2c8ac1e1f3425f8601a2a/ocaml/ssa.ml
|
ocaml
|
type value =
| Var of var
| Lab of string
* Cast to a new type.
Should SSA have Lets?
* Joins variables that were assigned over different paths
* Assign the exp on the right to the
var on the left
* Jump to a label/address
* Conditional jump. If e1 is true, jumps to e2, otherwise jumps to e3
* A label we can jump to
* A comment to be ignored
| Special of string * attrs (** A "special" statement. (does magic)
* If possible, make a label that would be refered to by the given
expression.
****************************************************************************
Equality of SSA expressions and statements
****************************************************************************
* full_exp_eq e1 e2 returns true if and only if e1 and e2 are
structurally equivalent.
value, var, label, attr, string, exp
* quick_stmt_eq returns true if and only if the subexpressions in e1
and e2 are *physically* equal.
s1 and s2 are not physically equal. But maybe their
subexpressions are physically equal.
* full_stmt_eq returns true if and only if e1 and e2 are
structurally equivalent.
let l1,l2,l3,l4,l5,l6 = getargs_stmt s1 in
let r1,r2,r3,r4,r5,r6 = getargs_stmt s2 in
let b1 = List.for_all2 (==) l1 r1 in (\* e must use == *\)
let b2 = List.for_all2 (=) l2 r2 in
let b3 = List.for_all2 (=) l3 r3 in
let b5 = List.for_all2 (=) l5 r5 in
let b6 = List.for_all2 (==) l6 r6 in
if b1 & b2 & b3 & b4 & b5 & b6 then
true
else if b2 & b3 & b4 & b5 then
(\* e1 and e2 are not physically equal. But maybe the
subexpressions are structurally, but not physically,
equal. *\)
&& List.for_all2 full_value_eq l1 r1
else
false
|
open Big_int_Z
open Big_int_convenience
open BatListFull
open Type
open Var
type var = Var.t
type exp =
| BinOp of binop_type * exp * exp
| UnOp of unop_type * exp
| Var of var
| Lab of string
| Int of big_int * typ
| Unknown of string * typ
| Ite of exp * exp * exp
| Extract of big_int * big_int * exp
| Concat of exp * exp
| Phi of var list
type attrs = Type.attributes
type stmt =
| CJmp of exp * exp * exp * attrs
| Halt of exp * attrs
| Assert of exp * attrs
| Assume of exp * attrs
| Special of string * defuse * attrs
let val_false = Int(zero_big_int, reg_1)
let val_true = Int(unit_big_int, reg_1)
let lab_of_exp = function
| Lab s -> Some(Name s)
| Int(i, t) ->
Some(Addr(Arithmetic.to_big_int (i,t)))
| _ -> None
let full_value_eq v1 v2 = v1 = v2
let quick_value_eq = full_value_eq
let num_exp = function
| Load _ -> 0
| Store _ -> 1
| BinOp _ -> 2
| UnOp _ -> 3
| Var _ -> 4
| Lab _ -> 5
| Int _ -> 6
| Cast _ -> 7
| Unknown _ -> 8
| Ite _ -> 9
| Extract _ -> 10
| Concat _ -> 11
| Phi _ -> 12
let getargs_exp = function
| Load(e1,e2,e3,t1) -> [e1;e2;e3], [t1], [], [], [], [], [], []
| Store(e1,e2,e3,e4,t1) -> [e1;e2;e3;e4], [t1], [], [], [], [], [], []
| BinOp(bt,e1,e2) -> [e1;e2], [], [bt], [], [], [], [], []
| UnOp(ut,e1) -> [e1], [], [], [ut], [], [], [], []
| Var(v1) -> [], [], [], [], [], [], [v1], []
| Lab(s1) -> [], [], [], [], [s1], [], [], []
| Int(i1,t1) -> [], [t1], [], [], [], [], [], [i1]
| Cast(c1,t1,e1) -> [e1], [t1], [], [], [], [c1], [], []
| Unknown(s1,t1) -> [], [t1], [], [], [s1], [], [], []
| Ite(e1,e2,e3) -> [e1;e2;e3], [], [], [], [], [], [], []
| Extract(i1,i2,e1) -> [e1], [], [], [], [], [], [], [i1;i2]
| Concat(e1,e2) -> [e1;e2], [], [], [], [], [], [], []
| Phi(vl1) -> [], [], [], [], [], [], vl1, []
let quick_exp_eq e1 e2 =
if (num_exp e1) <> (num_exp e2) then false else
let l1,l2,l3,l4,l5,l6,l7,l8 = getargs_exp e1 in
let r1,r2,r3,r4,r5,r6,r7,r8 = getargs_exp e2 in
let b1 = List.for_all2 (==) l1 r1 in
let b2 = List.for_all2 (==) l2 r2 in
let b3 = List.for_all2 (==) l3 r3 in
let b4 = List.for_all2 (==) l4 r4 in
let b5 = List.for_all2 (==) l5 r5 in
let b6 = List.for_all2 (==) l6 r6 in
let b7 = List.for_all2 (==) l7 r7 in
let b8 = List.for_all2 (==) l8 r8 in
if b1 & b2 & b3 & b4 & b5 & b6 & b7 & b8 then
true else false
let rec full_exp_eq e1 e2 = e1 = e2
let (===) = full_exp_eq
let num_stmt = function
| Move _ -> 0
| Jmp _ -> 1
| CJmp _ -> 2
| Label _ -> 3
| Halt _ -> 4
| Assert _ -> 5
| Assume _ -> 6
| Comment _ -> 7
| Special _ -> 8
let getargs_stmt = function
| Move(v,e,a) -> [], [v], [], [a], [], [e]
| CJmp(e1,e2,e3,a) -> [e1;e2;e3], [], [], [a], [], []
| Label(l,a) -> [], [], [l], [a], [], []
| Jmp(e,a)
| Halt(e,a)
| Assert(e,a)
| Assume(e,a) -> [e], [], [], [a], [], []
| Comment(s,a) -> [], [], [], [a], [s], []
| Special(s,{Var.defs; Var.uses},a) -> [], defs@uses, [], [a], [s], []
let quick_stmt_eq s1 s2 =
if (num_stmt s1) <> (num_stmt s2) then false else
let l1,l2,l3,l4,l5,l6 = getargs_stmt s1 in
let r1,r2,r3,r4,r5,r6 = getargs_stmt s2 in
let b1 = List.for_all2 (==) l1 r1 in
let b2 = List.for_all2 (==) l2 r2 in
let b3 = List.for_all2 (==) l3 r3 in
let b4 = List.for_all2 (==) l4 r4 in
let b5 = List.for_all2 (==) l5 r5 in
let b6 = List.for_all2 (==) l6 r6 in
if b1 & b2 & b3 & b4 & b5 & b6 then
true
else if b1 & b2 & b3 & b4 & b5 then
List.for_all2 quick_exp_eq l6 r6
else
false
let full_stmt_eq s1 s2 = s1 = s2
let get_attrs = function
| Move(_,_,a)
| Jmp(_,a)
| CJmp(_,_,_,a)
| Label(_,a)
| Halt(_,a)
| Assert(_,a)
| Assume(_,a)
| Comment(_,a)
| Special(_,_,a) -> a
let exp_true = Int(bi1, Reg 1)
let exp_false = Int(bi0, Reg 1)
|
6b79821c87ae0d09b78be1d5a73ab6a139aeb7e59cf5aec63fc34f1c99bdedca
|
GaloisInc/what4
|
ABC.hs
|
{-|
Module      : What4.Solver.ABC
Copyright   : (c) Galois, Inc 2014-2016
Maintainer  : Joe Hendrix <>
License     : BSD3

Solver adapter and associated operations for connecting the
Crucible simple builder backend to the ABC And-Inverter Graph (AIG)
representation.
-}
{-# LANGUAGE CPP #-}
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DoAndIfThenElse #-}
{-# LANGUAGE GADTs #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RankNTypes #-}
{-# LANGUAGE ScopedTypeVariables #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE TypeOperators #-}
{-# OPTIONS_GHC -Werror #-}
module What4.Solver.ABC
( Network
, newNetwork
, withNetwork
, checkSat
, writeDimacsFile
, runExternalDimacsSolver
, GIA.SomeGraph(..)
, writeAig
, abcQbfIterations
, abcOptions
, abcAdapter
, satCommand
, genericSatOptions
, genericSatAdapter
) where
#if !MIN_VERSION_base(4,13,0)
import Control.Monad.Fail( MonadFail )
#endif
import Control.Concurrent
import Control.Exception hiding (evaluate)
import Control.Lens
import Control.Monad.Identity
import Control.Monad.ST
import Data.Bits
import qualified Data.BitVector.Sized as BV
import qualified Data.ABC as GIA
import qualified Data.ABC.GIA as GIA
import qualified Data.AIG.Operations as AIG
import qualified Data.AIG.Interface as AIG
import qualified Data.ByteString.UTF8 as UTF8
import qualified Data.Foldable as Fold
import qualified Data.HashSet as HSet
import Data.IORef
import Data.List (zipWith4)
import Data.List.NonEmpty (NonEmpty(..))
import qualified Data.Map.Strict as Map
import Data.Parameterized.HashTable (HashTable)
import qualified Data.Parameterized.HashTable as H
import Data.Parameterized.Nonce (Nonce)
import Data.Parameterized.Some
import Data.Set (Set)
import qualified Data.Set as Set
import qualified Data.Text as T
import Foreign.C.Types
import Prettyprinter
import System.Directory
import System.IO
import qualified System.IO.Streams as Streams
import System.Process
import What4.BaseTypes
import What4.Concrete
import What4.Config
import What4.Interface
( getConfiguration, IsExprBuilder, logSolverEvent
, SolverEvent(..), SolverStartSATQuery(..), SolverEndSATQuery(..), andAllOf )
import What4.Expr
import What4.Expr.Builder
import qualified What4.Expr.BoolMap as BM
import What4.Expr.GroundEval
import qualified What4.Expr.UnaryBV as UnaryBV
import What4.Expr.VarIdentification
import qualified What4.Expr.WeightedSum as WSum
import What4.Panic
import What4.ProgramLoc
import What4.Solver.Adapter
import What4.SatResult
import What4.Utils.AbstractDomains
import qualified What4.Utils.BVDomain as BVD
import qualified What4.Utils.BVDomain.Arith as A
import qualified What4.Utils.BVDomain.Bitwise as B
import What4.Utils.Complex
import qualified What4.Utils.Environment as Env
import What4.Utils.MonadST
import What4.Utils.Streams
import What4.Utils.StringLiteral
abcQbfIterations :: ConfigOption BaseIntegerType
abcQbfIterations = configOption BaseIntegerRepr "abc.qbf_max_iterations"
abcOptions :: [ConfigDesc]
abcOptions =
[ opt abcQbfIterations (ConcreteInteger (toInteger (maxBound :: CInt)))
("Max number of iterations to run ABC's QBF solver" :: T.Text)
]
abcAdapter :: SolverAdapter st
abcAdapter =
SolverAdapter
{ solver_adapter_name = "abc"
, solver_adapter_config_options = abcOptions
, solver_adapter_check_sat = \sym logData ps cont -> do
p <- andAllOf sym folded ps
res <- checkSat sym logData p
cont . runIdentity . traverseSatResult (\x -> pure (x,Nothing)) pure $ res
, solver_adapter_write_smt2 = \_ _ _ -> do
fail "ABC backend does not support writing SMTLIB2 files."
}
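
-- A minimal usage sketch (hypothetical helper, not part of the original
-- module): drive the ABC adapter through its record field.  It assumes
-- 'defaultLogData' is the default 'LogData' exported by What4.Solver.Adapter.
exampleAbcCheckSat :: ExprBuilder t st fs -> [BoolExpr t] -> IO Bool
exampleAbcCheckSat sym ps =
  solver_adapter_check_sat abcAdapter sym defaultLogData ps $ \res ->
    case res of
      Sat _ -> return True   -- a satisfying assignment was found
      _     -> return False  -- Unsat or Unknown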
-- | Command to run sat solver.
satCommand :: ConfigOption (BaseStringType Unicode)
satCommand = configOption knownRepr "sat_command"
genericSatOptions :: [ConfigDesc]
genericSatOptions =
[ opt satCommand (ConcreteString "glucose $1")
("Generic SAT solving command to run" :: T.Text)
]
genericSatAdapter :: SolverAdapter st
genericSatAdapter =
SolverAdapter
{ solver_adapter_name = "sat"
, solver_adapter_config_options = genericSatOptions
, solver_adapter_check_sat = \sym logData ps cont -> do
let cfg = getConfiguration sym
cmd <- T.unpack <$> (getOpt =<< getOptionSetting satCommand cfg)
let mkCommand path = do
let var_map = Map.fromList [("1",path)]
Env.expandEnvironmentPath var_map cmd
p <- andAllOf sym folded ps
mmdl <- runExternalDimacsSolver (logCallbackVerbose logData) mkCommand p
cont . runIdentity . traverseSatResult (\x -> pure (x,Nothing)) pure $ mmdl
, solver_adapter_write_smt2 = \_ _ _ -> do
fail "SAT backend does not support writing SMTLIB2 files."
}
-- | Maps expression types to the representation used in the ABC backend.
-- The ABC backend only supports Bools and bitvectors, so only constants
-- are supported for the other types.
type family LitValue s (tp :: BaseType) where
LitValue s BaseBoolType = GIA.Lit s
LitValue s (BaseBVType n) = AIG.BV (GIA.Lit s)
LitValue s BaseIntegerType = Integer
LitValue s BaseRealType = Rational
LitValue s (BaseStringType si) = StringLiteral si
LitValue s BaseComplexType = Complex Rational
-- | Newtype wrapper around names.
data NameType s (tp :: BaseType) where
B :: GIA.Lit s -> NameType s BaseBoolType
BV :: NatRepr n -> AIG.BV (GIA.Lit s) -> NameType s (BaseBVType n)
GroundInt :: Integer -> NameType s BaseIntegerType
GroundRat :: Rational -> NameType s BaseRealType
GroundString :: StringLiteral si -> NameType s (BaseStringType si)
GroundComplex :: Complex Rational -> NameType s BaseComplexType
-- | A variable binding in ABC.
data VarBinding t s where
BoolBinding :: Nonce t BaseBoolType
-> GIA.Lit s
-> VarBinding t s
BVBinding :: (1 <= w)
=> NatRepr w
-> Nonce t (BaseBVType w)
-> AIG.BV (GIA.Lit s)
-> GIA.Lit s {- side condition -}
-> VarBinding t s
-- | Handle to the ABC interface.
data Network t s = Network { gia :: GIA.GIA s
, nameCache :: !(HashTable RealWorld (Nonce t) (NameType s))
-- | Holds outputs in reverse order when used to write
-- AIGs
, revOutputs :: !(IORef [GIA.Lit s])
}
memoExprNonce :: Network t s
-> Nonce t tp
-> IO (NameType s tp)
-> IO (NameType s tp)
memoExprNonce ntk n ev = do
let c = nameCache ntk
mnm <- liftST $ H.lookup c n
case mnm of
Just nm -> return nm
Nothing -> do
r <- ev
liftST $ H.insert c n r
return r
eval :: Network t s -> Expr t tp -> IO (NameType s tp)
eval _ (BoolExpr b _) =
return $! if b then B GIA.true else B GIA.false
eval _ (SemiRingLiteral SemiRingIntegerRepr n _) = return (GroundInt n)
eval _ (SemiRingLiteral SemiRingRealRepr r _) = return (GroundRat r)
eval ntk (SemiRingLiteral (SemiRingBVRepr _ w) bv _) =
return $ BV w $ AIG.bvFromInteger (gia ntk) (widthVal w) (BV.asUnsigned bv)
eval _ (StringExpr s _) = return (GroundString s)
eval _ e@FloatExpr{} = failTerm e "floating-point expression"
eval ntk (NonceAppExpr e) = do
memoExprNonce ntk (nonceExprId e) $ do
bitblastPred ntk e
eval ntk (AppExpr a) = do
memoExprNonce ntk (appExprId a) $ do
bitblastExpr ntk a
eval ntk (BoundVarExpr info) = do
memoExprNonce ntk (bvarId info) $ do
case bvarKind info of
QuantifierVarKind ->
error $ "Bound variable is not defined."
LatchVarKind ->
error $ "Latches that are not defined."
UninterpVarKind ->
error $ "Uninterpreted variable that was not defined."
eval' :: Network t s -> Expr t tp -> IO (LitValue s tp)
eval' ntk e = do
r <- eval ntk e
case r of
B l -> return l
BV _ v -> return v
GroundInt c -> return c
GroundRat c -> return c
GroundComplex c -> return c
GroundString c -> return c
failAt :: ProgramLoc -> String -> IO a
failAt l msg = fail $ show $
vcat
[ pretty msg
, "From term created at" <+> pretty (plSourceLoc l)
]
failTerm :: Expr t tp -> String -> IO a
failTerm e nm = do
fail $ show $
vcat
[ "The" <+> pretty nm <+> "created at"
<+> pretty (plSourceLoc (exprLoc e))
<+> "is not supported by ABC:"
, indent 2 (ppExpr e)
]
bitblastPred :: Network t s -> NonceAppExpr t tp -> IO (NameType s tp)
bitblastPred h e = do
case nonceExprApp e of
Annotation _tpr _n x -> eval h x
Forall _ x -> eval h x
Exists _ x -> eval h x
ArrayFromFn{} -> fail "ABC does not support uninterpreted functions"
MapOverArrays{} -> fail "ABC does not support uninterpreted functions"
ArrayTrueOnEntries{} -> fail "ABC does not support uninterpreted functions"
FnApp{} -> fail "ABC does not support uninterpreted functions"
-- | Create a representation of the expression as Boolean variables.
bitblastExpr :: forall t s tp . Network t s -> AppExpr t tp -> IO (NameType s tp)
bitblastExpr h ae = do
let g = gia h
let intFail :: IO a
intFail = failTerm (AppExpr ae) "integer expression"
let realFail :: IO a
realFail = failTerm (AppExpr ae) "real expression"
let arrayFail :: IO a
arrayFail = failTerm (AppExpr ae) "array expression"
let structFail :: IO a
structFail = failTerm (AppExpr ae) "struct expression"
let floatFail :: IO a
floatFail = failTerm (AppExpr ae) "floating-point expression"
let stringFail :: IO a
stringFail = failTerm (AppExpr ae) "string expression"
case appExprApp ae of
------------------------------------------------------------------------
    -- Integer operations
SemiRingLe OrderedSemiRingIntegerRepr _ _ -> intFail
IntAbs{} -> intFail
IntDiv{} -> intFail
IntMod{} -> intFail
IntDivisible{} -> intFail
------------------------------------------------------------------------
-- Real value operations
SemiRingLe OrderedSemiRingRealRepr _ _ -> realFail
RealDiv{} -> realFail
RealSqrt{} -> realFail
RealSpecialFunction{} -> realFail
--------------------------------------------------------------------
    -- Bitvector operations
BaseIte bt _ c x y ->
case bt of
BaseBoolRepr ->
do c' <- eval' h c
B <$> AIG.lazyMux g c' (eval' h x) (eval' h y)
BaseBVRepr w ->
do c' <- eval' h c
BV w <$> AIG.iteM g c' (eval' h x) (eval' h y)
BaseIntegerRepr -> intFail
BaseRealRepr -> realFail
BaseComplexRepr -> realFail
BaseFloatRepr _ -> floatFail
BaseArrayRepr _ _ -> arrayFail
BaseStructRepr _ -> structFail
BaseStringRepr _ -> stringFail
BaseEq bt x y ->
case bt of
BaseBoolRepr -> B <$> join (AIG.eq g <$> eval' h x <*> eval' h y)
BaseBVRepr _ -> B <$> join (AIG.bvEq g <$> eval' h x <*> eval' h y)
BaseIntegerRepr -> intFail
BaseRealRepr -> realFail
BaseComplexRepr -> realFail
BaseFloatRepr _ -> floatFail
BaseArrayRepr _ _ -> arrayFail
BaseStructRepr _ -> structFail
BaseStringRepr _ -> stringFail
BVTestBit i xe -> assert (i <= fromIntegral (maxBound :: Int)) $
(\v -> B $ v AIG.! (fromIntegral i)) <$> eval' h xe
BVSlt x y -> B <$> join (AIG.slt g <$> eval' h x <*> eval' h y)
BVUlt x y -> B <$> join (AIG.ult g <$> eval' h x <*> eval' h y)
BVUnaryTerm u -> do
let w = UnaryBV.width u
let cns v = return $ AIG.bvFromInteger g (widthVal w) v
let ite :: BoolExpr t
-> AIG.BV (GIA.Lit s)
-> AIG.BV (GIA.Lit s)
-> IO (AIG.BV (GIA.Lit s))
ite p x y = do
c <- eval' h p
AIG.ite g c x y
BV w <$> UnaryBV.sym_evaluate cns ite u
BVConcat w xe ye -> do
x <- eval' h xe
y <- eval' h ye
return $ BV w $ x AIG.++ y
BVSelect idx n xe -> do
x <- eval' h xe
return $ BV n $ AIG.sliceRev x (fromIntegral (natValue idx)) (fromIntegral (natValue n))
NotPred xe -> B . AIG.not <$> eval' h xe
ConjPred xs ->
let pol (x,BM.Positive) = eval' h x
pol (x,BM.Negative) = AIG.not <$> eval' h x
in
case BM.viewBoolMap xs of
BM.BoolMapUnit -> return (B GIA.true)
BM.BoolMapDualUnit -> return (B GIA.false)
BM.BoolMapTerms (t:|ts) ->
B <$> join (foldM (AIG.lAnd' g) <$> pol t <*> mapM pol ts)
SemiRingSum s ->
case WSum.sumRepr s of
SemiRingBVRepr BVArithRepr w -> BV w <$> WSum.evalM (AIG.add g) smul cnst s
where
smul c e =
            -- NB, better constant folding if the constant is the second value
flip (AIG.mul g) (AIG.bvFromInteger g (widthVal w) (BV.asUnsigned c)) =<< eval' h e
cnst c = pure (AIG.bvFromInteger g (widthVal w) (BV.asUnsigned c))
SemiRingBVRepr BVBitsRepr w -> BV w <$> WSum.evalM (AIG.zipWithM (AIG.lXor' g)) smul cnst s
where
smul c e = AIG.zipWithM (AIG.lAnd' g) (AIG.bvFromInteger g (widthVal w) (BV.asUnsigned c)) =<< eval' h e
cnst c = pure (AIG.bvFromInteger g (widthVal w) (BV.asUnsigned c))
SemiRingIntegerRepr -> intFail
SemiRingRealRepr -> realFail
SemiRingProd pd ->
case WSum.prodRepr pd of
SemiRingBVRepr BVArithRepr w ->
maybe (BV w (AIG.bvFromInteger g (widthVal w) 1)) (BV w) <$>
WSum.prodEvalM (AIG.mul g) (eval' h) pd
SemiRingBVRepr BVBitsRepr w ->
maybe (BV w (AIG.bvFromInteger g (widthVal w) (maxUnsigned w))) (BV w) <$>
WSum.prodEvalM (AIG.zipWithM (AIG.lAnd' g)) (eval' h) pd
SemiRingIntegerRepr -> intFail
SemiRingRealRepr -> realFail
BVOrBits w bs ->
do bs' <- traverse (eval' h) (bvOrToList bs)
case bs' of
[] -> return (BV w (AIG.bvFromInteger g (widthVal w) 0))
x:xs -> BV w <$> foldM (AIG.zipWithM (AIG.lOr' g)) x xs
BVUdiv w x y -> do
BV w <$> join (AIG.uquot g <$> eval' h x <*> eval' h y)
BVUrem w x y -> do
BV w <$> join (AIG.urem g <$> eval' h x <*> eval' h y)
BVSdiv w x y ->
BV w <$> join (AIG.squot g <$> eval' h x <*> eval' h y)
BVSrem w x y ->
BV w <$> join (AIG.srem g <$> eval' h x <*> eval' h y)
BVShl w x y -> BV w <$> join (AIG.shl g <$> eval' h x <*> eval' h y)
BVLshr w x y -> BV w <$> join (AIG.ushr g <$> eval' h x <*> eval' h y)
BVAshr w x y -> BV w <$> join (AIG.sshr g <$> eval' h x <*> eval' h y)
BVRol w x y -> BV w <$> join (AIG.rol g <$> eval' h x <*> eval' h y)
BVRor w x y -> BV w <$> join (AIG.ror g <$> eval' h x <*> eval' h y)
BVFill w xe -> BV w . AIG.bvFromList . replicate (widthVal w) <$> eval' h xe
BVPopcount w xe -> do
x <- eval' h xe
BV w <$> AIG.popCount g x
BVCountLeadingZeros w xe -> do
x <- eval' h xe
BV w <$> AIG.countLeadingZeros g x
BVCountTrailingZeros w xe -> do
x <- eval' h xe
BV w <$> AIG.countTrailingZeros g x
BVZext w' xe -> do
x <- eval' h xe
return $ BV w' $ AIG.zext g x (widthVal w')
BVSext w' xe -> do
x <- eval' h xe
return $ BV w' $ AIG.sext g x (widthVal w')
------------------------------------------------------------------------
-- Floating point operations
FloatNeg{} -> floatFail
FloatAbs{} -> floatFail
FloatSqrt{} -> floatFail
FloatAdd{} -> floatFail
FloatSub{} -> floatFail
FloatMul{} -> floatFail
FloatDiv{} -> floatFail
FloatRem{} -> floatFail
FloatFMA{} -> floatFail
FloatFpEq{} -> floatFail
FloatLe{} -> floatFail
FloatLt{} -> floatFail
FloatIsNaN{} -> floatFail
FloatIsInf{} -> floatFail
FloatIsZero{} -> floatFail
FloatIsPos{} -> floatFail
FloatIsNeg{} -> floatFail
FloatIsSubnorm{} -> floatFail
FloatIsNorm{} -> floatFail
FloatCast{} -> floatFail
FloatRound{} -> floatFail
FloatFromBinary{} -> floatFail
BVToFloat{} -> floatFail
SBVToFloat{} -> floatFail
RealToFloat{} -> floatFail
FloatToBV{} -> floatFail
FloatToSBV{} -> floatFail
FloatToReal{} -> floatFail
FloatToBinary{} -> floatFail
FloatSpecialFunction{} -> floatFail
------------------------------------------------------------------------
-- Array operations
ArrayMap{} -> arrayFail
ConstantArray{} -> arrayFail
SelectArray{} -> arrayFail
UpdateArray{} -> arrayFail
CopyArray{} -> arrayFail
SetArray{} -> arrayFail
EqualArrayRange{} -> arrayFail
------------------------------------------------------------------------
-- String operations
StringAppend{} -> stringFail
StringLength{} -> stringFail
StringContains{} -> stringFail
StringIsPrefixOf{} -> stringFail
StringIsSuffixOf{} -> stringFail
StringIndexOf{} -> stringFail
StringSubstring{} -> stringFail
------------------------------------------------------------------------
-- Conversions.
RealIsInteger{} -> realFail
IntegerToReal{} -> realFail
BVToInteger{} -> intFail
SBVToInteger{} -> intFail
RoundReal{} -> realFail
RoundEvenReal{} -> realFail
FloorReal{} -> realFail
CeilReal{} -> realFail
RealToInteger{} -> intFail
IntegerToBV{} -> intFail
------------------------------------------------------------------------
    -- Complex operations
Cplx (r :+ i) -> do
GroundComplex <$> ((:+) <$> eval' h r <*> eval' h i)
RealPart c -> do
GroundRat . realPart <$> eval' h c
ImagPart c -> do
GroundRat . imagPart <$> eval' h c
------------------------------------------------------------------------
-- Structs
StructCtor{} -> structFail
StructField{} -> structFail
newNetwork :: IO (GIA.SomeGraph (Network t))
newNetwork = do
GIA.SomeGraph g <- GIA.newGIA
nc <- liftST $ H.new
outputsRef <- newIORef []
let s = Network { gia = g
, nameCache = nc
, revOutputs = outputsRef
}
return (GIA.SomeGraph s)
withNetwork :: (forall s . Network t s -> IO a) -> IO a
withNetwork m = do
GIA.SomeGraph h <- newNetwork
m h
data SizedBV = forall w . SizedBV (NatRepr w) (BV.BV w)
asBV :: Monad m => (l -> m Bool) -> AIG.BV l -> m SizedBV
asBV f v = do
x <- go 0 0
Some n <- return $ mkNatRepr (fromIntegral nInt)
return $ SizedBV n (BV.mkBV n x)
where nInt = AIG.length v
go r i | i == nInt = return r
go r i = do
b <- f (v `AIG.at` i)
let q = if b then 1 else 0
go ((r `shiftL` 1) .|. q) (i+1)
-- | Look to see if literals have been assigned to expression.
evalNonce :: Network t s
-> Nonce t tp
-> (GIA.Lit s -> Bool)
-> IO (GroundValue tp)
-> IO (GroundValue tp)
evalNonce ntk n eval_fn fallback = do
-- Look to see if literals have been assigned to expression.
mnm <- liftST $ H.lookup (nameCache ntk) n
case mnm of
Just (B l) -> return $ eval_fn l
Just (BV w bv) -> do
SizedBV w' bv' <- asBV (return . eval_fn) bv
case w `testEquality` w' of
Just Refl -> return bv'
Nothing -> panic "What4.Solver.ABC.evalNonce"
["Got back bitvector with wrong width"]
Just (GroundInt x) -> return x
Just (GroundRat x) -> return x
Just (GroundComplex c) -> return c
Just (GroundString c) -> return c
Nothing -> fallback
evaluateSatModel :: forall t s
. Network t s
                 -> [Bool] -- ^ Fixed input arguments (used for QBF).
-> GIA.SatResult
-> IO (SatResult (GroundEvalFn t) ())
evaluateSatModel ntk initial_args sat_res = do
case sat_res of
GIA.Sat assignment -> do
-- Get literal evaluation function.
eval_fn <- GIA.evaluator (gia ntk) (assignment ++ initial_args)
      -- Create cache for results.
groundCache <- newIdxCache
let f :: Expr t tp -> IO (GroundValue tp)
f e = case exprMaybeId e of
Nothing -> evalGroundExpr f e
Just n ->
fmap unGVW $ idxCacheEval groundCache e $ fmap GVW $ do
evalNonce ntk n eval_fn $ do
evalGroundExpr f e
return $ Sat $ GroundEvalFn f
GIA.Unsat -> return (Unsat ())
GIA.SatUnknown ->
fail "evaluateSatModel: ABC returned unknown sat result"
runQBF :: Network t s
-> Int
-- ^ Number of existential variables.
-> GIA.Lit s
       -- ^ Condition to check satisfiability of.
-> CInt
-- ^ Maximum number of iterations to run.
-> IO (SatResult (GroundEvalFn t) ())
runQBF ntk e_cnt cond max_iter = do
tot_cnt <- GIA.inputCount (gia ntk)
let a_cnt = tot_cnt - e_cnt
initial_forall = replicate a_cnt False
mr <- GIA.check_exists_forall (gia ntk) e_cnt cond initial_forall max_iter
case mr of
Left m -> fail m
Right r -> evaluateSatModel ntk initial_forall r
addOutput :: Network t s -> GIA.Lit s -> IO ()
addOutput h l = do
modifyIORef' (revOutputs h) $ (l:)
outputExpr :: Network t s -> Expr t tp -> IO ()
outputExpr h e = do
r <- eval h e
case r of
B l -> addOutput h l
BV _ v -> Fold.traverse_ (addOutput h) v
GroundInt _ -> fail $ "Cannot bitblast integer values."
GroundRat _ -> fail $ "Cannot bitblast real values."
GroundComplex _ -> fail $ "Cannot bitblast complex values."
GroundString _ -> fail $ "Cannot bitblast string values."
-- | @getForallPred ntk v p ev av@ adds assertion that:
-- @Ep.Eev.Aav.p = v@.
getForallPred :: Network t s
-> Some (QuantifierInfo t)
-> GIA.Lit s
-> VarBinding t s
-> VarBinding t s
-> IO (GIA.Lit s)
getForallPred ntk (Some b) p e_binding a_binding = do
let g = gia ntk
let c = nameCache ntk
let e = boundTopTerm b
let t = boundInnerTerm b
-- Bind top-most quantifier to e
liftST $ H.insert c (nonceExprId e) (B p)
  -- Switch on quantifier type.
case boundQuant b of
ForallBound -> do
-- Generate predicate p => (Av. t)
a_conds <- recordBinding ntk a_binding
B c_a <- eval ntk t
c_a' <- GIA.implies g a_conds c_a
c1 <- GIA.implies g p c_a'
-- Generate predicate (Av. t) => p
e_conds <- recordBinding ntk e_binding
B c_e <- eval ntk t
c_e' <- GIA.implies g e_conds c_e
c2 <- GIA.implies g c_e' p
-- Delete binding to elements.
deleteBinding ntk e_binding
-- Return both predicates.
GIA.and g c1 c2
ExistBound -> do
-- Generate predicate p => (Ev. t)
e_conds <- recordBinding ntk e_binding
B c_e <- eval ntk t
c_e' <- GIA.and g e_conds c_e
c1 <- GIA.implies g p c_e'
-- Generate predicate (Ev. t) => p
a_conds <- recordBinding ntk a_binding
B c_a <- eval ntk t
c_a' <- GIA.and g a_conds c_a
c2 <- GIA.implies g c_a' p
-- Delete binding to elements.
deleteBinding ntk a_binding
-- Return both predicates.
GIA.and g c1 c2
-- | Check variables are supported by ABC.
checkSupportedByAbc :: MonadFail m => CollectedVarInfo t -> m ()
checkSupportedByAbc vars = do
let errors = Fold.toList (vars^.varErrors)
  -- Check no errors were reported in result.
when (not (null errors)) $ do
fail $ show $ vcat
[ "This formula is not supported by abc:"
, indent 2 (vcat errors)
]
checkNoLatches :: MonadFail m => CollectedVarInfo t -> m ()
checkNoLatches vars = do
when (not (Set.null (vars^.latches))) $ do
fail "Cannot check satisfiability of circuits with latches."
-- | Check that var result contains no universally quantified variables.
checkNoForallVars :: MonadFail m => CollectedVarInfo t -> m ()
checkNoForallVars vars = do
unless (Map.null (vars^.forallQuantifiers)) $ do
fail "This operation does not support universally quantified variables."
recordUninterpConstants :: Network t s -> Set (Some (ExprBoundVar t)) -> IO (GIA.Lit s)
recordUninterpConstants ntk s = do
let recordCon v = recordBinding ntk =<< addBoundVar' ntk v
conds <- mapM recordCon (Fold.toList s)
foldM (AIG.lAnd' (gia ntk)) GIA.true conds
recordBoundVar :: Network t s -> Some (QuantifierInfo t) -> IO (GIA.Lit s)
recordBoundVar ntk info = do
recordBinding ntk =<< addBoundVar ntk info
-- | Expression to check is satisfiable.
checkSat :: IsExprBuilder sym
=> sym
-> LogData
-> BoolExpr t
-> IO (SatResult (GroundEvalFn t) ())
checkSat sym logData e = do
let cfg = getConfiguration sym
-- Get variables in expression.
let vars = predicateVarInfo e
max_qbf_iter <- fromInteger <$> (getOpt =<< getOptionSetting abcQbfIterations cfg)
checkSupportedByAbc vars
checkNoLatches vars
logSolverEvent sym
(SolverStartSATQuery $ SolverStartSATQueryRec
{ satQuerySolverName = "ABC"
, satQueryReason = logReason logData
})
withNetwork $ \ntk -> do
-- Get network
let g = gia ntk
-- Add bindings for uninterpreted bindings.
sideconds <- recordUninterpConstants ntk (vars^.uninterpConstants)
-- Add bindings for bound variables.
let e_quants = vars^.existQuantifiers
let a_quants = vars^.forallQuantifiers
let e_only_quants = Fold.toList $ Map.difference e_quants a_quants
let a_only_quants = Fold.toList $ Map.difference a_quants e_quants
let both_quants = Fold.toList $ Map.intersection a_quants e_quants
-- Add bindings for existential variables.
mapM_ (recordBoundVar ntk) e_only_quants
-- Get predicate to hold value on whether quantifier is true
-- true or false.
both_preds <- mapM (\_ -> GIA.newInput (gia ntk)) both_quants
-- Get existential variables for representing both bound variables.
e_both_bindings <- mapM (addBoundVar ntk) both_quants
exist_cnt <- GIA.inputCount g
-- Add variables that are only universally quantified.
mapM_ (recordBoundVar ntk) a_only_quants
    -- Get universal variables for representing both bound variables.
a_both_bindings <- mapM (addBoundVar ntk) both_quants
-- Evaluate lit.
B c <- eval ntk e
-- Add predicates for both vars.
preds <- sequence $ do
zipWith4 (getForallPred ntk) both_quants both_preds e_both_bindings a_both_bindings
-- Get final pred.
p <- foldM (AIG.lAnd' (gia ntk)) c (sideconds : preds)
-- Add bindings for uninterpreted bindings.
res <- if Map.null a_quants then do
logCallbackVerbose logData 2 "Calling ABC's SAT solver"
r <- GIA.checkSat (gia ntk) p
evaluateSatModel ntk [] r
else do
logCallbackVerbose logData 2 "Calling ABC's QBF solver"
runQBF ntk exist_cnt p max_qbf_iter
logSolverEvent sym
(SolverEndSATQuery $ SolverEndSATQueryRec
{ satQueryResult = forgetModelAndCore res
, satQueryError = Nothing
})
return res
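
-- A minimal sketch (hypothetical helper, not part of the original module):
-- check a single predicate and, when it is satisfiable, evaluate another
-- expression in the returned model.  It assumes 'defaultLogData' comes from
-- What4.Solver.Adapter and 'groundEval' is the field selector of 'GroundEvalFn'.
exampleCheckAndEval ::
  IsExprBuilder sym => sym -> BoolExpr t -> Expr t tp -> IO (Maybe (GroundValue tp))
exampleCheckAndEval sym p x = do
  res <- checkSat sym defaultLogData p
  case res of
    Sat fn -> Just <$> groundEval fn x
    _      -> return Nothing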
-- | Associate an element in a binding with the term.
recordBinding :: Network t s -> VarBinding t s -> IO (GIA.Lit s)
recordBinding ntk b = liftST $
case b of
BoolBinding n r ->
do H.insert (nameCache ntk) n (B r)
return GIA.true
BVBinding w n r sidecond ->
do H.insert (nameCache ntk) n (BV w r)
return sidecond
deleteBinding :: Network t s -> VarBinding t s -> IO ()
deleteBinding ntk b = liftST $
case b of
BoolBinding n _ -> H.delete (nameCache ntk) n
BVBinding _ n _ _ -> H.delete (nameCache ntk) n
freshBV :: AIG.IsAIG l g => g s -> NatRepr n -> IO (AIG.BV (l s))
freshBV g w = AIG.generateM_msb0 (widthVal w) (\_ -> GIA.newInput g)
-- | Add an uninterpreted variable.
freshBinding :: Network t s
-> Nonce t tp
-- ^ Unique id for variable.
-> ProgramLoc
-- ^ Location of binding.
-> BaseTypeRepr tp
-- ^ Type of variable
-> Maybe (AbstractValue tp)
-- ^ Bounds on the value
-> IO (VarBinding t s)
freshBinding ntk n l tp mbnds = do
let g = gia ntk
case tp of
BaseBoolRepr -> do
BoolBinding n <$> GIA.newInput g
BaseBVRepr w ->
do bv <- freshBV g w
cond <- case mbnds of
Nothing -> return GIA.true
Just bnds ->
do let wint = fromIntegral (natValue w)
let arithBounds Nothing = return GIA.true
arithBounds (Just (lo,sz)) =
do diff <- AIG.sub g bv (AIG.bvFromInteger g wint lo)
AIG.ule g diff (AIG.bvFromInteger g wint sz)
case bnds of
BVD.BVDArith a -> arithBounds (A.arithDomainData a)
BVD.BVDBitwise b -> between g (B.bitbounds b) bv
return (BVBinding w n bv cond)
BaseIntegerRepr -> failAt l "Integer variables are not supported by ABC."
BaseRealRepr -> failAt l "Real variables are not supported by ABC."
BaseStringRepr _ -> failAt l "String variables are not supported by ABC."
BaseComplexRepr -> failAt l "Complex variables are not supported by ABC."
BaseArrayRepr _ _ -> failAt l "Array variables are not supported by ABC."
BaseStructRepr{} -> failAt l "Struct variables are not supported by ABC."
BaseFloatRepr{} -> failAt l "Floating-point variables are not supported by ABC."
between :: GIA.GIA s -> (Integer, Integer) -> AIG.BV (GIA.Lit s) -> IO (GIA.Lit s)
between g (lo, hi) bv = foldM (AIG.lAnd' g) GIA.true =<< mapM bitBetween [0 .. l-1]
where
l = length bv
bitBetween i = AIG.lAnd' g lop hip
where
lop = if lobit then bvbit else GIA.true
hip = if hibit then GIA.true else AIG.not bvbit
bvbit = AIG.at bv i
lobit = testBit lo (l - i - 1)
hibit = testBit hi (l - i - 1)
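-- Worked example (added commentary, not in the original source): with l = 4,
-- lo = 0b0100 and hi = 0b0110, position i = 0 is the MSB.  There lobit and
-- hibit are both False, so that bit is forced to 0; at i = 1 both are True,
-- forcing the bit to 1; at i = 2 only hibit is True, leaving the bit
-- unconstrained; at i = 3 both are False, forcing 0.  The admissible values
-- are therefore 0b0100 and 0b0110, matching the bitwise bounds.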
-- | Add a bound variable.
addBoundVar :: Network t s -> Some (QuantifierInfo t) -> IO (VarBinding t s)
addBoundVar ntk (Some info) = do
let bvar = boundVar info
freshBinding ntk (bvarId bvar) (bvarLoc bvar) (bvarType bvar) (bvarAbstractValue bvar)
-- | Add a bound variable.
addBoundVar' :: Network t s -> Some (ExprBoundVar t) -> IO (VarBinding t s)
addBoundVar' ntk (Some bvar) = do
freshBinding ntk (bvarId bvar) (bvarLoc bvar) (bvarType bvar) (bvarAbstractValue bvar)
readSATInput :: (String -> IO ())
-> Streams.InputStream String
-> [Int]
-> IO GIA.SatResult
readSATInput logLn in_stream vars = do
mln <- Streams.read in_stream
case mln of
Nothing -> fail "Unexpected end of SAT solver output."
Just "s SATISFIABLE" -> do
msln <- Streams.read in_stream
case words <$> msln of
Just ("v":num) -> do
let trueVars :: HSet.HashSet Int
trueVars = HSet.fromList $ filter (>0) $ read <$> num
let varValue v = HSet.member v trueVars
return $ GIA.Sat (varValue <$> vars)
Just _ -> do
fail "Could not parse output from sat solver."
Nothing -> fail "Unexpected end of SAT solver output."
Just "s UNSATISFIABLE" -> do
return $ GIA.Unsat
Just ln -> do
logLn ln
readSATInput logLn in_stream vars
-- | Write an external file using DIMACS format.
writeDimacsFile :: Network t s
-> FilePath
-> BoolExpr t
-> IO [Int]
writeDimacsFile ntk cnf_path condition = do
-- Get variables in expression.
let vars = predicateVarInfo condition
checkSupportedByAbc vars
checkNoLatches vars
checkNoForallVars vars
-- Add bindings for uninterpreted bindings.
sideconds <- recordUninterpConstants ntk (vars^.uninterpConstants)
-- Add bindings for existential variables.
Fold.traverse_ (recordBoundVar ntk) (vars^.existQuantifiers)
-- Generate predicate for top level term.
B c <- eval ntk condition
-- Assert any necessary sideconditions
c' <- AIG.lAnd' (gia ntk) sideconds c
GIA.writeCNF (gia ntk) c' cnf_path
-- | Run an external solver using competition dimacs format.
runExternalDimacsSolver :: (Int -> String -> IO ()) -- ^ Logging function
-> (FilePath -> IO String)
-> BoolExpr t
-> IO (SatResult (GroundEvalFn t) ())
runExternalDimacsSolver logLn mkCommand condition = do
temp_dir <- getTemporaryDirectory
let close (path,h) = do
hClose h
removeFile path
bracket (openTempFile temp_dir "sat.cnf") close $ \(cnf_path,_h) -> do
logLn 2 $ "Writing CNF file to " ++ show cnf_path ++ "."
withNetwork $ \ntk -> do
vars <- writeDimacsFile ntk cnf_path condition
command <- mkCommand cnf_path
logLn 2 $ "About to call: " ++ command
let stopProcess (_,_,_,ph) = do
terminateProcess ph
let runSatProcess (_in_stream, out_stream, err_stream, _ph) = do
-- Log stderr to output.
void $ forkIO $ logErrorStream err_stream (logLn 2)
-- Read stdout as result.
out_lines <- Streams.map UTF8.toString =<< Streams.lines out_stream
res <- readSATInput (logLn 2) out_lines vars
-- Create model
evaluateSatModel ntk [] res
bracketOnError (Streams.runInteractiveCommand command) stopProcess runSatProcess
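
-- A minimal sketch (hypothetical, not part of the original module): run an
-- external DIMACS solver by splicing the CNF path into a fixed command line.
-- The solver name and the logging behaviour here are placeholders.
exampleRunExternalSat :: BoolExpr t -> IO (SatResult (GroundEvalFn t) ())
exampleRunExternalSat p =
  runExternalDimacsSolver
    (\_verb msg -> putStrLn msg)                  -- log everything to stdout
    (\cnfPath -> return ("glucose " ++ cnfPath))  -- build the solver command
    p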
hasBoundVars :: CollectedVarInfo t -> Bool
hasBoundVars vars = not (Map.null (vars^.forallQuantifiers))
|| not (Map.null (vars^.existQuantifiers))
-- | Write AIG that outputs given value.
writeAig :: FilePath
-> [Some (Expr t)]
-- ^ The combinational outputs.
-> [Some (Expr t)]
-- ^ The latch outputs (may be empty)
-> IO ()
writeAig path v latchOutputs = do
-- Get variables in expression.
let vars = runST $ collectVarInfo $ do
Fold.traverse_ (traverseSome_ (recordExprVars ExistsOnly)) v
Fold.traverse_ (traverseSome_ (recordExprVars ExistsOnly))
latchOutputs
-- Check inputs.
checkSupportedByAbc vars
when (hasBoundVars vars) $ do
fail "Cannot write an AIG with bound variables."
-- Generate AIG
withNetwork $ \ntk -> do
-- Add bindings for uninterpreted bindings.
-- FIXME? should we do anything with these side conditions?
_sideconds <- recordUninterpConstants ntk (vars^.uninterpConstants)
-- Add bindings for existential variables.
Fold.traverse_ (recordBoundVar ntk) (vars^.existQuantifiers)
-- Get input count
cInCount <- getInputCount ntk
    -- Add latch inputs.
Fold.traverse_ (addBoundVar' ntk) $ vars^.latches
    -- Add value to AIGER output.
Fold.traverse_ (traverseSome_ (outputExpr ntk)) v
-- Get current number of outputs.
cOutCount <- getOutputCount ntk
-- Write latch outputs.
Fold.traverse_ (traverseSome_ (outputExpr ntk)) latchOutputs
-- Get number of outputs including latches.
allInCount <- getInputCount ntk
allOutCount <- getOutputCount ntk
let inLatchCount = allInCount - cInCount
let outLatchCount = allOutCount - cOutCount
when (inLatchCount /= outLatchCount) $ do
fail $ "Expected " ++ show inLatchCount ++ " latch outputs, when "
++ show outLatchCount ++ " are given."
out <- getOutputs ntk
GIA.writeAigerWithLatches path (GIA.Network (gia ntk) out) inLatchCount
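
-- A minimal sketch (hypothetical helper): write a purely combinational AIG
-- with a single Boolean output and no latches.
exampleWriteCombinationalAig :: FilePath -> BoolExpr t -> IO ()
exampleWriteCombinationalAig path p = writeAig path [Some p] []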
getOutputs :: Network t s -> IO [GIA.Lit s]
getOutputs ntk = reverse <$> readIORef (revOutputs ntk)
-- | Return number of inputs so far in network.
getInputCount :: Network t s -> IO Int
getInputCount ntk = GIA.inputCount (gia ntk)
-- | Return number of outputs so far in network.
getOutputCount :: Network t s -> IO Int
getOutputCount ntk = length <$> readIORef (revOutputs ntk)
| null |
https://raw.githubusercontent.com/GaloisInc/what4/393ba786f41c49cc6b7bc0ea01374fad56efdaab/what4-abc/src/What4/Solver/ABC.hs
|
haskell
|
# LANGUAGE DoAndIfThenElse #
# LANGUAGE GADTs #
# LANGUAGE OverloadedStrings #
# LANGUAGE RankNTypes #
| Command to run sat solver.
are supported for the other types.
side condition
| Holds outputs in reverse order when used to write
AIGs
| Create a representation of the expression as Boolean variables.
----------------------------------------------------------------------
----------------------------------------------------------------------
Real value operations
------------------------------------------------------------------
----------------------------------------------------------------------
Floating point operations
----------------------------------------------------------------------
Array operations
----------------------------------------------------------------------
String operations
----------------------------------------------------------------------
Conversions.
----------------------------------------------------------------------
----------------------------------------------------------------------
Structs
| Look to see if literals have been assigned to expression.
Look to see if literals have been assigned to expression.
Get literal evaluation function.
^ Number of existential variables.
^ Condition to check satisfiability of.
^ Maximum number of iterations to run.
@Ep.Eev.Aav.p = v@.
Bind top-most quantifier to e
Generate predicate p => (Av. t)
Generate predicate (Av. t) => p
Delete binding to elements.
Return both predicates.
Generate predicate p => (Ev. t)
Generate predicate (Ev. t) => p
Delete binding to elements.
Return both predicates.
Check no errors were reported in result.
| Check that var result contains no universally quantified variables.
| Expression to check is satisfiable.
Get variables in expression.
Get network
Add bindings for uninterpreted bindings.
Add bindings for bound variables.
Add bindings for existential variables.
Get predicate to hold value on whether quantifier is true
true or false.
Get existential variables for representing both bound variables.
Add variables that are only universally quantified.
Get universal variables for representing both bound variables.
Evaluate lit.
Add predicates for both vars.
Get final pred.
Add bindings for uninterpreted bindings.
| Associate an element in a binding with the term.
| Add an uninterpreted variable.
^ Unique id for variable.
^ Location of binding.
^ Type of variable
^ Bounds on the value
| Add a bound variable.
| Add a bound variable.
| Write an external file using DIMACS format.
Get variables in expression.
Add bindings for uninterpreted bindings.
Add bindings for existential variables.
Generate predicate for top level term.
Assert any necessary sideconditions
| Run an external solver using competition dimacs format.
^ Logging function
Log stderr to output.
Read stdout as result.
Create model
^ The combinational outputs.
^ The latch outputs (may be empty)
Get variables in expression.
Check inputs.
Generate AIG
Add bindings for uninterpreted bindings.
FIXME? should we do anything with these side conditions?
Add bindings for existential variables.
Get input count
Get current number of outputs.
Write latch outputs.
Get number of outputs including latches.
| Return number of inputs so far in network.
| Return number of outputs so far in network.
|
{-|
Module      : What4.Solver.ABC
Copyright   : (c) Galois, Inc 2014-2016
Maintainer  : Joe Hendrix <>
License     : BSD3

Solver adapter and associated operations for connecting the
Crucible simple builder backend to the ABC And-Inverter Graph (AIG)
representation.
-}
{-# LANGUAGE CPP #-}
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE ScopedTypeVariables #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE TypeOperators #-}
{-# OPTIONS_GHC -Werror #-}
module What4.Solver.ABC
( Network
, newNetwork
, withNetwork
, checkSat
, writeDimacsFile
, runExternalDimacsSolver
, GIA.SomeGraph(..)
, writeAig
, abcQbfIterations
, abcOptions
, abcAdapter
, satCommand
, genericSatOptions
, genericSatAdapter
) where
#if !MIN_VERSION_base(4,13,0)
import Control.Monad.Fail( MonadFail )
#endif
import Control.Concurrent
import Control.Exception hiding (evaluate)
import Control.Lens
import Control.Monad.Identity
import Control.Monad.ST
import Data.Bits
import qualified Data.BitVector.Sized as BV
import qualified Data.ABC as GIA
import qualified Data.ABC.GIA as GIA
import qualified Data.AIG.Operations as AIG
import qualified Data.AIG.Interface as AIG
import qualified Data.ByteString.UTF8 as UTF8
import qualified Data.Foldable as Fold
import qualified Data.HashSet as HSet
import Data.IORef
import Data.List (zipWith4)
import Data.List.NonEmpty (NonEmpty(..))
import qualified Data.Map.Strict as Map
import Data.Parameterized.HashTable (HashTable)
import qualified Data.Parameterized.HashTable as H
import Data.Parameterized.Nonce (Nonce)
import Data.Parameterized.Some
import Data.Set (Set)
import qualified Data.Set as Set
import qualified Data.Text as T
import Foreign.C.Types
import Prettyprinter
import System.Directory
import System.IO
import qualified System.IO.Streams as Streams
import System.Process
import What4.BaseTypes
import What4.Concrete
import What4.Config
import What4.Interface
( getConfiguration, IsExprBuilder, logSolverEvent
, SolverEvent(..), SolverStartSATQuery(..), SolverEndSATQuery(..), andAllOf )
import What4.Expr
import What4.Expr.Builder
import qualified What4.Expr.BoolMap as BM
import What4.Expr.GroundEval
import qualified What4.Expr.UnaryBV as UnaryBV
import What4.Expr.VarIdentification
import qualified What4.Expr.WeightedSum as WSum
import What4.Panic
import What4.ProgramLoc
import What4.Solver.Adapter
import What4.SatResult
import What4.Utils.AbstractDomains
import qualified What4.Utils.BVDomain as BVD
import qualified What4.Utils.BVDomain.Arith as A
import qualified What4.Utils.BVDomain.Bitwise as B
import What4.Utils.Complex
import qualified What4.Utils.Environment as Env
import What4.Utils.MonadST
import What4.Utils.Streams
import What4.Utils.StringLiteral
abcQbfIterations :: ConfigOption BaseIntegerType
abcQbfIterations = configOption BaseIntegerRepr "abc.qbf_max_iterations"
abcOptions :: [ConfigDesc]
abcOptions =
[ opt abcQbfIterations (ConcreteInteger (toInteger (maxBound :: CInt)))
("Max number of iterations to run ABC's QBF solver" :: T.Text)
]
abcAdapter :: SolverAdapter st
abcAdapter =
SolverAdapter
{ solver_adapter_name = "abc"
, solver_adapter_config_options = abcOptions
, solver_adapter_check_sat = \sym logData ps cont -> do
p <- andAllOf sym folded ps
res <- checkSat sym logData p
cont . runIdentity . traverseSatResult (\x -> pure (x,Nothing)) pure $ res
, solver_adapter_write_smt2 = \_ _ _ -> do
fail "ABC backend does not support writing SMTLIB2 files."
}
satCommand :: ConfigOption (BaseStringType Unicode)
satCommand = configOption knownRepr "sat_command"
genericSatOptions :: [ConfigDesc]
genericSatOptions =
[ opt satCommand (ConcreteString "glucose $1")
("Generic SAT solving command to run" :: T.Text)
]
genericSatAdapter :: SolverAdapter st
genericSatAdapter =
SolverAdapter
{ solver_adapter_name = "sat"
, solver_adapter_config_options = genericSatOptions
, solver_adapter_check_sat = \sym logData ps cont -> do
let cfg = getConfiguration sym
cmd <- T.unpack <$> (getOpt =<< getOptionSetting satCommand cfg)
let mkCommand path = do
let var_map = Map.fromList [("1",path)]
Env.expandEnvironmentPath var_map cmd
p <- andAllOf sym folded ps
mmdl <- runExternalDimacsSolver (logCallbackVerbose logData) mkCommand p
cont . runIdentity . traverseSatResult (\x -> pure (x,Nothing)) pure $ mmdl
, solver_adapter_write_smt2 = \_ _ _ -> do
fail "SAT backend does not support writing SMTLIB2 files."
}
-- | Maps expression types to the representation used in the ABC backend.
-- The ABC backend only supports Bools and bitvectors, so only constants
-- are supported for the other types.
type family LitValue s (tp :: BaseType) where
LitValue s BaseBoolType = GIA.Lit s
LitValue s (BaseBVType n) = AIG.BV (GIA.Lit s)
LitValue s BaseIntegerType = Integer
LitValue s BaseRealType = Rational
LitValue s (BaseStringType si) = StringLiteral si
LitValue s BaseComplexType = Complex Rational
-- | Newtype wrapper around names.
data NameType s (tp :: BaseType) where
B :: GIA.Lit s -> NameType s BaseBoolType
BV :: NatRepr n -> AIG.BV (GIA.Lit s) -> NameType s (BaseBVType n)
GroundInt :: Integer -> NameType s BaseIntegerType
GroundRat :: Rational -> NameType s BaseRealType
GroundString :: StringLiteral si -> NameType s (BaseStringType si)
GroundComplex :: Complex Rational -> NameType s BaseComplexType
-- | A variable binding in ABC.
data VarBinding t s where
BoolBinding :: Nonce t BaseBoolType
-> GIA.Lit s
-> VarBinding t s
BVBinding :: (1 <= w)
=> NatRepr w
-> Nonce t (BaseBVType w)
             -> AIG.BV (GIA.Lit s)
             -> GIA.Lit s
             -> VarBinding t s
-- | Handle to the ABC interface.
data Network t s = Network { gia :: GIA.GIA s
, nameCache :: !(HashTable RealWorld (Nonce t) (NameType s))
, revOutputs :: !(IORef [GIA.Lit s])
}
memoExprNonce :: Network t s
-> Nonce t tp
-> IO (NameType s tp)
-> IO (NameType s tp)
memoExprNonce ntk n ev = do
let c = nameCache ntk
mnm <- liftST $ H.lookup c n
case mnm of
Just nm -> return nm
Nothing -> do
r <- ev
liftST $ H.insert c n r
return r
eval :: Network t s -> Expr t tp -> IO (NameType s tp)
eval _ (BoolExpr b _) =
return $! if b then B GIA.true else B GIA.false
eval _ (SemiRingLiteral SemiRingIntegerRepr n _) = return (GroundInt n)
eval _ (SemiRingLiteral SemiRingRealRepr r _) = return (GroundRat r)
eval ntk (SemiRingLiteral (SemiRingBVRepr _ w) bv _) =
return $ BV w $ AIG.bvFromInteger (gia ntk) (widthVal w) (BV.asUnsigned bv)
eval _ (StringExpr s _) = return (GroundString s)
eval _ e@FloatExpr{} = failTerm e "floating-point expression"
eval ntk (NonceAppExpr e) = do
memoExprNonce ntk (nonceExprId e) $ do
bitblastPred ntk e
eval ntk (AppExpr a) = do
memoExprNonce ntk (appExprId a) $ do
bitblastExpr ntk a
eval ntk (BoundVarExpr info) = do
memoExprNonce ntk (bvarId info) $ do
case bvarKind info of
QuantifierVarKind ->
error $ "Bound variable is not defined."
LatchVarKind ->
error $ "Latches that are not defined."
UninterpVarKind ->
error $ "Uninterpreted variable that was not defined."
eval' :: Network t s -> Expr t tp -> IO (LitValue s tp)
eval' ntk e = do
r <- eval ntk e
case r of
B l -> return l
BV _ v -> return v
GroundInt c -> return c
GroundRat c -> return c
GroundComplex c -> return c
GroundString c -> return c
failAt :: ProgramLoc -> String -> IO a
failAt l msg = fail $ show $
vcat
[ pretty msg
, "From term created at" <+> pretty (plSourceLoc l)
]
failTerm :: Expr t tp -> String -> IO a
failTerm e nm = do
fail $ show $
vcat
[ "The" <+> pretty nm <+> "created at"
<+> pretty (plSourceLoc (exprLoc e))
<+> "is not supported by ABC:"
, indent 2 (ppExpr e)
]
bitblastPred :: Network t s -> NonceAppExpr t tp -> IO (NameType s tp)
bitblastPred h e = do
case nonceExprApp e of
Annotation _tpr _n x -> eval h x
Forall _ x -> eval h x
Exists _ x -> eval h x
ArrayFromFn{} -> fail "ABC does not support uninterpreted functions"
MapOverArrays{} -> fail "ABC does not support uninterpreted functions"
ArrayTrueOnEntries{} -> fail "ABC does not support uninterpreted functions"
FnApp{} -> fail "ABC does not support uninterpreted functions"
bitblastExpr :: forall t s tp . Network t s -> AppExpr t tp -> IO (NameType s tp)
bitblastExpr h ae = do
let g = gia h
let intFail :: IO a
intFail = failTerm (AppExpr ae) "integer expression"
let realFail :: IO a
realFail = failTerm (AppExpr ae) "real expression"
let arrayFail :: IO a
arrayFail = failTerm (AppExpr ae) "array expression"
let structFail :: IO a
structFail = failTerm (AppExpr ae) "struct expression"
let floatFail :: IO a
floatFail = failTerm (AppExpr ae) "floating-point expression"
let stringFail :: IO a
stringFail = failTerm (AppExpr ae) "string expression"
case appExprApp ae of
    -- Integer operations
SemiRingLe OrderedSemiRingIntegerRepr _ _ -> intFail
IntAbs{} -> intFail
IntDiv{} -> intFail
IntMod{} -> intFail
IntDivisible{} -> intFail
SemiRingLe OrderedSemiRingRealRepr _ _ -> realFail
RealDiv{} -> realFail
RealSqrt{} -> realFail
RealSpecialFunction{} -> realFail
    -- Bitvector operations
BaseIte bt _ c x y ->
case bt of
BaseBoolRepr ->
do c' <- eval' h c
B <$> AIG.lazyMux g c' (eval' h x) (eval' h y)
BaseBVRepr w ->
do c' <- eval' h c
BV w <$> AIG.iteM g c' (eval' h x) (eval' h y)
BaseIntegerRepr -> intFail
BaseRealRepr -> realFail
BaseComplexRepr -> realFail
BaseFloatRepr _ -> floatFail
BaseArrayRepr _ _ -> arrayFail
BaseStructRepr _ -> structFail
BaseStringRepr _ -> stringFail
BaseEq bt x y ->
case bt of
BaseBoolRepr -> B <$> join (AIG.eq g <$> eval' h x <*> eval' h y)
BaseBVRepr _ -> B <$> join (AIG.bvEq g <$> eval' h x <*> eval' h y)
BaseIntegerRepr -> intFail
BaseRealRepr -> realFail
BaseComplexRepr -> realFail
BaseFloatRepr _ -> floatFail
BaseArrayRepr _ _ -> arrayFail
BaseStructRepr _ -> structFail
BaseStringRepr _ -> stringFail
BVTestBit i xe -> assert (i <= fromIntegral (maxBound :: Int)) $
(\v -> B $ v AIG.! (fromIntegral i)) <$> eval' h xe
BVSlt x y -> B <$> join (AIG.slt g <$> eval' h x <*> eval' h y)
BVUlt x y -> B <$> join (AIG.ult g <$> eval' h x <*> eval' h y)
BVUnaryTerm u -> do
let w = UnaryBV.width u
let cns v = return $ AIG.bvFromInteger g (widthVal w) v
let ite :: BoolExpr t
-> AIG.BV (GIA.Lit s)
-> AIG.BV (GIA.Lit s)
-> IO (AIG.BV (GIA.Lit s))
ite p x y = do
c <- eval' h p
AIG.ite g c x y
BV w <$> UnaryBV.sym_evaluate cns ite u
BVConcat w xe ye -> do
x <- eval' h xe
y <- eval' h ye
return $ BV w $ x AIG.++ y
BVSelect idx n xe -> do
x <- eval' h xe
return $ BV n $ AIG.sliceRev x (fromIntegral (natValue idx)) (fromIntegral (natValue n))
NotPred xe -> B . AIG.not <$> eval' h xe
ConjPred xs ->
let pol (x,BM.Positive) = eval' h x
pol (x,BM.Negative) = AIG.not <$> eval' h x
in
case BM.viewBoolMap xs of
BM.BoolMapUnit -> return (B GIA.true)
BM.BoolMapDualUnit -> return (B GIA.false)
BM.BoolMapTerms (t:|ts) ->
B <$> join (foldM (AIG.lAnd' g) <$> pol t <*> mapM pol ts)
SemiRingSum s ->
case WSum.sumRepr s of
SemiRingBVRepr BVArithRepr w -> BV w <$> WSum.evalM (AIG.add g) smul cnst s
where
smul c e =
            -- NB, better constant folding if the constant is the second value
flip (AIG.mul g) (AIG.bvFromInteger g (widthVal w) (BV.asUnsigned c)) =<< eval' h e
cnst c = pure (AIG.bvFromInteger g (widthVal w) (BV.asUnsigned c))
SemiRingBVRepr BVBitsRepr w -> BV w <$> WSum.evalM (AIG.zipWithM (AIG.lXor' g)) smul cnst s
where
smul c e = AIG.zipWithM (AIG.lAnd' g) (AIG.bvFromInteger g (widthVal w) (BV.asUnsigned c)) =<< eval' h e
cnst c = pure (AIG.bvFromInteger g (widthVal w) (BV.asUnsigned c))
SemiRingIntegerRepr -> intFail
SemiRingRealRepr -> realFail
SemiRingProd pd ->
case WSum.prodRepr pd of
SemiRingBVRepr BVArithRepr w ->
maybe (BV w (AIG.bvFromInteger g (widthVal w) 1)) (BV w) <$>
WSum.prodEvalM (AIG.mul g) (eval' h) pd
SemiRingBVRepr BVBitsRepr w ->
maybe (BV w (AIG.bvFromInteger g (widthVal w) (maxUnsigned w))) (BV w) <$>
WSum.prodEvalM (AIG.zipWithM (AIG.lAnd' g)) (eval' h) pd
SemiRingIntegerRepr -> intFail
SemiRingRealRepr -> realFail
BVOrBits w bs ->
do bs' <- traverse (eval' h) (bvOrToList bs)
case bs' of
[] -> return (BV w (AIG.bvFromInteger g (widthVal w) 0))
x:xs -> BV w <$> foldM (AIG.zipWithM (AIG.lOr' g)) x xs
BVUdiv w x y -> do
BV w <$> join (AIG.uquot g <$> eval' h x <*> eval' h y)
BVUrem w x y -> do
BV w <$> join (AIG.urem g <$> eval' h x <*> eval' h y)
BVSdiv w x y ->
BV w <$> join (AIG.squot g <$> eval' h x <*> eval' h y)
BVSrem w x y ->
BV w <$> join (AIG.srem g <$> eval' h x <*> eval' h y)
BVShl w x y -> BV w <$> join (AIG.shl g <$> eval' h x <*> eval' h y)
BVLshr w x y -> BV w <$> join (AIG.ushr g <$> eval' h x <*> eval' h y)
BVAshr w x y -> BV w <$> join (AIG.sshr g <$> eval' h x <*> eval' h y)
BVRol w x y -> BV w <$> join (AIG.rol g <$> eval' h x <*> eval' h y)
BVRor w x y -> BV w <$> join (AIG.ror g <$> eval' h x <*> eval' h y)
BVFill w xe -> BV w . AIG.bvFromList . replicate (widthVal w) <$> eval' h xe
BVPopcount w xe -> do
x <- eval' h xe
BV w <$> AIG.popCount g x
BVCountLeadingZeros w xe -> do
x <- eval' h xe
BV w <$> AIG.countLeadingZeros g x
BVCountTrailingZeros w xe -> do
x <- eval' h xe
BV w <$> AIG.countTrailingZeros g x
BVZext w' xe -> do
x <- eval' h xe
return $ BV w' $ AIG.zext g x (widthVal w')
BVSext w' xe -> do
x <- eval' h xe
return $ BV w' $ AIG.sext g x (widthVal w')
FloatNeg{} -> floatFail
FloatAbs{} -> floatFail
FloatSqrt{} -> floatFail
FloatAdd{} -> floatFail
FloatSub{} -> floatFail
FloatMul{} -> floatFail
FloatDiv{} -> floatFail
FloatRem{} -> floatFail
FloatFMA{} -> floatFail
FloatFpEq{} -> floatFail
FloatLe{} -> floatFail
FloatLt{} -> floatFail
FloatIsNaN{} -> floatFail
FloatIsInf{} -> floatFail
FloatIsZero{} -> floatFail
FloatIsPos{} -> floatFail
FloatIsNeg{} -> floatFail
FloatIsSubnorm{} -> floatFail
FloatIsNorm{} -> floatFail
FloatCast{} -> floatFail
FloatRound{} -> floatFail
FloatFromBinary{} -> floatFail
BVToFloat{} -> floatFail
SBVToFloat{} -> floatFail
RealToFloat{} -> floatFail
FloatToBV{} -> floatFail
FloatToSBV{} -> floatFail
FloatToReal{} -> floatFail
FloatToBinary{} -> floatFail
FloatSpecialFunction{} -> floatFail
ArrayMap{} -> arrayFail
ConstantArray{} -> arrayFail
SelectArray{} -> arrayFail
UpdateArray{} -> arrayFail
CopyArray{} -> arrayFail
SetArray{} -> arrayFail
EqualArrayRange{} -> arrayFail
StringAppend{} -> stringFail
StringLength{} -> stringFail
StringContains{} -> stringFail
StringIsPrefixOf{} -> stringFail
StringIsSuffixOf{} -> stringFail
StringIndexOf{} -> stringFail
StringSubstring{} -> stringFail
RealIsInteger{} -> realFail
IntegerToReal{} -> realFail
BVToInteger{} -> intFail
SBVToInteger{} -> intFail
RoundReal{} -> realFail
RoundEvenReal{} -> realFail
FloorReal{} -> realFail
CeilReal{} -> realFail
RealToInteger{} -> intFail
IntegerToBV{} -> intFail
    -- Complex operations
Cplx (r :+ i) -> do
GroundComplex <$> ((:+) <$> eval' h r <*> eval' h i)
RealPart c -> do
GroundRat . realPart <$> eval' h c
ImagPart c -> do
GroundRat . imagPart <$> eval' h c
StructCtor{} -> structFail
StructField{} -> structFail
newNetwork :: IO (GIA.SomeGraph (Network t))
newNetwork = do
GIA.SomeGraph g <- GIA.newGIA
nc <- liftST $ H.new
outputsRef <- newIORef []
let s = Network { gia = g
, nameCache = nc
, revOutputs = outputsRef
}
return (GIA.SomeGraph s)
withNetwork :: (forall s . Network t s -> IO a) -> IO a
withNetwork m = do
GIA.SomeGraph h <- newNetwork
m h
data SizedBV = forall w . SizedBV (NatRepr w) (BV.BV w)
asBV :: Monad m => (l -> m Bool) -> AIG.BV l -> m SizedBV
asBV f v = do
x <- go 0 0
Some n <- return $ mkNatRepr (fromIntegral nInt)
return $ SizedBV n (BV.mkBV n x)
where nInt = AIG.length v
go r i | i == nInt = return r
go r i = do
b <- f (v `AIG.at` i)
let q = if b then 1 else 0
go ((r `shiftL` 1) .|. q) (i+1)
evalNonce :: Network t s
-> Nonce t tp
-> (GIA.Lit s -> Bool)
-> IO (GroundValue tp)
-> IO (GroundValue tp)
evalNonce ntk n eval_fn fallback = do
mnm <- liftST $ H.lookup (nameCache ntk) n
case mnm of
Just (B l) -> return $ eval_fn l
Just (BV w bv) -> do
SizedBV w' bv' <- asBV (return . eval_fn) bv
case w `testEquality` w' of
Just Refl -> return bv'
Nothing -> panic "What4.Solver.ABC.evalNonce"
["Got back bitvector with wrong width"]
Just (GroundInt x) -> return x
Just (GroundRat x) -> return x
Just (GroundComplex c) -> return c
Just (GroundString c) -> return c
Nothing -> fallback
evaluateSatModel :: forall t s
. Network t s
                 -> [Bool] -- ^ Fixed input arguments (used for QBF).
-> GIA.SatResult
-> IO (SatResult (GroundEvalFn t) ())
evaluateSatModel ntk initial_args sat_res = do
case sat_res of
GIA.Sat assignment -> do
eval_fn <- GIA.evaluator (gia ntk) (assignment ++ initial_args)
      -- Create cache for results.
groundCache <- newIdxCache
let f :: Expr t tp -> IO (GroundValue tp)
f e = case exprMaybeId e of
Nothing -> evalGroundExpr f e
Just n ->
fmap unGVW $ idxCacheEval groundCache e $ fmap GVW $ do
evalNonce ntk n eval_fn $ do
evalGroundExpr f e
return $ Sat $ GroundEvalFn f
GIA.Unsat -> return (Unsat ())
GIA.SatUnknown ->
fail "evaluateSatModel: ABC returned unknown sat result"
runQBF :: Network t s
-> Int
-> GIA.Lit s
-> CInt
-> IO (SatResult (GroundEvalFn t) ())
runQBF ntk e_cnt cond max_iter = do
tot_cnt <- GIA.inputCount (gia ntk)
let a_cnt = tot_cnt - e_cnt
initial_forall = replicate a_cnt False
mr <- GIA.check_exists_forall (gia ntk) e_cnt cond initial_forall max_iter
case mr of
Left m -> fail m
Right r -> evaluateSatModel ntk initial_forall r
addOutput :: Network t s -> GIA.Lit s -> IO ()
addOutput h l = do
modifyIORef' (revOutputs h) $ (l:)
outputExpr :: Network t s -> Expr t tp -> IO ()
outputExpr h e = do
r <- eval h e
case r of
B l -> addOutput h l
BV _ v -> Fold.traverse_ (addOutput h) v
GroundInt _ -> fail $ "Cannot bitblast integer values."
GroundRat _ -> fail $ "Cannot bitblast real values."
GroundComplex _ -> fail $ "Cannot bitblast complex values."
GroundString _ -> fail $ "Cannot bitblast string values."
-- | @getForallPred ntk v p ev av@ adds assertion that:
-- @Ep.Eev.Aav.p = v@.
getForallPred :: Network t s
-> Some (QuantifierInfo t)
-> GIA.Lit s
-> VarBinding t s
-> VarBinding t s
-> IO (GIA.Lit s)
getForallPred ntk (Some b) p e_binding a_binding = do
let g = gia ntk
let c = nameCache ntk
let e = boundTopTerm b
let t = boundInnerTerm b
liftST $ H.insert c (nonceExprId e) (B p)
  -- Switch on quantifier type.
case boundQuant b of
ForallBound -> do
a_conds <- recordBinding ntk a_binding
B c_a <- eval ntk t
c_a' <- GIA.implies g a_conds c_a
c1 <- GIA.implies g p c_a'
e_conds <- recordBinding ntk e_binding
B c_e <- eval ntk t
c_e' <- GIA.implies g e_conds c_e
c2 <- GIA.implies g c_e' p
deleteBinding ntk e_binding
GIA.and g c1 c2
ExistBound -> do
e_conds <- recordBinding ntk e_binding
B c_e <- eval ntk t
c_e' <- GIA.and g e_conds c_e
c1 <- GIA.implies g p c_e'
a_conds <- recordBinding ntk a_binding
B c_a <- eval ntk t
c_a' <- GIA.and g a_conds c_a
c2 <- GIA.implies g c_a' p
deleteBinding ntk a_binding
GIA.and g c1 c2
-- | Check variables are supported by ABC.
checkSupportedByAbc :: MonadFail m => CollectedVarInfo t -> m ()
checkSupportedByAbc vars = do
let errors = Fold.toList (vars^.varErrors)
when (not (null errors)) $ do
fail $ show $ vcat
[ "This formula is not supported by abc:"
, indent 2 (vcat errors)
]
checkNoLatches :: MonadFail m => CollectedVarInfo t -> m ()
checkNoLatches vars = do
when (not (Set.null (vars^.latches))) $ do
fail "Cannot check satisfiability of circuits with latches."
checkNoForallVars :: MonadFail m => CollectedVarInfo t -> m ()
checkNoForallVars vars = do
unless (Map.null (vars^.forallQuantifiers)) $ do
fail "This operation does not support universally quantified variables."
recordUninterpConstants :: Network t s -> Set (Some (ExprBoundVar t)) -> IO (GIA.Lit s)
recordUninterpConstants ntk s = do
let recordCon v = recordBinding ntk =<< addBoundVar' ntk v
conds <- mapM recordCon (Fold.toList s)
foldM (AIG.lAnd' (gia ntk)) GIA.true conds
recordBoundVar :: Network t s -> Some (QuantifierInfo t) -> IO (GIA.Lit s)
recordBoundVar ntk info = do
recordBinding ntk =<< addBoundVar ntk info
checkSat :: IsExprBuilder sym
=> sym
-> LogData
-> BoolExpr t
-> IO (SatResult (GroundEvalFn t) ())
checkSat sym logData e = do
let cfg = getConfiguration sym
let vars = predicateVarInfo e
max_qbf_iter <- fromInteger <$> (getOpt =<< getOptionSetting abcQbfIterations cfg)
checkSupportedByAbc vars
checkNoLatches vars
logSolverEvent sym
(SolverStartSATQuery $ SolverStartSATQueryRec
{ satQuerySolverName = "ABC"
, satQueryReason = logReason logData
})
withNetwork $ \ntk -> do
let g = gia ntk
sideconds <- recordUninterpConstants ntk (vars^.uninterpConstants)
let e_quants = vars^.existQuantifiers
let a_quants = vars^.forallQuantifiers
let e_only_quants = Fold.toList $ Map.difference e_quants a_quants
let a_only_quants = Fold.toList $ Map.difference a_quants e_quants
let both_quants = Fold.toList $ Map.intersection a_quants e_quants
mapM_ (recordBoundVar ntk) e_only_quants
both_preds <- mapM (\_ -> GIA.newInput (gia ntk)) both_quants
e_both_bindings <- mapM (addBoundVar ntk) both_quants
exist_cnt <- GIA.inputCount g
mapM_ (recordBoundVar ntk) a_only_quants
a_both_bindings <- mapM (addBoundVar ntk) both_quants
B c <- eval ntk e
preds <- sequence $ do
zipWith4 (getForallPred ntk) both_quants both_preds e_both_bindings a_both_bindings
p <- foldM (AIG.lAnd' (gia ntk)) c (sideconds : preds)
res <- if Map.null a_quants then do
logCallbackVerbose logData 2 "Calling ABC's SAT solver"
r <- GIA.checkSat (gia ntk) p
evaluateSatModel ntk [] r
else do
logCallbackVerbose logData 2 "Calling ABC's QBF solver"
runQBF ntk exist_cnt p max_qbf_iter
logSolverEvent sym
(SolverEndSATQuery $ SolverEndSATQueryRec
{ satQueryResult = forgetModelAndCore res
, satQueryError = Nothing
})
return res
recordBinding :: Network t s -> VarBinding t s -> IO (GIA.Lit s)
recordBinding ntk b = liftST $
case b of
BoolBinding n r ->
do H.insert (nameCache ntk) n (B r)
return GIA.true
BVBinding w n r sidecond ->
do H.insert (nameCache ntk) n (BV w r)
return sidecond
deleteBinding :: Network t s -> VarBinding t s -> IO ()
deleteBinding ntk b = liftST $
case b of
BoolBinding n _ -> H.delete (nameCache ntk) n
BVBinding _ n _ _ -> H.delete (nameCache ntk) n
freshBV :: AIG.IsAIG l g => g s -> NatRepr n -> IO (AIG.BV (l s))
freshBV g w = AIG.generateM_msb0 (widthVal w) (\_ -> GIA.newInput g)
freshBinding :: Network t s
-> Nonce t tp
-> ProgramLoc
-> BaseTypeRepr tp
-> Maybe (AbstractValue tp)
-> IO (VarBinding t s)
freshBinding ntk n l tp mbnds = do
let g = gia ntk
case tp of
BaseBoolRepr -> do
BoolBinding n <$> GIA.newInput g
BaseBVRepr w ->
do bv <- freshBV g w
cond <- case mbnds of
Nothing -> return GIA.true
Just bnds ->
do let wint = fromIntegral (natValue w)
let arithBounds Nothing = return GIA.true
arithBounds (Just (lo,sz)) =
do diff <- AIG.sub g bv (AIG.bvFromInteger g wint lo)
AIG.ule g diff (AIG.bvFromInteger g wint sz)
case bnds of
BVD.BVDArith a -> arithBounds (A.arithDomainData a)
BVD.BVDBitwise b -> between g (B.bitbounds b) bv
return (BVBinding w n bv cond)
BaseIntegerRepr -> failAt l "Integer variables are not supported by ABC."
BaseRealRepr -> failAt l "Real variables are not supported by ABC."
BaseStringRepr _ -> failAt l "String variables are not supported by ABC."
BaseComplexRepr -> failAt l "Complex variables are not supported by ABC."
BaseArrayRepr _ _ -> failAt l "Array variables are not supported by ABC."
BaseStructRepr{} -> failAt l "Struct variables are not supported by ABC."
BaseFloatRepr{} -> failAt l "Floating-point variables are not supported by ABC."
between :: GIA.GIA s -> (Integer, Integer) -> AIG.BV (GIA.Lit s) -> IO (GIA.Lit s)
between g (lo, hi) bv = foldM (AIG.lAnd' g) GIA.true =<< mapM bitBetween [0 .. l-1]
where
l = length bv
bitBetween i = AIG.lAnd' g lop hip
where
lop = if lobit then bvbit else GIA.true
hip = if hibit then GIA.true else AIG.not bvbit
bvbit = AIG.at bv i
lobit = testBit lo (l - i - 1)
hibit = testBit hi (l - i - 1)
addBoundVar :: Network t s -> Some (QuantifierInfo t) -> IO (VarBinding t s)
addBoundVar ntk (Some info) = do
let bvar = boundVar info
freshBinding ntk (bvarId bvar) (bvarLoc bvar) (bvarType bvar) (bvarAbstractValue bvar)
addBoundVar' :: Network t s -> Some (ExprBoundVar t) -> IO (VarBinding t s)
addBoundVar' ntk (Some bvar) = do
freshBinding ntk (bvarId bvar) (bvarLoc bvar) (bvarType bvar) (bvarAbstractValue bvar)
readSATInput :: (String -> IO ())
-> Streams.InputStream String
-> [Int]
-> IO GIA.SatResult
readSATInput logLn in_stream vars = do
mln <- Streams.read in_stream
case mln of
Nothing -> fail "Unexpected end of SAT solver output."
Just "s SATISFIABLE" -> do
msln <- Streams.read in_stream
case words <$> msln of
Just ("v":num) -> do
let trueVars :: HSet.HashSet Int
trueVars = HSet.fromList $ filter (>0) $ read <$> num
let varValue v = HSet.member v trueVars
return $ GIA.Sat (varValue <$> vars)
Just _ -> do
fail "Could not parse output from sat solver."
Nothing -> fail "Unexpected end of SAT solver output."
Just "s UNSATISFIABLE" -> do
return $ GIA.Unsat
Just ln -> do
logLn ln
readSATInput logLn in_stream vars
writeDimacsFile :: Network t s
-> FilePath
-> BoolExpr t
-> IO [Int]
writeDimacsFile ntk cnf_path condition = do
let vars = predicateVarInfo condition
checkSupportedByAbc vars
checkNoLatches vars
checkNoForallVars vars
sideconds <- recordUninterpConstants ntk (vars^.uninterpConstants)
Fold.traverse_ (recordBoundVar ntk) (vars^.existQuantifiers)
B c <- eval ntk condition
c' <- AIG.lAnd' (gia ntk) sideconds c
GIA.writeCNF (gia ntk) c' cnf_path
runExternalDimacsSolver :: (Int -> String -> IO ())
-> (FilePath -> IO String)
-> BoolExpr t
-> IO (SatResult (GroundEvalFn t) ())
runExternalDimacsSolver logLn mkCommand condition = do
temp_dir <- getTemporaryDirectory
let close (path,h) = do
hClose h
removeFile path
bracket (openTempFile temp_dir "sat.cnf") close $ \(cnf_path,_h) -> do
logLn 2 $ "Writing CNF file to " ++ show cnf_path ++ "."
withNetwork $ \ntk -> do
vars <- writeDimacsFile ntk cnf_path condition
command <- mkCommand cnf_path
logLn 2 $ "About to call: " ++ command
let stopProcess (_,_,_,ph) = do
terminateProcess ph
let runSatProcess (_in_stream, out_stream, err_stream, _ph) = do
void $ forkIO $ logErrorStream err_stream (logLn 2)
out_lines <- Streams.map UTF8.toString =<< Streams.lines out_stream
res <- readSATInput (logLn 2) out_lines vars
evaluateSatModel ntk [] res
bracketOnError (Streams.runInteractiveCommand command) stopProcess runSatProcess
hasBoundVars :: CollectedVarInfo t -> Bool
hasBoundVars vars = not (Map.null (vars^.forallQuantifiers))
|| not (Map.null (vars^.existQuantifiers))
-- | Write AIG that outputs the given value.
writeAig :: FilePath
-> [Some (Expr t)]
-> [Some (Expr t)]
-> IO ()
writeAig path v latchOutputs = do
let vars = runST $ collectVarInfo $ do
Fold.traverse_ (traverseSome_ (recordExprVars ExistsOnly)) v
Fold.traverse_ (traverseSome_ (recordExprVars ExistsOnly))
latchOutputs
checkSupportedByAbc vars
when (hasBoundVars vars) $ do
fail "Cannot write an AIG with bound variables."
withNetwork $ \ntk -> do
_sideconds <- recordUninterpConstants ntk (vars^.uninterpConstants)
Fold.traverse_ (recordBoundVar ntk) (vars^.existQuantifiers)
cInCount <- getInputCount ntk
-- Add latchInputs
Fold.traverse_ (addBoundVar' ntk) $ vars^.latches
-- Add value to AIGER output.
Fold.traverse_ (traverseSome_ (outputExpr ntk)) v
cOutCount <- getOutputCount ntk
Fold.traverse_ (traverseSome_ (outputExpr ntk)) latchOutputs
allInCount <- getInputCount ntk
allOutCount <- getOutputCount ntk
let inLatchCount = allInCount - cInCount
let outLatchCount = allOutCount - cOutCount
when (inLatchCount /= outLatchCount) $ do
fail $ "Expected " ++ show inLatchCount ++ " latch outputs, when "
++ show outLatchCount ++ " are given."
out <- getOutputs ntk
GIA.writeAigerWithLatches path (GIA.Network (gia ntk) out) inLatchCount
getOutputs :: Network t s -> IO [GIA.Lit s]
getOutputs ntk = reverse <$> readIORef (revOutputs ntk)
getInputCount :: Network t s -> IO Int
getInputCount ntk = GIA.inputCount (gia ntk)
getOutputCount :: Network t s -> IO Int
getOutputCount ntk = length <$> readIORef (revOutputs ntk)
|
21a6741b2915700e3c500ff1677814c4780532f0ec3c09b82d81f30ddd797c5b
|
aumouvantsillage/Hydromel-lang
|
fifo1-1.rkt
|
This Source Code Form is subject to the terms of the Mozilla Public
License , v. 2.0 . If a copy of the MPL was not distributed with this
file , You can obtain one at /.
#lang racket
(require
racket/runtime-path
hydromel/support
"../common.rkt"
"fifo1-1.mel"
"fifo1-tests.rkt")
(define inst (fifo1 (unsigned 8)))
(instance-set! inst 'c_valid (list->signal c_valid-in))
(instance-set! inst 'c_data (list->signal c_data-in))
(instance-set! inst 'p_ready (list->signal p_ready-in))
(test-signal inst 'c_ready c_ready-exp)
(test-signal inst 'full full-exp)
(test-signal inst 'write write-exp)
(test-signal inst 'r_data r_data-exp)
(test-signal inst 'p_valid p_valid-exp)
(test-signal inst 'p_data p_data-exp)
(define-runtime-path vcd-file "fifo1-1.vcd")
(instance-dump-vcd inst (length c_valid-in) "10 ns"
(open-output-file vcd-file #:exists 'replace))
| null |
https://raw.githubusercontent.com/aumouvantsillage/Hydromel-lang/3bed7667b9474e24ed0afaff44f89189d8887b26/examples/fifo/fifo1-1.rkt
|
racket
|
This Source Code Form is subject to the terms of the Mozilla Public
License , v. 2.0 . If a copy of the MPL was not distributed with this
file , You can obtain one at /.
#lang racket
(require
racket/runtime-path
hydromel/support
"../common.rkt"
"fifo1-1.mel"
"fifo1-tests.rkt")
(define inst (fifo1 (unsigned 8)))
(instance-set! inst 'c_valid (list->signal c_valid-in))
(instance-set! inst 'c_data (list->signal c_data-in))
(instance-set! inst 'p_ready (list->signal p_ready-in))
(test-signal inst 'c_ready c_ready-exp)
(test-signal inst 'full full-exp)
(test-signal inst 'write write-exp)
(test-signal inst 'r_data r_data-exp)
(test-signal inst 'p_valid p_valid-exp)
(test-signal inst 'p_data p_data-exp)
(define-runtime-path vcd-file "fifo1-1.vcd")
(instance-dump-vcd inst (length c_valid-in) "10 ns"
(open-output-file vcd-file #:exists 'replace))
|
|
675d8b79df1057f6a3b63158090b4866ac6e81974ba34db720ab2be02bf092e6
|
lylek/vintage-basic
|
LineScanner_test.hs
|
module Language.VintageBasic.LineScanner_test where
import Test.HUnit
import Language.VintageBasic.Asserts
import Language.VintageBasic.LineScanner
import Language.VintageBasic.Result
assertRawParseResult = assertParseResult withLineAndCol ScanError rawLinesP
assertRawParseError = assertParseError withLineAndCol ScanError rawLinesP
test_LineScanner = TestCase $ do
let text = unlines ["10SKDJF@#"," 5 ASJDKFdf "]
assertRawParseResult text [(10, 3, "SKDJF@#"), (5, 6, "ASJDKFdf ")]
test_reports_error_if_line_doesn't_start_with_number = TestCase $ do
let text = unlines ["10SKDJF@#","ASJD4KFdf "]
assertRawParseError text "EXPECTING LINE NUMBER OR END OF FILE"
test_skips_blank_lines = TestCase $ do
let text = unlines ["","10?","","20?",""]
assertRawParseResult text [(10, 3, "?"), (20, 3, "?")]
test_reports_error_if_file_doesn't_end_in_newline = TestCase $ do
let text = "10SKDJF@#"
assertRawParseError text "UNEXPECTED END OF FILE\n EXPECTING END OF LINE OR CHARACTER"
test_accepts_blank_line = TestCase $ do
let text = unlines ["10"]
assertRawParseResult text [(10, 3, "")]
test_strips_carriage_return_preceding_newline = TestCase $ do
let text = "10 BLAH\r\n20 BLORT\r\n"
assertRawParseResult text [(10, 4, "BLAH"), (20, 4, "BLORT")]
| null |
https://raw.githubusercontent.com/lylek/vintage-basic/6cb46767aeb6d2c0b9da4175c7fa8c0ffc31b1c7/test/Language/VintageBasic/LineScanner_test.hs
|
haskell
|
module Language.VintageBasic.LineScanner_test where
import Test.HUnit
import Language.VintageBasic.Asserts
import Language.VintageBasic.LineScanner
import Language.VintageBasic.Result
assertRawParseResult = assertParseResult withLineAndCol ScanError rawLinesP
assertRawParseError = assertParseError withLineAndCol ScanError rawLinesP
test_LineScanner = TestCase $ do
let text = unlines ["10SKDJF@#"," 5 ASJDKFdf "]
assertRawParseResult text [(10, 3, "SKDJF@#"), (5, 6, "ASJDKFdf ")]
test_reports_error_if_line_doesn't_start_with_number = TestCase $ do
let text = unlines ["10SKDJF@#","ASJD4KFdf "]
assertRawParseError text "EXPECTING LINE NUMBER OR END OF FILE"
test_skips_blank_lines = TestCase $ do
let text = unlines ["","10?","","20?",""]
assertRawParseResult text [(10, 3, "?"), (20, 3, "?")]
test_reports_error_if_file_doesn't_end_in_newline = TestCase $ do
let text = "10SKDJF@#"
assertRawParseError text "UNEXPECTED END OF FILE\n EXPECTING END OF LINE OR CHARACTER"
test_accepts_blank_line = TestCase $ do
let text = unlines ["10"]
assertRawParseResult text [(10, 3, "")]
test_strips_carriage_return_preceding_newline = TestCase $ do
let text = "10 BLAH\r\n20 BLORT\r\n"
assertRawParseResult text [(10, 4, "BLAH"), (20, 4, "BLORT")]
|
|
3a3567cd1d6ba997a4b279f6b592e986e87ceb47e1912f2fafd008266ea4e866
|
nixeagle/cl-github
|
network.lisp
|
(in-package :cl-github)
;;; Network API
(defgeneric show-network-meta (username repository &key login token)
(:documentation "Network meta information for USERNAME's REPOSITORY."))
(defgeneric show-network-data (username repository
&key network-meta login token
start end)
(:documentation "Data on last 100 commits."))
(defclass commit-range ()
(name count start)
(:documentation "Blocks of something that github gives when querying
the network api."))
(defclass head ()
(name id)
(:documentation "Heads of branches returned from github's Network API."))
(defclass github-network-meta ()
(blocks
(nethash :reader nethash)
focus dates users)
(:documentation "Toplevel result from github's Network API."))
(defmethod show-network-meta ((username string) (repository string)
&key login token)
(let ((*current-prototype* "GITHUB-NETWORK-META"))
(to-json (github-request :login login :token token :auth :default
:parameters `(,username ,repository "network_meta")
:base-url ""))))
(defmethod show-network-data ((username string) (repository string)
&key network-meta login token start end)
(let ((network-meta (or network-meta
(nethash (show-network-meta username
repository
:token token
:login login)))))
(to-json (github-request :login login :token token :auth :default
:parameters `(,username ,repository
"network_data_chunk")
:base-url ""
:nethash network-meta
:start start
:end end))))
| null |
https://raw.githubusercontent.com/nixeagle/cl-github/19ba2477ea65e52e74e166482407ea96bee8e395/network.lisp
|
lisp
|
Network API
|
(in-package :cl-github)
(defgeneric show-network-meta (username repository &key login token)
(:documentation "Network meta information for USERNAME's REPOSITORY."))
(defgeneric show-network-data (username repository
&key network-meta login token
start end)
(:documentation "Data on last 100 commits."))
(defclass commit-range ()
(name count start)
(:documentation "Blocks of something that github gives when querying
the network api."))
(defclass head ()
(name id)
(:documentation "Heads of branches returned from github's Network API."))
(defclass github-network-meta ()
(blocks
(nethash :reader nethash)
focus dates users)
(:documentation "Toplevel result from github's Network API."))
(defmethod show-network-meta ((username string) (repository string)
&key login token)
(let ((*current-prototype* "GITHUB-NETWORK-META"))
(to-json (github-request :login login :token token :auth :default
:parameters `(,username ,repository "network_meta")
:base-url ""))))
(defmethod show-network-data ((username string) (repository string)
&key network-meta login token start end)
(let ((network-meta (or network-meta
(nethash (show-network-meta username
repository
:token token
:login login)))))
(to-json (github-request :login login :token token :auth :default
:parameters `(,username ,repository
"network_data_chunk")
:base-url ""
:nethash network-meta
:start start
:end end))))
|
bb07c4299d27d5be2b74c35501b4259088a9de7d208e8546d7fe8467f4661a1e
|
joelburget/lvca
|
Directed_graph.mli
|
open Lvca_util
module type Key_intf = sig
type t
include Base.Comparator.S with type t := t
include Base.Hashtbl.Key.S with type t := t
end
(** Raised by [topsort_exn] if the graph is not a dag. *)
exception NotDag
module Int : sig
module Connected_components : sig
(** The output from connected component algorithm. *)
type t =
(** The number of SCCs found. *)
; scc_numbering : int list
(** A list corresponding to the input adjacency list, with the SCC number
assigned to each node. Note that SCC numbers need not be contiguous:
they're the numbers of a representative from each SCC (the lowest-numbered
representative). So, each SCC number is in the range \[0,n). *)
}
end
val graph_of_adjacency : int list list -> int list Int.Map.t
(** Given an adjacency list, give the SCCs. *)
val connected_components : int list Int.Map.t -> Connected_components.t
(** Given an SCC numbering (see [connected_components]), return SCCs, each represented
as a set of nodes contained in it. *)
val make_sets : int list -> Int.Set.t Int.Map.t
(** The composition of [connected_components] and [make_sets]. *)
val connected_component_sets : int list Int.Map.t -> Int.Set.t Int.Map.t
(** Topologically sort a graph given as an adjacency list. *)
val topsort_exn : int list Int.Map.t -> int list
(** Topologically sort a graph given as an adjacency list. *)
val topsort : int list Int.Map.t -> int list option
end
module Make (Key : Key_intf) : sig
module Graph : sig
(** A graph is represented as a mapping from key to key list. *)
type t = (Key.t, Key.t list, Key.comparator_witness) Base.Map.t
end
module Connected_components : sig
(** The output from the connected component algorithm. *)
type t =
(** The graph of SCCs. *)
; sccs : (Key.t, Key.comparator_witness) Base.Set.t Lvca_util.Int.Map.t
(** Mapping from SCC number to keys contained in it. *)
}
end
(** Find the (strongly) [connected_components] in a [graph]. *)
val connected_components : Graph.t -> Connected_components.t
(** Topologically sort a graph given as an adjacency list. *)
val topsort_exn : Graph.t -> Key.t list
(** Topologically sort a graph given as an adjacency list. *)
val topsort : Graph.t -> Key.t list option
end
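The doc comments above describe the SCC and topological-sort API but give no usage, so here is a short OCaml sketch of how the [Int] variant could be driven on a tiny graph. It is illustrative only: the module path [Directed_graph], the use of Base's [Map.of_alist_exn (module Int)] to build an [int list Int.Map.t], and the [Stdio] printing are assumptions, not code from the repository.

(* Hypothetical usage of the interface above; module path and map constructors
   are assumptions, not taken from the repository. *)
open Base

let () =
  (* A small DAG: 0 -> 1, 0 -> 2, 1 -> 2. *)
  let graph = Map.of_alist_exn (module Int) [ 0, [ 1; 2 ]; 1, [ 2 ]; 2, [] ] in
  (* [topsort] returns [None] when the graph is not a DAG. *)
  (match Directed_graph.Int.topsort graph with
   | Some order ->
     Stdio.printf "order: %s\n"
       (String.concat ~sep:" " (List.map order ~f:Int.to_string))
   | None -> Stdio.printf "not a DAG\n");
  (* Every node of this DAG is its own strongly connected component. *)
  let sccs = Directed_graph.Int.connected_component_sets graph in
  Stdio.printf "scc count: %d\n" (Map.length sccs)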
| null |
https://raw.githubusercontent.com/joelburget/lvca/8d1282163623b3541eef021cdff92865890b0563/syntax/Directed_graph.mli
|
ocaml
|
* The output from connected component algorithm.
* Given an adjacency list, give the SCCs.
* The composition of [connected_components] and [make_sets].
* Topologically sort a graph given as an adjacency list.
* Topologically sort a graph given as an adjacency list.
* A graph is represented as a mapping from key to key list.
* The output from the connected component algorithm.
* Find the (strongly) [connected_components] in a [graph].
* Topologically sort a graph given as an adjacency list.
* Topologically sort a graph given as an adjacency list.
|
open Lvca_util
module type Key_intf = sig
type t
include Base.Comparator.S with type t := t
include Base.Hashtbl.Key.S with type t := t
end
(** Raised by [topsort_exn] if the graph is not a dag. *)
exception NotDag
module Int : sig
module Connected_components : sig
type t =
(** The number of SCCs found. *)
; scc_numbering : int list
(** A list corresponding to the input adjacency list, with the SCC number
assigned to each node. Note that SCC numbers need not be contiguous:
they're the numbers of a representative from each SCC (the lowest-numbered
representative). So, each SCC number is in the range \[0,n). *)
}
end
val graph_of_adjacency : int list list -> int list Int.Map.t
val connected_components : int list Int.Map.t -> Connected_components.t
(** Given an SCC numbering (see [connected_components]), return SCCs, each represented
as a set of nodes contained in it. *)
val make_sets : int list -> Int.Set.t Int.Map.t
val connected_component_sets : int list Int.Map.t -> Int.Set.t Int.Map.t
val topsort_exn : int list Int.Map.t -> int list
val topsort : int list Int.Map.t -> int list option
end
module Make (Key : Key_intf) : sig
module Graph : sig
type t = (Key.t, Key.t list, Key.comparator_witness) Base.Map.t
end
module Connected_components : sig
type t =
(** The graph of SCCs. *)
; sccs : (Key.t, Key.comparator_witness) Base.Set.t Lvca_util.Int.Map.t
(** Mapping from SCC number to keys contained in it. *)
}
end
val connected_components : Graph.t -> Connected_components.t
val topsort_exn : Graph.t -> Key.t list
val topsort : Graph.t -> Key.t list option
end
|
5b627ae320e4313335719bf3abec0514742ae2d96348e8d79e6c03a569943b04
|
janestreet/async
|
ppx_log_syntax.mli
|
(** @open *)
include
Ppx_log_types.S
with type t = Async_unix.Log.t
and type return_type = unit
and type Global.return_type = unit
(** If you wish to prevent global logging with ppx_log, you can open this module and any
use of global logging will return a warning type
`Do_not_use_because_it_will_not_log (and will not actually log anything). *)
module No_global : sig
module Ppx_log_syntax :
Ppx_log_types.S
with type t = Async_unix.Log.t
and type return_type = unit
and type Global.return_type = [ `Do_not_use_because_it_will_not_log ]
end
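The comment above explains the point of [No_global]: shadowing the global logger so that accidental global logging shows up in the types instead of silently logging. Here is a self-contained OCaml sketch of that pattern; the modules and the [Printf]-based logger are illustrative stand-ins, not the real Async or ppx_log API.

(* Imitation of the No_global trick described above; these modules are toy
   stand-ins, not the real Async_unix.Log API. *)
module Global = struct
  (* The "real" global logger in this toy example. *)
  let log fmt = Printf.printf fmt
end

module No_global = struct
  module Global = struct
    (* Same name, but the result type forces callers to notice nothing is logged. *)
    let log _msg = `Do_not_use_because_it_will_not_log
  end
end

let () =
  Global.log "this line is logged\n";
  match No_global.Global.log "this line is not" with
  | `Do_not_use_because_it_will_not_log -> ()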
| null |
https://raw.githubusercontent.com/janestreet/async/209af88dff921a3883f62ea9bd4fdf95b9429072/src/ppx_log_syntax.mli
|
ocaml
|
* @open
* If you wish to prevent global logging with ppx_log, you can open this module and any
use of global logging will return a warning type
`Do_not_use_because_it_will_not_log (and will not actually log anything).
|
include
Ppx_log_types.S
with type t = Async_unix.Log.t
and type return_type = unit
and type Global.return_type = unit
module No_global : sig
module Ppx_log_syntax :
Ppx_log_types.S
with type t = Async_unix.Log.t
and type return_type = unit
and type Global.return_type = [ `Do_not_use_because_it_will_not_log ]
end
|
3abc33be7dd5771c1c6518e6cc46647983aa0dbcef7c858ed194dd5b50d0abea
|
typeable/generic-arbitrary
|
NoTypecheckTest.hs
|
{-# OPTIONS_GHC -fdefer-type-errors #-}
{-# OPTIONS_GHC -Wno-deferred-type-errors #-}
-- | Test that infinite types have no Arbitrary instance
module NoTypecheckTest where
import Auxiliary
import GHC.Generics (Generic)
import Test.QuickCheck
import Test.QuickCheck.Arbitrary.Generic
-- | Recursive infinite type which cannot have a valid Arbitrary instance
data R = R R
deriving (Eq, Show, Generic)
-- | Instance which must not compile, but we are using deferred type errors
instance Arbitrary R where
arbitrary = genericArbitrary
shrink = genericShrink
unit_mustFail :: IO ()
unit_mustFail = failGeneration @R
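The comments in this test hinge on the fact that a value of [data R = R R] can never be built in finitely many steps, so a generic [Arbitrary] instance cannot terminate. As a hedged OCaml analogue (not part of the test, and not QuickCheck), the corresponding recursive type admits no terminating generator either:

(* OCaml analogue of the recursive type above: every [r] must contain another [r]. *)
type r = R of r

(* This "generator" typechecks, but calling it can only recurse forever;
   there is no base case, which mirrors why the Arbitrary instance above
   must not be accepted. *)
let rec gen_r () : r = R (gen_r ())

let () =
  (* Deliberately never called: it would not terminate. *)
  ignore (gen_r : unit -> r)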
| null |
https://raw.githubusercontent.com/typeable/generic-arbitrary/107426a603375b9326ce1f3d312afe6f4d9c5f2f/test/NoTypecheckTest.hs
|
haskell
|
# OPTIONS_GHC -fdefer-type-errors #
# OPTIONS_GHC -Wno-deferred-type-errors #
| Test that infinite types have no Arbitrary instance
| Recursive infinite type which cannot have a valid Arbitrary instance
| Instance which must not compile, but we are using deferred type errors
|
module NoTypecheckTest where
import Auxiliary
import GHC.Generics (Generic)
import Test.QuickCheck
import Test.QuickCheck.Arbitrary.Generic
data R = R R
deriving (Eq, Show, Generic)
instance Arbitrary R where
arbitrary = genericArbitrary
shrink = genericShrink
unit_mustFail :: IO ()
unit_mustFail = failGeneration @R
|
2ec5143c57620c17bf419da349cbed9affa855dea57ed12627f375aa2df1466d
|
matteobusi/incremental-mincaml
|
adder.ml
|
let rec make_adder (x : int) : int -> int =
let rec adder (y : int) : int = x + y in
adder in
print_int((make_adder 3) 7)
| null |
https://raw.githubusercontent.com/matteobusi/incremental-mincaml/aebbaf3b918bab4b0398edbf01b43c63d8b7820d/src/fun/examples/adder.ml
|
ocaml
|
let rec make_adder (x : int) : int -> int =
let rec adder (y : int) : int = x + y in
adder in
print_int((make_adder 3) 7)
|
|
0afd5513010a4ab40e0bf643230bf284ea872d1329a7a384e3f383f1d0957940
|
bmeurer/ocaml-arm
|
location.mli
|
(***********************************************************************)
(* *)
(* OCaml *)
(* *)
, projet Cristal , INRIA Rocquencourt
(* *)
Copyright 1996 Institut National de Recherche en Informatique et
en Automatique . All rights reserved . This file is distributed
under the terms of the Q Public License version 1.0 .
(* *)
(***********************************************************************)
$ Id$
Source code locations ( ranges of positions ) , used in parsetree .
open Format
type t = {
loc_start: Lexing.position;
loc_end: Lexing.position;
loc_ghost: bool;
}
(* Note on the use of Lexing.position in this module.
If [pos_fname = ""], then use [!input_name] instead.
If [pos_lnum = -1], then [pos_bol = 0]. Use [pos_cnum] and
re-parse the file to get the line and character numbers.
Else all fields are correct.
*)
val none : t
(** An arbitrary value of type [t]; describes an empty ghost range. *)
val in_file : string -> t;;
(** Return an empty ghost range located in a given file. *)
val init : Lexing.lexbuf -> string -> unit
(** Set the file name and line number of the [lexbuf] to be the start
of the named file. *)
val curr : Lexing.lexbuf -> t
(** Get the location of the current token from the [lexbuf]. *)
val symbol_rloc: unit -> t
val symbol_gloc: unit -> t
(** [rhs_loc n] returns the location of the symbol at position [n], starting
at 1, in the current parser rule. *)
val rhs_loc: int -> t
val input_name: string ref
val input_lexbuf: Lexing.lexbuf option ref
val get_pos_info: Lexing.position -> string * int * int (* file, line, char *)
val print_loc: formatter -> t -> unit
val print_error: formatter -> t -> unit
val print_error_cur_file: formatter -> unit
val print_warning: t -> formatter -> Warnings.t -> unit
val prerr_warning: t -> Warnings.t -> unit
val echo_eof: unit -> unit
val reset: unit -> unit
val highlight_locations: formatter -> t -> t -> bool
type 'a loc = {
txt : 'a;
loc : t;
}
val mknoloc : 'a -> 'a loc
val mkloc : 'a -> t -> 'a loc
val print: formatter -> t -> unit
val print_filename: formatter -> string -> unit
val show_filename: string -> string
(** In -absname mode, return the absolute path for this filename.
Otherwise, returns the filename unchanged. *)
val absname: bool ref
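Since this is only an interface, here is a brief hedged sketch of how the declarations above are typically combined: build a ghost range for generated code with [in_file], attach a payload with [mkloc], and print it with [print]. It assumes the module is linked as [Location] (e.g. via compiler-libs); nothing here comes from the implementation file.

(* Hedged usage sketch for the interface above; assumes it is available as [Location]. *)
let ghost : Location.t = Location.in_file "generated.ml"

let named : string Location.loc = Location.mkloc "x" ghost

let () =
  Format.printf "%a: %s@." Location.print named.Location.loc named.Location.txt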
| null |
https://raw.githubusercontent.com/bmeurer/ocaml-arm/43f7689c76a349febe3d06ae7a4fc1d52984fd8b/parsing/location.mli
|
ocaml
|
*********************************************************************
OCaml
*********************************************************************
Note on the use of Lexing.position in this module.
If [pos_fname = ""], then use [!input_name] instead.
If [pos_lnum = -1], then [pos_bol = 0]. Use [pos_cnum] and
re-parse the file to get the line and character numbers.
Else all fields are correct.
* An arbitrary value of type [t]; describes an empty ghost range.
* Return an empty ghost range located in a given file.
* Set the file name and line number of the [lexbuf] to be the start
of the named file.
* Get the location of the current token from the [lexbuf].
file, line, char
* In -absname mode, return the absolute path for this filename.
Otherwise, returns the filename unchanged.
|
, projet Cristal , INRIA Rocquencourt
Copyright 1996 Institut National de Recherche en Informatique et
en Automatique . All rights reserved . This file is distributed
under the terms of the Q Public License version 1.0 .
$ Id$
Source code locations ( ranges of positions ) , used in parsetree .
open Format
type t = {
loc_start: Lexing.position;
loc_end: Lexing.position;
loc_ghost: bool;
}
val none : t
val in_file : string -> t;;
val init : Lexing.lexbuf -> string -> unit
val curr : Lexing.lexbuf -> t
val symbol_rloc: unit -> t
val symbol_gloc: unit -> t
(** [rhs_loc n] returns the location of the symbol at position [n], starting
at 1, in the current parser rule. *)
val rhs_loc: int -> t
val input_name: string ref
val input_lexbuf: Lexing.lexbuf option ref
val print_loc: formatter -> t -> unit
val print_error: formatter -> t -> unit
val print_error_cur_file: formatter -> unit
val print_warning: t -> formatter -> Warnings.t -> unit
val prerr_warning: t -> Warnings.t -> unit
val echo_eof: unit -> unit
val reset: unit -> unit
val highlight_locations: formatter -> t -> t -> bool
type 'a loc = {
txt : 'a;
loc : t;
}
val mknoloc : 'a -> 'a loc
val mkloc : 'a -> t -> 'a loc
val print: formatter -> t -> unit
val print_filename: formatter -> string -> unit
val show_filename: string -> string
val absname: bool ref
|
f815b1ea372483c9ac921369e08f23aec1dbd7c77875e171a7ebb76c0dfe6450
|
Elastifile/git-sling
|
Proposal.hs
|
{-# LANGUAGE OverloadedStrings #-}
module Sling.Proposal
( Proposal (..)
, ProposalStatus (..)
, ServerId (..)
, MergeType (..)
, ProposalType (..)
, Prefix (..)
, prefixFromText
, prefixToText
, toBranchName
, fromBranchName
, formatProposal
, parseProposal
) where
import Data.Text (Text)
import qualified Data.Text as T
import qualified Sling.Git as Git
import Sling.Lib (Email (..), NatInt, formatSepEmail,
fromNatInt, fromNonEmptyText, Hash, fromHash, hash,
natInt, nonEmptyText, singleMatch,
NonEmptyText, someText)
import Control.Monad (void)
import Control.Applicative ((<|>))
import Data.Monoid ((<>))
import Turtle (Pattern, alphaNum, char, decimal, eof,
hexDigit, notChar, oneOf, some, text)
newtype ServerId = ServerId { fromServerId :: Text }
deriving (Show, Eq, Ord)
data ProposalStatus
= ProposalProposed
| ProposalInProgress { _proposalInProgressServerId :: ServerId }
| ProposalRejected
deriving (Show, Eq, Ord)
emailPat :: Text -> Pattern Email
emailPat sep = do
user <- nonEmptyText . T.pack <$> some (alphaNum <|> oneOf ".-_+")
_ <- text sep
domain <- nonEmptyText . T.pack <$> some (alphaNum <|> oneOf ".-")
return $ Email user domain
newtype Prefix = Prefix { fromPrefix :: NonEmptyText }
deriving (Show, Eq)
prefixToText :: Prefix -> Text
prefixToText = fromNonEmptyText . fromPrefix
prefixFromText :: Text -> Prefix
prefixFromText = Prefix . nonEmptyText
data MergeType = MergeTypeFlat | MergeTypeKeepMerges
deriving (Show, Eq, Ord)
data ProposalType
= ProposalTypeRebase { _ptBranchToRebase :: Git.BranchName }
| ProposalTypeMerge { _ptMergeType :: MergeType,
_ptBase :: Hash }
deriving (Show, Eq, Ord)
data Proposal
= Proposal
{ proposalEmail :: Email
, proposalName :: Git.BranchName -- not really a branch, but it will be used as a branch name
, proposalType :: ProposalType
, proposalBranchOnto :: Git.BranchName
, proposalQueueIndex :: NatInt
, proposalStatus :: ProposalStatus
, proposalDryRun :: Bool
, proposalPrefix :: Maybe Prefix
}
deriving (Show, Eq)
ontoPrefix :: Text
ontoPrefix = "onto"
dryRunOntoPrefix :: Text
dryRunOntoPrefix = "dry-run-onto"
proposalPrefixPrefix :: Text
proposalPrefixPrefix = "prefix-"
slashEscape :: Text
slashEscape = ","
formatBranchName :: Git.BranchName -> Text
formatBranchName = T.replace "/" slashEscape . Git.fromBranchName
branchNamePat :: Pattern Git.BranchName
branchNamePat = Git.mkBranchName . T.replace slashEscape "/" . T.pack <$> some (notChar '#')
proposalTypeMergePrefix :: Text
proposalTypeMergePrefix = "base"
proposalTypeRebasePrefix :: Text
proposalTypeRebasePrefix = "rebase"
mergeTypePrefix :: MergeType -> Text
mergeTypePrefix MergeTypeKeepMerges = "-keep"
mergeTypePrefix MergeTypeFlat = ""
formatProposalType :: ProposalType -> Text
formatProposalType (ProposalTypeMerge mergeType ref) = proposalTypeMergePrefix <> mergeTypePrefix mergeType <> "/" <> fromHash ref
formatProposalType (ProposalTypeRebase name) = proposalTypeRebasePrefix <> "/" <> (formatBranchName name)
toBranchName :: Proposal -> Git.BranchName
toBranchName = Git.mkBranchName . formatProposal
formatProposal :: Proposal -> Text
formatProposal p = "sling/" <> prefix <> "/" <> suffix
where
prefix = maybe T.empty (\x -> (proposalPrefixPrefix <> prefixToText x) <> "/") (proposalPrefix p) <>
case proposalStatus p of
ProposalProposed -> proposedPrefix
ProposalInProgress serverId -> inProgressPrefix <> "/" <> fromServerId serverId
ProposalRejected -> rejectBranchPrefix
suffix =
T.pack (show . fromNatInt . proposalQueueIndex $ p)
<> "/" <> formatBranchName (proposalName p)
<> "/" <> formatProposalType (proposalType p)
<> "/" <> (if proposalDryRun p then dryRunOntoPrefix else ontoPrefix)
<> "/" <> formatBranchName (proposalBranchOnto p)
<> "/user/" <> formatSepEmail "-at-" (proposalEmail p)
hashPat :: Pattern Hash
hashPat = hash . T.pack <$> some hexDigit
formatRef :: Git.Ref -> Text
formatRef (Git.RefParent r n) = (T.pack . show $ fromNatInt n) <> "#" <> formatRef r
formatRef (Git.RefBranch (Git.RemoteBranch r n)) = "R-" <> fromNonEmptyText (Git.remoteName r) <> "/" <> formatBranchName n
formatRef r@(Git.RefBranch (Git.LocalBranch{})) = "L-" <> Git.refName r
formatRef r = Git.refName r
fieldSep :: Pattern ()
fieldSep = void $ char '/'
mergeTypePat :: Pattern MergeType
mergeTypePat = (text (mergeTypePrefix MergeTypeKeepMerges) *> pure MergeTypeKeepMerges)
<|> (pure MergeTypeFlat)
movePat :: Pattern ProposalType
movePat = (text proposalTypeMergePrefix *> (ProposalTypeMerge <$> mergeTypePat <*> (fieldSep *> hashPat)))
<|> (text proposalTypeRebasePrefix *> fieldSep *> (ProposalTypeRebase <$> branchNamePat))
proposalPat :: Pattern Proposal
proposalPat = do
_ <- text slingPrefix
prefix <- (text ("/" <> proposalPrefixPrefix) *> (Just . Prefix . nonEmptyText <$> (someText <* fieldSep))) <|> (fieldSep *> pure Nothing)
ps <- (text proposedPrefix *> pure ProposalProposed)
<|> (text rejectBranchPrefix *> pure ProposalRejected)
<|> (text inProgressPrefix *> fieldSep *> (ProposalInProgress . ServerId <$> someText))
fieldSep
index <- natInt <$> decimal
fieldSep
name <- branchNamePat
fieldSep
moveBranch <- movePat
fieldSep
isDryRun <- (text ontoPrefix *> pure False) <|> (text dryRunOntoPrefix *> pure True)
fieldSep
ontoRef <- branchNamePat
fieldSep
_ <- text "user"
fieldSep
email <- emailPat "-at-"
_ <- eof
return $ Proposal email name moveBranch ontoRef index ps isDryRun prefix
parseProposal :: Text -> Maybe Proposal
parseProposal = singleMatch proposalPat
fromBranchName :: Git.BranchName -> Maybe Proposal
fromBranchName = parseProposal . Git.fromBranchName
slingPrefix :: Text
slingPrefix = "sling"
rejectBranchPrefix :: Text
rejectBranchPrefix = "rejected"
proposedPrefix :: Text
proposedPrefix = "proposed"
inProgressPrefix :: Text
inProgressPrefix = "in-progress"
| null |
https://raw.githubusercontent.com/Elastifile/git-sling/a92f1836910c0a4d8105ca0dff9d1bd08e9bb181/server/src/Sling/Proposal.hs
|
haskell
|
# LANGUAGE OverloadedStrings #
not really a branch, but it will be used as a branch name
|
module Sling.Proposal
( Proposal (..)
, ProposalStatus (..)
, ServerId (..)
, MergeType (..)
, ProposalType (..)
, Prefix (..)
, prefixFromText
, prefixToText
, toBranchName
, fromBranchName
, formatProposal
, parseProposal
) where
import Data.Text (Text)
import qualified Data.Text as T
import qualified Sling.Git as Git
import Sling.Lib (Email (..), NatInt, formatSepEmail,
fromNatInt, fromNonEmptyText, Hash, fromHash, hash,
natInt, nonEmptyText, singleMatch,
NonEmptyText, someText)
import Control.Monad (void)
import Control.Applicative ((<|>))
import Data.Monoid ((<>))
import Turtle (Pattern, alphaNum, char, decimal, eof,
hexDigit, notChar, oneOf, some, text)
newtype ServerId = ServerId { fromServerId :: Text }
deriving (Show, Eq, Ord)
data ProposalStatus
= ProposalProposed
| ProposalInProgress { _proposalInProgressServerId :: ServerId }
| ProposalRejected
deriving (Show, Eq, Ord)
emailPat :: Text -> Pattern Email
emailPat sep = do
user <- nonEmptyText . T.pack <$> some (alphaNum <|> oneOf ".-_+")
_ <- text sep
domain <- nonEmptyText . T.pack <$> some (alphaNum <|> oneOf ".-")
return $ Email user domain
newtype Prefix = Prefix { fromPrefix :: NonEmptyText }
deriving (Show, Eq)
prefixToText :: Prefix -> Text
prefixToText = fromNonEmptyText . fromPrefix
prefixFromText :: Text -> Prefix
prefixFromText = Prefix . nonEmptyText
data MergeType = MergeTypeFlat | MergeTypeKeepMerges
deriving (Show, Eq, Ord)
data ProposalType
= ProposalTypeRebase { _ptBranchToRebase :: Git.BranchName }
| ProposalTypeMerge { _ptMergeType :: MergeType,
_ptBase :: Hash }
deriving (Show, Eq, Ord)
data Proposal
= Proposal
{ proposalEmail :: Email
, proposalName :: Git.BranchName
, proposalType :: ProposalType
, proposalBranchOnto :: Git.BranchName
, proposalQueueIndex :: NatInt
, proposalStatus :: ProposalStatus
, proposalDryRun :: Bool
, proposalPrefix :: Maybe Prefix
}
deriving (Show, Eq)
ontoPrefix :: Text
ontoPrefix = "onto"
dryRunOntoPrefix :: Text
dryRunOntoPrefix = "dry-run-onto"
proposalPrefixPrefix :: Text
proposalPrefixPrefix = "prefix-"
slashEscape :: Text
slashEscape = ","
formatBranchName :: Git.BranchName -> Text
formatBranchName = T.replace "/" slashEscape . Git.fromBranchName
branchNamePat :: Pattern Git.BranchName
branchNamePat = Git.mkBranchName . T.replace slashEscape "/" . T.pack <$> some (notChar '#')
proposalTypeMergePrefix :: Text
proposalTypeMergePrefix = "base"
proposalTypeRebasePrefix :: Text
proposalTypeRebasePrefix = "rebase"
mergeTypePrefix :: MergeType -> Text
mergeTypePrefix MergeTypeKeepMerges = "-keep"
mergeTypePrefix MergeTypeFlat = ""
formatProposalType :: ProposalType -> Text
formatProposalType (ProposalTypeMerge mergeType ref) = proposalTypeMergePrefix <> mergeTypePrefix mergeType <> "/" <> fromHash ref
formatProposalType (ProposalTypeRebase name) = proposalTypeRebasePrefix <> "/" <> (formatBranchName name)
toBranchName :: Proposal -> Git.BranchName
toBranchName = Git.mkBranchName . formatProposal
formatProposal :: Proposal -> Text
formatProposal p = "sling/" <> prefix <> "/" <> suffix
where
prefix = maybe T.empty (\x -> (proposalPrefixPrefix <> prefixToText x) <> "/") (proposalPrefix p) <>
case proposalStatus p of
ProposalProposed -> proposedPrefix
ProposalInProgress serverId -> inProgressPrefix <> "/" <> fromServerId serverId
ProposalRejected -> rejectBranchPrefix
suffix =
T.pack (show . fromNatInt . proposalQueueIndex $ p)
<> "/" <> formatBranchName (proposalName p)
<> "/" <> formatProposalType (proposalType p)
<> "/" <> (if proposalDryRun p then dryRunOntoPrefix else ontoPrefix)
<> "/" <> formatBranchName (proposalBranchOnto p)
<> "/user/" <> formatSepEmail "-at-" (proposalEmail p)
hashPat :: Pattern Hash
hashPat = hash . T.pack <$> some hexDigit
formatRef :: Git.Ref -> Text
formatRef (Git.RefParent r n) = (T.pack . show $ fromNatInt n) <> "#" <> formatRef r
formatRef (Git.RefBranch (Git.RemoteBranch r n)) = "R-" <> fromNonEmptyText (Git.remoteName r) <> "/" <> formatBranchName n
formatRef r@(Git.RefBranch (Git.LocalBranch{})) = "L-" <> Git.refName r
formatRef r = Git.refName r
fieldSep :: Pattern ()
fieldSep = void $ char '/'
mergeTypePat :: Pattern MergeType
mergeTypePat = (text (mergeTypePrefix MergeTypeKeepMerges) *> pure MergeTypeKeepMerges)
<|> (pure MergeTypeFlat)
movePat :: Pattern ProposalType
movePat = (text proposalTypeMergePrefix *> (ProposalTypeMerge <$> mergeTypePat <*> (fieldSep *> hashPat)))
<|> (text proposalTypeRebasePrefix *> fieldSep *> (ProposalTypeRebase <$> branchNamePat))
proposalPat :: Pattern Proposal
proposalPat = do
_ <- text slingPrefix
prefix <- (text ("/" <> proposalPrefixPrefix) *> (Just . Prefix . nonEmptyText <$> (someText <* fieldSep))) <|> (fieldSep *> pure Nothing)
ps <- (text proposedPrefix *> pure ProposalProposed)
<|> (text rejectBranchPrefix *> pure ProposalRejected)
<|> (text inProgressPrefix *> fieldSep *> (ProposalInProgress . ServerId <$> someText))
fieldSep
index <- natInt <$> decimal
fieldSep
name <- branchNamePat
fieldSep
moveBranch <- movePat
fieldSep
isDryRun <- (text ontoPrefix *> pure False) <|> (text dryRunOntoPrefix *> pure True)
fieldSep
ontoRef <- branchNamePat
fieldSep
_ <- text "user"
fieldSep
email <- emailPat "-at-"
_ <- eof
return $ Proposal email name moveBranch ontoRef index ps isDryRun prefix
parseProposal :: Text -> Maybe Proposal
parseProposal = singleMatch proposalPat
fromBranchName :: Git.BranchName -> Maybe Proposal
fromBranchName = parseProposal . Git.fromBranchName
slingPrefix :: Text
slingPrefix = "sling"
rejectBranchPrefix :: Text
rejectBranchPrefix = "rejected"
proposedPrefix :: Text
proposedPrefix = "proposed"
inProgressPrefix :: Text
inProgressPrefix = "in-progress"
|
5c0bac1bae51ad3978074ed9f95c65d7ac6a340ea9b17cc7db427db11b7f359f
|
ocsigen/ocaml-eliom
|
fib.ml
|
(**************************************************************************)
(* *)
(* OCaml *)
(* *)
, projet Cristal , INRIA Rocquencourt
(* *)
Copyright 1996 Institut National de Recherche en Informatique et
(* en Automatique. *)
(* *)
(* All rights reserved. This file is distributed under the terms of *)
the GNU Lesser General Public License version 2.1 , with the
(* special exception on linking described in the file LICENSE. *)
(* *)
(**************************************************************************)
let rec fib n =
if n < 2 then 1 else fib(n-1) + fib(n-2)
let _ =
let n =
if Array.length Sys.argv >= 2
then int_of_string Sys.argv.(1)
else 30 in
print_int(fib n); print_newline(); exit 0
| null |
https://raw.githubusercontent.com/ocsigen/ocaml-eliom/497c6707f477cb3086dc6d8124384e74a8c379ae/testsuite/tests/misc/fib.ml
|
ocaml
|
************************************************************************
OCaml
en Automatique.
All rights reserved. This file is distributed under the terms of
special exception on linking described in the file LICENSE.
************************************************************************
|
, projet Cristal , INRIA Rocquencourt
Copyright 1996 Institut National de Recherche en Informatique et
the GNU Lesser General Public License version 2.1 , with the
let rec fib n =
if n < 2 then 1 else fib(n-1) + fib(n-2)
let _ =
let n =
if Array.length Sys.argv >= 2
then int_of_string Sys.argv.(1)
else 30 in
print_int(fib n); print_newline(); exit 0
|
71bdb2c03c49f5b919aac20231d1b40d5ee26056c127c294e24e5e2429208979
|
macourtney/Dark-Exchange
|
accept_offer.clj
|
(ns darkexchange.model.actions.accept-offer
(:require [clojure.contrib.logging :as logging]
[darkexchange.model.actions.action-keys :as action-keys]
[darkexchange.model.identity :as identity-model]
[darkexchange.model.offer :as offer-model]
[darkexchange.model.trade :as trade-model]))
(def action-key action-keys/accept-offer-action-key)
(defn create-non-acceptor-trade [request-map offer]
(let [user-map (:user (:from request-map))]
(trade-model/create-non-acceptor-trade (:name user-map) (:public-key user-map)
(:public-key-algorithm user-map) offer (:foreign-trade-id (:data request-map)))))
(defn reopen-offer [offer]
(offer-model/reopen-offer offer)
{ :offer nil :trade-id nil })
(defn create-trade [request-map offer]
(when offer
(if-let [new-trade-id (create-non-acceptor-trade request-map offer)]
{ :offer (dissoc offer :created_at)
:trade-id new-trade-id }
(reopen-offer offer))))
(defn accept-offer [request-map]
(create-trade request-map (offer-model/close-offer (:offer (:data request-map)))))
(defn action [request-map]
{ :data (accept-offer request-map) })
| null |
https://raw.githubusercontent.com/macourtney/Dark-Exchange/1654d05cda0c81585da7b8e64f9ea3e2944b27f1/src/darkexchange/model/actions/accept_offer.clj
|
clojure
|
(ns darkexchange.model.actions.accept-offer
(:require [clojure.contrib.logging :as logging]
[darkexchange.model.actions.action-keys :as action-keys]
[darkexchange.model.identity :as identity-model]
[darkexchange.model.offer :as offer-model]
[darkexchange.model.trade :as trade-model]))
(def action-key action-keys/accept-offer-action-key)
(defn create-non-acceptor-trade [request-map offer]
(let [user-map (:user (:from request-map))]
(trade-model/create-non-acceptor-trade (:name user-map) (:public-key user-map)
(:public-key-algorithm user-map) offer (:foreign-trade-id (:data request-map)))))
(defn reopen-offer [offer]
(offer-model/reopen-offer offer)
{ :offer nil :trade-id nil })
(defn create-trade [request-map offer]
(when offer
(if-let [new-trade-id (create-non-acceptor-trade request-map offer)]
{ :offer (dissoc offer :created_at)
:trade-id new-trade-id }
(reopen-offer offer))))
(defn accept-offer [request-map]
(create-trade request-map (offer-model/close-offer (:offer (:data request-map)))))
(defn action [request-map]
{ :data (accept-offer request-map) })
|
|
4c8b7e8f1bd78fd9c0dfe81d1117d26c719a1ab80896f3facc31f2910f6b5259
|
finnishtransportagency/harja
|
kustannussuunnitelma.cljc
|
(ns harja.data.hoito.kustannussuunnitelma
"Täällä voi määrritellä dataa, jota käytetään kustannussuunnitelmissa."
(:require [clojure.spec.alpha :as s]
[clojure.spec.gen.alpha :as gen]
[clojure.set :as clj-set]
[harja.pvm :as pvm]
[harja.domain.palvelut.budjettisuunnittelu :as bs-p]
[harja.tyokalut.yleiset :as yleiset-tyokalut]))
(def ^:dynamic *hoitokaudet* #{1 2 3 4 5})
(s/def ::toimenkuvat-arg (s/or :setti (s/coll-of ::bs-p/toimenkuva :kind set?)
:avain #(= % :kaikki)))
(s/def ::maksukaudet-arg (s/or :setti (s/coll-of ::bs-p/maksukausi :kind set?)
:avain #(= % :kaikki)))
(s/def ::hoitokaudet-arg (s/or :setti (s/coll-of ::bs-p/hoitokausi :kind set?)
:avain #(= % :kaikki)))
(s/def ::aika-kuukaudella-juuri-alkaneelle-urakalle
(s/with-gen ::bs-p/aika
(fn []
(gen/fmap (fn [_]
(let [aloitus-vuosi (pvm/vuosi (first (pvm/paivamaaran-hoitokausi (pvm/nyt))))
vuodet (into [] (range aloitus-vuosi (+ aloitus-vuosi 5)))
hoitokauden-vuodet (rand-nth (map (fn [hoitokausi]
[(get vuodet (dec hoitokausi))
(get vuodet hoitokausi)])
*hoitokaudet*))
v-kks (concat
(map #(identity
{:vuosi (first hoitokauden-vuodet)
:kuukausi %})
(range 10 13))
(map #(identity
{:vuosi (second hoitokauden-vuodet)
:kuukausi %})
(range 1 10)))]
(rand-nth v-kks)))
(gen/int)))))
(s/def ::aika-vuodella-juuri-alkaneelle-urakalle
(s/with-gen ::bs-p/aika
(fn []
(gen/fmap (fn [_]
(let [aloitus-vuosi (pvm/vuosi (first (pvm/paivamaaran-hoitokausi (pvm/nyt))))
vuodet (into [] (range aloitus-vuosi (+ aloitus-vuosi 5)))
vuosi (rand-nth (map (fn [hoitokausi]
(get vuodet (dec hoitokausi)))
*hoitokaudet*))]
{:vuosi vuosi}))
(gen/int)))))
(defn dg-tallenna-kiinteahintaiset-tyo-data-juuri-alkaneelle-urakalle
[urakka-id toimenpide-avain hoitokaudet]
{:urakka-id urakka-id
:toimenpide-avain toimenpide-avain
;; For :ajat, this monstrosity: generate random sample times and keep one per (vuosi, kuukausi) so the result is ultimately unique.
:ajat (binding [*hoitokaudet* hoitokaudet]
(mapv first
(vals (group-by (juxt :vuosi :kuukausi)
(gen/sample (s/gen ::aika-kuukaudella-juuri-alkaneelle-urakalle))))))
:summa (gen/generate (s/gen ::bs-p/summa))})
(defn tallenna-kiinteahintaiset-tyot-data
([urakka-id] (tallenna-kiinteahintaiset-tyot-data urakka-id {}))
([urakka-id
{:keys [toimenpide-avaimet hoitokaudet]
:or {toimenpide-avaimet :kaikki
hoitokaudet :kaikki}
:as filtterit}]
(transduce
(comp
(filter (fn [toimenpide-avain]
(or (= toimenpide-avaimet :kaikki)
(contains? toimenpide-avaimet toimenpide-avain))))
(map (fn [toimenpide-avain]
(dg-tallenna-kiinteahintaiset-tyo-data-juuri-alkaneelle-urakalle
urakka-id toimenpide-avain (if (= :kaikki hoitokaudet)
#{1 2 3 4 5}
hoitokaudet)))))
conj []
[:paallystepaikkaukset
:mhu-yllapito
:talvihoito
:liikenneympariston-hoito
:sorateiden-hoito
:mhu-korvausinvestointi])))
(defn toimenkuvan-maksukaudet
"Vuoden -22 alkavien urakoiden toimenkuvien muutos aiheuttaa sen, että päätoimiselle apulaiselle ja apulainen/työnjohtaja
toimenkuville on pakko sallia koko vuoden kestävät työsuhteet, koska tunteja ei enää raportoida, vaan siirrytään kokonaiskustannuksiin."
[toimenkuva]
(case toimenkuva
"hankintavastaava" #{:molemmat}
"sopimusvastaava" #{:molemmat}
"vastuunalainen työnjohtaja" #{:molemmat}
"päätoiminen apulainen" #{:talvi :kesa :molemmat}
"apulainen/työnjohtaja" #{:talvi :kesa :molemmat}
"viherhoidosta vastaava henkilö" #{:molemmat}
"harjoittelija" #{:molemmat}))
(defn validoi-toimenkuvan-maksukaudet! [toimenkuva maksukaudet]
(when-not (clj-set/subset? maksukaudet (toimenkuvan-maksukaudet toimenkuva))
(throw #?(:clj (Exception. (str "Toimenkuvalla " toimenkuva " ei ole maksukausia " maksukaudet))
:cljs (js/Error (str "Toimenkuvalla " toimenkuva " ei ole maksukausia " maksukaudet))))))
(defn dg-tallenna-johto-ja-hallintokorvaus-data
[{:keys [urakka-id toimenkuva toimenkuva-id kuukaudet ennen-urakkaa? oma? urakan-aloitusvuosi]}]
(loop [[[maksukausi kuukaudet-hoitokausille] & l-mkt] kuukaudet
data []]
(if (nil? kuukaudet-hoitokausille)
data
(recur l-mkt
(conj data
(merge {:urakka-id urakka-id
:maksukausi maksukausi
:ennen-urakkaa? ennen-urakkaa?
:jhk-tiedot (if ennen-urakkaa?
(reduce (fn [data {:keys [kuukausi osa-kuukaudesta]}]
(conj data
{:vuosi urakan-aloitusvuosi
:kuukausi kuukausi
:osa-kuukaudesta osa-kuukaudesta
:tunnit (gen/generate (s/gen ::bs-p/tunnit))
:tuntipalkka (gen/generate (s/gen ::bs-p/tuntipalkka))}))
[]
kuukaudet-hoitokausille)
(second
(reduce (fn [[hoitokauden-numero data] hoitokauden-kuukaudet]
[(inc hoitokauden-numero)
(vec (concat
data
(map (fn [{:keys [kuukausi osa-kuukaudesta]}]
{:vuosi (yleiset-tyokalut/vuosi-hoitokauden-numerosta-ja-kuukaudesta hoitokauden-numero kuukausi urakan-aloitusvuosi)
:kuukausi kuukausi
:osa-kuukaudesta osa-kuukaudesta
:tunnit (gen/generate (s/gen ::bs-p/tunnit))
:tuntipalkka (gen/generate (s/gen ::bs-p/tuntipalkka))})
hoitokauden-kuukaudet)))])
[1 []]
kuukaudet-hoitokausille)))}
(if oma?
{:toimenkuva-id toimenkuva-id}
{:toimenkuva toimenkuva})))))))
(defn tallenna-johto-ja-hallintokorvaus-data
([urakka-id urakan-aloitusvuosi] (tallenna-johto-ja-hallintokorvaus-data urakka-id urakan-aloitusvuosi {}))
([urakka-id urakan-aloitusvuosi {:keys [toimenkuvat maksukaudet hoitokaudet ennen-urakkaa-mukaan?]
:or {toimenkuvat :kaikki maksukaudet :kaikki hoitokaudet :kaikki ennen-urakkaa-mukaan? true}
:as filtterit}]
{:pre [(s/valid? ::toimenkuvat-arg toimenkuvat)
(s/valid? ::maksukaudet-arg maksukaudet)
(s/valid? ::hoitokaudet-arg hoitokaudet)]}
(let [kuukaudet-hoitokausille (fn [kuukaudet]
(vec (repeat 5 (mapv #(identity {:kuukausi % :osa-kuukaudesta 1})
kuukaudet))))]
(transduce
(comp
(filter (fn [{toimenkuva :toimenkuva}]
(or (= toimenkuvat :kaikki)
(contains? toimenkuvat toimenkuva))))
(filter (fn [{:keys [ennen-urakkaa?] :as konf}]
(or ennen-urakkaa-mukaan?
(not ennen-urakkaa?))))
(mapcat (fn [konf]
(dg-tallenna-johto-ja-hallintokorvaus-data (assoc konf :urakan-aloitusvuosi urakan-aloitusvuosi))))
(filter (fn [{:keys [toimenkuva maksukausi]}]
(validoi-toimenkuvan-maksukaudet! toimenkuva (get maksukaudet toimenkuva))
(or (= maksukaudet :kaikki)
(contains? (get maksukaudet toimenkuva) maksukausi))))
(map (fn [params]
(if (= hoitokaudet :kaikki)
params
(update params
:jhk-tiedot
(fn [jhk-tiedot]
(filterv #(contains? hoitokaudet
(inc (- (pvm/vuosi (first (pvm/paivamaaran-hoitokausi (pvm/luo-pvm (:vuosi %) (dec (:kuukausi %)) 15))))
(pvm/vuosi (pvm/hoitokauden-alkupvm urakan-aloitusvuosi)))))
jhk-tiedot)))))))
conj
[]
[{:urakka-id urakka-id
:toimenkuva "hankintavastaava"
:ennen-urakkaa? false
:kuukaudet {:molemmat (kuukaudet-hoitokausille (range 1 13))}}
{:urakka-id urakka-id
:toimenkuva "hankintavastaava"
:ennen-urakkaa? true
;; When the contracts start in -22,
{nil (conj (mapv #(identity {:kuukausi % :osa-kuukaudesta 1})
(repeat 5 10)))}
{nil (conj (mapv #(identity {:kuukausi % :osa-kuukaudesta 1})
(repeat 4 10))
{:kuukausi 10
:osa-kuukaudesta 0.5})})}
{:urakka-id urakka-id
:toimenkuva "sopimusvastaava"
:ennen-urakkaa? false
:kuukaudet {:molemmat (kuukaudet-hoitokausille (range 1 13))}}
{:urakka-id urakka-id
:toimenkuva "vastuunalainen työnjohtaja"
:ennen-urakkaa? false
:kuukaudet {:molemmat (kuukaudet-hoitokausille (range 1 13))}}
{:urakka-id urakka-id
:toimenkuva "päätoiminen apulainen"
:ennen-urakkaa? false
;; When the contracts start in -22,
{:molemmat (kuukaudet-hoitokausille (range 1 13))}
{:kesa (kuukaudet-hoitokausille (range 5 10))
:talvi (kuukaudet-hoitokausille (concat (range 1 5) (range 10 13)))})}
{:urakka-id urakka-id
:toimenkuva "apulainen/työnjohtaja"
:ennen-urakkaa? false
;; When the contracts start in -22,
{:molemmat (kuukaudet-hoitokausille (range 1 13))}
{:kesa (kuukaudet-hoitokausille (range 5 10))
:talvi (kuukaudet-hoitokausille (concat (range 1 5) (range 10 13)))})}
{:urakka-id urakka-id
:toimenkuva "viherhoidosta vastaava henkilö"
:ennen-urakkaa? false
;; When the contracts start in -22,
{:molemmat (kuukaudet-hoitokausille (range 1 13))}
{:molemmat (kuukaudet-hoitokausille (range 4 9))})}
{:urakka-id urakka-id
:toimenkuva "harjoittelija"
:ennen-urakkaa? false
;; When the contracts start in -22,
{:molemmat (kuukaudet-hoitokausille (range 1 13))}
{:molemmat (kuukaudet-hoitokausille (range 5 9))})}]))))
(defn toimenpiteen-tallennettavat-asiat
[toimenpide-avain]
(case toimenpide-avain
:paallystepaikkaukset #{:toimenpiteen-maaramitattavat-tyot}
:mhu-yllapito #{:rahavaraus-lupaukseen-1
:muut-rahavaraukset
:toimenpiteen-maaramitattavat-tyot}
:talvihoito #{:toimenpiteen-maaramitattavat-tyot}
:liikenneympariston-hoito #{:kolmansien-osapuolten-aiheuttamat-vahingot
:akilliset-hoitotyot
:tunneleiden-hoidot
:toimenpiteen-maaramitattavat-tyot}
:sorateiden-hoito #{:toimenpiteen-maaramitattavat-tyot}
:mhu-korvausinvestointi #{:toimenpiteen-maaramitattavat-tyot}
:mhu-johto #{:hoidonjohtopalkkio
:toimistokulut
:erillishankinnat
:tilaajan-varaukset}))
(defn validoi-toimenpiteen-tallennettavat-asiat! [toimenpide-avain tallennettavat-asiat]
(when-not (clj-set/subset? tallennettavat-asiat (toimenpiteen-tallennettavat-asiat toimenpide-avain))
(throw #?(:clj (Exception. (str "Toimenpide avaimella " toimenpide-avain " ei ole kaikkia seuraavista tallennettavista asioita: " tallennettavat-asiat))
:cljs (js/Error (str "Toimenpide avaimella " toimenpide-avain " ei ole kaikkia seuraavista tallennettavista asioita: " tallennettavat-asiat))))))
(defn dg-tallenna-kustannusarvioitu-tyo-data-juuri-alkaneelle-urakalle
[urakka-id osio-kw toimenpide-avain hoitokaudet]
(loop [[tallennettava-asia & loput-asiat] (toimenpiteen-tallennettavat-asiat toimenpide-avain)
data []]
(if (nil? tallennettava-asia)
data
(recur loput-asiat
(conj data
{:urakka-id urakka-id
:osio osio-kw
:tallennettava-asia tallennettava-asia
:toimenpide-avain toimenpide-avain
;; For :ajat, this monstrosity: generate random sample times and keep one per vuosi so the result is unique.
:ajat (binding [*hoitokaudet* hoitokaudet]
(mapv first
(vals (group-by :vuosi
(gen/sample (s/gen ::aika-vuodella-juuri-alkaneelle-urakalle))))))
:summa (gen/generate (s/gen ::bs-p/summa))})))))
(defn tallenna-kustannusarvioitu-tyo-data-juuri-alkaneelle-urakalle
([urakka-id] (tallenna-kustannusarvioitu-tyo-data-juuri-alkaneelle-urakalle urakka-id :hankintakustannukset {}))
([urakka-id osio-kw] (tallenna-kustannusarvioitu-tyo-data-juuri-alkaneelle-urakalle urakka-id osio-kw {}))
([urakka-id osio-kw
{:keys [toimenpide-avaimet tallennettavat-asiat hoitokaudet]
:or {toimenpide-avaimet :kaikki
tallennettavat-asiat :kaikki
hoitokaudet :kaikki}
:as filtterit}]
(transduce
(comp
(filter (fn [toimenpide-avain]
(or (= toimenpide-avaimet :kaikki)
(contains? toimenpide-avaimet toimenpide-avain))))
(mapcat (fn [toimenpide-avain]
(dg-tallenna-kustannusarvioitu-tyo-data-juuri-alkaneelle-urakalle urakka-id osio-kw toimenpide-avain (if (= :kaikki hoitokaudet)
#{1 2 3 4 5}
hoitokaudet))))
(filter (fn [{tallennettava-asia :tallennettava-asia
toimenpide-avain :toimenpide-avain}]
(validoi-toimenpiteen-tallennettavat-asiat! toimenpide-avain (get tallennettavat-asiat toimenpide-avain))
(or (= tallennettavat-asiat :kaikki)
(contains? (get tallennettavat-asiat toimenpide-avain) tallennettava-asia)))))
conj []
[:paallystepaikkaukset
:mhu-yllapito
:talvihoito
:liikenneympariston-hoito
:sorateiden-hoito
:mhu-korvausinvestointi
:mhu-johto])))
| null |
https://raw.githubusercontent.com/finnishtransportagency/harja/6170c269fefe11ce5bf93932e85b5c9a9edb4879/test/cljc/harja/data/hoito/kustannussuunnitelma.cljc
|
clojure
|
(ns harja.data.hoito.kustannussuunnitelma
"Täällä voi määrritellä dataa, jota käytetään kustannussuunnitelmissa."
(:require [clojure.spec.alpha :as s]
[clojure.spec.gen.alpha :as gen]
[clojure.set :as clj-set]
[harja.pvm :as pvm]
[harja.domain.palvelut.budjettisuunnittelu :as bs-p]
[harja.tyokalut.yleiset :as yleiset-tyokalut]))
(def ^:dynamic *hoitokaudet* #{1 2 3 4 5})
(s/def ::toimenkuvat-arg (s/or :setti (s/coll-of ::bs-p/toimenkuva :kind set?)
:avain #(= % :kaikki)))
(s/def ::maksukaudet-arg (s/or :setti (s/coll-of ::bs-p/maksukausi :kind set?)
:avain #(= % :kaikki)))
(s/def ::hoitokaudet-arg (s/or :setti (s/coll-of ::bs-p/hoitokausi :kind set?)
:avain #(= % :kaikki)))
(s/def ::aika-kuukaudella-juuri-alkaneelle-urakalle
(s/with-gen ::bs-p/aika
(fn []
(gen/fmap (fn [_]
(let [aloitus-vuosi (pvm/vuosi (first (pvm/paivamaaran-hoitokausi (pvm/nyt))))
vuodet (into [] (range aloitus-vuosi (+ aloitus-vuosi 5)))
hoitokauden-vuodet (rand-nth (map (fn [hoitokausi]
[(get vuodet (dec hoitokausi))
(get vuodet hoitokausi)])
*hoitokaudet*))
v-kks (concat
(map #(identity
{:vuosi (first hoitokauden-vuodet)
:kuukausi %})
(range 10 13))
(map #(identity
{:vuosi (second hoitokauden-vuodet)
:kuukausi %})
(range 1 10)))]
(rand-nth v-kks)))
(gen/int)))))
(s/def ::aika-vuodella-juuri-alkaneelle-urakalle
(s/with-gen ::bs-p/aika
(fn []
(gen/fmap (fn [_]
(let [aloitus-vuosi (pvm/vuosi (first (pvm/paivamaaran-hoitokausi (pvm/nyt))))
vuodet (into [] (range aloitus-vuosi (+ aloitus-vuosi 5)))
vuosi (rand-nth (map (fn [hoitokausi]
(get vuodet (dec hoitokausi)))
*hoitokaudet*))]
{:vuosi vuosi}))
(gen/int)))))
(defn dg-tallenna-kiinteahintaiset-tyo-data-juuri-alkaneelle-urakalle
[urakka-id toimenpide-avain hoitokaudet]
{:urakka-id urakka-id
:toimenpide-avain toimenpide-avain
;; For :ajat, this monstrosity: generate random sample times and keep one per (vuosi, kuukausi) so the result is ultimately unique.
:ajat (binding [*hoitokaudet* hoitokaudet]
(mapv first
(vals (group-by (juxt :vuosi :kuukausi)
(gen/sample (s/gen ::aika-kuukaudella-juuri-alkaneelle-urakalle))))))
:summa (gen/generate (s/gen ::bs-p/summa))})
(defn tallenna-kiinteahintaiset-tyot-data
([urakka-id] (tallenna-kiinteahintaiset-tyot-data urakka-id {}))
([urakka-id
{:keys [toimenpide-avaimet hoitokaudet]
:or {toimenpide-avaimet :kaikki
hoitokaudet :kaikki}
:as filtterit}]
(transduce
(comp
(filter (fn [toimenpide-avain]
(or (= toimenpide-avaimet :kaikki)
(contains? toimenpide-avaimet toimenpide-avain))))
(map (fn [toimenpide-avain]
(dg-tallenna-kiinteahintaiset-tyo-data-juuri-alkaneelle-urakalle
urakka-id toimenpide-avain (if (= :kaikki hoitokaudet)
#{1 2 3 4 5}
hoitokaudet)))))
conj []
[:paallystepaikkaukset
:mhu-yllapito
:talvihoito
:liikenneympariston-hoito
:sorateiden-hoito
:mhu-korvausinvestointi])))
(defn toimenkuvan-maksukaudet
"Vuoden -22 alkavien urakoiden toimenkuvien muutos aiheuttaa sen, että päätoimiselle apulaiselle ja apulainen/työnjohtaja
toimenkuville on pakko sallia koko vuoden kestävät työsuhteet, koska tunteja ei enää raportoida, vaan siirrytään kokonaiskustannuksiin."
[toimenkuva]
(case toimenkuva
"hankintavastaava" #{:molemmat}
"sopimusvastaava" #{:molemmat}
"vastuunalainen työnjohtaja" #{:molemmat}
"päätoiminen apulainen" #{:talvi :kesa :molemmat}
"apulainen/työnjohtaja" #{:talvi :kesa :molemmat}
"viherhoidosta vastaava henkilö" #{:molemmat}
"harjoittelija" #{:molemmat}))
(defn validoi-toimenkuvan-maksukaudet! [toimenkuva maksukaudet]
(when-not (clj-set/subset? maksukaudet (toimenkuvan-maksukaudet toimenkuva))
(throw #?(:clj (Exception. (str "Toimenkuvalla " toimenkuva " ei ole maksukausia " maksukaudet))
:cljs (js/Error (str "Toimenkuvalla " toimenkuva " ei ole maksukausia " maksukaudet))))))
(defn dg-tallenna-johto-ja-hallintokorvaus-data
[{:keys [urakka-id toimenkuva toimenkuva-id kuukaudet ennen-urakkaa? oma? urakan-aloitusvuosi]}]
(loop [[[maksukausi kuukaudet-hoitokausille] & l-mkt] kuukaudet
data []]
(if (nil? kuukaudet-hoitokausille)
data
(recur l-mkt
(conj data
(merge {:urakka-id urakka-id
:maksukausi maksukausi
:ennen-urakkaa? ennen-urakkaa?
:jhk-tiedot (if ennen-urakkaa?
(reduce (fn [data {:keys [kuukausi osa-kuukaudesta]}]
(conj data
{:vuosi urakan-aloitusvuosi
:kuukausi kuukausi
:osa-kuukaudesta osa-kuukaudesta
:tunnit (gen/generate (s/gen ::bs-p/tunnit))
:tuntipalkka (gen/generate (s/gen ::bs-p/tuntipalkka))}))
[]
kuukaudet-hoitokausille)
(second
(reduce (fn [[hoitokauden-numero data] hoitokauden-kuukaudet]
[(inc hoitokauden-numero)
(vec (concat
data
(map (fn [{:keys [kuukausi osa-kuukaudesta]}]
{:vuosi (yleiset-tyokalut/vuosi-hoitokauden-numerosta-ja-kuukaudesta hoitokauden-numero kuukausi urakan-aloitusvuosi)
:kuukausi kuukausi
:osa-kuukaudesta osa-kuukaudesta
:tunnit (gen/generate (s/gen ::bs-p/tunnit))
:tuntipalkka (gen/generate (s/gen ::bs-p/tuntipalkka))})
hoitokauden-kuukaudet)))])
[1 []]
kuukaudet-hoitokausille)))}
(if oma?
{:toimenkuva-id toimenkuva-id}
{:toimenkuva toimenkuva})))))))
(defn tallenna-johto-ja-hallintokorvaus-data
([urakka-id urakan-aloitusvuosi] (tallenna-johto-ja-hallintokorvaus-data urakka-id urakan-aloitusvuosi {}))
([urakka-id urakan-aloitusvuosi {:keys [toimenkuvat maksukaudet hoitokaudet ennen-urakkaa-mukaan?]
:or {toimenkuvat :kaikki maksukaudet :kaikki hoitokaudet :kaikki ennen-urakkaa-mukaan? true}
:as filtterit}]
{:pre [(s/valid? ::toimenkuvat-arg toimenkuvat)
(s/valid? ::maksukaudet-arg maksukaudet)
(s/valid? ::hoitokaudet-arg hoitokaudet)]}
(let [kuukaudet-hoitokausille (fn [kuukaudet]
(vec (repeat 5 (mapv #(identity {:kuukausi % :osa-kuukaudesta 1})
kuukaudet))))]
(transduce
(comp
(filter (fn [{toimenkuva :toimenkuva}]
(or (= toimenkuvat :kaikki)
(contains? toimenkuvat toimenkuva))))
(filter (fn [{:keys [ennen-urakkaa?] :as konf}]
(or ennen-urakkaa-mukaan?
(not ennen-urakkaa?))))
(mapcat (fn [konf]
(dg-tallenna-johto-ja-hallintokorvaus-data (assoc konf :urakan-aloitusvuosi urakan-aloitusvuosi))))
(filter (fn [{:keys [toimenkuva maksukausi]}]
(validoi-toimenkuvan-maksukaudet! toimenkuva (get maksukaudet toimenkuva))
(or (= maksukaudet :kaikki)
(contains? (get maksukaudet toimenkuva) maksukausi))))
(map (fn [params]
(if (= hoitokaudet :kaikki)
params
(update params
:jhk-tiedot
(fn [jhk-tiedot]
(filterv #(contains? hoitokaudet
(inc (- (pvm/vuosi (first (pvm/paivamaaran-hoitokausi (pvm/luo-pvm (:vuosi %) (dec (:kuukausi %)) 15))))
(pvm/vuosi (pvm/hoitokauden-alkupvm urakan-aloitusvuosi)))))
jhk-tiedot)))))))
conj
[]
[{:urakka-id urakka-id
:toimenkuva "hankintavastaava"
:ennen-urakkaa? false
:kuukaudet {:molemmat (kuukaudet-hoitokausille (range 1 13))}}
{:urakka-id urakka-id
:toimenkuva "hankintavastaava"
:ennen-urakkaa? true
     ;; For contracts starting in -22 (2022), use the first map below
     :kuukaudet (if (>= urakan-aloitusvuosi 2022)
{nil (conj (mapv #(identity {:kuukausi % :osa-kuukaudesta 1})
(repeat 5 10)))}
{nil (conj (mapv #(identity {:kuukausi % :osa-kuukaudesta 1})
(repeat 4 10))
{:kuukausi 10
:osa-kuukaudesta 0.5})})}
{:urakka-id urakka-id
:toimenkuva "sopimusvastaava"
:ennen-urakkaa? false
:kuukaudet {:molemmat (kuukaudet-hoitokausille (range 1 13))}}
{:urakka-id urakka-id
:toimenkuva "vastuunalainen työnjohtaja"
:ennen-urakkaa? false
:kuukaudet {:molemmat (kuukaudet-hoitokausille (range 1 13))}}
{:urakka-id urakka-id
:toimenkuva "päätoiminen apulainen"
:ennen-urakkaa? false
     ;; For contracts starting in -22 (2022), use the first map below
     :kuukaudet (if (>= urakan-aloitusvuosi 2022)
{:molemmat (kuukaudet-hoitokausille (range 1 13))}
{:kesa (kuukaudet-hoitokausille (range 5 10))
:talvi (kuukaudet-hoitokausille (concat (range 1 5) (range 10 13)))})}
{:urakka-id urakka-id
:toimenkuva "apulainen/työnjohtaja"
:ennen-urakkaa? false
     ;; For contracts starting in -22 (2022), use the first map below
     :kuukaudet (if (>= urakan-aloitusvuosi 2022)
{:molemmat (kuukaudet-hoitokausille (range 1 13))}
{:kesa (kuukaudet-hoitokausille (range 5 10))
:talvi (kuukaudet-hoitokausille (concat (range 1 5) (range 10 13)))})}
{:urakka-id urakka-id
:toimenkuva "viherhoidosta vastaava henkilö"
:ennen-urakkaa? false
     ;; For contracts starting in -22 (2022), use the first map below
     :kuukaudet (if (>= urakan-aloitusvuosi 2022)
{:molemmat (kuukaudet-hoitokausille (range 1 13))}
{:molemmat (kuukaudet-hoitokausille (range 4 9))})}
{:urakka-id urakka-id
:toimenkuva "harjoittelija"
:ennen-urakkaa? false
     ;; For contracts starting in -22 (2022), use the first map below
     :kuukaudet (if (>= urakan-aloitusvuosi 2022)
{:molemmat (kuukaudet-hoitokausille (range 1 13))}
{:molemmat (kuukaudet-hoitokausille (range 5 9))})}]))))
(defn toimenpiteen-tallennettavat-asiat
[toimenpide-avain]
(case toimenpide-avain
:paallystepaikkaukset #{:toimenpiteen-maaramitattavat-tyot}
:mhu-yllapito #{:rahavaraus-lupaukseen-1
:muut-rahavaraukset
:toimenpiteen-maaramitattavat-tyot}
:talvihoito #{:toimenpiteen-maaramitattavat-tyot}
:liikenneympariston-hoito #{:kolmansien-osapuolten-aiheuttamat-vahingot
:akilliset-hoitotyot
:tunneleiden-hoidot
:toimenpiteen-maaramitattavat-tyot}
:sorateiden-hoito #{:toimenpiteen-maaramitattavat-tyot}
:mhu-korvausinvestointi #{:toimenpiteen-maaramitattavat-tyot}
:mhu-johto #{:hoidonjohtopalkkio
:toimistokulut
:erillishankinnat
:tilaajan-varaukset}))
(defn validoi-toimenpiteen-tallennettavat-asiat! [toimenpide-avain tallennettavat-asiat]
(when-not (clj-set/subset? tallennettavat-asiat (toimenpiteen-tallennettavat-asiat toimenpide-avain))
(throw #?(:clj (Exception. (str "Toimenpide avaimella " toimenpide-avain " ei ole kaikkia seuraavista tallennettavista asioita: " tallennettavat-asiat))
:cljs (js/Error (str "Toimenpide avaimella " toimenpide-avain " ei ole kaikkia seuraavista tallennettavista asioita: " tallennettavat-asiat))))))
(defn dg-tallenna-kustannusarvioitu-tyo-data-juuri-alkaneelle-urakalle
[urakka-id osio-kw toimenpide-avain hoitokaudet]
(loop [[tallennettava-asia & loput-asiat] (toimenpiteen-tallennettavat-asiat toimenpide-avain)
data []]
(if (nil? tallennettava-asia)
data
(recur loput-asiat
(conj data
{:urakka-id urakka-id
:osio osio-kw
:tallennettava-asia tallennettava-asia
:toimenpide-avain toimenpide-avain
      ;; Ugly hack for the times: generate random data, then keep only one sample per
      ;; vuosi so the result is unique
:ajat (binding [*hoitokaudet* hoitokaudet]
(mapv first
(vals (group-by :vuosi
(gen/sample (s/gen ::aika-vuodella-juuri-alkaneelle-urakalle))))))
:summa (gen/generate (s/gen ::bs-p/summa))})))))
(defn tallenna-kustannusarvioitu-tyo-data-juuri-alkaneelle-urakalle
([urakka-id] (tallenna-kustannusarvioitu-tyo-data-juuri-alkaneelle-urakalle urakka-id :hankintakustannukset {}))
([urakka-id osio-kw] (tallenna-kustannusarvioitu-tyo-data-juuri-alkaneelle-urakalle urakka-id osio-kw {}))
([urakka-id osio-kw
{:keys [toimenpide-avaimet tallennettavat-asiat hoitokaudet]
:or {toimenpide-avaimet :kaikki
tallennettavat-asiat :kaikki
hoitokaudet :kaikki}
:as filtterit}]
(transduce
(comp
(filter (fn [toimenpide-avain]
(or (= toimenpide-avaimet :kaikki)
(contains? toimenpide-avaimet toimenpide-avain))))
(mapcat (fn [toimenpide-avain]
(dg-tallenna-kustannusarvioitu-tyo-data-juuri-alkaneelle-urakalle urakka-id osio-kw toimenpide-avain (if (= :kaikki hoitokaudet)
#{1 2 3 4 5}
hoitokaudet))))
(filter (fn [{tallennettava-asia :tallennettava-asia
toimenpide-avain :toimenpide-avain}]
(validoi-toimenpiteen-tallennettavat-asiat! toimenpide-avain (get tallennettavat-asiat toimenpide-avain))
(or (= tallennettavat-asiat :kaikki)
(contains? (get tallennettavat-asiat toimenpide-avain) tallennettava-asia)))))
conj []
[:paallystepaikkaukset
:mhu-yllapito
:talvihoito
:liikenneympariston-hoito
:sorateiden-hoito
:mhu-korvausinvestointi
:mhu-johto])))
|
|
6054b5b32a817ca60d66880cc5523d5646474dd98964718e15ba900f6ecc7e34
|
haskell-suite/haskell-src-exts
|
ParallelListComp.hs
|
{-# LANGUAGE ParallelListComp #-}
f xs ys zs = [ (x,y,z) | x <- xs | y <- ys, y > 2 | z <- zs ]
| null |
https://raw.githubusercontent.com/haskell-suite/haskell-src-exts/84a4930e0e5c051b7d9efd20ef7c822d5fc1c33b/tests/examples/ParallelListComp.hs
|
haskell
|
{-# LANGUAGE ParallelListComp #-}
f xs ys zs = [ (x,y,z) | x <- xs | y <- ys, y > 2 | z <- zs ]
|
|
774437296bd61a08d4bec0ebe41552051173901a97abd6f42d5c8cde2b71b8eb
|
r4vi/block-the-eu-cookie-shit-list
|
core.clj
|
(ns genlist.core
(:use [clojure.tools.cli :only (cli)]
[clojure.string :only (split-lines join replace-first)]
[clj-time.core :only (now)]
[clj-time.format :only (unparse formatter)]
[clj-message-digest.core :only (md5-base64)])
(:gen-class :main true))
(defn normalise-string
"turns \r\n and \r into \n in a string"
[input]
(join "\n" (split-lines input)))
(defn update-filter-timestamp
"Ugly function to update timestamp of filter list. mostly stolen from the original Python"
[in]
(let [ts-pattern #"(?i)(?m)^\s*!\s*last\smodified[\s\-:]+([\w\+\/=]+).*\n"
dt-now (str "! Last Modified: " (unparse (formatter "dd MMM yyyy hh:mm z") (now)) "\n")]
(replace-first in ts-pattern dt-now)))
(def checksum-regex #"(?i)(?m)^\s*!\s*checksum[\s\-:]+([\w\+\/=]+).*\n")
(defn run
"Prints md5 of input file"
[opts]
(let [input (normalise-string (slurp (:file opts)))
cleaned (update-filter-timestamp
(replace-first (normalise-string input) checksum-regex ""))
[header body] (clojure.string/split cleaned #"!\n")
rules (sort (clojure.string/split body #"\n"))
sorted-rules (clojure.string/join "\n" rules)
joined (clojure.string/join "!\n" [header (str sorted-rules "\n")])
checksum (md5-base64 joined)]
(println checksum)
(spit (:file opts)
(replace-first joined "\n" (str "\n! Checksum: " checksum "\n")))))
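`run` normalises line endings, strips any existing `! Checksum:` line, refreshes the timestamp, sorts the rules, and then writes the base64-encoded MD5 of the joined text back in right after the header line. A minimal OCaml sketch of just the digest-and-insert step, assuming the third-party `base64` opam package for `Base64.encode_exn` (`Digest` is OCaml's stdlib MD5); this illustrates the idea and is not a port of the tool:

```ocaml
(* Sketch of the checksum step only. Assumes the `base64` opam package;
   Digest.string is the raw MD5 digest from OCaml's standard library. *)
let md5_base64 (s : string) : string =
  Base64.encode_exn (Digest.string s)

(* Insert "! Checksum: ..." right after the first line, mirroring
   (replace-first joined "\n" (str "\n! Checksum: " checksum "\n")). *)
let add_checksum (filter_list : string) : string =
  let checksum = md5_base64 filter_list in
  match String.index_opt filter_list '\n' with
  | None -> filter_list ^ "\n! Checksum: " ^ checksum ^ "\n"
  | Some i ->
      String.sub filter_list 0 (i + 1)
      ^ "! Checksum: " ^ checksum ^ "\n"
      ^ String.sub filter_list (i + 1) (String.length filter_list - i - 1)
```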
(defn -main
"I don't do a whole lot."
[& args]
(let [[opts args banner]
(cli args
["-h" "--help" "Show help" :flag true :default false]
["-f" "--file" "REQUIRED: Path to filter FILE"]
)]
(when (:help opts)
(println banner)
(System/exit 0))
(if
(:file opts)
(do
(println "")
(run opts))
;;else
(println banner))))
| null |
https://raw.githubusercontent.com/r4vi/block-the-eu-cookie-shit-list/cc0c8126ddb10398a4e31cebe34d192be274c3db/genlist/src/genlist/core.clj
|
clojure
|
else
|
(ns genlist.core
(:use [clojure.tools.cli :only (cli)]
[clojure.string :only (split-lines join replace-first)]
[clj-time.core :only (now)]
[clj-time.format :only (unparse formatter)]
[clj-message-digest.core :only (md5-base64)])
(:gen-class :main true))
(defn normalise-string
"turns \r\n and \r into \n in a string"
[input]
(join "\n" (split-lines input)))
(defn update-filter-timestamp
"Ugly function to update timestamp of filter list. mostly stolen from the original Python"
[in]
(let [ts-pattern #"(?i)(?m)^\s*!\s*last\smodified[\s\-:]+([\w\+\/=]+).*\n"
dt-now (str "! Last Modified: " (unparse (formatter "dd MMM yyyy hh:mm z") (now)) "\n")]
(replace-first in ts-pattern dt-now)))
(def checksum-regex #"(?i)(?m)^\s*!\s*checksum[\s\-:]+([\w\+\/=]+).*\n")
(defn run
"Prints md5 of input file"
[opts]
(let [input (normalise-string (slurp (:file opts)))
cleaned (update-filter-timestamp
(replace-first (normalise-string input) checksum-regex ""))
[header body] (clojure.string/split cleaned #"!\n")
rules (sort (clojure.string/split body #"\n"))
sorted-rules (clojure.string/join "\n" rules)
joined (clojure.string/join "!\n" [header (str sorted-rules "\n")])
checksum (md5-base64 joined)]
(println checksum)
(spit (:file opts)
(replace-first joined "\n" (str "\n! Checksum: " checksum "\n")))))
(defn -main
"I don't do a whole lot."
[& args]
(let [[opts args banner]
(cli args
["-h" "--help" "Show help" :flag true :default false]
["-f" "--file" "REQUIRED: Path to filter FILE"]
)]
(when (:help opts)
(println banner)
(System/exit 0))
(if
(:file opts)
(do
(println "")
(run opts))
(println banner))))
|
b163551e2c3d8ab0fcd024b1e02dcb130b489aa68b0553d160da6bb17f4c9b4c
|
lucasvreis/org-mode-hs
|
Document.hs
|
-- | This was one of the first files that I wrote and is adapted from
--
-- So all credit where credit is due to and .
module Org.Parser.Document where
import Data.Text qualified as T
import Org.Parser.Common
import Org.Parser.Definitions
import Org.Parser.Elements
import Org.Parser.MarkupContexts
import Org.Parser.Objects
import Prelude hiding (many, some)
-- | Parse input as org document tree.
orgDocument :: OrgParser OrgDocument
orgDocument = do
_ <- many commentLine
properties <- option mempty propertyDrawer
topLevel <- elements
sections <- many (section 1)
eof
return $
OrgDocument
{ documentProperties = properties,
documentChildren = toList topLevel,
documentSections = sections
}
-- | Read an Org mode section and its contents. @lvl@
-- gives the minimum acceptable level of the heading.
section :: Int -> OrgParser OrgSection
section lvl = try $ do
level <- headingStart
guard (lvl <= level)
todoKw <- optional todoKeyword
isComment <- option False $ try $ string "COMMENT" *> hspace1 $> True
priority <- optional priorityCookie
(title, tags, titleTxt) <- titleObjects
planning <- option emptyPlanning planningInfo
properties <- option mempty propertyDrawer
clearPendingAffiliated
contents <- elements
children <- many (section (level + 1))
return
OrgSection
{ sectionLevel = level,
sectionProperties = properties,
sectionTodo = todoKw,
sectionIsComment = isComment,
sectionPriority = priority,
sectionTitle = toList title,
sectionRawTitle = titleTxt,
sectionAnchor = "", -- Dealt with later
sectionTags = tags,
sectionPlanning = planning,
sectionChildren = toList contents,
sectionSubsections = children
}
where
titleObjects :: OrgParser (OrgObjects, Tags, Text)
titleObjects =
option mempty $
withContext__
anySingle
endOfTitle
(plainMarkupContext standardSet)
endOfTitle :: OrgParser Tags
endOfTitle = try $ do
skipSpaces
tags <- option [] (headerTags <* skipSpaces)
void newline <|> eof
return tags
headerTags :: OrgParser Tags
headerTags = try $ do
_ <- char ':'
endBy1 orgTagWord (char ':')
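The key invariant in `section` is the level threshold: a section parsed with minimum level `lvl` accepts any heading of level `>= lvl`, and its children are parsed with minimum level `level + 1`, which is what folds a flat stream of headings into a tree. A rough OCaml sketch of the same recursion over a plain list of `(level, title)` pairs (illustrative only; the types and input format are made up and are not the Haskell parser's):

```ocaml
(* Illustration of the level-threshold recursion used by [section]:
   a section at level [l] only swallows following headings whose level
   is greater than [l]. *)
type sec = { level : int; title : string; children : sec list }

(* [sections lvl hs] parses consecutive sections of level >= lvl,
   returning them plus the remaining headings. *)
let rec sections lvl hs =
  match hs with
  | (l, title) :: rest when l >= lvl ->
      let children, rest = sections (l + 1) rest in
      let siblings, rest = sections lvl rest in
      ({ level = l; title; children } :: siblings, rest)
  | _ -> ([], hs)

let () =
  let tree, rest = sections 1 [ (1, "A"); (2, "A.1"); (2, "A.2"); (1, "B") ] in
  assert (rest = []);
  assert (List.length tree = 2);
  assert (List.length (List.hd tree).children = 2)
```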
-- * Heading and document "subelements"
-- | Parse a to-do keyword that is registered in the state.
todoKeyword :: OrgParser TodoKeyword
todoKeyword = try $ do
taskStates <- getsO orgTodoKeywords
choice (map kwParser taskStates)
where
kwParser :: TodoKeyword -> OrgParser TodoKeyword
kwParser tdm =
      -- NOTE to self: space placement - "TO" is subset of "TODOKEY"
try (string (todoName tdm) *> hspace1 $> tdm)
-- | Parse a priority cookie like @[#A]@.
priorityCookie :: OrgParser Priority
priorityCookie =
try $
string "[#"
*> priorityFromChar
<* char ']'
where
priorityFromChar :: OrgParser Priority
priorityFromChar =
NumericPriority <$> digitIntChar
<|> LetterPriority <$> upperAscii
orgTagWord :: OrgParser Text
orgTagWord =
takeWhile1P
(Just "tag characters (alphanumeric, @, %, # or _)")
(\c -> isAlphaNum c || c `elem` ['@', '%', '#', '_'])
-- | TODO READ ABOUT PLANNING
emptyPlanning :: PlanningInfo
emptyPlanning = PlanningInfo Nothing Nothing Nothing
-- | Read a single planning-related and timestamped line. TODO
planningInfo :: OrgParser PlanningInfo
planningInfo = try $ do
updaters <- some planningDatum <* skipSpaces <* newline
return $ foldr ($) emptyPlanning updaters
where
planningDatum =
skipSpaces
*> choice
[ updateWith (\s p -> p {planningScheduled = Just s}) "SCHEDULED",
updateWith (\d p -> p {planningDeadline = Just d}) "DEADLINE",
updateWith (\c p -> p {planningClosed = Just c}) "CLOSED"
]
updateWith fn cs = fn <$> (string cs *> char ':' *> skipSpaces *> parseTimestamp)
-- | Read a :PROPERTIES: drawer and return the key/value pairs contained
-- within.
propertyDrawer :: OrgParser Properties
propertyDrawer = try $ do
skipSpaces
_ <- string' ":properties:"
skipSpaces
_ <- newline
fromList <$> manyTill nodeProperty (try endOfDrawer)
where
endOfDrawer :: OrgParser Text
endOfDrawer =
try $
hspace *> string' ":end:" <* blankline'
nodeProperty :: OrgParser (Text, Text)
nodeProperty = try $ liftA2 (,) name value
name :: OrgParser Text
name =
skipSpaces
*> char ':'
*> takeWhile1P (Just "node property name") (not . isSpace)
<&> T.stripSuffix ":"
>>= guardMaybe "expecting ':' at end of node property name"
<&> T.toLower
value :: OrgParser Text
value =
skipSpaces
*> ( takeWhileP (Just "node property value") (/= '\n')
<&> T.stripEnd
)
<* newline
| null |
https://raw.githubusercontent.com/lucasvreis/org-mode-hs/8eed3910638f379d55114995e0bc75a5915c4e98/org-parser/src/Org/Parser/Document.hs
|
haskell
|
| Parse input as org document tree.
| Read an Org mode section and its contents. @lvl@
gives the minimum acceptable level of the heading.
Dealt with later
* Heading and document "subelements"
| Parse a to-do keyword that is registered in the state.
| TODO READ ABOUT PLANNING
| Read a single planning-related and timestamped line. TODO
| Read a :PROPERTIES: drawer and return the key/value pairs contained
within.
|
-- | This was one of the first files that I wrote and is adapted from
-- So all credit where credit is due to and .
module Org.Parser.Document where
import Data.Text qualified as T
import Org.Parser.Common
import Org.Parser.Definitions
import Org.Parser.Elements
import Org.Parser.MarkupContexts
import Org.Parser.Objects
import Prelude hiding (many, some)
orgDocument :: OrgParser OrgDocument
orgDocument = do
_ <- many commentLine
properties <- option mempty propertyDrawer
topLevel <- elements
sections <- many (section 1)
eof
return $
OrgDocument
{ documentProperties = properties,
documentChildren = toList topLevel,
documentSections = sections
}
section :: Int -> OrgParser OrgSection
section lvl = try $ do
level <- headingStart
guard (lvl <= level)
todoKw <- optional todoKeyword
isComment <- option False $ try $ string "COMMENT" *> hspace1 $> True
priority <- optional priorityCookie
(title, tags, titleTxt) <- titleObjects
planning <- option emptyPlanning planningInfo
properties <- option mempty propertyDrawer
clearPendingAffiliated
contents <- elements
children <- many (section (level + 1))
return
OrgSection
{ sectionLevel = level,
sectionProperties = properties,
sectionTodo = todoKw,
sectionIsComment = isComment,
sectionPriority = priority,
sectionTitle = toList title,
      sectionRawTitle = titleTxt,
      sectionAnchor = "",
      sectionTags = tags,
sectionPlanning = planning,
sectionChildren = toList contents,
sectionSubsections = children
}
where
titleObjects :: OrgParser (OrgObjects, Tags, Text)
titleObjects =
option mempty $
withContext__
anySingle
endOfTitle
(plainMarkupContext standardSet)
endOfTitle :: OrgParser Tags
endOfTitle = try $ do
skipSpaces
tags <- option [] (headerTags <* skipSpaces)
void newline <|> eof
return tags
headerTags :: OrgParser Tags
headerTags = try $ do
_ <- char ':'
endBy1 orgTagWord (char ':')
todoKeyword :: OrgParser TodoKeyword
todoKeyword = try $ do
taskStates <- getsO orgTodoKeywords
choice (map kwParser taskStates)
where
kwParser :: TodoKeyword -> OrgParser TodoKeyword
kwParser tdm =
      -- NOTE to self: space placement - "TO" is subset of "TODOKEY"
try (string (todoName tdm) *> hspace1 $> tdm)
-- | Parse a priority cookie like @[#A]@.
priorityCookie :: OrgParser Priority
priorityCookie =
try $
string "[#"
*> priorityFromChar
<* char ']'
where
priorityFromChar :: OrgParser Priority
priorityFromChar =
NumericPriority <$> digitIntChar
<|> LetterPriority <$> upperAscii
orgTagWord :: OrgParser Text
orgTagWord =
takeWhile1P
(Just "tag characters (alphanumeric, @, %, # or _)")
(\c -> isAlphaNum c || c `elem` ['@', '%', '#', '_'])
emptyPlanning :: PlanningInfo
emptyPlanning = PlanningInfo Nothing Nothing Nothing
planningInfo :: OrgParser PlanningInfo
planningInfo = try $ do
updaters <- some planningDatum <* skipSpaces <* newline
return $ foldr ($) emptyPlanning updaters
where
planningDatum =
skipSpaces
*> choice
[ updateWith (\s p -> p {planningScheduled = Just s}) "SCHEDULED",
updateWith (\d p -> p {planningDeadline = Just d}) "DEADLINE",
updateWith (\c p -> p {planningClosed = Just c}) "CLOSED"
]
updateWith fn cs = fn <$> (string cs *> char ':' *> skipSpaces *> parseTimestamp)
propertyDrawer :: OrgParser Properties
propertyDrawer = try $ do
skipSpaces
_ <- string' ":properties:"
skipSpaces
_ <- newline
fromList <$> manyTill nodeProperty (try endOfDrawer)
where
endOfDrawer :: OrgParser Text
endOfDrawer =
try $
hspace *> string' ":end:" <* blankline'
nodeProperty :: OrgParser (Text, Text)
nodeProperty = try $ liftA2 (,) name value
name :: OrgParser Text
name =
skipSpaces
*> char ':'
*> takeWhile1P (Just "node property name") (not . isSpace)
<&> T.stripSuffix ":"
>>= guardMaybe "expecting ':' at end of node property name"
<&> T.toLower
value :: OrgParser Text
value =
skipSpaces
*> ( takeWhileP (Just "node property value") (/= '\n')
<&> T.stripEnd
)
<* newline
|
d7a1e968e90fbf813a1bbae1373ed78988d2ca85b8f15f45ab261f31485621f9
|
janherich/chisel
|
curves_test.clj
|
(ns chisel.curves-test
(:require [clojure.test :refer :all]
[chisel.curves :as curves]))
(deftest resolve-points-test
(testing "Testing closed interval mapping of the resolve-points function"
(is (= (curves/resolve-points 5 identity)
           '(0 1/4 1/2 3/4 1)))))
| null |
https://raw.githubusercontent.com/janherich/chisel/9ea70cae5af4f0a8d0f2acd164e98dab19c77a36/test/chisel/curves_test.clj
|
clojure
|
(ns chisel.curves-test
(:require [clojure.test :refer :all]
[chisel.curves :as curves]))
(deftest resolve-points-test
(testing "Testing closed interval mapping of the resolve-points function"
(is (= (curves/resolve-points 5 identity)
           '(0 1/4 1/2 3/4 1)))))
|
|
1b91ab3d74885517c7464627654c619a914baf39d9d2ecaa08b9c8aa1012110b
|
imrehg/ypsilon
|
dynamic-wind.scm
|
;; Ypsilon Scheme System
;; Copyright (c) 2004-2009 Y.FUJITA / LittleWing Company Limited.
;; See license.txt for terms and conditions of use.
;; Reference:
;; R. Kent Dybvig / The Scheme Programming Language, Third Edition
;; Chapter 5. Control Operations, Section 5.6. Continuations
(define dynamic-wind
(lambda (in body out)
(in)
(current-dynamic-wind-record (cons (cons in out) (current-dynamic-wind-record)))
(call-with-values
body
(lambda ans
(current-dynamic-wind-record (cdr (current-dynamic-wind-record)))
(out)
(apply values ans)))))
(define perform-dynamic-wind
(lambda (new cont args)
(define common-tail
(lambda (x y)
(let ((nx (length x)) (ny (length y)))
(do ((x (if (> nx ny) (list-tail x (- nx ny)) x) (cdr x))
(y (if (> ny nx) (list-tail y (- ny nx)) y) (cdr y)))
((eq? x y) x)))))
(let ((tail (common-tail new (current-dynamic-wind-record))))
(let loop ((rec (current-dynamic-wind-record)))
(cond ((not (eq? rec tail))
(current-dynamic-wind-record (cdr rec))
((cdar rec))
(loop (cdr rec)))))
(let loop ((rec new))
(cond ((not (eq? rec tail))
(loop (cdr rec))
((caar rec))
(current-dynamic-wind-record rec)))))
(apply cont args)))
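`perform-dynamic-wind` unwinds to the deepest wind record shared by the current and target dynamic extents: `common-tail` drops the longer list down to the shorter one's length and then walks both in lock-step until the tails are `eq?`. A small OCaml sketch of that common-suffix search (using physical equality `==` in place of `eq?`; the names and sample lists are made up for illustration):

```ocaml
(* Sketch of the common-tail search in perform-dynamic-wind. The Scheme
   original compares tails with eq?; here we rely on == over list cells,
   which works because the inputs below share their suffix physically. *)
let rec drop n l =
  if n <= 0 then l else match l with [] -> [] | _ :: t -> drop (n - 1) t

let common_tail x y =
  let nx = List.length x and ny = List.length y in
  let rec walk x y = if x == y then x else walk (List.tl x) (List.tl y) in
  walk (if nx > ny then drop (nx - ny) x else x)
       (if ny > nx then drop (ny - nx) y else y)

let () =
  let tail = [ "out-a"; "out-b" ] in
  let old_winds = "in-c" :: tail and new_winds = tail in
  assert (common_tail old_winds new_winds == tail)
```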
| null |
https://raw.githubusercontent.com/imrehg/ypsilon/e57a06ef5c66c1a88905b2be2fa791fa29848514/heap/boot/dynamic-wind.scm
|
scheme
|
Reference:
R. Kent Dybvig / The Scheme Programming Language, Third Edition
|
;; Ypsilon Scheme System
;; Copyright (c) 2004-2009 Y.FUJITA / LittleWing Company Limited.
;; See license.txt for terms and conditions of use.
;; Chapter 5. Control Operations, Section 5.6. Continuations
(define dynamic-wind
(lambda (in body out)
(in)
(current-dynamic-wind-record (cons (cons in out) (current-dynamic-wind-record)))
(call-with-values
body
(lambda ans
(current-dynamic-wind-record (cdr (current-dynamic-wind-record)))
(out)
(apply values ans)))))
(define perform-dynamic-wind
(lambda (new cont args)
(define common-tail
(lambda (x y)
(let ((nx (length x)) (ny (length y)))
(do ((x (if (> nx ny) (list-tail x (- nx ny)) x) (cdr x))
(y (if (> ny nx) (list-tail y (- ny nx)) y) (cdr y)))
((eq? x y) x)))))
(let ((tail (common-tail new (current-dynamic-wind-record))))
(let loop ((rec (current-dynamic-wind-record)))
(cond ((not (eq? rec tail))
(current-dynamic-wind-record (cdr rec))
((cdar rec))
(loop (cdr rec)))))
(let loop ((rec new))
(cond ((not (eq? rec tail))
(loop (cdr rec))
((caar rec))
(current-dynamic-wind-record rec)))))
(apply cont args)))
|
54b4d0756ddbd53fe22848843863bdfc444adec9e511a084ac72ea5f342b4f29
|
JoaoVasques/kafka-async
|
producer.clj
|
(ns kafka-async.producer
(import org.apache.kafka.clients.producer.KafkaProducer)
(import org.apache.kafka.clients.producer.ProducerRecord)
(import java.util.UUID)
(:require
[kafka-async.commons :as commons]
[clojure.core.async :as a :refer [>! <! >!! close! chan timeout go-loop]]))
(def producers
"CLojure `atom` containing all the registered producers in Kafka-Async"
(atom {}))
(def producer-defaults
{
:retries "0"
:batch.size "16384"
:linger.ms "1"
:buffer.memory "33554432"
:key.serializer "org.apache.kafka.common.serialization.StringSerializer"
:value.serializer "org.apache.kafka.common.serialization.StringSerializer"
:acks "1"
})
(def default-in-chan-size
"Producer core.async input channel default buffer size"
100)
(defn to-producer-record
"Converts a map into a Kafka Producer Record. The map contains the following structure
```clojure
{:topic \"some-topic\" :hash-key \"some-hash\" :event {...}}
```
"
{:doc/format :markdown}
[{topic :topic hash-key :key event :event}]
(ProducerRecord. topic hash-key event))
(defn create!
"Creates a Kafka Producer client with a core.async interface given the broker's list and group id.
After the Java Kafka producer is created it's saved in the `producers` atom with the following format:
```clojure
{:uuid {:chan core-async-input-channel :producer java-kafka-producer}}
```
A core.async process is created that reads from the input channel and sends the event to Java Kafka Producer.
If a nil event is passed the process ends.
This function returns the following map to the client
```clojure
{:chan in-chan :id producer-id}
```
"
{:doc/format :markdown}
([servers client-id] (create! servers client-id default-in-chan-size {}))
([servers client-id buffer-size options]
(let [producer (-> {:bootstrap.servers servers :client.id client-id}
(merge producer-defaults options)
(commons/map-to-properties)
(KafkaProducer.))
producer-id (.toString (UUID/randomUUID))
in-chan (chan buffer-size (map to-producer-record))]
(swap! producers (fn [saved-producers]
(let [info {(keyword producer-id) {:chan in-chan :producer producer}}]
(merge saved-producers info))))
(go-loop []
(let [event (<! in-chan)]
(if-not (nil? event)
(do
(.send producer event)
(recur)
))))
{:chan in-chan :id producer-id})))
(defn close
"Closes a Kafka producer and the respective core-async channel given the producer id obtained in `create!`.
Returns `nil` if there is no producer with the given id or a sequence of all producers otherwise"
[producer-id]
(let [{chan :chan producer :producer} ((keyword producer-id) @producers)]
(if-not (nil? (and chan producer))
(do
(close! chan)
(.close producer)
(swap! producers dissoc (keyword producer-id)))
(println "producer not found"))))
| null |
https://raw.githubusercontent.com/JoaoVasques/kafka-async/fa548418c3b0e486d0d2c3586367ed8dd3935a4d/src/kafka_async/producer.clj
|
clojure
|
(ns kafka-async.producer
(import org.apache.kafka.clients.producer.KafkaProducer)
(import org.apache.kafka.clients.producer.ProducerRecord)
(import java.util.UUID)
(:require
[kafka-async.commons :as commons]
[clojure.core.async :as a :refer [>! <! >!! close! chan timeout go-loop]]))
(def producers
"CLojure `atom` containing all the registered producers in Kafka-Async"
(atom {}))
(def producer-defaults
{
:retries "0"
:batch.size "16384"
:linger.ms "1"
:buffer.memory "33554432"
:key.serializer "org.apache.kafka.common.serialization.StringSerializer"
:value.serializer "org.apache.kafka.common.serialization.StringSerializer"
:acks "1"
})
(def default-in-chan-size
"Producer core.async input channel default buffer size"
100)
(defn to-producer-record
"Converts a map into a Kafka Producer Record. The map contains the following structure
```clojure
{:topic \"some-topic\" :hash-key \"some-hash\" :event {...}}
```
"
{:doc/format :markdown}
[{topic :topic hash-key :key event :event}]
(ProducerRecord. topic hash-key event))
(defn create!
"Creates a Kafka Producer client with a core.async interface given the broker's list and group id.
After the Java Kafka producer is created it's saved in the `producers` atom with the following format:
```clojure
{:uuid {:chan core-async-input-channel :producer java-kafka-producer}}
```
A core.async process is created that reads from the input channel and sends the event to Java Kafka Producer.
If a nil event is passed the process ends.
This function returns the following map to the client
```clojure
{:chan in-chan :id producer-id}
```
"
{:doc/format :markdown}
([servers client-id] (create! servers client-id default-in-chan-size {}))
([servers client-id buffer-size options]
(let [producer (-> {:bootstrap.servers servers :client.id client-id}
(merge producer-defaults options)
(commons/map-to-properties)
(KafkaProducer.))
producer-id (.toString (UUID/randomUUID))
in-chan (chan buffer-size (map to-producer-record))]
(swap! producers (fn [saved-producers]
(let [info {(keyword producer-id) {:chan in-chan :producer producer}}]
(merge saved-producers info))))
(go-loop []
(let [event (<! in-chan)]
(if-not (nil? event)
(do
(.send producer event)
(recur)
))))
{:chan in-chan :id producer-id})))
(defn close
"Closes a Kafka producer and the respective core-async channel given the producer id obtained in `create!`.
Returns `nil` if there is no producer with the given id or a sequence of all producers otherwise"
[producer-id]
(let [{chan :chan producer :producer} ((keyword producer-id) @producers)]
(if-not (nil? (and chan producer))
(do
(close! chan)
(.close producer)
(swap! producers dissoc (keyword producer-id)))
(println "producer not found"))))
|
|
0283b6e1871c3c3b4611cf401be50d904f72843276d3599ecb06303c22b1e43a
|
vbmithr/ocaml-websocket
|
websocket_async.ml
|
(*
* Copyright (c) 2012-2018 Vincent Bernardoff <>
*
* Permission to use, copy, modify, and distribute this software for any
* purpose with or without fee is hereby granted, provided that the above
* copyright notice and this permission notice appear in all copies.
*
* THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
* WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
* MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
* ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
* WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
* ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
* OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
*
*)
open Websocket
open Core
module Time_ns = Time_ns_unix
open Core.Poly
open Async
open Cohttp
module Frame = Websocket.Frame
module Async_IO = Websocket.Make (Cohttp_async.Io)
open Async_IO
let set_tcp_nodelay writer =
let socket = Socket.of_fd (Writer.fd writer) Socket.Type.tcp in
Socket.setopt socket Socket.Opt.nodelay true
let src =
Logs.Src.create "websocket.async.client" ~doc:"Websocket client for Async"
let client ?(name = "websocket.client") ?(extra_headers = Header.init ())
?(random_string = Rng.init ()) ?initialized ~app_to_ws ~ws_to_app ~net_to_ws
~ws_to_net uri =
let drain_handshake r w =
let nonce = Base64.encode_exn (random_string 16) in
let headers =
Header.add_list extra_headers
[
("Upgrade", "websocket");
("Connection", "Upgrade");
("Sec-WebSocket-Key", nonce);
("Sec-WebSocket-Version", "13");
]
in
let req = Cohttp.Request.make ~headers uri in
Logs_async.debug ~src (fun m ->
m "%a" Sexp.pp_hum Cohttp.Request.(sexp_of_t req))
>>= fun () ->
Request.write (fun _ -> Deferred.unit) req w >>= fun () ->
Response.read r >>= function
| `Eof -> raise End_of_file
| `Invalid s -> failwith s
| `Ok response ->
Logs_async.debug ~src (fun m ->
m "%a" Sexp.pp_hum Cohttp.Response.(sexp_of_t response))
>>= fun () ->
let status = Cohttp.Response.status response in
let headers = Cohttp.Response.headers response in
if Code.(is_error @@ code_of_status status) then
Reader.contents r >>= fun msg ->
Logs_async.err ~src (fun m -> m "%s" msg) >>= fun () ->
failwith @@ "HTTP Error " ^ Code.(string_of_status status)
else if Cohttp.Response.version response <> `HTTP_1_1 then
failwith "HTTP version error"
else if status <> `Switching_protocols then
failwith @@ "status error " ^ Code.(string_of_status status)
else if
Header.(get headers "upgrade")
|> Option.map ~f:String.lowercase
<> Some "websocket"
then failwith "upgrade error"
else if not @@ upgrade_present headers then
failwith "update not present"
else if
Header.get headers "sec-websocket-accept"
<> Some (nonce ^ websocket_uuid |> b64_encoded_sha1sum)
then failwith "accept error"
else Deferred.unit
in
let run () =
drain_handshake net_to_ws ws_to_net >>= fun () ->
Option.iter initialized ~f:(fun ivar -> Ivar.fill ivar ());
let read_frame =
make_read_frame ~mode:(Client random_string) net_to_ws ws_to_net
in
let buf = Buffer.create 128 in
let rec forward_frames_to_app ws_to_app =
read_frame () >>= fun fr ->
(if not @@ Pipe.is_closed ws_to_app then Pipe.write ws_to_app fr
else Deferred.unit)
>>= fun () -> forward_frames_to_app ws_to_app
in
let forward_frames_to_net ws_to_net app_to_ws =
Writer.transfer' ws_to_net app_to_ws (fun frs ->
Queue.iter frs ~f:(fun fr ->
Buffer.clear buf;
write_frame_to_buf ~mode:(Client random_string) buf fr;
let contents = Buffer.contents buf in
Writer.write ws_to_net contents);
Writer.flushed ws_to_net)
in
Deferred.any_unit
[
forward_frames_to_app ws_to_app;
forward_frames_to_net ws_to_net app_to_ws;
Deferred.all_unit Pipe.[ closed app_to_ws; closed ws_to_app ];
]
in
let finally_f =
lazy
(Pipe.close_read app_to_ws;
Pipe.close ws_to_app)
in
Monitor.try_with_or_error ~name run >>| fun res ->
Lazy.force finally_f;
res
let client_ez ?opcode ?(name = "websocket.client_ez") ?extra_headers ?heartbeat
?random_string uri net_to_ws ws_to_net =
let app_to_ws, reactor_write = Pipe.create () in
let to_reactor_write, client_write = Pipe.create () in
let client_read, ws_to_app = Pipe.create () in
let initialized = Ivar.create () in
let initialized_d = Ivar.read initialized in
let last_pong = ref @@ Time_ns.epoch in
let cleanup =
lazy
(Pipe.close ws_to_app;
Pipe.close_read app_to_ws;
Pipe.close_read to_reactor_write;
Pipe.close client_write)
in
let send_ping w span =
let now = Time_ns.now () in
Logs_async.debug ~src (fun m -> m "-> PING") >>= fun () ->
Pipe.write w
@@ Frame.create ~opcode:Frame.Opcode.Ping
~content:(Time_ns.to_string_fix_proto `Utc now)
()
>>| fun () ->
let time_since_last_pong = Time_ns.diff now !last_pong in
if
!last_pong > Time_ns.epoch
&& Time_ns.Span.(time_since_last_pong > span + span)
then Lazy.force cleanup
in
let react w fr =
let open Frame in
Logs_async.debug ~src (fun m -> m "<- %a" Frame.pp fr) >>= fun () ->
match fr.opcode with
| Opcode.Ping ->
Pipe.write w @@ Frame.create ~opcode:Opcode.Pong () >>| fun () -> None
| Opcode.Close ->
(* Immediately echo and pass this last message to the user *)
(if String.length fr.content >= 2 then
Pipe.write w
@@ Frame.create ~opcode:Opcode.Close
~content:(String.sub fr.content ~pos:0 ~len:2)
()
else Pipe.write w @@ Frame.close 1000)
>>| fun () ->
Pipe.close w;
None
| Opcode.Pong ->
last_pong := Time_ns.now ();
return None
| Opcode.Text | Opcode.Binary -> return @@ Some fr.content
| _ ->
Pipe.write w @@ Frame.close 1002 >>| fun () ->
Pipe.close w;
None
in
let client_read = Pipe.filter_map' client_read ~f:(react reactor_write) in
let react () =
initialized_d >>= fun () ->
Pipe.transfer to_reactor_write reactor_write ~f:(fun content ->
Frame.create ?opcode ~content ())
in
(* Run send_ping every heartbeat when heartbeat is set. *)
don't_wait_for
(match heartbeat with
| None -> Deferred.unit
| Some span ->
initialized_d >>| fun () ->
Clock_ns.run_at_intervals' ~continue_on_error:false
~stop:(Pipe.closed reactor_write) span (fun () ->
send_ping reactor_write span));
don't_wait_for
(Monitor.protect
~finally:(fun () ->
Lazy.force cleanup;
Deferred.unit)
(fun () ->
Deferred.any_unit
[
client ~name ?extra_headers ?random_string ~initialized ~app_to_ws
~ws_to_app ~net_to_ws ~ws_to_net uri
|> Deferred.ignore_m;
react ();
Deferred.all_unit Pipe.[ closed client_read; closed client_write ];
]));
(client_read, client_write)
let src =
Logs.Src.create "websocket.async.server" ~doc:"Websocket server for Async"
let server ?(name = "websocket.server")
?(check_request = fun _ -> Deferred.return true)
?(select_protocol = fun _ -> None) ~reader ~writer ~app_to_ws ~ws_to_app ()
=
let handshake r w =
(Request.read r >>= function
| `Ok r -> Deferred.return r
| `Eof ->
(* Remote endpoint closed connection. No further action
necessary here. *)
Logs_async.info ~src (fun m -> m "Remote endpoint closed connection")
>>= fun () -> raise End_of_file
| `Invalid reason ->
Logs_async.info ~src (fun m ->
m "Invalid input from remote endpoint: %s" reason)
>>= fun () -> failwith reason)
>>= fun request ->
(check_request request >>= function
| true -> Deferred.unit
| false ->
let body = "403 Forbidden" in
let response =
Cohttp.Response.make ~status:`Forbidden ()
~encoding:
(Cohttp.Transfer.Fixed (String.length body |> Int64.of_int))
in
let open Response in
write ~flush:true (fun w -> write_body w body) response w >>= fun () ->
raise Exit)
>>= fun () ->
let meth = Cohttp.Request.meth request in
let version = Cohttp.Request.version request in
let headers = Cohttp.Request.headers request in
if
not
(version = `HTTP_1_1 && meth = `GET
&& Option.map (Header.get headers "upgrade") ~f:String.lowercase
= Some "websocket"
&& upgrade_present headers)
then failwith "Protocol error";
let key =
Option.value_exn ~message:"missing sec-websocket-key"
(Header.get headers "sec-websocket-key")
in
let hash = key ^ websocket_uuid |> b64_encoded_sha1sum in
let subprotocol =
Option.value_map (Header.get headers "sec-websocket-protocol") ~default:[]
~f:(fun p ->
Option.value_map (select_protocol p) ~default:[] ~f:(fun selected ->
[ ("Sec-WebSocket-Protocol", selected) ]))
in
let response_headers =
("Upgrade", "websocket") :: ("Connection", "Upgrade")
:: ("Sec-WebSocket-Accept", hash)
:: subprotocol
in
let response =
Cohttp.Response.make ~status:`Switching_protocols
~encoding:Transfer.Unknown
~headers:(Header.of_list response_headers)
()
in
Response.write (fun _ -> Deferred.unit) response w
in
Monitor.try_with_or_error ~name ~extract_exn:true (fun () ->
handshake reader writer)
|> Deferred.Or_error.bind ~f:(fun () ->
set_tcp_nodelay writer;
let read_frame = make_read_frame ~mode:Server reader writer in
let rec loop () = read_frame () >>= Pipe.write ws_to_app >>= loop in
let transfer_end =
let buf = Buffer.create 128 in
Pipe.transfer app_to_ws
Writer.(pipe writer)
~f:(fun fr ->
Buffer.clear buf;
write_frame_to_buf ~mode:Server buf fr;
Buffer.contents buf)
in
Monitor.protect
~finally:(fun () ->
Pipe.close ws_to_app;
Pipe.close_read app_to_ws;
Deferred.unit)
(fun () ->
Deferred.any
[
transfer_end;
loop ();
Pipe.closed ws_to_app;
Pipe.closed app_to_ws;
])
>>= Deferred.Or_error.return)
let upgrade_connection ?(select_protocol = fun _ -> None)
?(ping_interval = Time_ns.Span.of_int_sec 50) ~app_to_ws ~ws_to_app ~f
request =
let headers = Cohttp.Request.headers request in
let key =
Option.value_exn ~message:"missing sec-websocket-key"
(Header.get headers "sec-websocket-key")
in
let hash = key ^ websocket_uuid |> b64_encoded_sha1sum in
let subprotocol =
Option.value_map (Header.get headers "sec-websocket-protocol") ~default:[]
~f:(fun p ->
Option.value_map (select_protocol p) ~default:[] ~f:(fun selected ->
[ ("Sec-WebSocket-Protocol", selected) ]))
in
let response_headers =
("Upgrade", "websocket") :: ("Connection", "Upgrade")
:: ("Sec-WebSocket-Accept", hash)
:: subprotocol
in
let response =
Cohttp.Response.make ~status:`Switching_protocols ~encoding:Transfer.Unknown
~headers:(Header.of_list response_headers)
()
in
let handler reader writer =
let read_frame = make_read_frame ~mode:Server reader writer in
let rec loop () =
try_with read_frame >>= function
| Error _ -> Deferred.unit
| Ok frame -> Pipe.write ws_to_app frame >>= loop
in
let buf = Buffer.create 128 in
let frame_to_string fr =
Buffer.clear buf;
write_frame_to_buf ~mode:Server buf fr;
Buffer.contents buf
in
let ping () =
if Time_ns.Span.(ping_interval = zero) then Deferred.never ()
else
let ping_frame_string =
frame_to_string Frame.(create ~opcode:Opcode.Ping ())
in
let rec ping_loop () =
Clock_ns.after ping_interval >>= fun () ->
match Writer.is_closed writer with
| true -> Deferred.unit
| false ->
Writer.write writer ping_frame_string;
Writer.flushed writer >>= fun () -> ping_loop ()
in
ping_loop ()
in
let transfer_end () =
Pipe.transfer app_to_ws Writer.(pipe writer) ~f:frame_to_string
in
let finally () =
Pipe.close ws_to_app;
Pipe.close_read app_to_ws;
Deferred.unit
in
Monitor.protect ~finally (fun () ->
set_tcp_nodelay writer;
Deferred.all_unit
[
Deferred.any
[
transfer_end ();
loop ();
ping ();
Pipe.closed ws_to_app;
Pipe.closed app_to_ws;
];
f ();
])
in
(response, handler)
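Both the client handshake check and the `server`/`upgrade_connection` responses hinge on the same RFC 6455 rule: the accept value is the base64-encoded SHA-1 of the client key concatenated with the fixed WebSocket GUID, i.e. `b64_encoded_sha1sum (key ^ websocket_uuid)` as used above. A tiny self-check against the example key/accept pair from RFC 6455, section 1.3, assuming `Websocket` is open as in this module:

```ocaml
(* Quick sanity check of the accept-key rule used by the handshake code
   above. Relies only on values this module already uses:
   Websocket.websocket_uuid and Websocket.b64_encoded_sha1sum. *)
let accept_of_key key = b64_encoded_sha1sum (key ^ websocket_uuid)

let () =
  (* Key/accept pair taken from RFC 6455, section 1.3. *)
  assert (accept_of_key "dGhlIHNhbXBsZSBub25jZQ=="
          = "s3pPLMBiTxaQ9kYGzzhZRbK+xOo=")
```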
| null |
https://raw.githubusercontent.com/vbmithr/ocaml-websocket/a5d7cb0710e6df49cc0c328b2ed28be4d1a397b4/async/websocket_async.ml
|
ocaml
|
Immediately echo and pass this last message to the user
Run send_ping every heartbeat when heartbeat is set.
Remote endpoint closed connection. No further action
necessary here.
|
(*
* Copyright (c) 2012-2018 Vincent Bernardoff <>
*
* Permission to use, copy, modify, and distribute this software for any
* purpose with or without fee is hereby granted, provided that the above
* copyright notice and this permission notice appear in all copies.
*
* THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
* WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
* MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
* ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
* WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
* ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
* OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
*
*)
open Websocket
open Core
module Time_ns = Time_ns_unix
open Core.Poly
open Async
open Cohttp
module Frame = Websocket.Frame
module Async_IO = Websocket.Make (Cohttp_async.Io)
open Async_IO
let set_tcp_nodelay writer =
let socket = Socket.of_fd (Writer.fd writer) Socket.Type.tcp in
Socket.setopt socket Socket.Opt.nodelay true
let src =
Logs.Src.create "websocket.async.client" ~doc:"Websocket client for Async"
let client ?(name = "websocket.client") ?(extra_headers = Header.init ())
?(random_string = Rng.init ()) ?initialized ~app_to_ws ~ws_to_app ~net_to_ws
~ws_to_net uri =
let drain_handshake r w =
let nonce = Base64.encode_exn (random_string 16) in
let headers =
Header.add_list extra_headers
[
("Upgrade", "websocket");
("Connection", "Upgrade");
("Sec-WebSocket-Key", nonce);
("Sec-WebSocket-Version", "13");
]
in
let req = Cohttp.Request.make ~headers uri in
Logs_async.debug ~src (fun m ->
m "%a" Sexp.pp_hum Cohttp.Request.(sexp_of_t req))
>>= fun () ->
Request.write (fun _ -> Deferred.unit) req w >>= fun () ->
Response.read r >>= function
| `Eof -> raise End_of_file
| `Invalid s -> failwith s
| `Ok response ->
Logs_async.debug ~src (fun m ->
m "%a" Sexp.pp_hum Cohttp.Response.(sexp_of_t response))
>>= fun () ->
let status = Cohttp.Response.status response in
let headers = Cohttp.Response.headers response in
if Code.(is_error @@ code_of_status status) then
Reader.contents r >>= fun msg ->
Logs_async.err ~src (fun m -> m "%s" msg) >>= fun () ->
failwith @@ "HTTP Error " ^ Code.(string_of_status status)
else if Cohttp.Response.version response <> `HTTP_1_1 then
failwith "HTTP version error"
else if status <> `Switching_protocols then
failwith @@ "status error " ^ Code.(string_of_status status)
else if
Header.(get headers "upgrade")
|> Option.map ~f:String.lowercase
<> Some "websocket"
then failwith "upgrade error"
else if not @@ upgrade_present headers then
failwith "update not present"
else if
Header.get headers "sec-websocket-accept"
<> Some (nonce ^ websocket_uuid |> b64_encoded_sha1sum)
then failwith "accept error"
else Deferred.unit
in
let run () =
drain_handshake net_to_ws ws_to_net >>= fun () ->
Option.iter initialized ~f:(fun ivar -> Ivar.fill ivar ());
let read_frame =
make_read_frame ~mode:(Client random_string) net_to_ws ws_to_net
in
let buf = Buffer.create 128 in
let rec forward_frames_to_app ws_to_app =
read_frame () >>= fun fr ->
(if not @@ Pipe.is_closed ws_to_app then Pipe.write ws_to_app fr
else Deferred.unit)
>>= fun () -> forward_frames_to_app ws_to_app
in
let forward_frames_to_net ws_to_net app_to_ws =
Writer.transfer' ws_to_net app_to_ws (fun frs ->
Queue.iter frs ~f:(fun fr ->
Buffer.clear buf;
write_frame_to_buf ~mode:(Client random_string) buf fr;
let contents = Buffer.contents buf in
Writer.write ws_to_net contents);
Writer.flushed ws_to_net)
in
Deferred.any_unit
[
forward_frames_to_app ws_to_app;
forward_frames_to_net ws_to_net app_to_ws;
Deferred.all_unit Pipe.[ closed app_to_ws; closed ws_to_app ];
]
in
let finally_f =
lazy
(Pipe.close_read app_to_ws;
Pipe.close ws_to_app)
in
Monitor.try_with_or_error ~name run >>| fun res ->
Lazy.force finally_f;
res
let client_ez ?opcode ?(name = "websocket.client_ez") ?extra_headers ?heartbeat
?random_string uri net_to_ws ws_to_net =
let app_to_ws, reactor_write = Pipe.create () in
let to_reactor_write, client_write = Pipe.create () in
let client_read, ws_to_app = Pipe.create () in
let initialized = Ivar.create () in
let initialized_d = Ivar.read initialized in
let last_pong = ref @@ Time_ns.epoch in
let cleanup =
lazy
(Pipe.close ws_to_app;
Pipe.close_read app_to_ws;
Pipe.close_read to_reactor_write;
Pipe.close client_write)
in
let send_ping w span =
let now = Time_ns.now () in
Logs_async.debug ~src (fun m -> m "-> PING") >>= fun () ->
Pipe.write w
@@ Frame.create ~opcode:Frame.Opcode.Ping
~content:(Time_ns.to_string_fix_proto `Utc now)
()
>>| fun () ->
let time_since_last_pong = Time_ns.diff now !last_pong in
if
!last_pong > Time_ns.epoch
&& Time_ns.Span.(time_since_last_pong > span + span)
then Lazy.force cleanup
in
let react w fr =
let open Frame in
Logs_async.debug ~src (fun m -> m "<- %a" Frame.pp fr) >>= fun () ->
match fr.opcode with
| Opcode.Ping ->
Pipe.write w @@ Frame.create ~opcode:Opcode.Pong () >>| fun () -> None
| Opcode.Close ->
(if String.length fr.content >= 2 then
Pipe.write w
@@ Frame.create ~opcode:Opcode.Close
~content:(String.sub fr.content ~pos:0 ~len:2)
()
else Pipe.write w @@ Frame.close 1000)
>>| fun () ->
Pipe.close w;
None
| Opcode.Pong ->
last_pong := Time_ns.now ();
return None
| Opcode.Text | Opcode.Binary -> return @@ Some fr.content
| _ ->
Pipe.write w @@ Frame.close 1002 >>| fun () ->
Pipe.close w;
None
in
let client_read = Pipe.filter_map' client_read ~f:(react reactor_write) in
let react () =
initialized_d >>= fun () ->
Pipe.transfer to_reactor_write reactor_write ~f:(fun content ->
Frame.create ?opcode ~content ())
in
don't_wait_for
(match heartbeat with
| None -> Deferred.unit
| Some span ->
initialized_d >>| fun () ->
Clock_ns.run_at_intervals' ~continue_on_error:false
~stop:(Pipe.closed reactor_write) span (fun () ->
send_ping reactor_write span));
don't_wait_for
(Monitor.protect
~finally:(fun () ->
Lazy.force cleanup;
Deferred.unit)
(fun () ->
Deferred.any_unit
[
client ~name ?extra_headers ?random_string ~initialized ~app_to_ws
~ws_to_app ~net_to_ws ~ws_to_net uri
|> Deferred.ignore_m;
react ();
Deferred.all_unit Pipe.[ closed client_read; closed client_write ];
]));
(client_read, client_write)
let src =
Logs.Src.create "websocket.async.server" ~doc:"Websocket server for Async"
let server ?(name = "websocket.server")
?(check_request = fun _ -> Deferred.return true)
?(select_protocol = fun _ -> None) ~reader ~writer ~app_to_ws ~ws_to_app ()
=
let handshake r w =
(Request.read r >>= function
| `Ok r -> Deferred.return r
| `Eof ->
Logs_async.info ~src (fun m -> m "Remote endpoint closed connection")
>>= fun () -> raise End_of_file
| `Invalid reason ->
Logs_async.info ~src (fun m ->
m "Invalid input from remote endpoint: %s" reason)
>>= fun () -> failwith reason)
>>= fun request ->
(check_request request >>= function
| true -> Deferred.unit
| false ->
let body = "403 Forbidden" in
let response =
Cohttp.Response.make ~status:`Forbidden ()
~encoding:
(Cohttp.Transfer.Fixed (String.length body |> Int64.of_int))
in
let open Response in
write ~flush:true (fun w -> write_body w body) response w >>= fun () ->
raise Exit)
>>= fun () ->
let meth = Cohttp.Request.meth request in
let version = Cohttp.Request.version request in
let headers = Cohttp.Request.headers request in
if
not
(version = `HTTP_1_1 && meth = `GET
&& Option.map (Header.get headers "upgrade") ~f:String.lowercase
= Some "websocket"
&& upgrade_present headers)
then failwith "Protocol error";
let key =
Option.value_exn ~message:"missing sec-websocket-key"
(Header.get headers "sec-websocket-key")
in
let hash = key ^ websocket_uuid |> b64_encoded_sha1sum in
let subprotocol =
Option.value_map (Header.get headers "sec-websocket-protocol") ~default:[]
~f:(fun p ->
Option.value_map (select_protocol p) ~default:[] ~f:(fun selected ->
[ ("Sec-WebSocket-Protocol", selected) ]))
in
let response_headers =
("Upgrade", "websocket") :: ("Connection", "Upgrade")
:: ("Sec-WebSocket-Accept", hash)
:: subprotocol
in
let response =
Cohttp.Response.make ~status:`Switching_protocols
~encoding:Transfer.Unknown
~headers:(Header.of_list response_headers)
()
in
Response.write (fun _ -> Deferred.unit) response w
in
Monitor.try_with_or_error ~name ~extract_exn:true (fun () ->
handshake reader writer)
|> Deferred.Or_error.bind ~f:(fun () ->
set_tcp_nodelay writer;
let read_frame = make_read_frame ~mode:Server reader writer in
let rec loop () = read_frame () >>= Pipe.write ws_to_app >>= loop in
let transfer_end =
let buf = Buffer.create 128 in
Pipe.transfer app_to_ws
Writer.(pipe writer)
~f:(fun fr ->
Buffer.clear buf;
write_frame_to_buf ~mode:Server buf fr;
Buffer.contents buf)
in
Monitor.protect
~finally:(fun () ->
Pipe.close ws_to_app;
Pipe.close_read app_to_ws;
Deferred.unit)
(fun () ->
Deferred.any
[
transfer_end;
loop ();
Pipe.closed ws_to_app;
Pipe.closed app_to_ws;
])
>>= Deferred.Or_error.return)
let upgrade_connection ?(select_protocol = fun _ -> None)
?(ping_interval = Time_ns.Span.of_int_sec 50) ~app_to_ws ~ws_to_app ~f
request =
let headers = Cohttp.Request.headers request in
let key =
Option.value_exn ~message:"missing sec-websocket-key"
(Header.get headers "sec-websocket-key")
in
let hash = key ^ websocket_uuid |> b64_encoded_sha1sum in
let subprotocol =
Option.value_map (Header.get headers "sec-websocket-protocol") ~default:[]
~f:(fun p ->
Option.value_map (select_protocol p) ~default:[] ~f:(fun selected ->
[ ("Sec-WebSocket-Protocol", selected) ]))
in
let response_headers =
("Upgrade", "websocket") :: ("Connection", "Upgrade")
:: ("Sec-WebSocket-Accept", hash)
:: subprotocol
in
let response =
Cohttp.Response.make ~status:`Switching_protocols ~encoding:Transfer.Unknown
~headers:(Header.of_list response_headers)
()
in
let handler reader writer =
let read_frame = make_read_frame ~mode:Server reader writer in
let rec loop () =
try_with read_frame >>= function
| Error _ -> Deferred.unit
| Ok frame -> Pipe.write ws_to_app frame >>= loop
in
let buf = Buffer.create 128 in
let frame_to_string fr =
Buffer.clear buf;
write_frame_to_buf ~mode:Server buf fr;
Buffer.contents buf
in
let ping () =
if Time_ns.Span.(ping_interval = zero) then Deferred.never ()
else
let ping_frame_string =
frame_to_string Frame.(create ~opcode:Opcode.Ping ())
in
let rec ping_loop () =
Clock_ns.after ping_interval >>= fun () ->
match Writer.is_closed writer with
| true -> Deferred.unit
| false ->
Writer.write writer ping_frame_string;
Writer.flushed writer >>= fun () -> ping_loop ()
in
ping_loop ()
in
let transfer_end () =
Pipe.transfer app_to_ws Writer.(pipe writer) ~f:frame_to_string
in
let finally () =
Pipe.close ws_to_app;
Pipe.close_read app_to_ws;
Deferred.unit
in
Monitor.protect ~finally (fun () ->
set_tcp_nodelay writer;
Deferred.all_unit
[
Deferred.any
[
transfer_end ();
loop ();
ping ();
Pipe.closed ws_to_app;
Pipe.closed app_to_ws;
];
f ();
])
in
(response, handler)
|
bdef6bcb50277dd51ce4f7fe6068f95a2eeca56d212e2e9f1e75ad08ce493c69
|
Glue42/gateway-modules
|
measurements.cljc
|
(ns gateway.common.measurements)
(defprotocol Measurements
(get-all [this])
(record! [this type measurement-name measurement-value]))
| null |
https://raw.githubusercontent.com/Glue42/gateway-modules/be48a132134b5f9f41fd6a6067800da6be5e6eca/common/src/gateway/common/measurements.cljc
|
clojure
|
(ns gateway.common.measurements)
(defprotocol Measurements
(get-all [this])
(record! [this type measurement-name measurement-value]))
|
|
c6cfc4d7b1bc98d5f5a861e733a242120e64cef94e6317faf71ee3feb09f10fb
|
ocaml-multicore/multicoretests
|
stm_tests.ml
|
open QCheck
open STM
(** parallel STM tests of Bytes *)
module ByConf =
struct
type cmd =
| Length
| Get of int
| Set of int * char
| Sub of int * int
| Copy
| Fill of int * int * char
| To_seq
[@@deriving show { with_path = false }]
type state = char list
type sut = Bytes.t
let arb_cmd s =
let int_gen = Gen.(oneof [small_nat; int_bound (List.length s - 1)]) in
let char_gen = Gen.printable in
    QCheck.make ~print:show_cmd (* ~shrink:shrink_cmd *)
Gen.(oneof
[ return Length;
map (fun i -> Get i) int_gen;
map2 (fun i c -> Set (i,c)) int_gen char_gen;
map2 (fun i len -> Sub (i,len)) int_gen int_gen; (* hack: reusing int_gen for length *)
return Copy;
map3 (fun i len c -> Fill (i,len,c)) int_gen int_gen char_gen; (* hack: reusing int_gen for length*)
return To_seq;
])
let byte_size = 16
let init_state = List.init byte_size (fun _ -> 'a')
let next_state c s = match c with
| Length -> s
| Get _ -> s
| Set (i,c) -> List.mapi (fun j c' -> if i = j then c else c') s
| Sub (_,_) -> s
| Copy -> s
| Fill (i,l,c) ->
if i >= 0 && l >= 0 && i+l-1 < (List.length s)
then List.mapi (fun j c' -> if i <= j && j <= i+l-1 then c else c') s
else s
| To_seq -> s
let init_sut () = Bytes.make byte_size 'a'
let cleanup _ = ()
let precond c _s = match c with
| _ -> true
let run c b = match c with
| Length -> Res (int, Bytes.length b)
| Get i -> Res (result char exn, protect (Bytes.get b) i)
| Set (i,c) -> Res (result unit exn, protect (Bytes.set b i) c)
| Sub (i,l) -> Res (result (bytes) exn, protect (Bytes.sub b i) l)
| Copy -> Res (bytes, Bytes.copy b)
| Fill (i,l,c) -> Res (result unit exn, protect (Bytes.fill b i l) c)
| To_seq -> Res (seq char, List.to_seq (List.of_seq (Bytes.to_seq b)))
let postcond c (s: char list) res = match c, res with
| Length, Res ((Int,_),i) -> i = List.length s
| Get i, Res ((Result (Char,Exn),_), r) ->
if i < 0 || i >= List.length s
then r = Error (Invalid_argument "index out of bounds")
else r = Ok (List.nth s i)
| Set (i,_), Res ((Result (Unit,Exn),_), r) ->
if i < 0 || i >= List.length s
then r = Error (Invalid_argument "index out of bounds")
else r = Ok ()
| Sub (i,l), Res ((Result (Bytes,Exn),_), r) ->
if i < 0 || l < 0 || i+l > List.length s
then r = Error (Invalid_argument "String.sub / Bytes.sub")
else r = Ok (Bytes.of_seq (List.to_seq (List.filteri (fun j _ -> i <= j && j <= i+l-1) s)))
| Copy, Res ((Bytes,_),r) -> r = Bytes.of_seq (List.to_seq s)
| Fill (i,l,_), Res ((Result (Unit,Exn),_), r) ->
if i < 0 || l < 0 || i+l > List.length s
then r = Error (Invalid_argument "String.fill / Bytes.fill" )
else r = Ok ()
| To_seq, Res ((Seq Char,_),r) -> Seq.equal (=) r (List.to_seq s)
| _, _ -> false
end
module BytesSTM_seq = STM_sequential.Make(ByConf)
module BytesSTM_dom = STM_domain.Make(ByConf)
;;
QCheck_base_runner.run_tests_main
(let count = 1000 in
[BytesSTM_seq.agree_test ~count ~name:"STM Bytes test sequential";
BytesSTM_dom.neg_agree_test_par ~count ~name:"STM Bytes test parallel"
])
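The harness passes because `next_state` on the `char list` model and the real mutation of the `Bytes.t` SUT are kept in agreement, with `postcond` spelling out what agreement means for each command. A tiny hand-rolled check of that correspondence for one `Set` command, outside the QCheck runner (it applies `Bytes.set` directly instead of going through `run`, and assumes `ByConf` is in scope as defined above):

```ocaml
(* One-off illustration of the model/SUT correspondence the STM test
   relies on: Set in the model and Bytes.set on the SUT must agree. *)
let () =
  let model = ByConf.next_state (ByConf.Set (3, 'z')) ByConf.init_state in
  let sut = ByConf.init_sut () in
  Bytes.set sut 3 'z';  (* what run (Set (3, 'z')) does to the SUT *)
  List.iteri (fun i c -> assert (Bytes.get sut i = c)) model
```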
| null |
https://raw.githubusercontent.com/ocaml-multicore/multicoretests/3e0f2ceb72eaf334e97252140ae5d40bf6461b96/src/bytes/stm_tests.ml
|
ocaml
|
hack: reusing int_gen for length
hack: reusing int_gen for length
|
open QCheck
open STM
(** parallel STM tests of Bytes *)
module ByConf =
struct
type cmd =
| Length
| Get of int
| Set of int * char
| Sub of int * int
| Copy
| Fill of int * int * char
| To_seq
[@@deriving show { with_path = false }]
type state = char list
type sut = Bytes.t
let arb_cmd s =
let int_gen = Gen.(oneof [small_nat; int_bound (List.length s - 1)]) in
let char_gen = Gen.printable in
QCheck.make ~print:show_cmd
Gen.(oneof
[ return Length;
map (fun i -> Get i) int_gen;
map2 (fun i c -> Set (i,c)) int_gen char_gen;
map2 (fun i len -> Sub (i,len)) int_gen int_gen;
return Copy;
map3 (fun i len c -> Fill (i,len,c)) int_gen int_gen char_gen;
return To_seq;
])
let byte_size = 16
let init_state = List.init byte_size (fun _ -> 'a')
let next_state c s = match c with
| Length -> s
| Get _ -> s
| Set (i,c) -> List.mapi (fun j c' -> if i = j then c else c') s
| Sub (_,_) -> s
| Copy -> s
| Fill (i,l,c) ->
if i >= 0 && l >= 0 && i+l-1 < (List.length s)
then List.mapi (fun j c' -> if i <= j && j <= i+l-1 then c else c') s
else s
| To_seq -> s
let init_sut () = Bytes.make byte_size 'a'
let cleanup _ = ()
let precond c _s = match c with
| _ -> true
let run c b = match c with
| Length -> Res (int, Bytes.length b)
| Get i -> Res (result char exn, protect (Bytes.get b) i)
| Set (i,c) -> Res (result unit exn, protect (Bytes.set b i) c)
| Sub (i,l) -> Res (result (bytes) exn, protect (Bytes.sub b i) l)
| Copy -> Res (bytes, Bytes.copy b)
| Fill (i,l,c) -> Res (result unit exn, protect (Bytes.fill b i l) c)
| To_seq -> Res (seq char, List.to_seq (List.of_seq (Bytes.to_seq b)))
let postcond c (s: char list) res = match c, res with
| Length, Res ((Int,_),i) -> i = List.length s
| Get i, Res ((Result (Char,Exn),_), r) ->
if i < 0 || i >= List.length s
then r = Error (Invalid_argument "index out of bounds")
else r = Ok (List.nth s i)
| Set (i,_), Res ((Result (Unit,Exn),_), r) ->
if i < 0 || i >= List.length s
then r = Error (Invalid_argument "index out of bounds")
else r = Ok ()
| Sub (i,l), Res ((Result (Bytes,Exn),_), r) ->
if i < 0 || l < 0 || i+l > List.length s
then r = Error (Invalid_argument "String.sub / Bytes.sub")
else r = Ok (Bytes.of_seq (List.to_seq (List.filteri (fun j _ -> i <= j && j <= i+l-1) s)))
| Copy, Res ((Bytes,_),r) -> r = Bytes.of_seq (List.to_seq s)
| Fill (i,l,_), Res ((Result (Unit,Exn),_), r) ->
if i < 0 || l < 0 || i+l > List.length s
then r = Error (Invalid_argument "String.fill / Bytes.fill" )
else r = Ok ()
| To_seq, Res ((Seq Char,_),r) -> Seq.equal (=) r (List.to_seq s)
| _, _ -> false
end
module BytesSTM_seq = STM_sequential.Make(ByConf)
module BytesSTM_dom = STM_domain.Make(ByConf)
;;
QCheck_base_runner.run_tests_main
(let count = 1000 in
[BytesSTM_seq.agree_test ~count ~name:"STM Bytes test sequential";
BytesSTM_dom.neg_agree_test_par ~count ~name:"STM Bytes test parallel"
])
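;;
(* Editor's note: the sketch below is illustrative only and is not part of the
   original test file. For a single [Get] command it spells out the agreement
   that [postcond] encodes between the [Bytes.t] system under test and the
   [char list] model. It assumes [STM.protect f x] returns [Ok (f x)], or
   [Error e] when [f x] raises [e]. *)
let _get_agreement_sketch () =
  let sut = Bytes.make 16 'a' in
  let model = List.init 16 (fun _ -> 'a') in
  (* in-range read: SUT and model must agree *)
  assert (protect (Bytes.get sut) 3 = Ok (List.nth model 3));
  (* out-of-range read: the SUT must fail with Invalid_argument *)
  assert (match protect (Bytes.get sut) 99 with
          | Error (Invalid_argument _) -> true
          | _ -> false)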
|
b77b604c34322ec41eed01b33fadae1da455f10b5d235acc1fdd9d880c67822b
|
dym/movitz
|
complexes.lisp
|
;;;;------------------------------------------------------------------
;;;;
;;;; Copyright (C) 2008,
;;;;
;;;; Description: Complex numbers
;;;; Author:
;;;; Distribution: See the accompanying file COPYING.
;;;;
;;;; $Id: complexes.lisp,v 1.4 2009-07-19 18:52:08 ffjeld Exp $
;;;;
;;;;------------------------------------------------------------------
(require :muerte/basic-macros)
(require :muerte/defstruct)
(in-package muerte)
(provide :muerte/complexes)
(defstruct (complex (:constructor make-complex (realpart imagpart))
(:predicate complexp))
realpart
imagpart)
(defun complex (realpart &optional (imagpart 0))
(check-type realpart real)
(check-type imagpart real)
(if (= 0 imagpart)
realpart
(make-complex realpart imagpart)))
(defmethod print-object ((x complex) stream)
(format stream "#c(~W ~W)"
(complex-realpart x)
(complex-imagpart x)))
(defun realpart (x)
(etypecase x
(complex
(complex-realpart x))
(real
x)))
(defun imagpart (x)
(etypecase x
(complex
(complex-imagpart x))
(real
0)))
| null |
https://raw.githubusercontent.com/dym/movitz/56176e1ebe3eabc15c768df92eca7df3c197cb3d/losp/muerte/complexes.lisp
|
lisp
|
------------------------------------------------------------------
Description: Complex numbers
Distribution: See the accompanying file COPYING.
------------------------------------------------------------------
|
;;;; Copyright (C) 2008,
;;;; Author:
;;;; $Id: complexes.lisp,v 1.4 2009-07-19 18:52:08 ffjeld Exp $
(require :muerte/basic-macros)
(require :muerte/defstruct)
(in-package muerte)
(provide :muerte/complexes)
(defstruct (complex (:constructor make-complex (realpart imagpart))
(:predicate complexp))
realpart
imagpart)
(defun complex (realpart &optional (imagpart 0))
(check-type realpart real)
(check-type imagpart real)
(if (= 0 imagpart)
realpart
(make-complex realpart imagpart)))
(defmethod print-object ((x complex) stream)
(format stream "#c(~W ~W)"
(complex-realpart x)
(complex-imagpart x)))
(defun realpart (x)
(etypecase x
(complex
(complex-realpart x))
(real
x)))
(defun imagpart (x)
(etypecase x
(complex
(complex-imagpart x))
(real
0)))
|
583836a4bb0eeffcc3de7b0c0b8ac79234755f6fe5a21796c2d406cf2a483d9e
|
Frama-C/Frama-C-snapshot
|
pdg_state.mli
|
(**************************************************************************)
(* *)
(* This file is part of Frama-C. *)
(* *)
(* Copyright (C) 2007-2019 *)
(* CEA (Commissariat à l'énergie atomique et aux énergies *)
(* alternatives) *)
(* *)
(* you can redistribute it and/or modify it under the terms of the GNU *)
(* Lesser General Public License as published by the Free Software *)
(* Foundation, version 2.1. *)
(* *)
(* It is distributed in the hope that it will be useful, *)
(* but WITHOUT ANY WARRANTY; without even the implied warranty of *)
(* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the *)
(* GNU Lesser General Public License for more details. *)
(* *)
(* See the GNU Lesser General Public License version 2.1 *)
(* for more details (enclosed in the file licenses/LGPLv2.1). *)
(* *)
(**************************************************************************)
exception Cannot_fold
open PdgTypes
(** Types data_state and Node.t come from this module *)
val make : PdgTypes.LocInfo.t -> Locations.Zone.t -> data_state
val empty : data_state
val bottom: data_state
val add_loc_node :
data_state -> exact:bool -> Locations.Zone.t -> Node.t -> data_state
val add_init_state_input :
data_state -> Locations.Zone.t -> Node.t -> data_state
(** Kind of 'join' of the two states
    but test before if the new state is included in ~old.
    @return (true, old U new) if the result is a new state,
            (false, old) if new is included in old. *)
val test_and_merge :
old:data_state -> data_state -> bool * data_state
(** @raise Cannot_fold if the state is Top *)
val get_loc_nodes :
data_state -> Locations.Zone.t -> (Node.t * Locations.Zone.t option) list * Locations.Zone.t option
val pretty : Format.formatter -> data_state -> unit
(* ~~~~~~~~~~~~~~~~~~~ *)
type states = data_state Cil_datatype.Stmt.Hashtbl.t
val store_init_state : states -> data_state -> unit
val store_last_state : states -> data_state -> unit
val get_init_state : states -> data_state
val get_stmt_state : states -> Cil_types.stmt -> data_state
val get_last_state : states -> data_state
| null |
https://raw.githubusercontent.com/Frama-C/Frama-C-snapshot/639a3647736bf8ac127d00ebe4c4c259f75f9b87/src/plugins/pdg/pdg_state.mli
|
ocaml
|
************************************************************************
alternatives)
you can redistribute it and/or modify it under the terms of the GNU
It is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU Lesser General Public License for more details.
************************************************************************
* Types data_state and Node.t come froms this module
* @raise Cannot_fold if the state is Top
~~~~~~~~~~~~~~~~~~~
|
(* This file is part of Frama-C. *)
(* Copyright (C) 2007-2019 *)
(* CEA (Commissariat à l'énergie atomique et aux énergies *)
(* Lesser General Public License as published by the Free Software *)
(* Foundation, version 2.1. *)
(* See the GNU Lesser General Public License version 2.1 *)
(* for more details (enclosed in the file licenses/LGPLv2.1). *)
exception Cannot_fold
open PdgTypes
val make : PdgTypes.LocInfo.t -> Locations.Zone.t -> data_state
val empty : data_state
val bottom: data_state
val add_loc_node :
data_state -> exact:bool -> Locations.Zone.t -> Node.t -> data_state
val add_init_state_input :
data_state -> Locations.Zone.t -> Node.t -> data_state
(** Kind of 'join' of the two states
    but test before if the new state is included in ~old.
    @return (true, old U new) if the result is a new state,
            (false, old) if new is included in old. *)
val test_and_merge :
old:data_state -> data_state -> bool * data_state
val get_loc_nodes :
data_state -> Locations.Zone.t -> (Node.t * Locations.Zone.t option) list * Locations.Zone.t option
val pretty : Format.formatter -> data_state -> unit
type states = data_state Cil_datatype.Stmt.Hashtbl.t
val store_init_state : states -> data_state -> unit
val store_last_state : states -> data_state -> unit
val get_init_state : states -> data_state
val get_stmt_state : states -> Cil_types.stmt -> data_state
val get_last_state : states -> data_state
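(* Editor's note: hypothetical usage sketch, not part of the original
   interface and not Frama-C API. It only illustrates the fixpoint-style
   use suggested by the [test_and_merge] documentation above; the names
   [propagate] and [process_successors] are placeholders.

     let propagate ~old incoming =
       let changed, merged = test_and_merge ~old incoming in
       if changed then process_successors merged;
       merged
*)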
|
77ad90460e263e6203332700e9397ac7fc3086c53b8a7644b88515cf1112d8dc
|
travisbrady/ocaml-vw
|
personalization_homework.ml
|
(*
* A quick test based on this homework assignment: /~jebara/6998/hw2.pdf
*)
open Core
let printf = Printf.printf
let sprintf = Printf.sprintf
let show_tables left right =
printf "\n";
let al = Float.Table.to_alist left |> List.sort ~compare:(fun a b -> Float.compare (fst a) (fst b)) in
List.iter al ~f:(fun (key, data) ->
match Float.Table.find right key with
| Some x -> printf "key:%.0f real=%d recd=%d\n" key data x
| None -> printf "key:%.0f real=%d recd=\n" key data
)
type example_components = {
chunks : string list;
features : string;
lab : string;
labf : float;
reward : float;
train_string : string;
test_string : string
}
let parse_line line =
let chunks = String.split line ~on:' ' in
let features = List.mapi (List.drop chunks 2) ~f:(fun i x -> sprintf "f%d:%s.0" (i+1) x) |> String.concat ~sep:" " in
let lab = List.hd_exn chunks in
let labf = Float.of_string lab in
let reward = Float.of_string (List.nth_exn chunks 1) in
let cost = if Float.(reward = 1.0) then -1.0 else 1.0 in
let vw_string = sprintf "%s:%.4f:0.1 | %s" lab cost features in
let test_string = sprintf "| %s" features in
{chunks=chunks; features=features; lab=lab; labf=labf;
reward=reward; test_string=test_string;
train_string=vw_string}
let () =
let vw = Vw.initialize "--cb 10 --cover 3 --cb_type ips -l 0.001 --quiet" in
let ic = In_channel.create "data/dataset.txt" in
let ctr = ref 0 in
let numerator_sum = ref 0.0 in
let denom_sum = ref 0.0 in
let real_actions : int Float.Table.t = Float.Table.create () in
let reco_arms : int Float.Table.t = Float.Table.create () in
let correct_pred = Float.Table.create () in
In_channel.iter_lines ic ~f:(fun line ->
let ec = parse_line line in
Float.Table.incr real_actions ec.labf;
let p = Vw.cb_predict_string vw ec.test_string in
Float.Table.incr reco_arms p;
let are_same = if (Float.equal ec.labf p) then 1.0 else 0.0 in
if (Float.equal ec.labf p) then begin
denom_sum := !denom_sum +. are_same;
numerator_sum := !numerator_sum +. ec.reward;
Float.Table.incr correct_pred ~by:(Int.of_float are_same) p;
let _ = Vw.learn_string vw ec.train_string in
()
end;
if (!ctr mod 1000) = 0 then (
printf "%.2f %.2f Take Rate: %f\n" !numerator_sum !denom_sum (!numerator_sum /. !denom_sum));
incr ctr;
);
Vw.finish vw;
show_tables real_actions reco_arms;
printf "Total Agreements: %.0f\n" !denom_sum;
let al = Float.Table.to_alist correct_pred |> List.sort ~compare:(fun a b -> Float.compare (fst a) (fst b)) in
printf "\nCorrect Preds\n";
List.iter al ~f:(fun (key, data) ->
printf "key:%.0f count:%d\n" key data
);
printf "%.2f %.2f Take Rate: %f\n" !numerator_sum !denom_sum (!numerator_sum /. !denom_sum)
| null |
https://raw.githubusercontent.com/travisbrady/ocaml-vw/2c5ac40872a7bd4b921a8554f82f8783f5e2f88d/examples/personalization_homework.ml
|
ocaml
|
* A quick test based on this homework assignment: /~jebara/6998/hw2.pdf
|
open Core
let printf = Printf.printf
let sprintf = Printf.sprintf
let show_tables left right =
printf "\n";
let al = Float.Table.to_alist left |> List.sort ~compare:(fun a b -> Float.compare (fst a) (fst b)) in
List.iter al ~f:(fun (key, data) ->
match Float.Table.find right key with
| Some x -> printf "key:%.0f real=%d recd=%d\n" key data x
| None -> printf "key:%.0f real=%d recd=\n" key data
)
type example_components = {
chunks : string list;
features : string;
lab : string;
labf : float;
reward : float;
train_string : string;
test_string : string
}
let parse_line line =
let chunks = String.split line ~on:' ' in
let features = List.mapi (List.drop chunks 2) ~f:(fun i x -> sprintf "f%d:%s.0" (i+1) x) |> String.concat ~sep:" " in
let lab = List.hd_exn chunks in
let labf = Float.of_string lab in
let reward = Float.of_string (List.nth_exn chunks 1) in
let cost = if Float.(reward = 1.0) then -1.0 else 1.0 in
let vw_string = sprintf "%s:%.4f:0.1 | %s" lab cost features in
let test_string = sprintf "| %s" features in
{chunks=chunks; features=features; lab=lab; labf=labf;
reward=reward; test_string=test_string;
train_string=vw_string}
let () =
let vw = Vw.initialize "--cb 10 --cover 3 --cb_type ips -l 0.001 --quiet" in
let ic = In_channel.create "data/dataset.txt" in
let ctr = ref 0 in
let numerator_sum = ref 0.0 in
let denom_sum = ref 0.0 in
let real_actions : int Float.Table.t = Float.Table.create () in
let reco_arms : int Float.Table.t = Float.Table.create () in
let correct_pred = Float.Table.create () in
In_channel.iter_lines ic ~f:(fun line ->
let ec = parse_line line in
Float.Table.incr real_actions ec.labf;
let p = Vw.cb_predict_string vw ec.test_string in
Float.Table.incr reco_arms p;
let are_same = if (Float.equal ec.labf p) then 1.0 else 0.0 in
if (Float.equal ec.labf p) then begin
denom_sum := !denom_sum +. are_same;
numerator_sum := !numerator_sum +. ec.reward;
Float.Table.incr correct_pred ~by:(Int.of_float are_same) p;
let _ = Vw.learn_string vw ec.train_string in
()
end;
if (!ctr mod 1000) = 0 then (
printf "%.2f %.2f Take Rate: %f\n" !numerator_sum !denom_sum (!numerator_sum /. !denom_sum));
incr ctr;
);
Vw.finish vw;
show_tables real_actions reco_arms;
printf "Total Agreements: %.0f\n" !denom_sum;
let al = Float.Table.to_alist correct_pred |> List.sort ~compare:(fun a b -> Float.compare (fst a) (fst b)) in
printf "\nCorrect Preds\n";
List.iter al ~f:(fun (key, data) ->
printf "key:%.0f count:%d\n" key data
);
printf "%.2f %.2f Take Rate: %f\n" !numerator_sum !denom_sum (!numerator_sum /. !denom_sum)
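(* Editor's note: illustrative sketch, not part of the original program. It
   documents the line format [parse_line] assumes -- "<chosen-arm> <reward>
   <feature> ..." -- using a made-up input line; the real contents of
   data/dataset.txt are not shown in this file. *)
let _parse_line_sketch () =
  let ec = parse_line "2 1 5 0 7" in
  assert (String.equal ec.lab "2");
  assert (Float.equal ec.reward 1.0);
  assert (String.equal ec.features "f1:5.0 f2:0.0 f3:7.0");
  assert (String.equal ec.test_string "| f1:5.0 f2:0.0 f3:7.0")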
|
247c37d18c4afcee9836a34e71bbeb9adb5f65f9859f54db6fdb9a5191b4163c
|
fission-codes/fission
|
WebSocket.hs
|
{-# OPTIONS_GHC -fno-warn-orphans #-}
module Fission.Web.Client.Internal.Orphanage.WebSocket () where
import RIO
import qualified RIO.ByteString.Lazy as Lazy
import qualified RIO.Partial as Partial
import qualified RIO.Text as Text
import qualified Data.Binary.Builder as Builder
import Network.WebSockets.Client
import Servant.API.WebSocket
import Servant.Client.Core
import Wuss
import Fission.Web.API.Host.Types
instance (RunClient m, MonadIO m) => HasClient m WebSocket where
type Client m WebSocket = Host -> Port -> ClientApp () -> m ()
hoistClientMonad _pxyM _pxyWS nt client' =
\host port handler -> nt (client' host port handler)
clientWithRoute _pxyM _pxyWS Request {..} host (Port port) handler =
liftIO $ runSecureClient (getRawHost host) (Partial.toEnum port) path handler
where
path = Text.unpack . decodeUtf8Lenient . Lazy.toStrict $ Builder.toLazyByteString requestPath
| null |
https://raw.githubusercontent.com/fission-codes/fission/7e69c0da210a77412c96631f5ff7ef1b38240d37/fission-web-client/library/Fission/Web/Client/Internal/Orphanage/WebSocket.hs
|
haskell
|
{-# OPTIONS_GHC -fno-warn-orphans #-}
module Fission.Web.Client.Internal.Orphanage.WebSocket () where
import RIO
import qualified RIO.ByteString.Lazy as Lazy
import qualified RIO.Partial as Partial
import qualified RIO.Text as Text
import qualified Data.Binary.Builder as Builder
import Network.WebSockets.Client
import Servant.API.WebSocket
import Servant.Client.Core
import Wuss
import Fission.Web.API.Host.Types
instance (RunClient m, MonadIO m) => HasClient m WebSocket where
type Client m WebSocket = Host -> Port -> ClientApp () -> m ()
hoistClientMonad _pxyM _pxyWS nt client' =
\host port handler -> nt (client' host port handler)
clientWithRoute _pxyM _pxyWS Request {..} host (Port port) handler =
liftIO $ runSecureClient (getRawHost host) (Partial.toEnum port) path handler
where
path = Text.unpack . decodeUtf8Lenient . Lazy.toStrict $ Builder.toLazyByteString requestPath
|
|
ab023ede0358c5b0416153a2696d1ac3fc6af87dc063ac3fbf4781e86e0e61a9
|
1Jajen1/Brokkr
|
PackedSpec.hs
|
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE TypeFamilies #-}
module Vector.PackedSpec where
import Test.Syd
import Test.Syd.Validity
import Control.Monad.ST
import qualified Foreign as FP
import Data.Word
import Control.Monad
import Data.Bits
import qualified Util.Vector.Packed as P
import qualified Test.QuickCheck as Gen
import Control.Monad.Primitive (unsafePrimToPrim)
import qualified Test.QuickCheck as Q
import qualified Foreign.ForeignPtr.Unsafe as FP
import qualified Data.Primitive.Ptr as Prim
import GHC.TypeLits
import qualified Data.Vector.Storable as S
import Data.Semigroup
import Data.Coerce
import Data.List (nub)
spec :: Spec
spec = describe "PackedVector" $ do
-- TODO Read and write from/to arbitrary sizes
describe "basicRead" $ do
specify "64: basicWrite v i a >> basicRead v i == pure a" $ forAllValid $ \(VectorWrite @64 i el (SomePackedVector v)) -> runST $ do
mv <- P.unsafeThaw v
P.unsafeWrite mv i el
nEl <- P.unsafeRead mv i
pure $ nEl Q.=== el
specify "49: basicWrite v i a >> basicRead v i == pure a" $ forAllValid $ \(VectorWrite @49 i el (SomePackedVector v)) -> runST $ do
mv <- P.unsafeThaw v
P.unsafeWrite mv i el
nEl <- P.unsafeRead mv i
pure $ nEl Q.=== el
describe "unsafeCopy" $ do
specify "unsafeCopy v1 v2 => v1 == v2" $ forAllValid $ \(SomePackedVector @64 v1, SomePackedVector @64 v2) -> runST $ do
mv1 <- P.unsafeThaw v1
let destBSz = P.bitSz mv1
mv2 <- P.unsafeThaw v2
P.unsafeCopy mv1 mv2
-- Collect both vectors and don't forget that the first one has the values from the second but truncated!
pure $ P.foldMap (\w -> [w]) v1 Q.=== P.foldMap (\w -> [((unsafeShiftL 1 destBSz) - 1) .&. w]) v2
specify "countElems" $ forAllValid $ \(SomePackedVector @128 v, (ints :: [Word8])) ->
let act = P.countElems els v
-- TODO Mask them into whatever bitsize we use
els = S.fromList $ take 3 $ nub $ (fromIntegral . (.&. 15)) <$> ints
exp = coerce $ P.foldMap (\x -> if S.elem x els then Sum (1 :: Int) else Sum 0) v
in (not $ null ints) Q.==> act `shouldBe` exp
-- Generate random valid writes
data VectorWrite (sz :: Nat) = VectorWrite Int Word (SomePackedVector sz)
instance Show (VectorWrite sz) where
show (VectorWrite i el v) = "Write " <> show i <> " " <> show el <> " with Vector " <> show v
instance KnownNat sz => GenValid (VectorWrite sz) where
genValid = do
v@(SomePackedVector vec) <- genValid @(SomePackedVector sz)
let (len, bSz) =
runST $ do
mv <- P.unsafeThaw vec
pure (P.length mv, P.bitSz mv)
i <- Gen.choose (0, len)
el <- Gen.choose (0, (unsafeShiftL 1 bSz) - 1)
pure $ VectorWrite i el v
shrinkValid _ = []
instance Validity (VectorWrite sz) where
validate _ = valid
-- Generate vectors
data SomePackedVector (sz :: Nat) where
SomePackedVector :: forall sz v . (P.PVector v, Show v) => v -> SomePackedVector sz
instance Show (SomePackedVector sz) where
show (SomePackedVector v) = show v
instance KnownNat sz => GenValid (SomePackedVector sz) where
genValid = Gen.oneof [
-- First a few different statically sized vectors
do
xs <- Gen.vector @Word8 sz
let wordSz = nrWords 5 sz
wordMask = (unsafeShiftL 1 5) - 1
pure $ runST $ unsafePrimToPrim $ do
fptr <- FP.mallocForeignPtrArray wordSz
let ptr = FP.unsafeForeignPtrToPtr fptr
Prim.setPtr ptr wordSz 0
mpv <- P.unsafeThaw $ P.unsafeStaticFromForeignPtr @sz @5 fptr
forM_ (zip xs [0..]) $ \(el, i) -> P.unsafeWrite mpv i $ wordMask .&. fromIntegral el
pv <- P.unsafeFreeze mpv
pure $ SomePackedVector pv
, do
xs <- Gen.vector @Word8 sz
let wordSz = nrWords 23 sz
wordMask = (unsafeShiftL 1 23) - 1
pure $ runST $ unsafePrimToPrim $ do
fptr <- FP.mallocForeignPtrArray wordSz
let ptr = FP.unsafeForeignPtrToPtr fptr
Prim.setPtr ptr wordSz 0
mpv <- P.unsafeThaw $ P.unsafeStaticFromForeignPtr @sz @23 fptr
forM_ (zip xs [0..]) $ \(el, i) -> P.unsafeWrite mpv i $ wordMask .&. fromIntegral el
pv <- P.unsafeFreeze mpv
pure $ SomePackedVector pv
, do
xs <- Gen.vector @Word8 sz
let wordSz = nrWords 4 sz
wordMask = (unsafeShiftL 1 4) - 1
pure $ runST $ unsafePrimToPrim $ do
fptr <- FP.mallocForeignPtrArray wordSz
let ptr = FP.unsafeForeignPtrToPtr fptr
Prim.setPtr ptr wordSz 0
mpv <- P.unsafeThaw $ P.unsafeStaticFromForeignPtr @sz @4 fptr
forM_ (zip xs [0..]) $ \(el, i) -> P.unsafeWrite mpv i $ wordMask .&. fromIntegral el
pv <- P.unsafeFreeze mpv
pure $ SomePackedVector pv
-- Next some dynamic sized vectors
, do
bitSz <- Gen.elements [1..63]
xs <- Gen.vector @Word sz
let wordSz = nrWords bitSz sz
wordMask = (unsafeShiftL 1 bitSz) - 1
pure $ runST $ unsafePrimToPrim $ do
fptr <- FP.mallocForeignPtrArray wordSz
let ptr = FP.unsafeForeignPtrToPtr fptr
Prim.setPtr ptr wordSz 0
mpv <- P.unsafeThaw $ P.unsafeDynamicFromForeignPtr @sz bitSz fptr
forM_ (zip xs [0..]) $ \(el, i) -> P.unsafeWrite mpv i $ wordMask .&. el
pv <- P.unsafeFreeze mpv
pure $ SomePackedVector pv
]
where sz = fromIntegral $ natVal @sz undefined
shrinkValid _ = []
instance Validity (SomePackedVector sz) where
validate _ = valid
nrWords :: Int -> Int -> Int
nrWords bSz i = (i + perWord - 1) `div` perWord
where perWord = 64 `div` bSz
{-# INLINE nrWords #-}
| null |
https://raw.githubusercontent.com/1Jajen1/Brokkr/fe56efaf450f29a5571cc34fa01f7301678f3eaf/test/Vector/PackedSpec.hs
|
haskell
|
TODO Mask them into whatever bitsize we use
Generate random valid writes
Generate vectors
Next some dynamic sized vectors
|
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE TypeFamilies #-}
module Vector.PackedSpec where
import Test.Syd
import Test.Syd.Validity
import Control.Monad.ST
import qualified Foreign as FP
import Data.Word
import Control.Monad
import Data.Bits
import qualified Util.Vector.Packed as P
import qualified Test.QuickCheck as Gen
import Control.Monad.Primitive (unsafePrimToPrim)
import qualified Test.QuickCheck as Q
import qualified Foreign.ForeignPtr.Unsafe as FP
import qualified Data.Primitive.Ptr as Prim
import GHC.TypeLits
import qualified Data.Vector.Storable as S
import Data.Semigroup
import Data.Coerce
import Data.List (nub)
spec :: Spec
spec = describe "PackedVector" $ do
-- TODO Read and write from/to arbitrary sizes
describe "basicRead" $ do
specify "64: basicWrite v i a >> basicRead v i == pure a" $ forAllValid $ \(VectorWrite @64 i el (SomePackedVector v)) -> runST $ do
mv <- P.unsafeThaw v
P.unsafeWrite mv i el
nEl <- P.unsafeRead mv i
pure $ nEl Q.=== el
specify "49: basicWrite v i a >> basicRead v i == pure a" $ forAllValid $ \(VectorWrite @49 i el (SomePackedVector v)) -> runST $ do
mv <- P.unsafeThaw v
P.unsafeWrite mv i el
nEl <- P.unsafeRead mv i
pure $ nEl Q.=== el
describe "unsafeCopy" $ do
specify "unsafeCopy v1 v2 => v1 == v2" $ forAllValid $ \(SomePackedVector @64 v1, SomePackedVector @64 v2) -> runST $ do
mv1 <- P.unsafeThaw v1
let destBSz = P.bitSz mv1
mv2 <- P.unsafeThaw v2
P.unsafeCopy mv1 mv2
-- Collect both vectors and don't forget that the first one has the values from the second but truncated!
pure $ P.foldMap (\w -> [w]) v1 Q.=== P.foldMap (\w -> [((unsafeShiftL 1 destBSz) - 1) .&. w]) v2
specify "countElems" $ forAllValid $ \(SomePackedVector @128 v, (ints :: [Word8])) ->
let act = P.countElems els v
els = S.fromList $ take 3 $ nub $ (fromIntegral . (.&. 15)) <$> ints
exp = coerce $ P.foldMap (\x -> if S.elem x els then Sum (1 :: Int) else Sum 0) v
in (not $ null ints) Q.==> act `shouldBe` exp
data VectorWrite (sz :: Nat) = VectorWrite Int Word (SomePackedVector sz)
instance Show (VectorWrite sz) where
show (VectorWrite i el v) = "Write " <> show i <> " " <> show el <> " with Vector " <> show v
instance KnownNat sz => GenValid (VectorWrite sz) where
genValid = do
v@(SomePackedVector vec) <- genValid @(SomePackedVector sz)
let (len, bSz) =
runST $ do
mv <- P.unsafeThaw vec
pure (P.length mv, P.bitSz mv)
i <- Gen.choose (0, len)
el <- Gen.choose (0, (unsafeShiftL 1 bSz) - 1)
pure $ VectorWrite i el v
shrinkValid _ = []
instance Validity (VectorWrite sz) where
validate _ = valid
data SomePackedVector (sz :: Nat) where
SomePackedVector :: forall sz v . (P.PVector v, Show v) => v -> SomePackedVector sz
instance Show (SomePackedVector sz) where
show (SomePackedVector v) = show v
instance KnownNat sz => GenValid (SomePackedVector sz) where
genValid = Gen.oneof [
-- First a few different statically sized vectors
do
xs <- Gen.vector @Word8 sz
let wordSz = nrWords 5 sz
wordMask = (unsafeShiftL 1 5) - 1
pure $ runST $ unsafePrimToPrim $ do
fptr <- FP.mallocForeignPtrArray wordSz
let ptr = FP.unsafeForeignPtrToPtr fptr
Prim.setPtr ptr wordSz 0
mpv <- P.unsafeThaw $ P.unsafeStaticFromForeignPtr @sz @5 fptr
forM_ (zip xs [0..]) $ \(el, i) -> P.unsafeWrite mpv i $ wordMask .&. fromIntegral el
pv <- P.unsafeFreeze mpv
pure $ SomePackedVector pv
, do
xs <- Gen.vector @Word8 sz
let wordSz = nrWords 23 sz
wordMask = (unsafeShiftL 1 23) - 1
pure $ runST $ unsafePrimToPrim $ do
fptr <- FP.mallocForeignPtrArray wordSz
let ptr = FP.unsafeForeignPtrToPtr fptr
Prim.setPtr ptr wordSz 0
mpv <- P.unsafeThaw $ P.unsafeStaticFromForeignPtr @sz @23 fptr
forM_ (zip xs [0..]) $ \(el, i) -> P.unsafeWrite mpv i $ wordMask .&. fromIntegral el
pv <- P.unsafeFreeze mpv
pure $ SomePackedVector pv
, do
xs <- Gen.vector @Word8 sz
let wordSz = nrWords 4 sz
wordMask = (unsafeShiftL 1 4) - 1
pure $ runST $ unsafePrimToPrim $ do
fptr <- FP.mallocForeignPtrArray wordSz
let ptr = FP.unsafeForeignPtrToPtr fptr
Prim.setPtr ptr wordSz 0
mpv <- P.unsafeThaw $ P.unsafeStaticFromForeignPtr @sz @4 fptr
forM_ (zip xs [0..]) $ \(el, i) -> P.unsafeWrite mpv i $ wordMask .&. fromIntegral el
pv <- P.unsafeFreeze mpv
pure $ SomePackedVector pv
, do
bitSz <- Gen.elements [1..63]
xs <- Gen.vector @Word sz
let wordSz = nrWords bitSz sz
wordMask = (unsafeShiftL 1 bitSz) - 1
pure $ runST $ unsafePrimToPrim $ do
fptr <- FP.mallocForeignPtrArray wordSz
let ptr = FP.unsafeForeignPtrToPtr fptr
Prim.setPtr ptr wordSz 0
mpv <- P.unsafeThaw $ P.unsafeDynamicFromForeignPtr @sz bitSz fptr
forM_ (zip xs [0..]) $ \(el, i) -> P.unsafeWrite mpv i $ wordMask .&. el
pv <- P.unsafeFreeze mpv
pure $ SomePackedVector pv
]
where sz = fromIntegral $ natVal @sz undefined
shrinkValid _ = []
instance Validity (SomePackedVector sz) where
validate _ = valid
nrWords :: Int -> Int -> Int
nrWords bSz i = (i + perWord - 1) `div` perWord
where perWord = 64 `div` bSz
{-# INLINE nrWords #-}
|
0b6f717aa5b235da706a28b1b8eb847fa47b70dbd7542705c8f64086390de434
|
mzp/coq-for-ipad
|
condition.ml
|
(***********************************************************************)
(* *)
(* Objective Caml *)
(* *)
(* and, INRIA Rocquencourt *)
(* *)
(* Copyright 1996 Institut National de Recherche en Informatique et *)
(* en Automatique. All rights reserved. This file is distributed *)
(* under the terms of the GNU Library General Public License, with *)
(* the special exception on linking described in file ../../LICENSE. *)
(* *)
(***********************************************************************)
(* $Id: condition.ml 9547 2010-01-22 12:48:24Z doligez $ *)
type t = { mutable waiting: Thread.t list }
let create () = { waiting = [] }
let wait cond mut =
Thread.critical_section := true;
Mutex.unlock mut;
cond.waiting <- Thread.self() :: cond.waiting;
Thread.sleep();
Mutex.lock mut
let signal cond =
match cond.waiting with (* atomic *)
[] -> ()
| th :: rem -> cond.waiting <- rem (* atomic *); Thread.wakeup th
let broadcast cond =
let w = cond.waiting in (* atomic *)
cond.waiting <- []; (* atomic *)
List.iter Thread.wakeup w
| null |
https://raw.githubusercontent.com/mzp/coq-for-ipad/4fb3711723e2581a170ffd734e936f210086396e/src/ocaml-3.12.0/otherlibs/threads/condition.ml
|
ocaml
|
*********************************************************************
Objective Caml
the special exception on linking described in file ../../LICENSE.
*********************************************************************
atomic
atomic
atomic
atomic
|
(* and, INRIA Rocquencourt *)
(* Copyright 1996 Institut National de Recherche en Informatique et *)
(* en Automatique. All rights reserved. This file is distributed *)
(* under the terms of the GNU Library General Public License, with *)
(* $Id: condition.ml 9547 2010-01-22 12:48:24Z doligez $ *)
type t = { mutable waiting: Thread.t list }
let create () = { waiting = [] }
let wait cond mut =
Thread.critical_section := true;
Mutex.unlock mut;
cond.waiting <- Thread.self() :: cond.waiting;
Thread.sleep();
Mutex.lock mut
let signal cond =
match cond.waiting with
[] -> ()
| th :: rem -> cond.waiting <- rem; Thread.wakeup th
let broadcast cond =
let w = cond.waiting in
cond.waiting <- [];
List.iter Thread.wakeup w
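(* Editor's note: minimal usage sketch, not part of the original library
   source. It shows the intended protocol for [wait]/[signal] together with
   the Mutex and Thread modules of the same threads library: the waiter holds
   the mutex and re-checks its predicate in a loop, while the signaller
   updates the shared state before signalling. *)
let _usage_sketch () =
  let m = Mutex.create () in
  let c = create () in
  let ready = ref false in
  let waiter () =
    Mutex.lock m;
    while not !ready do wait c m done;
    Mutex.unlock m
  in
  let t = Thread.create waiter () in
  Mutex.lock m;
  ready := true;
  signal c;
  Mutex.unlock m;
  Thread.join t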
|
170fa635eef7737e721d2bb70d1d99a188490a83bf500e6b581b5c4cce3d464b
|
uw-unsat/serval
|
common.rkt
|
#lang rosette
(require
"define.rkt"
"../base.rkt"
"../../lib/bvarith.rkt"
(prefix-in core: "../../lib/core.rkt"))
(provide
(all-defined-out)
(all-from-out
"define.rkt"
"../base.rkt"
"../../lib/bvarith.rkt"
"../../lib/core.rkt"))
(struct ptr (addr off size) #:transparent)
(define (notimplemented cpu insn . args)
(error (format "instruction ~v not implemented" insn)))
(define (cpu-next! cpu insn)
(set-cpu-pc! cpu (bvadd (bv (instruction-size insn) (cpu-xlen cpu))
(cpu-pc cpu))))
(define (skip cpu insn . args)
(cpu-next! cpu insn))
(define (skip/debug cpu insn . args)
(displayln insn)
(cpu-next! cpu insn))
;; Make a shift op from SMT shift operation by masking out upper bits.
(define ((make-shift-op op) v1 v2)
(op v1 (bvand (bv (sub1 (core:bv-size v1)) (core:bv-size v1)) v2)))
;; Make a comparison op from SMT comparison and lifting to bv.
(define ((make-cmp-op op) v1 v2)
(if (op v1 v2) (bv 1 (core:bv-size v1)) (bv 0 (core:bv-size v1))))
;; Register-register operation
(define (reg-reg-op op cpu insn rs2 rs1 rd)
(define a (gpr-ref cpu rs1))
(define b (gpr-ref cpu rs2))
(gpr-set! cpu rd (op a b))
(cpu-next! cpu insn))
(define (reg-imm-op op cpu insn imm rs1 rd)
(define xlen (cpu-xlen cpu))
(define a (gpr-ref cpu rs1))
(define b (sign-extend imm (bitvector xlen)))
(gpr-set! cpu rd (op a b))
(cpu-next! cpu insn))
;; 32-bit ops on rv64
(define (reg-reg-opw op cpu insn rs2 rs1 rd)
(core:bug-on (! (= (cpu-xlen cpu) 64)) #:msg (format "~v: (cpu-xlen cpu) != 64" insn))
(define a (trunc 32 (gpr-ref cpu rs1)))
(define b (trunc 32 (gpr-ref cpu rs2)))
(gpr-set! cpu rd (sign-extend (op a b) (bitvector 64)))
(cpu-next! cpu insn))
(define (reg-imm-opw op cpu insn imm rs1 rd)
(core:bug-on (! (= (cpu-xlen cpu) 64)) #:msg (format "~v: (cpu-xlen cpu) != 64" insn))
(define a (trunc 32 (gpr-ref cpu rs1)))
(define b (sign-extend imm (bitvector 32)))
(gpr-set! cpu rd (sign-extend (op a b) (bitvector 64)))
(cpu-next! cpu insn))
| null |
https://raw.githubusercontent.com/uw-unsat/serval/be11ecccf03f81b8bd0557acf8385a6a5d4f51ed/serval/riscv/interp/common.rkt
|
racket
|
#lang rosette
(require
"define.rkt"
"../base.rkt"
"../../lib/bvarith.rkt"
(prefix-in core: "../../lib/core.rkt"))
(provide
(all-defined-out)
(all-from-out
"define.rkt"
"../base.rkt"
"../../lib/bvarith.rkt"
"../../lib/core.rkt"))
(struct ptr (addr off size) #:transparent)
(define (notimplemented cpu insn . args)
(error (format "instruction ~v not implemented" insn)))
(define (cpu-next! cpu insn)
(set-cpu-pc! cpu (bvadd (bv (instruction-size insn) (cpu-xlen cpu))
(cpu-pc cpu))))
(define (skip cpu insn . args)
(cpu-next! cpu insn))
(define (skip/debug cpu insn . args)
(displayln insn)
(cpu-next! cpu insn))
;; Make a shift op from SMT shift operation by masking out upper bits.
(define ((make-shift-op op) v1 v2)
(op v1 (bvand (bv (sub1 (core:bv-size v1)) (core:bv-size v1)) v2)))
;; Make a comparison op from SMT comparison and lifting to bv.
(define ((make-cmp-op op) v1 v2)
(if (op v1 v2) (bv 1 (core:bv-size v1)) (bv 0 (core:bv-size v1))))
;; Register-register operation
(define (reg-reg-op op cpu insn rs2 rs1 rd)
(define a (gpr-ref cpu rs1))
(define b (gpr-ref cpu rs2))
(gpr-set! cpu rd (op a b))
(cpu-next! cpu insn))
(define (reg-imm-op op cpu insn imm rs1 rd)
(define xlen (cpu-xlen cpu))
(define a (gpr-ref cpu rs1))
(define b (sign-extend imm (bitvector xlen)))
(gpr-set! cpu rd (op a b))
(cpu-next! cpu insn))
;; 32-bit ops on rv64
(define (reg-reg-opw op cpu insn rs2 rs1 rd)
(core:bug-on (! (= (cpu-xlen cpu) 64)) #:msg (format "~v: (cpu-xlen cpu) != 64" insn))
(define a (trunc 32 (gpr-ref cpu rs1)))
(define b (trunc 32 (gpr-ref cpu rs2)))
(gpr-set! cpu rd (sign-extend (op a b) (bitvector 64)))
(cpu-next! cpu insn))
(define (reg-imm-opw op cpu insn imm rs1 rd)
(core:bug-on (! (= (cpu-xlen cpu) 64)) #:msg (format "~v: (cpu-xlen cpu) != 64" insn))
(define a (trunc 32 (gpr-ref cpu rs1)))
(define b (sign-extend imm (bitvector 32)))
(gpr-set! cpu rd (sign-extend (op a b) (bitvector 64)))
(cpu-next! cpu insn))
|
|
2cdd45ae00b453326f8204ed9a19d3a3d41991ed4b26c15a31d95eecf055a2c5
|
plum-umd/adapton.racket
|
adapton.rkt
|
#lang racket
;; This file contains the core of adapton, consisting of:
;; - definition of functions memoized for adapton
;; - creation of nodes when those functions are called
;; - force for nodes and cells
(require (prefix-in r: (only-in racket delay force equal-hash-code))
rackunit
"memo-table-modification-tools.rkt"
"graphing.rkt"
"data-structures.rkt")
(provide (all-defined-out))
;; ========================================
;; define/memo creates a memoized version of a function
;; any function that you want to be tracked by adapton should use define/memo.
;; tests for define/memo
(module+ test
(define/memo (add1 n) ;; return a node that adds 1 to n
(+ 1 n))
(define/memo (fib n) ;; return a node that computes nth number of fib
(cond
[(= n 0) 0]
[(= n 1) 1]
[else (+ (force (fib (- n 1))) (force (fib (- n 2))))]))
(check-equal? (node? (add1 4)) #t)
(check-equal? (force (add1 4)) 5)
(check-equal? (node? (fib 4)) #t)
(check-equal? (force (fib 4)) 3))
;; definition
(define-syntax-rule
(define/memo (f x ...) e ...)
(define f
(matt (λ (x ...) e ...))))
;; ========================================
;; a matt structure is created when a memoized function is called,
;; and invokes memo to create a thunk delaying the evaluation of
;; that function with the given arguments.
;; tests for matt structure and memo function
(module+ test
(define m1 (matt (λ (a) a)))
(define m2 (matt (λ (a b) (+ a b))))
;;ensure that the nodes are well-formed
(check-equal? (node? (m1 21)) #t)
(check-equal? (force (m1 21)) 21)
(check-equal? (node? (m2 21 21)) #t)
(check-equal? (force (m2 21 21)) 42)
;;ensure they are added to the memo-table properly
(check-equal? (node? (hash-ref *memo-table* (node-id (m1 21)))) #t)
(check-equal? (node? (hash-ref *memo-table* (node-id (m2 21 21)))) #t))
(struct matt (f)
#:property
prop:procedure
(λ (m . xs) (apply memo m xs)))
;; calling a memoized function with args creates a node for that function
;; and those arguments, and adds that node to the memo table
(define (memo m . xs)
(match m
[(matt f)
(let* ([id (equal-hash-code (cons f xs))]
[n (node id #f '() '() (car (unbox create-stack)) xs (λ () (apply f xs)) '())]
[t (hash-ref! *memo-table* id n)])
(write-to-graph (format "[change]add node~n[node ~a red]~n" id))
;(write-to-graph (format "[change]add create edge~n[edge ~a ~a blue]~n"
; (car (unbox create-stack)) id))
(if (equal? (node-args t) xs)
t
(hash-ref! *memo-table*
(+ id 1)
(node (+ id 1) #f '() '()
(car (unbox create-stack)) xs (λ () (apply f xs)) '()))))]))
| null |
https://raw.githubusercontent.com/plum-umd/adapton.racket/9ddfec8a22809cfb37fbbd8871a088fc3bd51787/adapton/adapton.rkt
|
racket
|
This file contains the core of adapton, consisting of:
- definition of functions memoized for adapton
- creation of nodes when those functions are called
- force for nodes and cells
This file contains the core of adapton, consisting of:
- definition of functions memoized for adapton
- creation of nodes when those functions are called
- force for nodes and cells
========================================
define/memo creates a memoized version of a function
any function that you want to be tracked by adapton should use define/memo.
tests for define/memo
return a node that computes nth number of fib
definition
========================================
a matt structure is created when a memoized function is called,
and invokes memo to create a thunk delaying the evaluation of
that function with the given arguments.
tests for matt structure and memo function
ensure that the nodes are well-formed
ensure they are added to the memo-table properly
calling a memoized function with args creates a node for that function
and those arguments, and adds that node to the memo table
(car (unbox create-stack)) id))
|
#lang racket
(require (prefix-in r: (only-in racket delay force equal-hash-code))
rackunit
"memo-table-modification-tools.rkt"
"graphing.rkt"
"data-structures.rkt")
(provide (all-defined-out))
(module+ test
(define/memo (add1 n)
(+ 1 n))
(define/memo (fib n)
(cond
[(= n 0) 0]
[(= n 1) 1]
[else (+ (force (fib (- n 1))) (force (fib (- n 2))))]))
(check-equal? (node? (add1 4)) #t)
(check-equal? (force (add1 4)) 5)
(check-equal? (node? (fib 4)) #t)
(check-equal? (force (fib 4)) 3))
(define-syntax-rule
(define/memo (f x ...) e ...)
(define f
(matt (λ (x ...) e ...))))
(module+ test
(define m1 (matt (λ (a) a)))
(define m2 (matt (λ (a b) (+ a b))))
(check-equal? (node? (m1 21)) #t)
(check-equal? (force (m1 21)) 21)
(check-equal? (node? (m2 21 21)) #t)
(check-equal? (force (m2 21 21)) 42)
(check-equal? (node? (hash-ref *memo-table* (node-id (m1 21)))) #t)
(check-equal? (node? (hash-ref *memo-table* (node-id (m2 21 21)))) #t))
(struct matt (f)
#:property
prop:procedure
(λ (m . xs) (apply memo m xs)))
(define (memo m . xs)
(match m
[(matt f)
(let* ([id (equal-hash-code (cons f xs))]
[n (node id #f '() '() (car (unbox create-stack)) xs (λ () (apply f xs)) '())]
[t (hash-ref! *memo-table* id n)])
(write-to-graph (format "[change]add node~n[node ~a red]~n" id))
;(write-to-graph (format "[change]add create edge~n[edge ~a ~a blue]~n"
(if (equal? (node-args t) xs)
t
(hash-ref! *memo-table*
(+ id 1)
(node (+ id 1) #f '() '()
(car (unbox create-stack)) xs (λ () (apply f xs)) '()))))]))
|
4087d83ea863b14496ed0f8c0dae1274af2723cf17550d74566bc30b2697eb29
|
lightquake/itsa
|
Main.hs
|
{-# LANGUAGE CPP #-}
{-# LANGUAGE TemplateHaskell #-}
{-
NOTE: Don't modify this file unless you know what you are doing. If you are
new to snap, start with Site.hs and Application.hs. This file contains
boilerplate needed for dynamic reloading and is not meant for general
consumption.

Occasionally if we modify the way the dynamic reloader works and you want to
upgrade, you might have to swap out this file for a newer version. But in
most cases you'll never need to modify this code.
-}
module Main where
------------------------------------------------------------------------------
import Control.Exception (SomeException, try)
import qualified Data.Text as T
import Snap.Http.Server
import Snap.Snaplet
import Snap.Snaplet.Config
import Snap.Core
import System.IO
import Site
#ifdef DEVELOPMENT
import Snap.Loader.Dynamic
#else
import Snap.Loader.Static
#endif
------------------------------------------------------------------------------
-- | This is the entry point for this web server application. It supports
-- easily switching between interpreting source and running statically compiled
-- code.
--
-- In either mode, the generated program should be run from the root of the
-- project tree. When it is run, it locates its templates, static content, and
-- source files in development mode, relative to the current working directory.
--
-- When compiled with the development flag, only changes to the libraries, your
-- cabal file, or this file should require a recompile to be picked up.
-- Everything else is interpreted at runtime. There are a few consequences of
-- this.
--
-- First, this is much slower. Running the interpreter takes a significant
-- chunk of time (a couple tenths of a second on the author's machine, at this
-- time), regardless of the simplicity of the loaded code. In order to
-- recompile and re-load server state as infrequently as possible, the source
-- directories are watched for updates, as are any extra directories specified
-- below.
--
-- Second, the generated server binary is MUCH larger, since it links in the
-- GHC API (via the hint library).
--
-- Third, and the reason you would ever want to actually compile with
-- development mode, is that it enables a faster development cycle. You can
-- simply edit a file, save your changes, and hit reload to see your changes
-- reflected immediately.
--
-- When this is compiled without the development flag, all the actions are
-- statically compiled in. This results in faster execution, a smaller binary
-- size, and having to recompile the server for any code change.
--
main :: IO ()
main = do
-- Depending on the version of loadSnapTH in scope, this either enables
-- dynamic reloading, or compiles it without. The last argument to
-- loadSnapTH is a list of additional directories to watch for changes to
-- trigger reloads in development mode. It doesn't need to include source
-- directories, those are picked up automatically by the splice.
(conf, site, cleanup) <- $(loadSnapTH [| getConf |]
'getActions
["templates"])
_ <- try $ httpServe conf site :: IO (Either SomeException ())
cleanup
------------------------------------------------------------------------------
-- | This action loads the config used by this application. The loaded config
-- is returned as the first element of the tuple produced by the loadSnapTH
-- Splice. The type is not solidly fixed, though it must be an IO action that
-- produces the same type as 'getActions' takes. It also must be an instance of
-- Typeable. If the type of this is changed, a full recompile will be needed to
-- pick up the change, even in development mode.
--
-- This action is only run once, regardless of whether development or
-- production mode is in use.
getConf :: IO (Config Snap AppConfig)
getConf = commandLineAppConfig defaultConfig
------------------------------------------------------------------------------
-- | This function generates the the site handler and cleanup action from the
-- configuration. In production mode, this action is only run once. In
-- development mode, this action is run whenever the application is reloaded.
--
-- Development mode also makes sure that the cleanup actions are run
-- appropriately before shutdown. The cleanup action returned from loadSnapTH
-- should still be used after the server has stopped handling requests, as the
-- cleanup actions are only automatically run when a reload is triggered.
--
-- This sample doesn't actually use the config passed in, but more
-- sophisticated code might.
getActions :: Config Snap AppConfig -> IO (Snap (), IO ())
getActions conf = do
(msgs, site, cleanup) <- runSnaplet
(appEnvironment =<< getOther conf) app
hPutStrLn stderr $ T.unpack msgs
return (site, cleanup)
| null |
https://raw.githubusercontent.com/lightquake/itsa/76e1d3ebb8f84d90614b894c629471dbacf0b2d3/src/Main.hs
|
haskell
|
# LANGUAGE CPP #
----------------------------------------------------------------------------
----------------------------------------------------------------------------
| This is the entry point for this web server application. It supports
easily switching between interpreting source and running statically compiled
code.
In either mode, the generated program should be run from the root of the
project tree. When it is run, it locates its templates, static content, and
source files in development mode, relative to the current working directory.
When compiled with the development flag, only changes to the libraries, your
cabal file, or this file should require a recompile to be picked up.
Everything else is interpreted at runtime. There are a few consequences of
this.
time), regardless of the simplicity of the loaded code. In order to
recompile and re-load server state as infrequently as possible, the source
directories are watched for updates, as are any extra directories specified
below.
Third, and the reason you would ever want to actually compile with
development mode, is that it enables a faster development cycle. You can
simply edit a file, save your changes, and hit reload to see your changes
reflected immediately.
When this is compiled without the development flag, all the actions are
statically compiled in. This results in faster execution, a smaller binary
size, and having to recompile the server for any code change.
Depending on the version of loadSnapTH in scope, this either enables
dynamic reloading, or compiles it without. The last argument to
loadSnapTH is a list of additional directories to watch for changes to
trigger reloads in development mode. It doesn't need to include source
directories, those are picked up automatically by the splice.
----------------------------------------------------------------------------
| This action loads the config used by this application. The loaded config
Typeable. If the type of this is changed, a full recompile will be needed to
pick up the change, even in development mode.
This action is only run once, regardless of whether development or
production mode is in use.
----------------------------------------------------------------------------
| This function generates the the site handler and cleanup action from the
configuration. In production mode, this action is only run once. In
development mode, this action is run whenever the application is reloaded.
Development mode also makes sure that the cleanup actions are run
appropriately before shutdown. The cleanup action returned from loadSnapTH
should still be used after the server has stopped handling requests, as the
cleanup actions are only automatically run when a reload is triggered.
This sample doesn't actually use the config passed in, but more
sophisticated code might.
|
{-# LANGUAGE TemplateHaskell #-}
{-
NOTE: Don't modify this file unless you know what you are doing. If you are
new to snap, start with Site.hs and Application.hs. This file contains
boilerplate needed for dynamic reloading and is not meant for general
consumption.

Occasionally if we modify the way the dynamic reloader works and you want to
upgrade, you might have to swap out this file for a newer version. But in
most cases you'll never need to modify this code.
-}
module Main where
import Control.Exception (SomeException, try)
import qualified Data.Text as T
import Snap.Http.Server
import Snap.Snaplet
import Snap.Snaplet.Config
import Snap.Core
import System.IO
import Site
#ifdef DEVELOPMENT
import Snap.Loader.Dynamic
#else
import Snap.Loader.Static
#endif
-- First, this is much slower. Running the interpreter takes a significant
-- chunk of time (a couple tenths of a second on the author's machine, at this
-- Second, the generated server binary is MUCH larger, since it links in the
-- GHC API (via the hint library).
main :: IO ()
main = do
(conf, site, cleanup) <- $(loadSnapTH [| getConf |]
'getActions
["templates"])
_ <- try $ httpServe conf site :: IO (Either SomeException ())
cleanup
-- is returned as the first element of the tuple produced by the loadSnapTH
-- Splice. The type is not solidly fixed, though it must be an IO action that
-- produces the same type as 'getActions' takes. It also must be an instance of
getConf :: IO (Config Snap AppConfig)
getConf = commandLineAppConfig defaultConfig
getActions :: Config Snap AppConfig -> IO (Snap (), IO ())
getActions conf = do
(msgs, site, cleanup) <- runSnaplet
(appEnvironment =<< getOther conf) app
hPutStrLn stderr $ T.unpack msgs
return (site, cleanup)
|
0f7665cf1a520f806a4bd83436524cf4479209050d98cbae3a97c9f15d333351
|
duo-lang/duo-lang
|
Pattern.hs
|
module Resolution.Pattern
( resolvePattern
, fromVar
, analyzeInstancePattern
) where
import Control.Monad ( unless, when, zipWithM )
import Control.Monad.Except ( MonadError(throwError) )
import Control.Monad.Writer ( MonadWriter(tell) )
import Data.Text qualified as T
import Errors.Renamer
import Resolution.Definition ( ResolverM, lookupXtor )
import Resolution.SymbolTable ( XtorNameResolve(..) )
import Syntax.CST.Terms qualified as CST
import Syntax.CST.Types qualified as CST
import Syntax.CST.Types (PrdCns(..))
import Syntax.CST.Names ( FreeVarName(MkFreeVarName), XtorName )
import Loc ( Loc, HasLoc(getLoc))
import Syntax.RST.Terms qualified as RST
import Data.Either (isRight, fromLeft, fromRight)
---------------------------------------------------------------------------------
-- Resolve Pattern
---------------------------------------------------------------------------------
findAtMostOneRight :: HasLoc b => [Either a b] -> ResolverM (Either [a] ([a],b,[a]))
findAtMostOneRight args = case break isRight args of
(pats, []) -> pure $ Left (fromLeft undefined <$> pats)
(left_pats, (starpat: right_pats)) ->
case break isRight right_pats of
(right_pats, []) -> pure $ Right (fromLeft undefined <$> left_pats,fromRight undefined starpat,fromLeft undefined <$> right_pats)
(_, _:_) -> throwError (UnknownResolutionError (getLoc (fromRight undefined starpat)) "Found more than one star in pattern")
-- | Annotate every part of the pattern with information on whether it stands for
-- a producer or consumer.
resolvePattern :: PrdCns -> CST.Pattern -> ResolverM (Either RST.PatternNew RST.StarPattern)
resolvePattern pc (CST.PatXtor loc xt pats) = do
-- Lookup up the arity information in the symbol table.
(_,res) <- lookupXtor loc xt
case res of
(MethodNameResult _cn _) -> do
throwError (UnknownResolutionError loc "Expected a constructor or destructor, but found a typeclas method.")
(XtorNameResult dc ns arity) -> do
when (length arity /= length pats) $
throwError (XtorArityMismatch loc xt (length arity) (length pats))
-- Check whether the Xtor is a Constructor/Destructor as expected.
case (pc,dc) of
(Cns, CST.Data ) -> throwError (UnknownResolutionError loc "Expected a destructor but found a constructor")
(Prd, CST.Codata) -> throwError (UnknownResolutionError loc "Expected a constructor but found a destructor")
(Prd, CST.Data ) -> pure ()
(Cns, CST.Codata) -> pure ()
pats' <- zipWithM resolvePattern arity pats
pats'' <- findAtMostOneRight pats'
case pats'' of
Left pats''' -> pure $ Left (RST.PatXtor loc pc ns xt pats''')
Right pats''' -> pure $ Right (RST.PatXtorStar loc pc ns xt pats''')
resolvePattern Prd (CST.PatVar loc var@(MkFreeVarName name)) = do
when ("k" `T.isPrefixOf` name) $
tell [MisnamedProducerVar loc name]
pure $ Left (RST.PatVar loc Prd var)
resolvePattern Cns (CST.PatVar loc var@(MkFreeVarName name)) = do
unless ("k" `T.isPrefixOf` name) $
tell [MisnamedConsumerVar loc name]
pure $ Left (RST.PatVar loc Cns var)
resolvePattern pc (CST.PatStar loc) = do
pure $ Right (RST.PatStar loc pc)
resolvePattern pc (CST.PatWildcard loc) = do
pure $ Left (RST.PatWildcard loc pc)
---------------------------------------------------------------------------------
-- Analyze Patterns
---------------------------------------------------------------------------------
fromVar :: RST.PatternNew -> ResolverM (Loc, PrdCns, FreeVarName)
fromVar (RST.PatVar loc pc var) = pure (loc, pc, var)
fromVar (RST.PatWildcard loc pc) = pure (loc, pc, MkFreeVarName "_")
fromVar pat = throwError (UnknownResolutionError (getLoc pat) "Called function \"fromVar\" on pattern which is not a variable.")
analyzeInstancePattern :: CST.Pattern -> ResolverM (Loc, XtorName, [(Loc, PrdCns, FreeVarName)])
analyzeInstancePattern (CST.PatXtor loc xt pats) = do
(_,res) <- lookupXtor loc xt
case res of
XtorNameResult {} -> do
throwError (UnknownResolutionError loc ("Expected typeclass method but found xtor" <> T.pack (show xt)))
MethodNameResult _cn arity -> do
when (length arity /= length pats) $
throwError (XtorArityMismatch loc xt (length arity) (length pats))
pats' <- zipWithM resolvePattern arity pats
foo <- findAtMostOneRight pats'
case foo of
Left pats2 -> do
args <- mapM fromVar pats2
pure (loc, xt, args)
Right _ ->
throwError (UnknownResolutionError loc "Found star in instance method")
analyzeInstancePattern pat =
throwError (UnknownResolutionError (getLoc pat) ("Expected typeclass method but found pattern" <> T.pack (show pat)))
| null |
https://raw.githubusercontent.com/duo-lang/duo-lang/449b7015683e0a40cc51df80011e45e13d3ca151/duo-lang-renamer/src/Resolution/Pattern.hs
|
haskell
|
-------------------------------------------------------------------------------
Resolve Pattern
-------------------------------------------------------------------------------
| Annotate every part of the pattern with information on whether it stands for
a producer or consumer.
Lookup up the arity information in the symbol table.
-------------------------------------------------------------------------------
Analyze Patterns
-------------------------------------------------------------------------------
|
module Resolution.Pattern
( resolvePattern
, fromVar
, analyzeInstancePattern
) where
import Control.Monad ( unless, when, zipWithM )
import Control.Monad.Except ( MonadError(throwError) )
import Control.Monad.Writer ( MonadWriter(tell) )
import Data.Text qualified as T
import Errors.Renamer
import Resolution.Definition ( ResolverM, lookupXtor )
import Resolution.SymbolTable ( XtorNameResolve(..) )
import Syntax.CST.Terms qualified as CST
import Syntax.CST.Types qualified as CST
import Syntax.CST.Types (PrdCns(..))
import Syntax.CST.Names ( FreeVarName(MkFreeVarName), XtorName )
import Loc ( Loc, HasLoc(getLoc))
import Syntax.RST.Terms qualified as RST
import Data.Either (isRight, fromLeft, fromRight)
findAtMostOneRight :: HasLoc b => [Either a b] -> ResolverM (Either [a] ([a],b,[a]))
findAtMostOneRight args = case break isRight args of
(pats, []) -> pure $ Left (fromLeft undefined <$> pats)
(left_pats, (starpat: right_pats)) ->
case break isRight right_pats of
(right_pats, []) -> pure $ Right (fromLeft undefined <$> left_pats,fromRight undefined starpat,fromLeft undefined <$> right_pats)
(_, _:_) -> throwError (UnknownResolutionError (getLoc (fromRight undefined starpat)) "Found more than one star in pattern")
resolvePattern :: PrdCns -> CST.Pattern -> ResolverM (Either RST.PatternNew RST.StarPattern)
resolvePattern pc (CST.PatXtor loc xt pats) = do
(_,res) <- lookupXtor loc xt
case res of
(MethodNameResult _cn _) -> do
throwError (UnknownResolutionError loc "Expected a constructor or destructor, but found a typeclas method.")
(XtorNameResult dc ns arity) -> do
when (length arity /= length pats) $
throwError (XtorArityMismatch loc xt (length arity) (length pats))
-- Check whether the Xtor is a Constructor/Destructor as expected.
case (pc,dc) of
(Cns, CST.Data ) -> throwError (UnknownResolutionError loc "Expected a destructor but found a constructor")
(Prd, CST.Codata) -> throwError (UnknownResolutionError loc "Expected a constructor but found a destructor")
(Prd, CST.Data ) -> pure ()
(Cns, CST.Codata) -> pure ()
pats' <- zipWithM resolvePattern arity pats
pats'' <- findAtMostOneRight pats'
case pats'' of
Left pats''' -> pure $ Left (RST.PatXtor loc pc ns xt pats''')
Right pats''' -> pure $ Right (RST.PatXtorStar loc pc ns xt pats''')
resolvePattern Prd (CST.PatVar loc var@(MkFreeVarName name)) = do
when ("k" `T.isPrefixOf` name) $
tell [MisnamedProducerVar loc name]
pure $ Left (RST.PatVar loc Prd var)
resolvePattern Cns (CST.PatVar loc var@(MkFreeVarName name)) = do
unless ("k" `T.isPrefixOf` name) $
tell [MisnamedConsumerVar loc name]
pure $ Left (RST.PatVar loc Cns var)
resolvePattern pc (CST.PatStar loc) = do
pure $ Right (RST.PatStar loc pc)
resolvePattern pc (CST.PatWildcard loc) = do
pure $ Left (RST.PatWildcard loc pc)
fromVar :: RST.PatternNew -> ResolverM (Loc, PrdCns, FreeVarName)
fromVar (RST.PatVar loc pc var) = pure (loc, pc, var)
fromVar (RST.PatWildcard loc pc) = pure (loc, pc, MkFreeVarName "_")
fromVar pat = throwError (UnknownResolutionError (getLoc pat) "Called function \"fromVar\" on pattern which is not a variable.")
analyzeInstancePattern :: CST.Pattern -> ResolverM (Loc, XtorName, [(Loc, PrdCns, FreeVarName)])
analyzeInstancePattern (CST.PatXtor loc xt pats) = do
(_,res) <- lookupXtor loc xt
case res of
XtorNameResult {} -> do
throwError (UnknownResolutionError loc ("Expected typeclass method but found xtor" <> T.pack (show xt)))
MethodNameResult _cn arity -> do
when (length arity /= length pats) $
throwError (XtorArityMismatch loc xt (length arity) (length pats))
pats' <- zipWithM resolvePattern arity pats
foo <- findAtMostOneRight pats'
case foo of
Left pats2 -> do
args <- mapM fromVar pats2
pure (loc, xt, args)
Right _ ->
throwError (UnknownResolutionError loc "Found star in instance method")
analyzeInstancePattern pat =
throwError (UnknownResolutionError (getLoc pat) ("Expected typeclass method but found pattern" <> T.pack (show pat)))
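-- A hypothetical, self-contained re-statement of the findAtMostOneRight step
-- above, with the ResolverM error handling and location tracking stripped out.
-- The name splitAtMostOneRight and the example results are illustrative only,
-- not part of the repository.
splitAtMostOneRight :: [Either a b] -> Maybe (Either [a] ([a], b, [a]))
splitAtMostOneRight xs = case break isRight xs of
  (ls, []) -> Just (Left (map (fromLeft undefined) ls))
  (ls, star : rs)
    | any isRight rs -> Nothing   -- more than one star: rejected
    | otherwise ->
      Just (Right ( map (fromLeft undefined) ls
                  , fromRight undefined star
                  , map (fromLeft undefined) rs ))
-- splitAtMostOneRight [Left 'a', Left 'b'] == Just (Left "ab")
-- splitAtMostOneRight [Left 'a', Right (1 :: Int), Left 'c'] == Just (Right ("a", 1, "c"))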
|
815a25079995f4d643b3ce010e267ee864613fb77817bfe147b1d620217ea67b
|
exoscale/clojure-kubernetes-client
|
v1beta1_cron_job_status.clj
|
(ns clojure-kubernetes-client.specs.v1beta1-cron-job-status
(:require [clojure.spec.alpha :as s]
[spec-tools.data-spec :as ds]
[clojure-kubernetes-client.specs.v1-object-reference :refer :all]
)
(:import (java.io File)))
(declare v1beta1-cron-job-status-data v1beta1-cron-job-status)
(def v1beta1-cron-job-status-data
{
(ds/opt :active) (s/coll-of v1-object-reference)
(ds/opt :lastScheduleTime) inst?
})
(def v1beta1-cron-job-status
(ds/spec
{:name ::v1beta1-cron-job-status
:spec v1beta1-cron-job-status-data}))
| null |
https://raw.githubusercontent.com/exoscale/clojure-kubernetes-client/79d84417f28d048c5ac015c17e3926c73e6ac668/src/clojure_kubernetes_client/specs/v1beta1_cron_job_status.clj
|
clojure
|
(ns clojure-kubernetes-client.specs.v1beta1-cron-job-status
(:require [clojure.spec.alpha :as s]
[spec-tools.data-spec :as ds]
[clojure-kubernetes-client.specs.v1-object-reference :refer :all]
)
(:import (java.io File)))
(declare v1beta1-cron-job-status-data v1beta1-cron-job-status)
(def v1beta1-cron-job-status-data
{
(ds/opt :active) (s/coll-of v1-object-reference)
(ds/opt :lastScheduleTime) inst?
})
(def v1beta1-cron-job-status
(ds/spec
{:name ::v1beta1-cron-job-status
:spec v1beta1-cron-job-status-data}))
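;; A hypothetical REPL check of the spec above; it assumes this namespace and
;; its spec-tools/clojure.spec dependencies are on the classpath. Both keys
;; are optional, so an empty status map conforms, while a present
;; :lastScheduleTime must satisfy inst?.
(comment
  (s/valid? v1beta1-cron-job-status {})
  ;; => true
  (s/valid? v1beta1-cron-job-status {:lastScheduleTime (java.util.Date.)})
  ;; => true
  (s/valid? v1beta1-cron-job-status {:lastScheduleTime "not-a-date"})
  ;; => false
  )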
|
|
cb40738ee0806fd61eda78305e7a740cc675f87eda425803dcc08441dbcba081
|
tarides/ocaml-platform-installer
|
opam.mli
|
open Astring
open Import
module GlobalOpts : sig
type t = {
root : Fpath.t;
switch : string option; (** Whether to pass the [--switch] option. *)
env : string String.map option;
(** Environment to use when calling commands. *)
log_height : int option;
(** [log_height] determines how the output of an [opam] call should be
displayed. With [None], no output is displayed. With [Some h], the
[h] last lines are displayed. *)
}
val v :
root:Fpath.t ->
?switch:string ->
?env:string String.map ->
?log_height:int ->
unit ->
t
val default : t
end
module Config : sig
module Var : sig
val get : GlobalOpts.t -> string -> (string, 'e) Result.or_msg
val get_opt : GlobalOpts.t -> string -> (string option, 'e) Result.or_msg
val set :
GlobalOpts.t ->
global:bool ->
string ->
string ->
(unit, 'e) Result.or_msg
val unset :
GlobalOpts.t -> global:bool -> string -> (unit, 'e) Result.or_msg
end
end
module Switch : sig
val list : GlobalOpts.t -> (string list, [> Rresult.R.msg ]) result
val create :
ocaml_version:string option ->
GlobalOpts.t ->
string ->
(unit, [> `Msg of string ]) result
(** When [ocaml_version] is [None], create a switch with no compiler
installed. *)
val remove : GlobalOpts.t -> string -> (string, [> `Msg of string ]) result
val show : GlobalOpts.t -> (string, [> `Msg of string ]) result
end
module Repository : sig
val add :
GlobalOpts.t -> path:Fpath.t -> string -> (unit, [> `Msg of string ]) result
(** Add a repository, act on the switch selected by the options. *)
val remove : GlobalOpts.t -> string -> (unit, [> `Msg of string ]) result
(** Remove a repository, act on all switches. *)
end
module Show : sig
val list_files :
GlobalOpts.t -> string -> (string list, [> `Msg of string ]) result
val available_versions :
GlobalOpts.t -> string -> (string list, [> `Msg of string ]) result
val installed_version :
GlobalOpts.t -> string -> (string option, [> `Msg of string ]) result
val installed_versions :
GlobalOpts.t -> string list -> ((string * string) list, 'a) Result.or_msg
(** Query the installed versions of a list of packages. Packages that are not
installed don't appear in the result. *)
val opam_file : GlobalOpts.t -> pkg:string -> (string, 'a) Result.or_msg
val depends :
GlobalOpts.t -> string -> (string list, [> `Msg of string ]) result
val version : GlobalOpts.t -> string -> (string, [> `Msg of string ]) result
val pin : GlobalOpts.t -> string -> (string, [> `Msg of string ]) result
end
module List_ : sig
val compiler :
GlobalOpts.t -> unit -> (string option, [> `Msg of string ]) result
end
val install : GlobalOpts.t -> string list -> (unit, [> `Msg of string ]) result
(** [install opam_opts atoms] installs the [atoms] into the current local
switch. If opam has not been initialised, or if there is no local switch,
this function will also create those too. *)
val remove : GlobalOpts.t -> string list -> (unit, [> `Msg of string ]) result
(** [remove atoms] removes the [atoms] from the current local switch. Returns
the list of packages removed. *)
val update : GlobalOpts.t -> string list -> (unit, [> `Msg of string ]) result
(** [update names] updates the repositories by their [names] that the current
local switch has set. *)
val upgrade : GlobalOpts.t -> string list -> (unit, [> `Msg of string ]) result
(** [upgrade atoms] will try to upgrade the packages whilst keeping [atoms]
installed. *)
val check_init : unit -> (unit, [> `Msg of string ]) result
| null |
https://raw.githubusercontent.com/tarides/ocaml-platform-installer/d24fa0590888673cc3b0eb828a3be484c0f8a8af/src/lib/opam.mli
|
ocaml
|
* Whether to pass the [--switch] option.
* Environment to use when calling commands.
* [log_height] determines how the output of an [opam] call should be
displayed. With [None], no output is displayed. With [Some h], the
[h] last lines are displayed.
* When [ocaml_version] is [None], create a switch with no compiler
installed.
* Add a repository, act on the switch selected by the options.
* Remove a repository, act on all switches.
* Query the installed versions of a list of packages. Packages that are not
installed don't appear in the result.
* [install opam_opts atoms] installs the [atoms] into the current local
switch. If opam has not been initialised, or if there is no local switch,
this function will also create those too.
* [remove atoms] removes the [atoms] from the current local switch. Returns
the list of packages removed.
* [update names] updates the repositories by their [names] that the current
local switch has set.
* [upgrade atoms] will try to upgrade the packages whilst keeping [atoms]
installed.
|
open Astring
open Import
module GlobalOpts : sig
type t = {
root : Fpath.t;
env : string String.map option;
log_height : int option;
}
val v :
root:Fpath.t ->
?switch:string ->
?env:string String.map ->
?log_height:int ->
unit ->
t
val default : t
end
module Config : sig
module Var : sig
val get : GlobalOpts.t -> string -> (string, 'e) Result.or_msg
val get_opt : GlobalOpts.t -> string -> (string option, 'e) Result.or_msg
val set :
GlobalOpts.t ->
global:bool ->
string ->
string ->
(unit, 'e) Result.or_msg
val unset :
GlobalOpts.t -> global:bool -> string -> (unit, 'e) Result.or_msg
end
end
module Switch : sig
val list : GlobalOpts.t -> (string list, [> Rresult.R.msg ]) result
val create :
ocaml_version:string option ->
GlobalOpts.t ->
string ->
(unit, [> `Msg of string ]) result
val remove : GlobalOpts.t -> string -> (string, [> `Msg of string ]) result
val show : GlobalOpts.t -> (string, [> `Msg of string ]) result
end
module Repository : sig
val add :
GlobalOpts.t -> path:Fpath.t -> string -> (unit, [> `Msg of string ]) result
val remove : GlobalOpts.t -> string -> (unit, [> `Msg of string ]) result
end
module Show : sig
val list_files :
GlobalOpts.t -> string -> (string list, [> `Msg of string ]) result
val available_versions :
GlobalOpts.t -> string -> (string list, [> `Msg of string ]) result
val installed_version :
GlobalOpts.t -> string -> (string option, [> `Msg of string ]) result
val installed_versions :
GlobalOpts.t -> string list -> ((string * string) list, 'a) Result.or_msg
val opam_file : GlobalOpts.t -> pkg:string -> (string, 'a) Result.or_msg
val depends :
GlobalOpts.t -> string -> (string list, [> `Msg of string ]) result
val version : GlobalOpts.t -> string -> (string, [> `Msg of string ]) result
val pin : GlobalOpts.t -> string -> (string, [> `Msg of string ]) result
end
module List_ : sig
val compiler :
GlobalOpts.t -> unit -> (string option, [> `Msg of string ]) result
end
val install : GlobalOpts.t -> string list -> (unit, [> `Msg of string ]) result
val remove : GlobalOpts.t -> string list -> (unit, [> `Msg of string ]) result
val update : GlobalOpts.t -> string list -> (unit, [> `Msg of string ]) result
val upgrade : GlobalOpts.t -> string list -> (unit, [> `Msg of string ]) result
val check_init : unit -> (unit, [> `Msg of string ]) result
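(* A hypothetical usage sketch of this interface, assuming an implementation
   module [Opam] matching it; the package name and printing are illustrative.

   let install_dune () =
     let opts = Opam.GlobalOpts.default in
     match Opam.install opts [ "dune" ] with
     | Error (`Msg m) -> prerr_endline m
     | Ok () ->
       (match Opam.Show.installed_version opts "dune" with
        | Ok (Some v) -> print_endline ("installed dune " ^ v)
        | Ok None -> print_endline "dune not reported as installed"
        | Error (`Msg m) -> prerr_endline m)
*)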
|
2173ecfe2bd2bea0a80207a9a48f70b9dd366f23ed736a0f554361b43b48ae3b
|
bobzhang/fan
|
fState.mli
|
(** Experimental: state management for deriving *)
open Sig_util
(* when you do the iteration, you should do it in reverse order *)
val current_filters : (plugin_name * plugin) list ref
val reset_current_filters : unit -> unit
val keep : bool ref
val reset : unit -> unit
val gensym : ?pkg:string -> string -> string
| null |
https://raw.githubusercontent.com/bobzhang/fan/7ed527d96c5a006da43d3813f32ad8a5baa31b7f/src/cold/fState.mli
|
ocaml
|
* Experimental: state management for deriving
when you do the iteration, you should do it in reverse order
|
open Sig_util
val current_filters : (plugin_name * plugin) list ref
val reset_current_filters : unit -> unit
val keep : bool ref
val reset : unit -> unit
val gensym : ?pkg:string -> string -> string
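(* A hypothetical usage sketch of this interface, assuming an implementing
   module [FState]; the package name and symbols are illustrative.

   let () =
     let a = FState.gensym ~pkg:"demo" "x" in
     let b = FState.gensym "x" in
     print_endline a;
     print_endline b;
     (* drop any deriving filters registered so far *)
     FState.reset_current_filters ();
     FState.reset ()
*)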
|
24a08773eb3ec494a0f4310cfd5bc57c74cae8902ef51e036f1e81d6c89e69e2
|
alvatar/spheres
|
c-define-objc#.scm
|
;;!!! Objective-C Utility Macros for Gambit Scheme
;; .author , March 19, 2011
;;
;; This software is licensed under the WTFPL.
;;
;; These were developed while I was working on the Gambit Gaming
;; Engine ( ). They should be helpful in easily wrapping existing
;; Objective-C APIs with Gambit Scheme.
;;
;;
;; somewhere in your Gambit source file.
;;
;; The API is as follows:
;;
;; (objc-method class-name (formaltype1 ...) return-type method-name)
;;
;; Creates a `c-lambda' that wraps an invocation of method
;; `method-name' to objects of class `class-name'. So for example if
;; you had:
;;
;; @class Barney;
;;
;; @interface Fred
;; { ... }
;;
;; -(int)frobWithBarney: (Barney *) aBarney: ( ) hats;
;; +instanceNumber: (int) n
;; @end
;;
;; you could wrap the frobWithBarney method with something like the
;; following:
;;
;; (define frob-with-barney
;; (objc-method " " ((pointer " ") bool) int
;; "frobWithBarney::"))
;;
;; Then if Scheme-side you had a pointer to and a pointer
;; to Barney `b' you could call from Scheme:
;;
;; (frob-with-barney f b #t)
;;
;; Procedures which wrap Objective-C methods in this way take one
;; additional argument to the ones accounted for in their formals
;; list. Their first argument should be a pointer to the object on
;; which the method is invoked, followed by the arguments in the
;; formals list, as in the example above which takes a pointer to
;; , a pointer to , and a boolean value.
;;
;; (objc-class-method class-name (formaltype1 ...) return-type method-name)
;;
;; Creates a `c-lambda' that wraps an invocation of class method
;; `method-name' in class `class-name'. For instance, in class
;; above you could wrap the class method instanceNumber with the following:
;;
;; (define fred-instance-number
;; (objc-class-method " " (int) (pointer) " : "))
;;
;; Then Scheme-side you could get a pointer to with a call like:
;;
;; (fred-instance-number 5)
;;
;; Procedures which wrap Objective-C class methods in this way take
;; only the arguments accounted for in their formals list.
(define-macro (%%objc-method class-name class? formal-types return-type method-name)
(define (parse-method-name m)
(define (split-at-colon s)
(let ((l (string-length s)))
(call-with-current-continuation
(lambda (k)
(do ((i 0 (+ i 1)))
((>= i l) #f)
(if (char=? (string-ref s i) #\:)
(k (cons (substring s 0 (+ i 1))
(substring s (+ i 1) l)))))))))
(define (parse-method-name1 m acc)
(let ((p (split-at-colon m)))
(if (not p)
(if (null? acc) (cons m acc) acc)
(parse-method-name1 (cdr p) (cons (car p) acc)))))
(reverse (parse-method-name1 m '())))
(define (make-methodcall lst start)
(if (and (= (length lst) 1)
(not (char=? (string-ref
(car lst)
(- (string-length (car lst)) 1))
#\:)))
(car lst)
(do ((i start (+ i 1))
(l lst (cdr l))
(s ""
(string-append s
(car l)
" ___arg"
(number->string i)
" ")))
((null? l) s))))
(let* ((res (cond
((list? return-type)
"___result_voidstar = (void *)")
((eq? return-type 'void) "")
(else "___result = ")))
(methodparts (parse-method-name method-name)))
`(c-lambda ,(if class? formal-types (cons (list 'pointer class-name) formal-types)) ,return-type
,(string-append
(if class?
(string-append res "[" class-name " ")
(string-append res "[___arg1 "))
(make-methodcall methodparts (if class? 1 2))
"];"))))
(define-macro (objc-method class-name formal-types return-type method-name)
`(%%objc-method ,class-name #f ,formal-types ,return-type ,method-name))
(define-macro (objc-class-method class-name formal-types return-type method-name)
`(%%objc-method ,class-name #t ,formal-types ,return-type ,method-name))
| null |
https://raw.githubusercontent.com/alvatar/spheres/568836f234a469ef70c69f4a2d9b56d41c3fc5bd/spheres/gambit/ffi/c-define-objc%23.scm
|
scheme
|
!!! Objective-C Utility Macros for Gambit Scheme
This software is licensed under the WTFPL.
These were developed while I was working on the Gambit Gaming
Objective-C APIs with Gambit Scheme.
somewhere in your Gambit source file.
The API is as follows:
(objc-method class-name (formaltype1 ...) return-type method-name)
Creates a `c-lambda' that wraps an invocation of method
`method-name' to objects of class `class-name'. So for example if
you had:
@interface Fred
{ ... }
+instanceNumber: (int) n
@end
you could wrap the frobWithBarney method with something like the
following:
(define frob-with-barney
to Barney `b' you could call from Scheme:
(frob-with-barney f b #t)
additional argument to the ones accounted for in their formals
which the method is invoked, followed by the arguments in the
formals list, as in the example above which takes a pointer to
(objc-class-method class-name (formaltype1 ...) return-type method-name)
Creates a `c-lambda' that wraps an invocation of class method
Procedures which wrap Objective-C class methods in this way take
only the arguments accounted for in their formals list.
|
;; .author , March 19, 2011
;; Engine ( ). They should be helpful in easily wrapping existing
;; (objc-method " " ((pointer " ") bool) int
;; "frobWithBarney::"))
;; Then if Scheme-side you had a pointer to and a pointer
;; Procedures which wrap Objective-C methods in this way take one
;; list. Their first argument should be a pointer to the object on
;; , a pointer to , and a boolean value.
;; `method-name' in class `class-name'. For instance, in class
;; above you could wrap the class method instanceNumber with the following:
;; (define fred-instance-number
;; (objc-class-method " " (int) (pointer) " : "))
;; Then Scheme-side you could get a pointer to with a call like:
;; (fred-instance-number 5)
(define-macro (%%objc-method class-name class? formal-types return-type method-name)
(define (parse-method-name m)
(define (split-at-colon s)
(let ((l (string-length s)))
(call-with-current-continuation
(lambda (k)
(do ((i 0 (+ i 1)))
((>= i l) #f)
(if (char=? (string-ref s i) #\:)
(k (cons (substring s 0 (+ i 1))
(substring s (+ i 1) l)))))))))
(define (parse-method-name1 m acc)
(let ((p (split-at-colon m)))
(if (not p)
(if (null? acc) (cons m acc) acc)
(parse-method-name1 (cdr p) (cons (car p) acc)))))
(reverse (parse-method-name1 m '())))
(define (make-methodcall lst start)
(if (and (= (length lst) 1)
(not (char=? (string-ref
(car lst)
(- (string-length (car lst)) 1))
#\:)))
(car lst)
(do ((i start (+ i 1))
(l lst (cdr l))
(s ""
(string-append s
(car l)
" ___arg"
(number->string i)
" ")))
((null? l) s))))
(let* ((res (cond
((list? return-type)
"___result_voidstar = (void *)")
((eq? return-type 'void) "")
(else "___result = ")))
(methodparts (parse-method-name method-name)))
`(c-lambda ,(if class? formal-types (cons (list 'pointer class-name) formal-types)) ,return-type
,(string-append
(if class?
(string-append res "[" class-name " ")
(string-append res "[___arg1 "))
(make-methodcall methodparts (if class? 1 2))
"];"))))
(define-macro (objc-method class-name formal-types return-type method-name)
`(%%objc-method ,class-name #f ,formal-types ,return-type ,method-name))
(define-macro (objc-class-method class-name formal-types return-type method-name)
`(%%objc-method ,class-name #t ,formal-types ,return-type ,method-name))
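;; A hypothetical usage sketch of the two macros above, following the pattern
;; described in the header comments. The class names "Fred" and "Barney" and
;; the selectors "frobWithBarney::" and "instanceNumber:" are illustrative;
;; a real build needs matching Objective-C classes linked in.
;; (define frob-with-barney
;;   (objc-method "Fred" ((pointer "Barney") bool) int "frobWithBarney::"))
;; (define fred-instance-number
;;   (objc-class-method "Fred" (int) (pointer "Fred") "instanceNumber:"))
;; (frob-with-barney f b #t)  ;; f: pointer to a Fred, b: pointer to a Barney
;; (fred-instance-number 5)   ;; returns a pointer to a Fred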
|
61d4a63f035cd36c2ae4efc2a37b494ba1d604a256cf8f023e767c6916c30357
|
haskus/packages
|
CodePoint.hs
|
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DeriveLift #-}
{-# LANGUAGE PatternSynonyms #-}
{-# LANGUAGE ViewPatterns #-}
{-# LANGUAGE BinaryLiterals #-}
-- | Unicode code-point
module Haskus.Text.Unicode.CodePoint
( CodePoint (..)
, CodePointRange
, pattern CodePointRange
, toUtf8
, fromUtf8
, toModifiedUtf8
)
where
import Haskus.Number.Word
import Haskus.Binary.Bits
import Haskus.Utils.Flow
import Numeric
import Data.Char (toUpper)
import Language.Haskell.TH.Syntax (Lift)
--------------------------------------------------
-- Code-point
--------------------------------------------------
-- | Code point
--
-- Number from 0 to 0x10FFFF
newtype CodePoint = CodePoint Word32 deriving (Eq,Lift)
-- | Show instance for CodePoint
--
-- >>> CodePoint 0x1234
-- U+1234
--
-- >>> CodePoint 0x12
-- U+0012
--
-- >>> CodePoint 0x1234A
-- U+1234A
--
instance Show CodePoint where
show (CodePoint v) = "U+" ++ f (fmap toUpper (showHex v ""))
where
f xs@[_,_,_] = '0':xs
f xs@[_,_] = "00" <> xs
f xs@[_] = "000" <> xs
f xs@[] = "0000" <> xs
f xs = xs
--------------------------------------------------
-- Code-point range
--------------------------------------------------
-- | Code point range
newtype CodePointRange = Range Word64 deriving (Eq,Lift)
fromRange :: CodePointRange -> (CodePoint,CodePoint)
fromRange (Range w) = ( CodePoint $ fromIntegral (w .&. 0xFFFFFFFF)
, CodePoint $ fromIntegral (w `uncheckedShiftR` 32)
)
toRange :: (CodePoint,CodePoint) -> CodePointRange
toRange (CodePoint x, CodePoint y) =
Range (fromIntegral x .|. (fromIntegral y `uncheckedShiftL` 32))
-- | Code-point range
{-# COMPLETE CodePointRange #-}
pattern CodePointRange :: CodePoint -> CodePoint -> CodePointRange
pattern CodePointRange x y <- (fromRange -> (x,y))
where
CodePointRange x y = toRange (x,y)
instance Show CodePointRange where
show (CodePointRange x y) = show x ++ ".." ++ show y
-- | Encode a code-point into UTF-8.
-- Extended to support any (not just 21 bits) to make the function total
-- and useful in other contexts
--
-- >>> :set -XBinaryLiterals
-- >>> let f x = toUtf8 (putStr . (++ " ") . bitsToString) x
-- >>> f 0x24
-- 00100100
-- >>> f 0xA2
-- 11000010 10100010
-- >>> f 0x939
-- 11100000 10100100 10111001
-- >>> f 0x20AC
-- 11100010 10000010 10101100
-- >>> f 0x10348
-- 11110000 10010000 10001101 10001000
toUtf8 :: Monad m => (Word8 -> m ()) -> Word32 -> m ()
toUtf8 putW8 w
| w .&. 0xFFFFFF80 == 0 = putW8 (fromIntegral w)
| w .&. 0xFFFFF800 == 0 = do
putW8 <| fromIntegral <| (0b11000000 .|. (w `shiftR` 6))
putW8 <| fromIntegral <| (0b10000000 .|. (w .&. 0b00111111))
| w .&. 0xFFFF0000 == 0 = do
putW8 <| fromIntegral <| (0b11100000 .|. (w `shiftR` 12))
putW8 <| fromIntegral <| (0b10000000 .|. ((w `shiftR` 6) .&. 0b00111111))
putW8 <| fromIntegral <| (0b10000000 .|. (w .&. 0b00111111))
| w .&. 0xFFE00000 == 0 = do
putW8 <| fromIntegral <| (0b11110000 .|. (w `shiftR` 18))
putW8 <| fromIntegral <| (0b10000000 .|. ((w `shiftR` 12) .&. 0b00111111))
putW8 <| fromIntegral <| (0b10000000 .|. ((w `shiftR` 6 ) .&. 0b00111111))
putW8 <| fromIntegral <| (0b10000000 .|. (w .&. 0b00111111))
| w .&. 0xFC000000 == 0 = do
putW8 <| fromIntegral <| (0b11111000 .|. (w `shiftR` 24))
putW8 <| fromIntegral <| (0b10000000 .|. ((w `shiftR` 18) .&. 0b00111111))
putW8 <| fromIntegral <| (0b10000000 .|. ((w `shiftR` 12) .&. 0b00111111))
putW8 <| fromIntegral <| (0b10000000 .|. ((w `shiftR` 6 ) .&. 0b00111111))
putW8 <| fromIntegral <| (0b10000000 .|. (w .&. 0b00111111))
| otherwise = do
putW8 <| fromIntegral <| (0b11111100 .|. (w `shiftR` 30))
putW8 <| fromIntegral <| (0b10000000 .|. ((w `shiftR` 24) .&. 0b00111111))
putW8 <| fromIntegral <| (0b10000000 .|. ((w `shiftR` 18) .&. 0b00111111))
putW8 <| fromIntegral <| (0b10000000 .|. ((w `shiftR` 12) .&. 0b00111111))
putW8 <| fromIntegral <| (0b10000000 .|. ((w `shiftR` 6 ) .&. 0b00111111))
putW8 <| fromIntegral <| (0b10000000 .|. (w .&. 0b00111111))
-- | Encode a code-point into Modified UTF-8.
--
-- Compared to UTF-8, NULL values are encoded in two non null bytes.
--
-- >>> :set -XBinaryLiterals
-- >>> let f x = toModifiedUtf8 (putStr . (++ " ") . bitsToString) x
-- >>> f 0x24
-- 00100100
-- >>> f 0x00
-- 11000000 10000000
toModifiedUtf8 :: Monad m => (Word8 -> m ()) -> Word32 -> m ()
toModifiedUtf8 putW8 w
| w == 0 = putW8 0b11000000 >> putW8 0b10000000
| otherwise = toUtf8 putW8 w
-- | Decode a code-point in UTF8.
--
-- Extended to support any (not just 21 bits).
--
-- We don't check that following bytes are valid.
--
-- >>> :set -XBinaryLiterals
-- >>> import Control.Monad.Trans.State
-- >>> getNext = do { ~(x:xs) <- get; put xs; pure x }
-- >>> let x = evalState (fromUtf8 getNext) [0b11110000,0b10010000,0b10001101,0b10001000]
-- >>> x == Just 0x10348
-- True
fromUtf8 :: Monad m => m Word8 -> m (Maybe Word32)
fromUtf8 getW8 = do
w <- getW8
let
n = countLeadingZeros (complement w)
b1 = fromIntegral w
case n of
-- not a first byte
0 -> pure (Just b1)
2 -> do
b2 <- fromIntegral <|| getW8
pure <| Just <| ((b1 .&. 0b00011111) `shiftL` 6)
.|. (b2 .&. 0b00111111)
3 -> do
b2 <- fromIntegral <|| getW8
b3 <- fromIntegral <|| getW8
pure <| Just <| ((b1 .&. 0b00001111) `shiftL` 12)
.|. ((b2 .&. 0b00111111) `shiftL` 6)
.|. (b3 .&. 0b00111111)
4 -> do
b2 <- fromIntegral <|| getW8
b3 <- fromIntegral <|| getW8
b4 <- fromIntegral <|| getW8
pure <| Just <| ((b1 .&. 0b00000111) `shiftL` 18)
.|. ((b2 .&. 0b00111111) `shiftL` 12)
.|. ((b3 .&. 0b00111111) `shiftL` 6)
.|. (b4 .&. 0b00111111)
5 -> do
b2 <- fromIntegral <|| getW8
b3 <- fromIntegral <|| getW8
b4 <- fromIntegral <|| getW8
b5 <- fromIntegral <|| getW8
pure <| Just <| ((b1 .&. 0b00000111) `shiftL` 24)
.|. ((b2 .&. 0b00111111) `shiftL` 18)
.|. ((b3 .&. 0b00111111) `shiftL` 12)
.|. ((b4 .&. 0b00111111) `shiftL` 6)
.|. (b5 .&. 0b00111111)
_ -> do
b2 <- fromIntegral <|| getW8
b3 <- fromIntegral <|| getW8
b4 <- fromIntegral <|| getW8
b5 <- fromIntegral <|| getW8
b6 <- fromIntegral <|| getW8
pure <| Just <| ((b1 .&. 0b00000011) `shiftL` 30)
.|. ((b2 .&. 0b00111111) `shiftL` 24)
.|. ((b3 .&. 0b00111111) `shiftL` 18)
.|. ((b4 .&. 0b00111111) `shiftL` 12)
.|. ((b5 .&. 0b00111111) `shiftL` 6)
.|. (b6 .&. 0b00111111)
| null |
https://raw.githubusercontent.com/haskus/packages/40ea6101cea84e2c1466bc55cdb22bed92f642a2/haskus-text/src/lib/Haskus/Text/Unicode/CodePoint.hs
|
haskell
|
# LANGUAGE DeriveLift #
| Unicode code-point
------------------------------------------------
Code-point
------------------------------------------------
| Code point
Number from 0 to 0x10FFFF
>>> CodePoint 0x1234
U+1234
>>> CodePoint 0x1234A
U+1234A
------------------------------------------------
Code-point range
------------------------------------------------
| Code point range
| Code-point range
and useful in other contexts
>>> :set -XBinaryLiterals
>>> f 0x24
>>> f 0xA2
>>> f 0x20AC
>>> f 0x10348
| Encode a code-point into Modified UTF-8.
>>> :set -XBinaryLiterals
>>> let f x = toModifiedUtf8 (putStr . (++ " ") . bitsToString) x
>>> f 0x24
>>> f 0x00
| Decode a code-point in UTF8.
We don't check that following bytes are valid.
>>> :set -XBinaryLiterals
>>> getNext = do { ~(x:xs) <- get; put xs; pure x }
True
|
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE PatternSynonyms #-}
{-# LANGUAGE ViewPatterns #-}
{-# LANGUAGE BinaryLiterals #-}
module Haskus.Text.Unicode.CodePoint
( CodePoint (..)
, CodePointRange
, pattern CodePointRange
, toUtf8
, fromUtf8
, toModifiedUtf8
)
where
import Haskus.Number.Word
import Haskus.Binary.Bits
import Haskus.Utils.Flow
import Numeric
import Data.Char (toUpper)
import Language.Haskell.TH.Syntax (Lift)
newtype CodePoint = CodePoint Word32 deriving (Eq,Lift)
-- | Show instance for CodePoint
-- >>> CodePoint 0x12
-- U+0012
instance Show CodePoint where
show (CodePoint v) = "U+" ++ f (fmap toUpper (showHex v ""))
where
f xs@[_,_,_] = '0':xs
f xs@[_,_] = "00" <> xs
f xs@[_] = "000" <> xs
f xs@[] = "0000" <> xs
f xs = xs
newtype CodePointRange = Range Word64 deriving (Eq,Lift)
fromRange :: CodePointRange -> (CodePoint,CodePoint)
fromRange (Range w) = ( CodePoint $ fromIntegral (w .&. 0xFFFFFFFF)
, CodePoint $ fromIntegral (w `uncheckedShiftR` 32)
)
toRange :: (CodePoint,CodePoint) -> CodePointRange
toRange (CodePoint x, CodePoint y) =
Range (fromIntegral x .|. (fromIntegral y `uncheckedShiftL` 32))
{-# COMPLETE CodePointRange #-}
pattern CodePointRange :: CodePoint -> CodePoint -> CodePointRange
pattern CodePointRange x y <- (fromRange -> (x,y))
where
CodePointRange x y = toRange (x,y)
instance Show CodePointRange where
show (CodePointRange x y) = show x ++ ".." ++ show y
-- | Encode a code-point into UTF-8.
-- Extended to support any (not just 21 bits) to make the function total
-- >>> let f x = toUtf8 (putStr . (++ " ") . bitsToString) x
-- 00100100
-- 11000010 10100010
-- >>> f 0x939
-- 11100000 10100100 10111001
-- 11100010 10000010 10101100
-- 11110000 10010000 10001101 10001000
toUtf8 :: Monad m => (Word8 -> m ()) -> Word32 -> m ()
toUtf8 putW8 w
| w .&. 0xFFFFFF80 == 0 = putW8 (fromIntegral w)
| w .&. 0xFFFFF800 == 0 = do
putW8 <| fromIntegral <| (0b11000000 .|. (w `shiftR` 6))
putW8 <| fromIntegral <| (0b10000000 .|. (w .&. 0b00111111))
| w .&. 0xFFFF0000 == 0 = do
putW8 <| fromIntegral <| (0b11100000 .|. (w `shiftR` 12))
putW8 <| fromIntegral <| (0b10000000 .|. ((w `shiftR` 6) .&. 0b00111111))
putW8 <| fromIntegral <| (0b10000000 .|. (w .&. 0b00111111))
| w .&. 0xFFE00000 == 0 = do
putW8 <| fromIntegral <| (0b11110000 .|. (w `shiftR` 18))
putW8 <| fromIntegral <| (0b10000000 .|. ((w `shiftR` 12) .&. 0b00111111))
putW8 <| fromIntegral <| (0b10000000 .|. ((w `shiftR` 6 ) .&. 0b00111111))
putW8 <| fromIntegral <| (0b10000000 .|. (w .&. 0b00111111))
| w .&. 0xFC000000 == 0 = do
putW8 <| fromIntegral <| (0b11111000 .|. (w `shiftR` 24))
putW8 <| fromIntegral <| (0b10000000 .|. ((w `shiftR` 18) .&. 0b00111111))
putW8 <| fromIntegral <| (0b10000000 .|. ((w `shiftR` 12) .&. 0b00111111))
putW8 <| fromIntegral <| (0b10000000 .|. ((w `shiftR` 6 ) .&. 0b00111111))
putW8 <| fromIntegral <| (0b10000000 .|. (w .&. 0b00111111))
| otherwise = do
putW8 <| fromIntegral <| (0b11111100 .|. (w `shiftR` 30))
putW8 <| fromIntegral <| (0b10000000 .|. ((w `shiftR` 24) .&. 0b00111111))
putW8 <| fromIntegral <| (0b10000000 .|. ((w `shiftR` 18) .&. 0b00111111))
putW8 <| fromIntegral <| (0b10000000 .|. ((w `shiftR` 12) .&. 0b00111111))
putW8 <| fromIntegral <| (0b10000000 .|. ((w `shiftR` 6 ) .&. 0b00111111))
putW8 <| fromIntegral <| (0b10000000 .|. (w .&. 0b00111111))
-- Compared to UTF-8, NULL values are encoded in two non null bytes.
-- 00100100
-- 11000000 10000000
toModifiedUtf8 :: Monad m => (Word8 -> m ()) -> Word32 -> m ()
toModifiedUtf8 putW8 w
| w == 0 = putW8 0b11000000 >> putW8 0b10000000
| otherwise = toUtf8 putW8 w
-- Extended to support any (not just 21 bits).
-- >>> import Control.Monad.Trans.State
-- >>> let x = evalState (fromUtf8 getNext) [0b11110000,0b10010000,0b10001101,0b10001000]
-- >>> x == Just 0x10348
fromUtf8 :: Monad m => m Word8 -> m (Maybe Word32)
fromUtf8 getW8 = do
w <- getW8
let
n = countLeadingZeros (complement w)
b1 = fromIntegral w
case n of
-- not a first byte
0 -> pure (Just b1)
2 -> do
b2 <- fromIntegral <|| getW8
pure <| Just <| ((b1 .&. 0b00011111) `shiftL` 6)
.|. (b2 .&. 0b00111111)
3 -> do
b2 <- fromIntegral <|| getW8
b3 <- fromIntegral <|| getW8
pure <| Just <| ((b1 .&. 0b00001111) `shiftL` 12)
.|. ((b2 .&. 0b00111111) `shiftL` 6)
.|. (b3 .&. 0b00111111)
4 -> do
b2 <- fromIntegral <|| getW8
b3 <- fromIntegral <|| getW8
b4 <- fromIntegral <|| getW8
pure <| Just <| ((b1 .&. 0b00000111) `shiftL` 18)
.|. ((b2 .&. 0b00111111) `shiftL` 12)
.|. ((b3 .&. 0b00111111) `shiftL` 6)
.|. (b4 .&. 0b00111111)
5 -> do
b2 <- fromIntegral <|| getW8
b3 <- fromIntegral <|| getW8
b4 <- fromIntegral <|| getW8
b5 <- fromIntegral <|| getW8
pure <| Just <| ((b1 .&. 0b00000111) `shiftL` 24)
.|. ((b2 .&. 0b00111111) `shiftL` 18)
.|. ((b3 .&. 0b00111111) `shiftL` 12)
.|. ((b4 .&. 0b00111111) `shiftL` 6)
.|. (b5 .&. 0b00111111)
_ -> do
b2 <- fromIntegral <|| getW8
b3 <- fromIntegral <|| getW8
b4 <- fromIntegral <|| getW8
b5 <- fromIntegral <|| getW8
b6 <- fromIntegral <|| getW8
pure <| Just <| ((b1 .&. 0b00000011) `shiftL` 30)
.|. ((b2 .&. 0b00111111) `shiftL` 24)
.|. ((b3 .&. 0b00111111) `shiftL` 18)
.|. ((b4 .&. 0b00111111) `shiftL` 12)
.|. ((b5 .&. 0b00111111) `shiftL` 6)
.|. (b6 .&. 0b00111111)
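-- A hypothetical round-trip driver for the encoder/decoder above: collect the
-- UTF-8 bytes of a code point with a Writer monad, then feed them back through
-- fromUtf8 using the State-based getNext from the doctest. It assumes the
-- transformers package; the names encodeBytes/decodeBytes are illustrative.
--
-- import Control.Monad.Trans.Writer (execWriter, tell)
-- import Control.Monad.Trans.State (evalState, get, put)
--
-- encodeBytes :: Word32 -> [Word8]
-- encodeBytes w = execWriter (toUtf8 (\b -> tell [b]) w)
--
-- decodeBytes :: [Word8] -> Maybe Word32
-- decodeBytes bs = evalState (fromUtf8 getNext) bs
--   where getNext = do { ~(x:xs) <- get; put xs; pure x }
--
-- encodeBytes 0x10348 == [0xF0, 0x90, 0x8D, 0x88]
-- decodeBytes (encodeBytes 0x10348) == Just 0x10348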
|
8b21b4b14653f08c25999fcb387bf51e253fb86413b5621d185ad7d622997e76
|
hanshuebner/bknr-web
|
billboard.lisp
|
(in-package :bknr.text)
(define-persistent-class billboard ()
((name :read
:index-type string-unique-index
:index-reader find-billboard
:index-values all-billboards)
(description :update :initform nil)
(articles :none :initform nil)
(always-show-all :update :initform nil))
(:documentation "message board with a list of articles and topics"))
(deftransaction billboard-add-article (billboard article)
(setf (slot-value billboard 'articles)
(push article (slot-value billboard 'articles))))
(deftransaction billboard-delete-article (billboard article)
(setf (slot-value billboard 'articles)
(delete article (billboard-articles billboard))))
(defun billboard-make-empty-description (billboard author)
(let ((article (make-instance 'article
:author author)))
(change-slot-values billboard 'billboard-description article)))
(defmethod billboard-articles ((billboard billboard) &key since new-for-user)
(cond
(since (remove since (slot-value billboard 'articles)
:test #'< :key #'article-time))
(new-for-user (remove new-for-user (slot-value billboard 'articles)
:test #'(lambda (user article) (article-read article user))))
(t (slot-value billboard 'articles))))
| null |
https://raw.githubusercontent.com/hanshuebner/bknr-web/5c30b61818a2f02f6f2e5dc69fd77396ec3afc51/modules/text/billboard.lisp
|
lisp
|
(in-package :bknr.text)
(define-persistent-class billboard ()
((name :read
:index-type string-unique-index
:index-reader find-billboard
:index-values all-billboards)
(description :update :initform nil)
(articles :none :initform nil)
(always-show-all :update :initform nil))
(:documentation "message board with a list of articles and topics"))
(deftransaction billboard-add-article (billboard article)
(setf (slot-value billboard 'articles)
(push article (slot-value billboard 'articles))))
(deftransaction billboard-delete-article (billboard article)
(setf (slot-value billboard 'articles)
(delete article (billboard-articles billboard))))
(defun billboard-make-empty-description (billboard author)
(let ((article (make-instance 'article
:author author)))
(change-slot-values billboard 'billboard-description article)))
(defmethod billboard-articles ((billboard billboard) &key since new-for-user)
(cond
(since (remove since (slot-value billboard 'articles)
:test #'< :key #'article-time))
(new-for-user (remove new-for-user (slot-value billboard 'articles)
:test #'(lambda (user article) (article-read article user))))
(t (slot-value billboard 'articles))))
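;; A hypothetical usage sketch for the definitions above. It assumes an open
;; bknr.datastore store plus an article object ART and a user object USER
;; created elsewhere; only functions defined or indexed in this file are used.
;; (let ((board (find-billboard "news")))
;;   (billboard-add-article board art)              ; persistent update via deftransaction
;;   (billboard-articles board :new-for-user user)  ; articles USER has not read yet
;;   (billboard-delete-article board art))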
|
|
0f8cb97ef90e6f055bb0b8c37074d48a21ecbe9ed71148c89eb7484a18a91997
|
ocaml-ppx/ppx
|
result.ml
|
type ('a, 'error) t = ('a, 'error) Ppx_caml.result =
| Ok of 'a
| Error of 'error
let compare compare_a compare_error a b =
match a, b with
| Ok a, Ok b -> compare_a a b
| Ok _, Error _ -> Ordering.Lt
| Error _, Ok _ -> Ordering.Gt
| Error a, Error b -> compare_error a b
let ok x = Ok x
let is_ok = function
| Ok _ -> true
| Error _ -> false
let is_error = function
| Ok _ -> false
| Error _ -> true
let ok_exn = function
| Ok x -> x
| Error e -> raise e
let try_with f =
match f () with
| s -> Ok s
| exception e -> Error e
let bind t ~f =
match t with
| Ok x -> f x
| Error _ as t -> t
let map x ~f =
match x with
| Ok x -> Ok (f x)
| Error _ as x -> x
let map_error x ~f =
match x with
| Ok _ as res -> res
| Error x -> Error (f x)
let to_option = function
| Ok p -> Some p
| Error _ -> None
let errorf fmt =
Printf.ksprintf (fun x -> Error x) fmt
let both a b =
match a with
| Error e -> Error e
| Ok a ->
match b with
| Error e -> Error e
| Ok b -> Ok (a, b)
module O = struct
let ( >>= ) t f = bind t ~f
let ( >>| ) t f = map t ~f
let (let*) = (>>=)
let (let+) = (>>|)
let (and+) = both
end
open O
type ('a, 'error) result = ('a, 'error) t
module List = struct
let map t ~f =
let rec loop acc = function
| [] -> Ok (List.rev acc)
| x :: xs ->
f x >>= fun x ->
loop (x :: acc) xs
in
loop [] t
let all =
let rec loop acc = function
| [] -> Ok (List.rev acc)
| t :: l ->
t >>= fun x ->
loop (x :: acc) l
in
fun l -> loop [] l
let concat_map =
let rec loop f acc = function
| [] -> Ok (List.rev acc)
| x :: l ->
f x >>= fun y ->
loop f (List.rev_append y acc) l
in
fun l ~f -> loop f [] l
let rec iter t ~f =
match t with
| [] -> Ok ()
| x :: xs ->
f x >>= fun () ->
iter xs ~f
let rec fold_left t ~f ~init =
match t with
| [] -> Ok init
| x :: xs ->
f init x >>= fun init ->
fold_left xs ~f ~init
end
let hash h1 h2 t =
Ppx_caml.Hashtbl.hash (
match t with
| Ok s -> h1 s
| Error e -> h2 e)
let equal e1 e2 x y =
match x, y with
| Ok x, Ok y -> e1 x y
| Error x, Error y -> e2 x y
| _, _ -> false
let iter t ~f =
match t with
| Error _ -> ()
| Ok s -> f s
module Option = struct
let iter t ~f =
match t with
| None -> Ok ()
| Some x -> x >>= f
end
let to_dyn ok err = function
| Ok e -> Dyn.Encoder.constr "Ok" [ok e]
| Error e -> Dyn.Encoder.constr "Error" [err e]
| null |
https://raw.githubusercontent.com/ocaml-ppx/ppx/40e5a35a4386d969effaf428078c900bd03b78ec/stdppx/result.ml
|
ocaml
|
type ('a, 'error) t = ('a, 'error) Ppx_caml.result =
| Ok of 'a
| Error of 'error
let compare compare_a compare_error a b =
match a, b with
| Ok a, Ok b -> compare_a a b
| Ok _, Error _ -> Ordering.Lt
| Error _, Ok _ -> Ordering.Gt
| Error a, Error b -> compare_error a b
let ok x = Ok x
let is_ok = function
| Ok _ -> true
| Error _ -> false
let is_error = function
| Ok _ -> false
| Error _ -> true
let ok_exn = function
| Ok x -> x
| Error e -> raise e
let try_with f =
match f () with
| s -> Ok s
| exception e -> Error e
let bind t ~f =
match t with
| Ok x -> f x
| Error _ as t -> t
let map x ~f =
match x with
| Ok x -> Ok (f x)
| Error _ as x -> x
let map_error x ~f =
match x with
| Ok _ as res -> res
| Error x -> Error (f x)
let to_option = function
| Ok p -> Some p
| Error _ -> None
let errorf fmt =
Printf.ksprintf (fun x -> Error x) fmt
let both a b =
match a with
| Error e -> Error e
| Ok a ->
match b with
| Error e -> Error e
| Ok b -> Ok (a, b)
module O = struct
let ( >>= ) t f = bind t ~f
let ( >>| ) t f = map t ~f
let (let*) = (>>=)
let (let+) = (>>|)
let (and+) = both
end
open O
type ('a, 'error) result = ('a, 'error) t
module List = struct
let map t ~f =
let rec loop acc = function
| [] -> Ok (List.rev acc)
| x :: xs ->
f x >>= fun x ->
loop (x :: acc) xs
in
loop [] t
let all =
let rec loop acc = function
| [] -> Ok (List.rev acc)
| t :: l ->
t >>= fun x ->
loop (x :: acc) l
in
fun l -> loop [] l
let concat_map =
let rec loop f acc = function
| [] -> Ok (List.rev acc)
| x :: l ->
f x >>= fun y ->
loop f (List.rev_append y acc) l
in
fun l ~f -> loop f [] l
let rec iter t ~f =
match t with
| [] -> Ok ()
| x :: xs ->
f x >>= fun () ->
iter xs ~f
let rec fold_left t ~f ~init =
match t with
| [] -> Ok init
| x :: xs ->
f init x >>= fun init ->
fold_left xs ~f ~init
end
let hash h1 h2 t =
Ppx_caml.Hashtbl.hash (
match t with
| Ok s -> h1 s
| Error e -> h2 e)
let equal e1 e2 x y =
match x, y with
| Ok x, Ok y -> e1 x y
| Error x, Error y -> e2 x y
| _, _ -> false
let iter t ~f =
match t with
| Error _ -> ()
| Ok s -> f s
module Option = struct
let iter t ~f =
match t with
| None -> Ok ()
| Some x -> x >>= f
end
let to_dyn ok err = function
| Ok e -> Dyn.Encoder.constr "Ok" [ok e]
| Error e -> Dyn.Encoder.constr "Error" [err e]
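(* A hypothetical example of the binding operators defined above, appended
   purely as illustration; [checked_div] and [sum_of_quotients] are not part
   of the module. *)
let checked_div (a : int) (b : int) : (int, string) t =
  if b = 0 then Error "division by zero" else Ok (a / b)

let sum_of_quotients (a : int) (b : int) (c : int) : (int, string) t =
  let+ x = checked_div a c
  and+ y = checked_div b c in
  x + y
(* sum_of_quotients 10 20 5 = Ok 6
   sum_of_quotients 10 20 0 = Error "division by zero" *)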
|
|
cda386c51770c5013bcd6cd66732a871cad7142356ae96fd46c8e1a9b639c3e7
|
CryptoKami/cryptokami-core
|
Full.hs
|
{-# LANGUAGE CPP #-}
{-# LANGUAGE RankNTypes #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE ScopedTypeVariables #-}
module Pos.Diffusion.Full
( diffusionLayerFull
) where
import Nub (ordNub)
import Universum
import Control.Monad.Fix (MonadFix)
import qualified Data.Map as M
import Data.Time.Units (Millisecond, Second)
import Formatting (Format)
import Mockable (withAsync, link)
import qualified Network.Broadcast.OutboundQueue as OQ
import Network.Broadcast.OutboundQueue.Types (MsgType (..), Origin (..))
import Network.Transport.Abstract (Transport)
import Node (Node, NodeAction (..), simpleNodeEndPoint, NodeEnvironment (..), defaultNodeEnvironment, node)
import Node.Conversation (Converse, converseWith, Conversation)
import System.Random (newStdGen)
import System.Wlog (WithLogger, CanLog, usingLoggerName)
import Pos.Block.Network (MsgGetHeaders, MsgHeaders, MsgGetBlocks, MsgBlock)
import Pos.Communication (NodeId, VerInfo (..), PeerData, PackingType,
EnqueueMsg, makeEnqueueMsg, bipPacking, Listener,
MkListeners (..), HandlerSpecs, InSpecs (..),
OutSpecs (..), createOutSpecs, toOutSpecs, convH,
InvOrDataTK, MsgSubscribe, MsgSubscribe1,
makeSendActions, SendActions, Msg)
import Pos.Communication.Relay.Logic (invReqDataFlowTK)
import Pos.Communication.Util (wrapListener)
import Pos.Configuration (HasNodeConfiguration, conversationEstablishTimeout)
import Pos.Core (BlockVersionData (..), BlockVersion, HeaderHash, ProxySKHeavy, StakeholderId)
import Pos.Core.Block (Block, BlockHeader, MainBlockHeader)
import Pos.Core.Configuration (protocolMagic)
import Pos.Core.Ssc (Opening, InnerSharesMap, SignedCommitment, VssCertificate)
import Pos.Core.Txp (TxAux)
import Pos.Core.Update (UpId, UpdateProposal, UpdateVote)
import Pos.Crypto.Configuration (ProtocolMagic (..))
import Pos.DHT.Real (KademliaDHTInstance (..), KademliaParams (..),
startDHTInstance, stopDHTInstance,
kademliaJoinNetworkNoThrow, kademliaJoinNetworkRetry)
import qualified Pos.Diffusion.Full.Block as Diffusion.Block
import qualified Pos.Diffusion.Full.Delegation as Diffusion.Delegation
import qualified Pos.Diffusion.Full.Ssc as Diffusion.Ssc
import qualified Pos.Diffusion.Full.Txp as Diffusion.Txp
import Pos.Diffusion.Full.Types (DiffusionWorkMode)
import qualified Pos.Diffusion.Full.Update as Diffusion.Update
import Pos.Diffusion.Subscription.Common (subscriptionListeners)
import Pos.Diffusion.Subscription.Dht (dhtSubscriptionWorker)
import Pos.Diffusion.Subscription.Dns (dnsSubscriptionWorker)
import Pos.Diffusion.Types (Diffusion (..), DiffusionLayer (..))
import Pos.Logic.Types (Logic (..))
import Pos.Network.Types (NetworkConfig (..), Topology (..), Bucket (..), initQueue,
topologySubscribers, SubscriptionWorker (..),
topologySubscriptionWorker, topologyMaxBucketSize,
topologyRunKademlia)
import Pos.Reporting.Health.Types (HealthStatus (..))
import Pos.Reporting.Ekg (EkgNodeMetrics (..), registerEkgNodeMetrics)
import Pos.Ssc.Message (MCOpening (..), MCShares (..), MCCommitment (..), MCVssCertificate (..))
import Pos.Util.Chrono (OldestFirst)
import Pos.Util.OutboundQueue (EnqueuedConversation (..))
import Pos.Util.Timer (Timer, newTimer)
{-# ANN module ("HLint: ignore Reduce duplication" :: Text) #-}
-- | The full diffusion layer.
--
-- NB: we could do the whole logic/diffusion layer interface using typeclasses
-- against a monad, but we'd end up with a bunch of reader constraints over
-- the values that would otherwise appear in the Logic and Diffusion records.
-- That's to say, we'd have to do the same work anyway, but then even more
-- work to juggle the instances.
diffusionLayerFull
:: forall d m x .
( DiffusionWorkMode d
, MonadFix d
, MonadIO m
, MonadMask m
, WithLogger m
)
=> NetworkConfig KademliaParams
-> BlockVersion -- ^ For making the VerInfo.
-> Transport d
-> Maybe (EkgNodeMetrics d)
-> ((Logic d -> m (DiffusionLayer d)) -> m x)
-> m x
diffusionLayerFull networkConfig lastKnownBlockVersion transport mEkgNodeMetrics expectLogic =
bracket acquire release $ \_ -> expectLogic $ \logic -> do
-- Make the outbound queue using network policies.
oq :: OQ.OutboundQ (EnqueuedConversation d) NodeId Bucket <-
initQueue networkConfig (enmStore <$> mEkgNodeMetrics)
-- Timer is in microseconds.
keepaliveTimer :: Timer <- newTimer 20000000
let -- VerInfo is a diffusion-layer-specific thing. It's only used for
-- negotiating with peers.
ourVerInfo :: VerInfo
-- TODO pull protocol magic from an explicit configuration argument
-- rather than from a magic Data.Reflection instance.
-- The lastKnownBlockVersion can go into that configuration record
-- as well. Goal: eliminate all Has*Configuration constraints from
-- full diffusion layer.
-- Ah but that won't be so easy, because serialization instances
-- currently depend on these... so defer it for later.
ourVerInfo = VerInfo (getProtocolMagic protocolMagic) lastKnownBlockVersion ins (outs <> workerOuts)
ins :: HandlerSpecs
InSpecs ins = inSpecs mkL
-- The out specs come not just from listeners but also from workers.
-- Workers in the existing implementation were bundled up in
-- :: ([WorkerSpec m], OutSpecs)
-- and they performed logic layer tasks, so having out specs defined
-- by them doesn't make sense.
-- For the first iteration, we just dump those out specs here, since
-- we know in the diffusion layer the set of all requests that might
-- be made.
--
-- Find below a definition of each of the worker out specs,
-- copied from Pos.Worker (allWorkers). Each one was manually
-- inspected to determine the out specs.
--
-- FIXME this system must change. Perhaps replace it with a
-- version number?
outs :: HandlerSpecs
OutSpecs outs = outSpecs mkL
workerOuts :: HandlerSpecs
OutSpecs workerOuts = mconcat
[ -- First: the relay system out specs.
Diffusion.Txp.txOutSpecs logic
, Diffusion.Update.updateOutSpecs logic
, Diffusion.Delegation.delegationOutSpecs logic
, Diffusion.Ssc.sscOutSpecs logic
-- Relay system for blocks is ad-hoc.
, blockWorkerOutSpecs
-- SSC has non-relay out specs, defined below.
, sscWorkerOutSpecs
, securityWorkerOutSpecs
, slottingWorkerOutSpecs
, subscriptionWorkerOutSpecs
, dhtWorkerOutSpecs
]
-- An onNewSlotWorker and a localWorker. Latter is . Former
-- actually does the stuff.
sscWorkerOutSpecs = mconcat
[ createOutSpecs (Proxy @(InvOrDataTK StakeholderId MCCommitment))
, createOutSpecs (Proxy @(InvOrDataTK StakeholderId MCOpening))
, createOutSpecs (Proxy @(InvOrDataTK StakeholderId MCShares))
, createOutSpecs (Proxy @(InvOrDataTK StakeholderId MCVssCertificate))
]
-- A single worker with
-- requestTipOuts from Pos.Block.Network.
securityWorkerOutSpecs = toOutSpecs
[ convH (Proxy :: Proxy MsgGetHeaders)
(Proxy :: Proxy MsgHeaders)
]
-- announceBlockHeaderOuts from blkCreatorWorker
-- announceBlockHeaderOuts from blkMetricCheckerWorker
-- along with the retrieval worker outs which also include
-- announceBlockHeaderOuts.
blockWorkerOutSpecs = mconcat
[ announceBlockHeaderOuts
, announceBlockHeaderOuts
, announceBlockHeaderOuts <> toOutSpecs [ convH (Proxy :: Proxy MsgGetBlocks)
(Proxy :: Proxy MsgBlock)
]
]
announceBlockHeaderOuts = toOutSpecs [ convH (Proxy :: Proxy MsgHeaders)
(Proxy :: Proxy MsgGetHeaders)
]
-- Plainly from the definition of allWorkers.
slottingWorkerOutSpecs = mempty
-- Copied from existing implementation but
-- FIXME it will be wrong when the patch to include a keepalive
-- is merged. That shall be the first test of this inspec/outspec
-- system I suppose.
subscriptionWorkerOutSpecs = case topologySubscriptionWorker (ncTopology networkConfig) of
Just (SubscriptionWorkerBehindNAT _) -> specs
Just (SubscriptionWorkerKademlia __ _ _ _) -> specs
_ -> mempty
where
specs = toOutSpecs
[ convH (Proxy @MsgSubscribe) (Proxy @Void)
, convH (Proxy @MsgSubscribe1) (Proxy @Void)
]
-- It's a localOnNewSlotWorker, so mempty.
dhtWorkerOutSpecs = mempty
mkL :: MkListeners d
mkL = mconcat $
[ lmodifier "block" $ Diffusion.Block.blockListeners logic oq keepaliveTimer
, lmodifier "tx" $ Diffusion.Txp.txListeners logic oq enqueue
, lmodifier "update" $ Diffusion.Update.updateListeners logic oq enqueue
, lmodifier "delegation" $ Diffusion.Delegation.delegationListeners logic oq enqueue
, lmodifier "ssc" $ Diffusion.Ssc.sscListeners logic oq enqueue
] ++ [
lmodifier "subscription" $ subscriptionListeners oq subscriberNodeType
| Just (subscriberNodeType, _) <- [topologySubscribers (ncTopology networkConfig)]
]
lmodifier lname mkLs = mkLs { mkListeners = mkListeners' }
where
mkListeners' v p =
let ls = mkListeners mkLs v p
f = wrapListener ("server" <> lname)
in map f ls
listeners :: VerInfo -> [Listener d]
listeners = mkListeners mkL ourVerInfo
currentSlotDuration :: d Millisecond
currentSlotDuration = bvdSlotDuration <$> getAdoptedBVData logic
-- Bracket kademlia and network-transport, create a node. This
-- will be very involved. Should make it top-level I think.
runDiffusionLayer :: forall y . d y -> d y
runDiffusionLayer = runDiffusionLayerFull
networkConfig
transport
ourVerInfo
mEkgNodeMetrics
oq
keepaliveTimer
currentSlotDuration
listeners
enqueue :: EnqueueMsg d
enqueue = makeEnqueueMsg ourVerInfo $ \msgType k -> do
itList <- OQ.enqueue oq msgType (EnqueuedConversation (msgType, k))
let itMap = M.fromList itList
return ((>>= either throwM return) <$> itMap)
getBlocks :: NodeId
-> BlockHeader
-> [HeaderHash]
-> d (OldestFirst [] Block)
getBlocks = Diffusion.Block.getBlocks logic enqueue
requestTip :: (BlockHeader -> NodeId -> d t) -> d (Map NodeId (d t))
requestTip = Diffusion.Block.requestTip enqueue
announceBlockHeader :: MainBlockHeader -> d ()
announceBlockHeader = void . Diffusion.Block.announceBlockHeader logic enqueue
sendTx :: TxAux -> d Bool
sendTx = Diffusion.Txp.sendTx enqueue
sendUpdateProposal :: UpId -> UpdateProposal -> [UpdateVote] -> d ()
sendUpdateProposal = Diffusion.Update.sendUpdateProposal enqueue
sendVote :: UpdateVote -> d ()
sendVote = Diffusion.Update.sendVote enqueue
-- FIXME
-- SSC stuff has a 'waitUntilSend' motif before it. Must remember to
-- investigate that and port it if necessary...
-- No, it really should be the logic layer which decides when to send
-- things.
--
-- TODO put these into a Pos.Diffusion.Full.Ssc module.
sendSscCert :: VssCertificate -> d ()
sendSscCert = void . invReqDataFlowTK "ssc" enqueue (MsgMPC OriginSender) (ourStakeholderId logic) . MCVssCertificate
sendSscOpening :: Opening -> d ()
sendSscOpening = void . invReqDataFlowTK "ssc" enqueue (MsgMPC OriginSender) (ourStakeholderId logic) . MCOpening (ourStakeholderId logic)
sendSscShares :: InnerSharesMap -> d ()
sendSscShares = void . invReqDataFlowTK "ssc" enqueue (MsgMPC OriginSender) (ourStakeholderId logic) . MCShares (ourStakeholderId logic)
sendSscCommitment :: SignedCommitment -> d ()
sendSscCommitment = void . invReqDataFlowTK "ssc" enqueue (MsgMPC OriginSender) (ourStakeholderId logic) . MCCommitment
sendPskHeavy :: ProxySKHeavy -> d ()
sendPskHeavy = Diffusion.Delegation.sendPskHeavy enqueue
-- Amazon Route53 health check support (stopgap measure, see note
-- in Pos.Diffusion.Types, above 'healthStatus' record field).
healthStatus :: d HealthStatus
healthStatus = do
let maxCapacityText :: Text
maxCapacityText = case topologyMaxBucketSize (ncTopology networkConfig) BucketSubscriptionListener of
OQ.BucketSizeUnlimited -> fromString "unlimited"
OQ.BucketSizeMax x -> fromString (show x)
spareCapacity <- OQ.bucketSpareCapacity oq BucketSubscriptionListener
pure $ case spareCapacity of
OQ.SpareCapacity sc | sc == 0 -> HSUnhealthy (fromString "0/" <> maxCapacityText)
OQ.SpareCapacity sc -> HSHealthy $ fromString (show sc) <> "/" <> maxCapacityText
OQ.UnlimitedCapacity -> HSHealthy maxCapacityText
formatPeers :: forall r . (forall a . Format r a -> a) -> d (Maybe r)
formatPeers formatter = Just <$> OQ.dumpState oq formatter
diffusion :: Diffusion d
diffusion = Diffusion {..}
return DiffusionLayer {..}
where
-- TBD will we need any resources here?
acquire = pure ()
release = \_ -> pure ()
-- | Create kademlia, network-transport, and run the outbound queue's
-- dequeue thread.
runDiffusionLayerFull
:: forall d x .
( DiffusionWorkMode d, MonadFix d )
=> NetworkConfig KademliaParams
-> Transport d
-> VerInfo
-> Maybe (EkgNodeMetrics d)
-> OQ.OutboundQ (EnqueuedConversation d) NodeId Bucket
-> Timer -- ^ Keepalive timer.
-> d Millisecond -- ^ Slot duration; may change over time.
-> (VerInfo -> [Listener d])
-> d x
-> d x
runDiffusionLayerFull networkConfig transport ourVerInfo mEkgNodeMetrics oq keepaliveTimer slotDuration listeners action =
bracketKademlia networkConfig $ \networkConfig' ->
timeWarpNode transport ourVerInfo listeners $ \nd converse ->
withAsync (OQ.dequeueThread oq (sendMsgFromConverse converse)) $ \dthread -> do
link dthread
case mEkgNodeMetrics of
Just ekgNodeMetrics -> registerEkgNodeMetrics ekgNodeMetrics nd
Nothing -> pure ()
-- Subscription worker bypasses the outbound queue and uses
-- send actions directly.
let sendActions :: SendActions d
sendActions = makeSendActions ourVerInfo oqEnqueue converse
withAsync (subscriptionThread networkConfig' sendActions) $ \sthread -> do
link sthread
action
where
oqEnqueue :: Msg -> (NodeId -> VerInfo -> Conversation PackingType d t) -> d (Map NodeId (d t))
oqEnqueue msgType k = do
itList <- OQ.enqueue oq msgType (EnqueuedConversation (msgType, k))
let itMap = M.fromList itList
return ((>>= either throwM return) <$> itMap)
subscriptionThread nc sactions = case topologySubscriptionWorker (ncTopology nc) of
Just (SubscriptionWorkerBehindNAT dnsDomains) ->
dnsSubscriptionWorker oq networkConfig dnsDomains keepaliveTimer slotDuration sactions
Just (SubscriptionWorkerKademlia kinst nodeType valency fallbacks) ->
dhtSubscriptionWorker oq kinst nodeType valency fallbacks sactions
Nothing -> pure ()
sendMsgFromConverse
:: Converse PackingType PeerData d
-> OQ.SendMsg d (EnqueuedConversation d) NodeId
sendMsgFromConverse converse (EnqueuedConversation (_, k)) nodeId =
converseWith converse nodeId (k nodeId)
-- | Bring up a time-warp node. It will come down when the continuation ends.
timeWarpNode
:: forall d t .
( DiffusionWorkMode d, MonadFix d )
=> Transport d
-> VerInfo
-> (VerInfo -> [Listener d])
-> (Node d -> Converse PackingType PeerData d -> d t)
-> d t
timeWarpNode transport ourVerInfo listeners k = do
stdGen <- liftIO newStdGen
node mkTransport mkReceiveDelay mkConnectDelay stdGen bipPacking ourVerInfo nodeEnv $ \theNode ->
NodeAction listeners $ k theNode
where
mkTransport = simpleNodeEndPoint transport
mkReceiveDelay = const (pure Nothing)
mkConnectDelay = const (pure Nothing)
nodeEnv = defaultNodeEnvironment { nodeAckTimeout = conversationEstablishTimeout }
----------------------------------------------------------------------------
-- Kademlia
----------------------------------------------------------------------------
createKademliaInstance ::
(HasNodeConfiguration, MonadIO m, MonadCatch m, CanLog m)
=> KademliaParams
-> Word16 -- ^ Default port to bind to.
-> m KademliaDHTInstance
createKademliaInstance kp defaultPort =
usingLoggerName "kademlia" (startDHTInstance instConfig defaultBindAddress)
where
instConfig = kp {kpPeers = ordNub $ kpPeers kp}
defaultBindAddress = ("0.0.0.0", defaultPort)
-- | RAII for 'KademliaDHTInstance'.
bracketKademliaInstance
:: (HasNodeConfiguration, MonadIO m, MonadMask m, CanLog m)
=> KademliaParams
-> Word16 -- ^ Default port to bind to.
-> (KademliaDHTInstance -> m a)
-> m a
bracketKademliaInstance kp defaultPort action =
bracket (createKademliaInstance kp defaultPort) stopDHTInstance action
-- | The 'NodeParams' contain enough information to determine whether a Kademlia
-- instance should be brought up. Use this to safely acquire/release one.
bracketKademlia
:: (HasNodeConfiguration, MonadIO m, MonadMask m, CanLog m)
=> NetworkConfig KademliaParams
-> (NetworkConfig KademliaDHTInstance -> m a)
-> m a
bracketKademlia nc@NetworkConfig {..} action = case ncTopology of
-- cases that need Kademlia
TopologyP2P{topologyKademlia = kp, ..} ->
bracketKademliaInstance kp ncDefaultPort $ \kinst ->
k $ TopologyP2P{topologyKademlia = kinst, ..}
TopologyTraditional{topologyKademlia = kp, ..} ->
bracketKademliaInstance kp ncDefaultPort $ \kinst ->
k $ TopologyTraditional{topologyKademlia = kinst, ..}
TopologyRelay{topologyOptKademlia = Just kp, ..} ->
bracketKademliaInstance kp ncDefaultPort $ \kinst ->
k $ TopologyRelay{topologyOptKademlia = Just kinst, ..}
TopologyCore{topologyOptKademlia = Just kp, ..} ->
bracketKademliaInstance kp ncDefaultPort $ \kinst ->
k $ TopologyCore{topologyOptKademlia = Just kinst, ..}
-- cases that don't
TopologyRelay{topologyOptKademlia = Nothing, ..} ->
k $ TopologyRelay{topologyOptKademlia = Nothing, ..}
TopologyCore{topologyOptKademlia = Nothing, ..} ->
k $ TopologyCore{topologyOptKademlia = Nothing, ..}
TopologyBehindNAT{..} ->
k $ TopologyBehindNAT{..}
TopologyAuxx{..} ->
k $ TopologyAuxx{..}
where
k topology = action (nc { ncTopology = topology })
-- | Synchronously join the Kademlia network.
joinKademlia
:: ( DiffusionWorkMode m )
=> NetworkConfig KademliaDHTInstance
-> m ()
joinKademlia networkConfig = case topologyRunKademlia (ncTopology networkConfig) of
-- See 'topologyRunKademlia' documentation: the second component is 'True'
-- iff it's essential that at least one of the initial peers is contacted.
-- Otherwise, it's OK to not find any initial peers and the program can
-- continue.
Just (kInst, True) -> kademliaJoinNetworkRetry kInst (kdiInitialPeers kInst) retryInterval
Just (kInst, False) -> kademliaJoinNetworkNoThrow kInst (kdiInitialPeers kInst)
Nothing -> return ()
where
retryInterval :: Second
retryInterval = 5
data MissingKademliaParams = MissingKademliaParams
deriving (Show)
instance Exception MissingKademliaParams
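-- The acquire/release shape used above (bracketKademliaInstance, and the
-- placeholder acquire/release pair in diffusionLayerFull) is the standard
-- bracket idiom. A minimal, self-contained sketch of the same shape, with
-- invented resource names; it is illustrative and not part of this module:
--
-- import Control.Exception (bracket)
--
-- data Conn = Conn -- stand-in for a resource such as a KademliaDHTInstance
--
-- openConn :: IO Conn
-- openConn = putStrLn "acquire" >> pure Conn
--
-- closeConn :: Conn -> IO ()
-- closeConn _ = putStrLn "release"
--
-- withConn :: (Conn -> IO a) -> IO a
-- withConn = bracket openConn closeConn
--
-- "release" runs even if the continuation throws, which is why Kademlia
-- start/stop is wrapped this way.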
| null |
https://raw.githubusercontent.com/CryptoKami/cryptokami-core/12ca60a9ad167b6327397b3b2f928c19436ae114/lib/src/Pos/Diffusion/Full.hs
|
haskell
|
# LANGUAGE RankNTypes #
# LANGUAGE RecordWildCards #
| The full diffusion layer.
against a monad, but we'd end up with a bunch of reader constraints over
the values that would otherwise appear in the Logic and Diffusion records.
That's to say, we'd have to do the same work anyway, but then even more
work to juggle the instances.
Make the outbound queue using network policies.
VerInfo is a diffusion-layer-specific thing. It's only used for
negotiating with peers.
rather than from a magic Data.Reflection instance.
The lastKnownBlockVersion can go into that configuration record
as well. Goal: eliminate all Has*Configuration constraints from
full diffusion layer.
Ah but that won't be so easy, because serialization instances
currently depend on these... so defer it for later.
The out specs come not just from listeners but also from workers.
Workers in the existing implementation were bundled up in
and they performed logic layer tasks, so having out specs defined
by them doesn't make sense.
we know in the diffusion layer the set of all requests that might
be made.
Find below a definition of each of the worker out specs,
copied from Pos.Worker (allWorkers). Each one was manually
inspected to determine the out specs.
version number?
Relay system for blocks is ad-hoc.
SSC has non-relay out specs, defined below.
requestTipOuts from Pos.Block.Network.
announceBlockHeaderOuts from blkCreatorWorker
announceBlockHeaderOuts from blkMetricCheckerWorker
along with the retrieval worker outs which also include
announceBlockHeaderOuts.
FIXME it will be wrong when the patch to include a keepalive
system I suppose.
Bracket kademlia and network-transport, create a node. This
will be very involved. Should make it top-level I think.
SSC stuff has a 'waitUntilSend' motif before it. Must remember to
investigate that and port it if necessary...
No, it really should be the logic layer which decides when to send
things.
in Pos.Diffusion.Types, above 'healthStatus' record field).
| Create kademlia, network-transport, and run the outbound queue's
dequeue thread.
^ Keepalive timer.
^ Slot duration; may change over time.
Subscription worker bypasses the outbound queue and uses
send actions directly.
| Bring up a time-warp node. It will come down when the continuation ends.
--------------------------------------------------------------------------
Kademlia
--------------------------------------------------------------------------
^ Default port to bind to.
| RAII for 'KademliaDHTInstance'.
^ Default port to bind to.
instance should be brought up. Use this to safely acquire/release one.
cases that need Kademlia
cases that don't
Otherwise, it's OK to not find any initial peers and the program can
continue.
|
{-# LANGUAGE CPP #-}
{-# LANGUAGE ScopedTypeVariables #-}
module Pos.Diffusion.Full
( diffusionLayerFull
) where
import Nub (ordNub)
import Universum
import Control.Monad.Fix (MonadFix)
import qualified Data.Map as M
import Data.Time.Units (Millisecond, Second)
import Formatting (Format)
import Mockable (withAsync, link)
import qualified Network.Broadcast.OutboundQueue as OQ
import Network.Broadcast.OutboundQueue.Types (MsgType (..), Origin (..))
import Network.Transport.Abstract (Transport)
import Node (Node, NodeAction (..), simpleNodeEndPoint, NodeEnvironment (..), defaultNodeEnvironment, node)
import Node.Conversation (Converse, converseWith, Conversation)
import System.Random (newStdGen)
import System.Wlog (WithLogger, CanLog, usingLoggerName)
import Pos.Block.Network (MsgGetHeaders, MsgHeaders, MsgGetBlocks, MsgBlock)
import Pos.Communication (NodeId, VerInfo (..), PeerData, PackingType,
EnqueueMsg, makeEnqueueMsg, bipPacking, Listener,
MkListeners (..), HandlerSpecs, InSpecs (..),
OutSpecs (..), createOutSpecs, toOutSpecs, convH,
InvOrDataTK, MsgSubscribe, MsgSubscribe1,
makeSendActions, SendActions, Msg)
import Pos.Communication.Relay.Logic (invReqDataFlowTK)
import Pos.Communication.Util (wrapListener)
import Pos.Configuration (HasNodeConfiguration, conversationEstablishTimeout)
import Pos.Core (BlockVersionData (..), BlockVersion, HeaderHash, ProxySKHeavy, StakeholderId)
import Pos.Core.Block (Block, BlockHeader, MainBlockHeader)
import Pos.Core.Configuration (protocolMagic)
import Pos.Core.Ssc (Opening, InnerSharesMap, SignedCommitment, VssCertificate)
import Pos.Core.Txp (TxAux)
import Pos.Core.Update (UpId, UpdateProposal, UpdateVote)
import Pos.Crypto.Configuration (ProtocolMagic (..))
import Pos.DHT.Real (KademliaDHTInstance (..), KademliaParams (..),
startDHTInstance, stopDHTInstance,
kademliaJoinNetworkNoThrow, kademliaJoinNetworkRetry)
import qualified Pos.Diffusion.Full.Block as Diffusion.Block
import qualified Pos.Diffusion.Full.Delegation as Diffusion.Delegation
import qualified Pos.Diffusion.Full.Ssc as Diffusion.Ssc
import qualified Pos.Diffusion.Full.Txp as Diffusion.Txp
import Pos.Diffusion.Full.Types (DiffusionWorkMode)
import qualified Pos.Diffusion.Full.Update as Diffusion.Update
import Pos.Diffusion.Subscription.Common (subscriptionListeners)
import Pos.Diffusion.Subscription.Dht (dhtSubscriptionWorker)
import Pos.Diffusion.Subscription.Dns (dnsSubscriptionWorker)
import Pos.Diffusion.Types (Diffusion (..), DiffusionLayer (..))
import Pos.Logic.Types (Logic (..))
import Pos.Network.Types (NetworkConfig (..), Topology (..), Bucket (..), initQueue,
topologySubscribers, SubscriptionWorker (..),
topologySubscriptionWorker, topologyMaxBucketSize,
topologyRunKademlia)
import Pos.Reporting.Health.Types (HealthStatus (..))
import Pos.Reporting.Ekg (EkgNodeMetrics (..), registerEkgNodeMetrics)
import Pos.Ssc.Message (MCOpening (..), MCShares (..), MCCommitment (..), MCVssCertificate (..))
import Pos.Util.Chrono (OldestFirst)
import Pos.Util.OutboundQueue (EnqueuedConversation (..))
import Pos.Util.Timer (Timer, newTimer)
{-# ANN module ("HLint: ignore Reduce duplication" :: Text) #-}
-- NB: we could do the whole logic/diffusion layer interface using typeclasses
diffusionLayerFull
:: forall d m x .
( DiffusionWorkMode d
, MonadFix d
, MonadIO m
, MonadMask m
, WithLogger m
)
=> NetworkConfig KademliaParams
    -> BlockVersion -- For making the VerInfo.
-> Transport d
-> Maybe (EkgNodeMetrics d)
-> ((Logic d -> m (DiffusionLayer d)) -> m x)
-> m x
diffusionLayerFull networkConfig lastKnownBlockVersion transport mEkgNodeMetrics expectLogic =
bracket acquire release $ \_ -> expectLogic $ \logic -> do
oq :: OQ.OutboundQ (EnqueuedConversation d) NodeId Bucket <-
initQueue networkConfig (enmStore <$> mEkgNodeMetrics)
        -- Timer is in microseconds.
keepaliveTimer :: Timer <- newTimer 20000000
ourVerInfo :: VerInfo
            -- TODO pull protocol magic from an explicit configuration argument
ourVerInfo = VerInfo (getProtocolMagic protocolMagic) lastKnownBlockVersion ins (outs <> workerOuts)
ins :: HandlerSpecs
InSpecs ins = inSpecs mkL
            -- :: ([WorkerSpec m], OutSpecs)
            -- For the first iteration, we just dump those out specs here, since
            -- FIXME this system must change. Perhaps replace it with a
outs :: HandlerSpecs
OutSpecs outs = outSpecs mkL
workerOuts :: HandlerSpecs
OutSpecs workerOuts = mconcat
              -- First: the relay system out specs.
Diffusion.Txp.txOutSpecs logic
, Diffusion.Update.updateOutSpecs logic
, Diffusion.Delegation.delegationOutSpecs logic
, Diffusion.Ssc.sscOutSpecs logic
, blockWorkerOutSpecs
, sscWorkerOutSpecs
, securityWorkerOutSpecs
, slottingWorkerOutSpecs
, subscriptionWorkerOutSpecs
, dhtWorkerOutSpecs
]
            -- An onNewSlotWorker and a localWorker. Latter is . Former
            -- actually does the stuff.
sscWorkerOutSpecs = mconcat
[ createOutSpecs (Proxy @(InvOrDataTK StakeholderId MCCommitment))
, createOutSpecs (Proxy @(InvOrDataTK StakeholderId MCOpening))
, createOutSpecs (Proxy @(InvOrDataTK StakeholderId MCShares))
, createOutSpecs (Proxy @(InvOrDataTK StakeholderId MCVssCertificate))
]
            -- A single worker with
securityWorkerOutSpecs = toOutSpecs
[ convH (Proxy :: Proxy MsgGetHeaders)
(Proxy :: Proxy MsgHeaders)
]
blockWorkerOutSpecs = mconcat
[ announceBlockHeaderOuts
, announceBlockHeaderOuts
, announceBlockHeaderOuts <> toOutSpecs [ convH (Proxy :: Proxy MsgGetBlocks)
(Proxy :: Proxy MsgBlock)
]
]
announceBlockHeaderOuts = toOutSpecs [ convH (Proxy :: Proxy MsgHeaders)
(Proxy :: Proxy MsgGetHeaders)
]
            -- Plainly from the definition of allWorkers.
slottingWorkerOutSpecs = mempty
            -- Copied from existing implementation but FIXME it will be wrong
            -- when the patch to include a keepalive is merged. That shall be
            -- the first test of this inspec/outspec system I suppose.
subscriptionWorkerOutSpecs = case topologySubscriptionWorker (ncTopology networkConfig) of
Just (SubscriptionWorkerBehindNAT _) -> specs
Just (SubscriptionWorkerKademlia __ _ _ _) -> specs
_ -> mempty
where
specs = toOutSpecs
[ convH (Proxy @MsgSubscribe) (Proxy @Void)
, convH (Proxy @MsgSubscribe1) (Proxy @Void)
]
            -- It's a localOnNewSlotWorker, so mempty.
dhtWorkerOutSpecs = mempty
mkL :: MkListeners d
mkL = mconcat $
[ lmodifier "block" $ Diffusion.Block.blockListeners logic oq keepaliveTimer
, lmodifier "tx" $ Diffusion.Txp.txListeners logic oq enqueue
, lmodifier "update" $ Diffusion.Update.updateListeners logic oq enqueue
, lmodifier "delegation" $ Diffusion.Delegation.delegationListeners logic oq enqueue
, lmodifier "ssc" $ Diffusion.Ssc.sscListeners logic oq enqueue
] ++ [
lmodifier "subscription" $ subscriptionListeners oq subscriberNodeType
| Just (subscriberNodeType, _) <- [topologySubscribers (ncTopology networkConfig)]
]
lmodifier lname mkLs = mkLs { mkListeners = mkListeners' }
where
mkListeners' v p =
let ls = mkListeners mkLs v p
f = wrapListener ("server" <> lname)
in map f ls
listeners :: VerInfo -> [Listener d]
listeners = mkListeners mkL ourVerInfo
currentSlotDuration :: d Millisecond
currentSlotDuration = bvdSlotDuration <$> getAdoptedBVData logic
runDiffusionLayer :: forall y . d y -> d y
runDiffusionLayer = runDiffusionLayerFull
networkConfig
transport
ourVerInfo
mEkgNodeMetrics
oq
keepaliveTimer
currentSlotDuration
listeners
enqueue :: EnqueueMsg d
enqueue = makeEnqueueMsg ourVerInfo $ \msgType k -> do
itList <- OQ.enqueue oq msgType (EnqueuedConversation (msgType, k))
let itMap = M.fromList itList
return ((>>= either throwM return) <$> itMap)
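        -- Illustrative note (added; not in the original source): each value in
        -- the returned map is an action that waits for the conversation with
        -- that peer to finish, rethrowing its failure if any. A caller could
        -- wait on every peer roughly like:
        --
        -- > resultsByPeer <- enqueue someMsg someConversation
        -- > results       <- sequence resultsByPeer
        --
        -- @someMsg@ and @someConversation@ are placeholders.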
getBlocks :: NodeId
-> BlockHeader
-> [HeaderHash]
-> d (OldestFirst [] Block)
getBlocks = Diffusion.Block.getBlocks logic enqueue
requestTip :: (BlockHeader -> NodeId -> d t) -> d (Map NodeId (d t))
requestTip = Diffusion.Block.requestTip enqueue
announceBlockHeader :: MainBlockHeader -> d ()
announceBlockHeader = void . Diffusion.Block.announceBlockHeader logic enqueue
sendTx :: TxAux -> d Bool
sendTx = Diffusion.Txp.sendTx enqueue
sendUpdateProposal :: UpId -> UpdateProposal -> [UpdateVote] -> d ()
sendUpdateProposal = Diffusion.Update.sendUpdateProposal enqueue
sendVote :: UpdateVote -> d ()
sendVote = Diffusion.Update.sendVote enqueue
        -- FIXME
        -- TODO put these into a Pos.Diffusion.Full.Ssc module.
sendSscCert :: VssCertificate -> d ()
sendSscCert = void . invReqDataFlowTK "ssc" enqueue (MsgMPC OriginSender) (ourStakeholderId logic) . MCVssCertificate
sendSscOpening :: Opening -> d ()
sendSscOpening = void . invReqDataFlowTK "ssc" enqueue (MsgMPC OriginSender) (ourStakeholderId logic) . MCOpening (ourStakeholderId logic)
sendSscShares :: InnerSharesMap -> d ()
sendSscShares = void . invReqDataFlowTK "ssc" enqueue (MsgMPC OriginSender) (ourStakeholderId logic) . MCShares (ourStakeholderId logic)
sendSscCommitment :: SignedCommitment -> d ()
sendSscCommitment = void . invReqDataFlowTK "ssc" enqueue (MsgMPC OriginSender) (ourStakeholderId logic) . MCCommitment
sendPskHeavy :: ProxySKHeavy -> d ()
sendPskHeavy = Diffusion.Delegation.sendPskHeavy enqueue
        -- Amazon Route53 health check support (stopgap measure, see note
healthStatus :: d HealthStatus
healthStatus = do
let maxCapacityText :: Text
maxCapacityText = case topologyMaxBucketSize (ncTopology networkConfig) BucketSubscriptionListener of
OQ.BucketSizeUnlimited -> fromString "unlimited"
OQ.BucketSizeMax x -> fromString (show x)
spareCapacity <- OQ.bucketSpareCapacity oq BucketSubscriptionListener
pure $ case spareCapacity of
OQ.SpareCapacity sc | sc == 0 -> HSUnhealthy (fromString "0/" <> maxCapacityText)
OQ.SpareCapacity sc -> HSHealthy $ fromString (show sc) <> "/" <> maxCapacityText
OQ.UnlimitedCapacity -> HSHealthy maxCapacityText
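        -- Worked example (added for illustration): with the subscription
        -- bucket capped at 100 and 40 slots still free,
        --   OQ.SpareCapacity 40  ==>  HSHealthy "40/100"
        --   OQ.SpareCapacity 0   ==>  HSUnhealthy "0/100"
        --   OQ.UnlimitedCapacity ==>  HSHealthy "unlimited"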
formatPeers :: forall r . (forall a . Format r a -> a) -> d (Maybe r)
formatPeers formatter = Just <$> OQ.dumpState oq formatter
diffusion :: Diffusion d
diffusion = Diffusion {..}
return DiffusionLayer {..}
where
    -- TBD will we need any resources here?
acquire = pure ()
release = \_ -> pure ()
runDiffusionLayerFull
:: forall d x .
( DiffusionWorkMode d, MonadFix d )
=> NetworkConfig KademliaParams
-> Transport d
-> VerInfo
-> Maybe (EkgNodeMetrics d)
    -> OQ.OutboundQ (EnqueuedConversation d) NodeId Bucket
    -> Timer -- ^ Keepalive timer.
    -> d Millisecond -- ^ Slot duration; may change over time.
    -> (VerInfo -> [Listener d])
-> d x
-> d x
runDiffusionLayerFull networkConfig transport ourVerInfo mEkgNodeMetrics oq keepaliveTimer slotDuration listeners action =
bracketKademlia networkConfig $ \networkConfig' ->
timeWarpNode transport ourVerInfo listeners $ \nd converse ->
withAsync (OQ.dequeueThread oq (sendMsgFromConverse converse)) $ \dthread -> do
link dthread
case mEkgNodeMetrics of
Just ekgNodeMetrics -> registerEkgNodeMetrics ekgNodeMetrics nd
Nothing -> pure ()
let sendActions :: SendActions d
sendActions = makeSendActions ourVerInfo oqEnqueue converse
withAsync (subscriptionThread networkConfig' sendActions) $ \sthread -> do
link sthread
action
where
oqEnqueue :: Msg -> (NodeId -> VerInfo -> Conversation PackingType d t) -> d (Map NodeId (d t))
oqEnqueue msgType k = do
itList <- OQ.enqueue oq msgType (EnqueuedConversation (msgType, k))
let itMap = M.fromList itList
return ((>>= either throwM return) <$> itMap)
subscriptionThread nc sactions = case topologySubscriptionWorker (ncTopology nc) of
Just (SubscriptionWorkerBehindNAT dnsDomains) ->
dnsSubscriptionWorker oq networkConfig dnsDomains keepaliveTimer slotDuration sactions
Just (SubscriptionWorkerKademlia kinst nodeType valency fallbacks) ->
dhtSubscriptionWorker oq kinst nodeType valency fallbacks sactions
Nothing -> pure ()
sendMsgFromConverse
:: Converse PackingType PeerData d
-> OQ.SendMsg d (EnqueuedConversation d) NodeId
sendMsgFromConverse converse (EnqueuedConversation (_, k)) nodeId =
converseWith converse nodeId (k nodeId)
timeWarpNode
:: forall d t .
( DiffusionWorkMode d, MonadFix d )
=> Transport d
-> VerInfo
-> (VerInfo -> [Listener d])
-> (Node d -> Converse PackingType PeerData d -> d t)
-> d t
timeWarpNode transport ourVerInfo listeners k = do
stdGen <- liftIO newStdGen
node mkTransport mkReceiveDelay mkConnectDelay stdGen bipPacking ourVerInfo nodeEnv $ \theNode ->
NodeAction listeners $ k theNode
where
mkTransport = simpleNodeEndPoint transport
mkReceiveDelay = const (pure Nothing)
mkConnectDelay = const (pure Nothing)
nodeEnv = defaultNodeEnvironment { nodeAckTimeout = conversationEstablishTimeout }
createKademliaInstance ::
(HasNodeConfiguration, MonadIO m, MonadCatch m, CanLog m)
    => KademliaParams
    -> Word16 -- ^ Default port to bind to.
-> m KademliaDHTInstance
createKademliaInstance kp defaultPort =
usingLoggerName "kademlia" (startDHTInstance instConfig defaultBindAddress)
where
instConfig = kp {kpPeers = ordNub $ kpPeers kp}
defaultBindAddress = ("0.0.0.0", defaultPort)
bracketKademliaInstance
:: (HasNodeConfiguration, MonadIO m, MonadMask m, CanLog m)
    => KademliaParams
    -> Word16 -- ^ Default port to bind to.
-> (KademliaDHTInstance -> m a)
-> m a
bracketKademliaInstance kp defaultPort action =
bracket (createKademliaInstance kp defaultPort) stopDHTInstance action
-- | The 'NodeParams' contain enough information to determine whether a Kademlia
bracketKademlia
:: (HasNodeConfiguration, MonadIO m, MonadMask m, CanLog m)
=> NetworkConfig KademliaParams
-> (NetworkConfig KademliaDHTInstance -> m a)
-> m a
bracketKademlia nc@NetworkConfig {..} action = case ncTopology of
TopologyP2P{topologyKademlia = kp, ..} ->
bracketKademliaInstance kp ncDefaultPort $ \kinst ->
k $ TopologyP2P{topologyKademlia = kinst, ..}
TopologyTraditional{topologyKademlia = kp, ..} ->
bracketKademliaInstance kp ncDefaultPort $ \kinst ->
k $ TopologyTraditional{topologyKademlia = kinst, ..}
TopologyRelay{topologyOptKademlia = Just kp, ..} ->
bracketKademliaInstance kp ncDefaultPort $ \kinst ->
k $ TopologyRelay{topologyOptKademlia = Just kinst, ..}
TopologyCore{topologyOptKademlia = Just kp, ..} ->
bracketKademliaInstance kp ncDefaultPort $ \kinst ->
k $ TopologyCore{topologyOptKademlia = Just kinst, ..}
TopologyRelay{topologyOptKademlia = Nothing, ..} ->
k $ TopologyRelay{topologyOptKademlia = Nothing, ..}
TopologyCore{topologyOptKademlia = Nothing, ..} ->
k $ TopologyCore{topologyOptKademlia = Nothing, ..}
TopologyBehindNAT{..} ->
k $ TopologyBehindNAT{..}
TopologyAuxx{..} ->
k $ TopologyAuxx{..}
where
k topology = action (nc { ncTopology = topology })
-- | Synchronously join the Kademlia network.
joinKademlia
:: ( DiffusionWorkMode m )
=> NetworkConfig KademliaDHTInstance
-> m ()
joinKademlia networkConfig = case topologyRunKademlia (ncTopology networkConfig) of
    -- See 'topologyRunKademlia' documentation: the second component is 'True'
    -- iff it's essential that at least one of the initial peers is contacted.
Just (kInst, True) -> kademliaJoinNetworkRetry kInst (kdiInitialPeers kInst) retryInterval
Just (kInst, False) -> kademliaJoinNetworkNoThrow kInst (kdiInitialPeers kInst)
Nothing -> return ()
where
retryInterval :: Second
retryInterval = 5
data MissingKademliaParams = MissingKademliaParams
deriving (Show)
instance Exception MissingKademliaParams
|
853f965544b44d2bc9ea54a273b0beb8926df175841d14ca619580270285b8e4
|
orchid-hybrid/microKanren-sagittarius
|
table.scm
|
(define (make-table g)
  ;; artificially limit to 100
;; to help avoid crashes (maybe?)
(cons 'table (stream-take 100 (stream-map (lambda (k)
(walk* (var 0) (substitution k)))
((call/fresh g) initial-kanren)))))
(define (table-membero t q)
(unless (eq? (car t) 'table)
(error "not a table!"))
(let loop ((t t))
(if (null? t)
(== 0 1)
(begin
(when (procedure? (cdr t))
(set-cdr! t (pull (cdr t))))
(disj (copy-termo (car t) q)
(Zzz (loop (cdr t))))))))
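;; Illustrative usage (added; not part of the original file), assuming the
;; usual miniKanren entry points (run*, ==) from the surrounding library are
;; in scope:
;;
;;   (define fives (make-table (lambda (x) (== x 5))))
;;   (run* (q) (table-membero fives q)) ;; expected: (5)
;;
;; make-table memoises up to 100 answers of the goal, and table-membero
;; replays each stored answer against q via copy-termo.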
| null |
https://raw.githubusercontent.com/orchid-hybrid/microKanren-sagittarius/9e740bbf94ed2930f88bbcf32636d3480934cfbb/miruKanren/table.scm
|
scheme
|
to help avoid crashes (maybe?)
|
(define (make-table g)
  ;; artificially limit to 100
(cons 'table (stream-take 100 (stream-map (lambda (k)
(walk* (var 0) (substitution k)))
((call/fresh g) initial-kanren)))))
(define (table-membero t q)
(unless (eq? (car t) 'table)
(error "not a table!"))
(let loop ((t t))
(if (null? t)
(== 0 1)
(begin
(when (procedure? (cdr t))
(set-cdr! t (pull (cdr t))))
(disj (copy-termo (car t) q)
(Zzz (loop (cdr t))))))))
|
f08c459851cd0e4ef84325ec87ee99657f73e5fd7e9689661f5a3fb87bd72e20
|
senapk/funcional_arcade
|
solver.hs
|
somaMaybe (Just x) (Just y) = Just(x + y)
somaMaybe (Just x) Nothing = Just(x)
somaMaybe Nothing (Just y) = Just(y)
somaMaybe Nothing Nothing = Nothing
main = do
print $ somaMaybe (Just 5) (Just 7) == Just(12)
print $ somaMaybe (Just 5) Nothing == Just(5)
print $ somaMaybe Nothing (Just 3) == Just(3)
print $ somaMaybe Nothing Nothing == Nothing
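-- Equivalent formulation (added for illustration; not part of the original
-- solution): wrapping the numbers in the Sum monoid lets Maybe's Semigroup
-- instance do the same case analysis, with Nothing as the identity:
--
-- > import Data.Monoid (Sum (..))
-- > somaMaybe' a b = fmap getSum (fmap Sum a <> fmap Sum b)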
| null |
https://raw.githubusercontent.com/senapk/funcional_arcade/7bfbd3d1407c5c8013550691d4fa048d74a53339/base/081/solver.hs
|
haskell
|
somaMaybe (Just x) (Just y) = Just(x + y)
somaMaybe (Just x) Nothing = Just(x)
somaMaybe Nothing (Just y) = Just(y)
somaMaybe Nothing Nothing = Nothing
main = do
print $ somaMaybe (Just 5) (Just 7) == Just(12)
print $ somaMaybe (Just 5) Nothing == Just(5)
print $ somaMaybe Nothing (Just 3) == Just(3)
print $ somaMaybe Nothing Nothing == Nothing
|
|
f95446033455f0b58094033546a40dc6c3c2e708044aec5c101bc1c8ea431fc4
|
ml4tp/tcoq
|
redexpr.ml
|
(************************************************************************)
(*  v      *   The Coq Proof Assistant  /  The Coq Development Team     *)
(* <O___,, *   INRIA - CNRS - LIX - LRI - PPS - Copyright 1999-2017     *)
(*   \VV/  **************************************************************)
(* // * This file is distributed under the terms of the *)
(* * GNU Lesser General Public License Version 2.1 *)
(************************************************************************)
open Pp
open CErrors
open Util
open Names
open Term
open Declarations
open Globnames
open Genredexpr
open Pattern
open Reductionops
open Tacred
open CClosure
open RedFlags
open Libobject
open Misctypes
(* call by value normalisation function using the virtual machine *)
let cbv_vm env sigma c =
let ctyp = Retyping.get_type_of env sigma c in
if Termops.occur_meta_or_existential c then
error "vm_compute does not support existential variables.";
Vnorm.cbv_vm env c ctyp
let warn_native_compute_disabled =
CWarnings.create ~name:"native-compute-disabled" ~category:"native-compiler"
(fun () ->
strbrk "native_compute disabled at configure time; falling back to vm_compute.")
let cbv_native env sigma c =
if Coq_config.no_native_compiler then
(warn_native_compute_disabled ();
cbv_vm env sigma c)
else
let ctyp = Retyping.get_type_of env sigma c in
Nativenorm.native_norm env sigma c ctyp
let whd_cbn flags env sigma t =
let (state,_) =
(whd_state_gen true true flags env sigma (t,Reductionops.Stack.empty))
in Reductionops.Stack.zip ~refold:true state
let strong_cbn flags =
strong (whd_cbn flags)
let simplIsCbn = ref (false)
let _ = Goptions.declare_bool_option {
Goptions.optsync = true; Goptions.optdepr = false;
Goptions.optname =
"Plug the simpl tactic to the new cbn mechanism";
Goptions.optkey = ["SimplIsCbn"];
Goptions.optread = (fun () -> !simplIsCbn);
Goptions.optwrite = (fun a -> simplIsCbn:=a);
}
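(* Illustration (added comment, not in the original source): because the
   option key is ["SimplIsCbn"], a vernacular file can flip this flag with
   "Set SimplIsCbn." / "Unset SimplIsCbn.", which makes [simpl] go through
   the cbn machinery below instead of the legacy whd_simpl path. *)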
let set_strategy_one ref l =
let k =
match ref with
| EvalConstRef sp -> ConstKey sp
| EvalVarRef id -> VarKey id in
Global.set_strategy k l;
match k,l with
ConstKey sp, Conv_oracle.Opaque ->
Csymtable.set_opaque_const sp
| ConstKey sp, _ ->
let cb = Global.lookup_constant sp in
(match cb.const_body with
| OpaqueDef _ ->
errorlabstrm "set_transparent_const"
(str "Cannot make" ++ spc () ++
Nametab.pr_global_env Id.Set.empty (ConstRef sp) ++
spc () ++ str "transparent because it was declared opaque.");
| _ -> Csymtable.set_transparent_const sp)
| _ -> ()
let cache_strategy (_,str) =
List.iter
(fun (lev,ql) -> List.iter (fun q -> set_strategy_one q lev) ql)
str
let subst_strategy (subs,(local,obj)) =
local,
List.smartmap
(fun (k,ql as entry) ->
let ql' = List.smartmap (Mod_subst.subst_evaluable_reference subs) ql in
if ql==ql' then entry else (k,ql'))
obj
let map_strategy f l =
let l' = List.fold_right
(fun (lev,ql) str ->
let ql' = List.fold_right
(fun q ql ->
match f q with
Some q' -> q' :: ql
| None -> ql) ql [] in
if List.is_empty ql' then str else (lev,ql')::str) l [] in
if List.is_empty l' then None else Some (false,l')
let classify_strategy (local,_ as obj) =
if local then Dispose else Substitute obj
let disch_ref ref =
match ref with
EvalConstRef c ->
let c' = Lib.discharge_con c in
if c==c' then Some ref else Some (EvalConstRef c')
| EvalVarRef id -> if Lib.is_in_section (VarRef id) then None else Some ref
let discharge_strategy (_,(local,obj)) =
if local then None else
map_strategy disch_ref obj
type strategy_obj =
bool * (Conv_oracle.level * evaluable_global_reference list) list
let inStrategy : strategy_obj -> obj =
declare_object {(default_object "STRATEGY") with
cache_function = (fun (_,obj) -> cache_strategy obj);
load_function = (fun _ (_,obj) -> cache_strategy obj);
subst_function = subst_strategy;
discharge_function = discharge_strategy;
classify_function = classify_strategy }
let set_strategy local str =
Lib.add_anonymous_leaf (inStrategy (local,str))
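(* Usage sketch (added for illustration): an ML plugin could make a constant
   opaque for reduction with something like
     set_strategy false [(Conv_oracle.Opaque, [EvalConstRef con])]
   where [con] is a hypothetical [Constant.t] already in scope. *)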
(* Generic reduction: reduction functions used in reduction tactics *)
type red_expr =
(constr, evaluable_global_reference, constr_pattern) red_expr_gen
let make_flag_constant = function
| EvalVarRef id -> fVAR id
| EvalConstRef sp -> fCONST sp
let make_flag env f =
let red = no_red in
let red = if f.rBeta then red_add red fBETA else red in
let red = if f.rMatch then red_add red fMATCH else red in
let red = if f.rFix then red_add red fFIX else red in
let red = if f.rCofix then red_add red fCOFIX else red in
let red = if f.rZeta then red_add red fZETA else red in
let red =
if f.rDelta then (* All but rConst *)
let red = red_add red fDELTA in
let red = red_add_transparent red
(Conv_oracle.get_transp_state (Environ.oracle env)) in
List.fold_right
(fun v red -> red_sub red (make_flag_constant v))
f.rConst red
else (* Only rConst *)
let red = red_add_transparent (red_add red fDELTA) all_opaque in
List.fold_right
(fun v red -> red_add red (make_flag_constant v))
f.rConst red
in red
(* table of custom reduction functions, not synchronized,
   filled via ML calls to [declare_reduction] *)
let reduction_tab = ref String.Map.empty
(* table of custom reduction expressions, synchronized,
filled by command Declare Reduction *)
let red_expr_tab = Summary.ref String.Map.empty ~name:"Declare Reduction"
let declare_reduction s f =
if String.Map.mem s !reduction_tab || String.Map.mem s !red_expr_tab
then errorlabstrm "Redexpr.declare_reduction"
(str "There is already a reduction expression of name " ++ str s)
else reduction_tab := String.Map.add s f !reduction_tab
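(* Usage sketch (added; not part of the original file): a plugin can register
   a custom reduction under a name and later invoke it as [Eval my_red in ...]:
     let () = declare_reduction "my_red" (fun _env _sigma c -> c)
   The identity function here is only a placeholder for a real reduction. *)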
let check_custom = function
| ExtraRedExpr s ->
if not (String.Map.mem s !reduction_tab || String.Map.mem s !red_expr_tab)
then errorlabstrm "Redexpr.check_custom" (str "Reference to undefined reduction expression " ++ str s)
|_ -> ()
let decl_red_expr s e =
if String.Map.mem s !reduction_tab || String.Map.mem s !red_expr_tab
then errorlabstrm "Redexpr.decl_red_expr"
(str "There is already a reduction expression of name " ++ str s)
else begin
check_custom e;
red_expr_tab := String.Map.add s e !red_expr_tab
end
let out_arg = function
| ArgVar _ -> anomaly (Pp.str "Unevaluated or_var variable")
| ArgArg x -> x
let out_with_occurrences (occs,c) =
(Locusops.occurrences_map (List.map out_arg) occs, c)
let e_red f = { e_redfun = fun env evm c -> Sigma.here (f env (Sigma.to_evar_map evm) c) evm }
let head_style = false (* Turn to true to have a semantics where simpl
   only reduce at the head when an evaluable reference is given, e.g.
   2+n would just reduce to S(1+n) instead of S(S(n)) *)
let contextualize f g = function
| Some (occs,c) ->
let l = Locusops.occurrences_map (List.map out_arg) occs in
let b,c,h = match c with
| Inl r -> true,PRef (global_of_evaluable_reference r),f
| Inr c -> false,c,f in
e_red (contextually b (l,c) (fun _ -> h))
| None -> e_red g
let warn_simpl_unfolding_modifiers =
CWarnings.create ~name:"simpl-unfolding-modifiers" ~category:"tactics"
(fun () ->
Pp.strbrk "The legacy simpl ignores constant unfolding modifiers.")
let reduction_of_red_expr env =
let make_flag = make_flag env in
let rec reduction_of_red_expr = function
| Red internal ->
if internal then (e_red try_red_product,DEFAULTcast)
else (e_red red_product,DEFAULTcast)
| Hnf -> (e_red hnf_constr,DEFAULTcast)
| Simpl (f,o) ->
let whd_am = if !simplIsCbn then whd_cbn (make_flag f) else whd_simpl in
let am = if !simplIsCbn then strong_cbn (make_flag f) else simpl in
let () =
if not (!simplIsCbn || List.is_empty f.rConst) then
warn_simpl_unfolding_modifiers () in
(contextualize (if head_style then whd_am else am) am o,DEFAULTcast)
| Cbv f -> (e_red (cbv_norm_flags (make_flag f)),DEFAULTcast)
| Cbn f ->
(e_red (strong_cbn (make_flag f)), DEFAULTcast)
| Lazy f -> (e_red (clos_norm_flags (make_flag f)),DEFAULTcast)
| Unfold ubinds -> (e_red (unfoldn (List.map out_with_occurrences ubinds)),DEFAULTcast)
| Fold cl -> (e_red (fold_commands cl),DEFAULTcast)
| Pattern lp -> (pattern_occs (List.map out_with_occurrences lp),DEFAULTcast)
| ExtraRedExpr s ->
(try (e_red (String.Map.find s !reduction_tab),DEFAULTcast)
with Not_found ->
(try reduction_of_red_expr (String.Map.find s !red_expr_tab)
with Not_found ->
errorlabstrm "Redexpr.reduction_of_red_expr"
(str "unknown user-defined reduction \"" ++ str s ++ str "\"")))
| CbvVm o -> (contextualize cbv_vm cbv_vm o, VMcast)
| CbvNative o -> (contextualize cbv_native cbv_native o, NATIVEcast)
in
reduction_of_red_expr
let subst_red_expr subs =
Miscops.map_red_expr_gen
(Mod_subst.subst_mps subs)
(Mod_subst.subst_evaluable_reference subs)
(Patternops.subst_pattern subs)
let inReduction : bool * string * red_expr -> obj =
declare_object
{(default_object "REDUCTION") with
cache_function = (fun (_,(_,s,e)) -> decl_red_expr s e);
load_function = (fun _ (_,(_,s,e)) -> decl_red_expr s e);
subst_function =
(fun (subs,(b,s,e)) -> b,s,subst_red_expr subs e);
classify_function =
(fun ((b,_,_) as obj) -> if b then Dispose else Substitute obj) }
let declare_red_expr locality s expr =
Lib.add_anonymous_leaf (inReduction (locality,s,expr))
| null |
https://raw.githubusercontent.com/ml4tp/tcoq/7a78c31df480fba721648f277ab0783229c8bece/proofs/redexpr.ml
|
ocaml
|
**********************************************************************
// * This file is distributed under the terms of the
* GNU Lesser General Public License Version 2.1
**********************************************************************
call by value normalisation function using the virtual machine
All but rConst
Only rConst
table of custom reduction expressions, synchronized,
filled by command Declare Reduction
|
(*  v      *   The Coq Proof Assistant  /  The Coq Development Team     *)
(* <O___,, *   INRIA - CNRS - LIX - LRI - PPS - Copyright 1999-2017     *)
(*   \VV/  **************************************************************)
open Pp
open CErrors
open Util
open Names
open Term
open Declarations
open Globnames
open Genredexpr
open Pattern
open Reductionops
open Tacred
open CClosure
open RedFlags
open Libobject
open Misctypes
let cbv_vm env sigma c =
let ctyp = Retyping.get_type_of env sigma c in
if Termops.occur_meta_or_existential c then
error "vm_compute does not support existential variables.";
Vnorm.cbv_vm env c ctyp
let warn_native_compute_disabled =
CWarnings.create ~name:"native-compute-disabled" ~category:"native-compiler"
(fun () ->
strbrk "native_compute disabled at configure time; falling back to vm_compute.")
let cbv_native env sigma c =
if Coq_config.no_native_compiler then
(warn_native_compute_disabled ();
cbv_vm env sigma c)
else
let ctyp = Retyping.get_type_of env sigma c in
Nativenorm.native_norm env sigma c ctyp
let whd_cbn flags env sigma t =
let (state,_) =
(whd_state_gen true true flags env sigma (t,Reductionops.Stack.empty))
in Reductionops.Stack.zip ~refold:true state
let strong_cbn flags =
strong (whd_cbn flags)
let simplIsCbn = ref (false)
let _ = Goptions.declare_bool_option {
Goptions.optsync = true; Goptions.optdepr = false;
Goptions.optname =
"Plug the simpl tactic to the new cbn mechanism";
Goptions.optkey = ["SimplIsCbn"];
Goptions.optread = (fun () -> !simplIsCbn);
Goptions.optwrite = (fun a -> simplIsCbn:=a);
}
let set_strategy_one ref l =
let k =
match ref with
| EvalConstRef sp -> ConstKey sp
| EvalVarRef id -> VarKey id in
Global.set_strategy k l;
match k,l with
ConstKey sp, Conv_oracle.Opaque ->
Csymtable.set_opaque_const sp
| ConstKey sp, _ ->
let cb = Global.lookup_constant sp in
(match cb.const_body with
| OpaqueDef _ ->
errorlabstrm "set_transparent_const"
(str "Cannot make" ++ spc () ++
Nametab.pr_global_env Id.Set.empty (ConstRef sp) ++
spc () ++ str "transparent because it was declared opaque.");
| _ -> Csymtable.set_transparent_const sp)
| _ -> ()
let cache_strategy (_,str) =
List.iter
(fun (lev,ql) -> List.iter (fun q -> set_strategy_one q lev) ql)
str
let subst_strategy (subs,(local,obj)) =
local,
List.smartmap
(fun (k,ql as entry) ->
let ql' = List.smartmap (Mod_subst.subst_evaluable_reference subs) ql in
if ql==ql' then entry else (k,ql'))
obj
let map_strategy f l =
let l' = List.fold_right
(fun (lev,ql) str ->
let ql' = List.fold_right
(fun q ql ->
match f q with
Some q' -> q' :: ql
| None -> ql) ql [] in
if List.is_empty ql' then str else (lev,ql')::str) l [] in
if List.is_empty l' then None else Some (false,l')
let classify_strategy (local,_ as obj) =
if local then Dispose else Substitute obj
let disch_ref ref =
match ref with
EvalConstRef c ->
let c' = Lib.discharge_con c in
if c==c' then Some ref else Some (EvalConstRef c')
| EvalVarRef id -> if Lib.is_in_section (VarRef id) then None else Some ref
let discharge_strategy (_,(local,obj)) =
if local then None else
map_strategy disch_ref obj
type strategy_obj =
bool * (Conv_oracle.level * evaluable_global_reference list) list
let inStrategy : strategy_obj -> obj =
declare_object {(default_object "STRATEGY") with
cache_function = (fun (_,obj) -> cache_strategy obj);
load_function = (fun _ (_,obj) -> cache_strategy obj);
subst_function = subst_strategy;
discharge_function = discharge_strategy;
classify_function = classify_strategy }
let set_strategy local str =
Lib.add_anonymous_leaf (inStrategy (local,str))
(* Generic reduction: reduction functions used in reduction tactics *)
type red_expr =
(constr, evaluable_global_reference, constr_pattern) red_expr_gen
let make_flag_constant = function
| EvalVarRef id -> fVAR id
| EvalConstRef sp -> fCONST sp
let make_flag env f =
let red = no_red in
let red = if f.rBeta then red_add red fBETA else red in
let red = if f.rMatch then red_add red fMATCH else red in
let red = if f.rFix then red_add red fFIX else red in
let red = if f.rCofix then red_add red fCOFIX else red in
let red = if f.rZeta then red_add red fZETA else red in
  let red =
    if f.rDelta then
      let red = red_add red fDELTA in
let red = red_add_transparent red
(Conv_oracle.get_transp_state (Environ.oracle env)) in
List.fold_right
(fun v red -> red_sub red (make_flag_constant v))
f.rConst red
    else
      let red = red_add_transparent (red_add red fDELTA) all_opaque in
List.fold_right
(fun v red -> red_add red (make_flag_constant v))
f.rConst red
in red
(* table of custom reduction functions, not synchronized,
   filled via ML calls to [declare_reduction] *)
let reduction_tab = ref String.Map.empty
let red_expr_tab = Summary.ref String.Map.empty ~name:"Declare Reduction"
let declare_reduction s f =
if String.Map.mem s !reduction_tab || String.Map.mem s !red_expr_tab
then errorlabstrm "Redexpr.declare_reduction"
(str "There is already a reduction expression of name " ++ str s)
else reduction_tab := String.Map.add s f !reduction_tab
let check_custom = function
| ExtraRedExpr s ->
if not (String.Map.mem s !reduction_tab || String.Map.mem s !red_expr_tab)
then errorlabstrm "Redexpr.check_custom" (str "Reference to undefined reduction expression " ++ str s)
|_ -> ()
let decl_red_expr s e =
if String.Map.mem s !reduction_tab || String.Map.mem s !red_expr_tab
then errorlabstrm "Redexpr.decl_red_expr"
(str "There is already a reduction expression of name " ++ str s)
else begin
check_custom e;
red_expr_tab := String.Map.add s e !red_expr_tab
end
let out_arg = function
| ArgVar _ -> anomaly (Pp.str "Unevaluated or_var variable")
| ArgArg x -> x
let out_with_occurrences (occs,c) =
(Locusops.occurrences_map (List.map out_arg) occs, c)
let e_red f = { e_redfun = fun env evm c -> Sigma.here (f env (Sigma.to_evar_map evm) c) evm }
let head_style = false (* Turn to true to have a semantics where simpl
   only reduce at the head when an evaluable reference is given, e.g.
   2+n would just reduce to S(1+n) instead of S(S(n)) *)
let contextualize f g = function
| Some (occs,c) ->
let l = Locusops.occurrences_map (List.map out_arg) occs in
let b,c,h = match c with
| Inl r -> true,PRef (global_of_evaluable_reference r),f
| Inr c -> false,c,f in
e_red (contextually b (l,c) (fun _ -> h))
| None -> e_red g
let warn_simpl_unfolding_modifiers =
CWarnings.create ~name:"simpl-unfolding-modifiers" ~category:"tactics"
(fun () ->
Pp.strbrk "The legacy simpl ignores constant unfolding modifiers.")
let reduction_of_red_expr env =
let make_flag = make_flag env in
let rec reduction_of_red_expr = function
| Red internal ->
if internal then (e_red try_red_product,DEFAULTcast)
else (e_red red_product,DEFAULTcast)
| Hnf -> (e_red hnf_constr,DEFAULTcast)
| Simpl (f,o) ->
let whd_am = if !simplIsCbn then whd_cbn (make_flag f) else whd_simpl in
let am = if !simplIsCbn then strong_cbn (make_flag f) else simpl in
let () =
if not (!simplIsCbn || List.is_empty f.rConst) then
warn_simpl_unfolding_modifiers () in
(contextualize (if head_style then whd_am else am) am o,DEFAULTcast)
| Cbv f -> (e_red (cbv_norm_flags (make_flag f)),DEFAULTcast)
| Cbn f ->
(e_red (strong_cbn (make_flag f)), DEFAULTcast)
| Lazy f -> (e_red (clos_norm_flags (make_flag f)),DEFAULTcast)
| Unfold ubinds -> (e_red (unfoldn (List.map out_with_occurrences ubinds)),DEFAULTcast)
| Fold cl -> (e_red (fold_commands cl),DEFAULTcast)
| Pattern lp -> (pattern_occs (List.map out_with_occurrences lp),DEFAULTcast)
| ExtraRedExpr s ->
(try (e_red (String.Map.find s !reduction_tab),DEFAULTcast)
with Not_found ->
(try reduction_of_red_expr (String.Map.find s !red_expr_tab)
with Not_found ->
errorlabstrm "Redexpr.reduction_of_red_expr"
(str "unknown user-defined reduction \"" ++ str s ++ str "\"")))
| CbvVm o -> (contextualize cbv_vm cbv_vm o, VMcast)
| CbvNative o -> (contextualize cbv_native cbv_native o, NATIVEcast)
in
reduction_of_red_expr
let subst_red_expr subs =
Miscops.map_red_expr_gen
(Mod_subst.subst_mps subs)
(Mod_subst.subst_evaluable_reference subs)
(Patternops.subst_pattern subs)
let inReduction : bool * string * red_expr -> obj =
declare_object
{(default_object "REDUCTION") with
cache_function = (fun (_,(_,s,e)) -> decl_red_expr s e);
load_function = (fun _ (_,(_,s,e)) -> decl_red_expr s e);
subst_function =
(fun (subs,(b,s,e)) -> b,s,subst_red_expr subs e);
classify_function =
(fun ((b,_,_) as obj) -> if b then Dispose else Substitute obj) }
let declare_red_expr locality s expr =
Lib.add_anonymous_leaf (inReduction (locality,s,expr))
|
5094489289d6f93047b4900dda3d07836647315f22853000aea7b8afdd0a78d0
|
a13x/aberth
|
aberth.erl
|
%% Copyright (c) 2013 < >
%%
%% Permission to use, copy, modify, and/or distribute this software for any
%% purpose with or without fee is hereby granted, provided that the above
%% copyright notice and this permission notice appear in all copies.
%%
%% THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
%% WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
%% MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
%% ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
%% WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
%% ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
%% OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
-module(aberth).
%% API.
-export([start/0, stop/0]).
-export([start_server/3]).
%% Utils
-export([no_such_module/1, not_allowed/1, not_loaded/1]).
%% Client
-export([call/4, cast/4]).
%% Types
-type handler() :: module().
-type handlers() :: [handler()].
-export_type([handlers/0]).
%% Aplication
start() ->
application:ensure_all_started(aberth),
ok.
stop() ->
application:stop(aberth).
%% Starting and loading aberth server
-spec start_server(integer(), integer(), aberth:handlers()) -> {ok, pid()} | {error, term()}.
start_server(NbAcceptors, Port, Handlers) ->
    %% @doc start aberth BERT-RPC server
%%
%% ```
%% NbAcceptors = integer()
%% Port = integer()
%% Handlers - any(),
%% '''
%%
    %% NbAcceptors is a number of processes that receive connections
%% Port is a port number the server should listen to
%% Handlers is a list of modules that are wired to the server
ok = lists:foreach(fun code:ensure_loaded/1, Handlers),
aberth_server:add_handlers(Handlers),
ranch:start_listener(aberth, NbAcceptors, ranch_tcp,
[{port, Port}], aberth_protocol, []).
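%% Usage sketch (added for illustration; not part of the original module):
%% start the application and expose one handler module on port 9999 with
%% 20 acceptor processes:
%%
%%   ok = aberth:start(),
%%   {ok, _Pid} = aberth:start_server(20, 9999, [my_handler_module]),
%%
%% my_handler_module is a placeholder for any loaded module whose exported
%% functions should be callable over BERT-RPC.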
%% Utility funs
no_such_module(Module) ->
Msg = list_to_binary(io_lib:format("Module '~p' not found", [Module])),
{error, {server, 1, <<"ServerError">>, Msg, []}}.
not_allowed(Func) ->
Msg = list_to_binary(io_lib:format("Method '~p' not allowed", [Func])),
{error, {server, 2, <<"ServerError">>, Msg, []}}.
not_loaded(Mod) ->
Msg = list_to_binary(io_lib:format("Module '~p' not loaded", [Mod])),
{error, {server, 1, <<"ServerError">>, Msg, []}}.
%% Client API funs
call(Host, Port, {mfa, Mod, Fun, Args}, Info) when is_list(Args) andalso is_list(Info) ->
Packets = lists:map(fun bert:encode/1, Info ++ [{call, Mod, Fun, Args}]),
call_1(Host, Port, Packets).
cast(Host, Port, {mfa, Mod, Fun, Args}, Info) when is_list(Args) andalso is_list(Info) ->
Packets = lists:map(fun bert:encode/1, Info ++ [{cast, Mod, Fun, Args}]),
call_1(Host, Port, Packets).
%% Client internal funs
call_1(Host, Port, Packets) when is_list(Packets) ->
case gen_tcp:connect(Host, Port, [binary, {packet, 4}, {active, false}]) of
{ok, Socket} ->
call_2(Socket, Packets);
Error ->
Error
end.
call_2(Socket, Packets) ->
ok = lists:foreach(fun(X) -> gen_tcp:send(Socket,X) end, Packets),
case gen_tcp:recv(Socket, 0) of
{ok, Received} ->
gen_tcp:close(Socket),
decode(Received);
{error, Reason} ->
{error, Reason}
end.
decode(Data) ->
case bert:decode(Data) of
{reply, Reply} ->
Reply;
{noreply} ->
ok;
{error, Error} ->
{error, Error};
Other ->
{error, {bad_response, Other}}
end.
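%% Client-side sketch (added; not part of the original module): call
%% my_handler_module:add(1, 2) on a server at localhost:9999 with no extra
%% info packets:
%%
%%   Result = aberth:call("localhost", 9999, {mfa, my_handler_module, add, [1, 2]}, []),
%%
%% my_handler_module and add/2 are hypothetical; cast/4 works the same way but
%% expects a {noreply} response from the server.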
| null |
https://raw.githubusercontent.com/a13x/aberth/e09b721b40782d337970aebe3d126e0ab27b1089/src/aberth.erl
|
erlang
|
Permission to use, copy, modify, and/or distribute this software for any
purpose with or without fee is hereby granted, provided that the above
copyright notice and this permission notice appear in all copies.
WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
API.
Client
Types
Aplication
Starting and loading aberth server
```
NbAcceptors = integer()
Port = integer()
Handlers - any(),
'''
Port is a port number the server should listen to
Handlers is a list of modules that are wired to the server
Utility funs
Client API funs
Client internal funs
|
%% Copyright (c) 2013 < >
%% THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
%% ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
%% WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
-module(aberth).
-export([start/0, stop/0]).
-export([start_server/3]).
%% Utils
-export([no_such_module/1, not_allowed/1, not_loaded/1]).
-export([call/4, cast/4]).
-type handler() :: module().
-type handlers() :: [handler()].
-export_type([handlers/0]).
start() ->
application:ensure_all_started(aberth),
ok.
stop() ->
application:stop(aberth).
-spec start_server(integer(), integer(), aberth:handlers()) -> {ok, pid()} | {error, term()}.
start_server(NbAcceptors, Port, Handlers) ->
    %% @doc start aberth BERT-RPC server
    %% NbAcceptors is a number of processes that receive connections
ok = lists:foreach(fun code:ensure_loaded/1, Handlers),
aberth_server:add_handlers(Handlers),
ranch:start_listener(aberth, NbAcceptors, ranch_tcp,
[{port, Port}], aberth_protocol, []).
no_such_module(Module) ->
Msg = list_to_binary(io_lib:format("Module '~p' not found", [Module])),
{error, {server, 1, <<"ServerError">>, Msg, []}}.
not_allowed(Func) ->
Msg = list_to_binary(io_lib:format("Method '~p' not allowed", [Func])),
{error, {server, 2, <<"ServerError">>, Msg, []}}.
not_loaded(Mod) ->
Msg = list_to_binary(io_lib:format("Module '~p' not loaded", [Mod])),
{error, {server, 1, <<"ServerError">>, Msg, []}}.
call(Host, Port, {mfa, Mod, Fun, Args}, Info) when is_list(Args) andalso is_list(Info) ->
Packets = lists:map(fun bert:encode/1, Info ++ [{call, Mod, Fun, Args}]),
call_1(Host, Port, Packets).
cast(Host, Port, {mfa, Mod, Fun, Args}, Info) when is_list(Args) andalso is_list(Info) ->
Packets = lists:map(fun bert:encode/1, Info ++ [{cast, Mod, Fun, Args}]),
call_1(Host, Port, Packets).
call_1(Host, Port, Packets) when is_list(Packets) ->
case gen_tcp:connect(Host, Port, [binary, {packet, 4}, {active, false}]) of
{ok, Socket} ->
call_2(Socket, Packets);
Error ->
Error
end.
call_2(Socket, Packets) ->
ok = lists:foreach(fun(X) -> gen_tcp:send(Socket,X) end, Packets),
case gen_tcp:recv(Socket, 0) of
{ok, Received} ->
gen_tcp:close(Socket),
decode(Received);
{error, Reason} ->
{error, Reason}
end.
decode(Data) ->
case bert:decode(Data) of
{reply, Reply} ->
Reply;
{noreply} ->
ok;
{error, Error} ->
{error, Error};
Other ->
{error, {bad_response, Other}}
end.
|
f02307db0537386a584278fbcb34756d17d497878cc8e65d1a8a805d22268140
|
puppetlabs/puppetdb
|
pql_test.clj
|
(ns puppetlabs.puppetdb.pql-test
(:require [clojure.test :refer :all]
[puppetlabs.puppetdb.pql :as pql]))
(deftest test-pql->ast
(are [pql ast] (= (first (pql/pql->ast pql))
ast)
;; Some basic comparisons
"nodes { a = 'a' }"
["from" "nodes"
["=" "a" "a"]]
"nodes { a = 'a' }"
["from" "nodes"
["=" "a" "a"]]
"nodes { a = 1 or b = 2 }"
["from" "nodes"
["or" ["=" "a" 1] ["=" "b" 2]]]
"nodes { a = 1 and b = 2 }"
["from" "nodes"
["and" ["=" "a" 1] ["=" "b" 2]]]
(format "nodes { a = %d and b = %d }" Long/MAX_VALUE Long/MIN_VALUE)
["from" "nodes"
["and" ["=" "a" Long/MAX_VALUE] ["=" "b" Long/MIN_VALUE]]]
"fact_contents {path = [\"foo\",\"bar\"]}"
["from" "fact_contents"
["=" "path" ["foo" "bar"]]]
"fact_contents {path = [\"foo\",\"bar\" ]}"
["from" "fact_contents"
["=" "path" ["foo" "bar"]]]
"fact_contents {path = [ \"foo\",\"bar\" ]}"
["from" "fact_contents"
["=" "path" ["foo" "bar"]]]
"fact_contents {path = [ \"foo\", \"bar\" ]}"
["from" "fact_contents"
["=" "path" ["foo" "bar"]]]
"fact_contents {path = [ \"foo\", \"bar\"]}"
["from" "fact_contents"
["=" "path" ["foo" "bar"]]]
"fact_contents {path = [\"foo\", \"bar\" ]}"
["from" "fact_contents"
["=" "path" ["foo" "bar"]]]
"fact_contents {path = [\"foo\", 1 ]}"
["from" "fact_contents"
["=" "path" ["foo" 1]]]
;; Not
"nodes { !(a = 1) }"
["from" "nodes"
["not" ["=" "a" 1]]]
"nodes { !a = 1 }"
["from" "nodes"
["not" ["=" "a" 1]]]
;; Null?
"events { line is null }"
["from" "events"
["null?" "line" true]]
"events { line is not null }"
["from" "events"
["null?" "line" false]]
;; Strings & escaping
"facts { name = 'kernel' }"
["from" "facts"
["=" "name" "kernel"]]
"facts { name = 'escapemy\\'quote' }"
["from" "facts"
["=" "name" "escapemy'quote"]]
"facts { name = 'carriage\\nreturn' }"
["from" "facts"
["=" "name" "carriage\\nreturn"]]
"facts { name = \"escapemy\\\"quote\" }"
["from" "facts"
["=" "name" "escapemy\"quote"]]
"facts { name = \"carriage\\nreturn\" }"
["from" "facts"
["=" "name" "carriage\nreturn"]]
"facts { name ~ 'escapemy\\'quote' }"
["from" "facts"
["~" "name" "escapemy'quote"]]
"fact_contents { path ~> ['networking', 'eth.*'] }"
["from" "fact_contents"
["~>" "path" ["networking" "eth.*"]]]
;; More complex expressions
"nodes { a = 1 or b = 2 and c = 3 }"
["from" "nodes"
["or"
["=" "a" 1]
["and"
["=" "b" 2]
["=" "c" 3]]]]
"nodes { a = 1 or b = 2 and c = 3 or d = 4 }"
["from" "nodes"
["or"
["=" "a" 1]
["and"
["=" "b" 2]
["=" "c" 3]]
["=" "d" 4]]]
"nodes { a = 1 or b = 2 and (c = 3 or d = 4) }"
["from" "nodes"
["or"
["=" "a" 1]
["and"
["=" "b" 2]
["or"
["=" "c" 3]
["=" "d" 4]]]]]
"nodes { a = 1 or b = 2 and !(c = 3 or d = 4) }"
["from" "nodes"
["or"
["=" "a" 1]
["and"
["=" "b" 2]
["not"
["or"
["=" "c" 3]
["=" "d" 4]]]]]]
"nodes { a = 1 or b = 2 and (!c = 3 or d = 4) }"
["from" "nodes"
["or"
["=" "a" 1]
["and"
["=" "b" 2]
["or"
["not"
["=" "c" 3]]
["=" "d" 4]]]]]
    ;; Whitespace around parentheses
"nodes { ( a = 1 or b = 2) }"
["from" "nodes"
["or"
["=" "a" 1]
["=" "b" 2]]]
"nodes { (a = 1 or b = 2 ) }"
["from" "nodes"
["or"
["=" "a" 1]
["=" "b" 2]]]
;; Extractions
"nodes[] {}"
["from" "nodes"
["extract" []]]
"nodes[a, b, c] {}"
["from" "nodes"
["extract" ["a" "b" "c"]]]
"nodes[a, b, c] { a = 1 }"
["from" "nodes"
["extract" ["a" "b" "c"]
["=" "a" 1]]]
"inventory[facts.os.family, facts.os.release] { facts.os.family = \"Debian\" }"
["from" "inventory"
["extract" ["facts.os.family" "facts.os.release"]
["=" "facts.os.family" "Debian"]]]
;; Functions
"nodes[count(a)] {}"
["from" "nodes"
["extract" [["function" "count" "a"]]]]
"nodes[count()] {}"
["from" "nodes"
["extract" [["function" "count"]]]]
;; Subqueries
"nodes [a, b, c] { a in resources [x] { x = 1 } }"
["from" "nodes"
["extract" ["a" "b" "c"]
["in" "a"
["from" "resources"
["extract" ["x"]
["=" "x" 1]]]]]]
"nodes[a, b, c] { resources { x = 1 } }"
["from" "nodes"
["extract" ["a" "b" "c"]
["subquery" "resources"
["=" "x" 1]]]]
"nodes[a, b, c] { [a, b] in resources [a, b] { x = 1 }}"
["from" "nodes"
["extract" ["a" "b" "c"]
["in" ["a" "b"]
["from" "resources"
["extract" ["a" "b"]
["=" "x" 1]]]]]]
"nodes[a, b, c] { [a, b] in [1, 2] }"
["from" "nodes"
["extract" ["a" "b" "c"]
["in" ["a" "b"]
["array" [1 2]]]]]
"nodes[a,b,c] { resources { x = 1 } }"
["from" "nodes"
["extract" ["a" "b" "c"]
["subquery" "resources"
["=" "x" 1]]]]
"facts[value] { [certname,name] in fact_contents[certname,name] { value < 100 }}"
["from" "facts"
["extract" ["value"]
["in" ["certname" "name"]
["from" "fact_contents"
["extract" ["certname" "name"] ["<" "value" 100]]]]]]
"facts[value] { fact_contents { value < 100 } }"
["from" "facts"
["extract" ["value"]
["subquery" "fact_contents"
["<" "value" 100]]]]
;; Modifiers
"facts[name, count()] { group by name }"
["from" "facts"
["extract"
["name" ["function" "count"]]
["group_by" "name"]]]
"facts[name, count(value)] { certname ~ 'web.*' group by name }"
["from" "facts"
["extract" ["name" ["function" "count" "value"]]
["~" "certname" "web.*"]
["group_by" "name"]]]
"events[count(), status, certname] { certname ~ 'web.*' group by status, certname }"
["from" "events"
["extract", [["function" "count"] "status" "certname"],
["~" "certname" "web.*"]
["group_by" "status" "certname"]]]
;; Paging
"reports{limit 1}"
["from" "reports"
["limit" 1]]
"reports {limit 1 offset 1}"
["from" "reports"
["limit" 1] ["offset" 1]]
"reports {certname = 'foo' limit 1 offset 1}"
["from" "reports"
["=" "certname" "foo"]
["limit" 1] ["offset" 1]]
"reports[certname, receive_time]{certname = 'foo' limit 1 offset 1}"
["from" "reports"
["extract" ["certname" "receive_time"] ["=" "certname" "foo"]]
["limit" 1] ["offset" 1]]
"reports[certname]{certname = 'foo' limit 10 order by certname}"
["from" "reports"
["extract" ["certname"] ["=" "certname" "foo"]]
["limit" 10] ["order_by" ["certname"]]]
"reports[certname]{certname = 'foo' limit 10 order by certname desc}"
["from" "reports"
["extract" ["certname"] ["=" "certname" "foo"]]
["limit" 10] ["order_by" [["certname" "desc"]]]]
"reports[certname]{certname = 'foo' limit 10 order by certname desc, receive_time asc}"
["from" "reports"
["extract" ["certname"] ["=" "certname" "foo"]]
["limit" 10] ["order_by" [["certname" "desc"] ["receive_time" "asc"]]]]
"reports[certname]{certname = 'foo' limit 10 order by certname, receive_time desc}"
["from" "reports"
["extract" ["certname"] ["=" "certname" "foo"]]
["limit" 10] ["order_by" ["certname" ["receive_time" "desc"]]]]
"reports[certname]{certname = 'foo' order by certname desc, receive_time asc limit 10}"
["from" "reports"
["extract" ["certname"] ["=" "certname" "foo"]]
["order_by" [["certname" "desc"] ["receive_time" "asc"]]] ["limit" 10]]
    ;; Inequality on dates
"reports{receive_time > '2016-02-07T08:45:42.170687300Z'}"
["from" "reports"
[">" "receive_time" "2016-02-07T08:45:42.170687300Z"]]
"reports{receive_time >= '2016-02-07T08:45:42.170687300Z'}"
["from" "reports"
[">=" "receive_time" "2016-02-07T08:45:42.170687300Z"]]
"reports{receive_time <= '2016-02-07T08:45:42.170687300Z'}"
["from" "reports"
["<=" "receive_time" "2016-02-07T08:45:42.170687300Z"]]
"reports{receive_time < '2016-02-07T08:45:42.170687300Z'}"
["from" "reports"
["<" "receive_time" "2016-02-07T08:45:42.170687300Z"]]))
| null |
https://raw.githubusercontent.com/puppetlabs/puppetdb/b3d6d10555561657150fa70b6d1e609fba9c0eda/test/puppetlabs/puppetdb/pql_test.clj
|
clojure
|
Some basic comparisons
Not
Null?
Strings & escaping
More complex expressions
Extractions
Functions
Subqueries
Modifiers
Paging
Inequality on dates
|
(ns puppetlabs.puppetdb.pql-test
(:require [clojure.test :refer :all]
[puppetlabs.puppetdb.pql :as pql]))
(deftest test-pql->ast
(are [pql ast] (= (first (pql/pql->ast pql))
ast)
"nodes { a = 'a' }"
["from" "nodes"
["=" "a" "a"]]
"nodes { a = 'a' }"
["from" "nodes"
["=" "a" "a"]]
"nodes { a = 1 or b = 2 }"
["from" "nodes"
["or" ["=" "a" 1] ["=" "b" 2]]]
"nodes { a = 1 and b = 2 }"
["from" "nodes"
["and" ["=" "a" 1] ["=" "b" 2]]]
(format "nodes { a = %d and b = %d }" Long/MAX_VALUE Long/MIN_VALUE)
["from" "nodes"
["and" ["=" "a" Long/MAX_VALUE] ["=" "b" Long/MIN_VALUE]]]
"fact_contents {path = [\"foo\",\"bar\"]}"
["from" "fact_contents"
["=" "path" ["foo" "bar"]]]
"fact_contents {path = [\"foo\",\"bar\" ]}"
["from" "fact_contents"
["=" "path" ["foo" "bar"]]]
"fact_contents {path = [ \"foo\",\"bar\" ]}"
["from" "fact_contents"
["=" "path" ["foo" "bar"]]]
"fact_contents {path = [ \"foo\", \"bar\" ]}"
["from" "fact_contents"
["=" "path" ["foo" "bar"]]]
"fact_contents {path = [ \"foo\", \"bar\"]}"
["from" "fact_contents"
["=" "path" ["foo" "bar"]]]
"fact_contents {path = [\"foo\", \"bar\" ]}"
["from" "fact_contents"
["=" "path" ["foo" "bar"]]]
"fact_contents {path = [\"foo\", 1 ]}"
["from" "fact_contents"
["=" "path" ["foo" 1]]]
"nodes { !(a = 1) }"
["from" "nodes"
["not" ["=" "a" 1]]]
"nodes { !a = 1 }"
["from" "nodes"
["not" ["=" "a" 1]]]
"events { line is null }"
["from" "events"
["null?" "line" true]]
"events { line is not null }"
["from" "events"
["null?" "line" false]]
"facts { name = 'kernel' }"
["from" "facts"
["=" "name" "kernel"]]
"facts { name = 'escapemy\\'quote' }"
["from" "facts"
["=" "name" "escapemy'quote"]]
"facts { name = 'carriage\\nreturn' }"
["from" "facts"
["=" "name" "carriage\\nreturn"]]
"facts { name = \"escapemy\\\"quote\" }"
["from" "facts"
["=" "name" "escapemy\"quote"]]
"facts { name = \"carriage\\nreturn\" }"
["from" "facts"
["=" "name" "carriage\nreturn"]]
"facts { name ~ 'escapemy\\'quote' }"
["from" "facts"
["~" "name" "escapemy'quote"]]
"fact_contents { path ~> ['networking', 'eth.*'] }"
["from" "fact_contents"
["~>" "path" ["networking" "eth.*"]]]
"nodes { a = 1 or b = 2 and c = 3 }"
["from" "nodes"
["or"
["=" "a" 1]
["and"
["=" "b" 2]
["=" "c" 3]]]]
"nodes { a = 1 or b = 2 and c = 3 or d = 4 }"
["from" "nodes"
["or"
["=" "a" 1]
["and"
["=" "b" 2]
["=" "c" 3]]
["=" "d" 4]]]
"nodes { a = 1 or b = 2 and (c = 3 or d = 4) }"
["from" "nodes"
["or"
["=" "a" 1]
["and"
["=" "b" 2]
["or"
["=" "c" 3]
["=" "d" 4]]]]]
"nodes { a = 1 or b = 2 and !(c = 3 or d = 4) }"
["from" "nodes"
["or"
["=" "a" 1]
["and"
["=" "b" 2]
["not"
["or"
["=" "c" 3]
["=" "d" 4]]]]]]
"nodes { a = 1 or b = 2 and (!c = 3 or d = 4) }"
["from" "nodes"
["or"
["=" "a" 1]
["and"
["=" "b" 2]
["or"
["not"
["=" "c" 3]]
["=" "d" 4]]]]]
    ;; Whitespace around parentheses
"nodes { ( a = 1 or b = 2) }"
["from" "nodes"
["or"
["=" "a" 1]
["=" "b" 2]]]
"nodes { (a = 1 or b = 2 ) }"
["from" "nodes"
["or"
["=" "a" 1]
["=" "b" 2]]]
"nodes[] {}"
["from" "nodes"
["extract" []]]
"nodes[a, b, c] {}"
["from" "nodes"
["extract" ["a" "b" "c"]]]
"nodes[a, b, c] { a = 1 }"
["from" "nodes"
["extract" ["a" "b" "c"]
["=" "a" 1]]]
"inventory[facts.os.family, facts.os.release] { facts.os.family = \"Debian\" }"
["from" "inventory"
["extract" ["facts.os.family" "facts.os.release"]
["=" "facts.os.family" "Debian"]]]
"nodes[count(a)] {}"
["from" "nodes"
["extract" [["function" "count" "a"]]]]
"nodes[count()] {}"
["from" "nodes"
["extract" [["function" "count"]]]]
"nodes [a, b, c] { a in resources [x] { x = 1 } }"
["from" "nodes"
["extract" ["a" "b" "c"]
["in" "a"
["from" "resources"
["extract" ["x"]
["=" "x" 1]]]]]]
"nodes[a, b, c] { resources { x = 1 } }"
["from" "nodes"
["extract" ["a" "b" "c"]
["subquery" "resources"
["=" "x" 1]]]]
"nodes[a, b, c] { [a, b] in resources [a, b] { x = 1 }}"
["from" "nodes"
["extract" ["a" "b" "c"]
["in" ["a" "b"]
["from" "resources"
["extract" ["a" "b"]
["=" "x" 1]]]]]]
"nodes[a, b, c] { [a, b] in [1, 2] }"
["from" "nodes"
["extract" ["a" "b" "c"]
["in" ["a" "b"]
["array" [1 2]]]]]
"nodes[a,b,c] { resources { x = 1 } }"
["from" "nodes"
["extract" ["a" "b" "c"]
["subquery" "resources"
["=" "x" 1]]]]
"facts[value] { [certname,name] in fact_contents[certname,name] { value < 100 }}"
["from" "facts"
["extract" ["value"]
["in" ["certname" "name"]
["from" "fact_contents"
["extract" ["certname" "name"] ["<" "value" 100]]]]]]
"facts[value] { fact_contents { value < 100 } }"
["from" "facts"
["extract" ["value"]
["subquery" "fact_contents"
["<" "value" 100]]]]
"facts[name, count()] { group by name }"
["from" "facts"
["extract"
["name" ["function" "count"]]
["group_by" "name"]]]
"facts[name, count(value)] { certname ~ 'web.*' group by name }"
["from" "facts"
["extract" ["name" ["function" "count" "value"]]
["~" "certname" "web.*"]
["group_by" "name"]]]
"events[count(), status, certname] { certname ~ 'web.*' group by status, certname }"
["from" "events"
["extract", [["function" "count"] "status" "certname"],
["~" "certname" "web.*"]
["group_by" "status" "certname"]]]
"reports{limit 1}"
["from" "reports"
["limit" 1]]
"reports {limit 1 offset 1}"
["from" "reports"
["limit" 1] ["offset" 1]]
"reports {certname = 'foo' limit 1 offset 1}"
["from" "reports"
["=" "certname" "foo"]
["limit" 1] ["offset" 1]]
"reports[certname, receive_time]{certname = 'foo' limit 1 offset 1}"
["from" "reports"
["extract" ["certname" "receive_time"] ["=" "certname" "foo"]]
["limit" 1] ["offset" 1]]
"reports[certname]{certname = 'foo' limit 10 order by certname}"
["from" "reports"
["extract" ["certname"] ["=" "certname" "foo"]]
["limit" 10] ["order_by" ["certname"]]]
"reports[certname]{certname = 'foo' limit 10 order by certname desc}"
["from" "reports"
["extract" ["certname"] ["=" "certname" "foo"]]
["limit" 10] ["order_by" [["certname" "desc"]]]]
"reports[certname]{certname = 'foo' limit 10 order by certname desc, receive_time asc}"
["from" "reports"
["extract" ["certname"] ["=" "certname" "foo"]]
["limit" 10] ["order_by" [["certname" "desc"] ["receive_time" "asc"]]]]
"reports[certname]{certname = 'foo' limit 10 order by certname, receive_time desc}"
["from" "reports"
["extract" ["certname"] ["=" "certname" "foo"]]
["limit" 10] ["order_by" ["certname" ["receive_time" "desc"]]]]
"reports[certname]{certname = 'foo' order by certname desc, receive_time asc limit 10}"
["from" "reports"
["extract" ["certname"] ["=" "certname" "foo"]]
["order_by" [["certname" "desc"] ["receive_time" "asc"]]] ["limit" 10]]
"reports{receive_time > '2016-02-07T08:45:42.170687300Z'}"
["from" "reports"
[">" "receive_time" "2016-02-07T08:45:42.170687300Z"]]
"reports{receive_time >= '2016-02-07T08:45:42.170687300Z'}"
["from" "reports"
[">=" "receive_time" "2016-02-07T08:45:42.170687300Z"]]
"reports{receive_time <= '2016-02-07T08:45:42.170687300Z'}"
["from" "reports"
["<=" "receive_time" "2016-02-07T08:45:42.170687300Z"]]
"reports{receive_time < '2016-02-07T08:45:42.170687300Z'}"
["from" "reports"
["<" "receive_time" "2016-02-07T08:45:42.170687300Z"]]))
|
e4b954f8aa87b7e57bd4781f5c91b7c50227a6a4553b70da5c88a62e155b7cb2
|
YPares/Haskell-OpenGL3.1-Tutos
|
main.hs
|
{-# LANGUAGE ImplicitParams, RecordWildCards,
NoMonomorphismRestriction #-}
module Main where
import qualified Graphics.UI.GLFW as W
import Graphics.Rendering.OpenGL.Raw
import Control.Monad
import Control.Applicative
import Data.Function
import Foreign
import Foreign.C.String
withNewPtr f = alloca (\p -> f p >> peek p)
checkStatus statusFlag glGetFn glInfoLogFn id = do
let fetch info = withNewPtr (glGetFn id info)
status <- toBool <$> fetch statusFlag
logLength <- fetch gl_INFO_LOG_LENGTH
when (logLength > 0) $
allocaArray0 (fromIntegral logLength) $ \msgPtr -> do
glInfoLogFn id logLength nullPtr msgPtr
peekCString msgPtr >>=
if status then ?log else ?err
return status
loadShader shaderTypeFlag filePath = do
code <- readFile filePath
id <- glCreateShader shaderTypeFlag
withCString code $ \codePtr ->
with codePtr $ \codePtrPtr ->
glShaderSource id 1 codePtrPtr nullPtr
?log $ "Compiling shader: " ++ filePath
glCompileShader id
checkStatus gl_COMPILE_STATUS glGetShaderiv glGetShaderInfoLog id
return id
loadProgram vertFP fragFP = do
shaderIds <- mapM (uncurry loadShader)
[(gl_VERTEX_SHADER, vertFP)
,(gl_FRAGMENT_SHADER, fragFP)]
progId <- glCreateProgram
?log "Linking program"
mapM_ (glAttachShader progId) shaderIds
glLinkProgram progId
checkStatus gl_LINK_STATUS glGetProgramiv glGetProgramInfoLog progId
mapM_ glDeleteShader shaderIds
return progId
data GLIds = GLIds { progId :: !GLuint, vertexArrayId :: !GLuint
, vertexBufferId :: !GLuint }
initGLStuff = do
glClearColor 0 0 0.4 0
progId <- loadProgram "simple.vert" "simple.frag"
vertexArrayId <- withNewPtr (glGenVertexArrays 1)
glBindVertexArray vertexArrayId
let vertexBufferData = [-1, -1, 0
, 1, -1, 0
, 0, 1, 0]
vertexBufferId <- withNewPtr (glGenBuffers 1)
glBindBuffer gl_ARRAY_BUFFER vertexBufferId
withArrayLen vertexBufferData $ \length ptr ->
glBufferData gl_ARRAY_BUFFER (fromIntegral (length *
sizeOf (undefined :: GLfloat)))
(ptr :: Ptr GLfloat) gl_STATIC_DRAW
return GLIds{..}
(<&&>) = liftA2 (&&)
mainLoop GLIds{..} = fix $ \loop -> do
glClear gl_COLOR_BUFFER_BIT
glUseProgram progId
glEnableVertexAttribArray 0 -- 1st attribute: vertices
glBindBuffer gl_ARRAY_BUFFER vertexBufferId
glVertexAttribPointer 0 -- attribute 0 in the shader
                      3 -- we draw 3 vertices
gl_FLOAT -- coordinates type
(fromBool False) -- normalized?
0 -- stride
nullPtr -- vertex buffer offset
  glDrawArrays gl_TRIANGLES 0 3 -- from 0, 3 vertices
glDisableVertexAttribArray 0
W.swapBuffers
continue <- W.windowIsOpen
<&&> (not <$> W.keyIsPressed W.KeyEsc)
when continue loop
cleanUpGLStuff GLIds{..} = do
with vertexBufferId $ glDeleteBuffers 1
with vertexArrayId $ glDeleteVertexArrays 1
main = do
let ?log = putStrLn
?err = error
W.initialize
success <- W.openWindow $ W.defaultDisplayOptions
{ W.displayOptions_numFsaaSamples = Just 4
, W.displayOptions_openGLVersion = (3, 1)}
--, W.displayOptions_openGLProfile = W.CoreProfile }
when (not success) $ do
W.terminate
?err "GLFW couldn't open a window."
W.enableKeyRepeat
ids <- initGLStuff
mainLoop ids
cleanUpGLStuff ids
W.terminate
?log "Normal termination."
| null |
https://raw.githubusercontent.com/YPares/Haskell-OpenGL3.1-Tutos/7a027b927d061fbd26138cb7357c40c4cacbc927/tutorial02/main.hs
|
haskell
|
# LANGUAGE ImplicitParams, RecordWildCards,
NoMonomorphismRestriction #
1st attribute: vertices
attribute 0 in the shader
coordinates type
normalized?
stride
vertex buffer offset
, W.displayOptions_openGLProfile = W.CoreProfile }
|
module Main where
import qualified Graphics.UI.GLFW as W
import Graphics.Rendering.OpenGL.Raw
import Control.Monad
import Control.Applicative
import Data.Function
import Foreign
import Foreign.C.String
withNewPtr f = alloca (\p -> f p >> peek p)
checkStatus statusFlag glGetFn glInfoLogFn id = do
let fetch info = withNewPtr (glGetFn id info)
status <- toBool <$> fetch statusFlag
logLength <- fetch gl_INFO_LOG_LENGTH
when (logLength > 0) $
allocaArray0 (fromIntegral logLength) $ \msgPtr -> do
glInfoLogFn id logLength nullPtr msgPtr
peekCString msgPtr >>=
if status then ?log else ?err
return status
loadShader shaderTypeFlag filePath = do
code <- readFile filePath
id <- glCreateShader shaderTypeFlag
withCString code $ \codePtr ->
with codePtr $ \codePtrPtr ->
glShaderSource id 1 codePtrPtr nullPtr
?log $ "Compiling shader: " ++ filePath
glCompileShader id
checkStatus gl_COMPILE_STATUS glGetShaderiv glGetShaderInfoLog id
return id
loadProgram vertFP fragFP = do
shaderIds <- mapM (uncurry loadShader)
[(gl_VERTEX_SHADER, vertFP)
,(gl_FRAGMENT_SHADER, fragFP)]
progId <- glCreateProgram
?log "Linking program"
mapM_ (glAttachShader progId) shaderIds
glLinkProgram progId
checkStatus gl_LINK_STATUS glGetProgramiv glGetProgramInfoLog progId
mapM_ glDeleteShader shaderIds
return progId
data GLIds = GLIds { progId :: !GLuint, vertexArrayId :: !GLuint
, vertexBufferId :: !GLuint }
initGLStuff = do
glClearColor 0 0 0.4 0
progId <- loadProgram "simple.vert" "simple.frag"
vertexArrayId <- withNewPtr (glGenVertexArrays 1)
glBindVertexArray vertexArrayId
let vertexBufferData = [-1, -1, 0
, 1, -1, 0
, 0, 1, 0]
vertexBufferId <- withNewPtr (glGenBuffers 1)
glBindBuffer gl_ARRAY_BUFFER vertexBufferId
withArrayLen vertexBufferData $ \length ptr ->
glBufferData gl_ARRAY_BUFFER (fromIntegral (length *
sizeOf (undefined :: GLfloat)))
(ptr :: Ptr GLfloat) gl_STATIC_DRAW
return GLIds{..}
(<&&>) = liftA2 (&&)
mainLoop GLIds{..} = fix $ \loop -> do
glClear gl_COLOR_BUFFER_BIT
glUseProgram progId
glEnableVertexAttribArray 0
glBindBuffer gl_ARRAY_BUFFER vertexBufferId
glVertexAttribPointer 0 3 gl_FLOAT (fromBool False) 0 nullPtr
glDrawArrays gl_TRIANGLES 0 3
glDisableVertexAttribArray 0
W.swapBuffers
continue <- W.windowIsOpen
<&&> (not <$> W.keyIsPressed W.KeyEsc)
when continue loop
cleanUpGLStuff GLIds{..} = do
with vertexBufferId $ glDeleteBuffers 1
with vertexArrayId $ glDeleteVertexArrays 1
main = do
let ?log = putStrLn
?err = error
W.initialize
success <- W.openWindow $ W.defaultDisplayOptions
{ W.displayOptions_numFsaaSamples = Just 4
, W.displayOptions_openGLVersion = (3, 1)}
when (not success) $ do
W.terminate
?err "GLFW couldn't open a window."
W.enableKeyRepeat
ids <- initGLStuff
mainLoop ids
cleanUpGLStuff ids
W.terminate
?log "Normal termination."
|
1af0a31bb1ab513a25af70187820f0e32ce7c26940458e73da7eec14111c7871
|
oakes/play-cljs-examples
|
prod.clj
|
(require
'[cljs.build.api :as api]
'[clojure.java.io :as io])
(defn delete-children-recursively! [f]
(when (.isDirectory f)
(doseq [f2 (.listFiles f)]
(delete-children-recursively! f2)))
(when (.exists f) (io/delete-file f)))
(def out-file "resources/public/main.js")
(def out-dir "resources/public/main.out")
(println "Building main.js")
(delete-children-recursively! (io/file out-dir))
(api/build "src" {:main 'flappy-bird-clone.core
:optimizations :advanced
:output-to out-file
:output-dir out-dir
:infer-externs true})
(delete-children-recursively! (io/file out-dir))
(println "Build complete:" out-file)
(System/exit 0)
| null |
https://raw.githubusercontent.com/oakes/play-cljs-examples/88716f19d3cd59c2bf7fb4029f5792e00014e10a/flappy-bird-clone/prod.clj
|
clojure
|
(require
'[cljs.build.api :as api]
'[clojure.java.io :as io])
(defn delete-children-recursively! [f]
(when (.isDirectory f)
(doseq [f2 (.listFiles f)]
(delete-children-recursively! f2)))
(when (.exists f) (io/delete-file f)))
(def out-file "resources/public/main.js")
(def out-dir "resources/public/main.out")
(println "Building main.js")
(delete-children-recursively! (io/file out-dir))
(api/build "src" {:main 'flappy-bird-clone.core
:optimizations :advanced
:output-to out-file
:output-dir out-dir
:infer-externs true})
(delete-children-recursively! (io/file out-dir))
(println "Build complete:" out-file)
(System/exit 0)
|
|
27483f1d9bbdf62a0feccb857fe0679864a98027ae4834efcb4637bdaa88ef01
|
YoEight/lambda-database-experiment
|
Operation.hs
|
{-# LANGUAGE ExistentialQuantification #-}
{-# LANGUAGE GADTs #-}
--------------------------------------------------------------------------------
-- |
-- Module : Protocol.Operation
-- Copyright : (C) 2017
-- License : (see the file LICENSE)
--
-- Maintainer : < >
-- Stability : provisional
-- Portability : non-portable
--
--------------------------------------------------------------------------------
module Protocol.Operation where
--------------------------------------------------------------------------------
import Data.List.NonEmpty
--------------------------------------------------------------------------------
import Lambda.Prelude
--------------------------------------------------------------------------------
import Protocol.Package
import Protocol.Types
--------------------------------------------------------------------------------
data Request a where
WriteEvents :: StreamName
-> ExpectedVersion
-> NonEmpty Event
-> Request WriteEventsResp
ReadEvents :: StreamName
-> Batch
-> Request ReadEventsResp
--------------------------------------------------------------------------------
data WriteEventsResp =
WriteEventsResp EventNumber WriteResultFlag
--------------------------------------------------------------------------------
data ReadEventsResp =
ReadEventsResp StreamName [SavedEvent] ReadResultFlag EventNumber Bool
--------------------------------------------------------------------------------
data Operation a =
Operation { operationId :: PkgId
, operationType :: Request a
}
--------------------------------------------------------------------------------
data SomeOperation = forall a. Typeable a => SomeOperation (Operation a)
--------------------------------------------------------------------------------
data Response a =
Response { responseId :: PkgId
, responseType :: a
}
| null |
https://raw.githubusercontent.com/YoEight/lambda-database-experiment/da4fab8bd358fb8fb78412c805d6f5bc05854432/lambda-protocol/library/Protocol/Operation.hs
|
haskell
|
# LANGUAGE GADTs #
------------------------------------------------------------------------------
|
Module : Protocol.Operation
License : (see the file LICENSE)
Stability : provisional
Portability : non-portable
------------------------------------------------------------------------------
------------------------------------------------------------------------------
------------------------------------------------------------------------------
------------------------------------------------------------------------------
------------------------------------------------------------------------------
------------------------------------------------------------------------------
------------------------------------------------------------------------------
------------------------------------------------------------------------------
------------------------------------------------------------------------------
------------------------------------------------------------------------------
|
{-# LANGUAGE ExistentialQuantification #-}
-- Copyright : (C) 2017
-- Maintainer : < >
module Protocol.Operation where
import Data.List.NonEmpty
import Lambda.Prelude
import Protocol.Package
import Protocol.Types
data Request a where
WriteEvents :: StreamName
-> ExpectedVersion
-> NonEmpty Event
-> Request WriteEventsResp
ReadEvents :: StreamName
-> Batch
-> Request ReadEventsResp
data WriteEventsResp =
WriteEventsResp EventNumber WriteResultFlag
data ReadEventsResp =
ReadEventsResp StreamName [SavedEvent] ReadResultFlag EventNumber Bool
data Operation a =
Operation { operationId :: PkgId
, operationType :: Request a
}
data SomeOperation = forall a. Typeable a => SomeOperation (Operation a)
data Response a =
Response { responseId :: PkgId
, responseType :: a
}
|
2229e42ad658ce22d1435f6b94e0ee721b6f3fb2d7cb53dfd351a89c3b7c565a
|
anwarmamat/cmsc330fall18-public
|
disc.ml
|
(* Polymorphic Examples*)
let f x y = x :: [y]
let g p q = match (p, q) with
| ([], []) -> 1
| (_, _) -> 2
(* Chipotle record *)
type chipotle_order = { item : string; cost : float }
(* Given a list of Chipotle orders, find the most expensive cost. Return 0.0 for empty list. *)
let find_expensive (orders : chipotle_order list) : float =
failwith "unimplemented"
(* Map and fold are defined here for you. You may use them. *)
let rec map f l =
match l with
| [] -> []
| h :: t -> (f h) :: (map f t)
let rec foldl f acc l =
match l with
| [] -> acc
| h :: t -> foldl f (f acc h) t
let rec foldr f l acc =
match l with
| [] -> acc
| h::t -> f h (foldr f t acc)
(* Name record *)
type name = { first : string ; middle : string option; last : string }
(* Returns full name string representation of the name_records in l. *)
let full_names (l : name list) : string list =
failwith "unimplemented"
(* Vector record *)
type vector = { x : int; y : int }
(* Returns the sum of the vectors in l. *)
let sum_vectors (l : vector list) : vector =
failwith "unimplemented"
(* Returns the sum of the ints in the lists in l. *)
let sum_list_list (l : int list list) : int =
failwith "unimplemented"
(* Write your own map function using the provided fold function *)
let my_map (f : 'a -> 'b) (l : 'a list) : 'b list =
failwith "unimplemented"
(* OPTIONAL: Similar to foldr except it returns all intermediate
   accumulators instead of just the last one (including the first one).
   This one is tested locally, but not on the submit server. Try it! *)
let my_scanr (f : 'a -> 'b -> 'b) (l : 'a list) (acc : 'b) : 'b list =
failwith "unimplemented"
| null |
https://raw.githubusercontent.com/anwarmamat/cmsc330fall18-public/12585d98d45f954f75e2f78df3062444f5f97cf6/discussions/disc4/src/disc.ml
|
ocaml
|
Polymorphic Examples
Map and fold are defined here for you. You may use them.
Name record
Vector record
Write your own map function using the provided fold function
|
let f x y = x :: [y]
let g p q = match (p, q) with
| ([], []) -> 1
| (_, _) -> 2
(* Chipotle record *)
type chipotle_order = { item : string; cost : float }
(* Given a list of Chipotle orders, find the most expensive cost. Return 0.0 for empty list. *)
let find_expensive (orders : chipotle_order list) : float =
failwith "unimplemented"
let rec map f l =
match l with
| [] -> []
| h :: t -> (f h) :: (map f t)
let rec foldl f acc l =
match l with
| [] -> acc
| h :: t -> foldl f (f acc h) t
let rec foldr f l acc =
match l with
| [] -> acc
| h::t -> f h (foldr f t acc)
type name = { first : string ; middle : string option; last : string }
(* Returns full name string representation of the name_records in l. *)
let full_names (l : name list) : string list =
failwith "unimplemented"
type vector = { x : int; y : int }
(* Returns the sum of the vectors in l. *)
let sum_vectors (l : vector list) : vector =
failwith "unimplemented"
(* Returns the sum of the ints in the lists in l. *)
let sum_list_list (l : int list list) : int =
failwith "unimplemented"
let my_map (f : 'a -> 'b) (l : 'a list) : 'b list =
failwith "unimplemented"
(* OPTIONAL: Similar to foldr except it returns all intermediate
   accumulators instead of just the last one (including the first one).
   This one is tested locally, but not on the submit server. Try it! *)
let my_scanr (f : 'a -> 'b -> 'b) (l : 'a list) (acc : 'b) : 'b list =
failwith "unimplemented"
|
05eaa47f00e4b291b9326a1866240fad54194e8a8c5d0bbb8221529c3ba20ea7
|
brendanhay/amazonka
|
AccessControlRuleEffect.hs
|
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE DerivingStrategies #-}
{-# LANGUAGE GeneralizedNewtypeDeriving #-}
{-# LANGUAGE LambdaCase #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE PatternSynonyms #-}
{-# LANGUAGE StrictData #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
-- Derived from AWS service descriptions, licensed under Apache 2.0.
-- |
-- Module : Amazonka.WorkMail.Types.AccessControlRuleEffect
-- Copyright : (c) 2013-2023
-- License : Mozilla Public License, v. 2.0.
-- Maintainer : < brendan.g.hay+ >
-- Stability : auto-generated
-- Portability : non-portable (GHC extensions)
module Amazonka.WorkMail.Types.AccessControlRuleEffect
( AccessControlRuleEffect
( ..,
AccessControlRuleEffect_ALLOW,
AccessControlRuleEffect_DENY
),
)
where
import qualified Amazonka.Core as Core
import qualified Amazonka.Data as Data
import qualified Amazonka.Prelude as Prelude
newtype AccessControlRuleEffect = AccessControlRuleEffect'
{ fromAccessControlRuleEffect ::
Data.Text
}
deriving stock
( Prelude.Show,
Prelude.Read,
Prelude.Eq,
Prelude.Ord,
Prelude.Generic
)
deriving newtype
( Prelude.Hashable,
Prelude.NFData,
Data.FromText,
Data.ToText,
Data.ToByteString,
Data.ToLog,
Data.ToHeader,
Data.ToQuery,
Data.FromJSON,
Data.FromJSONKey,
Data.ToJSON,
Data.ToJSONKey,
Data.FromXML,
Data.ToXML
)
pattern AccessControlRuleEffect_ALLOW :: AccessControlRuleEffect
pattern AccessControlRuleEffect_ALLOW = AccessControlRuleEffect' "ALLOW"
pattern AccessControlRuleEffect_DENY :: AccessControlRuleEffect
pattern AccessControlRuleEffect_DENY = AccessControlRuleEffect' "DENY"
{-# COMPLETE
  AccessControlRuleEffect_ALLOW,
  AccessControlRuleEffect_DENY,
  AccessControlRuleEffect'
  #-}
| null |
https://raw.githubusercontent.com/brendanhay/amazonka/09f52b75d2cfdff221b439280d3279d22690d6a6/lib/services/amazonka-workmail/gen/Amazonka/WorkMail/Types/AccessControlRuleEffect.hs
|
haskell
|
# LANGUAGE OverloadedStrings #
# LANGUAGE StrictData #
|
Module : Amazonka.WorkMail.Types.AccessControlRuleEffect
Stability : auto-generated
|
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE DerivingStrategies #-}
{-# LANGUAGE GeneralizedNewtypeDeriving #-}
{-# LANGUAGE LambdaCase #-}
{-# LANGUAGE PatternSynonyms #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
-- Derived from AWS service descriptions, licensed under Apache 2.0.
-- Copyright : (c) 2013-2023
-- License : Mozilla Public License, v. 2.0.
-- Maintainer : < brendan.g.hay+ >
-- Portability : non-portable (GHC extensions)
module Amazonka.WorkMail.Types.AccessControlRuleEffect
( AccessControlRuleEffect
( ..,
AccessControlRuleEffect_ALLOW,
AccessControlRuleEffect_DENY
),
)
where
import qualified Amazonka.Core as Core
import qualified Amazonka.Data as Data
import qualified Amazonka.Prelude as Prelude
newtype AccessControlRuleEffect = AccessControlRuleEffect'
{ fromAccessControlRuleEffect ::
Data.Text
}
deriving stock
( Prelude.Show,
Prelude.Read,
Prelude.Eq,
Prelude.Ord,
Prelude.Generic
)
deriving newtype
( Prelude.Hashable,
Prelude.NFData,
Data.FromText,
Data.ToText,
Data.ToByteString,
Data.ToLog,
Data.ToHeader,
Data.ToQuery,
Data.FromJSON,
Data.FromJSONKey,
Data.ToJSON,
Data.ToJSONKey,
Data.FromXML,
Data.ToXML
)
pattern AccessControlRuleEffect_ALLOW :: AccessControlRuleEffect
pattern AccessControlRuleEffect_ALLOW = AccessControlRuleEffect' "ALLOW"
pattern AccessControlRuleEffect_DENY :: AccessControlRuleEffect
pattern AccessControlRuleEffect_DENY = AccessControlRuleEffect' "DENY"
{-# COMPLETE
  AccessControlRuleEffect_ALLOW,
  AccessControlRuleEffect_DENY,
  AccessControlRuleEffect'
  #-}
|
0c4c736b7d1e4001ff4ec41d8d2fd47c457daffd58ded60c5e6f4989fdb92d6c
|
Helium4Haskell/helium
|
Hiding2.hs
|
x :: Char
x = 'a'
y, z :: Int
y = 200
z = 300
data X = X
deriving Show
| null |
https://raw.githubusercontent.com/Helium4Haskell/helium/5928bff479e6f151b4ceb6c69bbc15d71e29eb47/test/make/Hiding2.hs
|
haskell
|
x :: Char
x = 'a'
y, z :: Int
y = 200
z = 300
data X = X
deriving Show
|
|
81b06503e52f37f60acd451898bb5177c4574d07ca5120300cc799a906893084
|
immoh/clj-instagram
|
endpoints_test.clj
|
(ns clj-instagram.endpoints-test
(:require [midje.sweet :refer :all]
[clj-instagram.endpoints :as endpoints]))
(def opts {:access-token (System/getenv "CLJ_INSTAGRAM_ACCESS_TOKEN")
:client-secret (System/getenv "CLJ_INSTAGRAM_CLIENT_SECRET")})
;; Users
(fact
"Get user (self)" :integration
(endpoints/get-user opts)
=> (contains {:meta {:code 200}
:data (contains {:username anything})}))
(fact
"Get user (user id)" :integration
(endpoints/get-user "2987617812" opts)
=> (contains {:meta {:code 200}
:data (contains {:username "cljtest1"})}))
(fact
"Get recent media by me" :integration
(endpoints/get-recent-media-by-me opts)
=> (contains {:meta {:code 200}
:data (contains [])}))
(fact
"Get recent media by me with optional params" :integration
(endpoints/get-recent-media-by-me {:count 1} opts)
=> (contains {:meta {:code 200}
:data (contains [])}))
(fact
"Get recent media by user (user id)" :integration
(endpoints/get-recent-media-by-user "2987617812" opts)
=> (contains {:meta {:code 200}
:data (contains [(contains {:type "image"})])}))
(fact
"Get recent media by user (user id) with optional params" :integration
(endpoints/get-recent-media-by-user "2987617812" {:min-id "1196893517451621714_2987617812"} opts)
=> (contains {:meta {:code 200}
:data (contains [(contains {:type "image"})])}))
(fact
"Get liked media" :integration
(endpoints/get-liked-media opts)
=> (contains {:meta {:code 200}
:data (contains [])}))
(fact
"Get liked media with optional params" :integration
(endpoints/get-liked-media {:count 1} opts)
=> (contains {:meta {:code 200}
:data (contains [])}))
(fact
"Search users" :integration
(endpoints/search-users "cljtest" opts)
=> (contains {:meta {:code 200}
:data (contains [(contains {:username "cljtest1"})])}))
;; Relationships
(fact
"Get follows" :integration
(endpoints/get-follows opts)
=> (contains {:meta {:code 200}
:data (contains [(contains {:username "cljtest2"})])}))
(fact
"Get followed by" :integration
(endpoints/get-followed-by opts)
=> (contains {:meta {:code 200}
:data (contains [(contains {:username "cljtest2"})])}))
(fact
"Get requested by" :integration
(endpoints/get-requested-by opts)
=> {:meta {:code 200}
:data []})
(fact
"Get relationship" :integration
(endpoints/get-relationship "2987620865" opts)
=> (contains {:meta {:code 200}
:data (contains {:outgoing_status "follows"})}))
(fact
"Modify relationship" :integration
(endpoints/modify-relationship "2987620865" :unfollow opts)
=> (contains {:meta {:code 200}
:data (contains {:outgoing_status "none"})})
(endpoints/modify-relationship "2987620865" :follow opts)
=> (contains {:meta {:code 200}
:data (contains {:outgoing_status "follows"})}))
;; Media
(fact
"Get media by id" :integration
(endpoints/get-media-by-id "1196893517451621714_2987617812" opts)
=> (contains {:meta {:code 200}
:data (contains {:type "image"})}))
(fact
"Get media by shortcode" :integration
(endpoints/get-media-by-shortcode "BCcOFANq_VS" opts)
=> (contains {:meta {:code 200}
:data (contains {:type "image"})}))
(fact
"Search media" :integration
(endpoints/search-media 60.170833 24.9375 5000 opts)
=> (contains {:meta {:code 200}
:data (contains [(contains {:type "image"})])}))
;; Comments
(fact
"Get comments" :integration
(endpoints/get-comments "1196893517451621714_2987617812" opts)
=> (contains {:meta {:code 200}
:data (contains [(contains {:text "foo"})])}))
(fact
"Create comment" :integration
(endpoints/create-comment "1196893517451621714_2987617812" "foo" opts)
=> (contains {:meta {:code 200}
:data (contains {:text "foo"})}))
(fact
"Delete comment" :integration
(let [comment-id (-> (endpoints/create-comment "1196893517451621714_2987617812" "bar" opts)
(get-in [:data :id]))]
(endpoints/delete-comment "1196893517451621714_2987617812" comment-id opts)
=> {:meta {:code 200}
:data nil}))
;; Likes
(fact
"Get likes" :integration
(endpoints/get-likes "1196893517451621714_2987617812" opts)
=> (contains {:meta {:code 200}
:data (contains [(contains {:username "cljtest2"})])}))
(fact
"Set like" :integration
(endpoints/set-like "1196893517451621714_2987617812" opts)
=> {:meta {:code 200}
:data nil})
(fact
"Remove like" :integration
(endpoints/remove-like "1196893517451621714_2987617812" opts)
=> {:meta {:code 200}
:data nil})
;; Tags
(fact
"Get tag" :integration
(endpoints/get-tag "nofilter" opts)
=> (just {:meta {:code 200}
:data (just {:media_count pos?
:name "nofilter"})}))
(fact
"Get recently tagged media" :integration
(endpoints/get-recently-tagged-media "nofilter" opts)
=> (contains {:meta {:code 200}
:data (contains [(contains {:type "image"})])}))
(fact
"Get recently tagged media (with options)" :integration
(endpoints/get-recently-tagged-media "nofilter" {:count 1} opts)
=> (contains {:meta {:code 200}
:data (contains [(contains {:type "image"})])}))
(fact
"Search tags" :integration
(endpoints/search-tags "no" opts)
=> (contains {:meta {:code 200}
:data (contains [(contains {:name "nofilter"})])}))
;; Locations
(fact
"Get location" :integration
(endpoints/get-location "736780008" opts)
=> (contains {:meta {:code 200}
:data (contains {:name "Helsinki"})}))
(fact
"Get recent media from location" :integration
(endpoints/get-recent-media-from-location "736780008" opts)
=> (contains {:meta {:code 200}
:data (contains [(contains {:type "image"})])}))
(fact
"Get recent media from location (with options)" :integration
(endpoints/get-recent-media-from-location "736780008" {:max-id "1196893517451621714_2987617812"} opts)
=> (contains {:meta {:code 200}
:data (contains [(contains {:type "image"})])}))
(fact
"Search location by coordinates" :integration
(endpoints/search-locations {:lat 60.17 :lng 24.93 :distance 5000} opts)
=> (contains {:meta {:code 200}
:data (contains [(contains {:name "Tennispalatsi"})])}))
(fact
"Search location by Facebook places id" :integration
(endpoints/search-locations {:facebook-places-id "109595459060079"} opts)
=> (contains {:meta {:code 200}
:data (contains [(contains {:name "Helsinki"})])}))
| null |
https://raw.githubusercontent.com/immoh/clj-instagram/cfd703ed74463a3b93fd40ea63f819c7218230e5/test/clj_instagram/endpoints_test.clj
|
clojure
|
Users
Relationships
Media
Comments
Likes
Tags
Locations
|
(ns clj-instagram.endpoints-test
(:require [midje.sweet :refer :all]
[clj-instagram.endpoints :as endpoints]))
(def opts {:access-token (System/getenv "CLJ_INSTAGRAM_ACCESS_TOKEN")
:client-secret (System/getenv "CLJ_INSTAGRAM_CLIENT_SECRET")})
(fact
"Get user (self)" :integration
(endpoints/get-user opts)
=> (contains {:meta {:code 200}
:data (contains {:username anything})}))
(fact
"Get user (user id)" :integration
(endpoints/get-user "2987617812" opts)
=> (contains {:meta {:code 200}
:data (contains {:username "cljtest1"})}))
(fact
"Get recent media by me" :integration
(endpoints/get-recent-media-by-me opts)
=> (contains {:meta {:code 200}
:data (contains [])}))
(fact
"Get recent media by me with optional params" :integration
(endpoints/get-recent-media-by-me {:count 1} opts)
=> (contains {:meta {:code 200}
:data (contains [])}))
(fact
"Get recent media by user (user id)" :integration
(endpoints/get-recent-media-by-user "2987617812" opts)
=> (contains {:meta {:code 200}
:data (contains [(contains {:type "image"})])}))
(fact
"Get recent media by user (user id) with optional params" :integration
(endpoints/get-recent-media-by-user "2987617812" {:min-id "1196893517451621714_2987617812"} opts)
=> (contains {:meta {:code 200}
:data (contains [(contains {:type "image"})])}))
(fact
"Get liked media" :integration
(endpoints/get-liked-media opts)
=> (contains {:meta {:code 200}
:data (contains [])}))
(fact
"Get liked media with optional params" :integration
(endpoints/get-liked-media {:count 1} opts)
=> (contains {:meta {:code 200}
:data (contains [])}))
(fact
"Search users" :integration
(endpoints/search-users "cljtest" opts)
=> (contains {:meta {:code 200}
:data (contains [(contains {:username "cljtest1"})])}))
(fact
"Get follows" :integration
(endpoints/get-follows opts)
=> (contains {:meta {:code 200}
:data (contains [(contains {:username "cljtest2"})])}))
(fact
"Get followed by" :integration
(endpoints/get-followed-by opts)
=> (contains {:meta {:code 200}
:data (contains [(contains {:username "cljtest2"})])}))
(fact
"Get requested by" :integration
(endpoints/get-requested-by opts)
=> {:meta {:code 200}
:data []})
(fact
"Get relationship" :integration
(endpoints/get-relationship "2987620865" opts)
=> (contains {:meta {:code 200}
:data (contains {:outgoing_status "follows"})}))
(fact
"Modify relationship" :integration
(endpoints/modify-relationship "2987620865" :unfollow opts)
=> (contains {:meta {:code 200}
:data (contains {:outgoing_status "none"})})
(endpoints/modify-relationship "2987620865" :follow opts)
=> (contains {:meta {:code 200}
:data (contains {:outgoing_status "follows"})}))
(fact
"Get media by id" :integration
(endpoints/get-media-by-id "1196893517451621714_2987617812" opts)
=> (contains {:meta {:code 200}
:data (contains {:type "image"})}))
(fact
"Get media by shortcode" :integration
(endpoints/get-media-by-shortcode "BCcOFANq_VS" opts)
=> (contains {:meta {:code 200}
:data (contains {:type "image"})}))
(fact
"Search media" :integration
(endpoints/search-media 60.170833 24.9375 5000 opts)
=> (contains {:meta {:code 200}
:data (contains [(contains {:type "image"})])}))
(fact
"Get comments" :integration
(endpoints/get-comments "1196893517451621714_2987617812" opts)
=> (contains {:meta {:code 200}
:data (contains [(contains {:text "foo"})])}))
(fact
"Create comment" :integration
(endpoints/create-comment "1196893517451621714_2987617812" "foo" opts)
=> (contains {:meta {:code 200}
:data (contains {:text "foo"})}))
(fact
"Delete comment" :integration
(let [comment-id (-> (endpoints/create-comment "1196893517451621714_2987617812" "bar" opts)
(get-in [:data :id]))]
(endpoints/delete-comment "1196893517451621714_2987617812" comment-id opts)
=> {:meta {:code 200}
:data nil}))
(fact
"Get likes" :integration
(endpoints/get-likes "1196893517451621714_2987617812" opts)
=> (contains {:meta {:code 200}
:data (contains [(contains {:username "cljtest2"})])}))
(fact
"Set like" :integration
(endpoints/set-like "1196893517451621714_2987617812" opts)
=> {:meta {:code 200}
:data nil})
(fact
"Remove like" :integration
(endpoints/remove-like "1196893517451621714_2987617812" opts)
=> {:meta {:code 200}
:data nil})
(fact
"Get tag" :integration
(endpoints/get-tag "nofilter" opts)
=> (just {:meta {:code 200}
:data (just {:media_count pos?
:name "nofilter"})}))
(fact
"Get recently tagged media" :integration
(endpoints/get-recently-tagged-media "nofilter" opts)
=> (contains {:meta {:code 200}
:data (contains [(contains {:type "image"})])}))
(fact
"Get recently tagged media (with options)" :integration
(endpoints/get-recently-tagged-media "nofilter" {:count 1} opts)
=> (contains {:meta {:code 200}
:data (contains [(contains {:type "image"})])}))
(fact
"Search tags" :integration
(endpoints/search-tags "no" opts)
=> (contains {:meta {:code 200}
:data (contains [(contains {:name "nofilter"})])}))
(fact
"Get location" :integration
(endpoints/get-location "736780008" opts)
=> (contains {:meta {:code 200}
:data (contains {:name "Helsinki"})}))
(fact
"Get recent media from location" :integration
(endpoints/get-recent-media-from-location "736780008" opts)
=> (contains {:meta {:code 200}
:data (contains [(contains {:type "image"})])}))
(fact
"Get recent media from location (with options)" :integration
(endpoints/get-recent-media-from-location "736780008" {:max-id "1196893517451621714_2987617812"} opts)
=> (contains {:meta {:code 200}
:data (contains [(contains {:type "image"})])}))
(fact
"Search location by coordinates" :integration
(endpoints/search-locations {:lat 60.17 :lng 24.93 :distance 5000} opts)
=> (contains {:meta {:code 200}
:data (contains [(contains {:name "Tennispalatsi"})])}))
(fact
"Search location by Facebook places id" :integration
(endpoints/search-locations {:facebook-places-id "109595459060079"} opts)
=> (contains {:meta {:code 200}
:data (contains [(contains {:name "Helsinki"})])}))
|
c198b158df389e6911c904dd779ed52f2afef8ba2abe2a10dbf4d9c04c2aba82
|
input-output-hk/plutus
|
Parser.hs
|
{-# LANGUAGE OverloadedStrings #-}
-- | Parsers for PIR terms in DefaultUni.
module PlutusIR.Parser
( parse
, program
, pType
, pTerm
, parseProgram
, Parser
, SourcePos
) where
import PlutusCore.Annotation
import PlutusCore.Default qualified as PLC (DefaultFun, DefaultUni)
import PlutusCore.Parser hiding (parseProgram, program)
import PlutusIR as PIR
import PlutusIR.MkPir qualified as PIR
import PlutusPrelude
import Prelude hiding (fail)
import Control.Monad.Combinators.NonEmpty qualified as NE
import Control.Monad.Except (MonadError)
import Data.Text (Text)
import PlutusCore (MonadQuote)
import PlutusCore.Error (AsParserErrorBundle)
import Text.Megaparsec hiding (ParseError, State, many, parse, some)
-- | A parsable PIR pTerm.
type PTerm = PIR.Term TyName Name PLC.DefaultUni PLC.DefaultFun SrcSpan
recursivity :: Parser Recursivity
recursivity = trailingWhitespace . inParens $
(symbol "rec" $> Rec) <|> (symbol "nonrec" $> NonRec)
strictness :: Parser Strictness
strictness = trailingWhitespace . inParens $
(symbol "strict" $> Strict) <|> (symbol "nonstrict" $> NonStrict)
varDecl :: Parser (VarDecl TyName Name PLC.DefaultUni SrcSpan)
varDecl = withSpan $ \sp ->
inParens $ VarDecl sp <$> (symbol "vardecl" *> trailingWhitespace name) <*> pType
tyVarDecl :: Parser (TyVarDecl TyName SrcSpan)
tyVarDecl = withSpan $ \sp ->
inParens $ TyVarDecl sp <$> (symbol "tyvardecl" *> trailingWhitespace tyName) <*> kind
datatype :: Parser (Datatype TyName Name PLC.DefaultUni SrcSpan)
datatype = withSpan $ \sp ->
inParens $
Datatype sp
<$> (symbol "datatype" *> tyVarDecl)
<*> many tyVarDecl
<*> trailingWhitespace name
<*> many varDecl
binding :: Parser (Binding TyName Name PLC.DefaultUni PLC.DefaultFun SrcSpan)
binding = withSpan $ \sp ->
inParens . choice $ try <$>
[ TermBind sp <$> (symbol "termbind" *> strictness) <*> varDecl <*> pTerm
, TypeBind sp <$> (symbol "typebind" *> tyVarDecl) <*> pType
, DatatypeBind sp <$> (symbol "datatypebind" *> datatype)
]
varTerm :: Parser PTerm
varTerm = withSpan $ \sp ->
PIR.Var sp <$> name
-- A small type wrapper for parsers that are parametric in the type of term they parse
type Parametric
= Parser PTerm -> Parser PTerm
absTerm :: Parametric
absTerm tm = withSpan $ \sp ->
inParens $ PIR.tyAbs sp <$> (symbol "abs" *> trailingWhitespace tyName) <*> kind <*> tm
lamTerm :: Parametric
lamTerm tm = withSpan $ \sp ->
inParens $ PIR.lamAbs sp <$> (symbol "lam" *> trailingWhitespace name) <*> pType <*> tm
conTerm :: Parametric
conTerm _tm = withSpan $ \sp ->
inParens $ PIR.constant sp <$> (symbol "con" *> constant)
iwrapTerm :: Parametric
iwrapTerm tm = withSpan $ \sp ->
inParens $ PIR.iWrap sp <$> (symbol "iwrap" *> pType) <*> pType <*> tm
builtinTerm :: Parametric
builtinTerm _tm = withSpan $ \sp ->
inParens $ PIR.builtin sp <$> (symbol "builtin" *> builtinFunction)
unwrapTerm :: Parametric
unwrapTerm tm = withSpan $ \sp ->
inParens $ PIR.unwrap sp <$> (symbol "unwrap" *> tm)
errorTerm :: Parametric
errorTerm _tm = withSpan $ \sp ->
inParens $ PIR.error sp <$> (symbol "error" *> pType)
letTerm :: Parser PTerm
letTerm = withSpan $ \sp ->
inParens $ Let sp <$> (symbol "let" *> recursivity) <*> NE.some (try binding) <*> pTerm
appTerm :: Parametric
appTerm tm = withSpan $ \sp ->
inBrackets $ PIR.mkIterApp sp <$> tm <*> some tm
tyInstTerm :: Parametric
tyInstTerm tm = withSpan $ \sp ->
inBraces $ PIR.mkIterInst sp <$> tm <*> some pType
pTerm :: Parser PTerm
pTerm = leadingWhitespace go
where
go = choice $ try <$>
[ varTerm
, letTerm
, absTerm go
, lamTerm go
, conTerm go
, iwrapTerm go
, builtinTerm go
, unwrapTerm go
, errorTerm go
, tyInstTerm go
, appTerm go
]
program :: Parser (Program TyName Name PLC.DefaultUni PLC.DefaultFun SrcSpan)
program = leadingWhitespace go
where
go = do
prog <- withSpan $ \sp ->
inParens $ Program sp <$> (symbol "program" *> version) <*> pTerm
notFollowedBy anySingle
pure prog
-- | Parse a PIR program. The resulting program will have fresh names. The
-- underlying monad must be capable of handling any parse errors. This passes
-- "test" to the parser as the name of the input stream; to supply a name
-- explicity, use `parse program <name> <input>`.
parseProgram ::
(AsParserErrorBundle e, MonadError e m, MonadQuote m)
=> Text
-> m (Program TyName Name PLC.DefaultUni PLC.DefaultFun SrcSpan)
parseProgram = parseGen program
| null |
https://raw.githubusercontent.com/input-output-hk/plutus/bb9b5a18c26476fbf6b2f446ab267706426fec3a/plutus-core/plutus-ir/src/PlutusIR/Parser.hs
|
haskell
|
# LANGUAGE OverloadedStrings #
| A parsable PIR pTerm.
A small type wrapper for parsers that are parametric in the type of term they parse
| Parse a PIR program. The resulting program will have fresh names. The
underlying monad must be capable of handling any parse errors. This passes
"test" to the parser as the name of the input stream; to supply a name
explicity, use `parse program <name> <input>`.
|
-- | Parsers for PIR terms in DefaultUni.
module PlutusIR.Parser
( parse
, program
, pType
, pTerm
, parseProgram
, Parser
, SourcePos
) where
import PlutusCore.Annotation
import PlutusCore.Default qualified as PLC (DefaultFun, DefaultUni)
import PlutusCore.Parser hiding (parseProgram, program)
import PlutusIR as PIR
import PlutusIR.MkPir qualified as PIR
import PlutusPrelude
import Prelude hiding (fail)
import Control.Monad.Combinators.NonEmpty qualified as NE
import Control.Monad.Except (MonadError)
import Data.Text (Text)
import PlutusCore (MonadQuote)
import PlutusCore.Error (AsParserErrorBundle)
import Text.Megaparsec hiding (ParseError, State, many, parse, some)
type PTerm = PIR.Term TyName Name PLC.DefaultUni PLC.DefaultFun SrcSpan
recursivity :: Parser Recursivity
recursivity = trailingWhitespace . inParens $
(symbol "rec" $> Rec) <|> (symbol "nonrec" $> NonRec)
strictness :: Parser Strictness
strictness = trailingWhitespace . inParens $
(symbol "strict" $> Strict) <|> (symbol "nonstrict" $> NonStrict)
varDecl :: Parser (VarDecl TyName Name PLC.DefaultUni SrcSpan)
varDecl = withSpan $ \sp ->
inParens $ VarDecl sp <$> (symbol "vardecl" *> trailingWhitespace name) <*> pType
tyVarDecl :: Parser (TyVarDecl TyName SrcSpan)
tyVarDecl = withSpan $ \sp ->
inParens $ TyVarDecl sp <$> (symbol "tyvardecl" *> trailingWhitespace tyName) <*> kind
datatype :: Parser (Datatype TyName Name PLC.DefaultUni SrcSpan)
datatype = withSpan $ \sp ->
inParens $
Datatype sp
<$> (symbol "datatype" *> tyVarDecl)
<*> many tyVarDecl
<*> trailingWhitespace name
<*> many varDecl
binding :: Parser (Binding TyName Name PLC.DefaultUni PLC.DefaultFun SrcSpan)
binding = withSpan $ \sp ->
inParens . choice $ try <$>
[ TermBind sp <$> (symbol "termbind" *> strictness) <*> varDecl <*> pTerm
, TypeBind sp <$> (symbol "typebind" *> tyVarDecl) <*> pType
, DatatypeBind sp <$> (symbol "datatypebind" *> datatype)
]
varTerm :: Parser PTerm
varTerm = withSpan $ \sp ->
PIR.Var sp <$> name
type Parametric
= Parser PTerm -> Parser PTerm
absTerm :: Parametric
absTerm tm = withSpan $ \sp ->
inParens $ PIR.tyAbs sp <$> (symbol "abs" *> trailingWhitespace tyName) <*> kind <*> tm
lamTerm :: Parametric
lamTerm tm = withSpan $ \sp ->
inParens $ PIR.lamAbs sp <$> (symbol "lam" *> trailingWhitespace name) <*> pType <*> tm
conTerm :: Parametric
conTerm _tm = withSpan $ \sp ->
inParens $ PIR.constant sp <$> (symbol "con" *> constant)
iwrapTerm :: Parametric
iwrapTerm tm = withSpan $ \sp ->
inParens $ PIR.iWrap sp <$> (symbol "iwrap" *> pType) <*> pType <*> tm
builtinTerm :: Parametric
builtinTerm _tm = withSpan $ \sp ->
inParens $ PIR.builtin sp <$> (symbol "builtin" *> builtinFunction)
unwrapTerm :: Parametric
unwrapTerm tm = withSpan $ \sp ->
inParens $ PIR.unwrap sp <$> (symbol "unwrap" *> tm)
errorTerm :: Parametric
errorTerm _tm = withSpan $ \sp ->
inParens $ PIR.error sp <$> (symbol "error" *> pType)
letTerm :: Parser PTerm
letTerm = withSpan $ \sp ->
inParens $ Let sp <$> (symbol "let" *> recursivity) <*> NE.some (try binding) <*> pTerm
appTerm :: Parametric
appTerm tm = withSpan $ \sp ->
inBrackets $ PIR.mkIterApp sp <$> tm <*> some tm
tyInstTerm :: Parametric
tyInstTerm tm = withSpan $ \sp ->
inBraces $ PIR.mkIterInst sp <$> tm <*> some pType
pTerm :: Parser PTerm
pTerm = leadingWhitespace go
where
go = choice $ try <$>
[ varTerm
, letTerm
, absTerm go
, lamTerm go
, conTerm go
, iwrapTerm go
, builtinTerm go
, unwrapTerm go
, errorTerm go
, tyInstTerm go
, appTerm go
]
program :: Parser (Program TyName Name PLC.DefaultUni PLC.DefaultFun SrcSpan)
program = leadingWhitespace go
where
go = do
prog <- withSpan $ \sp ->
inParens $ Program sp <$> (symbol "program" *> version) <*> pTerm
notFollowedBy anySingle
pure prog
parseProgram ::
(AsParserErrorBundle e, MonadError e m, MonadQuote m)
=> Text
-> m (Program TyName Name PLC.DefaultUni PLC.DefaultFun SrcSpan)
parseProgram = parseGen program
|
c07dd9f7d597b8bdff498f7af2de5ca03331dcb8d1d4616caee40e0ea9aa56c1
|
taktoa/hsdm
|
PAMTest.hs
|
module Main where
import System.HSDM.PAM
conversation :: [PamMessage] -> IO [PamResponse]
conversation m = do
print m
results <- mapM go m
return results
where
go (PamMessage str style) = do
print str
print style
return $ PamResponse "test"
main :: IO ()
main = do
putStrLn "doing start"
( handle, retcode) <- pamStart "slim" "test" conversation
print retcode
putStrLn "did start"
putStrLn "auth"
ret1 <- pamAuthenticate handle 0
print ret1
putStrLn "setcred"
ret2 <- pamSetCred handle (False,PAM_ESTABLISH_CRED)
print ret2
putStrLn "open"
ret3 <- pamOpenSession handle False
print ret3
-- do things
putStrLn "close"
ret10 <- pamCloseSession handle False
print ret10
putStrLn "clear"
ret11 <- pamSetCred handle (False,PAM_DELETE_CRED)
print ret11
putStrLn "doing stop"
ret12 <- pamEnd handle 0
print ret12
putStrLn "did stop"
return ()
| null |
https://raw.githubusercontent.com/taktoa/hsdm/e088ddbd20da76ac40450ae4cca9b1f87d042c58/haskell/src/PAMTest.hs
|
haskell
|
do things
|
module Main where
import System.HSDM.PAM
conversation :: [PamMessage] -> IO [PamResponse]
conversation m = do
print m
results <- mapM go m
return results
where
go (PamMessage str style) = do
print str
print style
return $ PamResponse "test"
main :: IO ()
main = do
putStrLn "doing start"
( handle, retcode) <- pamStart "slim" "test" conversation
print retcode
putStrLn "did start"
putStrLn "auth"
ret1 <- pamAuthenticate handle 0
print ret1
putStrLn "setcred"
ret2 <- pamSetCred handle (False,PAM_ESTABLISH_CRED)
print ret2
putStrLn "open"
ret3 <- pamOpenSession handle False
print ret3
putStrLn "close"
ret10 <- pamCloseSession handle False
print ret10
putStrLn "clear"
ret11 <- pamSetCred handle (False,PAM_DELETE_CRED)
print ret11
putStrLn "doing stop"
ret12 <- pamEnd handle 0
print ret12
putStrLn "did stop"
return ()
|
4d25f3f99ac5158261dd29030914b01df376d3ca34e38acd115e5fc85e8a5fca
|
FranklinChen/learn-you-some-erlang
|
kitty_server2_tests.erl
|
-module(kitty_server2_tests).
-record(cat, {name, color=green, description}). % stolen from kitty_server2.erl
-include_lib("eunit/include/eunit.hrl").
-define(CAT1, #cat{name=a, color=b, description=c}).
-define(CAT2, #cat{name=d, color=e, description=f}).
order_test() ->
Pid = kitty_server2:start_link(),
?assertEqual(?CAT1, kitty_server2:order_cat(Pid, a, b, c)),
?assertEqual(?CAT2, kitty_server2:order_cat(Pid, d, e, f)),
?assertEqual(ok, kitty_server2:close_shop(Pid)).
return_test() ->
Pid = kitty_server2:start_link(),
?assertEqual(ok, kitty_server2:return_cat(Pid, ?CAT1)),
?assertEqual(?CAT1, kitty_server2:order_cat(Pid, d, e, f)),
?assertEqual(?CAT2, kitty_server2:order_cat(Pid, d, e, f)),
?assertEqual(ok, kitty_server2:close_shop(Pid)).
close_noproc_test() ->
DeadPid = spawn_link(fun() -> ok end),
timer:sleep(100),
?assertError(noproc, kitty_server2:close_shop(DeadPid)).
| null |
https://raw.githubusercontent.com/FranklinChen/learn-you-some-erlang/878c8bc2011a12862fe72dd7fdc6c921348c79d6/tests/kitty_server2_tests.erl
|
erlang
|
stolen from kitty_server2.erl
|
-module(kitty_server2_tests).
-record(cat, {name, color=green, description}).
-include_lib("eunit/include/eunit.hrl").
-define(CAT1, #cat{name=a, color=b, description=c}).
-define(CAT2, #cat{name=d, color=e, description=f}).
order_test() ->
Pid = kitty_server2:start_link(),
?assertEqual(?CAT1, kitty_server2:order_cat(Pid, a, b, c)),
?assertEqual(?CAT2, kitty_server2:order_cat(Pid, d, e, f)),
?assertEqual(ok, kitty_server2:close_shop(Pid)).
return_test() ->
Pid = kitty_server2:start_link(),
?assertEqual(ok, kitty_server2:return_cat(Pid, ?CAT1)),
?assertEqual(?CAT1, kitty_server2:order_cat(Pid, d, e, f)),
?assertEqual(?CAT2, kitty_server2:order_cat(Pid, d, e, f)),
?assertEqual(ok, kitty_server2:close_shop(Pid)).
close_noproc_test() ->
DeadPid = spawn_link(fun() -> ok end),
timer:sleep(100),
?assertError(noproc, kitty_server2:close_shop(DeadPid)).
|
b55ec5b9cf31786c5f32f1eee898a10eacab49ab1be2a054026c5ebca742acaa
|
ocaml/ocaml
|
compenv.ml
|
(**************************************************************************)
(* *)
(* OCaml *)
(* *)
(* Fabrice Le Fessant, EPI Gallium, INRIA Paris-Rocquencourt *)
(* *)
(* Copyright 2013 Institut National de Recherche en Informatique et *)
(* en Automatique. *)
(* *)
(* All rights reserved. This file is distributed under the terms of *)
(* the GNU Lesser General Public License version 2.1, with the *)
(* special exception on linking described in the file LICENSE. *)
(* *)
(**************************************************************************)
open Clflags
exception Exit_with_status of int
let output_prefix name =
let oname =
match !output_name with
| None -> name
| Some n -> if !compile_only then (output_name := None; n) else name in
Filename.remove_extension oname
let print_version_and_library compiler =
Printf.printf "The OCaml %s, version " compiler;
print_string Config.version; print_newline();
print_string "Standard library directory: ";
print_string Config.standard_library; print_newline();
raise (Exit_with_status 0)
let print_version_string () =
print_string Config.version; print_newline();
raise (Exit_with_status 0)
let print_standard_library () =
print_string Config.standard_library; print_newline();
raise (Exit_with_status 0)
let fatal err =
prerr_endline err;
raise (Exit_with_status 2)
let extract_output = function
| Some s -> s
| None ->
fatal "Please specify the name of the output file, using option -o"
let default_output = function
| Some s -> s
| None -> Config.default_executable_name
let first_include_dirs = ref []
let last_include_dirs = ref []
let first_ccopts = ref []
let last_ccopts = ref []
let first_ppx = ref []
let last_ppx = ref []
let first_objfiles = ref []
let last_objfiles = ref []
let stop_early = ref false
(* Check validity of module name *)
let is_unit_name name =
try
if name = "" then raise Exit;
begin match name.[0] with
| 'A'..'Z' -> ()
| _ ->
raise Exit;
end;
for i = 1 to String.length name - 1 do
match name.[i] with
| 'A'..'Z' | 'a'..'z' | '0'..'9' | '_' | '\'' -> ()
| _ ->
raise Exit;
done;
true
with Exit -> false
let check_unit_name filename name =
if not (is_unit_name name) then
Location.prerr_warning (Location.in_file filename)
(Warnings.Bad_module_name name)
(* Compute name of module from output file name *)
let module_of_filename inputfile outputprefix =
let basename = Filename.basename outputprefix in
let name =
try
let pos = String.index basename '.' in
String.sub basename 0 pos
with Not_found -> basename
in
let name = String.capitalize_ascii name in
check_unit_name inputfile name;
name
type filename = string
type readenv_position =
Before_args | Before_compile of filename | Before_link
(* Syntax of OCAMLPARAM: SEP?(name=VALUE SEP)* _ (SEP name=VALUE)*
   where VALUE should not contain SEP, and SEP is ',' if unspecified,
   or ':', '|', ';', ' ' or ',' *)
exception SyntaxError of string
let print_error ppf msg =
Location.print_warning Location.none ppf
(Warnings.Bad_env_variable ("OCAMLPARAM", msg))
let parse_args s =
let args =
let len = String.length s in
if len = 0 then []
else
(* allow first char to specify an alternative separator in ":|; ," *)
match s.[0] with
| ( ':' | '|' | ';' | ' ' | ',' ) as c ->
List.tl (String.split_on_char c s)
| _ -> String.split_on_char ',' s
in
let rec iter is_after args before after =
match args with
[] ->
if not is_after then
raise (SyntaxError "no '_' separator found")
else
(List.rev before, List.rev after)
| "" :: tail -> iter is_after tail before after
| "_" :: _ when is_after -> raise (SyntaxError "too many '_' separators")
| "_" :: tail -> iter true tail before after
| arg :: tail ->
let binding = try
Misc.cut_at arg '='
with Not_found ->
raise (SyntaxError ("missing '=' in " ^ arg))
in
if is_after then
iter is_after tail before (binding :: after)
else
iter is_after tail (binding :: before) after
in
iter false args [] []
let setter ppf f name options s =
try
let bool = match s with
| "0" -> false
| "1" -> true
| _ -> raise Not_found
in
List.iter (fun b -> b := f bool) options
with Not_found ->
Printf.ksprintf (print_error ppf)
"bad value %s for %s" s name
let int_setter ppf name option s =
try
option := int_of_string s
with _ ->
Printf.ksprintf (print_error ppf)
"non-integer parameter %s for %S" s name
let int_option_setter ppf name option s =
try
option := Some (int_of_string s)
with _ ->
Printf.ksprintf (print_error ppf)
"non-integer parameter %s for %S" s name
(*
let float_setter ppf name option s =
  try
    option := float_of_string s
  with _ ->
    Location.print_warning Location.none ppf
      (Warnings.Bad_env_variable
         ("OCAMLPARAM", Printf.sprintf "non-float parameter for \"%s\"" name))
*)
let check_bool ppf name s =
match s with
| "0" -> false
| "1" -> true
| _ ->
Printf.ksprintf (print_error ppf)
"bad value %s for %s" s name;
false
let decode_compiler_pass ppf v ~name ~filter =
let module P = Clflags.Compiler_pass in
let passes = P.available_pass_names ~filter ~native:!native_code in
begin match List.find_opt (String.equal v) passes with
| None ->
Printf.ksprintf (print_error ppf)
"bad value %s for option \"%s\" (expected one of: %s)"
v name (String.concat ", " passes);
None
| Some v -> P.of_string v
end
let set_compiler_pass ppf ~name v flag ~filter =
match decode_compiler_pass ppf v ~name ~filter with
| None -> ()
| Some pass ->
match !flag with
| None -> flag := Some pass
| Some p ->
if not (p = pass) then begin
Printf.ksprintf (print_error ppf)
"Please specify at most one %s <pass>." name
end
(* 'can-discard=' specifies which arguments can be discarded without warning
because they are not understood by some versions of OCaml. *)
let can_discard = ref []
let parse_warnings error v =
Option.iter Location.(prerr_alert none) @@ Warnings.parse_options error v
let read_one_param ppf position name v =
let set name options s = setter ppf (fun b -> b) name options s in
let clear name options s = setter ppf (fun b -> not b) name options s in
let compat name s =
let error_if_unset = function
| true -> true
| false ->
Printf.ksprintf (print_error ppf)
"Unsetting %s is not supported anymore" name;
true
in
setter ppf error_if_unset name [ ref true ] s
in
match name with
| "g" -> set "g" [ Clflags.debug ] v
| "bin-annot" -> set "bin-annot" [ Clflags.binary_annotations ] v
| "afl-instrument" -> set "afl-instrument" [ Clflags.afl_instrument ] v
| "afl-inst-ratio" ->
int_setter ppf "afl-inst-ratio" afl_inst_ratio v
| "annot" -> set "annot" [ Clflags.annotations ] v
| "absname" -> set "absname" [ Clflags.absname ] v
| "compat-32" -> set "compat-32" [ bytecode_compatible_32 ] v
| "noassert" -> set "noassert" [ noassert ] v
| "noautolink" -> set "noautolink" [ no_auto_link ] v
| "nostdlib" -> set "nostdlib" [ no_std_include ] v
| "nocwd" -> set "nocwd" [ no_cwd ] v
| "linkall" -> set "linkall" [ link_everything ] v
| "nolabels" -> set "nolabels" [ classic ] v
| "principal" -> set "principal" [ principal ] v
| "rectypes" -> set "rectypes" [ recursive_types ] v
| "safe-string" -> compat "safe-string" v (* kept for compatibility *)
| "strict-sequence" -> set "strict-sequence" [ strict_sequence ] v
| "strict-formats" -> set "strict-formats" [ strict_formats ] v
| "thread" -> set "thread" [ use_threads ] v
| "unboxed-types" -> set "unboxed-types" [ unboxed_types ] v
| "unsafe" -> set "unsafe" [ unsafe ] v
| "verbose" -> set "verbose" [ verbose ] v
| "nopervasives" -> set "nopervasives" [ nopervasives ] v
| "slash" -> set "slash" [ force_slash ] v (* for ocamldep *)
| "no-slash" -> clear "no-slash" [ force_slash ] v (* for ocamldep *)
| "keep-docs" -> set "keep-docs" [ Clflags.keep_docs ] v
| "keep-locs" -> set "keep-locs" [ Clflags.keep_locs ] v
| "compact" -> clear "compact" [ optimize_for_speed ] v
| "no-app-funct" -> clear "no-app-funct" [ applicative_functors ] v
| "nodynlink" -> clear "nodynlink" [ dlcode ] v
| "short-paths" -> clear "short-paths" [ real_paths ] v
| "trans-mod" -> set "trans-mod" [ transparent_modules ] v
| "opaque" -> set "opaque" [ opaque ] v
| "pp" -> preprocessor := Some v
| "runtime-variant" -> runtime_variant := v
| "with-runtime" -> set "with-runtime" [ with_runtime ] v
| "open" ->
open_modules := List.rev_append (String.split_on_char ',' v) !open_modules
| "cc" -> c_compiler := Some v
| "clambda-checks" -> set "clambda-checks" [ clambda_checks ] v
| "function-sections" ->
set "function-sections" [ Clflags.function_sections ] v
(* assembly sources *)
| "s" ->
set "s" [ Clflags.keep_asm_file ; Clflags.keep_startup_file ] v
| "S" -> set "S" [ Clflags.keep_asm_file ] v
| "dstartup" -> set "dstartup" [ Clflags.keep_startup_file ] v
(* warn-errors *)
| "we" | "warn-error" -> parse_warnings true v
(* warnings *)
| "w" -> parse_warnings false v
(* warn-errors *)
| "wwe" -> parse_warnings false v
(* alerts *)
| "alert" -> Warnings.parse_alert_option v
(* inlining *)
| "inline" ->
let module F = Float_arg_helper in
begin match F.parse_no_error v inline_threshold with
| F.Ok -> ()
| F.Parse_failed exn ->
Printf.ksprintf (print_error ppf)
"bad syntax %s for \"inline\": %s" v (Printexc.to_string exn)
end
| "inline-toplevel" ->
Int_arg_helper.parse v
"Bad syntax in OCAMLPARAM for 'inline-toplevel'"
inline_toplevel_threshold
| "rounds" -> int_option_setter ppf "rounds" simplify_rounds v
| "inline-max-unroll" ->
Int_arg_helper.parse v "Bad syntax in OCAMLPARAM for 'inline-max-unroll'"
inline_max_unroll
| "inline-call-cost" ->
Int_arg_helper.parse v
"Bad syntax in OCAMLPARAM for 'inline-call-cost'"
inline_call_cost
| "inline-alloc-cost" ->
Int_arg_helper.parse v
"Bad syntax in OCAMLPARAM for 'inline-alloc-cost'"
inline_alloc_cost
| "inline-prim-cost" ->
Int_arg_helper.parse v
"Bad syntax in OCAMLPARAM for 'inline-prim-cost'"
inline_prim_cost
| "inline-branch-cost" ->
Int_arg_helper.parse v
"Bad syntax in OCAMLPARAM for 'inline-branch-cost'"
inline_branch_cost
| "inline-indirect-cost" ->
Int_arg_helper.parse v
"Bad syntax in OCAMLPARAM for 'inline-indirect-cost'"
inline_indirect_cost
| "inline-lifting-benefit" ->
Int_arg_helper.parse v
"Bad syntax in OCAMLPARAM for 'inline-lifting-benefit'"
inline_lifting_benefit
| "inline-branch-factor" ->
Float_arg_helper.parse v
"Bad syntax in OCAMLPARAM for 'inline-branch-factor'"
inline_branch_factor
| "inline-max-depth" ->
Int_arg_helper.parse v
"Bad syntax in OCAMLPARAM for 'inline-max-depth'"
inline_max_depth
| "Oclassic" ->
set "Oclassic" [ classic_inlining ] v
| "O2" ->
if check_bool ppf "O2" v then begin
default_simplify_rounds := 2;
use_inlining_arguments_set o2_arguments;
use_inlining_arguments_set ~round:0 o1_arguments
end
| "O3" ->
if check_bool ppf "O3" v then begin
default_simplify_rounds := 3;
use_inlining_arguments_set o3_arguments;
use_inlining_arguments_set ~round:1 o2_arguments;
use_inlining_arguments_set ~round:0 o1_arguments
end
| "unbox-closures" ->
set "unbox-closures" [ unbox_closures ] v
| "unbox-closures-factor" ->
int_setter ppf "unbox-closures-factor" unbox_closures_factor v
| "remove-unused-arguments" ->
set "remove-unused-arguments" [ remove_unused_arguments ] v
| "inlining-report" ->
if !native_code then
set "inlining-report" [ inlining_report ] v
| "flambda-verbose" ->
set "flambda-verbose" [ dump_flambda_verbose ] v
| "flambda-invariants" ->
set "flambda-invariants" [ flambda_invariant_checks ] v
| "cmm-invariants" ->
set "cmm-invariants" [ cmm_invariants ] v
| "linscan" ->
set "linscan" [ use_linscan ] v
| "insn-sched" -> set "insn-sched" [ insn_sched ] v
| "no-insn-sched" -> clear "insn-sched" [ insn_sched ] v
(* color output *)
| "color" ->
begin match color_reader.parse v with
| None ->
Printf.ksprintf (print_error ppf)
"bad value %s for \"color\", (%s)" v color_reader.usage
| Some setting -> color := Some setting
end
| "error-style" ->
begin match error_style_reader.parse v with
| None ->
Printf.ksprintf (print_error ppf)
"bad value %s for \"error-style\", (%s)" v error_style_reader.usage
| Some setting -> error_style := Some setting
end
| "intf-suffix" -> Config.interface_suffix := v
| "I" -> begin
match position with
| Before_args -> first_include_dirs := v :: !first_include_dirs
| Before_link | Before_compile _ ->
last_include_dirs := v :: !last_include_dirs
end
| "cclib" ->
begin
match position with
| Before_compile _ -> ()
| Before_link | Before_args ->
ccobjs := Misc.rev_split_words v @ !ccobjs
end
| "ccopt"
| "ccopts"
->
begin
match position with
| Before_link | Before_compile _ ->
last_ccopts := v :: !last_ccopts
| Before_args ->
first_ccopts := v :: !first_ccopts
end
| "ppx" ->
begin
match position with
| Before_link | Before_compile _ ->
last_ppx := v :: !last_ppx
| Before_args ->
first_ppx := v :: !first_ppx
end
| "cmo" | "cma" ->
if not !native_code then
begin
match position with
| Before_link | Before_compile _ ->
last_objfiles := v ::! last_objfiles
| Before_args ->
first_objfiles := v :: !first_objfiles
end
| "cmx" | "cmxa" ->
if !native_code then
begin
match position with
| Before_link | Before_compile _ ->
last_objfiles := v ::! last_objfiles
| Before_args ->
first_objfiles := v :: !first_objfiles
end
| "pic" ->
if !native_code then
set "pic" [ pic_code ] v
| "can-discard" ->
can_discard := v ::!can_discard
| "timings" | "profile" ->
let if_on = if name = "timings" then [ `Time ] else Profile.all_columns in
profile_columns := if check_bool ppf name v then if_on else []
| "stop-after" ->
set_compiler_pass ppf v ~name Clflags.stop_after ~filter:(fun _ -> true)
| "save-ir-after" ->
if !native_code then begin
let filter = Clflags.Compiler_pass.can_save_ir_after in
match decode_compiler_pass ppf v ~name ~filter with
| None -> ()
| Some pass -> set_save_ir_after pass true
end
| "dump-into-file" -> Clflags.dump_into_file := true
| "dump-dir" -> Clflags.dump_dir := Some v
| _ ->
if not (List.mem name !can_discard) then begin
can_discard := name :: !can_discard;
Printf.ksprintf (print_error ppf)
"Warning: discarding value of variable %S in OCAMLPARAM\n%!"
name
end
let read_OCAMLPARAM ppf position =
try
let s = Sys.getenv "OCAMLPARAM" in
if s <> "" then
let (before, after) =
try
parse_args s
with SyntaxError s ->
print_error ppf s;
[],[]
in
List.iter (fun (name, v) -> read_one_param ppf position name v)
(match position with
Before_args -> before
| Before_compile _ | Before_link -> after)
with Not_found -> ()
(* OCAMLPARAM passed as file *)
type pattern =
| Filename of string
| Any
type file_option = {
pattern : pattern;
name : string;
value : string;
}
let scan_line ic =
Scanf.bscanf ic "%[0-9a-zA-Z_.*] : %[a-zA-Z_-] = %s "
(fun pattern name value ->
let pattern =
match pattern with
| "*" -> Any
| _ -> Filename pattern
in
{ pattern; name; value })
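(* Illustrative note (not part of the original source): with the scanf format
   above, a configuration line such as
     foo.ml : w = +a
   yields { pattern = Filename "foo.ml"; name = "w"; value = "+a" },
   while
     * : g = 1
   yields { pattern = Any; name = "g"; value = "1" }. *)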
let load_config ppf filename =
match open_in_bin filename with
| exception e ->
Location.errorf ~loc:(Location.in_file filename)
"Cannot open file %s" (Printexc.to_string e)
|> Location.print_report ppf;
raise Exit
| ic ->
let sic = Scanf.Scanning.from_channel ic in
let rec read line_number line_start acc =
match scan_line sic with
| exception End_of_file ->
close_in ic;
acc
| exception Scanf.Scan_failure error ->
let position = Lexing.{
pos_fname = filename;
pos_lnum = line_number;
pos_bol = line_start;
pos_cnum = pos_in ic;
}
in
let loc = Location.{
loc_start = position;
loc_end = position;
loc_ghost = false;
}
in
Location.errorf ~loc "Configuration file error %s" error
|> Location.print_report ppf;
close_in ic;
raise Exit
| line ->
read (line_number + 1) (pos_in ic) (line :: acc)
in
let lines = read 0 0 [] in
lines
let matching_filename filename { pattern } =
match pattern with
| Any -> true
| Filename pattern ->
let filename = String.lowercase_ascii filename in
let pattern = String.lowercase_ascii pattern in
filename = pattern
let apply_config_file ppf position =
let config_file =
Filename.concat Config.standard_library "ocaml_compiler_internal_params"
in
let config =
if Sys.file_exists config_file then
load_config ppf config_file
else
[]
in
let config =
match position with
| Before_compile filename ->
List.filter (matching_filename filename) config
| Before_args | Before_link ->
List.filter (fun { pattern } -> pattern = Any) config
in
List.iter (fun { name; value } -> read_one_param ppf position name value)
config
let readenv ppf position =
last_include_dirs := [];
last_ccopts := [];
last_ppx := [];
last_objfiles := [];
apply_config_file ppf position;
read_OCAMLPARAM ppf position;
all_ccopts := !last_ccopts @ !first_ccopts;
all_ppx := !last_ppx @ !first_ppx
let get_objfiles ~with_ocamlparam =
if with_ocamlparam then
List.rev (!last_objfiles @ !objfiles @ !first_objfiles)
else
List.rev !objfiles
let has_linker_inputs = ref false
type deferred_action =
| ProcessImplementation of string
| ProcessInterface of string
| ProcessCFile of string
| ProcessOtherFile of string
| ProcessObjects of string list
| ProcessDLLs of string list
let c_object_of_filename name =
Filename.chop_suffix (Filename.basename name) ".c" ^ Config.ext_obj
let process_action
(ppf, implementation, interface, ocaml_mod_ext, ocaml_lib_ext) action =
let impl ~start_from name =
readenv ppf (Before_compile name);
let opref = output_prefix name in
implementation ~start_from ~source_file:name ~output_prefix:opref;
objfiles := (opref ^ ocaml_mod_ext) :: !objfiles
in
match action with
| ProcessImplementation name ->
impl ~start_from:Compiler_pass.Parsing name
| ProcessInterface name ->
readenv ppf (Before_compile name);
let opref = output_prefix name in
interface ~source_file:name ~output_prefix:opref;
if !make_package then objfiles := (opref ^ ".cmi") :: !objfiles
| ProcessCFile name ->
readenv ppf (Before_compile name);
Location.input_name := name;
let obj_name = match !output_name with
| None -> c_object_of_filename name
| Some n -> n
in
if Ccomp.compile_file ?output:!output_name name <> 0
then raise (Exit_with_status 2);
ccobjs := obj_name :: !ccobjs
| ProcessObjects names ->
ccobjs := names @ !ccobjs
| ProcessDLLs names ->
dllibs := names @ !dllibs
| ProcessOtherFile name ->
if Filename.check_suffix name ocaml_mod_ext
|| Filename.check_suffix name ocaml_lib_ext then
objfiles := name :: !objfiles
else if Filename.check_suffix name ".cmi" && !make_package then
objfiles := name :: !objfiles
else if Filename.check_suffix name Config.ext_obj
|| Filename.check_suffix name Config.ext_lib then begin
has_linker_inputs := true;
ccobjs := name :: !ccobjs
end
else if not !native_code && Filename.check_suffix name Config.ext_dll then
dllibs := name :: !dllibs
else
match Compiler_pass.of_input_filename name with
| Some start_from ->
Location.input_name := name;
impl ~start_from name
| None -> raise(Arg.Bad("don't know what to do with " ^ name))
let action_of_file name =
if Filename.check_suffix name ".ml"
|| Filename.check_suffix name ".mlt" then
ProcessImplementation name
else if Filename.check_suffix name !Config.interface_suffix then
ProcessInterface name
else if Filename.check_suffix name ".c" then
ProcessCFile name
else
ProcessOtherFile name
let deferred_actions = ref []
let defer action =
deferred_actions := action :: !deferred_actions
let anonymous filename = defer (action_of_file filename)
let impl filename = defer (ProcessImplementation filename)
let intf filename = defer (ProcessInterface filename)
let process_deferred_actions env =
let final_output_name = !output_name in
  (* Make sure the intermediate products don't clash with the final one
     when we're invoked like: ocamlopt -o foo bar.c baz.ml. *)
if not !compile_only then output_name := None;
begin
match final_output_name with
| None -> ()
| Some _output_name ->
if !compile_only then begin
if List.length (List.filter (function
| ProcessCFile _
| ProcessImplementation _
| ProcessInterface _ -> true
| _ -> false) !deferred_actions) > 1 then
fatal "Options -c -o are incompatible with compiling multiple files"
end;
end;
if !make_archive && List.exists (function
| ProcessOtherFile name -> Filename.check_suffix name ".cmxa"
| _ -> false) !deferred_actions then
fatal "Option -a cannot be used with .cmxa input files.";
List.iter (process_action env) (List.rev !deferred_actions);
output_name := final_output_name;
stop_early :=
!compile_only ||
!print_types ||
match !stop_after with
| None -> false
| Some p -> Clflags.Compiler_pass.is_compilation_pass p
(* This function is almost the same as [Arg.parse_expand], except
that [Arg.parse_expand] could not be used because it does not take a
reference for [arg_spec].
We use a marker \000 for Arg.parse_and_expand_argv_dynamic
so we can split out error message from usage options, because
it always concatenates
error message with usage options *)
let parse_arguments ?(current=ref 0) argv f program =
try
Arg.parse_and_expand_argv_dynamic current argv Clflags.arg_spec f "\000"
with
| Arg.Bad err_msg ->
let usage_msg = create_usage_msg program in
let err_msg = err_msg
|> String.split_on_char '\000'
|> List.hd
|> String.trim in
Printf.eprintf "%s\n%s\n" err_msg usage_msg;
raise (Exit_with_status 2)
| Arg.Help msg ->
let err_msg =
msg
|> String.split_on_char '\000'
|> String.concat "" in
let help_msg =
Printf.sprintf "Usage: %s <options> <files>\nOptions are:" program in
Printf.printf "%s\n%s" help_msg err_msg;
raise (Exit_with_status 0)
| null |
https://raw.githubusercontent.com/ocaml/ocaml/949e2626c22b5fe8159cb29d47207d5621eabf90/driver/compenv.ml
|
ocaml
|
************************************************************************
OCaml
en Automatique.
All rights reserved. This file is distributed under the terms of
special exception on linking described in the file LICENSE.
************************************************************************
Check validity of module name
Compute name of module from output file name
'can-discard=' specifies which arguments can be discarded without warning
because they are not understood by some versions of OCaml.
kept for compatibility
for ocamldep
for ocamldep
assembly sources
warn-errors
warnings
warn-errors
alerts
inlining
color output
This function is almost the same as [Arg.parse_expand], except
that [Arg.parse_expand] could not be used because it does not take a
reference for [arg_spec].
We use a marker \000 for Arg.parse_and_expand_argv_dynamic
so we can split out error message from usage options, because
it always concatenates
error message with usage options
|
(* Fabrice Le Fessant, EPI Gallium, INRIA Paris-Rocquencourt
   Copyright 2013 Institut National de Recherche en Informatique et
   the GNU Lesser General Public License version 2.1, with the *)
open Clflags
exception Exit_with_status of int
let output_prefix name =
let oname =
match !output_name with
| None -> name
| Some n -> if !compile_only then (output_name := None; n) else name in
Filename.remove_extension oname
let print_version_and_library compiler =
Printf.printf "The OCaml %s, version " compiler;
print_string Config.version; print_newline();
print_string "Standard library directory: ";
print_string Config.standard_library; print_newline();
raise (Exit_with_status 0)
let print_version_string () =
print_string Config.version; print_newline();
raise (Exit_with_status 0)
let print_standard_library () =
print_string Config.standard_library; print_newline();
raise (Exit_with_status 0)
let fatal err =
prerr_endline err;
raise (Exit_with_status 2)
let extract_output = function
| Some s -> s
| None ->
fatal "Please specify the name of the output file, using option -o"
let default_output = function
| Some s -> s
| None -> Config.default_executable_name
let first_include_dirs = ref []
let last_include_dirs = ref []
let first_ccopts = ref []
let last_ccopts = ref []
let first_ppx = ref []
let last_ppx = ref []
let first_objfiles = ref []
let last_objfiles = ref []
let stop_early = ref false
let is_unit_name name =
try
if name = "" then raise Exit;
begin match name.[0] with
| 'A'..'Z' -> ()
| _ ->
raise Exit;
end;
for i = 1 to String.length name - 1 do
match name.[i] with
| 'A'..'Z' | 'a'..'z' | '0'..'9' | '_' | '\'' -> ()
| _ ->
raise Exit;
done;
true
with Exit -> false
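(* Illustrative examples (not in the original source): is_unit_name accepts
   capitalized identifiers and rejects everything else, e.g.
     is_unit_name "Foo_bar" = true
     is_unit_name "foo"     = false
     is_unit_name ""        = false *)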
let check_unit_name filename name =
if not (is_unit_name name) then
Location.prerr_warning (Location.in_file filename)
(Warnings.Bad_module_name name)
let module_of_filename inputfile outputprefix =
let basename = Filename.basename outputprefix in
let name =
try
let pos = String.index basename '.' in
String.sub basename 0 pos
with Not_found -> basename
in
let name = String.capitalize_ascii name in
check_unit_name inputfile name;
name
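(* Illustrative example (not in the original source): the module name is the
   capitalized basename of the output prefix, truncated at the first dot, so
     module_of_filename "src/foo.ml" "src/foo" = "Foo"
   after the bad-module-name check has been run on the input file. *)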
type filename = string
type readenv_position =
Before_args | Before_compile of filename | Before_link
(* Syntax of OCAMLPARAM: SEP?(name=VALUE SEP)* _ (SEP name=VALUE)*
   where VALUE should not contain SEP, and SEP is ',' if unspecified,
   or ':', '|', ';', ' ' or ',' *)
exception SyntaxError of string
let print_error ppf msg =
Location.print_warning Location.none ppf
(Warnings.Bad_env_variable ("OCAMLPARAM", msg))
let parse_args s =
let args =
let len = String.length s in
if len = 0 then []
else
(* allow first char to specify an alternative separator in ":|; ," *)
match s.[0] with
| ( ':' | '|' | ';' | ' ' | ',' ) as c ->
List.tl (String.split_on_char c s)
| _ -> String.split_on_char ',' s
in
let rec iter is_after args before after =
match args with
[] ->
if not is_after then
raise (SyntaxError "no '_' separator found")
else
(List.rev before, List.rev after)
| "" :: tail -> iter is_after tail before after
| "_" :: _ when is_after -> raise (SyntaxError "too many '_' separators")
| "_" :: tail -> iter true tail before after
| arg :: tail ->
let binding = try
Misc.cut_at arg '='
with Not_found ->
raise (SyntaxError ("missing '=' in " ^ arg))
in
if is_after then
iter is_after tail before (binding :: after)
else
iter is_after tail (binding :: before) after
in
iter false args [] []
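(* Illustrative example (not in the original source): with the default ','
   separator,
     parse_args "g=1,_,w=+a" = ([("g", "1")], [("w", "+a")])
   where read_OCAMLPARAM applies the first list at the Before_args position and
   the second at the Before_compile/Before_link positions; a leading ':', '|',
   ';' or ' ' selects that character as the separator instead. *)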
let setter ppf f name options s =
try
let bool = match s with
| "0" -> false
| "1" -> true
| _ -> raise Not_found
in
List.iter (fun b -> b := f bool) options
with Not_found ->
Printf.ksprintf (print_error ppf)
"bad value %s for %s" s name
let int_setter ppf name option s =
try
option := int_of_string s
with _ ->
Printf.ksprintf (print_error ppf)
"non-integer parameter %s for %S" s name
let int_option_setter ppf name option s =
try
option := Some (int_of_string s)
with _ ->
Printf.ksprintf (print_error ppf)
"non-integer parameter %s for %S" s name
(*
let float_setter ppf name option s =
  try
    option := float_of_string s
  with _ ->
    Location.print_warning Location.none ppf
      (Warnings.Bad_env_variable
         ("OCAMLPARAM", Printf.sprintf "non-float parameter for \"%s\"" name))
*)
let check_bool ppf name s =
match s with
| "0" -> false
| "1" -> true
| _ ->
Printf.ksprintf (print_error ppf)
"bad value %s for %s" s name;
false
let decode_compiler_pass ppf v ~name ~filter =
let module P = Clflags.Compiler_pass in
let passes = P.available_pass_names ~filter ~native:!native_code in
begin match List.find_opt (String.equal v) passes with
| None ->
Printf.ksprintf (print_error ppf)
"bad value %s for option \"%s\" (expected one of: %s)"
v name (String.concat ", " passes);
None
| Some v -> P.of_string v
end
let set_compiler_pass ppf ~name v flag ~filter =
match decode_compiler_pass ppf v ~name ~filter with
| None -> ()
| Some pass ->
match !flag with
| None -> flag := Some pass
| Some p ->
if not (p = pass) then begin
Printf.ksprintf (print_error ppf)
"Please specify at most one %s <pass>." name
end
let can_discard = ref []
let parse_warnings error v =
Option.iter Location.(prerr_alert none) @@ Warnings.parse_options error v
let read_one_param ppf position name v =
let set name options s = setter ppf (fun b -> b) name options s in
let clear name options s = setter ppf (fun b -> not b) name options s in
let compat name s =
let error_if_unset = function
| true -> true
| false ->
Printf.ksprintf (print_error ppf)
"Unsetting %s is not supported anymore" name;
true
in
setter ppf error_if_unset name [ ref true ] s
in
match name with
| "g" -> set "g" [ Clflags.debug ] v
| "bin-annot" -> set "bin-annot" [ Clflags.binary_annotations ] v
| "afl-instrument" -> set "afl-instrument" [ Clflags.afl_instrument ] v
| "afl-inst-ratio" ->
int_setter ppf "afl-inst-ratio" afl_inst_ratio v
| "annot" -> set "annot" [ Clflags.annotations ] v
| "absname" -> set "absname" [ Clflags.absname ] v
| "compat-32" -> set "compat-32" [ bytecode_compatible_32 ] v
| "noassert" -> set "noassert" [ noassert ] v
| "noautolink" -> set "noautolink" [ no_auto_link ] v
| "nostdlib" -> set "nostdlib" [ no_std_include ] v
| "nocwd" -> set "nocwd" [ no_cwd ] v
| "linkall" -> set "linkall" [ link_everything ] v
| "nolabels" -> set "nolabels" [ classic ] v
| "principal" -> set "principal" [ principal ] v
| "rectypes" -> set "rectypes" [ recursive_types ] v
| "strict-sequence" -> set "strict-sequence" [ strict_sequence ] v
| "strict-formats" -> set "strict-formats" [ strict_formats ] v
| "thread" -> set "thread" [ use_threads ] v
| "unboxed-types" -> set "unboxed-types" [ unboxed_types ] v
| "unsafe" -> set "unsafe" [ unsafe ] v
| "verbose" -> set "verbose" [ verbose ] v
| "nopervasives" -> set "nopervasives" [ nopervasives ] v
| "keep-docs" -> set "keep-docs" [ Clflags.keep_docs ] v
| "keep-locs" -> set "keep-locs" [ Clflags.keep_locs ] v
| "compact" -> clear "compact" [ optimize_for_speed ] v
| "no-app-funct" -> clear "no-app-funct" [ applicative_functors ] v
| "nodynlink" -> clear "nodynlink" [ dlcode ] v
| "short-paths" -> clear "short-paths" [ real_paths ] v
| "trans-mod" -> set "trans-mod" [ transparent_modules ] v
| "opaque" -> set "opaque" [ opaque ] v
| "pp" -> preprocessor := Some v
| "runtime-variant" -> runtime_variant := v
| "with-runtime" -> set "with-runtime" [ with_runtime ] v
| "open" ->
open_modules := List.rev_append (String.split_on_char ',' v) !open_modules
| "cc" -> c_compiler := Some v
| "clambda-checks" -> set "clambda-checks" [ clambda_checks ] v
| "function-sections" ->
set "function-sections" [ Clflags.function_sections ] v
| "s" ->
set "s" [ Clflags.keep_asm_file ; Clflags.keep_startup_file ] v
| "S" -> set "S" [ Clflags.keep_asm_file ] v
| "dstartup" -> set "dstartup" [ Clflags.keep_startup_file ] v
| "we" | "warn-error" -> parse_warnings true v
| "w" -> parse_warnings false v
| "wwe" -> parse_warnings false v
| "alert" -> Warnings.parse_alert_option v
| "inline" ->
let module F = Float_arg_helper in
begin match F.parse_no_error v inline_threshold with
| F.Ok -> ()
| F.Parse_failed exn ->
Printf.ksprintf (print_error ppf)
"bad syntax %s for \"inline\": %s" v (Printexc.to_string exn)
end
| "inline-toplevel" ->
Int_arg_helper.parse v
"Bad syntax in OCAMLPARAM for 'inline-toplevel'"
inline_toplevel_threshold
| "rounds" -> int_option_setter ppf "rounds" simplify_rounds v
| "inline-max-unroll" ->
Int_arg_helper.parse v "Bad syntax in OCAMLPARAM for 'inline-max-unroll'"
inline_max_unroll
| "inline-call-cost" ->
Int_arg_helper.parse v
"Bad syntax in OCAMLPARAM for 'inline-call-cost'"
inline_call_cost
| "inline-alloc-cost" ->
Int_arg_helper.parse v
"Bad syntax in OCAMLPARAM for 'inline-alloc-cost'"
inline_alloc_cost
| "inline-prim-cost" ->
Int_arg_helper.parse v
"Bad syntax in OCAMLPARAM for 'inline-prim-cost'"
inline_prim_cost
| "inline-branch-cost" ->
Int_arg_helper.parse v
"Bad syntax in OCAMLPARAM for 'inline-branch-cost'"
inline_branch_cost
| "inline-indirect-cost" ->
Int_arg_helper.parse v
"Bad syntax in OCAMLPARAM for 'inline-indirect-cost'"
inline_indirect_cost
| "inline-lifting-benefit" ->
Int_arg_helper.parse v
"Bad syntax in OCAMLPARAM for 'inline-lifting-benefit'"
inline_lifting_benefit
| "inline-branch-factor" ->
Float_arg_helper.parse v
"Bad syntax in OCAMLPARAM for 'inline-branch-factor'"
inline_branch_factor
| "inline-max-depth" ->
Int_arg_helper.parse v
"Bad syntax in OCAMLPARAM for 'inline-max-depth'"
inline_max_depth
| "Oclassic" ->
set "Oclassic" [ classic_inlining ] v
| "O2" ->
if check_bool ppf "O2" v then begin
default_simplify_rounds := 2;
use_inlining_arguments_set o2_arguments;
use_inlining_arguments_set ~round:0 o1_arguments
end
| "O3" ->
if check_bool ppf "O3" v then begin
default_simplify_rounds := 3;
use_inlining_arguments_set o3_arguments;
use_inlining_arguments_set ~round:1 o2_arguments;
use_inlining_arguments_set ~round:0 o1_arguments
end
| "unbox-closures" ->
set "unbox-closures" [ unbox_closures ] v
| "unbox-closures-factor" ->
int_setter ppf "unbox-closures-factor" unbox_closures_factor v
| "remove-unused-arguments" ->
set "remove-unused-arguments" [ remove_unused_arguments ] v
| "inlining-report" ->
if !native_code then
set "inlining-report" [ inlining_report ] v
| "flambda-verbose" ->
set "flambda-verbose" [ dump_flambda_verbose ] v
| "flambda-invariants" ->
set "flambda-invariants" [ flambda_invariant_checks ] v
| "cmm-invariants" ->
set "cmm-invariants" [ cmm_invariants ] v
| "linscan" ->
set "linscan" [ use_linscan ] v
| "insn-sched" -> set "insn-sched" [ insn_sched ] v
| "no-insn-sched" -> clear "insn-sched" [ insn_sched ] v
| "color" ->
begin match color_reader.parse v with
| None ->
Printf.ksprintf (print_error ppf)
"bad value %s for \"color\", (%s)" v color_reader.usage
| Some setting -> color := Some setting
end
| "error-style" ->
begin match error_style_reader.parse v with
| None ->
Printf.ksprintf (print_error ppf)
"bad value %s for \"error-style\", (%s)" v error_style_reader.usage
| Some setting -> error_style := Some setting
end
| "intf-suffix" -> Config.interface_suffix := v
| "I" -> begin
match position with
| Before_args -> first_include_dirs := v :: !first_include_dirs
| Before_link | Before_compile _ ->
last_include_dirs := v :: !last_include_dirs
end
| "cclib" ->
begin
match position with
| Before_compile _ -> ()
| Before_link | Before_args ->
ccobjs := Misc.rev_split_words v @ !ccobjs
end
| "ccopt"
| "ccopts"
->
begin
match position with
| Before_link | Before_compile _ ->
last_ccopts := v :: !last_ccopts
| Before_args ->
first_ccopts := v :: !first_ccopts
end
| "ppx" ->
begin
match position with
| Before_link | Before_compile _ ->
last_ppx := v :: !last_ppx
| Before_args ->
first_ppx := v :: !first_ppx
end
| "cmo" | "cma" ->
if not !native_code then
begin
match position with
| Before_link | Before_compile _ ->
last_objfiles := v ::! last_objfiles
| Before_args ->
first_objfiles := v :: !first_objfiles
end
| "cmx" | "cmxa" ->
if !native_code then
begin
match position with
| Before_link | Before_compile _ ->
last_objfiles := v ::! last_objfiles
| Before_args ->
first_objfiles := v :: !first_objfiles
end
| "pic" ->
if !native_code then
set "pic" [ pic_code ] v
| "can-discard" ->
can_discard := v ::!can_discard
| "timings" | "profile" ->
let if_on = if name = "timings" then [ `Time ] else Profile.all_columns in
profile_columns := if check_bool ppf name v then if_on else []
| "stop-after" ->
set_compiler_pass ppf v ~name Clflags.stop_after ~filter:(fun _ -> true)
| "save-ir-after" ->
if !native_code then begin
let filter = Clflags.Compiler_pass.can_save_ir_after in
match decode_compiler_pass ppf v ~name ~filter with
| None -> ()
| Some pass -> set_save_ir_after pass true
end
| "dump-into-file" -> Clflags.dump_into_file := true
| "dump-dir" -> Clflags.dump_dir := Some v
| _ ->
if not (List.mem name !can_discard) then begin
can_discard := name :: !can_discard;
Printf.ksprintf (print_error ppf)
"Warning: discarding value of variable %S in OCAMLPARAM\n%!"
name
end
let read_OCAMLPARAM ppf position =
try
let s = Sys.getenv "OCAMLPARAM" in
if s <> "" then
let (before, after) =
try
parse_args s
with SyntaxError s ->
print_error ppf s;
[],[]
in
List.iter (fun (name, v) -> read_one_param ppf position name v)
(match position with
Before_args -> before
| Before_compile _ | Before_link -> after)
with Not_found -> ()
(* OCAMLPARAM passed as file *)
type pattern =
| Filename of string
| Any
type file_option = {
pattern : pattern;
name : string;
value : string;
}
let scan_line ic =
Scanf.bscanf ic "%[0-9a-zA-Z_.*] : %[a-zA-Z_-] = %s "
(fun pattern name value ->
let pattern =
match pattern with
| "*" -> Any
| _ -> Filename pattern
in
{ pattern; name; value })
let load_config ppf filename =
match open_in_bin filename with
| exception e ->
Location.errorf ~loc:(Location.in_file filename)
"Cannot open file %s" (Printexc.to_string e)
|> Location.print_report ppf;
raise Exit
| ic ->
let sic = Scanf.Scanning.from_channel ic in
let rec read line_number line_start acc =
match scan_line sic with
| exception End_of_file ->
close_in ic;
acc
| exception Scanf.Scan_failure error ->
let position = Lexing.{
pos_fname = filename;
pos_lnum = line_number;
pos_bol = line_start;
pos_cnum = pos_in ic;
}
in
let loc = Location.{
loc_start = position;
loc_end = position;
loc_ghost = false;
}
in
Location.errorf ~loc "Configuration file error %s" error
|> Location.print_report ppf;
close_in ic;
raise Exit
| line ->
read (line_number + 1) (pos_in ic) (line :: acc)
in
let lines = read 0 0 [] in
lines
let matching_filename filename { pattern } =
match pattern with
| Any -> true
| Filename pattern ->
let filename = String.lowercase_ascii filename in
let pattern = String.lowercase_ascii pattern in
filename = pattern
let apply_config_file ppf position =
let config_file =
Filename.concat Config.standard_library "ocaml_compiler_internal_params"
in
let config =
if Sys.file_exists config_file then
load_config ppf config_file
else
[]
in
let config =
match position with
| Before_compile filename ->
List.filter (matching_filename filename) config
| Before_args | Before_link ->
List.filter (fun { pattern } -> pattern = Any) config
in
List.iter (fun { name; value } -> read_one_param ppf position name value)
config
let readenv ppf position =
last_include_dirs := [];
last_ccopts := [];
last_ppx := [];
last_objfiles := [];
apply_config_file ppf position;
read_OCAMLPARAM ppf position;
all_ccopts := !last_ccopts @ !first_ccopts;
all_ppx := !last_ppx @ !first_ppx
let get_objfiles ~with_ocamlparam =
if with_ocamlparam then
List.rev (!last_objfiles @ !objfiles @ !first_objfiles)
else
List.rev !objfiles
let has_linker_inputs = ref false
type deferred_action =
| ProcessImplementation of string
| ProcessInterface of string
| ProcessCFile of string
| ProcessOtherFile of string
| ProcessObjects of string list
| ProcessDLLs of string list
let c_object_of_filename name =
Filename.chop_suffix (Filename.basename name) ".c" ^ Config.ext_obj
let process_action
(ppf, implementation, interface, ocaml_mod_ext, ocaml_lib_ext) action =
let impl ~start_from name =
readenv ppf (Before_compile name);
let opref = output_prefix name in
implementation ~start_from ~source_file:name ~output_prefix:opref;
objfiles := (opref ^ ocaml_mod_ext) :: !objfiles
in
match action with
| ProcessImplementation name ->
impl ~start_from:Compiler_pass.Parsing name
| ProcessInterface name ->
readenv ppf (Before_compile name);
let opref = output_prefix name in
interface ~source_file:name ~output_prefix:opref;
if !make_package then objfiles := (opref ^ ".cmi") :: !objfiles
| ProcessCFile name ->
readenv ppf (Before_compile name);
Location.input_name := name;
let obj_name = match !output_name with
| None -> c_object_of_filename name
| Some n -> n
in
if Ccomp.compile_file ?output:!output_name name <> 0
then raise (Exit_with_status 2);
ccobjs := obj_name :: !ccobjs
| ProcessObjects names ->
ccobjs := names @ !ccobjs
| ProcessDLLs names ->
dllibs := names @ !dllibs
| ProcessOtherFile name ->
if Filename.check_suffix name ocaml_mod_ext
|| Filename.check_suffix name ocaml_lib_ext then
objfiles := name :: !objfiles
else if Filename.check_suffix name ".cmi" && !make_package then
objfiles := name :: !objfiles
else if Filename.check_suffix name Config.ext_obj
|| Filename.check_suffix name Config.ext_lib then begin
has_linker_inputs := true;
ccobjs := name :: !ccobjs
end
else if not !native_code && Filename.check_suffix name Config.ext_dll then
dllibs := name :: !dllibs
else
match Compiler_pass.of_input_filename name with
| Some start_from ->
Location.input_name := name;
impl ~start_from name
| None -> raise(Arg.Bad("don't know what to do with " ^ name))
let action_of_file name =
if Filename.check_suffix name ".ml"
|| Filename.check_suffix name ".mlt" then
ProcessImplementation name
else if Filename.check_suffix name !Config.interface_suffix then
ProcessInterface name
else if Filename.check_suffix name ".c" then
ProcessCFile name
else
ProcessOtherFile name
let deferred_actions = ref []
let defer action =
deferred_actions := action :: !deferred_actions
let anonymous filename = defer (action_of_file filename)
let impl filename = defer (ProcessImplementation filename)
let intf filename = defer (ProcessInterface filename)
let process_deferred_actions env =
let final_output_name = !output_name in
  (* Make sure the intermediate products don't clash with the final one
     when we're invoked like: ocamlopt -o foo bar.c baz.ml. *)
if not !compile_only then output_name := None;
begin
match final_output_name with
| None -> ()
| Some _output_name ->
if !compile_only then begin
if List.length (List.filter (function
| ProcessCFile _
| ProcessImplementation _
| ProcessInterface _ -> true
| _ -> false) !deferred_actions) > 1 then
fatal "Options -c -o are incompatible with compiling multiple files"
end;
end;
if !make_archive && List.exists (function
| ProcessOtherFile name -> Filename.check_suffix name ".cmxa"
| _ -> false) !deferred_actions then
fatal "Option -a cannot be used with .cmxa input files.";
List.iter (process_action env) (List.rev !deferred_actions);
output_name := final_output_name;
stop_early :=
!compile_only ||
!print_types ||
match !stop_after with
| None -> false
| Some p -> Clflags.Compiler_pass.is_compilation_pass p
let parse_arguments ?(current=ref 0) argv f program =
try
Arg.parse_and_expand_argv_dynamic current argv Clflags.arg_spec f "\000"
with
| Arg.Bad err_msg ->
let usage_msg = create_usage_msg program in
let err_msg = err_msg
|> String.split_on_char '\000'
|> List.hd
|> String.trim in
Printf.eprintf "%s\n%s\n" err_msg usage_msg;
raise (Exit_with_status 2)
| Arg.Help msg ->
let err_msg =
msg
|> String.split_on_char '\000'
|> String.concat "" in
let help_msg =
Printf.sprintf "Usage: %s <options> <files>\nOptions are:" program in
Printf.printf "%s\n%s" help_msg err_msg;
raise (Exit_with_status 0)
|
753a63939dcb7d22360c531046efeec8be0b9b11770db0563009c03ccfe3073a
|
kadena-io/chainweaver
|
Modal.hs
|
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE ExtendedDefaultRules #-}
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE FunctionalDependencies #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE ScopedTypeVariables #-}
{-# LANGUAGE TypeFamilies #-}
-- |
-- Copyright   :  (C) 2020-2022 Kadena
-- License : BSD-style (see the file LICENSE)
--
module Frontend.UI.Modal
( Modal
, HasModalCfg (..)
, modalHeader
, modalMain
, modalFooter
) where
------------------------------------------------------------------------------
import Control.Lens hiding (element)
import Reflex
import Reflex.Dom
------------------------------------------------------------------------------
import Frontend.UI.Button
------------------------------------------------------------------------------
-- | Type of modal dialog.
--
-- It is some arbitrary widget, preferably built with `modalHeader`,
-- `modalBody` and `modalFooter`. It provides some config and an `Event`
-- that will trigger close on the dialog.
type Modal baseCfg m t = Event t () -> m (ModalCfg baseCfg t, Event t ())
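-- A minimal sketch (not part of the original module; it assumes the concrete
-- `ModalCfg baseCfg t` in use has a Monoid instance, which this module does
-- not guarantee): a dialog that renders a header and a body and closes only
-- via its own close button.
--
-- helloModal :: (DomBuilder t m, Monoid (ModalCfg baseCfg t)) => Modal baseCfg m t
-- helloModal _onExternalClose = do
--   onClose <- modalHeader (text "Hello")
--   modalMain (text "Some body content")
--   pure (mempty, onClose)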
-- type ModalCfg cfg m t = cfg (Modal (cfg Void t) t) t
-- IdeCfg t modal
-- IdeCfg t (m ())
class HasModalCfg cfg modal t | cfg -> modal where
-- | A config should provide a variant of itself that does not depend on
-- modal for use in the modal.
--
-- E.g. Void
type ModalCfg cfg t
modalCfg_setModal :: Lens' cfg (Event t (Maybe modal))
{- newtype Mu a = Mu {unMu :: a (Mu a)} -}
-- newtype modal = Modal { unModal :: m (cfg modal) }
-- type cfg m = Mu (Modal cfg m)
-- | Create a modal dialog header.
modalHeader
:: forall t m. DomBuilder t m
=> m ()
-- ^ Content of the h2 in the header.
-> m ( Event t ())
-- ^ Close event
modalHeader header = divClass "modal__header" $ do
header
uiButton (def & uiButtonCfg_class .~ "modal__button-close") $
elClass "div" "modal__button-close-x" $ text "x"
-- | Body content. Footer should be added after this.
modalMain :: DomBuilder t m => m a -> m a
modalMain = divClass "modal__main"
-- | Create a modal dialog footer.
modalFooter
:: forall t m a. DomBuilder t m
=> m a
-- ^ The actual footer of the dialog.
-> m a
-- ^ Wrapped up footer
modalFooter = divClass "modal__footer"
| null |
https://raw.githubusercontent.com/kadena-io/chainweaver/5d40e91411995e0a9a7e782d6bb2d89ac1c65d52/frontend/src/Frontend/UI/Modal.hs
|
haskell
|
# LANGUAGE OverloadedStrings #
|
License : BSD-style (see the file LICENSE)
----------------------------------------------------------------------------
----------------------------------------------------------------------------
----------------------------------------------------------------------------
| Type of modal dialog.
It is some arbitrary widget, preferably built with `modalHeader`,
that will trigger close on the dialog.
IdeCfg t modal
| A config should provide a variant of itself that does not depend on
modal for use in the modal.
newtype Mu a = Mu {unMu :: a (Mu a)}
| Create a modal dialog header.
^ Content of the h2 in the header.
^ Close event
| Body content. Footer should be added after this.
| Create a modal dialog footer.
^ The actual footer of the dialog.
^ Wrapped up footer
|
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE ExtendedDefaultRules #-}
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE FunctionalDependencies #-}
{-# LANGUAGE ScopedTypeVariables #-}
{-# LANGUAGE TypeFamilies #-}
-- Copyright   :  (C) 2020-2022 Kadena
module Frontend.UI.Modal
( Modal
, HasModalCfg (..)
, modalHeader
, modalMain
, modalFooter
) where
import Control.Lens hiding (element)
import Reflex
import Reflex.Dom
import Frontend.UI.Button
-- `modalBody` and `modalFooter`. It provides some config and an `Event`
type Modal baseCfg m t = Event t () -> m (ModalCfg baseCfg t, Event t ())
-- type ModalCfg cfg m t = cfg (Modal (cfg Void t) t) t
-- IdeCfg t (m ())
class HasModalCfg cfg modal t | cfg -> modal where
-- E.g. Void
type ModalCfg cfg t
modalCfg_setModal :: Lens' cfg (Event t (Maybe modal))
-- newtype modal = Modal { unModal :: m (cfg modal) }
-- type cfg m = Mu (Modal cfg m)
modalHeader
:: forall t m. DomBuilder t m
=> m ()
-> m ( Event t ())
modalHeader header = divClass "modal__header" $ do
header
uiButton (def & uiButtonCfg_class .~ "modal__button-close") $
elClass "div" "modal__button-close-x" $ text "x"
modalMain :: DomBuilder t m => m a -> m a
modalMain = divClass "modal__main"
modalFooter
:: forall t m a. DomBuilder t m
=> m a
-> m a
modalFooter = divClass "modal__footer"
|
88f84d41a3ba68568bcf2466a13e58cfc0bfb61417d7a03fdc5a074de0743ca1
|
michalkonecny/aern2
|
Array.hs
|
{-# OPTIONS_GHC -fno-warn-orphans #-}
module ERC.Array where
import Debug.Trace (trace)
import Prelude
import Control.Monad.ST.Trans
import Control.Monad.Except
import GHC.TypeLits
import Text.Printf
import ERC.Monad
import ERC.Variables
import ERC.Integer
import ERC.Real
newtype REALn s (n :: Nat) =
REALn { unREALn :: STArray s Integer REAL }
newtype REALnm s (n :: Nat) (m :: Nat) =
REALnm { unREALnm :: REALn s n }
declareREALn :: ERC s (REALn s n) -> ERC s (Var s (REALn s n))
declareREALn aERC =
do
a <- checkA $ aERC
newSTRef a
declareREALnm :: ERC s (REALnm s n m) -> ERC s (Var s (REALnm s n m))
declareREALnm aERC =
do
a <- checkAA $ aERC
newSTRef a
____traceREALn :: (KnownNat n) => String -> ERC s (REALn s n) -> ERC s ()
____traceREALn label aERC =
do
aa@(REALn a) <- aERC
let n = natVal aa
items <- mapM (readSTArray a) [0..(n - 1)]
trace (label ++ show items) $ pure ()
____traceREALnm :: (KnownNat n, KnownNat m) => String -> ERC s (REALnm s n m) -> ERC s ()
____traceREALnm label aERC =
do
aaa@(REALnm aa@(REALn a)) <- aERC
let n = natVal aa
let m = natVal aaa
items <- mapM (\j -> mapM (readSTArray a) [n*j .. (n*j+n-1)]) [0..(m - 1)]
trace (label ++ show items) $ pure ()
array :: (KnownNat n) => [ERC s REAL] -> ERC s (REALn s n)
array itemsERC =
do
items <- sequence itemsERC
let n = fromIntegral (length items) :: Integer
a <- newSTArray (0, n - 1) 0
let aa = REALn a
let an = natVal aa
case an == n of
True -> pure ()
False -> error $ printf "array literal length (%d) does not match the array size (%d)" n an
zipWithM_ (writeSTArray a) [0..] items -- initialise the array using items
return aa
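-- Illustrative usage (not in the original source; it relies on the Num
-- instance of REAL already used by the newSTArray initialiser above): the
-- type-level length must match the literal, otherwise the runtime check
-- errors out.
--
-- threeReals :: ERC s (REALn s 3)
-- threeReals = array [pure 1, pure 2, pure 3]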
array2D :: (KnownNat n, KnownNat m) => [[ERC s REAL]] -> ERC s (REALnm s n m)
array2D [] = error "2D array literal must not be empty"
array2D itemsERC
  | not allEqualLength = error "2D array literal should have rows of equal lengths"
| otherwise =
do
items <- sequence (concat itemsERC)
a <- newSTArray (0, n*m - 1) 0
let aa = REALn a
let an = natVal aa
let aaa = REALnm aa
let am = natVal aaa
case am == m && an == n of
True -> pure ()
False -> error $ printf "2D array literal length (%d * %d) does not match the array size (%d * %d)" n m an am
zipWithM_ (writeSTArray a) [0..] items -- initialise the array using items
return aaa
where
n_ = length (head itemsERC)
m_ = length itemsERC
allEqualLength = all (== n_) (map length itemsERC)
n = fromIntegral n_ :: Integer
m = fromIntegral m_ :: Integer
arrayLength :: (KnownNat n) => ERC s (REALn s n) -> ERC s INTEGER
arrayLength aERC =
do
aa <- checkA aERC
pure (natVal aa)
array2DLength1 :: (KnownNat n) => ERC s (REALnm s n m) -> ERC s INTEGER
array2DLength1 aERC =
do
(REALnm aa) <- checkAA aERC
pure (natVal aa)
array2DLength2 :: (KnownNat m) => ERC s (REALnm s n m) -> ERC s INTEGER
array2DLength2 aERC =
do
aa <- checkAA aERC
pure (natVal aa)
arrayLookup, (?!) :: (KnownNat n) => ERC s (REALn s n) -> [ERC s INTEGER] -> ERC s REAL
arrayLookup _ [] =
error "arrayLookup: missing index"
arrayLookup aERC [iERC] =
do
aa@(REALn a) <- aERC
i <- checkI iERC
case 0 <= i && i < natVal aa of
True -> pure ()
False -> error "arrayLookup: index out of bounds"
checkR $ readSTArray a i
arrayLookup _ _ = error "arrayLookup: supporting only 1-dimensional arrays"
(?!) = arrayLookup
infix 9 ?!
array2DLookup, (?!!) :: (KnownNat n, KnownNat m) => ERC s (REALnm s n m) -> [ERC s INTEGER] -> ERC s REAL
array2DLookup _ [] =
error "array2DLookup: missing index"
array2DLookup _ [_] =
error "array2DLookup: missing second index"
array2DLookup aERC [iERC, jERC] =
do
aaa@(REALnm aa@(REALn a)) <- aERC
i <- checkI iERC
j <- checkI jERC
let di = natVal aa
let dj = natVal aaa
case 0 <= i && i < di && 0 <= j && j < dj of
True -> pure ()
False -> error "array2DLookup: index out of bounds"
checkR $ readSTArray a (i + j*di)
array2DLookup _ _ = error "array2DLookup: supporting only 2-dimensional arrays"
(?!!) = array2DLookup
infix 9 ?!!
arrayUpdate, (.=!) :: (KnownNat n) => (ERC s (REALn s n), [ERC s INTEGER]) -> ERC s REAL -> ERC s ()
arrayUpdate (_,[]) _ =
error "arrayUpdate: missing index"
arrayUpdate (aERC,[iERC]) rERC =
do
aa@(REALn a) <- aERC
i <- checkI iERC
case 0 <= i && i < natVal aa of
True -> pure ()
False -> error "arrayUpdate: index out of bounds"
r <- checkR rERC
ifInvalidUseDummy () $ writeSTArray a i r
arrayUpdate _ _ = error "arrayUpdate: too many indices"
(.=!) = arrayUpdate
infix 1 .=!
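-- Illustrative usage of the lookup/update operators (not in the original
-- source; it assumes INTEGER literals via pure, as in the index checks above):
-- read index 0 and overwrite index 1 of a length-2 array.
--
-- copyFirstToSecond :: ERC s (REALn s 2) -> ERC s ()
-- copyFirstToSecond aERC = do
--   x <- aERC ?! [pure 0]
--   (aERC, [pure 1]) .=! pure x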
array2DUpdate, (.=!!) :: (KnownNat n, KnownNat m) => (ERC s (REALnm s n m), [ERC s INTEGER]) -> ERC s REAL -> ERC s ()
array2DUpdate (_,[]) _ =
error "array2DUpdate: missing index"
array2DUpdate (_,[_]) _ =
error "array2DUpdate: missing second index"
array2DUpdate (aERC,[iERC,jERC]) rERC =
do
aaa@(REALnm aa@(REALn a)) <- aERC
i <- checkI iERC
j <- checkI jERC
let di = natVal aa
let dj = natVal aaa
case 0 <= i && i < di && 0 <= j && j < dj of
True -> pure ()
False -> error "array2DUpdate: index out of bounds"
r <- checkR rERC
ifInvalidUseDummy () $ writeSTArray a (i + j*di) r
array2DUpdate _ _ = error "array2DUpdate: too many indices"
(.=!!) = array2DUpdate
infix 1 .=!!
arraySwap ::
(KnownNat n1, KnownNat n2) =>
(ERC s (REALn s n1), [ERC s INTEGER], ERC s (REALn s n2), [ERC s INTEGER]) ->
ERC s ()
arraySwap (a1,ix1,a2,ix2) =
do
temp <- a1?!ix1
(a1,ix1) .=! a2?!ix2
(a2,ix2) .=! (pure temp)
array2DSwap ::
(KnownNat n1, KnownNat m1, KnownNat n2, KnownNat m2) =>
(ERC s (REALnm s n1 m1), [ERC s INTEGER], ERC s (REALnm s n2 m2), [ERC s INTEGER]) ->
ERC s ()
array2DSwap (a1,ix1,a2,ix2) =
do
temp <- a1?!!ix1
(a1,ix1) .=!! a2?!!ix2
(a2,ix2) .=!! (pure temp)
checkA :: ERC s (REALn s n) -> ERC s (REALn s n)
checkA aERC =
do
dummyA <- error "accessing a non-existent dummy array"
ifInvalidUseDummy dummyA aERC
checkAA :: ERC s (REALnm s n m) -> ERC s (REALnm s n m)
checkAA aERC =
ifInvalidUseDummy dummyA aERC
where
dummyA = error "accessing a non-existent dummy array"
| null |
https://raw.githubusercontent.com/michalkonecny/aern2/1c8f12dfcb287bd8e3353802a94865d7c2c121ec/aern2-erc/src/ERC/Array.hs
|
haskell
|
initialise the array using items
initialise the array using items
|
{-# OPTIONS_GHC -fno-warn-orphans #-}
module ERC.Array where
import Debug.Trace (trace)
import Prelude
import Control.Monad.ST.Trans
import Control.Monad.Except
import GHC.TypeLits
import Text.Printf
import ERC.Monad
import ERC.Variables
import ERC.Integer
import ERC.Real
newtype REALn s (n :: Nat) =
REALn { unREALn :: STArray s Integer REAL }
newtype REALnm s (n :: Nat) (m :: Nat) =
REALnm { unREALnm :: REALn s n }
declareREALn :: ERC s (REALn s n) -> ERC s (Var s (REALn s n))
declareREALn aERC =
do
a <- checkA $ aERC
newSTRef a
declareREALnm :: ERC s (REALnm s n m) -> ERC s (Var s (REALnm s n m))
declareREALnm aERC =
do
a <- checkAA $ aERC
newSTRef a
____traceREALn :: (KnownNat n) => String -> ERC s (REALn s n) -> ERC s ()
____traceREALn label aERC =
do
aa@(REALn a) <- aERC
let n = natVal aa
items <- mapM (readSTArray a) [0..(n - 1)]
trace (label ++ show items) $ pure ()
____traceREALnm :: (KnownNat n, KnownNat m) => String -> ERC s (REALnm s n m) -> ERC s ()
____traceREALnm label aERC =
do
aaa@(REALnm aa@(REALn a)) <- aERC
let n = natVal aa
let m = natVal aaa
items <- mapM (\j -> mapM (readSTArray a) [n*j .. (n*j+n-1)]) [0..(m - 1)]
trace (label ++ show items) $ pure ()
array :: (KnownNat n) => [ERC s REAL] -> ERC s (REALn s n)
array itemsERC =
do
items <- sequence itemsERC
let n = fromIntegral (length items) :: Integer
a <- newSTArray (0, n - 1) 0
let aa = REALn a
let an = natVal aa
case an == n of
True -> pure ()
False -> error $ printf "array literal length (%d) does not match the array size (%d)" n an
return aa
array2D :: (KnownNat n, KnownNat m) => [[ERC s REAL]] -> ERC s (REALnm s n m)
array2D [] = error "2D array literal must not be empty"
array2D itemsERC
  | not allEqualLength = error "2D array literal should have rows of equal lengths"
| otherwise =
do
items <- sequence (concat itemsERC)
a <- newSTArray (0, n*m - 1) 0
let aa = REALn a
let an = natVal aa
let aaa = REALnm aa
let am = natVal aaa
case am == m && an == n of
True -> pure ()
False -> error $ printf "2D array literal length (%d * %d) does not match the array size (%d * %d)" n m an am
return aaa
where
n_ = length (head itemsERC)
m_ = length itemsERC
allEqualLength = all (== n_) (map length itemsERC)
n = fromIntegral n_ :: Integer
m = fromIntegral m_ :: Integer
arrayLength :: (KnownNat n) => ERC s (REALn s n) -> ERC s INTEGER
arrayLength aERC =
do
aa <- checkA aERC
pure (natVal aa)
array2DLength1 :: (KnownNat n) => ERC s (REALnm s n m) -> ERC s INTEGER
array2DLength1 aERC =
do
(REALnm aa) <- checkAA aERC
pure (natVal aa)
array2DLength2 :: (KnownNat m) => ERC s (REALnm s n m) -> ERC s INTEGER
array2DLength2 aERC =
do
aa <- checkAA aERC
pure (natVal aa)
arrayLookup, (?!) :: (KnownNat n) => ERC s (REALn s n) -> [ERC s INTEGER] -> ERC s REAL
arrayLookup _ [] =
error "arrayLookup: missing index"
arrayLookup aERC [iERC] =
do
aa@(REALn a) <- aERC
i <- checkI iERC
case 0 <= i && i < natVal aa of
True -> pure ()
False -> error "arrayLookup: index out of bounds"
checkR $ readSTArray a i
arrayLookup _ _ = error "arrayLookup: supporting only 1-dimensional arrays"
(?!) = arrayLookup
infix 9 ?!
array2DLookup, (?!!) :: (KnownNat n, KnownNat m) => ERC s (REALnm s n m) -> [ERC s INTEGER] -> ERC s REAL
array2DLookup _ [] =
error "array2DLookup: missing index"
array2DLookup _ [_] =
error "array2DLookup: missing second index"
array2DLookup aERC [iERC, jERC] =
do
aaa@(REALnm aa@(REALn a)) <- aERC
i <- checkI iERC
j <- checkI jERC
let di = natVal aa
let dj = natVal aaa
case 0 <= i && i < di && 0 <= j && j < dj of
True -> pure ()
False -> error "array2DLookup: index out of bounds"
checkR $ readSTArray a (i + j*di)
array2DLookup _ _ = error "array2DLookup: supporting only 2-dimensional arrays"
(?!!) = array2DLookup
infix 9 ?!!
arrayUpdate, (.=!) :: (KnownNat n) => (ERC s (REALn s n), [ERC s INTEGER]) -> ERC s REAL -> ERC s ()
arrayUpdate (_,[]) _ =
error "arrayUpdate: missing index"
arrayUpdate (aERC,[iERC]) rERC =
do
aa@(REALn a) <- aERC
i <- checkI iERC
case 0 <= i && i < natVal aa of
True -> pure ()
False -> error "arrayUpdate: index out of bounds"
r <- checkR rERC
ifInvalidUseDummy () $ writeSTArray a i r
arrayUpdate _ _ = error "arrayUpdate: too many indices"
(.=!) = arrayUpdate
infix 1 .=!
array2DUpdate, (.=!!) :: (KnownNat n, KnownNat m) => (ERC s (REALnm s n m), [ERC s INTEGER]) -> ERC s REAL -> ERC s ()
array2DUpdate (_,[]) _ =
error "array2DUpdate: missing index"
array2DUpdate (_,[_]) _ =
error "array2DUpdate: missing second index"
array2DUpdate (aERC,[iERC,jERC]) rERC =
do
aaa@(REALnm aa@(REALn a)) <- aERC
i <- checkI iERC
j <- checkI jERC
let di = natVal aa
let dj = natVal aaa
case 0 <= i && i < di && 0 <= j && j < dj of
True -> pure ()
False -> error "array2DUpdate: index out of bounds"
r <- checkR rERC
ifInvalidUseDummy () $ writeSTArray a (i + j*di) r
array2DUpdate _ _ = error "array2DUpdate: too many indices"
(.=!!) = array2DUpdate
infix 1 .=!!
arraySwap ::
(KnownNat n1, KnownNat n2) =>
(ERC s (REALn s n1), [ERC s INTEGER], ERC s (REALn s n2), [ERC s INTEGER]) ->
ERC s ()
arraySwap (a1,ix1,a2,ix2) =
do
temp <- a1?!ix1
(a1,ix1) .=! a2?!ix2
(a2,ix2) .=! (pure temp)
array2DSwap ::
(KnownNat n1, KnownNat m1, KnownNat n2, KnownNat m2) =>
(ERC s (REALnm s n1 m1), [ERC s INTEGER], ERC s (REALnm s n2 m2), [ERC s INTEGER]) ->
ERC s ()
array2DSwap (a1,ix1,a2,ix2) =
do
temp <- a1?!!ix1
(a1,ix1) .=!! a2?!!ix2
(a2,ix2) .=!! (pure temp)
checkA :: ERC s (REALn s n) -> ERC s (REALn s n)
checkA aERC =
do
dummyA <- error "accessing a non-existent dummy array"
ifInvalidUseDummy dummyA aERC
checkAA :: ERC s (REALnm s n m) -> ERC s (REALnm s n m)
checkAA aERC =
ifInvalidUseDummy dummyA aERC
where
dummyA = error "accessing a non-existent dummy array"
|
fa9879d5bdd47c0814a19fee96fe0762d8dd040b01f35df9930d633c438f40b7
|
sqor/riak-tools
|
riak_tools.erl
|
-module('riak_tools').
-export([file_link/2, file_link_targets/2, main/1, backup_script/0, multipart_upload/4, s3_upload/2, read_file/2, tar/1, tar/2, tar_riakdb/0, s3_small_upload/2, get_month_name/0, get_weekly_name/0, get_wday_name/0]).
-compile(export_all).
-define('VERSION', "1.0.0").
-define('ACCESS_KEY', "AAAAAAAAAAAAAAAAAAAAA").
-define('SECRET_ACCESS_KEY', "XXXXXXXXXXXXXXXXXXXXX").
-define('S3_BUCKET', "my-s3-bucket").
-define('RIAK_DB', ["/etc/riak","/var/lib/riak"]).
-define('CHUNK_SIZE', (15 * 1024 * 1024)).
-define('RIAK_RUNNING', "riak is running").
-define('RIAK_STOPPED', "riak is stopped").
-define('AWS_ELB', "my-elb").
-define('BACKUP_DIR', ".").
-define (USAGE_TEMPLATE, "usage: riak-tools [backup|get|elb-register|elb-deregister|riak-start|riak-stop]~n").
-define (USAGE_DATA, []).
backup_script() ->
io:format("~s backup dir=~p ~n", [ts(),yaml_get("BACKUP_DIR",?BACKUP_DIR)]),
file:set_cwd(yaml_get("BACKUP_DIR",?BACKUP_DIR)),
io:format("~s pwd=~p ~n", [ts(),file:get_cwd()]),
assert_elb(),
TGZName = get_gzip_name(),
stop_riak(),
tar_riakdb(),
start_riak(),
TargetList = [get_wday_name(), get_month_name()],
file_link_targets( TGZName, TargetList ),
s3_upload( TGZName, TGZName ).
stop_riak() ->
assert_running(),
deregister_with_elb(),
io:format("~s stopping riak ~n", [ts()]),
os:cmd("/usr/sbin/riak stop"),
assert_stopped().
start_riak() ->
io:format("~s starting riak ~n", [ts()]),
os:cmd("/usr/sbin/riak start"),
assert_running(),
register_with_elb(),
assert_elb(),
io:format("~s riak running ~n", [ts()]).
register_with_elb() ->
I = get_instanceid(),
io:format("~s register elb=~s InstanceId=~s ~n", [ts(), yaml_get("AWS_ELB",?AWS_ELB), I]),
erlcloud_elb:configure(yaml_get("ACCESS_KEY",?ACCESS_KEY), yaml_get("SECRET_ACCESS_KEY",?SECRET_ACCESS_KEY)),
R = erlcloud_elb:register_instance(yaml_get("AWS_ELB",?AWS_ELB),I),
io:format("~s register reply=~p ~n", [ts(), R]).
deregister_with_elb() ->
I = get_instanceid(),
io:format("~s deregister elb=~s InstanceId=~s ~n", [ts(), yaml_get("AWS_ELB",?AWS_ELB), I]),
erlcloud_elb:configure(yaml_get("ACCESS_KEY",?ACCESS_KEY), yaml_get("SECRET_ACCESS_KEY",?SECRET_ACCESS_KEY)),
R = erlcloud_elb:deregister_instance(yaml_get("AWS_ELB",?AWS_ELB),I),
io:format("~s deregister reply=~p ~n", [ts(), R]).
get_instanceid() ->
Config = erlcloud_aws:default_config(),
case erlcloud_aws:http_body(
erlcloud_httpc:request(
"-data/instance-id/",
get, [], <<>>, 10000, Config)) of
{error, Reason} ->
{error, Reason};
{ok, Body} ->
binary_to_list(Body)
end.
ts() ->
TS = {_,_,Micro} = os:timestamp(),
{{Year,Month,Day},{Hour,Minute,Second}} =
calendar:now_to_universal_time(TS),
Mstr = element(Month,{"Jan","Feb","Mar","Apr","May","Jun","Jul",
"Aug","Sep","Oct","Nov","Dec"}),
io_lib:format("~2w ~s ~4w ~2w:~2..0w:~2..0w.~6..0w",
[Day,Mstr,Year,Hour,Minute,Second,Micro]).
riak_status() ->
os:cmd("/etc/init.d/riak status").
status_running(S) ->
case contains(S,"such file") of
[] -> false;
_ -> true
end.
assert_elb() ->
% Validate this instance is registered to the ELB.
erlcloud_elb:configure(yaml_get("ACCESS_KEY",?ACCESS_KEY), yaml_get("SECRET_ACCESS_KEY",?SECRET_ACCESS_KEY)),
R = erlcloud_elb:describe_load_balancer("SQOR-RIAK-DEV-VPC"),
ELB = lists:flatten(io_lib:format("~p", [R])),
I = get_instanceid(),
assert_string(ELB,I),
io:format("~s ELB registered with InstanceId=~s ~n", [ts(), I]).
assert_running() ->
Status = riak_status(),
R = contains(Status,?RIAK_RUNNING),
case R of
[] -> erlang:error(riak_not_running);
_ -> true
end.
assert_stopped() ->
Status = riak_status(),
R = contains(Status,?RIAK_STOPPED),
case R of
[] -> erlang:error(riak_not_stopped);
_ -> true
end.
assert_string(S1,S2) ->
I = string:str(S1,S2),
case I of
0 -> erlang:error(not_registered_with_elb);
_ -> true
end.
starts_with(S1,S2) ->
io : format("sw ~5s ~s ~n " , [ S1,S2 ] ) ,
lists:prefix(S2,S1).
contains(S1,S2) ->
%io:format("begin ~p ~p ~n", [length(S1),length(S2)]),
contains(S1,S2,1,[]).
%% While S1 is at least as long as S2 we check if S2 is its prefix,
%% storing the result if it is. When S1 is shorter than S2, we return
the result . NB : this will work on any arbitrary list in erlang
%% since it makes no distinction between strings and lists.
contains([_|T]=S1,S2,N,Acc) when length(S2) =< length(S1) ->
%io:format("contains ~p ~n", [N]),
case starts_with(S1,S2) of
true ->
contains(T,S2,N+1,[N|Acc]);
false ->
contains(T,S2,N+1,Acc)
end;
contains(_S1,_S2,_N,Acc) ->
Acc.
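%% Illustrative example (not in the original source): the accumulator collects
%% 1-based match positions in reverse order of discovery, e.g.
%% contains("abcabc", "bc") =:= [5,2] and contains("abc", "zz") =:= [].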
hostname() ->
H = os:cmd("hostname"),
string:strip(H,right,$\n).
% nodename() ->
% atom_to_list(node()).
get_gzip_name() ->
S1 = hostname(),
S1 ++ "-riakdb.tgz".
get_month_name() ->
{{_,Month,_},{_,_,_}} = erlang:localtime(),
get_gzip_name() ++ ".month." ++ erlang:integer_to_list(Month).
get_weekly_name() ->
{_,Week} = calendar:iso_week_number(),
get_gzip_name() ++ ".week." ++ erlang:integer_to_list(Week).
get_wday_name() ->
WeekDay = calendar:day_of_the_week(date()),
get_gzip_name() ++ ".wday." ++ erlang:integer_to_list(WeekDay).
s3_small_upload(Key, File) ->
R = file:read_file(File),
{ok, Binary} = R,
s3_upload_single(Key, Binary).
s3_upload_single(Key, Value) ->
A = erlcloud_s3:configure(yaml_get("ACCESS_KEY",?ACCESS_KEY), yaml_get("SECRET_ACCESS_KEY",?SECRET_ACCESS_KEY)),
error_logger:info_msg("~p:~p Settng up AWS ~p to S3 ~n",
[?MODULE, ?LINE, A]),
%R = erlcloud_s3:put_object(yaml_get("S3_BUCKET",?S3_BUCKET), Key, Value, [], [{"Content-type", "application/x-gzip"}]),
R = erlcloud_s3:put_object(yaml_get("S3_BUCKET",?S3_BUCKET), Key, Value),
error_logger:info_msg("~p:~p Uploaded File ~p to S3 ~n", [?MODULE, ?LINE, R]),
{ok, R}.
s3_upload(Key, Path) ->
{ok, IoDevice} = file:open(Path,[raw]),
multipart_upload(Key, IoDevice, "Config", ?CHUNK_SIZE).
read_file(IoDevice,Count) ->
case file:read(IoDevice, Count) of
eof ->
{eof, 1};
_ ->
{eof, 2}
end.
multipart_upload(Key, IoDevice, Config, Count) ->
erlcloud_s3:configure(yaml_get("ACCESS_KEY",?ACCESS_KEY), yaml_get("SECRET_ACCESS_KEY",?SECRET_ACCESS_KEY)),
{_,[{_,UploadId}]} = erlcloud_s3:start_multipart(yaml_get("S3_BUCKET",?S3_BUCKET), Key),
io:format("~s S3 UploadId=~p ~n", [ts(), UploadId]),
upload_parts(Key, IoDevice, UploadId, Config, 1, Count, []).
upload_parts(Key, _IoDevice, UploadId, _Config, PartCount, 0, Parts) ->
io:format("~s S3 Multipart Complete. S3Key=~p PartCount=~p ~n", [ts(), Key,PartCount]),
A = erlcloud_s3:complete_multipart(yaml_get("S3_BUCKET",?S3_BUCKET), Key, UploadId, lists:reverse(Parts)),
{ok,A};
upload_parts(Key, _Device, UploadId, _Config, PartCount, -1, _Parts) ->
io:format("~s Error. S3 Abort Multipart. S3Key=~p Part=~p ~n", [ts(), Key,PartCount]),
A = erlcloud_s3:abort_multipart(yaml_get("S3_BUCKET",?S3_BUCKET), Key, UploadId),
{err,A};
upload_parts(Key, IoDevice, UploadId, Config, PartCount, Count, Parts) ->
io:format("~s S3 upload_part. S3Key=~p Part=~p ~n", [ts(), Key,PartCount]),
case file:read(IoDevice, Count) of
{ok,Data} ->
%error_logger:info_msg("OK ~n"),
%A = erlcloud_s3:upload_part(yaml_get("S3_BUCKET",?S3_BUCKET), Key, UploadId, PartCount, Data),
A = s3_part_retry(yaml_get("S3_BUCKET",?S3_BUCKET), Key, UploadId, PartCount, Data, 5),
{ok,[{etag,PartEtag}]} = A,
upload_parts(Key, IoDevice, UploadId, Config, PartCount + 1, Count, [{PartCount, PartEtag} | Parts]);
eof ->
%error_logger:info_msg("eof ~n"),
upload_parts(Key, IoDevice, UploadId, Config, PartCount, 0, Parts);
_ ->
%error_logger:info_msg("_ ~n"),
upload_parts(Key, IoDevice, UploadId, Config, PartCount + 1, -1, Parts)
end.
s3_part_retry(S3_BUCKET, Key, UploadId, PartCount, Data, 0) ->
erlcloud_s3:upload_part(S3_BUCKET, Key, UploadId, PartCount, Data);
s3_part_retry(S3_BUCKET, Key, UploadId, PartCount, Data, RetriesLeft) ->
R = erlcloud_s3:upload_part(S3_BUCKET, Key, UploadId, PartCount, Data),
case R of
% handle socket timeout
{error,{socket_error,timeout}} ->
io:format("catch: error: {error,{socket_error,timeout}} ~n"), % dbg only
% retry sending message, after 1 second
timer:sleep(1000),
s3_part_retry(S3_BUCKET, Key, UploadId, PartCount, Data, RetriesLeft - 1);
{ok,_} ->
R;
_ ->
io:format("catch: unknown error: ~p ~n", [R]),
R
end.
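%% Illustrative call (not in the original source): tolerate up to five
%% {error,{socket_error,timeout}} results, sleeping 1s between attempts, before
%% the final unguarded attempt; on success it returns the {ok,[{etag,_}]} shape
%% that upload_parts/7 pattern matches on:
%% {ok,[{etag,Etag}]} = s3_part_retry("my-s3-bucket", Key, UploadId, 1, Data, 5).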
tar_riakdb() ->
io:format("~s tar leveldb=~p ~n", [ts(), ?RIAK_DB]),
Tar_Name = get_gzip_name(),
tar(?RIAK_DB,Tar_Name).
tar(File) ->
TarName = File ++ ".tgz",
DelCmd = "rm " ++ TarName,
os:cmd(DelCmd),
erl_tar:create(TarName,[File],[compressed]).
tar(FileList,TarName) ->
DelCmd = "rm " ++ TarName,
os:cmd(DelCmd),
erl_tar:create(TarName,FileList,[compressed]).
file_link(Src,Target) ->
Cmd = "ln -f " ++ Src ++ " " ++ Target,
os:cmd(Cmd),
{ok}.
file_link_targets(Src,TargetList) ->
lists:foreach(fun(E) ->
file_link(Src,E)
end, TargetList).
%
% ToDo: Implement fetch multiple chunks download via Range HTTP header
%
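% s3_get/1 fetches an object from the configured bucket and writes it locally under the key name.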
s3_get(Key) ->
io:format("s3get key=~p ~n", [Key]),
erlcloud_s3:configure(yaml_get("ACCESS_KEY",?ACCESS_KEY), yaml_get("SECRET_ACCESS_KEY",?SECRET_ACCESS_KEY)),
R = erlcloud_s3:get_object(yaml_get("S3_BUCKET",?S3_BUCKET), Key),
{content,Data} = lists:keyfind(content,1,R),
file:write_file(Key, Data).
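%% yaml_get/1 looks a key up in .riak-tools.yaml and crashes if it is absent; yaml_get/2 returns Default on any failure instead.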
yaml_get(Key) ->
io:format("~s get yaml key=~p ~n", [ts(), Key]),
[Yaml] = yamerl_constr:file(".riak-tools.yaml"),
YamlDict = dict:from_list(Yaml),
{ok,Value} = dict:find(Key,YamlDict),
Value.
yaml_get(Key,Default) ->
try yaml_get(Key) of
Value -> Value
catch
_:_ -> Default
end.
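%% handle_backup/1 runs the full backup via backup_script/0: deregister from the ELB, stop riak, tar the database, restart riak, upload to S3.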
handle_backup([]) ->
io:format("~s start riak leveldb backup host=~p ~n", [ts(), hostname()]),
backup_script(),
io:format("~s backup_complete host=~p ~n", [ts(), hostname()]);
handle_backup([_Arg|_Args]) ->
handle_backup([]).
handle_get([]) ->
s3_get(get_gzip_name());
handle_get([Arg|Args]) ->
io:format("Arg=~p Args=~p ~n", [Arg,Args]),
s3_get(Arg).
handle_status() ->
os:cmd("chmod 0600 .riak-tools.yaml"),
Key = "S3_BUCKETX",
Value = yaml_get(Key,?S3_BUCKET),
io:format("Yaml ~p ~n", [Value]).
%R = riak_status(),
%io:format("Status ~p ~n", [R]),
%I = get_instanceid(),
%io:format("InstanceId ~p ~n", [I]),
%assert_elb().
handle_elb() ->
erlcloud_elb:configure(yaml_get("ACCESS_KEY",?ACCESS_KEY), yaml_get("SECRET_ACCESS_KEY",?SECRET_ACCESS_KEY)),
R = erlcloud_elb:describe_load_balancer(yaml_get("AWS_ELB",?AWS_ELB)),
ELB = lists:flatten(io_lib:format("~p", [R])),
I = get_instanceid(),
io:format("InstanceId ~s ~n", [I]),
io : format("ELB ~s ~n " , [ ELB ] ) ,
assert_string(ELB,I),
io:format("elb!~n").
%% escript --------------------------------------------------------------------------------
main([]) ->
usage();
main([MapType|Args]) ->
ssl:start(),
erlcloud:start(),
application:start(yamerl),
os:cmd("chmod 0600 .riak-tools.yaml"),
case MapType of
"backup" ->
handle_backup(Args);
"get" ->
handle_get(Args);
"hello" ->
io:format("Args ~p ~n", [Args]);
"status" ->
handle_status();
"elb-register" ->
register_with_elb();
"elb-deregister" ->
deregister_with_elb();
"riak-start" ->
start_riak();
"riak-stop" ->
stop_riak();
"version" ->
io:format("riak-tools ~p ~n", [?VERSION]);
_ ->
usage()
end.
usage() ->
io:format(?USAGE_TEMPLATE, ?USAGE_DATA).
%% ----------------------------------------------------------------------------------------
| null |
https://raw.githubusercontent.com/sqor/riak-tools/1d6c964bf789711534f078ba8c8a600a6ab4eda3/src/riak_tools.erl
|
erlang
|
io:format("begin ~p ~p ~n", [length(S1),length(S2)]),
While S1 is at least as long as S2 we check if S2 is its prefix,
storing the result if it is. When S1 is shorter than S2, we return
the result. NB: this will work on any arbitrary list in erlang since it makes no distinction between strings and lists.
io:format("contains ~p ~n", [N]),
nodename() ->
atom_to_list(node()).
error_logger:info_msg("OK ~n"),
error_logger:info_msg("eof ~n"),
error_logger:info_msg("_ ~n"),
handle socket timeout
dbg only
ToDo: Implement fetch multiple chunks download via Range HTTP header
R = riak_status(),
io:format("Status ~p ~n", [R]),
I = get_instanceid(),
io:format("InstanceId ~p ~n", [I]),
escript --------------------------------------------------------------------------------
----------------------------------------------------------------------------------------
|
-module('riak_tools').
%-export([file_link/2, file_link_targets/2, main/1, backup_script/0, multipart_upload/4, s3_upload/2, read_file/2, tar/1, tar/2, tar_riakdb/0, s3_small_upload/2, , get_month_name/0, get_weekly_name/0, get_wday_name/0]).
-compile(export_all).
-define('VERSION', "1.0.0").
-define('ACCESS_KEY', "AAAAAAAAAAAAAAAAAAAAA").
-define('SECRET_ACCESS_KEY', "XXXXXXXXXXXXXXXXXXXXX").
-define('S3_BUCKET', "my-s3-bucket").
-define('RIAK_DB', ["/etc/riak","/var/lib/riak"]).
-define('CHUNK_SIZE', (15 * 1024 * 1024)).
-define('RIAK_RUNNING', "riak is running").
-define('RIAK_STOPPED', "riak is stopped").
-define('AWS_ELB', "my-elb").
-define('BACKUP_DIR', ".").
-define (USAGE_TEMPLATE, "usage: riak-tools [backup|get|elb-register|elb-deregister|riak-start|riak-stop]~n").
-define (USAGE_DATA, []).
backup_script() ->
io:format("~s backup dir=~p ~n", [ts(),yaml_get("BACKUP_DIR",?BACKUP_DIR)]),
file:set_cwd(yaml_get("BACKUP_DIR",?BACKUP_DIR)),
io:format("~s pwd=~p ~n", [ts(),file:get_cwd()]),
assert_elb(),
TGZName = get_gzip_name(),
stop_riak(),
tar_riakdb(),
start_riak(),
TargetList = [get_wday_name(), get_month_name()],
file_link_targets( TGZName, TargetList ),
s3_upload( TGZName, TGZName ).
stop_riak() ->
assert_running(),
deregister_with_elb(),
io:format("~s stopping riak ~n", [ts()]),
os:cmd("/usr/sbin/riak stop"),
assert_stopped().
start_riak() ->
io:format("~s starting riak ~n", [ts()]),
os:cmd("/usr/sbin/riak start"),
assert_running(),
register_with_elb(),
assert_elb(),
io:format("~s riak running ~n", [ts()]).
register_with_elb() ->
I = get_instanceid(),
io:format("~s register elb=~s InstanceId=~s ~n", [ts(), yaml_get("AWS_ELB",?AWS_ELB), I]),
erlcloud_elb:configure(yaml_get("ACCESS_KEY",?ACCESS_KEY), yaml_get("SECRET_ACCESS_KEY",?SECRET_ACCESS_KEY)),
R = erlcloud_elb:register_instance(yaml_get("AWS_ELB",?AWS_ELB),I),
io:format("~s register reply=~p ~n", [ts(), R]).
deregister_with_elb() ->
I = get_instanceid(),
io:format("~s deregister elb=~s InstanceId=~s ~n", [ts(), yaml_get("AWS_ELB",?AWS_ELB), I]),
erlcloud_elb:configure(yaml_get("ACCESS_KEY",?ACCESS_KEY), yaml_get("SECRET_ACCESS_KEY",?SECRET_ACCESS_KEY)),
R = erlcloud_elb:deregister_instance(yaml_get("AWS_ELB",?AWS_ELB),I),
io:format("~s deregister reply=~p ~n", [ts(), R]).
get_instanceid() ->
Config = erlcloud_aws:default_config(),
case erlcloud_aws:http_body(
erlcloud_httpc:request(
"-data/instance-id/",
get, [], <<>>, 10000, Config)) of
{error, Reason} ->
{error, Reason};
{ok, Body} ->
binary_to_list(Body)
end.
ts() ->
TS = {_,_,Micro} = os:timestamp(),
{{Year,Month,Day},{Hour,Minute,Second}} =
calendar:now_to_universal_time(TS),
Mstr = element(Month,{"Jan","Feb","Mar","Apr","May","Jun","Jul",
"Aug","Sep","Oct","Nov","Dec"}),
io_lib:format("~2w ~s ~4w ~2w:~2..0w:~2..0w.~6..0w",
[Day,Mstr,Year,Hour,Minute,Second,Micro]).
riak_status() ->
os:cmd("/etc/init.d/riak status").
status_running(S) ->
case contains(S,"such file") of
[] -> false;
_ -> true
end.
assert_elb() ->
% Validate this instance is registered to the ELB.
erlcloud_elb:configure(yaml_get("ACCESS_KEY",?ACCESS_KEY), yaml_get("SECRET_ACCESS_KEY",?SECRET_ACCESS_KEY)),
R = erlcloud_elb:describe_load_balancer("SQOR-RIAK-DEV-VPC"),
ELB = lists:flatten(io_lib:format("~p", [R])),
I = get_instanceid(),
assert_string(ELB,I),
io:format("~s ELB registered with InstanceId=~s ~n", [ts(), I]).
assert_running() ->
Status = riak_status(),
R = contains(Status,?RIAK_RUNNING),
case R of
[] -> erlang:error(riak_not_running);
_ -> true
end.
assert_stopped() ->
Status = riak_status(),
R = contains(Status,?RIAK_STOPPED),
case R of
[] -> erlang:error(riak_not_stopped);
_ -> true
end.
assert_string(S1,S2) ->
I = string:str(S1,S2),
case I of
0 -> erlang:error(not_registered_with_elb);
_ -> true
end.
starts_with(S1,S2) ->
io : format("sw ~5s ~s ~n " , [ S1,S2 ] ) ,
lists:prefix(S2,S1).
contains(S1,S2) ->
contains(S1,S2,1,[]).
% the result. NB: this will work on any arbitrary list in erlang
contains([_|T]=S1,S2,N,Acc) when length(S2) =< length(S1) ->
case starts_with(S1,S2) of
true ->
contains(T,S2,N+1,[N|Acc]);
false ->
contains(T,S2,N+1,Acc)
end;
contains(_S1,_S2,_N,Acc) ->
Acc.
hostname() ->
H = os:cmd("hostname"),
string:strip(H,right,$\n).
get_gzip_name() ->
S1 = hostname(),
S1 ++ "-riakdb.tgz".
get_month_name() ->
{{_,Month,_},{_,_,_}} = erlang:localtime(),
get_gzip_name() ++ ".month." ++ erlang:integer_to_list(Month).
get_weekly_name() ->
{_,Week} = calendar:iso_week_number(),
get_gzip_name() ++ ".week." ++ erlang:integer_to_list(Week).
get_wday_name() ->
WeekDay = calendar:day_of_the_week(date()),
get_gzip_name() ++ ".wday." ++ erlang:integer_to_list(WeekDay).
s3_small_upload(Key, File) ->
R = file:read_file(File),
{ok, Binary} = R,
s3_upload_single(Key, Binary).
s3_upload_single(Key, Value) ->
A = erlcloud_s3:configure(yaml_get("ACCESS_KEY",?ACCESS_KEY), yaml_get("SECRET_ACCESS_KEY",?SECRET_ACCESS_KEY)),
error_logger:info_msg("~p:~p Settng up AWS ~p to S3 ~n",
[?MODULE, ?LINE, A]),
%R = erlcloud_s3:put_object(yaml_get("S3_BUCKET",?S3_BUCKET), Key, Value, [], [{"Content-type", "application/x-gzip"}]),
R = erlcloud_s3:put_object(yaml_get("S3_BUCKET",?S3_BUCKET), Key, Value),
error_logger:info_msg("~p:~p Uploaded File ~p to S3 ~n", [?MODULE, ?LINE, R]),
{ok, R}.
s3_upload(Key, Path) ->
{ok, IoDevice} = file:open(Path,[raw]),
multipart_upload(Key, IoDevice, "Config", ?CHUNK_SIZE).
read_file(IoDevice,Count) ->
case file:read(IoDevice, Count) of
eof ->
{eof, 1};
_ ->
{eof, 2}
end.
multipart_upload(Key, IoDevice, Config, Count) ->
erlcloud_s3:configure(yaml_get("ACCESS_KEY",?ACCESS_KEY), yaml_get("SECRET_ACCESS_KEY",?SECRET_ACCESS_KEY)),
{_,[{_,UploadId}]} = erlcloud_s3:start_multipart(yaml_get("S3_BUCKET",?S3_BUCKET), Key),
io:format("~s S3 UploadId=~p ~n", [ts(), UploadId]),
upload_parts(Key, IoDevice, UploadId, Config, 1, Count, []).
upload_parts(Key, _IoDevice, UploadId, _Config, PartCount, 0, Parts) ->
io:format("~s S3 Multipart Complete. S3Key=~p PartCount=~p ~n", [ts(), Key,PartCount]),
A = erlcloud_s3:complete_multipart(yaml_get("S3_BUCKET",?S3_BUCKET), Key, UploadId, lists:reverse(Parts)),
{ok,A};
upload_parts(Key, _Device, UploadId, _Config, PartCount, -1, _Parts) ->
io:format("~s Error. S3 Abort Multipart. S3Key=~p Part=~p ~n", [ts(), Key,PartCount]),
A = erlcloud_s3:abort_multipart(yaml_get("S3_BUCKET",?S3_BUCKET), Key, UploadId),
{err,A};
upload_parts(Key, IoDevice, UploadId, Config, PartCount, Count, Parts) ->
io:format("~s S3 upload_part. S3Key=~p Part=~p ~n", [ts(), Key,PartCount]),
case file:read(IoDevice, Count) of
{ok,Data} ->
%A = erlcloud_s3:upload_part(yaml_get("S3_BUCKET",?S3_BUCKET), Key, UploadId, PartCount, Data),
A = s3_part_retry(yaml_get("S3_BUCKET",?S3_BUCKET), Key, UploadId, PartCount, Data, 5),
{ok,[{etag,PartEtag}]} = A,
upload_parts(Key, IoDevice, UploadId, Config, PartCount + 1, Count, [{PartCount, PartEtag} | Parts]);
eof ->
upload_parts(Key, IoDevice, UploadId, Config, PartCount, 0, Parts);
_ ->
upload_parts(Key, IoDevice, UploadId, Config, PartCount + 1, -1, Parts)
end.
s3_part_retry(S3_BUCKET, Key, UploadId, PartCount, Data, 0) ->
erlcloud_s3:upload_part(S3_BUCKET, Key, UploadId, PartCount, Data);
s3_part_retry(S3_BUCKET, Key, UploadId, PartCount, Data, RetriesLeft) ->
R = erlcloud_s3:upload_part(S3_BUCKET, Key, UploadId, PartCount, Data),
case R of
{error,{socket_error,timeout}} ->
% retry sending message, after 1 second
timer:sleep(1000),
s3_part_retry(S3_BUCKET, Key, UploadId, PartCount, Data, RetriesLeft - 1);
{ok,_} ->
R;
_ ->
io:format("catch: unknown error: ~p ~n", [R]),
R
end.
tar_riakdb() ->
io:format("~s tar leveldb=~p ~n", [ts(), ?RIAK_DB]),
Tar_Name = get_gzip_name(),
tar(?RIAK_DB,Tar_Name).
tar(File) ->
TarName = File ++ ".tgz",
DelCmd = "rm " ++ TarName,
os:cmd(DelCmd),
erl_tar:create(TarName,[File],[compressed]).
tar(FileList,TarName) ->
DelCmd = "rm " ++ TarName,
os:cmd(DelCmd),
erl_tar:create(TarName,FileList,[compressed]).
file_link(Src,Target) ->
Cmd = "ln -f " ++ Src ++ " " ++ Target,
os:cmd(Cmd),
{ok}.
file_link_targets(Src,TargetList) ->
lists:foreach(fun(E) ->
file_link(Src,E)
end, TargetList).
s3_get(Key) ->
io:format("s3get key=~p ~n", [Key]),
erlcloud_s3:configure(yaml_get("ACCESS_KEY",?ACCESS_KEY), yaml_get("SECRET_ACCESS_KEY",?SECRET_ACCESS_KEY)),
R = erlcloud_s3:get_object(yaml_get("S3_BUCKET",?S3_BUCKET), Key),
{content,Data} = lists:keyfind(content,1,R),
file:write_file(Key, Data).
yaml_get(Key) ->
io:format("~s get yaml key=~p ~n", [ts(), Key]),
[Yaml] = yamerl_constr:file(".riak-tools.yaml"),
YamlDict = dict:from_list(Yaml),
{ok,Value} = dict:find(Key,YamlDict),
Value.
yaml_get(Key,Default) ->
try yaml_get(Key) of
Value -> Value
catch
_:_ -> Default
end.
handle_backup([]) ->
io:format("~s start riak leveldb backup host=~p ~n", [ts(), hostname()]),
backup_script(),
io:format("~s backup_complete host=~p ~n", [ts(), hostname()]);
handle_backup([_Arg|_Args]) ->
handle_backup([]).
handle_get([]) ->
s3_get(get_gzip_name());
handle_get([Arg|Args]) ->
io:format("Arg=~p Args=~p ~n", [Arg,Args]),
s3_get(Arg).
handle_status() ->
os:cmd("chmod 0600 .riak-tools.yaml"),
Key = "S3_BUCKETX",
Value = yaml_get(Key,?S3_BUCKET),
io:format("Yaml ~p ~n", [Value]).
%assert_elb().
handle_elb() ->
erlcloud_elb:configure(yaml_get("ACCESS_KEY",?ACCESS_KEY), yaml_get("SECRET_ACCESS_KEY",?SECRET_ACCESS_KEY)),
R = erlcloud_elb:describe_load_balancer(yaml_get("AWS_ELB",?AWS_ELB)),
ELB = lists:flatten(io_lib:format("~p", [R])),
I = get_instanceid(),
io:format("InstanceId ~s ~n", [I]),
io : format("ELB ~s ~n " , [ ELB ] ) ,
assert_string(ELB,I),
io:format("elb!~n").
main([]) ->
usage();
main([MapType|Args]) ->
ssl:start(),
erlcloud:start(),
application:start(yamerl),
os:cmd("chmod 0600 .riak-tools.yaml"),
case MapType of
"backup" ->
handle_backup(Args);
"get" ->
handle_get(Args);
"hello" ->
io:format("Args ~p ~n", [Args]);
"status" ->
handle_status();
"elb-register" ->
register_with_elb();
"elb-deregister" ->
deregister_with_elb();
"riak-start" ->
start_riak();
"riak-stop" ->
stop_riak();
"version" ->
io:format("riak-tools ~p ~n", [?VERSION]);
_ ->
usage()
end.
usage() ->
io:format(?USAGE_TEMPLATE, ?USAGE_DATA).
|
87567051a446da90e22c9aabd17bd957157b183845479a9717410f1d84df6f54
|
RefactoringTools/HaRe
|
A3.hs
|
module A3 where
--Unfold 'sumSquares1'
import C3
main xs = case xs of
[] -> 0
[x:xs] -> x^pow + sumSquares1 xs
| null |
https://raw.githubusercontent.com/RefactoringTools/HaRe/ef5dee64c38fb104e6e5676095946279fbce381c/old/testing/unfoldDef/A3.hs
|
haskell
|
Unfold 'sumSquares1'
|
module A3 where
import C3
main xs = case xs of
[] -> 0
[x:xs] -> x^pow + sumSquares1 xs
|