Dataset schema (each record below has these fields):

  repo_name            string
  path                 string
  license              string
  full_code            string
  full_size            int64
  uncommented_code     string
  uncommented_size     int64
  function_only_code   string
  function_only_size   int64
  is_commented         bool
  is_signatured        bool
  n_ast_errors         int64
  ast_max_depth        int64
  n_whitespaces        int64
  n_ast_nodes          int64
  n_ast_terminals      int64
  n_ast_nonterminals   int64
  loc                  int64
  cycloplexity         int64
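Each record pairs three views of the same snippet — the full source, the source with comments removed (uncommented_code), and, where a type signature is present, the definition without that signature (function_only_code) — together with their sizes and a set of AST statistics. As a minimal sketch only, one record can be modeled by the Haskell type below; the module name, the field names, and the use of Maybe for the loc and cycloplexity columns (which are null in every record shown here) are assumptions made for this example, not part of the dataset or of any published loader for it.

-- Illustrative sketch only: a record type mirroring the schema above.
-- Module and field names are assumptions made for this example; they are
-- not part of the dataset or of any published loader for it.
module DatasetRow where

import Data.Int (Int64)

data DatasetRow = DatasetRow
  { rowRepoName         :: String
  , rowPath             :: FilePath
  , rowLicense          :: String
  , rowFullCode         :: String
  , rowFullSize         :: Int64
  , rowUncommentedCode  :: String
  , rowUncommentedSize  :: Int64
  , rowFunctionOnlyCode :: String
  , rowFunctionOnlySize :: Int64
  , rowIsCommented      :: Bool
  , rowIsSignatured     :: Bool
  , rowNAstErrors       :: Int64
  , rowAstMaxDepth      :: Int64
  , rowNWhitespaces     :: Int64
  , rowNAstNodes        :: Int64
  , rowNAstTerminals    :: Int64
  , rowNAstNonterminals :: Int64
  , rowLoc              :: Maybe Int64  -- null in every record shown below
  , rowCycloplexity     :: Maybe Int64  -- null in every record shown below
  } deriving Show

Keeping the nullable statistics as Maybe Int64 avoids sentinel values while matching the int64 column types listed above.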
sdiehl/ghc | libraries/base/Control/Concurrent/Chan.hs | bsd-3-clause
getChanContents :: Chan a -> IO [a]
getChanContents ch
= unsafeInterleaveIO (do
x <- readChan ch
xs <- getChanContents ch
return (x:xs)
)
| 168 |
getChanContents :: Chan a -> IO [a]
getChanContents ch
= unsafeInterleaveIO (do
x <- readChan ch
xs <- getChanContents ch
return (x:xs)
)
| 168 |
getChanContents ch
= unsafeInterleaveIO (do
x <- readChan ch
xs <- getChanContents ch
return (x:xs)
)
| 132 | false | true | 0 | 11 | 53 | 66 | 30 | 36 | null | null |
katsusuke/haskell-practice | cline.hs | mit
cNextLine :: String -> String
cNextLine x = let len = (length (cCurrentLine x))
in drop len x
| 106 |
cNextLine :: String -> String
cNextLine x = let len = (length (cCurrentLine x))
in drop len x
| 106 |
cNextLine x = let len = (length (cCurrentLine x))
in drop len x
| 76 | false | true | 0 | 12 | 30 | 46 | 22 | 24 | null | null |
siphayne/CS381 | HW3/HW3.hs | mit
cmd (Add) (x:y:xs) = Just ((x+y):xs)
| 37 |
cmd (Add) (x:y:xs) = Just ((x+y):xs)
| 37 |
cmd (Add) (x:y:xs) = Just ((x+y):xs)
| 37 | false | false | 1 | 9 | 6 | 44 | 22 | 22 | null | null |
romanb/amazonka | amazonka-codedeploy/gen/Network/AWS/CodeDeploy/ListDeploymentInstances.hs | mpl-2.0
-- | If the amount of information that is returned is significantly large, an
-- identifier will also be returned, which can be used in a subsequent list
-- deployment instances call to return the next set of deployment instances in
-- the list.
ldirNextToken :: Lens' ListDeploymentInstancesResponse (Maybe Text)
ldirNextToken = lens _ldirNextToken (\s a -> s { _ldirNextToken = a })
| 384 |
ldirNextToken :: Lens' ListDeploymentInstancesResponse (Maybe Text)
ldirNextToken = lens _ldirNextToken (\s a -> s { _ldirNextToken = a })
| 138 |
ldirNextToken = lens _ldirNextToken (\s a -> s { _ldirNextToken = a })
| 70 | true | true | 1 | 9 | 62 | 52 | 28 | 24 | null | null |
blanu/juicer | Juicer/Whip.hs | gpl-2.0
jsonifyPosts :: [Metapost] -> String
jsonifyPosts posts = "\"items\": [" ++ (intercalate "," (map jsonifyPost posts)) ++ "]"
| 124 |
jsonifyPosts :: [Metapost] -> String
jsonifyPosts posts = "\"items\": [" ++ (intercalate "," (map jsonifyPost posts)) ++ "]"
| 124 |
jsonifyPosts posts = "\"items\": [" ++ (intercalate "," (map jsonifyPost posts)) ++ "]"
| 87 | false | true | 0 | 10 | 17 | 51 | 24 | 27 | null | null |
koba-e964/hayashii-mcc | Typing.hs | bsd-3-clause
typingSub :: Syntax -> M Type
typingSub syn = case syn of
Unit -> return TUnit
Bool _ -> return TBool
Int _ -> return TInt
Float _ -> return TFloat
Not x -> typingSub x >>= unify TBool >> return TBool
Neg x -> typingSub x >>= unify TInt >> return TInt
ArithBin _ e1 e2 -> checkBinary TInt e1 e2 >> return TInt
FNeg x -> typingSub x >>= unify TFloat >> return TFloat
FloatBin _ e1 e2 -> checkBinary TFloat e1 e2 >> return TFloat
Cmp _ e1 e2 -> do
ty1 <- typingSub e1
ty2 <- typingSub e2
unify ty1 ty2
return TBool
If e1 e2 e3 -> do
typingSub e1 >>= unify TBool
ty2 <- typingSub e2
ty3 <- typingSub e3
unify ty2 ty3
return ty2
Let x t e1 e2 -> do
typingSub e1 >>= unify t
local (Map.insert x t) (typingSub e2)
Var x -> do
env <- ask
case Map.lookup x env of
Just ty -> return ty
Nothing -> throwError $ MiscError ("external variable " ++ show x ++ " was not in extenv.") (Var x)
LetRec (Fundef { name = (x, ty), args = ls, body = e1}) e2 -> do
newenv <- asks (Map.insert x ty)
let argsenv = Map.fromList ls `Map.union` newenv
TFun (map snd ls) <$> local (const argsenv) (typingSub e1) >>= unify ty
local (const newenv) (typingSub e2)
App e es -> do
t <- uniqType
join $ unify <$> typingSub e <*> (flip TFun t <$> mapM typingSub es);
return t
Tuple es -> TTuple <$> mapM typingSub es
LetTuple xts e1 e2 -> do
unify (TTuple (map snd xts)) =<< typingSub e1
local (Map.fromList xts `Map.union`) (typingSub e2)
Array e1 e2 -> do
unify TInt =<< typingSub e1
TArray <$> typingSub e2
Get e1 e2 -> do
t <- uniqType
unify (TArray t) =<< typingSub e1
unify TInt =<< typingSub e2
return t
Put e1 e2 e3 -> do
t <- typingSub e3
unify (TArray t) =<< typingSub e1
unify TInt =<< typingSub e2
return TUnit
-- | replace type variables with unique type variables
| 1,922 |
typingSub :: Syntax -> M Type
typingSub syn = case syn of
Unit -> return TUnit
Bool _ -> return TBool
Int _ -> return TInt
Float _ -> return TFloat
Not x -> typingSub x >>= unify TBool >> return TBool
Neg x -> typingSub x >>= unify TInt >> return TInt
ArithBin _ e1 e2 -> checkBinary TInt e1 e2 >> return TInt
FNeg x -> typingSub x >>= unify TFloat >> return TFloat
FloatBin _ e1 e2 -> checkBinary TFloat e1 e2 >> return TFloat
Cmp _ e1 e2 -> do
ty1 <- typingSub e1
ty2 <- typingSub e2
unify ty1 ty2
return TBool
If e1 e2 e3 -> do
typingSub e1 >>= unify TBool
ty2 <- typingSub e2
ty3 <- typingSub e3
unify ty2 ty3
return ty2
Let x t e1 e2 -> do
typingSub e1 >>= unify t
local (Map.insert x t) (typingSub e2)
Var x -> do
env <- ask
case Map.lookup x env of
Just ty -> return ty
Nothing -> throwError $ MiscError ("external variable " ++ show x ++ " was not in extenv.") (Var x)
LetRec (Fundef { name = (x, ty), args = ls, body = e1}) e2 -> do
newenv <- asks (Map.insert x ty)
let argsenv = Map.fromList ls `Map.union` newenv
TFun (map snd ls) <$> local (const argsenv) (typingSub e1) >>= unify ty
local (const newenv) (typingSub e2)
App e es -> do
t <- uniqType
join $ unify <$> typingSub e <*> (flip TFun t <$> mapM typingSub es);
return t
Tuple es -> TTuple <$> mapM typingSub es
LetTuple xts e1 e2 -> do
unify (TTuple (map snd xts)) =<< typingSub e1
local (Map.fromList xts `Map.union`) (typingSub e2)
Array e1 e2 -> do
unify TInt =<< typingSub e1
TArray <$> typingSub e2
Get e1 e2 -> do
t <- uniqType
unify (TArray t) =<< typingSub e1
unify TInt =<< typingSub e2
return t
Put e1 e2 e3 -> do
t <- typingSub e3
unify (TArray t) =<< typingSub e1
unify TInt =<< typingSub e2
return TUnit
-- | replace type variables with unique type variables
| 1,922 |
typingSub syn = case syn of
Unit -> return TUnit
Bool _ -> return TBool
Int _ -> return TInt
Float _ -> return TFloat
Not x -> typingSub x >>= unify TBool >> return TBool
Neg x -> typingSub x >>= unify TInt >> return TInt
ArithBin _ e1 e2 -> checkBinary TInt e1 e2 >> return TInt
FNeg x -> typingSub x >>= unify TFloat >> return TFloat
FloatBin _ e1 e2 -> checkBinary TFloat e1 e2 >> return TFloat
Cmp _ e1 e2 -> do
ty1 <- typingSub e1
ty2 <- typingSub e2
unify ty1 ty2
return TBool
If e1 e2 e3 -> do
typingSub e1 >>= unify TBool
ty2 <- typingSub e2
ty3 <- typingSub e3
unify ty2 ty3
return ty2
Let x t e1 e2 -> do
typingSub e1 >>= unify t
local (Map.insert x t) (typingSub e2)
Var x -> do
env <- ask
case Map.lookup x env of
Just ty -> return ty
Nothing -> throwError $ MiscError ("external variable " ++ show x ++ " was not in extenv.") (Var x)
LetRec (Fundef { name = (x, ty), args = ls, body = e1}) e2 -> do
newenv <- asks (Map.insert x ty)
let argsenv = Map.fromList ls `Map.union` newenv
TFun (map snd ls) <$> local (const argsenv) (typingSub e1) >>= unify ty
local (const newenv) (typingSub e2)
App e es -> do
t <- uniqType
join $ unify <$> typingSub e <*> (flip TFun t <$> mapM typingSub es);
return t
Tuple es -> TTuple <$> mapM typingSub es
LetTuple xts e1 e2 -> do
unify (TTuple (map snd xts)) =<< typingSub e1
local (Map.fromList xts `Map.union`) (typingSub e2)
Array e1 e2 -> do
unify TInt =<< typingSub e1
TArray <$> typingSub e2
Get e1 e2 -> do
t <- uniqType
unify (TArray t) =<< typingSub e1
unify TInt =<< typingSub e2
return t
Put e1 e2 e3 -> do
t <- typingSub e3
unify (TArray t) =<< typingSub e1
unify TInt =<< typingSub e2
return TUnit
-- | replace type variables with unique type variables
| 1,892 | false | true | 26 | 15 | 531 | 787 | 361 | 426 | null | null |
tolysz/prepare-ghcjs | spec-lts8/cabal/cabal-install/Distribution/Client/PackageIndex.hs | bsd-3-clause
--
-- * Case insensitive name lookups
--
-- | Does a case-insensitive search by package name.
--
-- If there is only one package that compares case-insensitively to this name
-- then the search is unambiguous and we get back all versions of that package.
-- If several match case-insensitively but one matches exactly then it is also
-- unambiguous.
--
-- If however several match case-insensitively and none match exactly then we
-- have an ambiguous result, and we get back all the versions of all the
-- packages. The list of ambiguous results is split by exact package name. So
-- it is a non-empty list of non-empty lists.
--
searchByName :: PackageIndex pkg
-> String -> [(PackageName, [pkg])]
searchByName (PackageIndex m) name =
[ pkgs
| pkgs@(PackageName name',_) <- Map.toList m
, lowercase name' == lname ]
where
lname = lowercase name
| 878 |
searchByName :: PackageIndex pkg
-> String -> [(PackageName, [pkg])]
searchByName (PackageIndex m) name =
[ pkgs
| pkgs@(PackageName name',_) <- Map.toList m
, lowercase name' == lname ]
where
lname = lowercase name
| 246 |
searchByName (PackageIndex m) name =
[ pkgs
| pkgs@(PackageName name',_) <- Map.toList m
, lowercase name' == lname ]
where
lname = lowercase name
| 164 | true | true | 1 | 11 | 177 | 118 | 65 | 53 | null | null |
vikraman/ghc | compiler/hsSyn/HsUtils.hs | bsd-3-clause
nlWildConPat :: DataCon -> LPat RdrName
nlWildConPat con = noLoc (ConPatIn (noLoc (getRdrName con))
(PrefixCon (nOfThem (dataConSourceArity con)
nlWildPat)))
| 227 |
nlWildConPat :: DataCon -> LPat RdrName
nlWildConPat con = noLoc (ConPatIn (noLoc (getRdrName con))
(PrefixCon (nOfThem (dataConSourceArity con)
nlWildPat)))
| 227 |
nlWildConPat con = noLoc (ConPatIn (noLoc (getRdrName con))
(PrefixCon (nOfThem (dataConSourceArity con)
nlWildPat)))
| 187 | false | true | 0 | 13 | 88 | 69 | 32 | 37 | null | null |
soupi/pureli | src/Language/Pureli/Parser.hs | bsd-3-clause
parseReqDefExp :: String -> String -> Either String ReqDefExp
parseReqDefExp name input =
case P.parse ((Req <$> require) <|> (Def <$> define) <|> (Exp <$> withMD expr)) name input of
Left err -> Left (show err)
Right val -> Right val
| 245 |
parseReqDefExp :: String -> String -> Either String ReqDefExp
parseReqDefExp name input =
case P.parse ((Req <$> require) <|> (Def <$> define) <|> (Exp <$> withMD expr)) name input of
Left err -> Left (show err)
Right val -> Right val
| 245 |
parseReqDefExp name input =
case P.parse ((Req <$> require) <|> (Def <$> define) <|> (Exp <$> withMD expr)) name input of
Left err -> Left (show err)
Right val -> Right val
| 183 | false | true | 0 | 11 | 51 | 111 | 53 | 58 | null | null |
karamellpelle/grid | source/Game/Values.hs | gpl-3.0
valueGUIHth :: Float
valueGUIHth =
0.4
| 42 |
valueGUIHth :: Float
valueGUIHth =
0.4
| 42 |
valueGUIHth =
0.4
| 21 | false | true | 0 | 4 | 9 | 11 | 6 | 5 | null | null |
aaronc/Idris-dev | src/Idris/CaseSplit.hs | bsd-3-clause
mergePat :: IState -> PTerm -> PTerm -> Maybe Name -> State MergeState PTerm
-- If any names are unified, make sure they stay unified. Always prefer
-- user provided name (first pattern)
mergePat ist (PPatvar fc n) new t
= mergePat ist (PRef fc [] n) new t
| 258 |
mergePat :: IState -> PTerm -> PTerm -> Maybe Name -> State MergeState PTerm
mergePat ist (PPatvar fc n) new t
= mergePat ist (PRef fc [] n) new t
| 148 |
mergePat ist (PPatvar fc n) new t
= mergePat ist (PRef fc [] n) new t
| 71 | true | true | 0 | 9 | 50 | 75 | 37 | 38 | null | null |
facebookincubator/duckling | tests/Duckling/Time/FR/Tests.hs | bsd-3-clause
tests :: TestTree
tests = testGroup "FR Tests"
[ makeCorpusTest [Seal Time] corpus
, makeNegativeCorpusTest [Seal Time] negativeCorpus
]
| 142 |
tests :: TestTree
tests = testGroup "FR Tests"
[ makeCorpusTest [Seal Time] corpus
, makeNegativeCorpusTest [Seal Time] negativeCorpus
]
| 142 |
tests = testGroup "FR Tests"
[ makeCorpusTest [Seal Time] corpus
, makeNegativeCorpusTest [Seal Time] negativeCorpus
]
| 124 | false | true | 0 | 9 | 24 | 52 | 23 | 29 | null | null |
Proclivis/wxHaskell | wxcore/src/haskell/Graphics/UI/WXCore/WxcDefs.hs | lgpl-2.1
wxSTC_MSSQL_DEFAULT :: Int
wxSTC_MSSQL_DEFAULT = 0
| 50 |
wxSTC_MSSQL_DEFAULT :: Int
wxSTC_MSSQL_DEFAULT = 0
| 50 |
wxSTC_MSSQL_DEFAULT = 0
| 23 | false | true | 0 | 4 | 5 | 11 | 6 | 5 | null | null |
joe9/streaming-betfair-api | src/Betfair/StreamingAPI/API/RequestProcessing.hs | mit
marketIdsSubscription :: Context -> [MarketId] -> IO (Context)
marketIdsSubscription c [] = return c
| 100 |
marketIdsSubscription :: Context -> [MarketId] -> IO (Context)
marketIdsSubscription c [] = return c
| 100 |
marketIdsSubscription c [] = return c
| 37 | false | true | 0 | 8 | 13 | 38 | 19 | 19 | null | null |
josiah14/lambdaline | LambdaLine/XTerm/Colors.hs | gpl-3.0
honeydew :: Color
honeydew = "194"
| 34 |
honeydew :: Color
honeydew = "194"
| 34 |
honeydew = "194"
| 16 | false | true | 0 | 6 | 5 | 18 | 7 | 11 | null | null |
mihaimaruseac/io-manager | Training/MM/IOManager.hs | bsd-3-clause
readInputFiles (f:fs) m = do
content <- readFile f
readInputFiles fs $ Map.insert f content m
-- | Writes the content of all the output files.
| 147 |
readInputFiles (f:fs) m = do
content <- readFile f
readInputFiles fs $ Map.insert f content m
-- | Writes the content of all the output files.
| 147 |
readInputFiles (f:fs) m = do
content <- readFile f
readInputFiles fs $ Map.insert f content m
-- | Writes the content of all the output files.
| 147 | false | false | 0 | 9 | 30 | 48 | 22 | 26 | null | null |
AlexeyRaga/eta | compiler/ETA/CodeGen/Types.hs | bsd-3-clause
storeDefault :: CgLoc -> Code
storeDefault cgLoc = storeLoc cgLoc $ defaultValue (locFt cgLoc)
| 94 |
storeDefault :: CgLoc -> Code
storeDefault cgLoc = storeLoc cgLoc $ defaultValue (locFt cgLoc)
| 94 |
storeDefault cgLoc = storeLoc cgLoc $ defaultValue (locFt cgLoc)
| 64 | false | true | 0 | 8 | 13 | 34 | 16 | 18 | null | null |
tgdavies/codeworld | codeworld-prediction/src/CodeWorld/Prediction.hs | apache-2.0
advanceCurrentTime ::
StepFun s -> AnimationRate -> Timestamp -> Future s -> Future s
advanceCurrentTime step rate target f =
f
{ current = timePassesBigStep step rate target $ current f
, lastQuery = target
}
| 232 |
advanceCurrentTime ::
StepFun s -> AnimationRate -> Timestamp -> Future s -> Future s
advanceCurrentTime step rate target f =
f
{ current = timePassesBigStep step rate target $ current f
, lastQuery = target
}
| 232 |
advanceCurrentTime step rate target f =
f
{ current = timePassesBigStep step rate target $ current f
, lastQuery = target
}
| 139 | false | true | 0 | 9 | 58 | 73 | 36 | 37 | null | null |
rueshyna/gogol | gogol-adexchange-buyer/gen/Network/Google/AdExchangeBuyer/Types/Product.hs | mpl-2.0
-- | Only set when contextType=LOCATION. Represents the geo criterias this
-- restriction applies to.
csriciGeoCriteriaId :: Lens' CreativeServingRestrictionsItemContextsItem [Int32]
csriciGeoCriteriaId
= lens _csriciGeoCriteriaId
(\ s a -> s{_csriciGeoCriteriaId = a})
. _Default
. _Coerce
| 310 |
csriciGeoCriteriaId :: Lens' CreativeServingRestrictionsItemContextsItem [Int32]
csriciGeoCriteriaId
= lens _csriciGeoCriteriaId
(\ s a -> s{_csriciGeoCriteriaId = a})
. _Default
. _Coerce
| 208 |
csriciGeoCriteriaId
= lens _csriciGeoCriteriaId
(\ s a -> s{_csriciGeoCriteriaId = a})
. _Default
. _Coerce
| 127 | true | true | 2 | 8 | 54 | 58 | 29 | 29 | null | null |
daherb/Haskell-Muste | muste-lib/Muste/Feat.hs | artistic-2.0
-- | The function 'getRules' returns the union of syntactic and lexical rules of a grammar
getAllRules :: Grammar -> [Rule]
getAllRules g = union (synrules g) (lexrules g)
| 171 |
getAllRules :: Grammar -> [Rule]
getAllRules g = union (synrules g) (lexrules g)
| 80 |
getAllRules g = union (synrules g) (lexrules g)
| 47 | true | true | 0 | 8 | 28 | 46 | 21 | 25 | null | null |
Heather/stack | src/Network/HTTP/Download/Verified.hs | bsd-3-clause
-- | Make sure that the hash digest for a finite stream of bytes
-- is as expected.
--
-- Throws WrongDigest (VerifiedDownloadException)
sinkCheckHash :: MonadThrow m
=> Request
-> HashCheck
-> Consumer ByteString m ()
sinkCheckHash req HashCheck{..} = do
digest <- sinkHashUsing hashCheckAlgorithm
let actualDigestString = show digest
let actualDigestHexByteString = digestToHexByteString digest
let passedCheck = case hashCheckHexDigest of
CheckHexDigestString s -> s == actualDigestString
CheckHexDigestByteString b -> b == actualDigestHexByteString
CheckHexDigestHeader b -> B64.decodeLenient b == actualDigestHexByteString
-- A hack to allow hackage tarballs to download.
-- They should really base64-encode their md5 header as per rfc2616#sec14.15.
-- https://github.com/commercialhaskell/stack/issues/240
|| b == actualDigestHexByteString
unless passedCheck $
throwM $ WrongDigest req (show hashCheckAlgorithm) hashCheckHexDigest actualDigestString
| 1,075 |
sinkCheckHash :: MonadThrow m
=> Request
-> HashCheck
-> Consumer ByteString m ()
sinkCheckHash req HashCheck{..} = do
digest <- sinkHashUsing hashCheckAlgorithm
let actualDigestString = show digest
let actualDigestHexByteString = digestToHexByteString digest
let passedCheck = case hashCheckHexDigest of
CheckHexDigestString s -> s == actualDigestString
CheckHexDigestByteString b -> b == actualDigestHexByteString
CheckHexDigestHeader b -> B64.decodeLenient b == actualDigestHexByteString
-- A hack to allow hackage tarballs to download.
-- They should really base64-encode their md5 header as per rfc2616#sec14.15.
-- https://github.com/commercialhaskell/stack/issues/240
|| b == actualDigestHexByteString
unless passedCheck $
throwM $ WrongDigest req (show hashCheckAlgorithm) hashCheckHexDigest actualDigestString
| 938 |
sinkCheckHash req HashCheck{..} = do
digest <- sinkHashUsing hashCheckAlgorithm
let actualDigestString = show digest
let actualDigestHexByteString = digestToHexByteString digest
let passedCheck = case hashCheckHexDigest of
CheckHexDigestString s -> s == actualDigestString
CheckHexDigestByteString b -> b == actualDigestHexByteString
CheckHexDigestHeader b -> B64.decodeLenient b == actualDigestHexByteString
-- A hack to allow hackage tarballs to download.
-- They should really base64-encode their md5 header as per rfc2616#sec14.15.
-- https://github.com/commercialhaskell/stack/issues/240
|| b == actualDigestHexByteString
unless passedCheck $
throwM $ WrongDigest req (show hashCheckAlgorithm) hashCheckHexDigest actualDigestString
| 844 | true | true | 18 | 6 | 236 | 161 | 90 | 71 | null | null |
supki/liblastfm | example/sort-friends.hs | mit
scores :: Connection -> [Text] -> IO [(Text, Score)]
scores conn xs = zip xs <$> forConcurrently xs (\x -> do
r <- query conn (Tasteometer.compare (user target) (user x))
return (fromMaybe "0" (preview score r)))
where
score = folded.key "comparison".key "result".key "score"._String
| 290 |
scores :: Connection -> [Text] -> IO [(Text, Score)]
scores conn xs = zip xs <$> forConcurrently xs (\x -> do
r <- query conn (Tasteometer.compare (user target) (user x))
return (fromMaybe "0" (preview score r)))
where
score = folded.key "comparison".key "result".key "score"._String
| 290 |
scores conn xs = zip xs <$> forConcurrently xs (\x -> do
r <- query conn (Tasteometer.compare (user target) (user x))
return (fromMaybe "0" (preview score r)))
where
score = folded.key "comparison".key "result".key "score"._String
| 237 | false | true | 0 | 16 | 49 | 138 | 68 | 70 | null | null |
jean-edouard/manager | rpc-proxy/Msg/Json.hs | gpl-2.0
jmsgToJson (JMsgRespErr m) = jerrToJson m
| 41 |
jmsgToJson (JMsgRespErr m) = jerrToJson m
| 41 |
jmsgToJson (JMsgRespErr m) = jerrToJson m
| 41 | false | false | 0 | 7 | 5 | 18 | 8 | 10 | null | null |
tolysz/prepare-ghcjs | spec-lts8/base/GHC/Natural.hs | bsd-3-clause
gcdNatural (NatS# x) (NatJ# y) = NatS# (gcdBigNatWord y x)
| 58 |
gcdNatural (NatS# x) (NatJ# y) = NatS# (gcdBigNatWord y x)
| 58 |
gcdNatural (NatS# x) (NatJ# y) = NatS# (gcdBigNatWord y x)
| 58 | false | false | 0 | 7 | 9 | 34 | 16 | 18 | null | null |
siddhanathan/ghc | testsuite/tests/rename/should_compile/timing001.hs | bsd-3-clause
a467 = a468
| 11 |
a467 = a468
| 11 |
a467 = a468
| 11 | false | false | 1 | 5 | 2 | 10 | 3 | 7 | null | null |
allanderek/ipclib | Smc/DataBase.hs | gpl-2.0
collateDataBase :: FilePath -> DataBase FilePath -> IO (DataBase PassageResults)
collateDataBase dir db =
do newGroups <- mapM collateEntrySet $ dbGroups db
return DataBase { dbStringModel = dbStringModel db
, dbGroups = newGroups
}
where
-- collating on entry set is simple, we just collate each entry in the set
collateEntrySet :: DataBaseGroup FilePath
-> IO ( DataBaseGroup PassageResults )
collateEntrySet g =
do entries <- mapM collateEntry $ dbgEntries g
return $ g { dbgEntries = entries }
-- Collating a single entry is pretty simple we just read in the
-- results file and replace the 'dbPassage' file of the entry
-- with the interpreted contents of the results file.
collateEntry :: DataBaseEntry FilePath -> IO (DataBaseEntry PassageResults)
collateEntry entry =
do passagePdf <- readResultsFile (combine dir $ dbPassagePdf entry)
passageCdf <- readResultsFile (combine dir $ dbPassageCdf entry)
return $ entry { dbPassagePdf = passagePdf
, dbPassageCdf = passageCdf
}
-- Read in a results file, essentially just read in the contents of the
-- the file and then interpret that contents as a set of passage-time
-- results.
readResultsFile :: FilePath -> IO PassageResults
readResultsFile =
liftM interpretContents . readFile
-- Interpreting the contents of a results file is simple
-- each mapping is on a line of its own and the time is separated
-- from the probability by a comma
interpretContents :: String -> PassageResults
interpretContents =
(map interpretLine) . lines
where
interpretLine :: String -> (Double, Double)
interpretLine inputline =
(read time, read prob)
where
(time, commaProb) = span (/= ',') inputline
prob = dropWhile (== ',') commaProb
{-
Returns for the given entry the probability at the given time.
Is unforgiving and will error should we attempt to lookup a time
which is not defined.
-}
| 2,084 |
collateDataBase :: FilePath -> DataBase FilePath -> IO (DataBase PassageResults)
collateDataBase dir db =
do newGroups <- mapM collateEntrySet $ dbGroups db
return DataBase { dbStringModel = dbStringModel db
, dbGroups = newGroups
}
where
-- collating on entry set is simple, we just collate each entry in the set
collateEntrySet :: DataBaseGroup FilePath
-> IO ( DataBaseGroup PassageResults )
collateEntrySet g =
do entries <- mapM collateEntry $ dbgEntries g
return $ g { dbgEntries = entries }
-- Collating a single entry is pretty simple we just read in the
-- results file and replace the 'dbPassage' file of the entry
-- with the interpreted contents of the results file.
collateEntry :: DataBaseEntry FilePath -> IO (DataBaseEntry PassageResults)
collateEntry entry =
do passagePdf <- readResultsFile (combine dir $ dbPassagePdf entry)
passageCdf <- readResultsFile (combine dir $ dbPassageCdf entry)
return $ entry { dbPassagePdf = passagePdf
, dbPassageCdf = passageCdf
}
-- Read in a results file, essentially just read in the contents of the
-- the file and then interpret that contents as a set of passage-time
-- results.
readResultsFile :: FilePath -> IO PassageResults
readResultsFile =
liftM interpretContents . readFile
-- Interpreting the contents of a results file is simple
-- each mapping is on a line of its own and the time is separated
-- from the probability by a comma
interpretContents :: String -> PassageResults
interpretContents =
(map interpretLine) . lines
where
interpretLine :: String -> (Double, Double)
interpretLine inputline =
(read time, read prob)
where
(time, commaProb) = span (/= ',') inputline
prob = dropWhile (== ',') commaProb
{-
Returns for the given entry the probability at the given time.
Is unforgiving and will error should we attempt to lookup a time
which is not defined.
-}
| 2,084 |
collateDataBase dir db =
do newGroups <- mapM collateEntrySet $ dbGroups db
return DataBase { dbStringModel = dbStringModel db
, dbGroups = newGroups
}
where
-- collating on entry set is simple, we just collate each entry in the set
collateEntrySet :: DataBaseGroup FilePath
-> IO ( DataBaseGroup PassageResults )
collateEntrySet g =
do entries <- mapM collateEntry $ dbgEntries g
return $ g { dbgEntries = entries }
-- Collating a single entry is pretty simple we just read in the
-- results file and replace the 'dbPassage' file of the entry
-- with the interpreted contents of the results file.
collateEntry :: DataBaseEntry FilePath -> IO (DataBaseEntry PassageResults)
collateEntry entry =
do passagePdf <- readResultsFile (combine dir $ dbPassagePdf entry)
passageCdf <- readResultsFile (combine dir $ dbPassageCdf entry)
return $ entry { dbPassagePdf = passagePdf
, dbPassageCdf = passageCdf
}
-- Read in a results file, essentially just read in the contents of the
-- the file and then interpret that contents as a set of passage-time
-- results.
readResultsFile :: FilePath -> IO PassageResults
readResultsFile =
liftM interpretContents . readFile
-- Interpreting the contents of a results file is simple
-- each mapping is on a line of its own and the time is separated
-- from the probability by a comma
interpretContents :: String -> PassageResults
interpretContents =
(map interpretLine) . lines
where
interpretLine :: String -> (Double, Double)
interpretLine inputline =
(read time, read prob)
where
(time, commaProb) = span (/= ',') inputline
prob = dropWhile (== ',') commaProb
{-
Returns for the given entry the probability at the given time.
Is unforgiving and will error should we attempt to lookup a time
which is not defined.
-}
| 2,003 | false | true | 0 | 11 | 546 | 374 | 190 | 184 | null | null |
e1528532/libelektra | src/plugins/regexdispatcher/Elektra/ValidationDispatcher.hs | bsd-3-clause
validationDispatch :: Dispatcher
validationDispatch ks = ksList ks >>= fmap catMaybes . mapM dispatch
where
dispatch k = keyGetMeta k "check/validation" >>= ifKey (return Nothing) (dispatch' k)
dispatch' k m = do
rgx <- keyString m
compiledRgx <- either (const Nothing) Just <$> compile rgx
fmap (k, "check/validation", ) <$> processAdditionalMetakeys compiledRgx
where
processAdditionalMetakeys Nothing = return Nothing
processAdditionalMetakeys (Just rgx) = do
complementedRgx <- keyGetMeta k "check/validation/invert" >>= ifKey (return rgx) (const $ complement rgx)
keyGetMeta k "check/validation/ignorecase" >>= whenKey (const $ const () <$> nocase complementedRgx)
_ <- minimize complementedRgx
either (const Nothing) Just <$> asRegexp complementedRgx
| 855 |
validationDispatch :: Dispatcher
validationDispatch ks = ksList ks >>= fmap catMaybes . mapM dispatch
where
dispatch k = keyGetMeta k "check/validation" >>= ifKey (return Nothing) (dispatch' k)
dispatch' k m = do
rgx <- keyString m
compiledRgx <- either (const Nothing) Just <$> compile rgx
fmap (k, "check/validation", ) <$> processAdditionalMetakeys compiledRgx
where
processAdditionalMetakeys Nothing = return Nothing
processAdditionalMetakeys (Just rgx) = do
complementedRgx <- keyGetMeta k "check/validation/invert" >>= ifKey (return rgx) (const $ complement rgx)
keyGetMeta k "check/validation/ignorecase" >>= whenKey (const $ const () <$> nocase complementedRgx)
_ <- minimize complementedRgx
either (const Nothing) Just <$> asRegexp complementedRgx
| 855 |
validationDispatch ks = ksList ks >>= fmap catMaybes . mapM dispatch
where
dispatch k = keyGetMeta k "check/validation" >>= ifKey (return Nothing) (dispatch' k)
dispatch' k m = do
rgx <- keyString m
compiledRgx <- either (const Nothing) Just <$> compile rgx
fmap (k, "check/validation", ) <$> processAdditionalMetakeys compiledRgx
where
processAdditionalMetakeys Nothing = return Nothing
processAdditionalMetakeys (Just rgx) = do
complementedRgx <- keyGetMeta k "check/validation/invert" >>= ifKey (return rgx) (const $ complement rgx)
keyGetMeta k "check/validation/ignorecase" >>= whenKey (const $ const () <$> nocase complementedRgx)
_ <- minimize complementedRgx
either (const Nothing) Just <$> asRegexp complementedRgx
| 822 | false | true | 0 | 15 | 197 | 262 | 119 | 143 | null | null |
GaloisInc/mistral | src/Mistral/CodeGen/Specialize.hs | bsd-3-clause
specGroup :: Group Decl -> Spec (Group Decl)
specGroup = traverse specDecl
| 75 |
specGroup :: Group Decl -> Spec (Group Decl)
specGroup = traverse specDecl
| 75 |
specGroup = traverse specDecl
| 30 | false | true | 0 | 8 | 12 | 30 | 14 | 16 | null | null |
Chatanga/codingame-hs | src/Codingame/SourcePackager.hs | mit
toExtension (Ident _ "ExplicitNamespaces") = EnableExtension ExplicitNamespaces
| 79 |
toExtension (Ident _ "ExplicitNamespaces") = EnableExtension ExplicitNamespaces
| 79 |
toExtension (Ident _ "ExplicitNamespaces") = EnableExtension ExplicitNamespaces
| 79 | false | false | 0 | 7 | 6 | 20 | 9 | 11 | null | null |
phischu/fragnix | builtins/base/Foreign.C.Error.hs | bsd-3-clause
eNODATA = Errno (61)
| 28 |
eNODATA = Errno (61)
| 28 |
eNODATA = Errno (61)
| 28 | false | false | 0 | 6 | 11 | 12 | 6 | 6 | null | null |
ian-ross/c2hs-macos-test | c2hs-0.26.1/src/C2HS/Gen/Bind.hs | mit
isIntegralHsType "CULong" = True
| 33 |
isIntegralHsType "CULong" = True
| 33 |
isIntegralHsType "CULong" = True
| 33 | false | false | 0 | 5 | 4 | 9 | 4 | 5 | null | null |
plow-technologies/shakespeare-dynamic | ghcjs-shakespeare-dynamic/example/TankGauge.hs | mit
testGauge :: RawTank
testGauge = TankGauge 200 100 BlueTank
| 59 |
testGauge :: RawTank
testGauge = TankGauge 200 100 BlueTank
| 59 |
testGauge = TankGauge 200 100 BlueTank
| 38 | false | true | 0 | 5 | 8 | 18 | 9 | 9 | null | null |
urbanslug/yesod | yesod-core/Yesod/Core/Handler.hs | mit
askHandlerEnv :: MonadHandler m => m (RunHandlerEnv (HandlerSite m))
askHandlerEnv = liftHandlerT $ HandlerT $ return . handlerEnv
| 130 |
askHandlerEnv :: MonadHandler m => m (RunHandlerEnv (HandlerSite m))
askHandlerEnv = liftHandlerT $ HandlerT $ return . handlerEnv
| 130 |
askHandlerEnv = liftHandlerT $ HandlerT $ return . handlerEnv
| 61 | false | true | 0 | 10 | 17 | 45 | 22 | 23 | null | null |
kolmodin/cabal | Cabal/Distribution/System.hs | bsd-3-clause
osAliases _ OSX = ["darwin"]
| 41 |
osAliases _ OSX = ["darwin"]
| 41 |
osAliases _ OSX = ["darwin"]
| 41 | false | false | 0 | 5 | 17 | 14 | 7 | 7 | null | null |
jcgruenhage/dendrite | vendor/src/github.com/apache/thrift/test/hs/TestServer.hs | apache-2.0
getTransport "framed" = Framed $ \s -> do
(h, _, _) <- (accept s)
openFramedTransport h
| 91 |
getTransport "framed" = Framed $ \s -> do
(h, _, _) <- (accept s)
openFramedTransport h
| 91 |
getTransport "framed" = Framed $ \s -> do
(h, _, _) <- (accept s)
openFramedTransport h
| 91 | false | false | 0 | 11 | 19 | 46 | 23 | 23 | null | null |
jstolarek/slicer | lib/Language/Slicer/Eval.hs | gpl-3.0
trace (EFst e) = do (v, t) <- trace' e
let (VPair v1 _) = getVal v
return (v >-< ORet v1, TFst t)
| 155 |
trace (EFst e) = do (v, t) <- trace' e
let (VPair v1 _) = getVal v
return (v >-< ORet v1, TFst t)
| 155 |
trace (EFst e) = do (v, t) <- trace' e
let (VPair v1 _) = getVal v
return (v >-< ORet v1, TFst t)
| 155 | false | false | 0 | 11 | 81 | 75 | 34 | 41 | null | null |
rjregenold/blog | css/blog.hs | mit
syntaxFontColor = "#CCCCCC"
| 27 |
syntaxFontColor = "#CCCCCC"
| 27 |
syntaxFontColor = "#CCCCCC"
| 27 | false | false | 1 | 5 | 2 | 10 | 3 | 7 | null | null |
nevrenato/Hets_Fork | Static/XGraph.hs | gpl-2.0
readCons :: Element -> Conservativity
readCons el = case findChild (unqual "ConsStatus") el of
Nothing -> None
Just c' -> fromMaybe None $ readMaybe $ strContent c'
| 168 |
readCons :: Element -> Conservativity
readCons el = case findChild (unqual "ConsStatus") el of
Nothing -> None
Just c' -> fromMaybe None $ readMaybe $ strContent c'
| 168 |
readCons el = case findChild (unqual "ConsStatus") el of
Nothing -> None
Just c' -> fromMaybe None $ readMaybe $ strContent c'
| 130 | false | true | 0 | 10 | 30 | 67 | 30 | 37 | null | null |
danplubell/CTG1371 | library/Data/CTG1371/Internal/Parser/Parsers.hs | mit
translateHRMode :: (Num a, Eq a) => a -> HRMode
translateHRMode hrdata = case hrdata of
0 -> NoHRTransducer
1 -> Inop
2 -> US
4 -> DECG
12 -> Reserved2
14 -> UnknownHRMode
_ -> NullHRMode
-- | translate a numeric maternal heart rate into the symbolic heart rate mode
| 460 |
translateHRMode :: (Num a, Eq a) => a -> HRMode
translateHRMode hrdata = case hrdata of
0 -> NoHRTransducer
1 -> Inop
2 -> US
4 -> DECG
12 -> Reserved2
14 -> UnknownHRMode
_ -> NullHRMode
-- | translate a numeric maternal heart rate into the symbolic heart rate mode
| 460 |
translateHRMode hrdata = case hrdata of
0 -> NoHRTransducer
1 -> Inop
2 -> US
4 -> DECG
12 -> Reserved2
14 -> UnknownHRMode
_ -> NullHRMode
-- | translate a numeric maternal heart rate into the symbolic heart rate mode
| 412 | false | true | 0 | 8 | 244 | 86 | 42 | 44 | null | null |
AaronFriel/eff-experiments | src/Data/Iota/Eff1.hs | bsd-3-clause
t1rr = 11 == run t1r
| 20 |
t1rr = 11 == run t1r
| 20 |
t1rr = 11 == run t1r
| 20 | false | false | 2 | 5 | 5 | 17 | 6 | 11 | null | null |
urbanslug/yesod | yesod-core/Yesod/Core/Handler.hs | mit
-- | Helper function for setCookieExpires value
getExpires :: MonadIO m
=> Int -- ^ minutes
-> m UTCTime
getExpires m = do
now <- liftIO getCurrentTime
return $ fromIntegral (m * 60) `addUTCTime` now
-- | Unset the cookie on the client.
--
-- Note: although the value used for key and path is 'Text', you should only
-- use ASCII values to be HTTP compliant.
| 390 |
getExpires :: MonadIO m
=> Int -- ^ minutes
-> m UTCTime
getExpires m = do
now <- liftIO getCurrentTime
return $ fromIntegral (m * 60) `addUTCTime` now
-- | Unset the cookie on the client.
--
-- Note: although the value used for key and path is 'Text', you should only
-- use ASCII values to be HTTP compliant.
| 342 |
getExpires m = do
now <- liftIO getCurrentTime
return $ fromIntegral (m * 60) `addUTCTime` now
-- | Unset the cookie on the client.
--
-- Note: although the value used for key and path is 'Text', you should only
-- use ASCII values to be HTTP compliant.
| 263 | true | true | 0 | 11 | 98 | 67 | 35 | 32 | null | null |
shlevy/ghc | libraries/base/GHC/IO/Handle.hs | bsd-3-clause
hClose_rethrow :: SomeException -> Handle -> IO ()
hClose_rethrow e h =
case fromException e of
Just ioe -> ioError (augmentIOError ioe "hClose" h)
Nothing -> throwIO e
| 179 |
hClose_rethrow :: SomeException -> Handle -> IO ()
hClose_rethrow e h =
case fromException e of
Just ioe -> ioError (augmentIOError ioe "hClose" h)
Nothing -> throwIO e
| 179 |
hClose_rethrow e h =
case fromException e of
Just ioe -> ioError (augmentIOError ioe "hClose" h)
Nothing -> throwIO e
| 128 | false | true | 0 | 10 | 38 | 71 | 32 | 39 | null | null |
sdiehl/ghc | libraries/libiserv/src/Remote/Slave.hs | bsd-3-clause
startSlave' :: Bool -> String -> PortNumber -> IO ()
startSlave' verbose base_path port = do
hSetBuffering stdin LineBuffering
hSetBuffering stdout LineBuffering
sock <- openSocket port
forever $ do
when verbose $ trace "Opening socket"
pipe <- acceptSocket sock >>= socketToPipe
putStrLn $ "Listening on port " ++ show port
when verbose $ trace "Starting serv"
uninterruptibleMask $ serv verbose (hook verbose base_path pipe) pipe
when verbose $ trace "serv ended"
return ()
-- | The iserv library may need access to files, specifically
-- archives and object files to be linked. If ghc and the slave
-- are on the same host, this is trivial, as the underlying
-- filestorage is the same. If however the slave does not run
-- on the same host, the filestorage is not identical and we
-- need to request data from the host where ghc runs on.
--
-- If we however already have the requested file we need to make
-- sure that this file is the same one ghc sees. Hence we
-- calculate the Fingerprint of the file and send it back to the
-- host for comparison. The proxy will then send back either @Nothing@
-- indicating that the file on the host has the same Fingerprint, or
-- Maybe ByteString containing the payload to replace the existing
-- file with.
| 1,293 |
startSlave' :: Bool -> String -> PortNumber -> IO ()
startSlave' verbose base_path port = do
hSetBuffering stdin LineBuffering
hSetBuffering stdout LineBuffering
sock <- openSocket port
forever $ do
when verbose $ trace "Opening socket"
pipe <- acceptSocket sock >>= socketToPipe
putStrLn $ "Listening on port " ++ show port
when verbose $ trace "Starting serv"
uninterruptibleMask $ serv verbose (hook verbose base_path pipe) pipe
when verbose $ trace "serv ended"
return ()
-- | The iserv library may need access to files, specifically
-- archives and object files to be linked. If ghc and the slave
-- are on the same host, this is trivial, as the underlying
-- filestorage is the same. If however the slave does not run
-- on the same host, the filestorage is not identical and we
-- need to request data from the host where ghc runs on.
--
-- If we however already have the requested file we need to make
-- sure that this file is the same one ghc sees. Hence we
-- calculate the Fingerprint of the file and send it back to the
-- host for comparison. The proxy will then send back either @Nothing@
-- indicating that the file on the host has the same Fingerprint, or
-- Maybe ByteString containing the payload to replace the existing
-- file with.
| 1,293 |
startSlave' verbose base_path port = do
hSetBuffering stdin LineBuffering
hSetBuffering stdout LineBuffering
sock <- openSocket port
forever $ do
when verbose $ trace "Opening socket"
pipe <- acceptSocket sock >>= socketToPipe
putStrLn $ "Listening on port " ++ show port
when verbose $ trace "Starting serv"
uninterruptibleMask $ serv verbose (hook verbose base_path pipe) pipe
when verbose $ trace "serv ended"
return ()
-- | The iserv library may need access to files, specifically
-- archives and object files to be linked. If ghc and the slave
-- are on the same host, this is trivial, as the underlying
-- filestorage is the same. If however the slave does not run
-- on the same host, the filestorage is not identical and we
-- need to request data from the host where ghc runs on.
--
-- If we however already have the requested file we need to make
-- sure that this file is the same one ghc sees. Hence we
-- calculate the Fingerprint of the file and send it back to the
-- host for comparison. The proxy will then send back either @Nothing@
-- indicating that the file on the host has the same Fingerprint, or
-- Maybe ByteString containing the payload to replace the existing
-- file with.
| 1,240 | false | true | 0 | 13 | 262 | 181 | 85 | 96 | null | null |
conal/hermit | src/HERMIT/Kure.hs | bsd-2-clause
-- | Rewrite any children of a type of the form: @ForAllTy@ 'TyBinder' 'Type'
forAllTyAnyR :: (ExtendPath c Crumb, ReadPath c Crumb, AddBindings c, MonadCatch m)
=> Rewrite c m TyBinder -> Rewrite c m Type -> Rewrite c m Type
forAllTyAnyR r1 r2 = unwrapAnyR $ forAllTyAllR (wrapAnyR r1) (wrapAnyR r2)
| 314 |
forAllTyAnyR :: (ExtendPath c Crumb, ReadPath c Crumb, AddBindings c, MonadCatch m)
=> Rewrite c m TyBinder -> Rewrite c m Type -> Rewrite c m Type
forAllTyAnyR r1 r2 = unwrapAnyR $ forAllTyAllR (wrapAnyR r1) (wrapAnyR r2)
| 236 |
forAllTyAnyR r1 r2 = unwrapAnyR $ forAllTyAllR (wrapAnyR r1) (wrapAnyR r2)
| 74 | true | true | 0 | 8 | 65 | 98 | 48 | 50 | null | null |
Motions/motions | src/Bio/Motions/PDB/Write.hs | apache-2.0
rightInt :: Int -> Int -> Builder
rightInt just x = padding n <> intDec x
where
n = just - intLen x
intLen y = intLen' y 1
intLen' y acc | y < 0 = intLen' (-y) (acc + 1)
intLen' y acc | y <= 9 = acc
intLen' y acc = intLen' (y `div` 10) (acc + 1)
| 268 |
rightInt :: Int -> Int -> Builder
rightInt just x = padding n <> intDec x
where
n = just - intLen x
intLen y = intLen' y 1
intLen' y acc | y < 0 = intLen' (-y) (acc + 1)
intLen' y acc | y <= 9 = acc
intLen' y acc = intLen' (y `div` 10) (acc + 1)
| 268 |
rightInt just x = padding n <> intDec x
where
n = just - intLen x
intLen y = intLen' y 1
intLen' y acc | y < 0 = intLen' (-y) (acc + 1)
intLen' y acc | y <= 9 = acc
intLen' y acc = intLen' (y `div` 10) (acc + 1)
| 234 | false | true | 6 | 9 | 84 | 145 | 73 | 72 | null | null |
roberth/uu-helium | test/thompson/Thompson38.hs | gpl-3.0
main = reverse []
| 17 |
main = reverse []
| 17 |
main = reverse []
| 17 | false | false | 1 | 6 | 3 | 15 | 5 | 10 | null | null |
osa1/vindinium | src/Main.hs | mit
cmd :: Parser Cmd
cmd = subparser
( command "training" (info trainingCmd
( progDesc "Run bot in training mode" ))
<> command "arena" (info arenaCmd
(progDesc "Run bot in arena mode" ))
)
| 213 |
cmd :: Parser Cmd
cmd = subparser
( command "training" (info trainingCmd
( progDesc "Run bot in training mode" ))
<> command "arena" (info arenaCmd
(progDesc "Run bot in arena mode" ))
)
| 213 |
cmd = subparser
( command "training" (info trainingCmd
( progDesc "Run bot in training mode" ))
<> command "arena" (info arenaCmd
(progDesc "Run bot in arena mode" ))
)
| 195 | false | true | 1 | 12 | 59 | 66 | 30 | 36 | null | null |
ckaestne/CIDE | other/CaseStudies/fgl/CIDEfgl/Data/Graph/Inductive/Internal/Queue.hs | gpl-3.0
mkQueue :: Queue a;
mkQueue = MkQueue [] []
| 45 |
mkQueue :: Queue a
mkQueue = MkQueue [] []
| 42 |
mkQueue = MkQueue [] []
| 23 | false | true | 1 | 6 | 10 | 27 | 12 | 15 | null | null |
bergmark/haskell-opaleye | src/Opaleye/Internal/HaskellDB/Sql/Default.hs | bsd-3-clause
showAggrOp AggrVar = "Var"
| 37 |
showAggrOp AggrVar = "Var"
| 37 |
showAggrOp AggrVar = "Var"
| 37 | false | false | 0 | 5 | 14 | 9 | 4 | 5 | null | null |
yliu120/K3 | src/Language/K3/Utils/Pretty/Syntax.hs | apache-2.0
typeS :: K3 Type -> Either String String
typeS t = show C.<$> runSyntaxPrinter (typ t)
| 86 |
typeS :: K3 Type -> Either String String
typeS t = show C.<$> runSyntaxPrinter (typ t)
| 86 |
typeS t = show C.<$> runSyntaxPrinter (typ t)
| 45 | false | true | 0 | 8 | 15 | 41 | 19 | 22 | null | null |
a143753/AOJ | 0523.hs | apache-2.0
ans (0:_) = []
| 14 |
ans (0:_) = []
| 14 |
ans (0:_) = []
| 14 | false | false | 0 | 6 | 3 | 19 | 9 | 10 | null | null |
olsner/ghc | compiler/codeGen/StgCmmClosure.hs | bsd-3-clause
lfClosureType (LFCon con) = Constr (dataConTagZ con)
(dataConIdentity con)
| 145 |
lfClosureType (LFCon con) = Constr (dataConTagZ con)
(dataConIdentity con)
| 145 |
lfClosureType (LFCon con) = Constr (dataConTagZ con)
(dataConIdentity con)
| 145 | false | false | 0 | 7 | 79 | 33 | 15 | 18 | null | null |
phischu/fragnix | builtins/base/Data.Foldable.hs | bsd-3-clause
foldlM :: (Foldable t, Monad m) => (b -> a -> m b) -> b -> t a -> m b
foldlM f z0 xs = foldr f' return xs z0
where f' x k z = f z x >>= k
-- | Map each element of a structure to an action, evaluate these
-- actions from left to right, and ignore the results. For a version
-- that doesn't ignore the results see 'Data.Traversable.traverse'.
| 343 |
foldlM :: (Foldable t, Monad m) => (b -> a -> m b) -> b -> t a -> m b
foldlM f z0 xs = foldr f' return xs z0
where f' x k z = f z x >>= k
-- | Map each element of a structure to an action, evaluate these
-- actions from left to right, and ignore the results. For a version
-- that doesn't ignore the results see 'Data.Traversable.traverse'.
| 343 |
foldlM f z0 xs = foldr f' return xs z0
where f' x k z = f z x >>= k
-- | Map each element of a structure to an action, evaluate these
-- actions from left to right, and ignore the results. For a version
-- that doesn't ignore the results see 'Data.Traversable.traverse'.
| 273 | false | true | 0 | 10 | 78 | 100 | 50 | 50 | null | null |
OS2World/DEV-UTIL-HUGS | demos/Expr.hs | bsd-3-clause
number :: Parser Int
number = many1 digit >>> foldl (\a x -> 10*a+x) 0
| 83 |
number :: Parser Int
number = many1 digit >>> foldl (\a x -> 10*a+x) 0
| 83 |
number = many1 digit >>> foldl (\a x -> 10*a+x) 0
| 56 | false | true | 0 | 10 | 27 | 44 | 22 | 22 | null | null |
sdiehl/ghc | compiler/basicTypes/NameSet.hs | bsd-3-clause
delFVs ns s = delListFromNameSet s ns
| 37 |
delFVs ns s = delListFromNameSet s ns
| 37 |
delFVs ns s = delListFromNameSet s ns
| 37 | false | false | 0 | 5 | 6 | 16 | 7 | 9 | null | null |
mzero/plush | src/Plush/Server/Status.hs | apache-2.0
-- | A uniform, human readable prefix for a `ServerInfo`
serverInfoPrefix :: ServerInfo -> String
serverInfoPrefix si = serverTypePrefix (siType si) ++ extra (siType si) ++ proc
where
extra LocalServer = ""
extra (RemoteServer _) = ", ssh"
proc = "[" ++ show (siPid si) ++ "]"
-- | All server information is stored in files under @~/.plush/server@. This
-- function returns that path to some named file in that directory, making the
-- directories if needed. If @HOME@ cannot be determined, then all storage
-- functions do nothing.
| 547 |
serverInfoPrefix :: ServerInfo -> String
serverInfoPrefix si = serverTypePrefix (siType si) ++ extra (siType si) ++ proc
where
extra LocalServer = ""
extra (RemoteServer _) = ", ssh"
proc = "[" ++ show (siPid si) ++ "]"
-- | All server information is stored in files under @~/.plush/server@. This
-- function returns that path to some named file in that directory, making the
-- directories if needed. If @HOME@ cannot be determined, then all storage
-- functions do nothing.
| 490 |
serverInfoPrefix si = serverTypePrefix (siType si) ++ extra (siType si) ++ proc
where
extra LocalServer = ""
extra (RemoteServer _) = ", ssh"
proc = "[" ++ show (siPid si) ++ "]"
-- | All server information is stored in files under @~/.plush/server@. This
-- function returns that path to some named file in that directory, making the
-- directories if needed. If @HOME@ cannot be determined, then all storage
-- functions do nothing.
| 449 | true | true | 0 | 9 | 103 | 96 | 49 | 47 | null | null |
M42/interludio | Theory/Chords.hs | gpl-2.0
chordIntervals Maj7 = [4,3,4,1]
| 33 |
chordIntervals Maj7 = [4,3,4,1]
| 33 |
chordIntervals Maj7 = [4,3,4,1]
| 33 | false | false | 0 | 5 | 5 | 22 | 12 | 10 | null | null |
RobinKrom/interactive-brokers | library/API/IB/Parse.hs | bsd-3-clause
parseMaybe :: Parser a -> Parser (Maybe a)
parseMaybe p =
Just <$> p <|>
return Nothing <* parseStringField
| 112 |
parseMaybe :: Parser a -> Parser (Maybe a)
parseMaybe p =
Just <$> p <|>
return Nothing <* parseStringField
| 112 |
parseMaybe p =
Just <$> p <|>
return Nothing <* parseStringField
| 69 | false | true | 0 | 8 | 23 | 45 | 21 | 24 | null | null |
kazu-yamamoto/rpf | RPF/Parser.hs | bsd-3-clause
yesOrNo :: Parser Constant
yesOrNo = do
b <- yesno
return (DT_Sig, CV_Sig b)
where
yesno :: Parser Bool
yesno = sym2enum noyes [minBound..maxBound]
----------------------------------------------------------------
| 231 |
yesOrNo :: Parser Constant
yesOrNo = do
b <- yesno
return (DT_Sig, CV_Sig b)
where
yesno :: Parser Bool
yesno = sym2enum noyes [minBound..maxBound]
----------------------------------------------------------------
| 231 |
yesOrNo = do
b <- yesno
return (DT_Sig, CV_Sig b)
where
yesno :: Parser Bool
yesno = sym2enum noyes [minBound..maxBound]
----------------------------------------------------------------
| 204 | false | true | 0 | 9 | 43 | 74 | 33 | 41 | null | null |
gnn/Hets | TPTP/Pretty.hs | gpl-2.0
printTHF_conn_term :: THF_conn_term -> Doc
printTHF_conn_term x = case x of
THFC_pair a -> pretty a
THFC_assoc a -> pretty a
THFC_unary a -> pretty a
-- <thf_conditional> ::= $ite(<thf_logic_formula>,<thf_logic_formula>,
-- <thf_logic_formula>)
| 284 |
printTHF_conn_term :: THF_conn_term -> Doc
printTHF_conn_term x = case x of
THFC_pair a -> pretty a
THFC_assoc a -> pretty a
THFC_unary a -> pretty a
-- <thf_conditional> ::= $ite(<thf_logic_formula>,<thf_logic_formula>,
-- <thf_logic_formula>)
| 284 |
printTHF_conn_term x = case x of
THFC_pair a -> pretty a
THFC_assoc a -> pretty a
THFC_unary a -> pretty a
-- <thf_conditional> ::= $ite(<thf_logic_formula>,<thf_logic_formula>,
-- <thf_logic_formula>)
| 241 | false | true | 4 | 5 | 71 | 55 | 28 | 27 | null | null |
brendanhay/gogol | gogol-dfareporting/gen/Network/Google/Resource/DFAReporting/Reports/Delete.hs | mpl-2.0
-- | Creates a value of 'ReportsDelete' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'rdXgafv'
--
-- * 'rdUploadProtocol'
--
-- * 'rdAccessToken'
--
-- * 'rdReportId'
--
-- * 'rdUploadType'
--
-- * 'rdProFileId'
--
-- * 'rdCallback'
reportsDelete
:: Int64 -- ^ 'rdReportId'
-> Int64 -- ^ 'rdProFileId'
-> ReportsDelete
reportsDelete pRdReportId_ pRdProFileId_ =
ReportsDelete'
{ _rdXgafv = Nothing
, _rdUploadProtocol = Nothing
, _rdAccessToken = Nothing
, _rdReportId = _Coerce # pRdReportId_
, _rdUploadType = Nothing
, _rdProFileId = _Coerce # pRdProFileId_
, _rdCallback = Nothing
}
| 720 |
reportsDelete
:: Int64 -- ^ 'rdReportId'
-> Int64 -- ^ 'rdProFileId'
-> ReportsDelete
reportsDelete pRdReportId_ pRdProFileId_ =
ReportsDelete'
{ _rdXgafv = Nothing
, _rdUploadProtocol = Nothing
, _rdAccessToken = Nothing
, _rdReportId = _Coerce # pRdReportId_
, _rdUploadType = Nothing
, _rdProFileId = _Coerce # pRdProFileId_
, _rdCallback = Nothing
}
| 399 |
reportsDelete pRdReportId_ pRdProFileId_ =
ReportsDelete'
{ _rdXgafv = Nothing
, _rdUploadProtocol = Nothing
, _rdAccessToken = Nothing
, _rdReportId = _Coerce # pRdReportId_
, _rdUploadType = Nothing
, _rdProFileId = _Coerce # pRdProFileId_
, _rdCallback = Nothing
}
| 301 | true | true | 0 | 7 | 154 | 95 | 64 | 31 | null | null |
AlexanderPankiv/ghc | compiler/llvmGen/LlvmCodeGen/CodeGen.hs | bsd-3-clause
genCall (PrimTarget (MO_AtomicRMW width amop)) [dst] [addr, n] = runStmtsDecls $ do
addrVar <- exprToVarW addr
nVar <- exprToVarW n
let targetTy = widthToLlvmInt width
ptrExpr = Cast LM_Inttoptr addrVar (pLift targetTy)
ptrVar <- doExprW (pLift targetTy) ptrExpr
dstVar <- getCmmRegW (CmmLocal dst)
let op = case amop of
AMO_Add -> LAO_Add
AMO_Sub -> LAO_Sub
AMO_And -> LAO_And
AMO_Nand -> LAO_Nand
AMO_Or -> LAO_Or
AMO_Xor -> LAO_Xor
retVar <- doExprW targetTy $ AtomicRMW op ptrVar nVar SyncSeqCst
statement $ Store retVar dstVar
| 667 |
genCall (PrimTarget (MO_AtomicRMW width amop)) [dst] [addr, n] = runStmtsDecls $ do
addrVar <- exprToVarW addr
nVar <- exprToVarW n
let targetTy = widthToLlvmInt width
ptrExpr = Cast LM_Inttoptr addrVar (pLift targetTy)
ptrVar <- doExprW (pLift targetTy) ptrExpr
dstVar <- getCmmRegW (CmmLocal dst)
let op = case amop of
AMO_Add -> LAO_Add
AMO_Sub -> LAO_Sub
AMO_And -> LAO_And
AMO_Nand -> LAO_Nand
AMO_Or -> LAO_Or
AMO_Xor -> LAO_Xor
retVar <- doExprW targetTy $ AtomicRMW op ptrVar nVar SyncSeqCst
statement $ Store retVar dstVar
| 667 |
genCall (PrimTarget (MO_AtomicRMW width amop)) [dst] [addr, n] = runStmtsDecls $ do
addrVar <- exprToVarW addr
nVar <- exprToVarW n
let targetTy = widthToLlvmInt width
ptrExpr = Cast LM_Inttoptr addrVar (pLift targetTy)
ptrVar <- doExprW (pLift targetTy) ptrExpr
dstVar <- getCmmRegW (CmmLocal dst)
let op = case amop of
AMO_Add -> LAO_Add
AMO_Sub -> LAO_Sub
AMO_And -> LAO_And
AMO_Nand -> LAO_Nand
AMO_Or -> LAO_Or
AMO_Xor -> LAO_Xor
retVar <- doExprW targetTy $ AtomicRMW op ptrVar nVar SyncSeqCst
statement $ Store retVar dstVar
| 667 | false | false | 1 | 14 | 217 | 212 | 97 | 115 | null | null |
Mathnerd314/atomo | src/Atomo/Parser/Expr.hs | bsd-3-clause
pdChain :: Parser Expr
pdChain = do
pos <- getPosition
chain <- wsManyStart
(liftM DNormal (try pLiteral <|> pThis <|> parens pExpr) <|> chained)
chained
return $ dispatches pos chain
<?> "single dispatch"
where
chained = liftM DParticle $ choice
[ cKeyword False
, cSingle False
]
-- start off by dispatching on either a primitive or Top
dispatches :: SourcePos -> [Dispatch] -> Expr
dispatches p (DNormal e:ps) =
dispatches' p ps e
dispatches p (DParticle (PMSingle n):ps) =
dispatches' p ps (Dispatch (Just p) $ single n (ETop (Just p)))
dispatches p (DParticle (PMKeyword ns (Nothing:es)):ps) =
dispatches' p ps (Dispatch (Just p) $ T.keyword ns (ETop (Just p):map fromJust es))
dispatches _ ds = error $ "impossible: dispatches on " ++ show ds
-- roll a list of partial messages into a bunch of dispatches
dispatches' :: SourcePos -> [Dispatch] -> Expr -> Expr
dispatches' _ [] acc = acc
dispatches' p (DParticle (PMKeyword ns (Nothing:es)):ps) acc =
dispatches' p ps (Dispatch (Just p) $ T.keyword ns (acc : map fromJust es))
dispatches' p (DParticle (PMSingle n):ps) acc =
dispatches' p ps (Dispatch (Just p) $ single n acc)
dispatches' _ x y = error $ "impossible: dispatches' on " ++ show (x, y)
-- | A comma-separated list of zero or more expressions, surrounded by square
-- brackets.
--
-- Examples: @[]@, @[1, $a]@
| 1,482 |
pdChain :: Parser Expr
pdChain = do
pos <- getPosition
chain <- wsManyStart
(liftM DNormal (try pLiteral <|> pThis <|> parens pExpr) <|> chained)
chained
return $ dispatches pos chain
<?> "single dispatch"
where
chained = liftM DParticle $ choice
[ cKeyword False
, cSingle False
]
-- start off by dispatching on either a primitive or Top
dispatches :: SourcePos -> [Dispatch] -> Expr
dispatches p (DNormal e:ps) =
dispatches' p ps e
dispatches p (DParticle (PMSingle n):ps) =
dispatches' p ps (Dispatch (Just p) $ single n (ETop (Just p)))
dispatches p (DParticle (PMKeyword ns (Nothing:es)):ps) =
dispatches' p ps (Dispatch (Just p) $ T.keyword ns (ETop (Just p):map fromJust es))
dispatches _ ds = error $ "impossible: dispatches on " ++ show ds
-- roll a list of partial messages into a bunch of dispatches
dispatches' :: SourcePos -> [Dispatch] -> Expr -> Expr
dispatches' _ [] acc = acc
dispatches' p (DParticle (PMKeyword ns (Nothing:es)):ps) acc =
dispatches' p ps (Dispatch (Just p) $ T.keyword ns (acc : map fromJust es))
dispatches' p (DParticle (PMSingle n):ps) acc =
dispatches' p ps (Dispatch (Just p) $ single n acc)
dispatches' _ x y = error $ "impossible: dispatches' on " ++ show (x, y)
-- | A comma-separated list of zero or more expressions, surrounded by square
-- brackets.
--
-- Examples: @[]@, @[1, $a]@
| 1,482 |
pdChain = do
pos <- getPosition
chain <- wsManyStart
(liftM DNormal (try pLiteral <|> pThis <|> parens pExpr) <|> chained)
chained
return $ dispatches pos chain
<?> "single dispatch"
where
chained = liftM DParticle $ choice
[ cKeyword False
, cSingle False
]
-- start off by dispatching on either a primitive or Top
dispatches :: SourcePos -> [Dispatch] -> Expr
dispatches p (DNormal e:ps) =
dispatches' p ps e
dispatches p (DParticle (PMSingle n):ps) =
dispatches' p ps (Dispatch (Just p) $ single n (ETop (Just p)))
dispatches p (DParticle (PMKeyword ns (Nothing:es)):ps) =
dispatches' p ps (Dispatch (Just p) $ T.keyword ns (ETop (Just p):map fromJust es))
dispatches _ ds = error $ "impossible: dispatches on " ++ show ds
-- roll a list of partial messages into a bunch of dispatches
dispatches' :: SourcePos -> [Dispatch] -> Expr -> Expr
dispatches' _ [] acc = acc
dispatches' p (DParticle (PMKeyword ns (Nothing:es)):ps) acc =
dispatches' p ps (Dispatch (Just p) $ T.keyword ns (acc : map fromJust es))
dispatches' p (DParticle (PMSingle n):ps) acc =
dispatches' p ps (Dispatch (Just p) $ single n acc)
dispatches' _ x y = error $ "impossible: dispatches' on " ++ show (x, y)
-- | A comma-separated list of zero or more expressions, surrounded by square
-- brackets.
--
-- Examples: @[]@, @[1, $a]@
| 1,459 | false | true | 23 | 16 | 382 | 520 | 261 | 259 | null | null |
JRigotti/srtparser | Parser.hs | mit
operation :: String -> SyncOp
operation ('+':xs) = (Forward, read xs)
| 69 |
operation :: String -> SyncOp
operation ('+':xs) = (Forward, read xs)
| 69 |
operation ('+':xs) = (Forward, read xs)
| 39 | false | true | 0 | 7 | 10 | 34 | 18 | 16 | null | null |
adrienhaxaire/funfem | Geometry.hs | bsd-3-clause
-- | The `z` coordinate of the given `Node`. An exception is thrown in the case of a dimension mismatch.
zn :: Node -> Double
zn (Node _ p) = zp p
| 146 |
zn :: Node -> Double
zn (Node _ p) = zp p
| 41 |
zn (Node _ p) = zp p
| 20 | true | true | 0 | 9 | 31 | 36 | 16 | 20 | null | null |
tylerjl/adventofcode | src/Y2015/D15.hs | mit
recipeSum :: [Ingredient] -> [Int] -> [Int]
recipeSum i p = map (max 0) $ foldl' (zipWith (+)) [0,0,0,0,0] portions
where portions = zipWith toScores i p
| 157 |
recipeSum :: [Ingredient] -> [Int] -> [Int]
recipeSum i p = map (max 0) $ foldl' (zipWith (+)) [0,0,0,0,0] portions
where portions = zipWith toScores i p
| 157 |
recipeSum i p = map (max 0) $ foldl' (zipWith (+)) [0,0,0,0,0] portions
where portions = zipWith toScores i p
| 113 | false | true | 0 | 8 | 30 | 91 | 49 | 42 | null | null |
sergv/vector
|
Data/Vector/Fusion/Bundle/Monadic.hs
|
bsd-3-clause
|
postscanl' f = postscanlM' (\a b -> return (f a b))
| 51 |
postscanl' f = postscanlM' (\a b -> return (f a b))
| 51 |
postscanl' f = postscanlM' (\a b -> return (f a b))
| 51 | false | false | 1 | 10 | 10 | 37 | 16 | 21 | null | null |
rahulmutt/ghcvm
|
libraries/base/GHC/Natural.hs
|
bsd-3-clause
|
minusNatural (NatJ# x) (NatS# y)
= bigNatToNatural $ minusBigNatWord x y
| 76 |
minusNatural (NatJ# x) (NatS# y)
= bigNatToNatural $ minusBigNatWord x y
| 76 |
minusNatural (NatJ# x) (NatS# y)
= bigNatToNatural $ minusBigNatWord x y
| 76 | false | false | 2 | 6 | 14 | 35 | 15 | 20 | null | null |
google/codeworld
|
funblocks-client/src/Blocks/Types.hs
|
apache-2.0
|
conEndWith =
standardFunction
"conEndWith"
"endsWith"
Nothing
[typeText, typeText, typeBool]
["TEXTMAIN", "TEXTTEST"]
colorBool
"Test whether the text ends with the characters of the other text"
| 224 |
conEndWith =
standardFunction
"conEndWith"
"endsWith"
Nothing
[typeText, typeText, typeBool]
["TEXTMAIN", "TEXTTEST"]
colorBool
"Test whether the text ends with the characters of the other text"
| 224 |
conEndWith =
standardFunction
"conEndWith"
"endsWith"
Nothing
[typeText, typeText, typeBool]
["TEXTMAIN", "TEXTTEST"]
colorBool
"Test whether the text ends with the characters of the other text"
| 224 | false | false | 0 | 5 | 53 | 42 | 20 | 22 | null | null |
shtukas/Gaia
|
src/Gaia/ScanningAndRecordingManager.hs
|
mit
|
generalScan :: IO ()
generalScan = do
scanroots <- FSRM.getFSScanRoots
_ <- sequence $ fmap (\scanroot -> do
s1 <- computeMerkleRootForLocationRecursivelyComputedaAndStored scanroot
case s1 of
Nothing -> return ()
Just s2 -> do
putStrLn $ "location: " ++ scanroot
putStrLn $ "merkle : " ++ s2
commitMerkleRootForFSScanRoot scanroot s2
) scanroots
return ()
| 588 |
generalScan :: IO ()
generalScan = do
scanroots <- FSRM.getFSScanRoots
_ <- sequence $ fmap (\scanroot -> do
s1 <- computeMerkleRootForLocationRecursivelyComputedaAndStored scanroot
case s1 of
Nothing -> return ()
Just s2 -> do
putStrLn $ "location: " ++ scanroot
putStrLn $ "merkle : " ++ s2
commitMerkleRootForFSScanRoot scanroot s2
) scanroots
return ()
| 588 |
generalScan = do
scanroots <- FSRM.getFSScanRoots
_ <- sequence $ fmap (\scanroot -> do
s1 <- computeMerkleRootForLocationRecursivelyComputedaAndStored scanroot
case s1 of
Nothing -> return ()
Just s2 -> do
putStrLn $ "location: " ++ scanroot
putStrLn $ "merkle : " ++ s2
commitMerkleRootForFSScanRoot scanroot s2
) scanroots
return ()
| 567 | false | true | 0 | 21 | 284 | 125 | 56 | 69 | null | null |
kawu/tag-vanilla
|
src/NLP/TAG/Vanilla/Tree.hs
|
bsd-2-clause
|
showTree :: (a -> String) -> (b -> String) -> Tree a b -> String
showTree f g = unlines . go
where
go t = case t of
INode{..} -> ("INode " ++ f labelI)
: map (" " ++) (concatMap go subTrees)
FNode{..} -> ["FNode " ++ g labelF]
-- | Like `showTree`, but using the default `Show` instances
-- to present label values.
| 356 |
showTree :: (a -> String) -> (b -> String) -> Tree a b -> String
showTree f g = unlines . go
where
go t = case t of
INode{..} -> ("INode " ++ f labelI)
: map (" " ++) (concatMap go subTrees)
FNode{..} -> ["FNode " ++ g labelF]
-- | Like `showTree`, but using the default `Show` instances
-- to present label values.
| 356 |
showTree f g = unlines . go
where
go t = case t of
INode{..} -> ("INode " ++ f labelI)
: map (" " ++) (concatMap go subTrees)
FNode{..} -> ["FNode " ++ g labelF]
-- | Like `showTree`, but using the default `Show` instances
-- to present label values.
| 291 | false | true | 0 | 11 | 106 | 132 | 68 | 64 | null | null |
apyrgio/snf-ganeti
|
test/hs/Test/Ganeti/Types.hs
|
bsd-2-clause
|
case_NonEmpty_fail :: Assertion
case_NonEmpty_fail =
assertEqual "building non-empty list from an empty list"
(Bad "Received empty value for non-empty list") (mkNonEmpty ([]::[Int]))
| 188 |
case_NonEmpty_fail :: Assertion
case_NonEmpty_fail =
assertEqual "building non-empty list from an empty list"
(Bad "Received empty value for non-empty list") (mkNonEmpty ([]::[Int]))
| 188 |
case_NonEmpty_fail =
assertEqual "building non-empty list from an empty list"
(Bad "Received empty value for non-empty list") (mkNonEmpty ([]::[Int]))
| 156 | false | true | 0 | 9 | 27 | 41 | 22 | 19 | null | null |
dmwit/pi-eta-epsilon
|
src/Language/PiEtaEpsilon/Interactive/CmdLine.hs
|
bsd-3-clause
|
runShell :: PeeCmdLineState -> IO ()
runShell st = do
-- putStrLn versionInfo
-- putStrLn shellMessage
peeShell (mapToShellState st)
return ()
--------------------------------------------------------------------------
-- For dealing with input from stdin or the command line
| 287 |
runShell :: PeeCmdLineState -> IO ()
runShell st = do
-- putStrLn versionInfo
-- putStrLn shellMessage
peeShell (mapToShellState st)
return ()
--------------------------------------------------------------------------
-- For dealing with input from stdin or the command line
| 287 |
runShell st = do
-- putStrLn versionInfo
-- putStrLn shellMessage
peeShell (mapToShellState st)
return ()
--------------------------------------------------------------------------
-- For dealing with input from stdin or the command line
| 250 | false | true | 0 | 10 | 45 | 52 | 24 | 28 | null | null |
keera-studios/hsQt
|
Qtc/Enums/Gui/QAbstractPrintDialog.hs
|
bsd-2-clause
|
fPrintSelection :: PrintDialogOptions
fPrintSelection
= ifPrintDialogOptions $ 2
| 82 |
fPrintSelection :: PrintDialogOptions
fPrintSelection
= ifPrintDialogOptions $ 2
| 82 |
fPrintSelection
= ifPrintDialogOptions $ 2
| 44 | false | true | 2 | 6 | 9 | 23 | 9 | 14 | null | null |
futufeld/eclogues
|
eclogues-impl/gen-hs/AuroraSchedulerManager.hs
|
bsd-3-clause
|
write_AcquireLock_args :: (T.Protocol p, T.Transport t) => p t -> AcquireLock_args -> P.IO ()
write_AcquireLock_args oprot record = T.writeVal oprot $ from_AcquireLock_args record
| 179 |
write_AcquireLock_args :: (T.Protocol p, T.Transport t) => p t -> AcquireLock_args -> P.IO ()
write_AcquireLock_args oprot record = T.writeVal oprot $ from_AcquireLock_args record
| 179 |
write_AcquireLock_args oprot record = T.writeVal oprot $ from_AcquireLock_args record
| 85 | false | true | 0 | 9 | 22 | 69 | 32 | 37 | null | null |
Happstack/happstack-server
|
src/Happstack/Server/Internal/Clock.hs
|
bsd-3-clause
|
getApproximateUTCTime :: IO UTCTime
getApproximateUTCTime = posixSecondsToUTCTime <$> getApproximatePOSIXTime
| 109 |
getApproximateUTCTime :: IO UTCTime
getApproximateUTCTime = posixSecondsToUTCTime <$> getApproximatePOSIXTime
| 109 |
getApproximateUTCTime = posixSecondsToUTCTime <$> getApproximatePOSIXTime
| 73 | false | true | 0 | 5 | 8 | 18 | 9 | 9 | null | null |
jimburton/spamfilter
|
src/SpamFilter/Classify.hs
|
mit
|
{-| Fisher's combined probability test. -}
fisher :: [Float] -> Int -> Float
fisher probs numProbs = inverseChiSquare
(sum (map log probs) * negate 2.0) (2*numProbs)
| 189 |
fisher :: [Float] -> Int -> Float
fisher probs numProbs = inverseChiSquare
(sum (map log probs) * negate 2.0) (2*numProbs)
| 146 |
fisher probs numProbs = inverseChiSquare
(sum (map log probs) * negate 2.0) (2*numProbs)
| 112 | true | true | 0 | 10 | 49 | 61 | 31 | 30 | null | null |
juodaspaulius/clafer
|
src/Language/Clafer/Intermediate/SimpleScopeAnalyzer.hs
|
mit
|
analyzeRefs :: UIDIClaferMap -> [IClafer] -> Map String Integer -> IElement -> Map String Integer
analyzeRefs uidClaferMap' clafers analysis (IEClafer clafer) =
foldl (analyzeRefs uidClaferMap' clafers) analysis' (_elements clafer)
where
(Just (cardLb, cardUb)) = _card clafer
lowerOrFixedUpperBound = maximum [1, cardLb, cardUb]
analysis' = if (isJust $ _reference clafer)
then case (directSuper uidClaferMap' clafer) of
(Just c) -> Map.alter (maxLB lowerOrFixedUpperBound) (_uid c) analysis
Nothing -> analysis
else analysis
maxLB lb' Nothing = Just lb'
maxLB lb' (Just lb) = Just (max lb lb')
| 695 |
analyzeRefs :: UIDIClaferMap -> [IClafer] -> Map String Integer -> IElement -> Map String Integer
analyzeRefs uidClaferMap' clafers analysis (IEClafer clafer) =
foldl (analyzeRefs uidClaferMap' clafers) analysis' (_elements clafer)
where
(Just (cardLb, cardUb)) = _card clafer
lowerOrFixedUpperBound = maximum [1, cardLb, cardUb]
analysis' = if (isJust $ _reference clafer)
then case (directSuper uidClaferMap' clafer) of
(Just c) -> Map.alter (maxLB lowerOrFixedUpperBound) (_uid c) analysis
Nothing -> analysis
else analysis
maxLB lb' Nothing = Just lb'
maxLB lb' (Just lb) = Just (max lb lb')
| 695 |
analyzeRefs uidClaferMap' clafers analysis (IEClafer clafer) =
foldl (analyzeRefs uidClaferMap' clafers) analysis' (_elements clafer)
where
(Just (cardLb, cardUb)) = _card clafer
lowerOrFixedUpperBound = maximum [1, cardLb, cardUb]
analysis' = if (isJust $ _reference clafer)
then case (directSuper uidClaferMap' clafer) of
(Just c) -> Map.alter (maxLB lowerOrFixedUpperBound) (_uid c) analysis
Nothing -> analysis
else analysis
maxLB lb' Nothing = Just lb'
maxLB lb' (Just lb) = Just (max lb lb')
| 597 | false | true | 0 | 11 | 183 | 231 | 116 | 115 | null | null |
rubenpieters/gre-project
|
shared/test/GameStateSpec.hs
|
bsd-3-clause
|
emptyPlayer = Player
{ _deck = emptyDeck
, _hand = []
, _timersL = []
, _timersM = []
, _timersR = []
, _dps = 0
, _actions = 0
}
| 145 |
emptyPlayer = Player
{ _deck = emptyDeck
, _hand = []
, _timersL = []
, _timersM = []
, _timersR = []
, _dps = 0
, _actions = 0
}
| 145 |
emptyPlayer = Player
{ _deck = emptyDeck
, _hand = []
, _timersL = []
, _timersM = []
, _timersR = []
, _dps = 0
, _actions = 0
}
| 145 | false | false | 1 | 8 | 47 | 65 | 37 | 28 | null | null |
denibertovic/haskell
|
kubernetes/lib/Kubernetes/OpenAPI/ModelLens.hs
|
bsd-3-clause
|
-- * V1beta1ReplicaSet
-- | 'v1beta1ReplicaSetApiVersion' Lens
v1beta1ReplicaSetApiVersionL :: Lens_' V1beta1ReplicaSet (Maybe Text)
v1beta1ReplicaSetApiVersionL f V1beta1ReplicaSet{..} = (\v1beta1ReplicaSetApiVersion -> V1beta1ReplicaSet { v1beta1ReplicaSetApiVersion, ..} ) <$> f v1beta1ReplicaSetApiVersion
| 310 |
v1beta1ReplicaSetApiVersionL :: Lens_' V1beta1ReplicaSet (Maybe Text)
v1beta1ReplicaSetApiVersionL f V1beta1ReplicaSet{..} = (\v1beta1ReplicaSetApiVersion -> V1beta1ReplicaSet { v1beta1ReplicaSetApiVersion, ..} ) <$> f v1beta1ReplicaSetApiVersion
| 246 |
v1beta1ReplicaSetApiVersionL f V1beta1ReplicaSet{..} = (\v1beta1ReplicaSetApiVersion -> V1beta1ReplicaSet { v1beta1ReplicaSetApiVersion, ..} ) <$> f v1beta1ReplicaSetApiVersion
| 176 | true | true | 0 | 8 | 27 | 58 | 31 | 27 | null | null |
lonnen/alonzo
|
src/Alonzo.hs
|
mpl-2.0
|
cdr badArgList = throwError $ NumArgs 1 badArgList
| 50 |
cdr badArgList = throwError $ NumArgs 1 badArgList
| 50 |
cdr badArgList = throwError $ NumArgs 1 badArgList
| 50 | false | false | 1 | 6 | 7 | 21 | 8 | 13 | null | null |
fmthoma/ghc
|
compiler/specialise/Rules.hs
|
bsd-3-clause
|
match renv subst (Type ty1) (Type ty2)
= match_ty renv subst ty1 ty2
| 70 |
match renv subst (Type ty1) (Type ty2)
= match_ty renv subst ty1 ty2
| 70 |
match renv subst (Type ty1) (Type ty2)
= match_ty renv subst ty1 ty2
| 70 | false | false | 1 | 7 | 14 | 43 | 17 | 26 | null | null |
alphaHeavy/hlint
|
data/Default.hs
|
gpl-2.0
|
-- I/O
error = putStrLn (show x) ==> print x
| 45 |
error = putStrLn (show x) ==> print x
| 37 |
error = putStrLn (show x) ==> print x
| 37 | true | false | 1 | 7 | 10 | 26 | 11 | 15 | null | null |
hpdeifel/hcharselect
|
CliMain.hs
|
gpl-3.0
|
defaultCliConfig = CliConf ""
| 29 |
defaultCliConfig = CliConf ""
| 29 |
defaultCliConfig = CliConf ""
| 29 | false | false | 0 | 5 | 3 | 9 | 4 | 5 | null | null |
m00nlight/99-problems
|
haskell/p-61.hs
|
bsd-3-clause
|
leaves (Branch x Empty Empty) = [x]
| 35 |
leaves (Branch x Empty Empty) = [x]
| 35 |
leaves (Branch x Empty Empty) = [x]
| 35 | false | false | 0 | 6 | 6 | 23 | 11 | 12 | null | null |
Yuras/tide
|
src/HaskellLex.hs
|
bsd-3-clause
|
isDashes :: Text -> Bool
isDashes txt = "--" `Text.isPrefixOf` txt && Text.all (== '-') txt
| 91 |
isDashes :: Text -> Bool
isDashes txt = "--" `Text.isPrefixOf` txt && Text.all (== '-') txt
| 91 |
isDashes txt = "--" `Text.isPrefixOf` txt && Text.all (== '-') txt
| 66 | false | true | 0 | 7 | 15 | 41 | 22 | 19 | null | null |
sdiehl/ghc
|
libraries/base/GHC/Event/PSQ.hs
|
bsd-3-clause
|
delete :: Key -> IntPSQ v -> IntPSQ v
delete k = go
where
go t = case t of
Nil -> Nil
Tip k' _ _
| k == k' -> Nil
| otherwise -> t
Bin k' p' x' m l r
| nomatch k k' m -> t
| k == k' -> merge m l r
| zero k m -> binShrinkL k' p' x' m (go l) r
| otherwise -> binShrinkR k' p' x' m l (go r)
-- | /O(min(n,W))/ Delete the binding with the least priority, and return the
-- rest of the queue stripped of that binding. In case the queue is empty, the
-- empty queue is returned again.
| 607 |
delete :: Key -> IntPSQ v -> IntPSQ v
delete k = go
where
go t = case t of
Nil -> Nil
Tip k' _ _
| k == k' -> Nil
| otherwise -> t
Bin k' p' x' m l r
| nomatch k k' m -> t
| k == k' -> merge m l r
| zero k m -> binShrinkL k' p' x' m (go l) r
| otherwise -> binShrinkR k' p' x' m l (go r)
-- | /O(min(n,W))/ Delete the binding with the least priority, and return the
-- rest of the queue stripped of that binding. In case the queue is empty, the
-- empty queue is returned again.
| 607 |
delete k = go
where
go t = case t of
Nil -> Nil
Tip k' _ _
| k == k' -> Nil
| otherwise -> t
Bin k' p' x' m l r
| nomatch k k' m -> t
| k == k' -> merge m l r
| zero k m -> binShrinkL k' p' x' m (go l) r
| otherwise -> binShrinkR k' p' x' m l (go r)
-- | /O(min(n,W))/ Delete the binding with the least priority, and return the
-- rest of the queue stripped of that binding. In case the queue is empty, the
-- empty queue is returned again.
| 569 | false | true | 0 | 11 | 247 | 197 | 91 | 106 | null | null |
duplode/threepenny-gui
|
src/Graphics/UI/Threepenny/SVG/Attributes.hs
|
bsd-3-clause
|
fill = strAttr "fill"
| 46 |
fill = strAttr "fill"
| 46 |
fill = strAttr "fill"
| 46 | false | false | 1 | 5 | 28 | 12 | 4 | 8 | null | null |
abakst/liquidhaskell
|
benchmarks/vector-0.10.0.1/Data/Vector/Unboxed.hs
|
bsd-3-clause
|
fold1M'_ = G.fold1M'_
| 21 |
fold1M'_ = G.fold1M'_
| 21 |
fold1M'_ = G.fold1M'_
| 21 | false | false | 1 | 6 | 2 | 12 | 4 | 8 | null | null |
ekmett/thyme
|
tests/sanity.hs
|
bsd-3-clause
|
prop_parseTime :: Spec -> UTCTime -> Property
prop_parseTime (Spec spec) (T.formatTime defaultTimeLocale spec . toTime -> s)
= printTestCase desc (fmap toTime t == t') where
t = parseTime defaultTimeLocale spec s
t' = T.parseTime defaultTimeLocale spec s
tp = P.parseOnly (timeParser defaultTimeLocale spec)
. SL.toStrict . S.toLazyByteString . S.stringUtf8
desc = "input: " ++ show s ++ "\nthyme: " ++ show t
++ "\ntime: " ++ show t' ++ "\nstate: " ++ show (tp s)
------------------------------------------------------------------------
| 579 |
prop_parseTime :: Spec -> UTCTime -> Property
prop_parseTime (Spec spec) (T.formatTime defaultTimeLocale spec . toTime -> s)
= printTestCase desc (fmap toTime t == t') where
t = parseTime defaultTimeLocale spec s
t' = T.parseTime defaultTimeLocale spec s
tp = P.parseOnly (timeParser defaultTimeLocale spec)
. SL.toStrict . S.toLazyByteString . S.stringUtf8
desc = "input: " ++ show s ++ "\nthyme: " ++ show t
++ "\ntime: " ++ show t' ++ "\nstate: " ++ show (tp s)
------------------------------------------------------------------------
| 579 |
prop_parseTime (Spec spec) (T.formatTime defaultTimeLocale spec . toTime -> s)
= printTestCase desc (fmap toTime t == t') where
t = parseTime defaultTimeLocale spec s
t' = T.parseTime defaultTimeLocale spec s
tp = P.parseOnly (timeParser defaultTimeLocale spec)
. SL.toStrict . S.toLazyByteString . S.stringUtf8
desc = "input: " ++ show s ++ "\nthyme: " ++ show t
++ "\ntime: " ++ show t' ++ "\nstate: " ++ show (tp s)
------------------------------------------------------------------------
| 533 | false | true | 0 | 12 | 118 | 180 | 89 | 91 | null | null |
Acidburn0zzz/maclight
|
src/Apple/Maclight.hs
|
mit
|
getDirectory :: Light -> FilePath
getDirectory Screen = "/sys/class/backlight/intel_backlight/"
| 95 |
getDirectory :: Light -> FilePath
getDirectory Screen = "/sys/class/backlight/intel_backlight/"
| 95 |
getDirectory Screen = "/sys/class/backlight/intel_backlight/"
| 61 | false | true | 0 | 5 | 8 | 18 | 9 | 9 | null | null |
ghc-android/ghc
|
compiler/simplCore/SetLevels.hs
|
bsd-3-clause
|
incMajorLvl :: Level -> Level
incMajorLvl (Level major _) = Level (major + 1) 0
| 79 |
incMajorLvl :: Level -> Level
incMajorLvl (Level major _) = Level (major + 1) 0
| 79 |
incMajorLvl (Level major _) = Level (major + 1) 0
| 49 | false | true | 0 | 7 | 14 | 38 | 19 | 19 | null | null |
mightymoose/liquidhaskell
|
benchmarks/xmonad-0.10/tests/Properties.hs
|
bsd-3-clause
|
prop_screens (x :: T) = n `elem` screens x
where
n = current x
| 67 |
prop_screens (x :: T) = n `elem` screens x
where
n = current x
| 67 |
prop_screens (x :: T) = n `elem` screens x
where
n = current x
| 67 | false | false | 1 | 6 | 18 | 41 | 18 | 23 | null | null |
ilyasergey/GHC-XAppFix
|
compiler/llvmGen/Llvm/Types.hs
|
bsd-3-clause
|
getPlainName (LMLocalVar x LMLabel ) = show x
| 47 |
getPlainName (LMLocalVar x LMLabel ) = show x
| 47 |
getPlainName (LMLocalVar x LMLabel ) = show x
| 47 | false | false | 0 | 7 | 9 | 20 | 9 | 11 | null | null |
proneetv/ariaDB
|
src/Service/BPlusTree/Helper.hs
|
bsd-3-clause
|
-- | It takes an index, a value, and a list, and updates the value in the list at
-- the given index.
updateAt :: Int -> a -> [a] -> [a]
updateAt index x xs = take index xs ++ [x] ++ drop (index+1) xs
| 199 |
updateAt :: Int -> a -> [a] -> [a]
updateAt index x xs = take index xs ++ [x] ++ drop (index+1) xs
| 98 |
updateAt index x xs = take index xs ++ [x] ++ drop (index+1) xs
| 63 | true | true | 0 | 10 | 45 | 71 | 36 | 35 | null | null |
brendanhay/gogol
|
gogol-dlp/gen/Network/Google/DLP/Types/Product.hs
|
mpl-2.0
|
-- | Google Cloud Storage options.
gpdvscCloudStorageOptions :: Lens' GooglePrivacyDlpV2StorageConfig (Maybe GooglePrivacyDlpV2CloudStorageOptions)
gpdvscCloudStorageOptions
= lens _gpdvscCloudStorageOptions
(\ s a -> s{_gpdvscCloudStorageOptions = a})
| 260 |
gpdvscCloudStorageOptions :: Lens' GooglePrivacyDlpV2StorageConfig (Maybe GooglePrivacyDlpV2CloudStorageOptions)
gpdvscCloudStorageOptions
= lens _gpdvscCloudStorageOptions
(\ s a -> s{_gpdvscCloudStorageOptions = a})
| 225 |
gpdvscCloudStorageOptions
= lens _gpdvscCloudStorageOptions
(\ s a -> s{_gpdvscCloudStorageOptions = a})
| 112 | true | true | 0 | 9 | 30 | 48 | 25 | 23 | null | null |
sdiehl/ghc
|
compiler/utils/Outputable.hs
|
bsd-3-clause
|
arrow = unicodeSyntax (char '→') (docToSDoc $ Pretty.text "->")
| 68 |
arrow = unicodeSyntax (char '→') (docToSDoc $ Pretty.text "->")
| 68 |
arrow = unicodeSyntax (char '→') (docToSDoc $ Pretty.text "->")
| 68 | false | false | 0 | 9 | 13 | 29 | 14 | 15 | null | null |
wkoiking/fieldequip
|
src/Backend.hs
|
bsd-3-clause
|
oneOrTwoCentralStation ss = nub [ss !! (floor num), ss !! (ceiling num)]
where num = fromIntegral (length ss - 1) / 2
-- Draw Antennas
| 136 |
oneOrTwoCentralStation ss = nub [ss !! (floor num), ss !! (ceiling num)]
where num = fromIntegral (length ss - 1) / 2
-- Draw Antennas
| 136 |
oneOrTwoCentralStation ss = nub [ss !! (floor num), ss !! (ceiling num)]
where num = fromIntegral (length ss - 1) / 2
-- Draw Antennas
| 136 | false | false | 1 | 9 | 26 | 68 | 32 | 36 | null | null |
gentoo-haskell/hackport
|
Portage/GHCCore.hs
|
gpl-3.0
|
ghc881 :: (DC.CompilerInfo, InstalledPackageIndex)
ghc881 = (ghc [8,8,1], mkIndex ghc881_pkgs)
| 94 |
ghc881 :: (DC.CompilerInfo, InstalledPackageIndex)
ghc881 = (ghc [8,8,1], mkIndex ghc881_pkgs)
| 94 |
ghc881 = (ghc [8,8,1], mkIndex ghc881_pkgs)
| 43 | false | true | 0 | 8 | 9 | 47 | 24 | 23 | null | null |
gergoerdi/soko-dash
|
src/SokoDash/NCurses/Render.hs
|
bsd-3-clause
|
renderState :: State -> Update ()
renderState State{..} = do
forM_ (Array.assocs stateWorld) . uncurry $ \pos field -> do
moveTo pos
drawString [fieldToChar open field]
moveTo statePos
drawString "R"
where
open = stateLambdaRemaining == 0
moveTo (x, y) = moveCursor (fromIntegral y) (fromIntegral x)
| 337 |
renderState :: State -> Update ()
renderState State{..} = do
forM_ (Array.assocs stateWorld) . uncurry $ \pos field -> do
moveTo pos
drawString [fieldToChar open field]
moveTo statePos
drawString "R"
where
open = stateLambdaRemaining == 0
moveTo (x, y) = moveCursor (fromIntegral y) (fromIntegral x)
| 337 |
renderState State{..} = do
forM_ (Array.assocs stateWorld) . uncurry $ \pos field -> do
moveTo pos
drawString [fieldToChar open field]
moveTo statePos
drawString "R"
where
open = stateLambdaRemaining == 0
moveTo (x, y) = moveCursor (fromIntegral y) (fromIntegral x)
| 303 | false | true | 10 | 7 | 82 | 121 | 64 | 57 | null | null |