_id
stringlengths 64
64
| repository
stringlengths 6
84
| name
stringlengths 4
110
| content
stringlengths 0
248k
| license
null | download_url
stringlengths 89
454
| language
stringclasses 7
values | comments
stringlengths 0
74.6k
| code
stringlengths 0
248k
|
---|---|---|---|---|---|---|---|---|
7c5fcc2a06b726039a97bb6a326b75fd294128ae32fe3760cc06707506fc0f56
|
disco-framework/disco
|
barkeeper_tests.erl
|
%% @hidden to edoc
-module(barkeeper_tests).
-ifdef(TEST).
-include_lib("eunit/include/eunit.hrl").
%%
%% tests
ask_test() ->
MockedMods = [port_utils, json],
meck:new(MockedMods, [passthrough]),
meck:new(application, [passthrough, unstick]),
meck:expect(port_utils, easy_open_killer_port, fun(_) -> foo_port end),
PortCmdDoubleAnswer = fun(_,_) ->
self() ! {foo_port, {data, {bar, <<"somebinstring">>}}},
self() ! {foo_port, {data, {bar, <<"somebinstring">>}}}
end,
PortCmdNoAnswer = fun(_,_) -> ok end,
meck:expect(application, get_env, fun(port_call_timeout) -> {ok, 10} end),
meck:expect(json, default_decoder, fun() -> ok end),
meck:sequence(json, process, 2, [{incomplete, foo},
{ok, [{<<"worker input">>, [<<"AnswerString">>]}]},
{bad_json, bar}]),
{ok, SupPid} = barkeeper_sup:start_link("ignored executable path"),
try
meck:expect(port_utils, port_command, PortCmdDoubleAnswer),
?assertEqual({ok, ["AnswerString"]}, barkeeper:ask("ProblemString", "StateString")),
?assertEqual({error, illegal_json}, barkeeper:ask("ProblemString", "StateString")),
meck:expect(port_utils, port_command, PortCmdNoAnswer),
?assertEqual({error, timeout}, barkeeper:ask("ProblemString", "StateString"))
after
exit(SupPid, normal),
?assert(meck:validate(MockedMods)),
meck:unload(MockedMods),
meck:unload(application)
end.
-endif.
| null |
https://raw.githubusercontent.com/disco-framework/disco/f55f35d46d43ef5f4fa1466bdf8d662f5f01f30f/src/test/barkeeper_tests.erl
|
erlang
|
@hidden to edoc
tests
|
-module(barkeeper_tests).
-ifdef(TEST).
-include_lib("eunit/include/eunit.hrl").
ask_test() ->
MockedMods = [port_utils, json],
meck:new(MockedMods, [passthrough]),
meck:new(application, [passthrough, unstick]),
meck:expect(port_utils, easy_open_killer_port, fun(_) -> foo_port end),
PortCmdDoubleAnswer = fun(_,_) ->
self() ! {foo_port, {data, {bar, <<"somebinstring">>}}},
self() ! {foo_port, {data, {bar, <<"somebinstring">>}}}
end,
PortCmdNoAnswer = fun(_,_) -> ok end,
meck:expect(application, get_env, fun(port_call_timeout) -> {ok, 10} end),
meck:expect(json, default_decoder, fun() -> ok end),
meck:sequence(json, process, 2, [{incomplete, foo},
{ok, [{<<"worker input">>, [<<"AnswerString">>]}]},
{bad_json, bar}]),
{ok, SupPid} = barkeeper_sup:start_link("ignored executable path"),
try
meck:expect(port_utils, port_command, PortCmdDoubleAnswer),
?assertEqual({ok, ["AnswerString"]}, barkeeper:ask("ProblemString", "StateString")),
?assertEqual({error, illegal_json}, barkeeper:ask("ProblemString", "StateString")),
meck:expect(port_utils, port_command, PortCmdNoAnswer),
?assertEqual({error, timeout}, barkeeper:ask("ProblemString", "StateString"))
after
exit(SupPid, normal),
?assert(meck:validate(MockedMods)),
meck:unload(MockedMods),
meck:unload(application)
end.
-endif.
|
9a4229d7a184c8e7901118405da20826a0e732199b91d60143427779eacea645
|
hansroland/CoreLang
|
Heap.hs
|
-- ----------------------------------------------------------------------------
-- Utils.Heap.hs - An implementation of a heap structure
-- Should be replaced later by a Hackage library
-- ----------------------------------------------------------------------------
module Utils.Heap
(Heap
, Addr
, hInitial
, hAlloc
, hUpdate
, hSize
, hFree
, hAddresses
, hLookup)
where
import Utils.Assoc
type Addr = Int
-- | Data type Heap
data Heap a = Heap Addr [Addr] [(Addr, a)]
-- Note: Do not automatically derive Show, The free list in infinite...
-- | Create a Heap
hInitial :: Heap a
hInitial = Heap 0 [1..] []
-- | Add a new element to the heap
-- Add the new element at the beginning of the list, and remove the address
hAlloc :: Heap a -> a -> (Heap a, Addr)
hAlloc (Heap size (next : free) xs) x = (Heap (size + 1) free ((next, x) : xs), next)
hAlloc (Heap _ [] _) _ = error "Heap.hs:hAlloc - Empty free list"
-- | Update an element added earlier to the heap
hUpdate :: Heap a -> Addr -> a -> Heap a
hUpdate (Heap size free xs) a x = Heap size free ((a,x) : remove xs a)
| Remove an address from the Heap
hFree :: Heap a -> Addr -> Heap a
hFree (Heap size free xs) a = Heap (size - 1) (a:free) (remove xs a)
-- | Helper function remove
remove :: [(Int, a)] -> Int -> [(Int, a)]
remove [] adr = error ("Heap.remove - Attemot to update or free nonexistent address"
++ show adr)
remove ((a, x) : xs) adr
| a == adr = xs
| otherwise = (a,x) : remove xs adr
-- Additional Functions
-- | Return the number of elements in our heap
hSize :: Heap a -> Int
hSize (Heap size _ _) = size
-- | Return all addresses with data stored in our heap
hAddresses :: Heap a -> [Addr]
hAddresses (Heap _ _ xs) = [addr | (addr, _) <- xs]
-- | Loopkup an element with its address
hLookup :: Heap a -> Addr -> a
hLookup (Heap _ _ xs) a = aLookup xs a (error ("Heap.hLokup - can't find address " ++ show a))
| null |
https://raw.githubusercontent.com/hansroland/CoreLang/a740b5d5158842a1ce05cc614ad10f0c676d3690/src/Utils/Heap.hs
|
haskell
|
----------------------------------------------------------------------------
Utils.Heap.hs - An implementation of a heap structure
Should be replaced later by a Hackage library
----------------------------------------------------------------------------
| Data type Heap
Note: Do not automatically derive Show, The free list in infinite...
| Create a Heap
| Add a new element to the heap
Add the new element at the beginning of the list, and remove the address
| Update an element added earlier to the heap
| Helper function remove
Additional Functions
| Return the number of elements in our heap
| Return all addresses with data stored in our heap
| Loopkup an element with its address
|
module Utils.Heap
(Heap
, Addr
, hInitial
, hAlloc
, hUpdate
, hSize
, hFree
, hAddresses
, hLookup)
where
import Utils.Assoc
type Addr = Int
data Heap a = Heap Addr [Addr] [(Addr, a)]
hInitial :: Heap a
hInitial = Heap 0 [1..] []
hAlloc :: Heap a -> a -> (Heap a, Addr)
hAlloc (Heap size (next : free) xs) x = (Heap (size + 1) free ((next, x) : xs), next)
hAlloc (Heap _ [] _) _ = error "Heap.hs:hAlloc - Empty free list"
hUpdate :: Heap a -> Addr -> a -> Heap a
hUpdate (Heap size free xs) a x = Heap size free ((a,x) : remove xs a)
| Remove an address from the Heap
hFree :: Heap a -> Addr -> Heap a
hFree (Heap size free xs) a = Heap (size - 1) (a:free) (remove xs a)
remove :: [(Int, a)] -> Int -> [(Int, a)]
remove [] adr = error ("Heap.remove - Attemot to update or free nonexistent address"
++ show adr)
remove ((a, x) : xs) adr
| a == adr = xs
| otherwise = (a,x) : remove xs adr
hSize :: Heap a -> Int
hSize (Heap size _ _) = size
hAddresses :: Heap a -> [Addr]
hAddresses (Heap _ _ xs) = [addr | (addr, _) <- xs]
hLookup :: Heap a -> Addr -> a
hLookup (Heap _ _ xs) a = aLookup xs a (error ("Heap.hLokup - can't find address " ++ show a))
|
620aff1e495093ff9e60391691863a93d7cd60c820bc91e358dd04bbe9437403
|
LennMars/algorithms_in_OCaml
|
main.ml
|
open Util
open IntSet
let eratosthenes n =
let rec eratosthenes_aux primes remains =
let minimum = min_elt remains in
if minimum * minimum > (max_elt remains) then
union primes remains
else
eratosthenes_aux (add minimum primes) (filter (fun m -> m mod minimum <> 0) remains)
in
List.range 3 n 2 |> add_list (singleton 2) |> eratosthenes_aux empty |> elements
open Bigarray;;
let eratosthenes_better n = (* more fast *)
let nums = Array1.create int c_layout (n + 1) in
for i = 0 to n do
Array1.set nums i i
done;
let term = sqrt (float n) |> int_of_float |> ( + ) 1 in
let rec aux m j =
if j > n then ()
else let _ = Array1.set nums j 0 in aux m (j + m)
in
let rec aux2 k =
if k > term then ()
else let _ = aux k (2 * k) in aux2 (k + 1)
in
let _ = aux2 2 in
let rec make_ans i accum =
if i = 0 then accum
else make_ans (i - 1) (let j = Array1.get nums i in if j = 0 then accum else j :: accum)
in
make_ans n [] |> List.filter ((<>) 0) |> List.tl
| null |
https://raw.githubusercontent.com/LennMars/algorithms_in_OCaml/f7fb8ca9f497883d86be3167bfc98a4a28ac73c9/eratosthenes/main.ml
|
ocaml
|
more fast
|
open Util
open IntSet
let eratosthenes n =
let rec eratosthenes_aux primes remains =
let minimum = min_elt remains in
if minimum * minimum > (max_elt remains) then
union primes remains
else
eratosthenes_aux (add minimum primes) (filter (fun m -> m mod minimum <> 0) remains)
in
List.range 3 n 2 |> add_list (singleton 2) |> eratosthenes_aux empty |> elements
open Bigarray;;
let nums = Array1.create int c_layout (n + 1) in
for i = 0 to n do
Array1.set nums i i
done;
let term = sqrt (float n) |> int_of_float |> ( + ) 1 in
let rec aux m j =
if j > n then ()
else let _ = Array1.set nums j 0 in aux m (j + m)
in
let rec aux2 k =
if k > term then ()
else let _ = aux k (2 * k) in aux2 (k + 1)
in
let _ = aux2 2 in
let rec make_ans i accum =
if i = 0 then accum
else make_ans (i - 1) (let j = Array1.get nums i in if j = 0 then accum else j :: accum)
in
make_ans n [] |> List.filter ((<>) 0) |> List.tl
|
2cbc09407528d297ff918dc439c59644442722d05ce54b933044d69b1a1e25b4
|
Mayvenn/storefront
|
graphql.clj
|
(ns storefront.system.contentful.graphql
(:require [clojure.java.io :as io]
[clojure.string :as string]
[environ.core :refer [env]]
[tugboat.core :as tugboat]
[cheshire.core :as json]))
(defn- request
"Please use [[query]] when possible"
[{:keys [graphql-endpoint preview-api-key api-key space-id env-id]} gql variables]
(try
(tugboat/request {:endpoint graphql-endpoint}
:post (str "/content/v1/spaces/" space-id "/environments/" env-id)
{:socket-timeout 10000
:conn-timeout 10000
:as :json
:headers {"Authorization" (str "Bearer " (if (get variables "preview")
preview-api-key
api-key))
"Content-Type" "application/json"}
:body (json/generate-string {:query (str gql)
:variables variables})})
(catch java.io.IOException ioe
nil)))
(defn query [contentful-ctx file variables]
(if-not (= (env :environment) "development")
(request contentful-ctx (slurp (io/resource (str "gql/" file))) variables)
(request contentful-ctx (slurp (io/resource (str "gql/" file))) variables)))
(comment
(query (:contentful dev-system/the-system) "static_page.gql" {"$preview" false "$path" "/policy/privacy"})
(query (:contentful dev-system/the-system) "all_static_pages.gql" {"$preview" false}))
| null |
https://raw.githubusercontent.com/Mayvenn/storefront/ffca6d1b8c6ecd672e77fb739b3e7c5684df2dac/src/storefront/system/contentful/graphql.clj
|
clojure
|
(ns storefront.system.contentful.graphql
(:require [clojure.java.io :as io]
[clojure.string :as string]
[environ.core :refer [env]]
[tugboat.core :as tugboat]
[cheshire.core :as json]))
(defn- request
"Please use [[query]] when possible"
[{:keys [graphql-endpoint preview-api-key api-key space-id env-id]} gql variables]
(try
(tugboat/request {:endpoint graphql-endpoint}
:post (str "/content/v1/spaces/" space-id "/environments/" env-id)
{:socket-timeout 10000
:conn-timeout 10000
:as :json
:headers {"Authorization" (str "Bearer " (if (get variables "preview")
preview-api-key
api-key))
"Content-Type" "application/json"}
:body (json/generate-string {:query (str gql)
:variables variables})})
(catch java.io.IOException ioe
nil)))
(defn query [contentful-ctx file variables]
(if-not (= (env :environment) "development")
(request contentful-ctx (slurp (io/resource (str "gql/" file))) variables)
(request contentful-ctx (slurp (io/resource (str "gql/" file))) variables)))
(comment
(query (:contentful dev-system/the-system) "static_page.gql" {"$preview" false "$path" "/policy/privacy"})
(query (:contentful dev-system/the-system) "all_static_pages.gql" {"$preview" false}))
|
|
48e145e7e48a1bc858380935995cee3ee1df13e111da3dde25fefba0d28105f1
|
maxhbr/LDBcollector
|
Base.hs
|
module Model.LicenseProperties.Base
where
type LicenseName
= String
type URL
= String
| null |
https://raw.githubusercontent.com/maxhbr/LDBcollector/51d940f0af00b2acdd7de246b2be16fa30fc8a6b/src/Model/LicenseProperties/Base.hs
|
haskell
|
module Model.LicenseProperties.Base
where
type LicenseName
= String
type URL
= String
|
|
48ed98007c03409fbd2cce609703629c735eb2d490a9c58f1d800f1524dc9b2d
|
kit-clj/kit
|
core.clj
|
(ns myapp.core
(:require
[clojure.tools.logging :as log]
[integrant.core :as ig]
[wake.guestbook.config :as config]
[wake.guestbook.env :refer [defaults]]
;; Edges
[kit.edge.utils.repl]
[kit.edge.server.undertow]
[wake.guestbook.web.handler]
;; Routes
[wake.guestbook.web.routes.api]
[wake.guestbook.web.routes.pages]
)
(:gen-class))
;; log uncaught exceptions in threads
(Thread/setDefaultUncaughtExceptionHandler
(reify Thread$UncaughtExceptionHandler
(uncaughtException [_ thread ex]
(log/error {:what :uncaught-exception
:exception ex
:where (str "Uncaught exception on" (.getName thread))}))))
(defonce system (atom nil))
(defn stop-app []
((or (:stop defaults) (fn [])))
(some-> (deref system) (ig/halt!))
(shutdown-agents))
(defn start-app [& [params]]
((or (:start params) (:start defaults) (fn [])))
(->> (config/system-config (or (:opts params) (:opts defaults) {}))
(ig/prep)
(ig/init)
(reset! system))
(.addShutdownHook (Runtime/getRuntime) (Thread. stop-app)))
(defn -main [& _]
(start-app))
| null |
https://raw.githubusercontent.com/kit-clj/kit/320b920dcf25c33130f33b0e1cd55ff13f3157f6/libs/kit-generator/test/resources/core.clj
|
clojure
|
Edges
Routes
log uncaught exceptions in threads
|
(ns myapp.core
(:require
[clojure.tools.logging :as log]
[integrant.core :as ig]
[wake.guestbook.config :as config]
[wake.guestbook.env :refer [defaults]]
[kit.edge.utils.repl]
[kit.edge.server.undertow]
[wake.guestbook.web.handler]
[wake.guestbook.web.routes.api]
[wake.guestbook.web.routes.pages]
)
(:gen-class))
(Thread/setDefaultUncaughtExceptionHandler
(reify Thread$UncaughtExceptionHandler
(uncaughtException [_ thread ex]
(log/error {:what :uncaught-exception
:exception ex
:where (str "Uncaught exception on" (.getName thread))}))))
(defonce system (atom nil))
(defn stop-app []
((or (:stop defaults) (fn [])))
(some-> (deref system) (ig/halt!))
(shutdown-agents))
(defn start-app [& [params]]
((or (:start params) (:start defaults) (fn [])))
(->> (config/system-config (or (:opts params) (:opts defaults) {}))
(ig/prep)
(ig/init)
(reset! system))
(.addShutdownHook (Runtime/getRuntime) (Thread. stop-app)))
(defn -main [& _]
(start-app))
|
7bd662fffcf87270ab6d51aaa5ceb5c49337cc3b6bb90b8cc1a43400cac790e1
|
Clozure/ccl-tests
|
plus.lsp
|
;-*- Mode: Lisp -*-
Author :
Created : Sun Aug 31 04:34:17 2003
;;;; Contains: Tests of the function +
(in-package :cl-test)
(compile-and-load "numbers-aux.lsp")
;;; (compile-and-load "plus-aux.lsp")
(deftest plus.1
(+)
0)
(deftest plus.2
(loop for x in *numbers*
unless (eql x (+ x))
collect x)
nil)
(deftest plus.3
(loop for x in *numbers*
for x1 = (+ x 0)
for x2 = (+ 0 x)
unless (and (eql x x1) (eql x x2) (eql x1 x2))
collect (list x x1 x2))
nil)
(deftest plus.4
(loop for x in *numbers*
for x1 = (- x x)
unless (= x1 0)
collect (list x x1))
nil)
(deftest plus.5
(let* ((upper-bound most-positive-fixnum)
(lower-bound most-negative-fixnum)
(spread (- upper-bound lower-bound)))
(flet ((%r () (+ (random spread) lower-bound)))
(loop for x = (%r)
for y = (%r)
for z = (%r)
for s1 = (+ x y z)
for s2 = (+ z y x)
for s3 = (+ y x z)
for s4 = (+ x z y)
for s5 = (+ z x y)
for s6 = (+ y z x)
repeat 1000
unless (and (eql s1 s2) (eql s1 s3) (eql s1 s4)
(eql s1 s5) (eql s1 s6))
collect (list x y z s1 s2 s3 s4 s5 s6))))
nil)
(deftest plus.6
(let* ((upper-bound 1000000000000000)
(lower-bound -1000000000000000)
(spread (- upper-bound lower-bound)))
(flet ((%r () (+ (random spread) lower-bound)))
(loop for x = (%r)
for y = (%r)
for z = (%r)
for s1 = (+ x y z)
for s2 = (+ z y x)
for s3 = (+ y x z)
for s4 = (+ x z y)
for s5 = (+ z x y)
for s6 = (+ y z x)
repeat 1000
unless (and (eql s1 s2) (eql s1 s3) (eql s1 s4)
(eql s1 s5) (eql s1 s6))
collect (list x y z s1 s2 s3 s4 s5 s6))))
nil)
(deftest plus.7
(let* ((upper-bound most-positive-fixnum)
(lower-bound most-negative-fixnum)
(spread (- upper-bound lower-bound)))
(flet ((%r () (+ (random spread) lower-bound)))
(loop for x = (/ (%r) (max 1 (%r)))
for y = (/ (%r) (max 1 (%r)))
for z = (/ (%r) (max 1 (%r)))
for s1 = (+ x y z)
for s2 = (+ z y x)
for s3 = (+ y x z)
for s4 = (+ x z y)
for s5 = (+ z x y)
for s6 = (+ y z x)
repeat 1000
unless (and (eql s1 s2) (eql s1 s3) (eql s1 s4)
(eql s1 s5) (eql s1 s6))
collect (list x y z s1 s2 s3 s4 s5 s6)
unless (= (+ x y)
(let ((xn (numerator x))
(xd (denominator x))
(yn (numerator y))
(yd (denominator y)))
(/ (+ (* xn yd) (* xd yn))
(* xd yd))))
collect (list x y))))
nil)
(deftest plus.8
(let (args)
(loop for i from 0 to (min 256 (1- call-arguments-limit))
unless (eql (apply #'+ args) (/ (* i (1+ i)) 2))
collect i
do (push (1+ i) args)))
nil)
(deftest plus.9
(let* ((upper-bound most-positive-fixnum)
(lower-bound most-negative-fixnum)
(spread (- upper-bound lower-bound)))
(flet ((%r () (+ (random spread) lower-bound)))
(loop
for xr = (%r)
for xi = (%r)
for yr = (%r)
for yi = (%r)
for x = (complex xr xi)
for y = (complex yr yi)
for s = (+ x y)
repeat 1000
unless (eql s (complex (+ xr yr) (+ xi yi)))
collect (list x y s))))
nil)
(deftest plus.10
(loop
for x in '(0.0s0 0.0f0 0.0d0 0.0l0)
for radix = (float-radix x)
for (k eps-r eps-f) = (multiple-value-list (find-epsilon x))
nconc
(loop for i from 1 to k
for e1 = (expt radix (- i))
for y = (+ x e1)
nconc
(loop for j from 1 to (- k i)
for e2 = (expt radix (- j))
for z = (+ x e2)
unless (eql (+ y z) (+ x e1 e2))
collect (list x i j))))
nil)
(deftest plus.11
(flet ((%r () (- (random most-positive-short-float) (/ most-positive-short-float 2))))
(loop for x = (%r)
for y = (%r)
for s = (+ x y)
repeat 1000
unless (and (eql s (+ y x))
(typep s 'short-float))
collect (list x y s)))
nil)
(deftest plus.12
(flet ((%r () (- (random most-positive-single-float) (/ most-positive-single-float 2))))
(loop for x = (%r)
for y = (%r)
for s = (+ x y)
repeat 1000
unless (and (eql s (+ y x))
(typep s 'single-float))
collect (list x y s)))
nil)
(deftest plus.13
(flet ((%r () (- (random most-positive-double-float) (/ most-positive-double-float 2))))
(loop for x = (%r)
for y = (%r)
for s = (+ x y)
repeat 1000
unless (and (eql s (+ y x))
(typep s 'double-float))
collect (list x y s)))
nil)
(deftest plus.14
(flet ((%r () (- (random most-positive-long-float) (/ most-positive-long-float 2))))
(loop for x = (%r)
for y = (%r)
for s = (+ x y)
repeat 1000
unless (and (eql s (+ y x))
(typep s 'long-float))
collect (list x y s)))
nil)
(deftest plus.15
(let ((bound most-positive-short-float)
(bound2 most-positive-single-float))
(loop for x = (- (random bound) (/ bound 2))
for y = (- (random bound2)(/ bound2 2))
for p = (+ x y)
repeat 1000
unless (and (eql p (+ y x))
(typep p 'single-float))
collect (list x y p)))
nil)
(deftest plus.16
(let ((bound most-positive-short-float)
(bound2 most-positive-double-float))
(loop for x = (- (random bound) (/ bound 2))
for y = (- (random bound2)(/ bound2 2))
for p = (+ x y)
repeat 1000
unless (and (eql p (+ y x))
(typep p 'double-float))
collect (list x y p)))
nil)
(deftest plus.17
(let ((bound most-positive-short-float)
(bound2 most-positive-long-float))
(loop for x = (- (random bound) (/ bound 2))
for y = (- (random bound2)(/ bound2 2))
for p = (+ x y)
repeat 1000
unless (and (eql p (+ y x))
(typep p 'long-float))
collect (list x y p)))
nil)
(deftest plus.18
(let ((bound most-positive-single-float)
(bound2 most-positive-double-float))
(loop for x = (- (random bound) (/ bound 2))
for y = (- (random bound2)(/ bound2 2))
for p = (+ x y)
repeat 1000
unless (and (eql p (+ y x))
(typep p 'double-float))
collect (list x y p)))
nil)
(deftest plus.19
(let ((bound most-positive-single-float)
(bound2 most-positive-long-float))
(loop for x = (- (random bound) (/ bound 2))
for y = (- (random bound2)(/ bound2 2))
for p = (+ x y)
repeat 1000
unless (and (eql p (+ y x))
(typep p 'long-float))
collect (list x y p)))
nil)
(deftest plus.20
(let ((bound most-positive-double-float)
(bound2 most-positive-long-float))
(loop for x = (- (random bound) (/ bound 2))
for y = (- (random bound2)(/ bound2 2))
for p = (+ x y)
repeat 1000
unless (and (eql p (+ y x))
(typep p 'long-float))
collect (list x y p)))
nil)
(deftest plus.21
(loop
for type in '(short-float single-float double-float long-float)
for bits in '(13 24 50 50)
for bound = (ash 1 (1- bits))
nconc
(loop for i = (random bound)
for x = (coerce i type)
for j = (random bound)
for y = (coerce j type)
for sum = (+ x y)
repeat 1000
unless (and (eql sum (coerce (+ i j) type))
(eql sum (+ y x)))
collect (list i j x y sum (coerce (+ i j) type))))
nil)
(deftest plus.22
(loop
for type in '(short-float single-float double-float long-float)
for bits in '(13 24 50 50)
for bound = (ash 1 (1- bits))
nconc
(loop
for one = (coerce 1 type)
for i = (random bound)
for x = (complex (coerce i type) one)
for j = (random bound)
for y = (complex (coerce j type) one)
for sum = (+ x y)
repeat 1000
unless (and (eql sum (complex (coerce (+ i j) type)
(coerce 2 type)))
(eql sum (+ y x)))
collect (list i j x y sum)))
nil)
(deftest plus.23
(loop
for type in '(short-float single-float double-float long-float)
for bits in '(13 24 50 50)
for bound = (ash 1 (1- bits))
nconc
(loop
for one = (coerce 1 type)
for i = (random bound)
for x = (complex one (coerce i type))
for j = (random bound)
for y = (complex one (coerce j type))
for sum = (+ x y)
repeat 1000
unless (and (eql sum (complex (coerce 2 type)
(coerce (+ i j) type)))
(eql sum (+ y x)))
collect (list i j x y sum)))
nil)
Negative zero tests ( suggested by )
(deftest plus.24
(funcall
(compile nil '(lambda (x) (declare (type short-float x) (optimize (speed 3) (safety 0) (debug 0)))
(+ 0.0s0 x)))
-0.0s0)
0.0s0)
(deftest plus.25
(funcall
(compile nil '(lambda (x) (declare (type single-float x) (optimize (speed 3) (safety 0) (debug 0)))
(+ 0.0f0 x)))
-0.0f0)
0.0f0)
(deftest plus.26
(funcall
(compile nil '(lambda (x) (declare (type double-float x) (optimize (speed 3) (safety 0) (debug 0)))
(+ 0.0d0 x)))
-0.0d0)
0.0d0)
(deftest plus.27
(funcall
(compile nil '(lambda (x) (declare (type long-float x) (optimize (speed 3) (safety 0) (debug 0)))
(+ 0.0l0 x)))
-0.0l0)
0.0l0)
;;; Test that explicit calls to macroexpand in subforms
;;; are done in the correct environment
(deftest plus.28
(macrolet ((%m (z) z))
(values
(+ (expand-in-current-env (%m 1)))
(+ (expand-in-current-env (%m 2)) 3)
(+ 4 (expand-in-current-env (%m 5)))
(+ 1/2 (expand-in-current-env (%m 6)) 2/3)))
1 5 9 43/6)
;;; Must test combinations of reals and complex arguments.
;;; Order of evaluation tests
(deftest plus.order.1
(let ((i 0) x y)
(values
(+ (progn (setf x (incf i)) '8)
(progn (setf y (incf i)) '11))
i x y))
19 2 1 2)
(deftest plus.order.2
(let ((i 0) x y z)
(values
(+ (progn (setf x (incf i)) '8)
(progn (setf y (incf i)) '11)
(progn (setf z (incf i)) '100))
i x y z))
119 3 1 2 3)
;;; Test that compilation does not reassociate float additions
(deftest plus.reassociation.1
(loop
for x in '(1.0s0 1.0f0 1.0d0 1.0l0)
for eps in (list short-float-epsilon single-float-epsilon
double-float-epsilon long-float-epsilon)
for eps2 = (* eps 9/10)
when (eql
(funcall (compile nil `(lambda () (+ ,x (+ ,eps2 ,eps2)))))
x)
collect (list x eps eps2))
nil)
(deftest plus.reassociation.2
(loop
for x in '(1.0s0 1.0f0 1.0d0 1.0l0)
for eps in (list short-float-epsilon single-float-epsilon
double-float-epsilon long-float-epsilon)
for eps2 = (* eps 9/10)
unless (equal
(funcall (compile nil `(lambda () (list (+ (+ ,x ,eps2) ,eps2)
(+ ,eps2 (+ ,eps2 ,x))))))
(list x x))
collect (list x eps eps2))
nil)
(deftest plus.reassociation.3
(loop
for x in '(1.0s0 1.0f0 1.0d0 1.0l0)
for eps in (list short-float-epsilon single-float-epsilon
double-float-epsilon long-float-epsilon)
for eps2 = (* eps 9/10)
when (eql
(funcall (compile nil `(lambda (y e) (+ y (+ e e)))) x eps2)
x)
collect (list x eps eps2))
nil)
(deftest plus.reassociation.4
(loop
for x in '(1.0s0 1.0f0 1.0d0 1.0l0)
for eps in (list short-float-epsilon single-float-epsilon
double-float-epsilon long-float-epsilon)
for eps2 = (* eps 9/10)
unless (equal
(funcall (compile nil `(lambda (y e) (list (+ (+ y e) e)
(+ e (+ e y)))))
x eps2)
(list x x))
collect (list x eps eps2))
nil)
| null |
https://raw.githubusercontent.com/Clozure/ccl-tests/0478abddb34dbc16487a1975560d8d073a988060/ansi-tests/plus.lsp
|
lisp
|
-*- Mode: Lisp -*-
Contains: Tests of the function +
(compile-and-load "plus-aux.lsp")
Test that explicit calls to macroexpand in subforms
are done in the correct environment
Must test combinations of reals and complex arguments.
Order of evaluation tests
Test that compilation does not reassociate float additions
|
Author :
Created : Sun Aug 31 04:34:17 2003
(in-package :cl-test)
(compile-and-load "numbers-aux.lsp")
(deftest plus.1
(+)
0)
(deftest plus.2
(loop for x in *numbers*
unless (eql x (+ x))
collect x)
nil)
(deftest plus.3
(loop for x in *numbers*
for x1 = (+ x 0)
for x2 = (+ 0 x)
unless (and (eql x x1) (eql x x2) (eql x1 x2))
collect (list x x1 x2))
nil)
(deftest plus.4
(loop for x in *numbers*
for x1 = (- x x)
unless (= x1 0)
collect (list x x1))
nil)
(deftest plus.5
(let* ((upper-bound most-positive-fixnum)
(lower-bound most-negative-fixnum)
(spread (- upper-bound lower-bound)))
(flet ((%r () (+ (random spread) lower-bound)))
(loop for x = (%r)
for y = (%r)
for z = (%r)
for s1 = (+ x y z)
for s2 = (+ z y x)
for s3 = (+ y x z)
for s4 = (+ x z y)
for s5 = (+ z x y)
for s6 = (+ y z x)
repeat 1000
unless (and (eql s1 s2) (eql s1 s3) (eql s1 s4)
(eql s1 s5) (eql s1 s6))
collect (list x y z s1 s2 s3 s4 s5 s6))))
nil)
(deftest plus.6
(let* ((upper-bound 1000000000000000)
(lower-bound -1000000000000000)
(spread (- upper-bound lower-bound)))
(flet ((%r () (+ (random spread) lower-bound)))
(loop for x = (%r)
for y = (%r)
for z = (%r)
for s1 = (+ x y z)
for s2 = (+ z y x)
for s3 = (+ y x z)
for s4 = (+ x z y)
for s5 = (+ z x y)
for s6 = (+ y z x)
repeat 1000
unless (and (eql s1 s2) (eql s1 s3) (eql s1 s4)
(eql s1 s5) (eql s1 s6))
collect (list x y z s1 s2 s3 s4 s5 s6))))
nil)
(deftest plus.7
(let* ((upper-bound most-positive-fixnum)
(lower-bound most-negative-fixnum)
(spread (- upper-bound lower-bound)))
(flet ((%r () (+ (random spread) lower-bound)))
(loop for x = (/ (%r) (max 1 (%r)))
for y = (/ (%r) (max 1 (%r)))
for z = (/ (%r) (max 1 (%r)))
for s1 = (+ x y z)
for s2 = (+ z y x)
for s3 = (+ y x z)
for s4 = (+ x z y)
for s5 = (+ z x y)
for s6 = (+ y z x)
repeat 1000
unless (and (eql s1 s2) (eql s1 s3) (eql s1 s4)
(eql s1 s5) (eql s1 s6))
collect (list x y z s1 s2 s3 s4 s5 s6)
unless (= (+ x y)
(let ((xn (numerator x))
(xd (denominator x))
(yn (numerator y))
(yd (denominator y)))
(/ (+ (* xn yd) (* xd yn))
(* xd yd))))
collect (list x y))))
nil)
(deftest plus.8
(let (args)
(loop for i from 0 to (min 256 (1- call-arguments-limit))
unless (eql (apply #'+ args) (/ (* i (1+ i)) 2))
collect i
do (push (1+ i) args)))
nil)
(deftest plus.9
(let* ((upper-bound most-positive-fixnum)
(lower-bound most-negative-fixnum)
(spread (- upper-bound lower-bound)))
(flet ((%r () (+ (random spread) lower-bound)))
(loop
for xr = (%r)
for xi = (%r)
for yr = (%r)
for yi = (%r)
for x = (complex xr xi)
for y = (complex yr yi)
for s = (+ x y)
repeat 1000
unless (eql s (complex (+ xr yr) (+ xi yi)))
collect (list x y s))))
nil)
(deftest plus.10
(loop
for x in '(0.0s0 0.0f0 0.0d0 0.0l0)
for radix = (float-radix x)
for (k eps-r eps-f) = (multiple-value-list (find-epsilon x))
nconc
(loop for i from 1 to k
for e1 = (expt radix (- i))
for y = (+ x e1)
nconc
(loop for j from 1 to (- k i)
for e2 = (expt radix (- j))
for z = (+ x e2)
unless (eql (+ y z) (+ x e1 e2))
collect (list x i j))))
nil)
(deftest plus.11
(flet ((%r () (- (random most-positive-short-float) (/ most-positive-short-float 2))))
(loop for x = (%r)
for y = (%r)
for s = (+ x y)
repeat 1000
unless (and (eql s (+ y x))
(typep s 'short-float))
collect (list x y s)))
nil)
(deftest plus.12
(flet ((%r () (- (random most-positive-single-float) (/ most-positive-single-float 2))))
(loop for x = (%r)
for y = (%r)
for s = (+ x y)
repeat 1000
unless (and (eql s (+ y x))
(typep s 'single-float))
collect (list x y s)))
nil)
(deftest plus.13
(flet ((%r () (- (random most-positive-double-float) (/ most-positive-double-float 2))))
(loop for x = (%r)
for y = (%r)
for s = (+ x y)
repeat 1000
unless (and (eql s (+ y x))
(typep s 'double-float))
collect (list x y s)))
nil)
(deftest plus.14
(flet ((%r () (- (random most-positive-long-float) (/ most-positive-long-float 2))))
(loop for x = (%r)
for y = (%r)
for s = (+ x y)
repeat 1000
unless (and (eql s (+ y x))
(typep s 'long-float))
collect (list x y s)))
nil)
(deftest plus.15
(let ((bound most-positive-short-float)
(bound2 most-positive-single-float))
(loop for x = (- (random bound) (/ bound 2))
for y = (- (random bound2)(/ bound2 2))
for p = (+ x y)
repeat 1000
unless (and (eql p (+ y x))
(typep p 'single-float))
collect (list x y p)))
nil)
(deftest plus.16
(let ((bound most-positive-short-float)
(bound2 most-positive-double-float))
(loop for x = (- (random bound) (/ bound 2))
for y = (- (random bound2)(/ bound2 2))
for p = (+ x y)
repeat 1000
unless (and (eql p (+ y x))
(typep p 'double-float))
collect (list x y p)))
nil)
(deftest plus.17
(let ((bound most-positive-short-float)
(bound2 most-positive-long-float))
(loop for x = (- (random bound) (/ bound 2))
for y = (- (random bound2)(/ bound2 2))
for p = (+ x y)
repeat 1000
unless (and (eql p (+ y x))
(typep p 'long-float))
collect (list x y p)))
nil)
(deftest plus.18
(let ((bound most-positive-single-float)
(bound2 most-positive-double-float))
(loop for x = (- (random bound) (/ bound 2))
for y = (- (random bound2)(/ bound2 2))
for p = (+ x y)
repeat 1000
unless (and (eql p (+ y x))
(typep p 'double-float))
collect (list x y p)))
nil)
(deftest plus.19
(let ((bound most-positive-single-float)
(bound2 most-positive-long-float))
(loop for x = (- (random bound) (/ bound 2))
for y = (- (random bound2)(/ bound2 2))
for p = (+ x y)
repeat 1000
unless (and (eql p (+ y x))
(typep p 'long-float))
collect (list x y p)))
nil)
(deftest plus.20
(let ((bound most-positive-double-float)
(bound2 most-positive-long-float))
(loop for x = (- (random bound) (/ bound 2))
for y = (- (random bound2)(/ bound2 2))
for p = (+ x y)
repeat 1000
unless (and (eql p (+ y x))
(typep p 'long-float))
collect (list x y p)))
nil)
(deftest plus.21
(loop
for type in '(short-float single-float double-float long-float)
for bits in '(13 24 50 50)
for bound = (ash 1 (1- bits))
nconc
(loop for i = (random bound)
for x = (coerce i type)
for j = (random bound)
for y = (coerce j type)
for sum = (+ x y)
repeat 1000
unless (and (eql sum (coerce (+ i j) type))
(eql sum (+ y x)))
collect (list i j x y sum (coerce (+ i j) type))))
nil)
(deftest plus.22
(loop
for type in '(short-float single-float double-float long-float)
for bits in '(13 24 50 50)
for bound = (ash 1 (1- bits))
nconc
(loop
for one = (coerce 1 type)
for i = (random bound)
for x = (complex (coerce i type) one)
for j = (random bound)
for y = (complex (coerce j type) one)
for sum = (+ x y)
repeat 1000
unless (and (eql sum (complex (coerce (+ i j) type)
(coerce 2 type)))
(eql sum (+ y x)))
collect (list i j x y sum)))
nil)
(deftest plus.23
(loop
for type in '(short-float single-float double-float long-float)
for bits in '(13 24 50 50)
for bound = (ash 1 (1- bits))
nconc
(loop
for one = (coerce 1 type)
for i = (random bound)
for x = (complex one (coerce i type))
for j = (random bound)
for y = (complex one (coerce j type))
for sum = (+ x y)
repeat 1000
unless (and (eql sum (complex (coerce 2 type)
(coerce (+ i j) type)))
(eql sum (+ y x)))
collect (list i j x y sum)))
nil)
Negative zero tests ( suggested by )
(deftest plus.24
(funcall
(compile nil '(lambda (x) (declare (type short-float x) (optimize (speed 3) (safety 0) (debug 0)))
(+ 0.0s0 x)))
-0.0s0)
0.0s0)
(deftest plus.25
(funcall
(compile nil '(lambda (x) (declare (type single-float x) (optimize (speed 3) (safety 0) (debug 0)))
(+ 0.0f0 x)))
-0.0f0)
0.0f0)
(deftest plus.26
(funcall
(compile nil '(lambda (x) (declare (type double-float x) (optimize (speed 3) (safety 0) (debug 0)))
(+ 0.0d0 x)))
-0.0d0)
0.0d0)
(deftest plus.27
(funcall
(compile nil '(lambda (x) (declare (type long-float x) (optimize (speed 3) (safety 0) (debug 0)))
(+ 0.0l0 x)))
-0.0l0)
0.0l0)
(deftest plus.28
(macrolet ((%m (z) z))
(values
(+ (expand-in-current-env (%m 1)))
(+ (expand-in-current-env (%m 2)) 3)
(+ 4 (expand-in-current-env (%m 5)))
(+ 1/2 (expand-in-current-env (%m 6)) 2/3)))
1 5 9 43/6)
(deftest plus.order.1
(let ((i 0) x y)
(values
(+ (progn (setf x (incf i)) '8)
(progn (setf y (incf i)) '11))
i x y))
19 2 1 2)
(deftest plus.order.2
(let ((i 0) x y z)
(values
(+ (progn (setf x (incf i)) '8)
(progn (setf y (incf i)) '11)
(progn (setf z (incf i)) '100))
i x y z))
119 3 1 2 3)
(deftest plus.reassociation.1
(loop
for x in '(1.0s0 1.0f0 1.0d0 1.0l0)
for eps in (list short-float-epsilon single-float-epsilon
double-float-epsilon long-float-epsilon)
for eps2 = (* eps 9/10)
when (eql
(funcall (compile nil `(lambda () (+ ,x (+ ,eps2 ,eps2)))))
x)
collect (list x eps eps2))
nil)
(deftest plus.reassociation.2
(loop
for x in '(1.0s0 1.0f0 1.0d0 1.0l0)
for eps in (list short-float-epsilon single-float-epsilon
double-float-epsilon long-float-epsilon)
for eps2 = (* eps 9/10)
unless (equal
(funcall (compile nil `(lambda () (list (+ (+ ,x ,eps2) ,eps2)
(+ ,eps2 (+ ,eps2 ,x))))))
(list x x))
collect (list x eps eps2))
nil)
(deftest plus.reassociation.3
(loop
for x in '(1.0s0 1.0f0 1.0d0 1.0l0)
for eps in (list short-float-epsilon single-float-epsilon
double-float-epsilon long-float-epsilon)
for eps2 = (* eps 9/10)
when (eql
(funcall (compile nil `(lambda (y e) (+ y (+ e e)))) x eps2)
x)
collect (list x eps eps2))
nil)
(deftest plus.reassociation.4
(loop
for x in '(1.0s0 1.0f0 1.0d0 1.0l0)
for eps in (list short-float-epsilon single-float-epsilon
double-float-epsilon long-float-epsilon)
for eps2 = (* eps 9/10)
unless (equal
(funcall (compile nil `(lambda (y e) (list (+ (+ y e) e)
(+ e (+ e y)))))
x eps2)
(list x x))
collect (list x eps eps2))
nil)
|
f2bce10f73081415decd4e6f3408b2cf0560b7b1b4530c1a07ac289ad61405af
|
KestrelInstitute/Specware
|
Tests.lisp
|
(test-directories ".")
(test
("Bug 0015 : Substitute and Translate fail to update the localTypes and localOps"
:show "subsExample#BB"
:output '(";;; Elaborating spec-substitution at $TESTDIR/subsExample#BB"
";;; Elaborating spec at $TESTDIR/subsExample#AA"
";;; Elaborating spec at $TESTDIR/subsExample#A"
";;; Elaborating spec-morphism at $TESTDIR/subsExample#M"
";;; Elaborating spec at $TESTDIR/subsExample#B"
(:optional "")
"spec"
(:optional "")
"import B"
(:optional "")
(:alternatives
"type Interval = {start: Int, stop: Int}"
"type Interval = {start : Int, stop : Int}"
)
(:optional "")
"op isEmptyInterval?: Interval -> Bool"
(:optional "")
(:alternatives
"def isEmptyInterval?{start = x, stop = y} = x = y"
"def isEmptyInterval? {start = x, stop = y} = x = y"
"def isEmptyInterval?{start = x: Int, stop = y: Int} = x = y"
"def isEmptyInterval? {start = x : Int, stop = y : Int} = x = y"
"def isEmptyInterval?{start = x: Int, stop = y: Int}: Bool = x = y"
"def isEmptyInterval? {start = x : Int, stop = y : Int} : Bool = x = y"
("def isEmptyInterval?{start = x: Int, stop = y: Int}: Bool ="
"x = y")
("def isEmptyInterval? {start = x: Int, stop = y: Int}: Bool = x = y")
("def isEmptyInterval? {start = x : Int, stop = y : Int} : Bool ="
"x = y")
("def isEmptyInterval?{start = x: Int, stop = y: Int}: Bool"
"= x = y")
("def isEmptyInterval? {start = x : Int, stop = y : Int} : Bool"
"= x = y")
)
(:optional "")
(:alternatives "endspec" "end-spec")
(:optional "")
(:optional "")
))
)
| null |
https://raw.githubusercontent.com/KestrelInstitute/Specware/2be6411c55f26432bf5c9e2f7778128898220c24/TestSuite/Bugs/Bug_0015/Tests.lisp
|
lisp
|
(test-directories ".")
(test
("Bug 0015 : Substitute and Translate fail to update the localTypes and localOps"
:show "subsExample#BB"
:output '(";;; Elaborating spec-substitution at $TESTDIR/subsExample#BB"
";;; Elaborating spec at $TESTDIR/subsExample#AA"
";;; Elaborating spec at $TESTDIR/subsExample#A"
";;; Elaborating spec-morphism at $TESTDIR/subsExample#M"
";;; Elaborating spec at $TESTDIR/subsExample#B"
(:optional "")
"spec"
(:optional "")
"import B"
(:optional "")
(:alternatives
"type Interval = {start: Int, stop: Int}"
"type Interval = {start : Int, stop : Int}"
)
(:optional "")
"op isEmptyInterval?: Interval -> Bool"
(:optional "")
(:alternatives
"def isEmptyInterval?{start = x, stop = y} = x = y"
"def isEmptyInterval? {start = x, stop = y} = x = y"
"def isEmptyInterval?{start = x: Int, stop = y: Int} = x = y"
"def isEmptyInterval? {start = x : Int, stop = y : Int} = x = y"
"def isEmptyInterval?{start = x: Int, stop = y: Int}: Bool = x = y"
"def isEmptyInterval? {start = x : Int, stop = y : Int} : Bool = x = y"
("def isEmptyInterval?{start = x: Int, stop = y: Int}: Bool ="
"x = y")
("def isEmptyInterval? {start = x: Int, stop = y: Int}: Bool = x = y")
("def isEmptyInterval? {start = x : Int, stop = y : Int} : Bool ="
"x = y")
("def isEmptyInterval?{start = x: Int, stop = y: Int}: Bool"
"= x = y")
("def isEmptyInterval? {start = x : Int, stop = y : Int} : Bool"
"= x = y")
)
(:optional "")
(:alternatives "endspec" "end-spec")
(:optional "")
(:optional "")
))
)
|
|
23294c05ac2458ef698f14e04b338f0c4f941ebc095a294429ff68df1cd60e9f
|
peak6/mmd_core
|
cpu_load.erl
|
Copyright 2011 PEAK6 Investments , L.P.
%%
Licensed under the Apache License , Version 2.0 ( the " License " ) ;
%% you may not use this file except in compliance with the License.
%% You may obtain a copy of the License at
%%
%% -2.0
%%
%% Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an " AS IS " BASIS ,
%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
%% See the License for the specific language governing permissions and
%% limitations under the License.
-module(cpu_load).
-behaviour(gen_server).
%% API
-export([start_link/0]).
-export([util/0,util/1]).
%% gen_server callbacks
-export([init/1, handle_call/3, handle_cast/2, handle_info/2,
terminate/2, code_change/3]).
-include("logger.hrl").
-define(DMAP,cpu_load_map).
-define(SERVER, ?MODULE).
%%%===================================================================
%%% API
%%%===================================================================
start_link() ->
gen_server:start_link({local, ?SERVER}, ?MODULE, [], []).
util() ->
case util(node()) of
{_,Util} -> Util;
Other -> Other
end.
util([]) -> [];
util(Nodes) when is_list(Nodes) -> [{N,L} || [N,_,L] <- p6dmap:any(?DMAP,Nodes)];
util(Node) -> case util([Node]) of
[] -> undefined;
[L] -> L
end.
%%%===================================================================
%%% gen_server callbacks
%%%===================================================================
init([]) ->
p6dmap:ensure(?DMAP),
p6dmap:addGlobal(?DMAP,node(),99),
case os:type() of
{unix,linux}->
application:start(os_mon),
timer:send_interval(p6props:getApp('cpu_load.updateInterval',1000),update);
Unknown->
?lwarn("Unsupported os '~p', assuming 99% load",[Unknown])
end,
{ok,99}.
handle_call(Request, From, Load) ->
?lwarn("Unexpected handle_call(~p, ~p, ~p)",[Request,From,Load]),
{reply, ok, Load}.
handle_cast(Msg, Load) ->
?lwarn("Unexpected handle_cast(~p, ~p)",[Msg,Load]),
{noreply, Load}.
handle_info(update,LastLoad) ->
{noreply,genLoad(LastLoad)};
handle_info(Info, Load) ->
?lwarn("Unexpected handle_info(~p, ~p)",[Info,Load]),
{noreply, Load}.
terminate(_Reason, _Load) ->
ok.
code_change(_OldVsn, Load, _Extra) ->
{ok, Load}.
%%%===================================================================
Internal functions
%%%===================================================================
genLoad(OrigLoad) ->
case round(cpu_sup:util() * 10) / 10 of
OrigLoad -> OrigLoad;
NewLoad ->
p6dmap:set(?DMAP,node(),NewLoad),
NewLoad
end.
| null |
https://raw.githubusercontent.com/peak6/mmd_core/f90469ea9eac8cd607aa6ec5b9ad6ff003a35572/src/cpu_load.erl
|
erlang
|
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
-2.0
Unless required by applicable law or agreed to in writing, software
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
API
gen_server callbacks
===================================================================
API
===================================================================
===================================================================
gen_server callbacks
===================================================================
===================================================================
===================================================================
|
Copyright 2011 PEAK6 Investments , L.P.
Licensed under the Apache License , Version 2.0 ( the " License " ) ;
distributed under the License is distributed on an " AS IS " BASIS ,
-module(cpu_load).
-behaviour(gen_server).
-export([start_link/0]).
-export([util/0,util/1]).
-export([init/1, handle_call/3, handle_cast/2, handle_info/2,
terminate/2, code_change/3]).
-include("logger.hrl").
-define(DMAP,cpu_load_map).
-define(SERVER, ?MODULE).
start_link() ->
gen_server:start_link({local, ?SERVER}, ?MODULE, [], []).
util() ->
case util(node()) of
{_,Util} -> Util;
Other -> Other
end.
util([]) -> [];
util(Nodes) when is_list(Nodes) -> [{N,L} || [N,_,L] <- p6dmap:any(?DMAP,Nodes)];
util(Node) -> case util([Node]) of
[] -> undefined;
[L] -> L
end.
init([]) ->
p6dmap:ensure(?DMAP),
p6dmap:addGlobal(?DMAP,node(),99),
case os:type() of
{unix,linux}->
application:start(os_mon),
timer:send_interval(p6props:getApp('cpu_load.updateInterval',1000),update);
Unknown->
?lwarn("Unsupported os '~p', assuming 99% load",[Unknown])
end,
{ok,99}.
handle_call(Request, From, Load) ->
?lwarn("Unexpected handle_call(~p, ~p, ~p)",[Request,From,Load]),
{reply, ok, Load}.
handle_cast(Msg, Load) ->
?lwarn("Unexpected handle_cast(~p, ~p)",[Msg,Load]),
{noreply, Load}.
handle_info(update,LastLoad) ->
{noreply,genLoad(LastLoad)};
handle_info(Info, Load) ->
?lwarn("Unexpected handle_info(~p, ~p)",[Info,Load]),
{noreply, Load}.
terminate(_Reason, _Load) ->
ok.
code_change(_OldVsn, Load, _Extra) ->
{ok, Load}.
Internal functions
genLoad(OrigLoad) ->
case round(cpu_sup:util() * 10) / 10 of
OrigLoad -> OrigLoad;
NewLoad ->
p6dmap:set(?DMAP,node(),NewLoad),
NewLoad
end.
|
391e0503d0880b78a7c4bf7cfb1d464da594a8dfa756da74a99ad0ac892ae8f9
|
zorkow/MaxTract
|
matcher.ml
|
open MatcherUtility;;
open Jsonfio.JsonfIO;;
open Contentparser;;
type symb = {
glList: bBox list;
elList: elem list;
}
let test = ref false
let glyphLst = ref []
let elemLst = ref []
let btLn = ref (Ln {stx=0.; sty=99999.; enx=0.; eny=99999.; lnw=0.})
let rtCh = ref (Chr {chname=""; chfont=""; chsize=0.; chx=99999.; chy=0.; chw=0.})
*
@edited : 08 - JUN-2010
@author : Josef Baker
@input : and list of symbols
@effects :
@output : True if char is within symbol list
@edited: 08-JUN-2010
@author: Josef Baker
@input: Char and list of symbols
@effects:
@output: True if char is within symbol list
*)
let rec checkChar char symbols =
match symbols with
h::t when equalChar char h -> true
| h::t -> checkChar char t
| [] -> false
;;
*
@edited : 07 - NOV-2012
@author : Josef Baker
@input :
@effects :
@output : list of chars not in the symbol list
@edited: 07-NOV-2012
@author: Josef Baker
@input:
@effects:
@output: list of chars not in the symbol list
*)
let rec checkChars inChars outChars symbols =
match inChars with
h::t when checkChar h symbols -> checkChars t outChars symbols
| h::t -> checkChars t (h::outChars) symbols
| [] -> outChars
;;
*
@edited : 08 - JUN-2010
@author : Josef Baker
@input : list , symbol list , empty list
@effects :
@output : list without any of the chars that are also present in the symbol list
@edited: 08-JUN-2010
@author: Josef Baker
@input: Char list, symbol list, empty list
@effects:
@output: Char list without any of the chars that are also present in the symbol list
*)
let rec removeDupChars chars symbols symbolChars =
match symbols with
h::t -> removeDupChars chars t (List.append h.elList symbolChars)
| [] -> checkChars chars [] symbolChars
;;
*
@edited : 08 - JUN-2010
@author : Josef Baker
@input : Glyph and list of symbols
@effects :
@output : True if glyph is within symbol list
@edited: 08-JUN-2010
@author: Josef Baker
@input: Glyph and list of symbols
@effects:
@output: True if glyph is within symbol list
*)
let rec checkGlyph glyph symbols =
match symbols with
h::t -> ( if (equalGlyph glyph h)
then true
else checkGlyph glyph t
)
| [] -> false
;;
let rec checkGlyphs inGlyphs outGlyphs symbols =
match inGlyphs with
h::t -> ( if checkGlyph h symbols then checkGlyphs t outGlyphs symbols
else checkGlyphs t (h::outGlyphs) symbols )
| [] -> outGlyphs
;;
*
@edited : 08 - JUN-2010
@author : Josef Baker
@input : Glyph list , symbol list , empty list
@effects :
@output : Glyph list without any of the glyphs that are also present in the symbol list
@edited: 08-JUN-2010
@author: Josef Baker
@input: Glyph list, symbol list, empty list
@effects:
@output: Glyph list without any of the glyphs that are also present in the symbol list
*)
let rec removeDupGlyphs glyphs symbols symbolGlyphs =
match symbols with
h::t -> removeDupGlyphs glyphs t (List.append h.glList symbolGlyphs)
| [] -> checkGlyphs glyphs [] symbolGlyphs
;;
let rec convElems elems out =
match elems with
h::t ->(match h with
Chr chr -> convElems t (PDFChar {Jsonfio.JsonfIO.c=chr.chname;
bx=(int_of_float chr.chx);
by=(int_of_float chr.chy);
font=chr.chfont;
scale=chr.chsize;}::out)
| Ln ln -> convElems t (Line {sx=(int_of_float ln.stx);
sy=(int_of_float ln.sty);
lw=(int_of_float ln.lnw);
ex=(int_of_float ln.enx);
ey=(int_of_float ln.eny);}::out))
|_ ->out
;;
let rec getBBox xm ym wm hm glyphs =
match glyphs with
hd::tl -> ( getBBox (min xm hd.x)
(min ym hd.y)
((max (xm + wm) (hd.x +hd.w)) - (min xm hd.x) )
((max (ym + hm) (hd.y +hd.h)) - (min ym hd.y) )
tl)
| _ ->{x=xm;y=ym;w=wm;h=hm}
;;
let rec convert symbols out =
match symbols with
h::t -> convert t
({bbox=(getBBox (List.hd h.glList).x
(List.hd h.glList).y
(List.hd h.glList).w
(List.hd h.glList).h h.glList);glyphs=h.glList;elements=(convElems h.elList [])}::out)
| _ -> out
;;
*
@edited : 14 - MAR-2011
@author : Josef Baker
@input : Two symbols
@effects :
@output : True if symbol 1 's y coord is less than 2 's
@edited: 14-MAR-2011
@author: Josef Baker
@input: Two symbols
@effects:
@output: True if symbol 1's y coord is less than 2's
*)
let symbLessY s1 s2=
let el1 = s1.elList in
let el2 = s2.elList in
charLessY (List.hd el1) (List.hd el2)
;;
*
@edited : 14 - MAR-2011
@author : Josef Baker
@input : Symbol list
@effects :
@output : Symbols sorted by y coords
@edited: 14-MAR-2011
@author: Josef Baker
@input: Symbol list
@effects:
@output: Symbols sorted by y coords
*)
let rec sortSymbolY = function
| [] -> []
| pivot :: rest ->
let is_less x = symbLessY x pivot in
let left, right = List.partition is_less rest in
sortSymbolY left @ [pivot] @ sortSymbolY right
;;
*
@edited : 11 - MAY-2012
@author : Josef Baker
@input :
@effects :
@output : list of elements overlapping glyph
@edited: 11-MAY-2012
@author: Josef Baker
@input:
@effects:
@output: list of elements overlapping glyph
*)
let rec getGlyphElemOverlap glyph elems overlap hGScale vScale hEScale=
match elems with
h::t when glyphElemOverlap glyph h hGScale vScale hEScale ->
getGlyphElemOverlap glyph t (h::overlap) hGScale vScale hEScale
| h::t -> getGlyphElemOverlap glyph t overlap hGScale vScale hEScale
| _ -> overlap
;;
let rec multiMatchAux glyphs
match glyphs with
h::t - > (
let overlap = h elems [ ] 1 . 1 . 0.8 in
if ( overlap ) > 1 then
multiMatchAux t elems ( { glList=([h ] ) ; elList = overlap}::symbs )
else
multiMatchAux t elems symbs )
| _ - > symbs
let rec multiMatch symbols=
let symbols1 = ( ( ! ) ( ! ) [ ] ) @symbols in
: = removeDupChars ! [ ] ;
glyphLst : = removeDupGlyphs ! [ ] ;
if ( ) = ( symbols ) then symbols1
else multiMatch symbols1
; ;
let rec symbols =
match chars with
h::t - > ( let upper = findUpper h ( ! ) [ ] in
if ( upper ) = 1 then (
let = { elList=[h];glList = upper } in
glyphLst : = removeDupGlyphs ( ! ) [ ] [ ] ;
upperMatch t ( newSymbol::symbols ) )
else upperMatch t symbols
)
| _ - > ( getAbove symbols [ ] )
; ;
let basicMatch symbols =
let symbols = ( [ ] 1 . 1 . 1.)@symbols in
let symbols = ( [ ] 1 . 1 . 0.4)@symbols in
let symbols = ( [ ] 0.9 1 . 0.4)@symbols in
let symbols = ( [ ] 1 . 1.1 1.)@symbols in
let symbols = ( [ ] 1 . 1.1 0.4)@symbols in
let symbols = sortSymbolY symbols in
let symbols = getAbove symbols [ ] in
symbols
; ;
let rec multiMatchAux glyphs elems symbs =
match glyphs with
h::t ->(
let overlap = getOverlap h elems [] 1. 1. 0.8 in
if (List.length overlap) > 1 then
multiMatchAux t elems ({glList=([h]); elList = overlap}::symbs)
else
multiMatchAux t elems symbs)
| _ -> symbs
let rec multiMatch symbols=
let symbols1 = (multiMatchAux (!glyphLst) (!elemLst) [])@symbols in
elemLst := removeDupChars !elemLst symbols1 [];
glyphLst := removeDupGlyphs !glyphLst symbols1 [];
if (List.length symbols1) = (List.length symbols) then symbols1
else multiMatch symbols1
;;
let rec upperMatch chars symbols =
match chars with
h::t -> (let upper = findUpper h (!glyphLst) [] in
if (List.length upper) = 1 then (
let newSymbol = {elList=[h];glList=upper} in
glyphLst := removeDupGlyphs (!glyphLst) [newSymbol] [];
upperMatch t (newSymbol::symbols))
else upperMatch t symbols
)
| _ -> (getAbove symbols [])
;;
let basicMatch symbols =
let symbols = (singleMatch [] 1. 1. 1.)@symbols in
let symbols = (singleMatch [] 1. 1. 0.4)@symbols in
let symbols = (singleMatch [] 0.9 1. 0.4)@symbols in
let symbols = (singleMatch [] 1. 1.1 1.)@symbols in
let symbols = (singleMatch [] 1. 1.1 0.4)@symbols in
let symbols = sortSymbolY symbols in
let symbols = getAbove symbols [] in
symbols
;;
*)
let hasAbove ch =
(ch ="j") || (ch ="i") || (ch="equal") || (ch="colon") || (ch="greaterequal")
|| (ch="lessequal")|| (ch="semicolon")
;;
let addAbove symbol glyphs =
let above = findAbove (List.hd symbol.elList) glyphs [] in
{elList = symbol.elList; glList=(symbol.glList@above)}
;;
let rec getAbove inSymbols outSymbols =
match inSymbols with
h::t -> ( let c = (List.hd h.elList) in
match c with
Chr ch ->(
if (hasAbove ch.chname) then (
let newSymbol = addAbove h !glyphLst in
glyphLst := removeDupGlyphs !glyphLst [newSymbol] [];
getAbove t (newSymbol::outSymbols)
)
else getAbove t (h::outSymbols))
| _ -> getAbove t (h::outSymbols))
| [] -> outSymbols
;;
* glyph below elem
@edited : 18 - JUL-2012
@author : Josef Baker
@input :
@effects :
@output :
@edited: 18-JUL-2012
@author: Josef Baker
@input:
@effects:
@output:
*)
let rec lowerMatch elems symbols =
match elems with
h::t -> (let lower = findLower h (!glyphLst) [] in
if (List.length lower) = 1 then (
let newSymbol = {elList=[h];glList=lower} in
glyphLst := removeDupGlyphs (!glyphLst) [newSymbol] [];
elemLst := removeDupChars (!elemLst) [newSymbol] [];
lowerMatch t (newSymbol::symbols))
else lowerMatch t symbols
)
| _ -> symbols
;;
* glyph above elem
@edited : 18 - JUL-2012
@author : Josef Baker
@input :
@effects :
@output :
@edited: 18-JUL-2012
@author: Josef Baker
@input:
@effects:
@output:
*)
let rec upperMatch elems symbols =
match elems with
h::t -> (let upper = findUpper h (!glyphLst) [] in
if (List.length upper) = 1 then (
let newSymbol = {elList=[h];glList=upper} in
glyphLst := removeDupGlyphs (!glyphLst) [newSymbol] [];
elemLst := removeDupChars (!elemLst) [newSymbol] [];
upperMatch t (newSymbol::symbols))
else upperMatch t symbols
)
| _ -> symbols
;;
* glyph contains muliple elems
@edited : 17 - JUL-2012
@author : Josef Baker
@input :
@effects :
@output :
@edited: 17-JUL-2012
@author: Josef Baker
@input:
@effects:
@output:
*)
let rec bigGlyphMatch glyphs symbols=
match glyphs with
h::t ->(let overlap = getGlyphElemOverlap h (!elemLst) [] 0.8 1.1 0.8 in
if (List.length overlap) > 1 then(
let symbol = {glList=([h]); elList = overlap} in
glyphLst := removeDupGlyphs (!glyphLst) [symbol] [];
elemLst := removeDupChars (!elemLst) [symbol] [];
bigGlyphMatch t (symbol::symbols))
else bigGlyphMatch t symbols)
| _ -> symbols
;;
*
@edited : 11 - MAY-2012
@author : Josef Baker
@input :
@effects :
@output :
@edited: 11-MAY-2012
@author: Josef Baker
@input:
@effects:
@output:
*)
let rec singleMatchAux glyphs elems symbs hGScale vScale hEScale=
match glyphs with
h::t -> (let overlap = getGlyphElemOverlap h elems [] hGScale vScale hEScale in
if (List.length overlap) = 1 then
singleMatchAux t elems ({glList=([h]); elList =
overlap}::symbs) hGScale vScale hEScale
else
singleMatchAux t elems symbs hGScale vScale hEScale)
| _ -> symbs
;;
let rec singleMatch symbols elems glyphs hGScale vScale hEScale=
let symbols1 = (singleMatchAux glyphs elems [] hGScale vScale hEScale)@symbols in
let elems = removeDupChars elems symbols1 [] in
let glyphs = removeDupGlyphs glyphs symbols1 [] in
if (List.length symbols1) = (List.length symbols) then symbols1
else singleMatch symbols1 elems glyphs hGScale vScale hEScale
;;
*
@edited : 16 - JUL-2012
@author : Josef Baker
@input :
@effects :
@output : true if elem only overlaps one glyph
@edited: 16-JUL-2012
@author: Josef Baker
@input:
@effects:
@output: true if elem only overlaps one glyph
*)
let rec getSingleElemsAux elem glyphs overlap=
match glyphs with
| _ when (List.length overlap) = 2 -> false
| h::t -> if (glyphElemOverlap h elem 0.8 1.1 0.8)
then getSingleElemsAux elem t (h::overlap)
else getSingleElemsAux elem t overlap
| [] when (List.length overlap) = 1 -> true
| _ -> false
;;
*
@edited : 16 - JUL-2012
@author : Josef Baker
@input :
@effects :
@output : list of elements only overlapping one glyph
@edited: 16-JUL-2012
@author: Josef Baker
@input:
@effects:
@output: list of elements only overlapping one glyph
*)
let rec getSingleElems glyphs elems singles =
match elems with
| h::t -> if (getSingleElemsAux h glyphs [])
then getSingleElems glyphs t (h::singles)
else getSingleElems glyphs t singles
| _ -> singles
;;
*
@edited : 16 - JUL-2012
@author : Josef Baker
@input :
@effects :
@output : true if glyph only overlaps one elem
@edited: 16-JUL-2012
@author: Josef Baker
@input:
@effects:
@output: true if glyph only overlaps one elem
*)
let rec getSingleGlyphsAux elems glyph overlap=
match elems with
| _ when (List.length overlap) = 2 -> false
| h::t when glyphElemOverlap glyph h 0.8 1.1 0.8-> getSingleGlyphsAux t glyph (h::overlap)
| h::t -> getSingleGlyphsAux t glyph overlap
| [] when (List.length overlap) = 1 -> true
| _ -> false
;;
*
@edited : 16 - JUL-2012
@author : Josef Baker
@input :
@effects :
@output : List of glyphs only overlapping one elem
@edited: 16-JUL-2012
@author: Josef Baker
@input:
@effects:
@output: List of glyphs only overlapping one elem
*)
let rec getSingleGlyphs glyphs elems singles =
match glyphs with
| h::t when getSingleGlyphsAux elems h [] -> getSingleGlyphs t elems (h::singles)
| h::t -> getSingleGlyphs t elems singles
| _ -> singles
;;
*
@edited : 17 - JUL-2012
@author : Josef Baker
@input :
@effects :
@output :
@edited: 17-JUL-2012
@author: Josef Baker
@input:
@effects:
@output:
*)
let singlesMatch glyphs elems=
let glyphs = getSingleGlyphs glyphs elems [] in
let elems = getSingleElems glyphs elems [] in
let symbols = singleMatch [] elems glyphs 0.8 1.1 0.8 in
glyphLst := removeDupGlyphs (!glyphLst) symbols [];
elemLst := removeDupChars (!elemLst) symbols [];
symbols
;;
*
@edited : 17 - JUL-2012
@author : Josef Baker
@input : single glyph
@effects :
@output : a list of the elements , if any , making the root in that glyph
@edited: 17-JUL-2012
@author: Josef Baker
@input: single glyph
@effects:
@output: a list of the elements ,if any, making the root in that glyph
*)
let getEnclosedRoot glyph =
let line = getTopLine glyph (!elemLst) (!btLn) in
let root = getLeftRoot glyph (!elemLst) (!rtCh) in
if (glyphElemOverlap glyph line 1. 1. 1.)
&& (glyphElemOverlap glyph root 1. 1. 1.)
then [line;root]
else []
;;
*
@edited : 17 - JUL-2012
@author : Josef Baker
@input : glyph list
@effects : removes used glyphs and elems from their respective ref lists
@output : list of root symbols
@edited: 17-JUL-2012
@author: Josef Baker
@input: glyph list
@effects: removes used glyphs and elems from their respective ref lists
@output: list of root symbols
*)
let rec rootMatch glyphs symbols =
match glyphs with
| h::t ->( let root = getEnclosedRoot h in
if (List.length root = 2)
then ((*print_string "found";*)
rootMatch t ({glList=([h]); elList =root}::symbols))
else rootMatch t symbols)
| _ -> ( glyphLst := removeDupGlyphs (!glyphLst) symbols [];
elemLst := removeDupChars (!elemLst) symbols [];
symbols)
*
@edited : 08 - MAY-2012
@author : Josef Baker
@input :
@effects :
@output :
@edited: 08-MAY-2012
@author: Josef Baker
@input:
@effects:
@output:
*)
let makeSymbols glyphs elems =
printElems ( elems ) ;
let bottom = findBottom glyphs 0 in
elemLst := sortElemX (cutElems bottom elems []);
glyphLst := sortGlyphX glyphs;
if (false)then (
print_endline ("Before Match");
printGlyphs (!glyphLst);
printElems (!elemLst);
);
let roots = rootMatch (!glyphLst) [] in
let singles = singlesMatch (!glyphLst) (!elemLst) in
let bigGlyphs = bigGlyphMatch (!glyphLst) [] in
let singles2 = singlesMatch (!glyphLst) (!elemLst) in
let bigGlyphs2 = bigGlyphMatch (!glyphLst) [] in
let uppers = upperMatch (!elemLst) []in
let singles3 = singlesMatch (!glyphLst) (!elemLst) in
let bigGlyphs3 = bigGlyphMatch (!glyphLst) [] in
let lowers = lowerMatch (!elemLst) [] in
let symbols = roots@singles@bigGlyphs@uppers@singles2@bigGlyphs2@singles3@bigGlyphs3@lowers in
let symbols = getAbove symbols [] in
let symbols = basicMatch [ ] in
let symbols = ( ( upperMatch ! [ ] ) @symbols ) in
elemLst : = removeDupChars ! symbols [ ] ;
glyphLst : = sortGlyphX ! glyphLst ;
elemLst : = sortElemX ! ;
let symbols = basicMatch [] in
let symbols = ((upperMatch !elemLst [])@symbols) in
elemLst := removeDupChars !elemLst symbols [];
glyphLst := sortGlyphX !glyphLst;
elemLst := sortElemX !elemLst;
*)
if (List.length (!glyphLst) <(-1))then(
print_endline ("After Match");
printGlyphs (!glyphLst);
printElems (!elemLst);
print_endline ("Next Line");
);
(* printSymbols symbols;*)
symbols
;;
| null |
https://raw.githubusercontent.com/zorkow/MaxTract/cb0b46792bb22d9d30996ae8c9e54a9819d740f9/src/pdfExtract/matcher.ml
|
ocaml
|
print_string "found";
printSymbols symbols;
|
open MatcherUtility;;
open Jsonfio.JsonfIO;;
open Contentparser;;
type symb = {
glList: bBox list;
elList: elem list;
}
let test = ref false
let glyphLst = ref []
let elemLst = ref []
let btLn = ref (Ln {stx=0.; sty=99999.; enx=0.; eny=99999.; lnw=0.})
let rtCh = ref (Chr {chname=""; chfont=""; chsize=0.; chx=99999.; chy=0.; chw=0.})
*
@edited : 08 - JUN-2010
@author : Josef Baker
@input : and list of symbols
@effects :
@output : True if char is within symbol list
@edited: 08-JUN-2010
@author: Josef Baker
@input: Char and list of symbols
@effects:
@output: True if char is within symbol list
*)
let rec checkChar char symbols =
match symbols with
h::t when equalChar char h -> true
| h::t -> checkChar char t
| [] -> false
;;
*
@edited : 07 - NOV-2012
@author : Josef Baker
@input :
@effects :
@output : list of chars not in the symbol list
@edited: 07-NOV-2012
@author: Josef Baker
@input:
@effects:
@output: list of chars not in the symbol list
*)
let rec checkChars inChars outChars symbols =
match inChars with
h::t when checkChar h symbols -> checkChars t outChars symbols
| h::t -> checkChars t (h::outChars) symbols
| [] -> outChars
;;
*
@edited : 08 - JUN-2010
@author : Josef Baker
@input : list , symbol list , empty list
@effects :
@output : list without any of the chars that are also present in the symbol list
@edited: 08-JUN-2010
@author: Josef Baker
@input: Char list, symbol list, empty list
@effects:
@output: Char list without any of the chars that are also present in the symbol list
*)
let rec removeDupChars chars symbols symbolChars =
match symbols with
h::t -> removeDupChars chars t (List.append h.elList symbolChars)
| [] -> checkChars chars [] symbolChars
;;
*
@edited : 08 - JUN-2010
@author : Josef Baker
@input : Glyph and list of symbols
@effects :
@output : True if glyph is within symbol list
@edited: 08-JUN-2010
@author: Josef Baker
@input: Glyph and list of symbols
@effects:
@output: True if glyph is within symbol list
*)
let rec checkGlyph glyph symbols =
match symbols with
h::t -> ( if (equalGlyph glyph h)
then true
else checkGlyph glyph t
)
| [] -> false
;;
let rec checkGlyphs inGlyphs outGlyphs symbols =
match inGlyphs with
h::t -> ( if checkGlyph h symbols then checkGlyphs t outGlyphs symbols
else checkGlyphs t (h::outGlyphs) symbols )
| [] -> outGlyphs
;;
*
@edited : 08 - JUN-2010
@author : Josef Baker
@input : Glyph list , symbol list , empty list
@effects :
@output : Glyph list without any of the glyphs that are also present in the symbol list
@edited: 08-JUN-2010
@author: Josef Baker
@input: Glyph list, symbol list, empty list
@effects:
@output: Glyph list without any of the glyphs that are also present in the symbol list
*)
let rec removeDupGlyphs glyphs symbols symbolGlyphs =
match symbols with
h::t -> removeDupGlyphs glyphs t (List.append h.glList symbolGlyphs)
| [] -> checkGlyphs glyphs [] symbolGlyphs
;;
let rec convElems elems out =
match elems with
h::t ->(match h with
Chr chr -> convElems t (PDFChar {Jsonfio.JsonfIO.c=chr.chname;
bx=(int_of_float chr.chx);
by=(int_of_float chr.chy);
font=chr.chfont;
scale=chr.chsize;}::out)
| Ln ln -> convElems t (Line {sx=(int_of_float ln.stx);
sy=(int_of_float ln.sty);
lw=(int_of_float ln.lnw);
ex=(int_of_float ln.enx);
ey=(int_of_float ln.eny);}::out))
|_ ->out
;;
let rec getBBox xm ym wm hm glyphs =
match glyphs with
hd::tl -> ( getBBox (min xm hd.x)
(min ym hd.y)
((max (xm + wm) (hd.x +hd.w)) - (min xm hd.x) )
((max (ym + hm) (hd.y +hd.h)) - (min ym hd.y) )
tl)
| _ ->{x=xm;y=ym;w=wm;h=hm}
;;
let rec convert symbols out =
match symbols with
h::t -> convert t
({bbox=(getBBox (List.hd h.glList).x
(List.hd h.glList).y
(List.hd h.glList).w
(List.hd h.glList).h h.glList);glyphs=h.glList;elements=(convElems h.elList [])}::out)
| _ -> out
;;
*
@edited : 14 - MAR-2011
@author : Josef Baker
@input : Two symbols
@effects :
@output : True if symbol 1 's y coord is less than 2 's
@edited: 14-MAR-2011
@author: Josef Baker
@input: Two symbols
@effects:
@output: True if symbol 1's y coord is less than 2's
*)
let symbLessY s1 s2=
let el1 = s1.elList in
let el2 = s2.elList in
charLessY (List.hd el1) (List.hd el2)
;;
*
@edited : 14 - MAR-2011
@author : Josef Baker
@input : Symbol list
@effects :
@output : Symbols sorted by y coords
@edited: 14-MAR-2011
@author: Josef Baker
@input: Symbol list
@effects:
@output: Symbols sorted by y coords
*)
let rec sortSymbolY = function
| [] -> []
| pivot :: rest ->
let is_less x = symbLessY x pivot in
let left, right = List.partition is_less rest in
sortSymbolY left @ [pivot] @ sortSymbolY right
;;
*
@edited : 11 - MAY-2012
@author : Josef Baker
@input :
@effects :
@output : list of elements overlapping glyph
@edited: 11-MAY-2012
@author: Josef Baker
@input:
@effects:
@output: list of elements overlapping glyph
*)
let rec getGlyphElemOverlap glyph elems overlap hGScale vScale hEScale=
match elems with
h::t when glyphElemOverlap glyph h hGScale vScale hEScale ->
getGlyphElemOverlap glyph t (h::overlap) hGScale vScale hEScale
| h::t -> getGlyphElemOverlap glyph t overlap hGScale vScale hEScale
| _ -> overlap
;;
let rec multiMatchAux glyphs
match glyphs with
h::t - > (
let overlap = h elems [ ] 1 . 1 . 0.8 in
if ( overlap ) > 1 then
multiMatchAux t elems ( { glList=([h ] ) ; elList = overlap}::symbs )
else
multiMatchAux t elems symbs )
| _ - > symbs
let rec multiMatch symbols=
let symbols1 = ( ( ! ) ( ! ) [ ] ) @symbols in
: = removeDupChars ! [ ] ;
glyphLst : = removeDupGlyphs ! [ ] ;
if ( ) = ( symbols ) then symbols1
else multiMatch symbols1
; ;
let rec symbols =
match chars with
h::t - > ( let upper = findUpper h ( ! ) [ ] in
if ( upper ) = 1 then (
let = { elList=[h];glList = upper } in
glyphLst : = removeDupGlyphs ( ! ) [ ] [ ] ;
upperMatch t ( newSymbol::symbols ) )
else upperMatch t symbols
)
| _ - > ( getAbove symbols [ ] )
; ;
let basicMatch symbols =
let symbols = ( [ ] 1 . 1 . 1.)@symbols in
let symbols = ( [ ] 1 . 1 . 0.4)@symbols in
let symbols = ( [ ] 0.9 1 . 0.4)@symbols in
let symbols = ( [ ] 1 . 1.1 1.)@symbols in
let symbols = ( [ ] 1 . 1.1 0.4)@symbols in
let symbols = sortSymbolY symbols in
let symbols = getAbove symbols [ ] in
symbols
; ;
let rec multiMatchAux glyphs elems symbs =
match glyphs with
h::t ->(
let overlap = getOverlap h elems [] 1. 1. 0.8 in
if (List.length overlap) > 1 then
multiMatchAux t elems ({glList=([h]); elList = overlap}::symbs)
else
multiMatchAux t elems symbs)
| _ -> symbs
let rec multiMatch symbols=
let symbols1 = (multiMatchAux (!glyphLst) (!elemLst) [])@symbols in
elemLst := removeDupChars !elemLst symbols1 [];
glyphLst := removeDupGlyphs !glyphLst symbols1 [];
if (List.length symbols1) = (List.length symbols) then symbols1
else multiMatch symbols1
;;
let rec upperMatch chars symbols =
match chars with
h::t -> (let upper = findUpper h (!glyphLst) [] in
if (List.length upper) = 1 then (
let newSymbol = {elList=[h];glList=upper} in
glyphLst := removeDupGlyphs (!glyphLst) [newSymbol] [];
upperMatch t (newSymbol::symbols))
else upperMatch t symbols
)
| _ -> (getAbove symbols [])
;;
let basicMatch symbols =
let symbols = (singleMatch [] 1. 1. 1.)@symbols in
let symbols = (singleMatch [] 1. 1. 0.4)@symbols in
let symbols = (singleMatch [] 0.9 1. 0.4)@symbols in
let symbols = (singleMatch [] 1. 1.1 1.)@symbols in
let symbols = (singleMatch [] 1. 1.1 0.4)@symbols in
let symbols = sortSymbolY symbols in
let symbols = getAbove symbols [] in
symbols
;;
*)
let hasAbove ch =
(ch ="j") || (ch ="i") || (ch="equal") || (ch="colon") || (ch="greaterequal")
|| (ch="lessequal")|| (ch="semicolon")
;;
let addAbove symbol glyphs =
let above = findAbove (List.hd symbol.elList) glyphs [] in
{elList = symbol.elList; glList=(symbol.glList@above)}
;;
let rec getAbove inSymbols outSymbols =
match inSymbols with
h::t -> ( let c = (List.hd h.elList) in
match c with
Chr ch ->(
if (hasAbove ch.chname) then (
let newSymbol = addAbove h !glyphLst in
glyphLst := removeDupGlyphs !glyphLst [newSymbol] [];
getAbove t (newSymbol::outSymbols)
)
else getAbove t (h::outSymbols))
| _ -> getAbove t (h::outSymbols))
| [] -> outSymbols
;;
* glyph below elem
@edited : 18 - JUL-2012
@author : Josef Baker
@input :
@effects :
@output :
@edited: 18-JUL-2012
@author: Josef Baker
@input:
@effects:
@output:
*)
let rec lowerMatch elems symbols =
match elems with
h::t -> (let lower = findLower h (!glyphLst) [] in
if (List.length lower) = 1 then (
let newSymbol = {elList=[h];glList=lower} in
glyphLst := removeDupGlyphs (!glyphLst) [newSymbol] [];
elemLst := removeDupChars (!elemLst) [newSymbol] [];
lowerMatch t (newSymbol::symbols))
else lowerMatch t symbols
)
| _ -> symbols
;;
* glyph above elem
@edited : 18 - JUL-2012
@author : Josef Baker
@input :
@effects :
@output :
@edited: 18-JUL-2012
@author: Josef Baker
@input:
@effects:
@output:
*)
let rec upperMatch elems symbols =
match elems with
h::t -> (let upper = findUpper h (!glyphLst) [] in
if (List.length upper) = 1 then (
let newSymbol = {elList=[h];glList=upper} in
glyphLst := removeDupGlyphs (!glyphLst) [newSymbol] [];
elemLst := removeDupChars (!elemLst) [newSymbol] [];
upperMatch t (newSymbol::symbols))
else upperMatch t symbols
)
| _ -> symbols
;;
* glyph contains muliple elems
@edited : 17 - JUL-2012
@author : Josef Baker
@input :
@effects :
@output :
@edited: 17-JUL-2012
@author: Josef Baker
@input:
@effects:
@output:
*)
let rec bigGlyphMatch glyphs symbols=
match glyphs with
h::t ->(let overlap = getGlyphElemOverlap h (!elemLst) [] 0.8 1.1 0.8 in
if (List.length overlap) > 1 then(
let symbol = {glList=([h]); elList = overlap} in
glyphLst := removeDupGlyphs (!glyphLst) [symbol] [];
elemLst := removeDupChars (!elemLst) [symbol] [];
bigGlyphMatch t (symbol::symbols))
else bigGlyphMatch t symbols)
| _ -> symbols
;;
*
@edited : 11 - MAY-2012
@author : Josef Baker
@input :
@effects :
@output :
@edited: 11-MAY-2012
@author: Josef Baker
@input:
@effects:
@output:
*)
let rec singleMatchAux glyphs elems symbs hGScale vScale hEScale=
match glyphs with
h::t -> (let overlap = getGlyphElemOverlap h elems [] hGScale vScale hEScale in
if (List.length overlap) = 1 then
singleMatchAux t elems ({glList=([h]); elList =
overlap}::symbs) hGScale vScale hEScale
else
singleMatchAux t elems symbs hGScale vScale hEScale)
| _ -> symbs
;;
let rec singleMatch symbols elems glyphs hGScale vScale hEScale=
let symbols1 = (singleMatchAux glyphs elems [] hGScale vScale hEScale)@symbols in
let elems = removeDupChars elems symbols1 [] in
let glyphs = removeDupGlyphs glyphs symbols1 [] in
if (List.length symbols1) = (List.length symbols) then symbols1
else singleMatch symbols1 elems glyphs hGScale vScale hEScale
;;
*
@edited : 16 - JUL-2012
@author : Josef Baker
@input :
@effects :
@output : true if elem only overlaps one glyph
@edited: 16-JUL-2012
@author: Josef Baker
@input:
@effects:
@output: true if elem only overlaps one glyph
*)
let rec getSingleElemsAux elem glyphs overlap=
match glyphs with
| _ when (List.length overlap) = 2 -> false
| h::t -> if (glyphElemOverlap h elem 0.8 1.1 0.8)
then getSingleElemsAux elem t (h::overlap)
else getSingleElemsAux elem t overlap
| [] when (List.length overlap) = 1 -> true
| _ -> false
;;
*
@edited : 16 - JUL-2012
@author : Josef Baker
@input :
@effects :
@output : list of elements only overlapping one glyph
@edited: 16-JUL-2012
@author: Josef Baker
@input:
@effects:
@output: list of elements only overlapping one glyph
*)
let rec getSingleElems glyphs elems singles =
match elems with
| h::t -> if (getSingleElemsAux h glyphs [])
then getSingleElems glyphs t (h::singles)
else getSingleElems glyphs t singles
| _ -> singles
;;
*
@edited : 16 - JUL-2012
@author : Josef Baker
@input :
@effects :
@output : true if glyph only overlaps one elem
@edited: 16-JUL-2012
@author: Josef Baker
@input:
@effects:
@output: true if glyph only overlaps one elem
*)
let rec getSingleGlyphsAux elems glyph overlap=
match elems with
| _ when (List.length overlap) = 2 -> false
| h::t when glyphElemOverlap glyph h 0.8 1.1 0.8-> getSingleGlyphsAux t glyph (h::overlap)
| h::t -> getSingleGlyphsAux t glyph overlap
| [] when (List.length overlap) = 1 -> true
| _ -> false
;;
*
@edited : 16 - JUL-2012
@author : Josef Baker
@input :
@effects :
@output : List of glyphs only overlapping one elem
@edited: 16-JUL-2012
@author: Josef Baker
@input:
@effects:
@output: List of glyphs only overlapping one elem
*)
let rec getSingleGlyphs glyphs elems singles =
match glyphs with
| h::t when getSingleGlyphsAux elems h [] -> getSingleGlyphs t elems (h::singles)
| h::t -> getSingleGlyphs t elems singles
| _ -> singles
;;
*
@edited : 17 - JUL-2012
@author : Josef Baker
@input :
@effects :
@output :
@edited: 17-JUL-2012
@author: Josef Baker
@input:
@effects:
@output:
*)
let singlesMatch glyphs elems=
let glyphs = getSingleGlyphs glyphs elems [] in
let elems = getSingleElems glyphs elems [] in
let symbols = singleMatch [] elems glyphs 0.8 1.1 0.8 in
glyphLst := removeDupGlyphs (!glyphLst) symbols [];
elemLst := removeDupChars (!elemLst) symbols [];
symbols
;;
*
@edited : 17 - JUL-2012
@author : Josef Baker
@input : single glyph
@effects :
@output : a list of the elements , if any , making the root in that glyph
@edited: 17-JUL-2012
@author: Josef Baker
@input: single glyph
@effects:
@output: a list of the elements ,if any, making the root in that glyph
*)
let getEnclosedRoot glyph =
let line = getTopLine glyph (!elemLst) (!btLn) in
let root = getLeftRoot glyph (!elemLst) (!rtCh) in
if (glyphElemOverlap glyph line 1. 1. 1.)
&& (glyphElemOverlap glyph root 1. 1. 1.)
then [line;root]
else []
;;
*
@edited : 17 - JUL-2012
@author : Josef Baker
@input : glyph list
@effects : removes used glyphs and elems from their respective ref lists
@output : list of root symbols
@edited: 17-JUL-2012
@author: Josef Baker
@input: glyph list
@effects: removes used glyphs and elems from their respective ref lists
@output: list of root symbols
*)
let rec rootMatch glyphs symbols =
match glyphs with
| h::t ->( let root = getEnclosedRoot h in
if (List.length root = 2)
rootMatch t ({glList=([h]); elList =root}::symbols))
else rootMatch t symbols)
| _ -> ( glyphLst := removeDupGlyphs (!glyphLst) symbols [];
elemLst := removeDupChars (!elemLst) symbols [];
symbols)
*
@edited : 08 - MAY-2012
@author : Josef Baker
@input :
@effects :
@output :
@edited: 08-MAY-2012
@author: Josef Baker
@input:
@effects:
@output:
*)
let makeSymbols glyphs elems =
printElems ( elems ) ;
let bottom = findBottom glyphs 0 in
elemLst := sortElemX (cutElems bottom elems []);
glyphLst := sortGlyphX glyphs;
if (false)then (
print_endline ("Before Match");
printGlyphs (!glyphLst);
printElems (!elemLst);
);
let roots = rootMatch (!glyphLst) [] in
let singles = singlesMatch (!glyphLst) (!elemLst) in
let bigGlyphs = bigGlyphMatch (!glyphLst) [] in
let singles2 = singlesMatch (!glyphLst) (!elemLst) in
let bigGlyphs2 = bigGlyphMatch (!glyphLst) [] in
let uppers = upperMatch (!elemLst) []in
let singles3 = singlesMatch (!glyphLst) (!elemLst) in
let bigGlyphs3 = bigGlyphMatch (!glyphLst) [] in
let lowers = lowerMatch (!elemLst) [] in
let symbols = roots@singles@bigGlyphs@uppers@singles2@bigGlyphs2@singles3@bigGlyphs3@lowers in
let symbols = getAbove symbols [] in
let symbols = basicMatch [ ] in
let symbols = ( ( upperMatch ! [ ] ) @symbols ) in
elemLst : = removeDupChars ! symbols [ ] ;
glyphLst : = sortGlyphX ! glyphLst ;
elemLst : = sortElemX ! ;
let symbols = basicMatch [] in
let symbols = ((upperMatch !elemLst [])@symbols) in
elemLst := removeDupChars !elemLst symbols [];
glyphLst := sortGlyphX !glyphLst;
elemLst := sortElemX !elemLst;
*)
if (List.length (!glyphLst) <(-1))then(
print_endline ("After Match");
printGlyphs (!glyphLst);
printElems (!elemLst);
print_endline ("Next Line");
);
symbols
;;
|
668139bb90eb1d6c31f4abf08febb30f8275dbc4710be29f9e3b2960b7e56399
|
karimarttila/clojure
|
reset_azure_table_storage_sessions.clj
|
(ns simpleserver.testutils.reset-azure-table-storage-sessions
(:require [clojure.tools.logging :as log]
[environ.core :as environ]
[simpleserver.sessiondb.session-factory :as ss-session-factory]
[simpleserver.sessiondb.session-service-interface :as ss-session-interface]
[simpleserver.sessiondb.session-table-storage :as ss-session-table-storage]
))
(def session-svc (ss-session-factory/create-session))
(defn reset-azure-table-storage-sessions
[]
(log/debug "ENTER reset-azure-table-storage-sessions")
(let [current-sessions (ss-session-interface/get-sessions session-svc)]
; Calling function directly since it is not part of the actual session interface.
; In real production code we should check the result values, of course.
; Note: we have to embed the map call with dorun since the function remove-token
; is just run for the side effect.
(dorun (map ss-session-table-storage/remove-token current-sessions)))
)
| null |
https://raw.githubusercontent.com/karimarttila/clojure/ee1261b9a8e6be92cb47aeb325f82a278f2c1ed3/clj-ring-cljs-reagent-demo/simple-server/test/simpleserver/testutils/reset_azure_table_storage_sessions.clj
|
clojure
|
Calling function directly since it is not part of the actual session interface.
In real production code we should check the result values, of course.
Note: we have to embed the map call with dorun since the function remove-token
is just run for the side effect.
|
(ns simpleserver.testutils.reset-azure-table-storage-sessions
(:require [clojure.tools.logging :as log]
[environ.core :as environ]
[simpleserver.sessiondb.session-factory :as ss-session-factory]
[simpleserver.sessiondb.session-service-interface :as ss-session-interface]
[simpleserver.sessiondb.session-table-storage :as ss-session-table-storage]
))
(def session-svc (ss-session-factory/create-session))
(defn reset-azure-table-storage-sessions
[]
(log/debug "ENTER reset-azure-table-storage-sessions")
(let [current-sessions (ss-session-interface/get-sessions session-svc)]
(dorun (map ss-session-table-storage/remove-token current-sessions)))
)
|
e0a08b7a4f5fd2ae51e82aafa7e66aa9e1ee588e1c41558b02ab9f627d92b927
|
fluree/db
|
schema.cljc
|
(ns fluree.db.query.schema
(:require [fluree.db.flake :as flake]
[fluree.db.dbproto :as dbproto]
[fluree.db.constants :as const]
[fluree.db.util.async :refer [<? go-try]]
[fluree.db.query.range :as query-range]
[clojure.core.async :refer [go <!] :as async]
[fluree.db.util.log :as log :include-macros true]
[fluree.db.util.core :as util #?(:clj :refer :cljs :refer-macros) [try* catch*]]
[fluree.db.util.schema :as schema-util]))
#?(:clj (set! *warn-on-reflection* true))
(defn pred-name->keyword
"Takes an predicate name (string) and returns the namespace portion of it as a keyword."
[pred-name]
(when (string? pred-name)
(-> (re-find #"[^/]+$" pred-name) ;; take everything after the '/'
keyword)))
(defn- convert-type-to-kw
"Converts a tag sid for a _predicate/type attributes into a keyword of just the 'name'."
[type-tag-sid db]
(go-try
(-> (<? (dbproto/-tag db type-tag-sid "_predicate/type"))
(keyword))))
(defn pred-objects-unique?
[db pred-id]
(go-try
(let [os (->> (query-range/index-range db :psot = [pred-id])
(<?)
(map #(flake/o %)))]
(if (and os (not (empty? os)))
(apply distinct? os) true))))
(defn new-pred-changes
"Returns a map of predicate changes with their respective old
value and new value, both the key and value of the map are two-tuples as follows:
{subid {:new? true
:type {:old :int :new :long}
:index {:old nil :new true }}}
If the predicate being changed is the :type, it resolves the type _tag to its short keyword name
When an old value does not exist, old-val is nil.
If they subject being created is completely new, :new? true "
[db tempids flakes filter?]
(go-try
(let [pred-flakes (if filter?
(filter schema-util/is-pred-flake? flakes)
flakes)
a set of all the new , to be used as a fn
new-map (reduce
#(let [f %2]
(assoc-in %1 [(flake/s f) :new?]
(boolean (is-new? (flake/s f)))))
{} pred-flakes)]
(loop [[f & r] pred-flakes
acc new-map]
(if-not f
acc
(let [pid (flake/p f)
pid-keyword (-> (dbproto/-p-prop db :name pid) (pred-name->keyword))
old-val? (false? (flake/op f))
v (if (= :type pid-keyword)
(<? (convert-type-to-kw (flake/o f) db))
(flake/o f))]
(recur r (if old-val?
(assoc-in acc [(flake/s f) pid-keyword :old] v)
(assoc-in acc [(flake/s f) pid-keyword :new] v)))))))))
(defn type-error
"Throw an error if schema update attempt is invalid."
([current-type new-type throw?]
(type-error nil current-type new-type throw?))
([db current-type new-type throw?]
(let [message (str "Cannot convert an _predicate from " (name current-type) " to " (name new-type) ".")]
(if throw?
(throw (ex-info message
{:status 400
:error :db/invalid-tx}))
db))))
;; TODO - refactor!
(defn predicate-change-error
"Accepts a db (should have root permissions) and a map of predicate changes as produced by new-pred-changes.
Returns a db with updated idxs if relevant, i.e. if non-unique predicate converted to unique
If optional throw? parameter is true, will throw with an ex-info error."
([pred-changes db] (predicate-change-error pred-changes db false))
([pred-changes db throw?]
(go-try
(loop [[[pred-id changes] & r] pred-changes
db db]
(if-not pred-id
db
TODO - use smart functions ?
db* (if (and
(:multi changes)
(false? (:new (:multi changes))) ;; check for explicitly false, not nil
(true? (:old (:multi changes))))
(type-error db "multi-cardinality" "single-cardinality" throw?)
db)
TODO - use smart functions ?
;; :unique cannot be set to true if type is boolean, cannot change from anything to boolean,
;; so only need to check new predicates
db* (if (and
(:unique changes)
(:type changes)
(true? (:new? changes))
(= :boolean (:new (:type changes)))
(true? (:new (:unique changes))))
(if throw?
(throw (ex-info (str "A boolean _predicate cannot be unique.")
{:status 400
:error :db/invalid-tx}))
db*)
db*)
TODO - use smart functions ?
;; :component cannot be set to true for an existing subject (it can be set to false).
db* (if (and
(:component changes)
(not (:new? changes))
(true? (:new (:component changes))))
(type-error db* "a component" "a non-component" throw?)
db*)
;; :unique cannot be set to true for existing predicate if existing values are not unique
db* (if (and
(:unique changes)
(not (:new? changes))
(true? (:new (:unique changes)))
(not (<? (pred-objects-unique? db pred-id))))
(if throw?
(throw (ex-info (str "The _predicate " (dbproto/-p-prop db :name pred-id) " cannot be set to unique, because there are existing non-unique values.")
{:status 400
:error :db/invalid-tx}))
db*) db*)
db* (if (and (:type changes) ;; must be setting the predicate :type
(:old (:type changes)))
(let [{:keys [old new]} (:type changes)]
(cond
(= new old)
db*
;; These types cannot be converted into anything else - and float?
(#{:string :bigint :bigdec} old)
(type-error old new throw?)
:else (case new
;; a long can only be converted from an int or instant
:long (if (#{:int :instant} old)
db* (type-error old new throw?))
BigIntegers can only be converted from an int , long , or instant
:bigint (if (#{:int :long :instant} old)
db* (type-error old new throw?))
;; a double can only be converted from a float, long, or int
:double (if (#{:float :long :int} old)
db* (type-error old new throw?))
;; a float can only be converted from an int, long, or float
:float (if (#{:int :float :long} old)
db* (type-error old new throw?))
BigDecimals can only be converted from a float , double , int , long , biginteger
:bigdec (if (#{:float :double :int :long :bigint} old)
db* (type-error old new throw?))
Strings can be converted from json , geojson , bytes , uuid , uri
:string (if (#{:json :geojson :bytes :uuid :uri} old)
db* (type-error old new throw?))
;; an instant can be converted from a long or int.
:instant (if (#{:long :int} old)
db* (type-error old new throw?))
;; else don't allow any other changes
(type-error old new throw?))))
db*)
;; TODO - use collection spec
;; If new subject, has to specify type. If it has :component true, then :type needs to be ref
db* (if (and
(true? (:new? changes))
(:component changes)
(true? (:new (:component changes)))
(not (= :ref (:new (:type changes)))))
(if throw?
(throw (ex-info (str "A component _predicate must be of type \"ref.\"")
{:status 400
:error :db/invalid-tx}))
db*)
db*)]
(recur r db*)))))))
(defn validate-schema-change
([db tempids flakes] (validate-schema-change db tempids flakes true))
([db tempids flakes filter?]
(go-try
(let [changes (<? (new-pred-changes db tempids flakes filter?))]
(if (empty? changes)
db
(<? (predicate-change-error changes db true)))))))
(def predicate-re #"(?:([^/]+)/)([^/]+)")
(def pred-reverse-ref-re #"(?:([^/]+)/)_([^/]+)")
(defn reverse-ref?
"Reverse refs must be strings that include a '/_' in them, which characters before and after."
([predicate-name]
(reverse-ref? predicate-name false))
([predicate-name throw?]
(if (string? predicate-name)
(boolean (re-matches pred-reverse-ref-re predicate-name))
(if throw?
(throw (ex-info (str "Bad predicate name, should be string: " (pr-str predicate-name))
{:status 400
:error :db/invalid-predicate}))
false))))
(defn reverse-ref
"Reverses an predicate name."
[predicate-name]
(if (string? predicate-name)
(let [[_ ns name] (re-matches #"(?:([^/]+)/)?([^/]+)" predicate-name)]
(if ns
(if (= \_ (nth name 0))
(str ns "/" (subs name 1))
(str ns "/_" name))
(throw (ex-info (str "Bad predicate name, does not contain a namespace portion: " (pr-str predicate-name))
{:status 400
:error :db/invalid-predicate}))))
(throw (ex-info (str "Bad predicate name, should be string: " (pr-str predicate-name))
{:status 400
:error :db/invalid-predicate}))))
;; map of tag subject ids for each of the _predicate/type values for quick lookups
(def ^:const type-sid->type {(flake/->sid const/$_tag const/_predicate$type:string) :string
(flake/->sid const/$_tag const/_predicate$type:ref) :ref
(flake/->sid const/$_tag const/_predicate$type:boolean) :boolean
(flake/->sid const/$_tag const/_predicate$type:instant) :instant
(flake/->sid const/$_tag const/_predicate$type:uuid) :uuid
(flake/->sid const/$_tag const/_predicate$type:uri) :uri
(flake/->sid const/$_tag const/_predicate$type:bytes) :bytes
(flake/->sid const/$_tag const/_predicate$type:int) :int
(flake/->sid const/$_tag const/_predicate$type:long) :long
(flake/->sid const/$_tag const/_predicate$type:bigint) :bigint
(flake/->sid const/$_tag const/_predicate$type:float) :float
(flake/->sid const/$_tag const/_predicate$type:double) :double
(flake/->sid const/$_tag const/_predicate$type:bigdec) :bigdec
(flake/->sid const/$_tag const/_predicate$type:tag) :tag
(flake/->sid const/$_tag const/_predicate$type:json) :json
(flake/->sid const/$_tag const/_predicate$type:geojson) :geojson})
(def ^:const lang-sid->lang {(flake/->sid const/$_tag const/_setting$language:ar) :ar
(flake/->sid const/$_tag const/_setting$language:bn) :bn
(flake/->sid const/$_tag const/_setting$language:br) :br
(flake/->sid const/$_tag const/_setting$language:cn) :cn
(flake/->sid const/$_tag const/_setting$language:en) :en
(flake/->sid const/$_tag const/_setting$language:es) :es
(flake/->sid const/$_tag const/_setting$language:fr) :fr
(flake/->sid const/$_tag const/_setting$language:hi) :hi
(flake/->sid const/$_tag const/_setting$language:id) :id
(flake/->sid const/$_tag const/_setting$language:ru) :ru})
(defn flake->pred-map
[flakes]
(reduce (fn [acc flake] ;; quick lookup map of predicate's predicate ids
(let [p (flake/p flake)
o (flake/o flake)
existing? (get acc p)]
(cond (and existing? (vector? existing?))
(update acc p conj o)
existing?
(update acc p #(vec [%1 %2]) o)
:else
(assoc acc p o))))
{} flakes))
(defn- extract-spec-ids
[spec-pid schema-flakes]
(->> schema-flakes
(keep #(let [f %]
(when (= spec-pid (flake/p f)) (flake/o f))))
vec))
(defn schema-map
"Returns a map of the schema for a db to allow quick lookups of schema properties.
Schema is a map with keys:
- :t - the 't' value when schema built, allows schema equality checks
- :coll - collection info, mapping cid->name and name->cid all within the same map
- :pred - predicate info, mapping pid->properties and name->properties for quick lookup based on id or name respectively
- :fullText - contains predicate ids that need fulltext search
"
[db]
(go-try
(let [schema-flakes (->> (query-range/index-range db :spot >= [(flake/max-subject-id const/$_collection)] <= [0])
(<?))
[collection-flakes predicate-flakes] (partition-by #(<= (flake/s %)
flake/MAX-COLL-SUBJECTS)
schema-flakes)
coll (->> collection-flakes
(partition-by #(flake/s %))
(reduce (fn [acc coll-flakes]
(let [first-flake (first coll-flakes)
sid (flake/s first-flake)
p->v (->> coll-flakes ;; quick lookup map of collection's predicate ids
(reduce
#(let [f %2]
(assoc %1 (flake/p f) (flake/o f)))
{}))
partition (or (get p->v const/$_collection:partition)
(flake/sid->i sid))
c-name (get p->v const/$_collection:name)
specs (when (get p->v const/$_collection:spec) ;; specs are multi-cardinality - if one exists filter through to get all
(extract-spec-ids const/$_collection:spec coll-flakes))
specDoc (get p->v const/$_collection:specDoc)
c-props {:name c-name
:sid sid
:spec specs
:specDoc specDoc
:id partition ;; TODO - deprecate! (use partition instead)
:partition partition}]
(assoc acc partition c-props
c-name c-props)))
put in defaults for _ tx
{-1 {:name "_tx" :id -1 :sid -1 :partition -1 :spec nil :specDoc nil}
"_tx" {:name "_tx" :id -1 :sid -1 :partition -1 :spec nil :specDoc nil}}))
[pred fullText] (->> predicate-flakes
(partition-by #(flake/s %))
(reduce (fn [[pred fullText] pred-flakes]
(let [first-flake (first pred-flakes)
id (flake/s first-flake)
p->v (flake->pred-map pred-flakes)
p-name (get p->v const/$_predicate:name)
p-type (->> (get p->v const/$_predicate:type)
(get type-sid->type))
ref? (boolean (#{:ref :tag} p-type))
idx? (boolean (or ref?
(get p->v const/$_predicate:index)
(get p->v const/$_predicate:unique)))
fullText? (get p->v const/$_predicate:fullText)
p-props {:name p-name
:id id
:type p-type
:ref? ref?
:idx? idx?
:unique (boolean (get p->v const/$_predicate:unique))
:multi (boolean (get p->v const/$_predicate:multi))
:index (boolean (get p->v const/$_predicate:index))
:upsert (boolean (get p->v const/$_predicate:upsert))
:component (boolean (get p->v const/$_predicate:component))
:noHistory (boolean (get p->v const/$_predicate:noHistory))
:restrictCollection (get p->v const/$_predicate:restrictCollection)
:retractDuplicates (boolean (get p->v const/$_predicate:retractDuplicates))
:spec (when (get p->v const/$_predicate:spec) ;; specs are multi-cardinality - if one exists filter through to get all
(extract-spec-ids const/$_predicate:spec pred-flakes))
:specDoc (get p->v const/$_predicate:specDoc)
:txSpec (when (get p->v const/$_predicate:txSpec) ;; specs are multi-cardinality - if one exists filter through to get all
(extract-spec-ids const/$_predicate:txSpec pred-flakes))
:txSpecDoc (get p->v const/$_predicate:txSpecDoc)
:restrictTag (get p->v const/$_predicate:restrictTag)
:fullText fullText?}]
[(assoc pred id p-props
p-name p-props)
(if fullText? (conj fullText id) fullText)])) [{} #{}]))]
{:t (:t db) ;; record time of spec generation, can use to determine cache validity
:coll coll
:pred pred
:fullText fullText})))
(defn setting-map
[db]
(go-try
(let [setting-flakes (try*
(<? (query-range/index-range db :spot = [["_setting/id" "root"]]))
(catch* e nil))
setting-flakes (flake->pred-map setting-flakes)
settings {:passwords (boolean (get setting-flakes const/$_setting:passwords))
:anonymous (get setting-flakes const/$_setting:anonymous)
:language (get lang-sid->lang (get setting-flakes const/$_setting:language))
:ledgers (get setting-flakes const/$_setting:ledgers)
:txMax (get setting-flakes const/$_setting:txMax)
:consensus (get setting-flakes const/$_setting:consensus)}]
settings)))
(defn version
"Returns schema version from a db, which is the :t when the schema was last updated."
[db]
(get-in db [:schema :t]))
| null |
https://raw.githubusercontent.com/fluree/db/27999d15dcd90053119b06c6eb19c24fff137a56/src/fluree/db/query/schema.cljc
|
clojure
|
take everything after the '/'
TODO - refactor!
check for explicitly false, not nil
:unique cannot be set to true if type is boolean, cannot change from anything to boolean,
so only need to check new predicates
:component cannot be set to true for an existing subject (it can be set to false).
:unique cannot be set to true for existing predicate if existing values are not unique
must be setting the predicate :type
These types cannot be converted into anything else - and float?
a long can only be converted from an int or instant
a double can only be converted from a float, long, or int
a float can only be converted from an int, long, or float
an instant can be converted from a long or int.
else don't allow any other changes
TODO - use collection spec
If new subject, has to specify type. If it has :component true, then :type needs to be ref
map of tag subject ids for each of the _predicate/type values for quick lookups
quick lookup map of predicate's predicate ids
quick lookup map of collection's predicate ids
specs are multi-cardinality - if one exists filter through to get all
TODO - deprecate! (use partition instead)
specs are multi-cardinality - if one exists filter through to get all
specs are multi-cardinality - if one exists filter through to get all
record time of spec generation, can use to determine cache validity
|
(ns fluree.db.query.schema
(:require [fluree.db.flake :as flake]
[fluree.db.dbproto :as dbproto]
[fluree.db.constants :as const]
[fluree.db.util.async :refer [<? go-try]]
[fluree.db.query.range :as query-range]
[clojure.core.async :refer [go <!] :as async]
[fluree.db.util.log :as log :include-macros true]
[fluree.db.util.core :as util #?(:clj :refer :cljs :refer-macros) [try* catch*]]
[fluree.db.util.schema :as schema-util]))
#?(:clj (set! *warn-on-reflection* true))
(defn pred-name->keyword
"Takes an predicate name (string) and returns the namespace portion of it as a keyword."
[pred-name]
(when (string? pred-name)
keyword)))
(defn- convert-type-to-kw
"Converts a tag sid for a _predicate/type attributes into a keyword of just the 'name'."
[type-tag-sid db]
(go-try
(-> (<? (dbproto/-tag db type-tag-sid "_predicate/type"))
(keyword))))
(defn pred-objects-unique?
[db pred-id]
(go-try
(let [os (->> (query-range/index-range db :psot = [pred-id])
(<?)
(map #(flake/o %)))]
(if (and os (not (empty? os)))
(apply distinct? os) true))))
(defn new-pred-changes
"Returns a map of predicate changes with their respective old
value and new value, both the key and value of the map are two-tuples as follows:
{subid {:new? true
:type {:old :int :new :long}
:index {:old nil :new true }}}
If the predicate being changed is the :type, it resolves the type _tag to its short keyword name
When an old value does not exist, old-val is nil.
If they subject being created is completely new, :new? true "
[db tempids flakes filter?]
(go-try
(let [pred-flakes (if filter?
(filter schema-util/is-pred-flake? flakes)
flakes)
a set of all the new , to be used as a fn
new-map (reduce
#(let [f %2]
(assoc-in %1 [(flake/s f) :new?]
(boolean (is-new? (flake/s f)))))
{} pred-flakes)]
(loop [[f & r] pred-flakes
acc new-map]
(if-not f
acc
(let [pid (flake/p f)
pid-keyword (-> (dbproto/-p-prop db :name pid) (pred-name->keyword))
old-val? (false? (flake/op f))
v (if (= :type pid-keyword)
(<? (convert-type-to-kw (flake/o f) db))
(flake/o f))]
(recur r (if old-val?
(assoc-in acc [(flake/s f) pid-keyword :old] v)
(assoc-in acc [(flake/s f) pid-keyword :new] v)))))))))
(defn type-error
"Throw an error if schema update attempt is invalid."
([current-type new-type throw?]
(type-error nil current-type new-type throw?))
([db current-type new-type throw?]
(let [message (str "Cannot convert an _predicate from " (name current-type) " to " (name new-type) ".")]
(if throw?
(throw (ex-info message
{:status 400
:error :db/invalid-tx}))
db))))
(defn predicate-change-error
"Accepts a db (should have root permissions) and a map of predicate changes as produced by new-pred-changes.
Returns a db with updated idxs if relevant, i.e. if non-unique predicate converted to unique
If optional throw? parameter is true, will throw with an ex-info error."
([pred-changes db] (predicate-change-error pred-changes db false))
([pred-changes db throw?]
(go-try
(loop [[[pred-id changes] & r] pred-changes
db db]
(if-not pred-id
db
TODO - use smart functions ?
db* (if (and
(:multi changes)
(true? (:old (:multi changes))))
(type-error db "multi-cardinality" "single-cardinality" throw?)
db)
TODO - use smart functions ?
db* (if (and
(:unique changes)
(:type changes)
(true? (:new? changes))
(= :boolean (:new (:type changes)))
(true? (:new (:unique changes))))
(if throw?
(throw (ex-info (str "A boolean _predicate cannot be unique.")
{:status 400
:error :db/invalid-tx}))
db*)
db*)
TODO - use smart functions ?
db* (if (and
(:component changes)
(not (:new? changes))
(true? (:new (:component changes))))
(type-error db* "a component" "a non-component" throw?)
db*)
db* (if (and
(:unique changes)
(not (:new? changes))
(true? (:new (:unique changes)))
(not (<? (pred-objects-unique? db pred-id))))
(if throw?
(throw (ex-info (str "The _predicate " (dbproto/-p-prop db :name pred-id) " cannot be set to unique, because there are existing non-unique values.")
{:status 400
:error :db/invalid-tx}))
db*) db*)
(:old (:type changes)))
(let [{:keys [old new]} (:type changes)]
(cond
(= new old)
db*
(#{:string :bigint :bigdec} old)
(type-error old new throw?)
:else (case new
:long (if (#{:int :instant} old)
db* (type-error old new throw?))
BigIntegers can only be converted from an int , long , or instant
:bigint (if (#{:int :long :instant} old)
db* (type-error old new throw?))
:double (if (#{:float :long :int} old)
db* (type-error old new throw?))
:float (if (#{:int :float :long} old)
db* (type-error old new throw?))
BigDecimals can only be converted from a float , double , int , long , biginteger
:bigdec (if (#{:float :double :int :long :bigint} old)
db* (type-error old new throw?))
Strings can be converted from json , geojson , bytes , uuid , uri
:string (if (#{:json :geojson :bytes :uuid :uri} old)
db* (type-error old new throw?))
:instant (if (#{:long :int} old)
db* (type-error old new throw?))
(type-error old new throw?))))
db*)
db* (if (and
(true? (:new? changes))
(:component changes)
(true? (:new (:component changes)))
(not (= :ref (:new (:type changes)))))
(if throw?
(throw (ex-info (str "A component _predicate must be of type \"ref.\"")
{:status 400
:error :db/invalid-tx}))
db*)
db*)]
(recur r db*)))))))
(defn validate-schema-change
([db tempids flakes] (validate-schema-change db tempids flakes true))
([db tempids flakes filter?]
(go-try
(let [changes (<? (new-pred-changes db tempids flakes filter?))]
(if (empty? changes)
db
(<? (predicate-change-error changes db true)))))))
(def predicate-re #"(?:([^/]+)/)([^/]+)")
(def pred-reverse-ref-re #"(?:([^/]+)/)_([^/]+)")
(defn reverse-ref?
"Reverse refs must be strings that include a '/_' in them, which characters before and after."
([predicate-name]
(reverse-ref? predicate-name false))
([predicate-name throw?]
(if (string? predicate-name)
(boolean (re-matches pred-reverse-ref-re predicate-name))
(if throw?
(throw (ex-info (str "Bad predicate name, should be string: " (pr-str predicate-name))
{:status 400
:error :db/invalid-predicate}))
false))))
(defn reverse-ref
"Reverses an predicate name."
[predicate-name]
(if (string? predicate-name)
(let [[_ ns name] (re-matches #"(?:([^/]+)/)?([^/]+)" predicate-name)]
(if ns
(if (= \_ (nth name 0))
(str ns "/" (subs name 1))
(str ns "/_" name))
(throw (ex-info (str "Bad predicate name, does not contain a namespace portion: " (pr-str predicate-name))
{:status 400
:error :db/invalid-predicate}))))
(throw (ex-info (str "Bad predicate name, should be string: " (pr-str predicate-name))
{:status 400
:error :db/invalid-predicate}))))
(def ^:const type-sid->type {(flake/->sid const/$_tag const/_predicate$type:string) :string
(flake/->sid const/$_tag const/_predicate$type:ref) :ref
(flake/->sid const/$_tag const/_predicate$type:boolean) :boolean
(flake/->sid const/$_tag const/_predicate$type:instant) :instant
(flake/->sid const/$_tag const/_predicate$type:uuid) :uuid
(flake/->sid const/$_tag const/_predicate$type:uri) :uri
(flake/->sid const/$_tag const/_predicate$type:bytes) :bytes
(flake/->sid const/$_tag const/_predicate$type:int) :int
(flake/->sid const/$_tag const/_predicate$type:long) :long
(flake/->sid const/$_tag const/_predicate$type:bigint) :bigint
(flake/->sid const/$_tag const/_predicate$type:float) :float
(flake/->sid const/$_tag const/_predicate$type:double) :double
(flake/->sid const/$_tag const/_predicate$type:bigdec) :bigdec
(flake/->sid const/$_tag const/_predicate$type:tag) :tag
(flake/->sid const/$_tag const/_predicate$type:json) :json
(flake/->sid const/$_tag const/_predicate$type:geojson) :geojson})
(def ^:const lang-sid->lang {(flake/->sid const/$_tag const/_setting$language:ar) :ar
(flake/->sid const/$_tag const/_setting$language:bn) :bn
(flake/->sid const/$_tag const/_setting$language:br) :br
(flake/->sid const/$_tag const/_setting$language:cn) :cn
(flake/->sid const/$_tag const/_setting$language:en) :en
(flake/->sid const/$_tag const/_setting$language:es) :es
(flake/->sid const/$_tag const/_setting$language:fr) :fr
(flake/->sid const/$_tag const/_setting$language:hi) :hi
(flake/->sid const/$_tag const/_setting$language:id) :id
(flake/->sid const/$_tag const/_setting$language:ru) :ru})
(defn flake->pred-map
[flakes]
(let [p (flake/p flake)
o (flake/o flake)
existing? (get acc p)]
(cond (and existing? (vector? existing?))
(update acc p conj o)
existing?
(update acc p #(vec [%1 %2]) o)
:else
(assoc acc p o))))
{} flakes))
(defn- extract-spec-ids
[spec-pid schema-flakes]
(->> schema-flakes
(keep #(let [f %]
(when (= spec-pid (flake/p f)) (flake/o f))))
vec))
(defn schema-map
"Returns a map of the schema for a db to allow quick lookups of schema properties.
Schema is a map with keys:
- :t - the 't' value when schema built, allows schema equality checks
- :coll - collection info, mapping cid->name and name->cid all within the same map
- :pred - predicate info, mapping pid->properties and name->properties for quick lookup based on id or name respectively
- :fullText - contains predicate ids that need fulltext search
"
[db]
(go-try
(let [schema-flakes (->> (query-range/index-range db :spot >= [(flake/max-subject-id const/$_collection)] <= [0])
(<?))
[collection-flakes predicate-flakes] (partition-by #(<= (flake/s %)
flake/MAX-COLL-SUBJECTS)
schema-flakes)
coll (->> collection-flakes
(partition-by #(flake/s %))
(reduce (fn [acc coll-flakes]
(let [first-flake (first coll-flakes)
sid (flake/s first-flake)
(reduce
#(let [f %2]
(assoc %1 (flake/p f) (flake/o f)))
{}))
partition (or (get p->v const/$_collection:partition)
(flake/sid->i sid))
c-name (get p->v const/$_collection:name)
(extract-spec-ids const/$_collection:spec coll-flakes))
specDoc (get p->v const/$_collection:specDoc)
c-props {:name c-name
:sid sid
:spec specs
:specDoc specDoc
:partition partition}]
(assoc acc partition c-props
c-name c-props)))
put in defaults for _ tx
{-1 {:name "_tx" :id -1 :sid -1 :partition -1 :spec nil :specDoc nil}
"_tx" {:name "_tx" :id -1 :sid -1 :partition -1 :spec nil :specDoc nil}}))
[pred fullText] (->> predicate-flakes
(partition-by #(flake/s %))
(reduce (fn [[pred fullText] pred-flakes]
(let [first-flake (first pred-flakes)
id (flake/s first-flake)
p->v (flake->pred-map pred-flakes)
p-name (get p->v const/$_predicate:name)
p-type (->> (get p->v const/$_predicate:type)
(get type-sid->type))
ref? (boolean (#{:ref :tag} p-type))
idx? (boolean (or ref?
(get p->v const/$_predicate:index)
(get p->v const/$_predicate:unique)))
fullText? (get p->v const/$_predicate:fullText)
p-props {:name p-name
:id id
:type p-type
:ref? ref?
:idx? idx?
:unique (boolean (get p->v const/$_predicate:unique))
:multi (boolean (get p->v const/$_predicate:multi))
:index (boolean (get p->v const/$_predicate:index))
:upsert (boolean (get p->v const/$_predicate:upsert))
:component (boolean (get p->v const/$_predicate:component))
:noHistory (boolean (get p->v const/$_predicate:noHistory))
:restrictCollection (get p->v const/$_predicate:restrictCollection)
:retractDuplicates (boolean (get p->v const/$_predicate:retractDuplicates))
(extract-spec-ids const/$_predicate:spec pred-flakes))
:specDoc (get p->v const/$_predicate:specDoc)
(extract-spec-ids const/$_predicate:txSpec pred-flakes))
:txSpecDoc (get p->v const/$_predicate:txSpecDoc)
:restrictTag (get p->v const/$_predicate:restrictTag)
:fullText fullText?}]
[(assoc pred id p-props
p-name p-props)
(if fullText? (conj fullText id) fullText)])) [{} #{}]))]
:coll coll
:pred pred
:fullText fullText})))
(defn setting-map
[db]
(go-try
(let [setting-flakes (try*
(<? (query-range/index-range db :spot = [["_setting/id" "root"]]))
(catch* e nil))
setting-flakes (flake->pred-map setting-flakes)
settings {:passwords (boolean (get setting-flakes const/$_setting:passwords))
:anonymous (get setting-flakes const/$_setting:anonymous)
:language (get lang-sid->lang (get setting-flakes const/$_setting:language))
:ledgers (get setting-flakes const/$_setting:ledgers)
:txMax (get setting-flakes const/$_setting:txMax)
:consensus (get setting-flakes const/$_setting:consensus)}]
settings)))
(defn version
"Returns schema version from a db, which is the :t when the schema was last updated."
[db]
(get-in db [:schema :t]))
|
a48f93a6339e2d6282ce479dc2534d14587ad2492ea659c2517d79bff4261752
|
acieroid/scala-am
|
incdec5.scm
|
(letrec ((counter 0)
(lock (new-lock))
(inc (lambda ()
(acquire lock)
(set! counter (+ counter 1))
(release lock)))
(dec (lambda ()
(acquire lock)
(set! counter (- counter 1))
(release lock)))
(t1 (fork (inc)))
(t2 (fork (dec)))
(t3 (fork (inc)))
(t4 (fork (dec)))
(t5 (fork (inc))))
(join t1)
(join t2)
(join t3)
(join t4)
(join t5))
| null |
https://raw.githubusercontent.com/acieroid/scala-am/13ef3befbfc664b77f31f56847c30d60f4ee7dfe/test/concurrentScheme/threads/variations/incdec5.scm
|
scheme
|
(letrec ((counter 0)
(lock (new-lock))
(inc (lambda ()
(acquire lock)
(set! counter (+ counter 1))
(release lock)))
(dec (lambda ()
(acquire lock)
(set! counter (- counter 1))
(release lock)))
(t1 (fork (inc)))
(t2 (fork (dec)))
(t3 (fork (inc)))
(t4 (fork (dec)))
(t5 (fork (inc))))
(join t1)
(join t2)
(join t3)
(join t4)
(join t5))
|
|
1bcbd0ffe83c91ac2f90c5648f73006282eb68f997bdcb50de1289400f824a9f
|
facebookarchive/duckling_old
|
time.clj
|
(
; Context map
Tuesday Feb 12 , 2013 at 4:30am is the " now " for the tests
{:reference-time (time/t -2 2013 2 12 4 30 0)
:min (time/t -2 1900)
:max (time/t -2 2100)}
"nu"
"just nu"
(datetime 2013 2 12 4 30 00)
"idag"
(datetime 2013 2 12)
"igår"
(datetime 2013 2 11)
"imorgon"
(datetime 2013 2 13)
"måndag"
"mån"
"på måndag"
(datetime 2013 2 18 :day-of-week 1)
"Måndag den 18 februari"
"Mån, 18 februari"
(datetime 2013 2 18 :day-of-week 1 :day 18 :month 2)
"tisdag"
(datetime 2013 2 19)
"torsdag"
"tors"
"tors."
(datetime 2013 2 14)
"fredag"
"fre"
"fre."
(datetime 2013 2 15)
"lördag"
"lör"
"lör."
(datetime 2013 2 16)
"söndag"
"sön"
"sön."
(datetime 2013 2 17)
"Den förste mars"
"Den första mars"
"1. mars"
"Den 1. mars"
(datetime 2013 3 1 :day 1 :month 3)
"3 mars"
"den tredje mars"
"den 3. mars"
(datetime 2013 3 3 :day 3 :month 3)
"3 mars 2015"
"tredje mars 2015"
"3. mars 2015"
"3-3-2015"
"03-03-2015"
"3/3/2015"
"3/3/15"
"2015-3-3"
"2015-03-03"
(datetime 2015 3 3 :day 3 :month 3 :year 2015)
"På den 15."
"På den 15"
"Den 15."
"Den 15"
(datetime 2013 2 15 :day 15)
"den 15. februari"
"15. februari"
"februari 15"
"15-02"
"15/02"
(datetime 2013 2 15 :day 15 :month 2)
"8 Aug"
(datetime 2013 8 8 :day 8 :month 8)
"Oktober 2014"
(datetime 2014 10 :year 2014 :month 10)
"31/10/1974"
"31/10/74"
"31-10-74"
(datetime 1974 10 31 :day 31 :month 10 :year 1974)
"14april 2015"
"April 14, 2015"
"fjortonde April 15"
(datetime 2015 4 14 :day 14 :month 4 :years 2015)
"nästa fredag igen"
(datetime 2013 2 22 :day-of-week 2)
"nästa mars"
(datetime 2013 3)
"nästa mars igen"
(datetime 2014 3)
"Söndag, 10 feb"
"Söndag 10 Feb"
(datetime 2013 2 10 :day-of-week 7 :day 10 :month 2)
"Ons, Feb13"
"Ons feb13"
(datetime 2013 2 13 :day-of-week 3 :day 13 :month 2)
"Måndag, Feb 18"
"Mån, februari 18"
(datetime 2013 2 18 :day-of-week 1 :day 18 :month 2)
; ;; Cycles
"denna vecka"
(datetime 2013 2 11 :grain :week)
"förra vecka"
(datetime 2013 2 4 :grain :week)
"nästa vecka"
(datetime 2013 2 18 :grain :week)
"förra månad"
(datetime 2013 1)
"nästa månad"
(datetime 2013 3)
"detta kvartal"
(datetime 2013 1 1 :grain :quarter)
"nästa kvartal"
(datetime 2013 4 1 :grain :quarter)
"tredje kvartalet"
"3. kvartal"
"3 kvartal"
(datetime 2013 7 1 :grain :quarter)
"4. kvartal 2018"
"fjärde kvartalet 2018"
(datetime 2018 10 1 :grain :quarter)
"förra år"
(datetime 2012)
"i fjol"
(datetime 2012)
"i år"
"detta år"
(datetime 2013)
"nästa år"
(datetime 2014)
"förra söndag"
"söndag i förra veckan"
"söndag förra veckan"
(datetime 2013 2 10 :day-of-week 7)
"förra tisdag"
(datetime 2013 2 5 :day-of-week 2)
when today is Tuesday , " nästa tirsdag " ( next tuesday ) is a week from now
(datetime 2013 2 19 :day-of-week 2)
when today is Tuesday , " nästa onsdag " ( next wednesday ) is tomorrow
(datetime 2013 2 13 :day-of-week 3)
"onsdag i nästa vecka"
"onsdag nästa vecka"
"nästa onsdag igen"
(datetime 2013 2 20 :day-of-week 3)
"nästa fredag igen"
(datetime 2013 2 22 :day-of-week 5)
"måndag denna veckan"
(datetime 2013 2 11 :day-of-week 1)
"tisdag denna vecka"
(datetime 2013 2 12 :day-of-week 2)
"onsdag denna vecka"
(datetime 2013 2 13 :day-of-week 3)
"i överimorgon"
(datetime 2013 2 14)
"i förrgår"
(datetime 2013 2 10)
"sista måndag i mars"
(datetime 2013 3 25 :day-of-week 1)
"sista söndag i mars 2014"
(datetime 2014 3 30 :day-of-week 7)
"tredje dagen i oktober"
"tredje dagen i Oktober"
(datetime 2013 10 3)
"första veckan i oktober 2014"
"första veckan i Oktober 2014"
(datetime 2014 10 6 :grain :week)
" the week of october 6th "
" the week of october 7th "
( datetime 2013 10 7 : grain : week )
"sista dagen i oktober 2015"
"sista dagen i Oktober 2015"
(datetime 2015 10 31)
"sista veckan i september 2014"
"sista veckan i September 2014"
(datetime 2014 9 22 :grain :week)
;; nth of
"första tisdag i oktober"
"första tisdagen i Oktober"
(datetime 2013 10 1)
"tredje tisdagen i september 2014"
"tredje tisdagen i September 2014"
(datetime 2014 9 16)
"första onsdagen i oktober 2014"
"första onsdagen i Oktober 2014"
(datetime 2014 10 1)
"andra onsdagen i oktober 2014"
"andra onsdagen i Oktober 2014"
(datetime 2014 10 8)
;; Hours
"klockan 3"
"kl. 3"
(datetime 2013 2 13 3)
"3:18"
(datetime 2013 2 13 3 18)
"klockan 15"
"kl. 15"
"15h"
(datetime 2013 2 12 15 :hour 3 :meridiem :pm)
"ca. kl. 15" ;; FIXME pm overrides precision
"cirka kl. 15"
"omkring klockan 15"
(datetime 2013 2 12 15 :hour 3 :meridiem :pm) ;; :precision "approximate"
FIXME precision is lost
"imorgon kl. 17 precis"
(datetime 2013 2 13 17 :hour 5 :meridiem :pm) ;; :precision "exact"
"kvart över 15"
"15:15"
(datetime 2013 2 12 15 15 :hour 3 :minute 15 :meridiem :pm)
"kl. 20 över 15"
"klockan 20 över 15"
"tjugo över 15"
"kl. 15:20"
"15:20"
(datetime 2013 2 12 15 20 :hour 3 :minute 20 :meridiem :pm)
"15:30"
(datetime 2013 2 12 15 30 :hour 3 :minute 30 :meridiem :pm)
"15:23:24"
(datetime 2013 2 12 15 23 24 :hour 15 :minute 23 :second 24)
"kvart i 12"
"kvart i tolv"
"11:45"
(datetime 2013 2 12 11 45 :hour 11 :minute 45)
;; Mixing date and time
"klockan 9 på lördag"
(datetime 2013 2 16 9 :day-of-week 6 :hour 9 :meridiem :am)
"Fre, Jul 18, 2014 19:00"
(datetime 2014 7 18 19 0 :day-of-week 5 :hour 7 :meridiem :pm)
"kl. 19:30, Lör, 20 sep"
(datetime 2014 9 20 19 30 :day-of-week 6 :hour 7 :minute 30 :meridiem :pm)
; ;; Involving periods
"om 1 sekund"
"om en sekund"
"en sekund från nu"
(datetime 2013 2 12 4 30 1)
"om 1 minut"
"om en minut"
(datetime 2013 2 12 4 31 0)
"om 2 minuter"
"om två minuter"
"om 2 minuter mer"
"om två minuter mer"
"2 minuter från nu"
"två minuter från nu"
(datetime 2013 2 12 4 32 0)
"om 60 minuter"
(datetime 2013 2 12 5 30 0)
"om en halv timme"
(datetime 2013 2 12 5 0 0)
"om 2,5 timme"
"om 2 och en halv timme"
"om två och en halv timme"
(datetime 2013 2 12 7 0 0)
"om en timme"
"om 1 timme"
"om 1t"
(datetime 2013 2 12 5 30)
"om ett par timmar"
(datetime 2013 2 12 6 30)
"om 24 timmar"
(datetime 2013 2 13 4 30)
"om en dag"
(datetime 2013 2 13 4)
"3 år från idag"
(datetime 2016 2)
"om 7 dagar"
(datetime 2013 2 19 4)
"om en vecka"
(datetime 2013 2 19)
FIXME precision is lost
"om cirka en halv timme"
(datetime 2013 2 12 5 0 0) ;; :precision "approximate"
"7 dagar sedan"
"sju dagar sedan"
(datetime 2013 2 5 4)
"14 dagar sedan"
"fjorton dagar sedan"
(datetime 2013 1 29 4)
"en vecka sedan"
"1 vecka sedan"
(datetime 2013 2 5)
"3 veckor sedan"
"tre veckor sedan"
(datetime 2013 1 22)
"3 månader sedan"
"tre månader sedan"
(datetime 2012 11 12)
"två år sedan"
"2 år sedan"
(datetime 2011 2)
"1954"
(datetime 1954)
" 1 år efter "
" ett år efter "
( datetime 2013 12 ) ; resolves as after last Xmas ...
; Seasons
"denna sommaren"
"den här sommaren"
(datetime-interval [2013 6 21] [2013 9 24])
"denna vintern"
"den här vintern"
(datetime-interval [2012 12 21] [2013 3 21])
US holidays ( / )
"juldagen"
(datetime 2013 12 25)
"nyårsafton"
(datetime 2013 12 31)
"nyårsdagen"
"nyårsdag"
(datetime 2014 1 1)
; Part of day (morning, afternoon...)
"ikväll"
(datetime-interval [2013 2 12 18] [2013 2 13 00])
"förra helg"
(datetime-interval [2013 2 8 18] [2013 2 11 00])
"imorgon kväll"
(datetime-interval [2013 2 13 18] [2013 2 14 00])
"imorgon lunch"
(datetime-interval [2013 2 13 12] [2013 2 13 14])
"igår kväll"
(datetime-interval [2013 2 11 18] [2013 2 12 00])
"denna helgen"
"denna helg"
"i helgen"
(datetime-interval [2013 2 15 18] [2013 2 18 00])
"måndag morgon"
(datetime-interval [2013 2 18 4] [2013 2 18 12])
; Intervals involving cycles
"senaste 2 sekunder"
"senaste två sekunderna"
(datetime-interval [2013 2 12 4 29 58] [2013 2 12 4 30 00])
"nästa 3 sekunder"
"nästa tre sekunder"
(datetime-interval [2013 2 12 4 30 01] [2013 2 12 4 30 04])
"senaste 2 minuter"
"senaste två minuter"
(datetime-interval [2013 2 12 4 28] [2013 2 12 4 30])
"nästa 3 minuter"
"nästa tre minuter"
(datetime-interval [2013 2 12 4 31] [2013 2 12 4 34])
"senaste 1 timme"
" timme "
(datetime-interval [2013 2 12 3] [2013 2 12 4])
"nästa 3 timmar"
"nästa tre timmar"
(datetime-interval [2013 2 12 5] [2013 2 12 8])
"senaste 2 dagar"
"senaste två dagar"
"senaste 2 dagar"
(datetime-interval [2013 2 10] [2013 2 12])
"nästa 3 dagar"
"nästa tre dagar"
(datetime-interval [2013 2 13] [2013 2 16])
"senaste 2 veckor"
"senaste två veckorna"
"senaste två veckor"
(datetime-interval [2013 1 28 :grain :week] [2013 2 11 :grain :week])
"nästa 3 veckor"
"nästa tre veckorna"
(datetime-interval [2013 2 18 :grain :week] [2013 3 11 :grain :week])
"senaste 2 månader"
"senaste två månader"
"senaste två månader"
(datetime-interval [2012 12] [2013 02])
"nästa 3 månader"
"nästa tre månader"
(datetime-interval [2013 3] [2013 6])
"senaste 2 år"
"senaste två år"
"senaste 2 år"
(datetime-interval [2011] [2013])
"nästa 3 år"
"nästa tre år"
(datetime-interval [2014] [2017])
; Explicit intervals
"13-15 juli"
"13-15 Juli"
"13 till 15 Juli"
"13 juli till 15 juli"
(datetime-interval [2013 7 13] [2013 7 16])
"8 Aug - 12 Aug"
"8 Aug - 12 aug"
"8 aug - 12 aug"
"8 augusti - 12 augusti"
(datetime-interval [2013 8 8] [2013 8 13])
"9:30 - 11:00"
"9:30 till 11:00"
(datetime-interval [2013 2 12 9 30] [2013 2 12 11 1])
"från 9:30 - 11:00 på torsdag"
"från 9:30 till 11:00 på torsdag"
"mellan 9:30 och 11:00 på torsdag"
"9:30 - 11:00 på torsdag"
"9:30 till 11:00 på torsdag"
"efter 9:30 men före 11:00 på torsdag"
"torsdag från 9:30 till 11:00"
"torsdag mellan 9:30 och 11:00"
"från 9:30 till 11:00 på torsdag"
(datetime-interval [2013 2 14 9 30] [2013 2 14 11 1])
"torsdag från 9 till 11"
(datetime-interval [2013 2 14 9] [2013 2 14 12])
"11:30-13:30" ; go train this rule!
"11:30-13:30"
"11:30-13:30"
"11:30-13:30"
"11:30-13:30"
"11:30-13:30"
(datetime-interval [2013 2 12 11 30] [2013 2 12 13 31])
"inom 2 veckor"
(datetime-interval [2013 2 12 4 30 0] [2013 2 26])
"innan kl. 14"
"innan klockan 14"
(datetime 2013 2 12 14 :direction :before)
; Timezones
"16 CET"
"kl. 16 CET"
"klockan 16 CET"
(datetime 2013 2 12 16 :hour 4 :meridiem :pm :timezone "CET")
"torsdag kl. 8:00 GMT"
"torsdag klockan 8:00 GMT"
"torsdag 08:00 GMT"
(datetime 2013 2 14 8 00 :timezone "GMT")
tests
"idag kl. 14"
"idag klockan 14"
"kl. 14"
"klockan 14"
(datetime 2013 2 12 14)
"25/4 kl. 16:00"
"25/4 klockan 16:00"
"25-04 klockan 16:00"
"25-4 kl. 16:00"
(datetime 2013 4 25 16 0)
"15:00 imorgon"
"kl. 15:00 imorgon"
"klockan 15:00 imorgon"
(datetime 2013 2 13 15 0)
"efter kl. 14"
"efter klockan 14"
(datetime 2013 2 12 14 :direction :after)
"efter 5 dagar"
"efter fem dagar"
(datetime 2013 2 17 4 :direction :after)
"om 5 dagar"
"om fem dagar"
(datetime 2013 2 17 4)
"efter imorgon kl. 14"
"efter imorgon klockan 14"
FIXME this is actually not ambiguous it 's 2 pm - midnight .
"imorgon efter klockan 14"
(datetime 2013 2 13 14 :direction :after)
"före kl. 11"
"före klockan 11"
(datetime 2013 2 12 11 :direction :before)
FIXME this is actually not ambiguous . it 's midnight to 11 am
"imorgon före klockan 11"
(datetime 2013 2 13 11 :direction :before)
"under eftermiddagen"
(datetime-interval [2013 2 12 12] [2013 2 12 19])
"kl. 13:30"
"klockan 13:30"
(datetime 2013 2 12 13 30)
"om 15 minuter"
(datetime 2013 2 12 4 45 0)
"efter lunch"
(datetime-interval [2013 2 12 13] [2013 2 12 17])
"10:30"
(datetime 2013 2 12 10 30)
"morgon" ;; how should we deal with fb mornings?
(datetime-interval [2013 2 12 4] [2013 2 12 12])
"nästa måndag"
(datetime 2013 2 18 :day-of-week 1)
)
| null |
https://raw.githubusercontent.com/facebookarchive/duckling_old/bf5bb9758c36313b56e136a28ba401696eeff10b/resources/languages/sv/corpus/time.clj
|
clojure
|
Context map
;; Cycles
nth of
Hours
FIXME pm overrides precision
:precision "approximate"
:precision "exact"
Mixing date and time
;; Involving periods
:precision "approximate"
resolves as after last Xmas ...
Seasons
Part of day (morning, afternoon...)
Intervals involving cycles
Explicit intervals
go train this rule!
Timezones
how should we deal with fb mornings?
|
(
Tuesday Feb 12 , 2013 at 4:30am is the " now " for the tests
{:reference-time (time/t -2 2013 2 12 4 30 0)
:min (time/t -2 1900)
:max (time/t -2 2100)}
"nu"
"just nu"
(datetime 2013 2 12 4 30 00)
"idag"
(datetime 2013 2 12)
"igår"
(datetime 2013 2 11)
"imorgon"
(datetime 2013 2 13)
"måndag"
"mån"
"på måndag"
(datetime 2013 2 18 :day-of-week 1)
"Måndag den 18 februari"
"Mån, 18 februari"
(datetime 2013 2 18 :day-of-week 1 :day 18 :month 2)
"tisdag"
(datetime 2013 2 19)
"torsdag"
"tors"
"tors."
(datetime 2013 2 14)
"fredag"
"fre"
"fre."
(datetime 2013 2 15)
"lördag"
"lör"
"lör."
(datetime 2013 2 16)
"söndag"
"sön"
"sön."
(datetime 2013 2 17)
"Den förste mars"
"Den första mars"
"1. mars"
"Den 1. mars"
(datetime 2013 3 1 :day 1 :month 3)
"3 mars"
"den tredje mars"
"den 3. mars"
(datetime 2013 3 3 :day 3 :month 3)
"3 mars 2015"
"tredje mars 2015"
"3. mars 2015"
"3-3-2015"
"03-03-2015"
"3/3/2015"
"3/3/15"
"2015-3-3"
"2015-03-03"
(datetime 2015 3 3 :day 3 :month 3 :year 2015)
"På den 15."
"På den 15"
"Den 15."
"Den 15"
(datetime 2013 2 15 :day 15)
"den 15. februari"
"15. februari"
"februari 15"
"15-02"
"15/02"
(datetime 2013 2 15 :day 15 :month 2)
"8 Aug"
(datetime 2013 8 8 :day 8 :month 8)
"Oktober 2014"
(datetime 2014 10 :year 2014 :month 10)
"31/10/1974"
"31/10/74"
"31-10-74"
(datetime 1974 10 31 :day 31 :month 10 :year 1974)
"14april 2015"
"April 14, 2015"
"fjortonde April 15"
(datetime 2015 4 14 :day 14 :month 4 :years 2015)
"nästa fredag igen"
(datetime 2013 2 22 :day-of-week 2)
"nästa mars"
(datetime 2013 3)
"nästa mars igen"
(datetime 2014 3)
"Söndag, 10 feb"
"Söndag 10 Feb"
(datetime 2013 2 10 :day-of-week 7 :day 10 :month 2)
"Ons, Feb13"
"Ons feb13"
(datetime 2013 2 13 :day-of-week 3 :day 13 :month 2)
"Måndag, Feb 18"
"Mån, februari 18"
(datetime 2013 2 18 :day-of-week 1 :day 18 :month 2)
"denna vecka"
(datetime 2013 2 11 :grain :week)
"förra vecka"
(datetime 2013 2 4 :grain :week)
"nästa vecka"
(datetime 2013 2 18 :grain :week)
"förra månad"
(datetime 2013 1)
"nästa månad"
(datetime 2013 3)
"detta kvartal"
(datetime 2013 1 1 :grain :quarter)
"nästa kvartal"
(datetime 2013 4 1 :grain :quarter)
"tredje kvartalet"
"3. kvartal"
"3 kvartal"
(datetime 2013 7 1 :grain :quarter)
"4. kvartal 2018"
"fjärde kvartalet 2018"
(datetime 2018 10 1 :grain :quarter)
"förra år"
(datetime 2012)
"i fjol"
(datetime 2012)
"i år"
"detta år"
(datetime 2013)
"nästa år"
(datetime 2014)
"förra söndag"
"söndag i förra veckan"
"söndag förra veckan"
(datetime 2013 2 10 :day-of-week 7)
"förra tisdag"
(datetime 2013 2 5 :day-of-week 2)
when today is Tuesday , " nästa tirsdag " ( next tuesday ) is a week from now
(datetime 2013 2 19 :day-of-week 2)
when today is Tuesday , " nästa onsdag " ( next wednesday ) is tomorrow
(datetime 2013 2 13 :day-of-week 3)
"onsdag i nästa vecka"
"onsdag nästa vecka"
"nästa onsdag igen"
(datetime 2013 2 20 :day-of-week 3)
"nästa fredag igen"
(datetime 2013 2 22 :day-of-week 5)
"måndag denna veckan"
(datetime 2013 2 11 :day-of-week 1)
"tisdag denna vecka"
(datetime 2013 2 12 :day-of-week 2)
"onsdag denna vecka"
(datetime 2013 2 13 :day-of-week 3)
"i överimorgon"
(datetime 2013 2 14)
"i förrgår"
(datetime 2013 2 10)
"sista måndag i mars"
(datetime 2013 3 25 :day-of-week 1)
"sista söndag i mars 2014"
(datetime 2014 3 30 :day-of-week 7)
"tredje dagen i oktober"
"tredje dagen i Oktober"
(datetime 2013 10 3)
"första veckan i oktober 2014"
"första veckan i Oktober 2014"
(datetime 2014 10 6 :grain :week)
" the week of october 6th "
" the week of october 7th "
( datetime 2013 10 7 : grain : week )
"sista dagen i oktober 2015"
"sista dagen i Oktober 2015"
(datetime 2015 10 31)
"sista veckan i september 2014"
"sista veckan i September 2014"
(datetime 2014 9 22 :grain :week)
"första tisdag i oktober"
"första tisdagen i Oktober"
(datetime 2013 10 1)
"tredje tisdagen i september 2014"
"tredje tisdagen i September 2014"
(datetime 2014 9 16)
"första onsdagen i oktober 2014"
"första onsdagen i Oktober 2014"
(datetime 2014 10 1)
"andra onsdagen i oktober 2014"
"andra onsdagen i Oktober 2014"
(datetime 2014 10 8)
"klockan 3"
"kl. 3"
(datetime 2013 2 13 3)
"3:18"
(datetime 2013 2 13 3 18)
"klockan 15"
"kl. 15"
"15h"
(datetime 2013 2 12 15 :hour 3 :meridiem :pm)
"cirka kl. 15"
"omkring klockan 15"
FIXME precision is lost
"imorgon kl. 17 precis"
"kvart över 15"
"15:15"
(datetime 2013 2 12 15 15 :hour 3 :minute 15 :meridiem :pm)
"kl. 20 över 15"
"klockan 20 över 15"
"tjugo över 15"
"kl. 15:20"
"15:20"
(datetime 2013 2 12 15 20 :hour 3 :minute 20 :meridiem :pm)
"15:30"
(datetime 2013 2 12 15 30 :hour 3 :minute 30 :meridiem :pm)
"15:23:24"
(datetime 2013 2 12 15 23 24 :hour 15 :minute 23 :second 24)
"kvart i 12"
"kvart i tolv"
"11:45"
(datetime 2013 2 12 11 45 :hour 11 :minute 45)
"klockan 9 på lördag"
(datetime 2013 2 16 9 :day-of-week 6 :hour 9 :meridiem :am)
"Fre, Jul 18, 2014 19:00"
(datetime 2014 7 18 19 0 :day-of-week 5 :hour 7 :meridiem :pm)
"kl. 19:30, Lör, 20 sep"
(datetime 2014 9 20 19 30 :day-of-week 6 :hour 7 :minute 30 :meridiem :pm)
"om 1 sekund"
"om en sekund"
"en sekund från nu"
(datetime 2013 2 12 4 30 1)
"om 1 minut"
"om en minut"
(datetime 2013 2 12 4 31 0)
"om 2 minuter"
"om två minuter"
"om 2 minuter mer"
"om två minuter mer"
"2 minuter från nu"
"två minuter från nu"
(datetime 2013 2 12 4 32 0)
"om 60 minuter"
(datetime 2013 2 12 5 30 0)
"om en halv timme"
(datetime 2013 2 12 5 0 0)
"om 2,5 timme"
"om 2 och en halv timme"
"om två och en halv timme"
(datetime 2013 2 12 7 0 0)
"om en timme"
"om 1 timme"
"om 1t"
(datetime 2013 2 12 5 30)
"om ett par timmar"
(datetime 2013 2 12 6 30)
"om 24 timmar"
(datetime 2013 2 13 4 30)
"om en dag"
(datetime 2013 2 13 4)
"3 år från idag"
(datetime 2016 2)
"om 7 dagar"
(datetime 2013 2 19 4)
"om en vecka"
(datetime 2013 2 19)
FIXME precision is lost
"om cirka en halv timme"
"7 dagar sedan"
"sju dagar sedan"
(datetime 2013 2 5 4)
"14 dagar sedan"
"fjorton dagar sedan"
(datetime 2013 1 29 4)
"en vecka sedan"
"1 vecka sedan"
(datetime 2013 2 5)
"3 veckor sedan"
"tre veckor sedan"
(datetime 2013 1 22)
"3 månader sedan"
"tre månader sedan"
(datetime 2012 11 12)
"två år sedan"
"2 år sedan"
(datetime 2011 2)
"1954"
(datetime 1954)
" 1 år efter "
" ett år efter "
"denna sommaren"
"den här sommaren"
(datetime-interval [2013 6 21] [2013 9 24])
"denna vintern"
"den här vintern"
(datetime-interval [2012 12 21] [2013 3 21])
US holidays ( / )
"juldagen"
(datetime 2013 12 25)
"nyårsafton"
(datetime 2013 12 31)
"nyårsdagen"
"nyårsdag"
(datetime 2014 1 1)
"ikväll"
(datetime-interval [2013 2 12 18] [2013 2 13 00])
"förra helg"
(datetime-interval [2013 2 8 18] [2013 2 11 00])
"imorgon kväll"
(datetime-interval [2013 2 13 18] [2013 2 14 00])
"imorgon lunch"
(datetime-interval [2013 2 13 12] [2013 2 13 14])
"igår kväll"
(datetime-interval [2013 2 11 18] [2013 2 12 00])
"denna helgen"
"denna helg"
"i helgen"
(datetime-interval [2013 2 15 18] [2013 2 18 00])
"måndag morgon"
(datetime-interval [2013 2 18 4] [2013 2 18 12])
"senaste 2 sekunder"
"senaste två sekunderna"
(datetime-interval [2013 2 12 4 29 58] [2013 2 12 4 30 00])
"nästa 3 sekunder"
"nästa tre sekunder"
(datetime-interval [2013 2 12 4 30 01] [2013 2 12 4 30 04])
"senaste 2 minuter"
"senaste två minuter"
(datetime-interval [2013 2 12 4 28] [2013 2 12 4 30])
"nästa 3 minuter"
"nästa tre minuter"
(datetime-interval [2013 2 12 4 31] [2013 2 12 4 34])
"senaste 1 timme"
" timme "
(datetime-interval [2013 2 12 3] [2013 2 12 4])
"nästa 3 timmar"
"nästa tre timmar"
(datetime-interval [2013 2 12 5] [2013 2 12 8])
"senaste 2 dagar"
"senaste två dagar"
"senaste 2 dagar"
(datetime-interval [2013 2 10] [2013 2 12])
"nästa 3 dagar"
"nästa tre dagar"
(datetime-interval [2013 2 13] [2013 2 16])
"senaste 2 veckor"
"senaste två veckorna"
"senaste två veckor"
(datetime-interval [2013 1 28 :grain :week] [2013 2 11 :grain :week])
"nästa 3 veckor"
"nästa tre veckorna"
(datetime-interval [2013 2 18 :grain :week] [2013 3 11 :grain :week])
"senaste 2 månader"
"senaste två månader"
"senaste två månader"
(datetime-interval [2012 12] [2013 02])
"nästa 3 månader"
"nästa tre månader"
(datetime-interval [2013 3] [2013 6])
"senaste 2 år"
"senaste två år"
"senaste 2 år"
(datetime-interval [2011] [2013])
"nästa 3 år"
"nästa tre år"
(datetime-interval [2014] [2017])
"13-15 juli"
"13-15 Juli"
"13 till 15 Juli"
"13 juli till 15 juli"
(datetime-interval [2013 7 13] [2013 7 16])
"8 Aug - 12 Aug"
"8 Aug - 12 aug"
"8 aug - 12 aug"
"8 augusti - 12 augusti"
(datetime-interval [2013 8 8] [2013 8 13])
"9:30 - 11:00"
"9:30 till 11:00"
(datetime-interval [2013 2 12 9 30] [2013 2 12 11 1])
"från 9:30 - 11:00 på torsdag"
"från 9:30 till 11:00 på torsdag"
"mellan 9:30 och 11:00 på torsdag"
"9:30 - 11:00 på torsdag"
"9:30 till 11:00 på torsdag"
"efter 9:30 men före 11:00 på torsdag"
"torsdag från 9:30 till 11:00"
"torsdag mellan 9:30 och 11:00"
"från 9:30 till 11:00 på torsdag"
(datetime-interval [2013 2 14 9 30] [2013 2 14 11 1])
"torsdag från 9 till 11"
(datetime-interval [2013 2 14 9] [2013 2 14 12])
"11:30-13:30"
"11:30-13:30"
"11:30-13:30"
"11:30-13:30"
"11:30-13:30"
(datetime-interval [2013 2 12 11 30] [2013 2 12 13 31])
"inom 2 veckor"
(datetime-interval [2013 2 12 4 30 0] [2013 2 26])
"innan kl. 14"
"innan klockan 14"
(datetime 2013 2 12 14 :direction :before)
"16 CET"
"kl. 16 CET"
"klockan 16 CET"
(datetime 2013 2 12 16 :hour 4 :meridiem :pm :timezone "CET")
"torsdag kl. 8:00 GMT"
"torsdag klockan 8:00 GMT"
"torsdag 08:00 GMT"
(datetime 2013 2 14 8 00 :timezone "GMT")
tests
"idag kl. 14"
"idag klockan 14"
"kl. 14"
"klockan 14"
(datetime 2013 2 12 14)
"25/4 kl. 16:00"
"25/4 klockan 16:00"
"25-04 klockan 16:00"
"25-4 kl. 16:00"
(datetime 2013 4 25 16 0)
"15:00 imorgon"
"kl. 15:00 imorgon"
"klockan 15:00 imorgon"
(datetime 2013 2 13 15 0)
"efter kl. 14"
"efter klockan 14"
(datetime 2013 2 12 14 :direction :after)
"efter 5 dagar"
"efter fem dagar"
(datetime 2013 2 17 4 :direction :after)
"om 5 dagar"
"om fem dagar"
(datetime 2013 2 17 4)
"efter imorgon kl. 14"
"efter imorgon klockan 14"
FIXME this is actually not ambiguous it 's 2 pm - midnight .
"imorgon efter klockan 14"
(datetime 2013 2 13 14 :direction :after)
"före kl. 11"
"före klockan 11"
(datetime 2013 2 12 11 :direction :before)
FIXME this is actually not ambiguous . it 's midnight to 11 am
"imorgon före klockan 11"
(datetime 2013 2 13 11 :direction :before)
"under eftermiddagen"
(datetime-interval [2013 2 12 12] [2013 2 12 19])
"kl. 13:30"
"klockan 13:30"
(datetime 2013 2 12 13 30)
"om 15 minuter"
(datetime 2013 2 12 4 45 0)
"efter lunch"
(datetime-interval [2013 2 12 13] [2013 2 12 17])
"10:30"
(datetime 2013 2 12 10 30)
(datetime-interval [2013 2 12 4] [2013 2 12 12])
"nästa måndag"
(datetime 2013 2 18 :day-of-week 1)
)
|
61dc9b35c0605c86669d7eb52d2275f173e3fc09341c8caaf3b98684b094732d
|
yzh44yzh/practical_erlang
|
test.erl
|
-module(test).
-export([run/0]).
run() ->
case mcache_tests:test() of
ok -> init:stop(0);
error -> init:stop(1)
end.
| null |
https://raw.githubusercontent.com/yzh44yzh/practical_erlang/c9eec8cf44e152bf50d9bc6d5cb87fee4764f609/16_sockets/solution/test/test.erl
|
erlang
|
-module(test).
-export([run/0]).
run() ->
case mcache_tests:test() of
ok -> init:stop(0);
error -> init:stop(1)
end.
|
|
973b1de08741321d8ddcc219b5e1d8baf5df252cc4e667dd1f76dbc844154c66
|
hanshuebner/vlm
|
compile-Minima-for-VLM.lisp
|
-*- Mode : LISP ; Syntax : Common - Lisp ; Package : USER ; Base : 10 ; Patch - File : T -*-
Patch file for Private version 0.0
;;; Reason: Function MINIMA-COMPILER::COMPILE-ENVIRONMENT-FILE: .
;;; Function MINIMA-COMPILER::LOAD-ENVIRONMENT-FILE: .
;;; Function MINIMA-COMPILER::COMPILE-A-FILE: .
;;; Function MINIMA-COMPILER::COMPILE-FORM-TO-STREAM: .
Written by , 2/04/93 10:59:22
while running on Sour Cream from FEP0:>dMinima-49 - E.ilod.1
with Experimental System 447.30 , Experimental CLOS 433.1 , Experimental RPC 437.0 ,
Experimental Embedding Support 429.1 , Experimental MacIvory Support 443.1 ,
Experimental UX Support 438.0 , Experimental Development Utilities 433.0 ,
Experimental Old TV 431.0 , Experimental Zwei 431.4 , Experimental Utilities 440.6 ,
Experimental RPC Development 432.0 , Experimental MacIvory Development 430.0 ,
Experimental UX Development 437.0 , Experimental Server Utilities 438.1 ,
Experimental Serial 431.0 , Experimental Hardcopy 441.2 , Experimental Zmail 438.0 ,
Experimental LMFS Defstorage 416.0 , Experimental SCSI 427.3 ,
Experimental Tape 440.0 , Experimental LMFS 439.0 , Experimental NSage 436.1 ,
Experimental Extended Help 437.0 , Experimental CL Developer 424.0 ,
Experimental Documentation Database 438.1 , Experimental IP - TCP 447.2 ,
Experimental IP - TCP Documentation 420.0 , Experimental CLX 443.0 ,
Experimental X Remote Screen 441.2 , Experimental X Documentation 419.0 ,
Experimental NFS Client 437.0 , Experimental NFS Documentation 421.0 ,
Experimental Serial Networks 4.3 , Experimental Serial Networks Documentation 7.0 ,
Experimental DNA 435.0 , Experimental Metering 440.0 ,
Experimental Metering Substrate 440.0 , Experimental Conversion Tools 432.0 ,
Experimental Hacks 436.0 , Experimental Mac Dex 429.0 ,
Experimental HyperCard / MacIvory 429.0 , Experimental Statice Runtime 461.3 ,
Experimental Statice 461.1 , Experimental Statice Browser 461.0 ,
Experimental Statice Documentation 424.0 , Experimental CLIM 63.21 ,
Experimental Genera CLIM 63.5 , Experimental CLX CLIM 63.1 ,
Experimental PostScript CLIM 63.1 , Experimental CLIM Documentation 63.0 ,
Experimental CLIM Demo 63.3 , Experimental Symbolics Concordia 440.1 ,
Experimental Essential Image Substrate 428.0 , Experimental Image Substrate 436.0 ,
Experimental Graphic Editing Documentation 430.0 ,
Experimental Graphic Editing 437.0 , Experimental Graphic Editor 436.0 ,
Experimental Bitmap Editor 437.0 , Experimental Postscript 432.0 ,
Experimental Concordia Documentation 430.0 , Experimental Lock Simple 433.0 ,
Experimental Producer 417.0 , Version Control 404.4 , Compare Merge 403.0 ,
VC Documentation 401.0 , Symbolics In - House 439.1 ,
Symbolics In - House Documentation 422.0 , SCRC 437.0 , Weather User 421.0 ,
Logical Pathnames Translation Files NEWEST , Experimental IFEP Compiler 52.2 ,
Experimental IFEP Kernel 329.7 , Experimental IFEP Utilities 329.1 ,
Experimental Minima Developer 49.4 , Experimental Minima Kernel 32.15 ,
Experimental Minima Debugger 29.2 , Experimental Minima Documentation 21.0 ,
Palter 's Environment 24.0 , Experimental Alpha Assembler NEWEST ,
Experimental Alpha Ivory Emulator NEWEST , cold load 1 ,
Ivory Revision 4A ( FPA enabled ) , FEP 329 , FEP0:>I329 - loaders.flod(4 ) ,
FEP0:>I329 - info.flod(4 ) , FEP0:>I329 - debug.flod(4 ) , ) ,
FEP0:>I329 - kernel.fep(45 ) , Boot ROM version 320 , Device PROM version 325 ,
Genera application 5.6 , MacIvory SCSI Manager Server 4.3.1 , Toolbox Servers 4.2 ,
MacIvory & RPC library 6.3.2 , MacIvory life support 4.3.6 ,
Macintosh System Software 7.1 , 1152x806 Screen with fonts ,
Machine serial number 30014 , Macintosh IIfx , Apple Extended Keyboard II ,
;;; Add a control register view to the Minima Debugger (from S:>Palter>VLM>control-register-view.lisp.2),
;;; Clear all Minima Debugger histories (from S:>Palter>VLM>clear-all-histories.lisp.1),
Provide access path to UNIX emulator ( from VLM : EMULATOR;UNIX - ACCESS - PATH.LISP.6 ) ,
;;; Force the FEP to print backtraces in error messages by default (from S:>Palter>VLM>FEP-prints-backtraces),
Fake a Rev5 trap dispatch table for the IFEP ( from S:>Palter > VLM > FEP - Rev5 - trap - dispatch - table ) .
#+(OR MINIMA-RUNTIME MINIMA-DEVELOPER) (IN-PACKAGE "COMMON-LISP-USER")
(SCT:FILES-PATCHED-IN-THIS-PATCH-FILE
"MINIMA:COMPILER;TOP-LEVEL-FORMS.LISP.68")
0(SCT : NOTE - PRIVATE - PATCH " Add the : VLM feature while compiling Minima files " )
;========================
(SCT:BEGIN-PATCH-SECTION)
(SCT:PATCH-SECTION-SOURCE-FILE "MINIMA:COMPILER;TOP-LEVEL-FORMS.LISP.68")
#+IMACH
(SCT:PATCH-SECTION-ATTRIBUTES
"-*- Mode: LISP; Syntax: ANSI-Common-Lisp; Package: Minima-Compiler; Lowercase: Yes -*-")
#+IMACH
(defun compile-environment-file (file &key output-file package (verbose *compile-verbose*) (print *compile-print*))
(setf file (merge-pathnames file (make-pathname :type "LISP")))
(when verbose
(format t "~&; Compiling file ~A~%" file))
(setf output-file (make-pathname :type "MEBIN" :defaults (if output-file (merge-pathnames output-file file) file)))
(with-open-file (lisp file)
(with-minima-environment ()
(let* ((*package* (sys:find-package-for-syntax (or package "COMMON-LISP-USER")
:minima))
(minima-common-lisp:*package* *package*)
(cl:*readtable* si:*minima-readtable*)
(*readtable* *standard-readtable*)
(minima-common-lisp:*readtable* *readtable*)
(minima-common-lisp:*compile-file-pathname* file)
(minima-common-lisp:*compile-file-truename* (truename lisp))
(sys:fdefine-file-pathname (scl:send file :generic-pathname))
(*other-features* '((2 0 (NIL 0) (NIL :BOLD NIL) "CPTFONTCB"):VLM 0:Minima-Developer))
(eof '#:eof)
(first-form t))
(values
output-file
minima-common-lisp:*compile-file-truename*
(si:writing-bin-file (bin output-file)
(loop
(let ((form (read lisp nil eof)))
(when (eq form eof) (return))
(process-top-level-form
form
'(() () () () (((compile-file))))
#'minima-macroexpand-1
#'(lambda (form env)
(setf form
(compiler:optimize-top-level-form
form :repeat t :do-macro-expansion t :do-named-constants t
:do-constant-folding t :do-function-args t
:environment env))
(eval form env))
#'(lambda (form env)
(catch 'compiler:phase-1
(setf form
(compiler:optimize-top-level-form
form :compile t :do-style-checking t :environment env
:compile-function #'(lambda (lambda-exp env)
(let ((compiler:*compile-function* #'compiler:compile-to-file)
(compiler:*&rest-arguments-always-dynamic* nil)
(compiler:compiler-verbose print))
(compiler:compile-lambda-exp lambda-exp t nil env))))))
(when (shiftf first-form nil)
(unless (and (consp form)
(member (first form)
'(minima-minima-internals::in-package-1
minima-minima-internals::defpackage-1)))
(warn "~A does not begin with an IN-PACKAGE form." file))
(let ((source-file-id minima-common-lisp:*compile-file-pathname*)
(truename minima-common-lisp:*compile-file-truename*))
(when (eq :newest (pathname-version source-file-id))
(setf source-file-id
(make-pathname :version (pathname-version truename)
:defaults source-file-id)))
(si:dump-attribute-list
`(:syntax :ansi-common-lisp
:package ,(intern (si:pkg-name *package*) "KEYWORD")
:binary-source-file-original-truename ,(string truename)
:qfasl-source-file-unique-id ,source-file-id
:source-file-generic-pathname ,sys:fdefine-file-pathname
)
bin)))
(si:dump-form-to-eval form bin)))))))))))
;========================
(SCT:BEGIN-PATCH-SECTION)
(SCT:PATCH-SECTION-SOURCE-FILE "MINIMA:COMPILER;TOP-LEVEL-FORMS.LISP.68")
#+IMACH
(SCT:PATCH-SECTION-ATTRIBUTES
"-*- Mode: LISP; Syntax: ANSI-Common-Lisp; Package: Minima-Compiler; Lowercase: Yes -*-")
#+IMACH
(defun load-environment-file (file &key (verbose *load-verbose*) (print *load-print*) package default-package)
(flet ((load-it (lisp)
(with-minima-environment ()
(let* ((*package* (sys:find-package-for-syntax
(or package default-package "COMMON-LISP-USER")
:minima))
(minima-common-lisp:*package* *package*)
(cl:*readtable* si:*minima-readtable*)
(*readtable* *standard-readtable*)
(minima-common-lisp:*readtable* *readtable*)
(minima-common-lisp:*load-pathname* (pathname lisp)))
(when verbose
(format t "~&; Loading file ~A~%" minima-common-lisp:*load-pathname*))
(if (subtypep (stream-element-type lisp) 'character)
(let ((*other-features* '(2:VLM 0:Minima-Developer))
(eof '#:eof)
(first-form (not (or package default-package))))
(loop
(let ((form (read lisp nil eof)))
(when (eq form eof) (return))
(when (and (shiftf first-form nil)
(not (and (consp form)
(member (first form)
'(minima-common-lisp:in-package
minima-common-lisp:defpackage)))))
(warn "~A does not begin with an IN-PACKAGE form." file))
(process-top-level-form
form nil #'minima-macroexpand-1 nil
#'(lambda (form env)
(setf form
(compiler:optimize-top-level-form
form :compile t :do-style-checking t :environment env
:compile-function #'(lambda (lambda-exp env)
(let ((compiler:*compile-function* #'compiler:compile-to-core)
(compiler:*&rest-arguments-always-dynamic* nil)
(compiler:compiler-verbose print))
(compiler:compile-lambda-exp lambda-exp t nil env)))))
(if print
(map nil #'print (multiple-value-list (eval form env)))
(eval form env)))))))
(fs:load-stream lisp package t))))))
(cond ((streamp file)
(load-it file)
nil)
(t
(setf file (pathname file))
(sys:with-open-file-search (lisp ('load-environment-file *default-pathname-defaults* nil)
((lambda (pathname)
(case (pathname-type pathname)
((nil :unspecific)
(values '(:mebin :lisp) pathname))
(otherwise
(values (list (pathname-type pathname)) pathname))))
file)
:element-type :default)
(let ((sys:fdefine-file-pathname (scl:send (pathname lisp) :generic-pathname))
(minima-common-lisp:*load-truename* (truename lisp)))
(load-it lisp)
minima-common-lisp:*load-truename*))))))
;========================
(SCT:BEGIN-PATCH-SECTION)
(SCT:PATCH-SECTION-SOURCE-FILE "MINIMA:COMPILER;TOP-LEVEL-FORMS.LISP.68")
#+IMACH
(SCT:PATCH-SECTION-ATTRIBUTES
"-*- Mode: LISP; Syntax: ANSI-Common-Lisp; Package: Minima-Compiler; Lowercase: Yes -*-")
#+IMACH
(defun compile-a-file (file &key (output-file file) package (verbose *compile-verbose*) (print *compile-print*))
(setf file (merge-pathnames file (make-pathname :type "LISP")))
(when verbose
(format t "~&; Compiling file ~A~%" file))
(setf output-file (make-pathname :type "MBIN"
:defaults (if output-file
(merge-pathnames output-file file)
file)))
(with-open-file (lisp file)
(with-open-file (bin output-file :direction :output :element-type '(unsigned-byte 8))
(minima-minima-internals::with-dumper-1
#'(lambda (dumper)
(with-minima-environment ()
(let* ((*package* (sys:find-package-for-syntax (or package "COMMON-LISP-USER")
:minima))
(cl:*readtable* si:*minima-readtable*)
(*readtable* *standard-readtable*)
(eof '#:eof)
(minima-common-lisp:*compile-file-pathname* (pathname lisp))
(minima-common-lisp:*compile-file-truename* (truename lisp))
(sys:fdefine-file-pathname
(scl:send minima-common-lisp:*compile-file-pathname* :generic-pathname))
(minima-common-lisp:*package* *package*)
(*other-features* '(2:VLM 0:Minima-Runtime))
(first-form t))
(loop
(let ((form (read lisp nil eof)))
(when (eq form eof) (return))
(when (and (shiftf first-form nil)
(not (and (consp form)
(member (first form)
'(minima-common-lisp:in-package
minima-common-lisp:defpackage)))))
(warn "~A does not begin with an IN-PACKAGE form." file))
(process-top-level-form
form '(() () () () (((compile-file))))
#'minima-macroexpand-1
#'eval
#'(lambda (form env)
(unless (constantp form env)
(let ((compiler:compiler-verbose print))
(minima-minima-internals::dump-form-to-evaluate form env dumper bin)))))))
(values output-file
minima-common-lisp:*compile-file-truename*
(truename bin)))))
bin))))
;========================
(SCT:BEGIN-PATCH-SECTION)
(SCT:PATCH-SECTION-SOURCE-FILE "MINIMA:COMPILER;TOP-LEVEL-FORMS.LISP.68")
#+IMACH
(SCT:PATCH-SECTION-ATTRIBUTES
"-*- Mode: LISP; Syntax: ANSI-Common-Lisp; Package: Minima-Compiler; Lowercase: Yes -*-")
#+IMACH
(defun compile-form-to-stream (form bin)
(minima-minima-internals::with-dumper-1
#'(lambda (dumper)
(with-minima-environment ()
(let* ((*package* (sys:find-package-for-syntax "COMMON-LISP-USER" :minima))
(cl:*readtable* si:*minima-readtable*)
(*readtable* *standard-readtable*)
(minima-common-lisp:*compile-file-pathname* nil)
(minima-common-lisp:*compile-file-truename* nil)
(sys:fdefine-file-pathname nil)
(minima-common-lisp:*package* *package*)
(*other-features* '(2:VLM 0:Minima-Runtime)))
(process-top-level-form
form '(() () () () (((compile-file))))
#'minima-macroexpand-1
#'eval
#'(lambda (form env)
(unless (constantp form env)
(minima-minima-internals::dump-form-to-evaluate form env dumper bin)))))))
bin))
| null |
https://raw.githubusercontent.com/hanshuebner/vlm/20510ddc98b52252a406012a50a4d3bbd1b75dd0/support/compile-Minima-for-VLM.lisp
|
lisp
|
Syntax : Common - Lisp ; Package : USER ; Base : 10 ; Patch - File : T -*-
Reason: Function MINIMA-COMPILER::COMPILE-ENVIRONMENT-FILE: .
Function MINIMA-COMPILER::LOAD-ENVIRONMENT-FILE: .
Function MINIMA-COMPILER::COMPILE-A-FILE: .
Function MINIMA-COMPILER::COMPILE-FORM-TO-STREAM: .
Add a control register view to the Minima Debugger (from S:>Palter>VLM>control-register-view.lisp.2),
Clear all Minima Debugger histories (from S:>Palter>VLM>clear-all-histories.lisp.1),
UNIX - ACCESS - PATH.LISP.6 ) ,
Force the FEP to print backtraces in error messages by default (from S:>Palter>VLM>FEP-prints-backtraces),
========================
========================
========================
========================
|
Patch file for Private version 0.0
Written by , 2/04/93 10:59:22
while running on Sour Cream from FEP0:>dMinima-49 - E.ilod.1
with Experimental System 447.30 , Experimental CLOS 433.1 , Experimental RPC 437.0 ,
Experimental Embedding Support 429.1 , Experimental MacIvory Support 443.1 ,
Experimental UX Support 438.0 , Experimental Development Utilities 433.0 ,
Experimental Old TV 431.0 , Experimental Zwei 431.4 , Experimental Utilities 440.6 ,
Experimental RPC Development 432.0 , Experimental MacIvory Development 430.0 ,
Experimental UX Development 437.0 , Experimental Server Utilities 438.1 ,
Experimental Serial 431.0 , Experimental Hardcopy 441.2 , Experimental Zmail 438.0 ,
Experimental LMFS Defstorage 416.0 , Experimental SCSI 427.3 ,
Experimental Tape 440.0 , Experimental LMFS 439.0 , Experimental NSage 436.1 ,
Experimental Extended Help 437.0 , Experimental CL Developer 424.0 ,
Experimental Documentation Database 438.1 , Experimental IP - TCP 447.2 ,
Experimental IP - TCP Documentation 420.0 , Experimental CLX 443.0 ,
Experimental X Remote Screen 441.2 , Experimental X Documentation 419.0 ,
Experimental NFS Client 437.0 , Experimental NFS Documentation 421.0 ,
Experimental Serial Networks 4.3 , Experimental Serial Networks Documentation 7.0 ,
Experimental DNA 435.0 , Experimental Metering 440.0 ,
Experimental Metering Substrate 440.0 , Experimental Conversion Tools 432.0 ,
Experimental Hacks 436.0 , Experimental Mac Dex 429.0 ,
Experimental HyperCard / MacIvory 429.0 , Experimental Statice Runtime 461.3 ,
Experimental Statice 461.1 , Experimental Statice Browser 461.0 ,
Experimental Statice Documentation 424.0 , Experimental CLIM 63.21 ,
Experimental Genera CLIM 63.5 , Experimental CLX CLIM 63.1 ,
Experimental PostScript CLIM 63.1 , Experimental CLIM Documentation 63.0 ,
Experimental CLIM Demo 63.3 , Experimental Symbolics Concordia 440.1 ,
Experimental Essential Image Substrate 428.0 , Experimental Image Substrate 436.0 ,
Experimental Graphic Editing Documentation 430.0 ,
Experimental Graphic Editing 437.0 , Experimental Graphic Editor 436.0 ,
Experimental Bitmap Editor 437.0 , Experimental Postscript 432.0 ,
Experimental Concordia Documentation 430.0 , Experimental Lock Simple 433.0 ,
Experimental Producer 417.0 , Version Control 404.4 , Compare Merge 403.0 ,
VC Documentation 401.0 , Symbolics In - House 439.1 ,
Symbolics In - House Documentation 422.0 , SCRC 437.0 , Weather User 421.0 ,
Logical Pathnames Translation Files NEWEST , Experimental IFEP Compiler 52.2 ,
Experimental IFEP Kernel 329.7 , Experimental IFEP Utilities 329.1 ,
Experimental Minima Developer 49.4 , Experimental Minima Kernel 32.15 ,
Experimental Minima Debugger 29.2 , Experimental Minima Documentation 21.0 ,
Palter 's Environment 24.0 , Experimental Alpha Assembler NEWEST ,
Experimental Alpha Ivory Emulator NEWEST , cold load 1 ,
Ivory Revision 4A ( FPA enabled ) , FEP 329 , FEP0:>I329 - loaders.flod(4 ) ,
FEP0:>I329 - info.flod(4 ) , FEP0:>I329 - debug.flod(4 ) , ) ,
FEP0:>I329 - kernel.fep(45 ) , Boot ROM version 320 , Device PROM version 325 ,
Genera application 5.6 , MacIvory SCSI Manager Server 4.3.1 , Toolbox Servers 4.2 ,
MacIvory & RPC library 6.3.2 , MacIvory life support 4.3.6 ,
Macintosh System Software 7.1 , 1152x806 Screen with fonts ,
Machine serial number 30014 , Macintosh IIfx , Apple Extended Keyboard II ,
Fake a Rev5 trap dispatch table for the IFEP ( from S:>Palter > VLM > FEP - Rev5 - trap - dispatch - table ) .
#+(OR MINIMA-RUNTIME MINIMA-DEVELOPER) (IN-PACKAGE "COMMON-LISP-USER")
(SCT:FILES-PATCHED-IN-THIS-PATCH-FILE
"MINIMA:COMPILER;TOP-LEVEL-FORMS.LISP.68")
0(SCT : NOTE - PRIVATE - PATCH " Add the : VLM feature while compiling Minima files " )
(SCT:BEGIN-PATCH-SECTION)
(SCT:PATCH-SECTION-SOURCE-FILE "MINIMA:COMPILER;TOP-LEVEL-FORMS.LISP.68")
#+IMACH
(SCT:PATCH-SECTION-ATTRIBUTES
"-*- Mode: LISP; Syntax: ANSI-Common-Lisp; Package: Minima-Compiler; Lowercase: Yes -*-")
#+IMACH
(defun compile-environment-file (file &key output-file package (verbose *compile-verbose*) (print *compile-print*))
(setf file (merge-pathnames file (make-pathname :type "LISP")))
(when verbose
(format t "~&; Compiling file ~A~%" file))
(setf output-file (make-pathname :type "MEBIN" :defaults (if output-file (merge-pathnames output-file file) file)))
(with-open-file (lisp file)
(with-minima-environment ()
(let* ((*package* (sys:find-package-for-syntax (or package "COMMON-LISP-USER")
:minima))
(minima-common-lisp:*package* *package*)
(cl:*readtable* si:*minima-readtable*)
(*readtable* *standard-readtable*)
(minima-common-lisp:*readtable* *readtable*)
(minima-common-lisp:*compile-file-pathname* file)
(minima-common-lisp:*compile-file-truename* (truename lisp))
(sys:fdefine-file-pathname (scl:send file :generic-pathname))
(*other-features* '((2 0 (NIL 0) (NIL :BOLD NIL) "CPTFONTCB"):VLM 0:Minima-Developer))
(eof '#:eof)
(first-form t))
(values
output-file
minima-common-lisp:*compile-file-truename*
(si:writing-bin-file (bin output-file)
(loop
(let ((form (read lisp nil eof)))
(when (eq form eof) (return))
(process-top-level-form
form
'(() () () () (((compile-file))))
#'minima-macroexpand-1
#'(lambda (form env)
(setf form
(compiler:optimize-top-level-form
form :repeat t :do-macro-expansion t :do-named-constants t
:do-constant-folding t :do-function-args t
:environment env))
(eval form env))
#'(lambda (form env)
(catch 'compiler:phase-1
(setf form
(compiler:optimize-top-level-form
form :compile t :do-style-checking t :environment env
:compile-function #'(lambda (lambda-exp env)
(let ((compiler:*compile-function* #'compiler:compile-to-file)
(compiler:*&rest-arguments-always-dynamic* nil)
(compiler:compiler-verbose print))
(compiler:compile-lambda-exp lambda-exp t nil env))))))
(when (shiftf first-form nil)
(unless (and (consp form)
(member (first form)
'(minima-minima-internals::in-package-1
minima-minima-internals::defpackage-1)))
(warn "~A does not begin with an IN-PACKAGE form." file))
(let ((source-file-id minima-common-lisp:*compile-file-pathname*)
(truename minima-common-lisp:*compile-file-truename*))
(when (eq :newest (pathname-version source-file-id))
(setf source-file-id
(make-pathname :version (pathname-version truename)
:defaults source-file-id)))
(si:dump-attribute-list
`(:syntax :ansi-common-lisp
:package ,(intern (si:pkg-name *package*) "KEYWORD")
:binary-source-file-original-truename ,(string truename)
:qfasl-source-file-unique-id ,source-file-id
:source-file-generic-pathname ,sys:fdefine-file-pathname
)
bin)))
(si:dump-form-to-eval form bin)))))))))))
(SCT:BEGIN-PATCH-SECTION)
(SCT:PATCH-SECTION-SOURCE-FILE "MINIMA:COMPILER;TOP-LEVEL-FORMS.LISP.68")
#+IMACH
(SCT:PATCH-SECTION-ATTRIBUTES
"-*- Mode: LISP; Syntax: ANSI-Common-Lisp; Package: Minima-Compiler; Lowercase: Yes -*-")
#+IMACH
(defun load-environment-file (file &key (verbose *load-verbose*) (print *load-print*) package default-package)
(flet ((load-it (lisp)
(with-minima-environment ()
(let* ((*package* (sys:find-package-for-syntax
(or package default-package "COMMON-LISP-USER")
:minima))
(minima-common-lisp:*package* *package*)
(cl:*readtable* si:*minima-readtable*)
(*readtable* *standard-readtable*)
(minima-common-lisp:*readtable* *readtable*)
(minima-common-lisp:*load-pathname* (pathname lisp)))
(when verbose
(format t "~&; Loading file ~A~%" minima-common-lisp:*load-pathname*))
(if (subtypep (stream-element-type lisp) 'character)
(let ((*other-features* '(2:VLM 0:Minima-Developer))
(eof '#:eof)
(first-form (not (or package default-package))))
(loop
(let ((form (read lisp nil eof)))
(when (eq form eof) (return))
(when (and (shiftf first-form nil)
(not (and (consp form)
(member (first form)
'(minima-common-lisp:in-package
minima-common-lisp:defpackage)))))
(warn "~A does not begin with an IN-PACKAGE form." file))
(process-top-level-form
form nil #'minima-macroexpand-1 nil
#'(lambda (form env)
(setf form
(compiler:optimize-top-level-form
form :compile t :do-style-checking t :environment env
:compile-function #'(lambda (lambda-exp env)
(let ((compiler:*compile-function* #'compiler:compile-to-core)
(compiler:*&rest-arguments-always-dynamic* nil)
(compiler:compiler-verbose print))
(compiler:compile-lambda-exp lambda-exp t nil env)))))
(if print
(map nil #'print (multiple-value-list (eval form env)))
(eval form env)))))))
(fs:load-stream lisp package t))))))
(cond ((streamp file)
(load-it file)
nil)
(t
(setf file (pathname file))
(sys:with-open-file-search (lisp ('load-environment-file *default-pathname-defaults* nil)
((lambda (pathname)
(case (pathname-type pathname)
((nil :unspecific)
(values '(:mebin :lisp) pathname))
(otherwise
(values (list (pathname-type pathname)) pathname))))
file)
:element-type :default)
(let ((sys:fdefine-file-pathname (scl:send (pathname lisp) :generic-pathname))
(minima-common-lisp:*load-truename* (truename lisp)))
(load-it lisp)
minima-common-lisp:*load-truename*))))))
(SCT:BEGIN-PATCH-SECTION)
(SCT:PATCH-SECTION-SOURCE-FILE "MINIMA:COMPILER;TOP-LEVEL-FORMS.LISP.68")
#+IMACH
(SCT:PATCH-SECTION-ATTRIBUTES
"-*- Mode: LISP; Syntax: ANSI-Common-Lisp; Package: Minima-Compiler; Lowercase: Yes -*-")
#+IMACH
(defun compile-a-file (file &key (output-file file) package (verbose *compile-verbose*) (print *compile-print*))
(setf file (merge-pathnames file (make-pathname :type "LISP")))
(when verbose
(format t "~&; Compiling file ~A~%" file))
(setf output-file (make-pathname :type "MBIN"
:defaults (if output-file
(merge-pathnames output-file file)
file)))
(with-open-file (lisp file)
(with-open-file (bin output-file :direction :output :element-type '(unsigned-byte 8))
(minima-minima-internals::with-dumper-1
#'(lambda (dumper)
(with-minima-environment ()
(let* ((*package* (sys:find-package-for-syntax (or package "COMMON-LISP-USER")
:minima))
(cl:*readtable* si:*minima-readtable*)
(*readtable* *standard-readtable*)
(eof '#:eof)
(minima-common-lisp:*compile-file-pathname* (pathname lisp))
(minima-common-lisp:*compile-file-truename* (truename lisp))
(sys:fdefine-file-pathname
(scl:send minima-common-lisp:*compile-file-pathname* :generic-pathname))
(minima-common-lisp:*package* *package*)
(*other-features* '(2:VLM 0:Minima-Runtime))
(first-form t))
(loop
(let ((form (read lisp nil eof)))
(when (eq form eof) (return))
(when (and (shiftf first-form nil)
(not (and (consp form)
(member (first form)
'(minima-common-lisp:in-package
minima-common-lisp:defpackage)))))
(warn "~A does not begin with an IN-PACKAGE form." file))
(process-top-level-form
form '(() () () () (((compile-file))))
#'minima-macroexpand-1
#'eval
#'(lambda (form env)
(unless (constantp form env)
(let ((compiler:compiler-verbose print))
(minima-minima-internals::dump-form-to-evaluate form env dumper bin)))))))
(values output-file
minima-common-lisp:*compile-file-truename*
(truename bin)))))
bin))))
(SCT:BEGIN-PATCH-SECTION)
(SCT:PATCH-SECTION-SOURCE-FILE "MINIMA:COMPILER;TOP-LEVEL-FORMS.LISP.68")
#+IMACH
(SCT:PATCH-SECTION-ATTRIBUTES
"-*- Mode: LISP; Syntax: ANSI-Common-Lisp; Package: Minima-Compiler; Lowercase: Yes -*-")
#+IMACH
(defun compile-form-to-stream (form bin)
(minima-minima-internals::with-dumper-1
#'(lambda (dumper)
(with-minima-environment ()
(let* ((*package* (sys:find-package-for-syntax "COMMON-LISP-USER" :minima))
(cl:*readtable* si:*minima-readtable*)
(*readtable* *standard-readtable*)
(minima-common-lisp:*compile-file-pathname* nil)
(minima-common-lisp:*compile-file-truename* nil)
(sys:fdefine-file-pathname nil)
(minima-common-lisp:*package* *package*)
(*other-features* '(2:VLM 0:Minima-Runtime)))
(process-top-level-form
form '(() () () () (((compile-file))))
#'minima-macroexpand-1
#'eval
#'(lambda (form env)
(unless (constantp form env)
(minima-minima-internals::dump-form-to-evaluate form env dumper bin)))))))
bin))
|
ba1d379805f0faa84d366ac81de86d8334967b3d88f963530f24f97488e65601
|
BranchTaken/Hemlock
|
test_of_real_to_real.ml
|
open! Basis.Rudiments
open! Basis
open U64
let test () =
let rec test_rs rs = begin
match rs with
| [] -> ()
| r :: rs' -> begin
let x = of_real r in
File.Fmt.stdout
|> Fmt.fmt "of_real "
|> Real.fmt ~alt:true ~radix:Radix.Hex r
|> Fmt.fmt " -> "
|> fmt ~alt:true ~zpad:true ~width:16L ~radix:Radix.Hex x
|> Fmt.fmt "; to_real -> "
|> Real.fmt ~alt:true ~radix:Radix.Hex (to_real x)
|> Fmt.fmt "\n"
|> ignore;
test_rs rs'
end
end in
let rs = [
-1.;
0.;
0x1.1p-1;
1.;
0x1.f_ffff_ffff_ffffp48;
0x1.f_ffff_ffff_ffffp52;
0x1.f_ffff_ffff_ffffp56;
0x1.f_ffff_ffff_ffffp63;
0x1.f_ffff_ffff_ffffp64;
0x1.f_ffff_ffff_ffffp68;
0x1p62;
0x1p63;
0x1p64;
] in
test_rs rs;
File.Fmt.stdout |> Fmt.fmt "\n" |> ignore;
let rec test_xs xs = begin
match xs with
| [] -> ()
| x :: xs' -> begin
let r = to_real x in
File.Fmt.stdout
|> Fmt.fmt "to_real "
|> fmt ~alt:true ~zpad:true ~width:16L ~radix:Radix.Hex x
|> Fmt.fmt " -> "
|> Real.fmt ~alt:true ~radix:Radix.Hex r
|> Fmt.fmt "; of_real -> "
|> fmt ~alt:true ~zpad:true ~width:16L ~radix:Radix.Hex (of_real r)
|> Fmt.fmt "\n"
|> ignore;
test_xs xs'
end
end in
let xs = [
zero;
one;
max_value;
] in
test_xs xs
let _ = test ()
| null |
https://raw.githubusercontent.com/BranchTaken/Hemlock/a07e362d66319108c1478a4cbebab765c1808b1a/bootstrap/test/basis/u64/test_of_real_to_real.ml
|
ocaml
|
open! Basis.Rudiments
open! Basis
open U64
let test () =
let rec test_rs rs = begin
match rs with
| [] -> ()
| r :: rs' -> begin
let x = of_real r in
File.Fmt.stdout
|> Fmt.fmt "of_real "
|> Real.fmt ~alt:true ~radix:Radix.Hex r
|> Fmt.fmt " -> "
|> fmt ~alt:true ~zpad:true ~width:16L ~radix:Radix.Hex x
|> Fmt.fmt "; to_real -> "
|> Real.fmt ~alt:true ~radix:Radix.Hex (to_real x)
|> Fmt.fmt "\n"
|> ignore;
test_rs rs'
end
end in
let rs = [
-1.;
0.;
0x1.1p-1;
1.;
0x1.f_ffff_ffff_ffffp48;
0x1.f_ffff_ffff_ffffp52;
0x1.f_ffff_ffff_ffffp56;
0x1.f_ffff_ffff_ffffp63;
0x1.f_ffff_ffff_ffffp64;
0x1.f_ffff_ffff_ffffp68;
0x1p62;
0x1p63;
0x1p64;
] in
test_rs rs;
File.Fmt.stdout |> Fmt.fmt "\n" |> ignore;
let rec test_xs xs = begin
match xs with
| [] -> ()
| x :: xs' -> begin
let r = to_real x in
File.Fmt.stdout
|> Fmt.fmt "to_real "
|> fmt ~alt:true ~zpad:true ~width:16L ~radix:Radix.Hex x
|> Fmt.fmt " -> "
|> Real.fmt ~alt:true ~radix:Radix.Hex r
|> Fmt.fmt "; of_real -> "
|> fmt ~alt:true ~zpad:true ~width:16L ~radix:Radix.Hex (of_real r)
|> Fmt.fmt "\n"
|> ignore;
test_xs xs'
end
end in
let xs = [
zero;
one;
max_value;
] in
test_xs xs
let _ = test ()
|
|
b2bb0bf933c611aa895d16d3d19a30d4535ba3d9e3cbaf5758f3c563b62a441c
|
aarkerio/ZentaurLMS
|
tests_view.clj
|
(ns zentaur.hiccup.tests-view
(:require [hiccup.form :as f]
[hiccup.core :as c]
[clojure.tools.logging :as log]
[hiccup.element :refer [link-to]]
[hiccup.page :refer [include-css include-js]]
[zentaur.hiccup.helpers-view :as hv]))
(defn formatted-test [{:keys [title created_at tags published id subject uurlid level]}]
(let [formatted-date (hv/format-date created_at)]
[:tr
[:td [:a {:href (str "/vclass/tests/edit/" uurlid)} [:img {:src "/img/icon_edit_test.png" :alt "Bearbeiten" :title "Bearbeiten"}]]]
[:td title]
[:td tags]
[:td subject]
[:td level]
[:td formatted-date]
[:td [:a {:href (str "/vclass/tests/exportpdf/" uurlid)} [:img {:src "/img/icon_export_pdf.png" :alt "Export PDF" :title "Export PDF"}]]]
[:td [:a {:href (str "/vclass/tests/exportodt/" uurlid)} [:img {:src "/img/icon_export_odt.png" :alt "Export ODT" :title "Export ODT"}]]]
[:td [:a {:href (str "/vclass/tests/apply/" uurlid)} [:img {:src "/img/icon_apply.png" :alt "Bewerben Sie sich für die Klasse" :title "Bewerben Sie sich für die Klasse"}]]]
[:td [:a {:onclick (str "zentaur.core.deletetest('" uurlid "')")} [:img {:src "/img/icon_delete.png" :alt "Delete test" :title "Delete test"}]]]]))
(defn- ^:private test-new-form [csrf-field subjects levels langs]
[:div.hidden-div {:id "hidden-form"}
[:form {:id "submit-test-form" :action "/vclass/tests" :method "post" :class "css-class-form"}
(f/hidden-field {:value csrf-field} "__anti-forgery-token")
[:label {:for "title"} "Title:"]
[:div.div-separator (f/text-field {:maxlength 150 :size 90 :placeholder "Title"} "title")]
[:label {:for "tags"} "Tags:"]
[:div.div-separator (f/text-field {:maxlength 150 :size 70 :placeholder "Tags"} "tags")]
[:label {:for "subject_id"} "Subject:"]
[:div.div-separator
[:select.form-control.mr-sm-2 {:name "subject_id"}
(for [subject subjects]
[:option {:value (:id subject)} (:subject subject)])]]
[:label {:for "level_id"} "Level:"]
[:div.div-separator
[:select.form-control.mr-sm-2 {:name "level_id"}
(for [level levels]
[:option {:value (:id level)} (:level level)])]]
[:label {:for "lang_id"} "Lang:"]
[:div.div-separator
[:select.form-control.mr-sm-2 {:name "lang_id"}
(for [lang langs]
[:option {:value (:id lang)} (:lang lang)])]]
(f/submit-button {:class "btn btn-outline-success my-2 my-sm-0" :id "button-save" :name "button-save"} "Speichern")]])
(defn index [tests base subjects levels langs]
(let [csrf-field (:csrf-field base)
formatted-tests (for [test tests]
(formatted-test test))]
[:div {:id "cont"}
[:h1 "Dein genialer Quiz Test"]
[:div [:img {:src "/img/icon_add.png" :alt "Quizz test hinzüfugen" :title "Quizz test hinzüfugen" :id "button-show-div"}]]
(test-new-form csrf-field subjects levels langs)
[:div {:id "content"}
[:table {:class "some-table-class"}
[:thead
[:tr
[:th "Bearbeiten"]
[:th "Titel"]
[:th "Stichworte"]
[:th "Fach"]
[:th "Stufe"]
[:th "Erstellt"]
[:th "Export PDF"]
[:th "Export ODF"]
[:th "Apply to Classroom"]
[:th "Löschen"]]]
[:tbody formatted-tests]]]
(hv/pagination "tests")]))
(defn edit [base uurlid]
(let [csrf-field (:csrf-field base)
user-id (-> base :identity :id)]
[:div
[:h1 "Bearbeiten Quizz Test"]
[:div (f/form-to [:id "hidden-form"]
(f/hidden-field {:value csrf-field} "__anti-forgery-token")
(f/hidden-field {:value uurlid} "uurlid")
(f/hidden-field {:value user-id} "user-id"))]
[:div {:id "test-root-app"}]]))
(defn search [base]
(log/info (str ">>> BASEE >>>>> " base))
(let [csrf-field (:csrf-field base)
user-uuid (-> base :identity :uuid)]
[:div
[:h1 "Search and select Questions to create a new test"]
[:div {:id "search-root-app"}]
[:div.hidden-div {:id "csrf-field"} csrf-field]
[:div.hidden-div {:id "user-uuid"} user-uuid]]))
| null |
https://raw.githubusercontent.com/aarkerio/ZentaurLMS/adb43fb879b88d6a35f7f556cb225f7930d524f9/src/clj/zentaur/hiccup/tests_view.clj
|
clojure
|
(ns zentaur.hiccup.tests-view
(:require [hiccup.form :as f]
[hiccup.core :as c]
[clojure.tools.logging :as log]
[hiccup.element :refer [link-to]]
[hiccup.page :refer [include-css include-js]]
[zentaur.hiccup.helpers-view :as hv]))
(defn formatted-test [{:keys [title created_at tags published id subject uurlid level]}]
(let [formatted-date (hv/format-date created_at)]
[:tr
[:td [:a {:href (str "/vclass/tests/edit/" uurlid)} [:img {:src "/img/icon_edit_test.png" :alt "Bearbeiten" :title "Bearbeiten"}]]]
[:td title]
[:td tags]
[:td subject]
[:td level]
[:td formatted-date]
[:td [:a {:href (str "/vclass/tests/exportpdf/" uurlid)} [:img {:src "/img/icon_export_pdf.png" :alt "Export PDF" :title "Export PDF"}]]]
[:td [:a {:href (str "/vclass/tests/exportodt/" uurlid)} [:img {:src "/img/icon_export_odt.png" :alt "Export ODT" :title "Export ODT"}]]]
[:td [:a {:href (str "/vclass/tests/apply/" uurlid)} [:img {:src "/img/icon_apply.png" :alt "Bewerben Sie sich für die Klasse" :title "Bewerben Sie sich für die Klasse"}]]]
[:td [:a {:onclick (str "zentaur.core.deletetest('" uurlid "')")} [:img {:src "/img/icon_delete.png" :alt "Delete test" :title "Delete test"}]]]]))
(defn- ^:private test-new-form [csrf-field subjects levels langs]
[:div.hidden-div {:id "hidden-form"}
[:form {:id "submit-test-form" :action "/vclass/tests" :method "post" :class "css-class-form"}
(f/hidden-field {:value csrf-field} "__anti-forgery-token")
[:label {:for "title"} "Title:"]
[:div.div-separator (f/text-field {:maxlength 150 :size 90 :placeholder "Title"} "title")]
[:label {:for "tags"} "Tags:"]
[:div.div-separator (f/text-field {:maxlength 150 :size 70 :placeholder "Tags"} "tags")]
[:label {:for "subject_id"} "Subject:"]
[:div.div-separator
[:select.form-control.mr-sm-2 {:name "subject_id"}
(for [subject subjects]
[:option {:value (:id subject)} (:subject subject)])]]
[:label {:for "level_id"} "Level:"]
[:div.div-separator
[:select.form-control.mr-sm-2 {:name "level_id"}
(for [level levels]
[:option {:value (:id level)} (:level level)])]]
[:label {:for "lang_id"} "Lang:"]
[:div.div-separator
[:select.form-control.mr-sm-2 {:name "lang_id"}
(for [lang langs]
[:option {:value (:id lang)} (:lang lang)])]]
(f/submit-button {:class "btn btn-outline-success my-2 my-sm-0" :id "button-save" :name "button-save"} "Speichern")]])
(defn index [tests base subjects levels langs]
(let [csrf-field (:csrf-field base)
formatted-tests (for [test tests]
(formatted-test test))]
[:div {:id "cont"}
[:h1 "Dein genialer Quiz Test"]
[:div [:img {:src "/img/icon_add.png" :alt "Quizz test hinzüfugen" :title "Quizz test hinzüfugen" :id "button-show-div"}]]
(test-new-form csrf-field subjects levels langs)
[:div {:id "content"}
[:table {:class "some-table-class"}
[:thead
[:tr
[:th "Bearbeiten"]
[:th "Titel"]
[:th "Stichworte"]
[:th "Fach"]
[:th "Stufe"]
[:th "Erstellt"]
[:th "Export PDF"]
[:th "Export ODF"]
[:th "Apply to Classroom"]
[:th "Löschen"]]]
[:tbody formatted-tests]]]
(hv/pagination "tests")]))
(defn edit [base uurlid]
(let [csrf-field (:csrf-field base)
user-id (-> base :identity :id)]
[:div
[:h1 "Bearbeiten Quizz Test"]
[:div (f/form-to [:id "hidden-form"]
(f/hidden-field {:value csrf-field} "__anti-forgery-token")
(f/hidden-field {:value uurlid} "uurlid")
(f/hidden-field {:value user-id} "user-id"))]
[:div {:id "test-root-app"}]]))
(defn search [base]
(log/info (str ">>> BASEE >>>>> " base))
(let [csrf-field (:csrf-field base)
user-uuid (-> base :identity :uuid)]
[:div
[:h1 "Search and select Questions to create a new test"]
[:div {:id "search-root-app"}]
[:div.hidden-div {:id "csrf-field"} csrf-field]
[:div.hidden-div {:id "user-uuid"} user-uuid]]))
|
|
16da804f58d8da05687c70cae2bd69ed755ded1d65d7a2f15972c9a2a27ac436
|
magicant/flesh
|
Warning.hs
|
Copyright ( C ) 2017 WATANABE >
This program is free software ; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation ; either version 2 of the License , or
( at your option ) any later version .
This program is distributed in the hope that it will be useful ,
but WITHOUT ANY WARRANTY ; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE . See the
GNU General Public License for more details .
You should have received a copy of the GNU General Public License
along with this program . If not , see < / > .
Copyright (C) 2017 WATANABE Yuki <>
This program is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see </>.
-}
{-# LANGUAGE Safe #-}
|
Copyright : ( C ) 2017 WATANABE Yuki
License :
Portability : portable
This module defines warnings and monads to produce them .
Copyright : (C) 2017 WATANABE Yuki
License : GPL-2
Portability : portable
This module defines warnings and monads to produce them.
-}
module Flesh.Language.Parser.Warning (
-- * Warning
Warning(..),
-- * Monad for recording warnings
ReportT) where
import Control.Monad.Writer.Strict (WriterT)
-- | Warning produced during source code parsing.
FIXME
-- | Writer monad specialized for recording warnings.
type ReportT = WriterT [Warning]
-- vim: set et sw=2 sts=2 tw=78:
| null |
https://raw.githubusercontent.com/magicant/flesh/0e76312d291aae8f890ba55d8131ade78600d7e8/src/Flesh/Language/Parser/Warning.hs
|
haskell
|
# LANGUAGE Safe #
* Warning
* Monad for recording warnings
| Warning produced during source code parsing.
| Writer monad specialized for recording warnings.
vim: set et sw=2 sts=2 tw=78:
|
Copyright ( C ) 2017 WATANABE >
This program is free software ; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation ; either version 2 of the License , or
( at your option ) any later version .
This program is distributed in the hope that it will be useful ,
but WITHOUT ANY WARRANTY ; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE . See the
GNU General Public License for more details .
You should have received a copy of the GNU General Public License
along with this program . If not , see < / > .
Copyright (C) 2017 WATANABE Yuki <>
This program is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see </>.
-}
|
Copyright : ( C ) 2017 WATANABE Yuki
License :
Portability : portable
This module defines warnings and monads to produce them .
Copyright : (C) 2017 WATANABE Yuki
License : GPL-2
Portability : portable
This module defines warnings and monads to produce them.
-}
module Flesh.Language.Parser.Warning (
Warning(..),
ReportT) where
import Control.Monad.Writer.Strict (WriterT)
FIXME
type ReportT = WriterT [Warning]
|
0cbeeabccc87813e11cd8f3dbd60660d7d27654780255d84ffdc3da56d75757f
|
naproche/naproche
|
Main.hs
|
-- |
Authors : ( 2001 - 2008 ) ,
( 2017 - 2018 ) ,
( 2018 )
--
Main application entry point : console or server mode .
# LANGUAGE TupleSections #
{-# LANGUAGE OverloadedStrings #-}
module SAD.Main where
import Control.Monad (unless, when)
import Data.Char (toLower)
import Data.Time (UTCTime, addUTCTime, getCurrentTime, diffUTCTime)
import Data.List (isSuffixOf)
import Data.Maybe (mapMaybe)
import Control.Exception qualified as Exception
import Control.Exception (catch)
import Data.Text.Lazy qualified as Text
import System.Console.GetOpt qualified as GetOpt
import System.Environment qualified as Environment
import Network.Socket (Socket)
import SAD.Prove.MESON qualified as MESON
import SAD.Export.Prover qualified as Prover
import SAD.Data.Instr
import SAD.API
import Isabelle.Bytes qualified as Bytes
import Isabelle.Bytes (Bytes)
import Isabelle.Byte_Message qualified as Byte_Message
import Isabelle.Naproche qualified as Naproche
import Isabelle.Server qualified as Server
import Isabelle.Options qualified as Options
import Isabelle.Isabelle_Thread qualified as Isabelle_Thread
import Isabelle.UUID qualified as UUID
import Isabelle.Position qualified as Position
import Isabelle.YXML qualified as YXML
import Isabelle.Process_Result qualified as Process_Result
import Isabelle.Library
import Naproche.Program qualified as Program
import Naproche.Console qualified as Console
import Naproche.Param qualified as Param
main :: IO ()
main = do
Console.setup
-- command line and init file
args0 <- Environment.getArgs
(opts0, pk, fileArg) <- readArgs args0
text0 <- (map (uncurry ProofTextInstr) (reverse opts0) ++) <$> case fileArg of
Nothing -> do
stdin <- getContents
pure [ProofTextInstr Position.none $ GetArgument (Text pk) (Text.pack stdin)]
Just name -> do
pure [ProofTextInstr Position.none $ GetArgument (File pk) (Text.pack name)]
let opts1 = map snd opts0
mesonCache <- MESON.init_cache
proverCache <- Prover.init_cache
if getInstr helpParam opts1 then
putStr (GetOpt.usageInfo usageHeader options)
else -- main body with explicit error handling, notably for PIDE
(if getInstr serverParam opts1 then
Server.server (Server.publish_stdout "Naproche-SAD") (mainServer mesonCache proverCache args0)
else do
Program.init_console
rc <- do
mainBody mesonCache proverCache opts1 text0 fileArg
`catch` (\Exception.UserInterrupt -> do
Program.exit_thread
Console.stderr ("Interrupt" :: String)
return Process_Result.interrupt_rc)
`catch` (\(err :: Exception.SomeException) -> do
Program.exit_thread
Console.stderr (Exception.displayException err)
return 1)
Console.exit rc)
mainServer :: MESON.Cache -> Prover.Cache -> [String] -> Socket -> IO ()
mainServer mesonCache proverCache args0 socket =
let
exchange_message0 = Byte_Message.exchange_message0 socket
robust_error msg =
exchange_message0 [Naproche.output_error_command, msg]
`catch` (\(_ :: Exception.IOException) -> return ())
in
do
chunks <- Byte_Message.read_message socket
case chunks of
Just (command : threads) | command == Naproche.cancel_program ->
mapM_ Isabelle_Thread.stop_uuid (mapMaybe UUID.parse threads)
Just [command, more_args, opts, text] | command == Naproche.forthel_program -> do
let options = Options.decode $ YXML.parse_body opts
Exception.bracket_ (Program.init_pide socket options)
Program.exit_thread
(do
thread_uuid <- Isabelle_Thread.my_uuid
mapM_ (\uuid -> exchange_message0 [Naproche.threads_command, UUID.print uuid]) thread_uuid
let more_text = Text.pack $ make_string text
(opts0, pk, fileArg) <- readArgs (args0 ++ lines (make_string more_args))
let opts1 = map snd opts0
let text0 = map (uncurry ProofTextInstr) (reverse opts0)
let text1 = text0 ++ [ProofTextInstr Position.none (GetArgument (Text pk) more_text)]
rc <- do
mainBody mesonCache proverCache opts1 text1 fileArg
`catch` (\(err :: Program.Error) -> do
robust_error $ Program.print_error err
return 0)
`catch` (\(err :: Exception.SomeException) -> do
robust_error $ make_bytes $ Exception.displayException err
return 0)
when (rc /= 0) $ robust_error "ERROR")
_ -> return ()
mainBody :: MESON.Cache -> Prover.Cache -> [Instr] -> [ProofText] -> Maybe FilePath -> IO Int
mainBody mesonCache proverCache opts0 text0 fileArg = do
startTime <- getCurrentTime
-- parse input text
txts <- readProofText (getInstr libraryParam opts0) text0
case map toLower $make_string $ getInstr theoryParam opts0 of
"fol" -> do
-- if -T / --onlytranslate is passed as an option, only print the translated text
if getInstr onlytranslateParam opts0
then do { showTranslation txts startTime; return 0 }
else do
success <- proveFOL txts opts0 mesonCache proverCache startTime fileArg
MESON.prune_cache mesonCache
Prover.prune_cache proverCache
return (if success then 0 else 1)
"cic" -> return 0
"lean" -> do { exportLean txts; return 0 }
s -> errorWithoutStackTrace ("Bad theory (fol|cic|lean): " <> quote s)
showTranslation :: [ProofText] -> UTCTime -> IO ()
showTranslation txts startTime = do
let timeDifference finishTime = showTimeDiff (diffUTCTime finishTime startTime)
mapM_ (\case ProofTextBlock bl -> print bl; _ -> return ()) txts
-- print statistics
finishTime <- getCurrentTime
outputMain TRACING Position.none $ make_bytes $ "total " <> timeDifference finishTime
exportCiC :: ProofText -> IO ()
exportCiC pt = do
case fmap (unlines . map ppForthelExpr) $ mapM toStatement $ extractBlocks pt of
Left t -> putStrLn $ Text.unpack t
Right s -> putStrLn s
return ()
exportLean :: [ProofText] -> IO ()
exportLean txts = do
case fmap toLeanCode $ mapM toStatement $ concatMap extractBlocks txts of
Left t -> putStrLn $ Text.unpack t
Right t -> putStrLn $ Text.unpack t
return ()
proveFOL :: [ProofText] -> [Instr] -> MESON.Cache -> Prover.Cache -> UTCTime
-> Maybe FilePath -> IO Bool
proveFOL txts opts0 mesonCache proverCache startTime fileArg = do
-- initialize reasoner state
proveStart <- getCurrentTime
(success, trackers) <- case concatMap parseErrors txts of
[] -> do
let file = maybe "" Text.pack fileArg
let filePos = Position.file_only $ make_bytes file
let txts' = ProofTextInstr Position.none (GetArgument (File Ftl) file) : txts
verifyRoot mesonCache proverCache filePos txts'
err : _ -> do
errorParser (errorPos err) (show_bytes err)
pure (False, [])
finishTime <- getCurrentTime
let accumulate = sumCounter trackers
-- print statistics
(outputMain TRACING Position.none . make_bytes) $
"sections " ++ show (accumulate Sections)
++ " - goals " ++ show (accumulate Goals)
++ (case accumulate FailedGoals of
0 -> ""
n -> " - failed " ++ show n)
++ " - trivial " ++ show (accumulate TrivialGoals)
++ " - proved " ++ show (accumulate SuccessfulGoals)
++ " - equations " ++ show (accumulate Equations)
++ (case accumulate FailedEquations of
0 -> ""
n -> " - failed " ++ show n)
let trivialChecks = accumulate TrivialChecks
(outputMain TRACING Position.none . make_bytes) $
"symbols " ++ show (accumulate Symbols)
++ " - checks " ++ show (sumCounter trackers HardChecks + trivialChecks)
++ " - trivial " ++ show trivialChecks
++ " - proved " ++ show (accumulate SuccessfulChecks)
++ " - unfolds " ++ show (accumulate Unfolds)
let proverTime = sumTimer trackers ProofTimer
let simplifyTime = sumTimer trackers SimplifyTimer
let proveFinish = addUTCTime proverTime proveStart
let simplifyFinish = addUTCTime simplifyTime proveFinish
(outputMain TRACING Position.none . make_bytes) $
"parser " <> showTimeDiff (diffUTCTime proveStart startTime)
<> " - reasoner " <> showTimeDiff (diffUTCTime finishTime simplifyFinish)
<> " - simplifier " <> showTimeDiff simplifyTime
<> " - prover " <> showTimeDiff proverTime
<> "/" <> showTimeDiff (maximalTimer trackers SuccessTimer)
(outputMain TRACING Position.none . make_bytes) $
"total " <> showTimeDiff (diffUTCTime finishTime startTime)
return success
-- Command line parsing
readArgs :: [String] -> IO ([(Position.T, Instr)], ParserKind, Maybe FilePath)
readArgs args = do
let (instrs, files, errs) = GetOpt.getOpt GetOpt.Permute options args
let fail msgs = errorWithoutStackTrace (unlines (map trim_line msgs))
unless (null errs) $ fail errs
initFile <- readInit (getInstr initParam instrs)
let initialOpts = initFile ++ map (Position.none,) instrs
let revInitialOpts = reverse initialOpts
let useTexArg = getInstr texParam $ map snd revInitialOpts
let fileArg =
case files of
[file] -> Just file
[] -> Nothing
_ -> fail ["More than one file argument\n"]
let parserKind =
if useTexArg || maybe False (\f -> ".tex.ftl" `isSuffixOf` f || ".ftl.tex" `isSuffixOf` f) fileArg
then Tex else Ftl
pure (revInitialOpts, parserKind, fileArg)
usageHeader :: String
usageHeader =
"\nUsage: Naproche-SAD <options...> <file...>\n\n At most one file argument may be given; \"\" refers to stdin.\n\n FLAG may be {on|off} or {yes|no}.\n\n THEORY may be:\n fol (First-Order-Logic)\n cic (Calculus of inductive Constructions)\n lean (Lean Prover)\n\n Options are:\n"
optParam :: [Char] -> Param.T a -> GetOpt.ArgDescr b -> String -> GetOpt.OptDescr b
optParam chars p = GetOpt.Option chars [name | not (null name)]
where name = make_string $ Param.name p
optSwitch :: [Char] -> Param.T Bool -> Bool -> Bytes -> GetOpt.OptDescr Instr
optSwitch chars p b s = optParam chars (if b then p else Param.unnamed p) arg s'
where arg = GetOpt.NoArg (SetBool p b)
s' = make_string (if Bytes.null s then Param.description p else s)
optFlag :: [Char] -> Param.T Bool -> GetOpt.OptDescr Instr
optFlag chars p = optParam chars p arg s
where arg = GetOpt.ReqArg (SetBool p . Param.parse p . make_bytes) "FLAG"
s = make_string $ Param.description_default p
optNat :: [Char] -> Param.T Int -> GetOpt.OptDescr Instr
optNat chars p = optParam chars p arg s
where arg = GetOpt.ReqArg (SetInt p . Param.parse p . make_bytes) "N"
s = make_string $ Param.description_default p
optArgument :: [Char] -> Param.T Bytes -> String -> GetOpt.OptDescr Instr
optArgument chars p a = optParam chars p arg s
where arg = GetOpt.ReqArg (SetBytes p . make_bytes) a
s = make_string $ Param.description_default p
options :: [GetOpt.OptDescr Instr]
options = [
optSwitch "h" helpParam True "",
optArgument "" initParam "FILE",
optSwitch "T" onlytranslateParam True "",
optFlag "" translationParam,
optSwitch "" serverParam True "",
optArgument "" libraryParam "DIR",
optArgument "P" proverParam "NAME",
optNat "t" timelimitParam,
optNat "m" memorylimitParam,
optNat "" depthlimitParam,
optNat "" checktimeParam,
optNat "" checkdepthParam,
optSwitch "n" proveParam False "cursory mode (equivalent to --prove=off)",
optSwitch "r" checkParam False "raw mode (equivalent to --check=off)",
optFlag "" proveParam,
optArgument "" theoryParam "THEORY",
optFlag "" checkParam,
optFlag "" symsignParam,
optFlag "" infoParam,
optFlag "" thesisParam,
optFlag "" filterParam,
optFlag "" skipfailParam,
optFlag "" flatParam,
GetOpt.Option "q" [] (GetOpt.NoArg (Verbose False)) "print no details",
GetOpt.Option "v" [] (GetOpt.NoArg (Verbose True)) "print more details",
optFlag "" printgoalParam,
optFlag "" printreasonParam,
optFlag "" printsectionParam,
optFlag "" printcheckParam,
optFlag "" printproverParam,
optFlag "" printunfoldParam,
optFlag "" printfulltaskParam,
optFlag "" printsimpParam,
optFlag "" printthesisParam,
optFlag "" unfoldlowParam,
optFlag "" unfoldParam,
optFlag "" unfoldsfParam,
optFlag "" unfoldlowsfParam,
optFlag "" dumpParam,
optFlag "" texParam]
| null |
https://raw.githubusercontent.com/naproche/naproche/6284a64b4b84eaa53dd0eb7ecb39737fb9135a0d/src/SAD/Main.hs
|
haskell
|
|
# LANGUAGE OverloadedStrings #
command line and init file
main body with explicit error handling, notably for PIDE
parse input text
if -T / --onlytranslate is passed as an option, only print the translated text
print statistics
initialize reasoner state
print statistics
Command line parsing
|
Authors : ( 2001 - 2008 ) ,
( 2017 - 2018 ) ,
( 2018 )
Main application entry point : console or server mode .
# LANGUAGE TupleSections #
module SAD.Main where
import Control.Monad (unless, when)
import Data.Char (toLower)
import Data.Time (UTCTime, addUTCTime, getCurrentTime, diffUTCTime)
import Data.List (isSuffixOf)
import Data.Maybe (mapMaybe)
import Control.Exception qualified as Exception
import Control.Exception (catch)
import Data.Text.Lazy qualified as Text
import System.Console.GetOpt qualified as GetOpt
import System.Environment qualified as Environment
import Network.Socket (Socket)
import SAD.Prove.MESON qualified as MESON
import SAD.Export.Prover qualified as Prover
import SAD.Data.Instr
import SAD.API
import Isabelle.Bytes qualified as Bytes
import Isabelle.Bytes (Bytes)
import Isabelle.Byte_Message qualified as Byte_Message
import Isabelle.Naproche qualified as Naproche
import Isabelle.Server qualified as Server
import Isabelle.Options qualified as Options
import Isabelle.Isabelle_Thread qualified as Isabelle_Thread
import Isabelle.UUID qualified as UUID
import Isabelle.Position qualified as Position
import Isabelle.YXML qualified as YXML
import Isabelle.Process_Result qualified as Process_Result
import Isabelle.Library
import Naproche.Program qualified as Program
import Naproche.Console qualified as Console
import Naproche.Param qualified as Param
main :: IO ()
main = do
Console.setup
args0 <- Environment.getArgs
(opts0, pk, fileArg) <- readArgs args0
text0 <- (map (uncurry ProofTextInstr) (reverse opts0) ++) <$> case fileArg of
Nothing -> do
stdin <- getContents
pure [ProofTextInstr Position.none $ GetArgument (Text pk) (Text.pack stdin)]
Just name -> do
pure [ProofTextInstr Position.none $ GetArgument (File pk) (Text.pack name)]
let opts1 = map snd opts0
mesonCache <- MESON.init_cache
proverCache <- Prover.init_cache
if getInstr helpParam opts1 then
putStr (GetOpt.usageInfo usageHeader options)
(if getInstr serverParam opts1 then
Server.server (Server.publish_stdout "Naproche-SAD") (mainServer mesonCache proverCache args0)
else do
Program.init_console
rc <- do
mainBody mesonCache proverCache opts1 text0 fileArg
`catch` (\Exception.UserInterrupt -> do
Program.exit_thread
Console.stderr ("Interrupt" :: String)
return Process_Result.interrupt_rc)
`catch` (\(err :: Exception.SomeException) -> do
Program.exit_thread
Console.stderr (Exception.displayException err)
return 1)
Console.exit rc)
mainServer :: MESON.Cache -> Prover.Cache -> [String] -> Socket -> IO ()
mainServer mesonCache proverCache args0 socket =
let
exchange_message0 = Byte_Message.exchange_message0 socket
robust_error msg =
exchange_message0 [Naproche.output_error_command, msg]
`catch` (\(_ :: Exception.IOException) -> return ())
in
do
chunks <- Byte_Message.read_message socket
case chunks of
Just (command : threads) | command == Naproche.cancel_program ->
mapM_ Isabelle_Thread.stop_uuid (mapMaybe UUID.parse threads)
Just [command, more_args, opts, text] | command == Naproche.forthel_program -> do
let options = Options.decode $ YXML.parse_body opts
Exception.bracket_ (Program.init_pide socket options)
Program.exit_thread
(do
thread_uuid <- Isabelle_Thread.my_uuid
mapM_ (\uuid -> exchange_message0 [Naproche.threads_command, UUID.print uuid]) thread_uuid
let more_text = Text.pack $ make_string text
(opts0, pk, fileArg) <- readArgs (args0 ++ lines (make_string more_args))
let opts1 = map snd opts0
let text0 = map (uncurry ProofTextInstr) (reverse opts0)
let text1 = text0 ++ [ProofTextInstr Position.none (GetArgument (Text pk) more_text)]
rc <- do
mainBody mesonCache proverCache opts1 text1 fileArg
`catch` (\(err :: Program.Error) -> do
robust_error $ Program.print_error err
return 0)
`catch` (\(err :: Exception.SomeException) -> do
robust_error $ make_bytes $ Exception.displayException err
return 0)
when (rc /= 0) $ robust_error "ERROR")
_ -> return ()
mainBody :: MESON.Cache -> Prover.Cache -> [Instr] -> [ProofText] -> Maybe FilePath -> IO Int
mainBody mesonCache proverCache opts0 text0 fileArg = do
startTime <- getCurrentTime
txts <- readProofText (getInstr libraryParam opts0) text0
case map toLower $make_string $ getInstr theoryParam opts0 of
"fol" -> do
if getInstr onlytranslateParam opts0
then do { showTranslation txts startTime; return 0 }
else do
success <- proveFOL txts opts0 mesonCache proverCache startTime fileArg
MESON.prune_cache mesonCache
Prover.prune_cache proverCache
return (if success then 0 else 1)
"cic" -> return 0
"lean" -> do { exportLean txts; return 0 }
s -> errorWithoutStackTrace ("Bad theory (fol|cic|lean): " <> quote s)
showTranslation :: [ProofText] -> UTCTime -> IO ()
showTranslation txts startTime = do
let timeDifference finishTime = showTimeDiff (diffUTCTime finishTime startTime)
mapM_ (\case ProofTextBlock bl -> print bl; _ -> return ()) txts
finishTime <- getCurrentTime
outputMain TRACING Position.none $ make_bytes $ "total " <> timeDifference finishTime
exportCiC :: ProofText -> IO ()
exportCiC pt = do
case fmap (unlines . map ppForthelExpr) $ mapM toStatement $ extractBlocks pt of
Left t -> putStrLn $ Text.unpack t
Right s -> putStrLn s
return ()
exportLean :: [ProofText] -> IO ()
exportLean txts = do
case fmap toLeanCode $ mapM toStatement $ concatMap extractBlocks txts of
Left t -> putStrLn $ Text.unpack t
Right t -> putStrLn $ Text.unpack t
return ()
proveFOL :: [ProofText] -> [Instr] -> MESON.Cache -> Prover.Cache -> UTCTime
-> Maybe FilePath -> IO Bool
proveFOL txts opts0 mesonCache proverCache startTime fileArg = do
proveStart <- getCurrentTime
(success, trackers) <- case concatMap parseErrors txts of
[] -> do
let file = maybe "" Text.pack fileArg
let filePos = Position.file_only $ make_bytes file
let txts' = ProofTextInstr Position.none (GetArgument (File Ftl) file) : txts
verifyRoot mesonCache proverCache filePos txts'
err : _ -> do
errorParser (errorPos err) (show_bytes err)
pure (False, [])
finishTime <- getCurrentTime
let accumulate = sumCounter trackers
(outputMain TRACING Position.none . make_bytes) $
"sections " ++ show (accumulate Sections)
++ " - goals " ++ show (accumulate Goals)
++ (case accumulate FailedGoals of
0 -> ""
n -> " - failed " ++ show n)
++ " - trivial " ++ show (accumulate TrivialGoals)
++ " - proved " ++ show (accumulate SuccessfulGoals)
++ " - equations " ++ show (accumulate Equations)
++ (case accumulate FailedEquations of
0 -> ""
n -> " - failed " ++ show n)
let trivialChecks = accumulate TrivialChecks
(outputMain TRACING Position.none . make_bytes) $
"symbols " ++ show (accumulate Symbols)
++ " - checks " ++ show (sumCounter trackers HardChecks + trivialChecks)
++ " - trivial " ++ show trivialChecks
++ " - proved " ++ show (accumulate SuccessfulChecks)
++ " - unfolds " ++ show (accumulate Unfolds)
let proverTime = sumTimer trackers ProofTimer
let simplifyTime = sumTimer trackers SimplifyTimer
let proveFinish = addUTCTime proverTime proveStart
let simplifyFinish = addUTCTime simplifyTime proveFinish
(outputMain TRACING Position.none . make_bytes) $
"parser " <> showTimeDiff (diffUTCTime proveStart startTime)
<> " - reasoner " <> showTimeDiff (diffUTCTime finishTime simplifyFinish)
<> " - simplifier " <> showTimeDiff simplifyTime
<> " - prover " <> showTimeDiff proverTime
<> "/" <> showTimeDiff (maximalTimer trackers SuccessTimer)
(outputMain TRACING Position.none . make_bytes) $
"total " <> showTimeDiff (diffUTCTime finishTime startTime)
return success
-- | Parse raw command-line arguments into the (reversed) list of positioned
--   instructions, the parser dialect to use, and the optional single file
--   argument.  Dies via 'errorWithoutStackTrace' on option errors or when
--   more than one file argument is given.
readArgs :: [String] -> IO ([(Position.T, Instr)], ParserKind, Maybe FilePath)
readArgs args = do
  let (instrs, files, errs) = GetOpt.getOpt GetOpt.Permute options args
  -- 'fail' is pure bottom; the branch below only fires when its binding is forced
  let fail msgs = errorWithoutStackTrace (unlines (map trim_line msgs))
  unless (null errs) $ fail errs
  -- instructions read from the init file precede the command-line ones
  initFile <- readInit (getInstr initParam instrs)
  let initialOpts = initFile ++ map (Position.none,) instrs
  let revInitialOpts = reverse initialOpts
  let useTexArg = getInstr texParam $ map snd revInitialOpts
  let fileArg =
        case files of
          [file] -> Just file
          [] -> Nothing
          _ -> fail ["More than one file argument\n"]
  -- TeX dialect is selected either explicitly (tex option) or by file suffix
  let parserKind =
        if useTexArg || maybe False (\f -> ".tex.ftl" `isSuffixOf` f || ".ftl.tex" `isSuffixOf` f) fileArg
        then Tex else Ftl
  pure (revInitialOpts, parserKind, fileArg)
-- | Header text printed above the GetOpt-generated option summary.
usageHeader :: String
usageHeader =
  "\nUsage: Naproche-SAD <options...> <file...>\n\n At most one file argument may be given; \"\" refers to stdin.\n\n FLAG may be {on|off} or {yes|no}.\n\n THEORY may be:\n fol (First-Order-Logic)\n cic (Calculus of inductive Constructions)\n lean (Lean Prover)\n\n Options are:\n"
-- | Build a GetOpt option from a 'Param.T': the parameter's name becomes the
--   long option name (omitted when empty).
optParam :: [Char] -> Param.T a -> GetOpt.ArgDescr b -> String -> GetOpt.OptDescr b
optParam chars p =
  let longName = make_string (Param.name p)
      longNames = if null longName then [] else [longName]
  in GetOpt.Option chars longNames
-- | A no-argument switch that sets a boolean parameter to @b@; when @b@ is
--   'False' the long name is suppressed via 'Param.unnamed'.  A non-empty @s@
--   overrides the parameter's own description.
optSwitch :: [Char] -> Param.T Bool -> Bool -> Bytes -> GetOpt.OptDescr Instr
optSwitch chars p b s =
  let param = if b then p else Param.unnamed p
      descr
        | Bytes.null s = Param.description p
        | otherwise    = s
  in optParam chars param (GetOpt.NoArg (SetBool p b)) (make_string descr)
-- | A boolean option taking an explicit FLAG argument (on/off, yes/no).
optFlag :: [Char] -> Param.T Bool -> GetOpt.OptDescr Instr
optFlag chars p =
  optParam chars p
    (GetOpt.ReqArg (\str -> SetBool p (Param.parse p (make_bytes str))) "FLAG")
    (make_string (Param.description_default p))
-- | An option taking a numeric argument N, stored as an 'Int' parameter.
optNat :: [Char] -> Param.T Int -> GetOpt.OptDescr Instr
optNat chars p =
  optParam chars p
    (GetOpt.ReqArg (\str -> SetInt p (Param.parse p (make_bytes str))) "N")
    (make_string (Param.description_default p))
-- | An option taking a free-form argument (displayed as @a@ in the usage text),
--   stored as a 'Bytes' parameter.
optArgument :: [Char] -> Param.T Bytes -> String -> GetOpt.OptDescr Instr
optArgument chars p a =
  optParam chars p
    (GetOpt.ReqArg (\str -> SetBytes p (make_bytes str)) a)
    (make_string (Param.description_default p))
-- | All command-line options understood by the executable
--   (consumed by 'readArgs' via 'GetOpt.getOpt').
options :: [GetOpt.OptDescr Instr]
options = [
  -- general
  optSwitch "h" helpParam True "",
  optArgument "" initParam "FILE",
  -- operating mode
  optSwitch "T" onlytranslateParam True "",
  optFlag "" translationParam,
  optSwitch "" serverParam True "",
  optArgument "" libraryParam "DIR",
  -- prover configuration
  optArgument "P" proverParam "NAME",
  optNat "t" timelimitParam,
  optNat "m" memorylimitParam,
  optNat "" depthlimitParam,
  optNat "" checktimeParam,
  optNat "" checkdepthParam,
  -- checking modes
  optSwitch "n" proveParam False "cursory mode (equivalent to --prove=off)",
  optSwitch "r" checkParam False "raw mode (equivalent to --check=off)",
  optFlag "" proveParam,
  optArgument "" theoryParam "THEORY",
  optFlag "" checkParam,
  optFlag "" symsignParam,
  optFlag "" infoParam,
  optFlag "" thesisParam,
  optFlag "" filterParam,
  optFlag "" skipfailParam,
  optFlag "" flatParam,
  -- verbosity / tracing
  GetOpt.Option "q" [] (GetOpt.NoArg (Verbose False)) "print no details",
  GetOpt.Option "v" [] (GetOpt.NoArg (Verbose True)) "print more details",
  optFlag "" printgoalParam,
  optFlag "" printreasonParam,
  optFlag "" printsectionParam,
  optFlag "" printcheckParam,
  optFlag "" printproverParam,
  optFlag "" printunfoldParam,
  optFlag "" printfulltaskParam,
  optFlag "" printsimpParam,
  optFlag "" printthesisParam,
  -- definition unfolding
  optFlag "" unfoldlowParam,
  optFlag "" unfoldParam,
  optFlag "" unfoldsfParam,
  optFlag "" unfoldlowsfParam,
  optFlag "" dumpParam,
  optFlag "" texParam]
|
3925dc2f92934796d76b4dc7cb06f8bb94fe9ba551ea23f470c97043ec48ad37
|
mveety/lispmake
|
plugin.lisp
|
(in-package :lispmake)
lispmake , written by , et al .
( c ) 2012 - 2021 . Under BSD License .
(defun load-plugin (name file toplevel init-toplevel)
  "Load the plugin NAME (a keyword) from FILE, register TOPLEVEL as its
handler in *PLUGINS*, then call INIT-TOPLEVEL.  TOPLEVEL and INIT-TOPLEVEL
arrive wrapped in one-element lists, hence the CAR below."
  (setf toplevel (car toplevel))
  (setf init-toplevel (car init-toplevel))
  (lm-debug "load-plugin" "loading a plugin")
  ;; fix: the two unconditional `(format t "debug: ...")` leftovers were
  ;; removed; diagnostic output is now gated on *debugging* like everywhere else
  (if *debugging*
      (format t
              "lispmake: debug: installing plugin ~A from file ~A with
toplevel ~A and running ~A~%"
              name file toplevel init-toplevel))
  ;; fix: use KEYWORDP for the type check, consistent with INSTALL-PLUGIN
  ;; (behaviourally identical to comparing TYPE-OF against 'KEYWORD)
  (if (keywordp name)
      (progn
        (load file)
        ;; each registry entry is a (name toplevel) pair
        (setf *plugins* (append *plugins* (list (list name toplevel))))
        (lm-debug "load-plugin" "running toplevel function")
        (funcall init-toplevel))
      (lm-error "load-plugin" "arg name should be type keyword")))
(defun install-plugin (name toplevel)
  "Register TOPLEVEL (a function or symbol) as the handler for plugin NAME.
NAME must be a keyword.  The entry is appended to *PLUGINS* as a
\(NAME TOPLEVEL) pair, matching the registry format used by LOAD-PLUGIN."
  (if *debugging*
      (format t "lispmake: debug: installing plugin ~A with toplevel ~A~%"
              name (if (functionp toplevel)
                       "#<FUNCTION>"
                       toplevel)))
  (if (or (functionp toplevel)
          (symbolp toplevel))
      (if (keywordp name)
          ;; fix: append the pair as ONE registry entry; previously NAME and
          ;; TOPLEVEL were appended as two flat elements, which is inconsistent
          ;; with LOAD-PLUGIN's (list (list name toplevel)) format
          (setf *plugins* (append *plugins* (list (list name toplevel))))
          ;; fix: typo "should by" -> "should be"
          (lm-error "install-plugin" "arg name should be type keyword"))
      (lm-error "install-plugin" "arg toplevel should be type symbol")))
;; Convenience wrapper around INSTALL-PLUGIN: wraps LIST-OF-FORMS in a
;; one-argument lambda and registers it as the handler for plugin NAME.
;; NOTE: the lambda argument is anaphorically named ARGS, so the body forms
;; may (and are expected to) refer to ARGS directly.
(defmacro install-fn-plugin (name &body list-of-forms)
  `(install-plugin
    ,name
    (lambda (args) ,@list-of-forms)))
(defun run-plugin-pregen (x)
  "Invoke every registered pre-generation hook, with *STANDARD-OUTPUT*
rebound to the stream X for the duration.  Returns NIL."
  (lm-debug "run-plugin-pregen" "running pre-generation hooks")
  (let ((*standard-output* x))
    ;; DOLIST over an empty list is a no-op returning NIL, so no guard is needed
    (dolist (hook *pregen-hooks*)
      (funcall hook))))
(defun run-plugin-postgen (x)
  "Invoke every registered post-generation hook, with *STANDARD-OUTPUT*
rebound to the stream X for the duration.  Returns NIL."
  (lm-debug "run-plugin-postgen" "running post-generation hooks")
  (let ((*standard-output* x))
    ;; DOLIST over an empty list is a no-op returning NIL, so no guard is needed
    (dolist (hook *postgen-hooks*)
      (funcall hook))))
(defun install-pregen-hook (fname)
  "Append FNAME to *PREGEN-HOOKS*.  Warns (and installs nothing) unless
TYPE-OF reports FNAME as a plain SYMBOL; note this rejects NIL and keywords."
  (lm-debug "install-pregen-hook" (format nil "adding pre-generation hook ~A" fname))
  (cond ((equal (type-of fname) 'symbol)
         (setf *pregen-hooks* (append *pregen-hooks* (list fname))))
        (t
         (lm-warning "install-pregen-hook" "fname is not of type symbol"))))
(defun install-postgen-hook (fname)
  "Append FNAME to *POSTGEN-HOOKS*.  Warns (and installs nothing) unless
TYPE-OF reports FNAME as a plain SYMBOL; note this rejects NIL and keywords."
  (lm-debug "install-postgen-hook" (format nil "adding post-generation hook ~A" fname))
  (cond ((equal (type-of fname) 'symbol)
         (setf *postgen-hooks* (append *postgen-hooks* (list fname))))
        (t
         (lm-warning "install-postgen-hook" "fname is not of type symbol"))))
(defun pl-plugin (args)
  "Parser entry point for a plugin definition: ARGS must be a four-element
list (NAME FILE TOPLEVEL INIT-TOPLEVEL), which is handed to LOAD-PLUGIN."
  (if (equal (length args) 4)
      (destructuring-bind (name file toplevel init-toplevel) args
        (load-plugin name file toplevel init-toplevel))
      (lm-error "pl-plugin" "error parsing plugin def")))
| null |
https://raw.githubusercontent.com/mveety/lispmake/3eb4e8ac280de3c31b4826424093b11c7dbbf289/plugin.lisp
|
lisp
|
(in-package :lispmake)
lispmake , written by , et al .
( c ) 2012 - 2021 . Under BSD License .
(defun load-plugin (name file toplevel init-toplevel)
(setf toplevel (car toplevel))
(setf init-toplevel (car init-toplevel))
(lm-debug "load-plugin" "loading a plugin")
(format t "debug: toplevel=~A init-toplevel=~A~%"
(type-of toplevel) (type-of init-toplevel))
(format t "debug: toplevel=~A init-toplevel=~A~%"
toplevel init-toplevel)
(if *debugging*
(format t
"lispmake: debug: installing plugin ~A from file ~A with
toplevel ~A and running ~A~%"
name file toplevel init-toplevel))
(if (equal (type-of name) 'keyword)
(progn
(load file)
(setf *plugins* (append *plugins* (list (list name toplevel))))
(lm-debug "load-plugin" "running toplevel function")
(funcall init-toplevel))
(lm-error "load-plugin" "arg name should be type keyword")))
(defun install-plugin (name toplevel)
(if *debugging*
(format t "lispmake: debug: installing plugin ~A with toplevel ~A~%"
name (if (functionp toplevel)
"#<FUNCTION>"
toplevel)))
(if (or (functionp toplevel)
(symbolp toplevel))
(if (keywordp name)
(setf *plugins* (append *plugins* (list name toplevel)))
(lm-error "install-plugin" "arg name should by type keyword"))
(lm-error "install-plugin" "arg toplevel should be type symbol")))
(defmacro install-fn-plugin (name &body list-of-forms)
`(install-plugin
,name
(lambda (args) ,@list-of-forms)))
(defun run-plugin-pregen (x)
(lm-debug "run-plugin-pregen" "running pre-generation hooks")
(let ((*standard-output* x))
(if (not (equal *pregen-hooks* nil))
(dolist (y *pregen-hooks*)
(funcall y))
nil)))
(defun run-plugin-postgen (x)
(lm-debug "run-plugin-postgen" "running post-generation hooks")
(let ((*standard-output* x))
(if (not (equal *postgen-hooks* nil))
(dolist (y *postgen-hooks*)
(funcall y))
nil)))
(defun install-pregen-hook (fname)
(lm-debug "install-pregen-hook" (format nil "adding pre-generation hook ~A" fname))
(if (not (equal (type-of fname) 'symbol))
(lm-warning "install-pregen-hook" "fname is not of type symbol")
(setf *pregen-hooks* (append *pregen-hooks* (list fname)))))
(defun install-postgen-hook (fname)
(lm-debug "install-postgen-hook" (format nil "adding post-generation hook ~A" fname))
(if (not (equal (type-of fname) 'symbol))
(lm-warning "install-postgen-hook" "fname is not of type symbol")
(setf *postgen-hooks* (append *postgen-hooks* (list fname)))))
(defun pl-plugin (args)
(if (not (equal (length args) 4))
(lm-error "pl-plugin" "error parsing plugin def")
(load-plugin (car args) (cadr args) (caddr args) (cadddr args))))
|
|
cd96816063bb0c0b037ace382e8e4f4e19d220c32565af050840d8a200052465
|
leftaroundabout/manifolds
|
PseudoAffine.hs
|
-- |
-- Module : Math.Manifold.Core.PseudoAffine
Copyright : ( c ) 2016
-- License : GPL v3
--
-- Maintainer : (@) jsag $ hvl.no
-- Stability : experimental
-- Portability : portable
--
# LANGUAGE FlexibleInstances #
# LANGUAGE UndecidableInstances #
{-# LANGUAGE TypeFamilies #-}
# LANGUAGE FlexibleContexts #
{-# LANGUAGE GADTs #-}
# LANGUAGE DefaultSignatures #
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE StandaloneDeriving #-}
# LANGUAGE UnicodeSyntax #
{-# LANGUAGE EmptyCase #-}
{-# LANGUAGE ConstraintKinds #-}
{-# LANGUAGE ScopedTypeVariables #-}
{-# LANGUAGE TypeOperators #-}
{-# LANGUAGE CPP #-}
module Math.Manifold.Core.PseudoAffine where
import Data.VectorSpace
import Data.AffineSpace
import Data.Basis
import Data.Fixed (mod')
import Data.Void
import Math.Manifold.Core.Types.Internal
import Math.Manifold.VectorSpace.ZeroDimensional
import Control.Applicative
import Control.Arrow
import qualified GHC.Generics as Gnrx
import GHC.Generics (Generic, (:*:)(..))
import Data.CallStack (HasCallStack)
-- | Constraint synonym for scalar types that are themselves flat one-dimensional
--   manifolds (their needle is the type itself).
type ℝeal r = (RealFloat r, PseudoAffine r, Semimanifold r, Needle r ~ r)

-- | This is the reified form of the property that the interior of a semimanifold
--   is a manifold. These constraints would ideally be expressed directly as
--   superclass constraints, but that would require the @UndecidableSuperclasses@
--   extension, which is not reliable yet.
--
--   Also, if all those equality constraints are in scope, GHC tends to infer needlessly
--   complicated types like @'Needle' ('Needle' ('Needle' x))@, which is
--   the same as just @'Needle' x@.
data SemimanifoldWitness x where
  SemimanifoldWitness ::
     ( Semimanifold (Needle x)
     , Needle (Needle x) ~ Needle x )
     => SemimanifoldWitness x

-- | Reified evidence that the needle space is itself 'PseudoAffine'.
data PseudoAffineWitness x where
  PseudoAffineWitness :: PseudoAffine (Needle x)
     => SemimanifoldWitness x -> PseudoAffineWitness x

infix 6 .-~., .-~!
infixl 6 .+~^, .-~^
class AdditiveGroup (Needle x) => Semimanifold x where
  -- | The space of “ways” starting from some reference point
  --   and going to some particular target point. Hence,
  --   the name: like a compass needle, but also with an actual length.
  --   For affine spaces, 'Needle' is simply the space of
  --   line segments (aka vectors) between two points, i.e. the same as 'Diff'.
  --   The 'AffineManifold' constraint makes that requirement explicit.
  --
  --   This space should be isomorphic to the tangent space (and in fact
  --   serves an in many ways similar role), however whereas the tangent space
  --   of a manifold is really infinitesimally small, needles actually allow
  --   macroscopic displacements.
  type Needle x :: *
  type Needle x = GenericNeedle x  -- default for 'Generic'-derived instances

  -- | Generalisation of the translation operation '.+^' to possibly non-flat
  --   manifolds, instead of affine spaces.
  (.+~^) :: x -> Needle x -> x
  -- The default implementation works on the 'Generic' representation.
  default (.+~^) :: ( Generic x, Semimanifold (VRep x)
                    , Needle x ~ GenericNeedle x )
                   => x -> Needle x -> x
  p.+~^GenericNeedle v = Gnrx.to (Gnrx.from p.+~^v :: Gnrx.Rep x Void)

  -- | Shorthand for @\\p v -> p .+~^ 'negateV' v@, which should obey the /asymptotic/ law
  --
  -- @
  -- p .-~^ v .+~^ v ≅ p
  -- @
  --
  --   Meaning: if @v@ is scaled down with sufficiently small factors /η/, then
  --   the difference @(p.-~^v.+~^v) .-~. p@ should eventually scale down even faster:
  --   as /O/ (/η/²). For large vectors, it may however behave differently,
  --   except in flat spaces (where all this should be equivalent to the 'AffineSpace'
  --   instance).
  (.-~^) :: x -> Needle x -> x
  p .-~^ v = p .+~^ negateV v

  semimanifoldWitness :: SemimanifoldWitness x
  default semimanifoldWitness ::
      ( Semimanifold (Needle x), Needle (Needle x) ~ Needle x )
       => SemimanifoldWitness x
  semimanifoldWitness = SemimanifoldWitness
-- | This is the class underlying what we understand as manifolds.
--
-- The interface is almost identical to the better-known
-- 'AffineSpace' class, but we don't require associativity of '.+~^' with '^+^'
-- – except in an /asymptotic sense/ for small vectors.
--
-- That innocent-looking change makes the class applicable to vastly more general types:
-- while an affine space is basically nothing but a vector space without particularly
-- designated origin, a pseudo-affine space can have nontrivial topology on the global
-- scale, and yet be used in practically the same way as an affine space. At least the
-- usual spheres and tori make good instances, perhaps the class is in fact equivalent to
-- manifolds in their usual maths definition (with an atlas of charts: a family of
-- overlapping regions of the topological space, each homeomorphic to the 'Needle'
-- vector space or some simply-connected subset thereof).
--
-- The 'Semimanifold' and 'PseudoAffine' classes can be @anyclass@-derived
-- or empty-instantiated based on 'Generic' for product types (including newtypes) of
-- existing 'PseudoAffine' instances. For example, the definition
--
-- @
-- data Cylinder = CylinderPolar { zCyl :: !D¹, φCyl :: !S¹ }
--   deriving (Generic, Semimanifold, PseudoAffine)
-- @
--
-- is equivalent to
--
-- @
-- data Cylinder = CylinderPolar { zCyl :: !D¹, φCyl :: !S¹ }
--
-- data CylinderNeedle = CylinderPolarNeedle { δzCyl :: !(Needle D¹), δφCyl :: !(Needle S¹) }
--
-- instance Semimanifold Cylinder where
--   type Needle Cylinder = CylinderNeedle
--   CylinderPolar z φ .+~^ CylinderPolarNeedle δz δφ
--       = CylinderPolar (z.+~^δz) (φ.+~^δφ)
--
-- instance PseudoAffine Cylinder where
--   CylinderPolar z₁ φ₁ .-~. CylinderPolar z₀ φ₀
--       = CylinderPolarNeedle <$> z₁.-~.z₀ <*> φ₁.-~.φ₀
--   CylinderPolar z₁ φ₁ .-~! CylinderPolar z₀ φ₀
--       = CylinderPolarNeedle (z₁.-~!z₀) (φ₁.-~!φ₀)
-- @
class Semimanifold x => PseudoAffine x where
  -- | The path reaching from one point to another.
  --   Should only yield 'Nothing' if the points are on disjoint segments
  --   of a non–path-connected space.
  --
  --   For a connected manifold, you may define this method as
  --
  -- @
  --   p.-~.q = pure (p.-~!q)
  -- @
  (.-~.) :: x -> x -> Maybe (Needle x)
  -- The default implementation works on the 'Generic' representation.
  default (.-~.) :: ( Generic x, PseudoAffine (VRep x)
                    , Needle x ~ GenericNeedle x )
              => x -> x -> Maybe (Needle x)
  p.-~.q = GenericNeedle <$> Gnrx.from p .-~. (Gnrx.from q :: Gnrx.Rep x Void)

  -- | Unsafe version of '.-~.'. If the two points lie in disjoint regions,
  --   the behaviour is undefined.
  --
  --   Whenever @p@ and @q@ lie in a connected region, the identity
  --
  -- @
  -- p .+~^ (q.-~.p) ≡ q
  -- @
  --
  --   should hold (up to possible floating point rounding etc.).
  --   Meanwhile, you will in general have
  --
  -- @
  -- (p.+~^v).-~^v ≠ p
  -- @
  --
  --   (though in many instances this is at least for sufficiently small @v@ approximately equal).
  (.-~!) :: HasCallStack => x -> x -> Needle x
  default (.-~!) :: ( Generic x, PseudoAffine (VRep x)
                    , Needle x ~ GenericNeedle x )
              => x -> x -> Needle x
  p.-~!q = GenericNeedle $ Gnrx.from p .-~! (Gnrx.from q :: Gnrx.Rep x Void)
  {-# INLINE (.-~!) #-}

  pseudoAffineWitness :: PseudoAffineWitness x
  default pseudoAffineWitness ::
      PseudoAffine (Needle x)
       => PseudoAffineWitness x
  pseudoAffineWitness = PseudoAffineWitness semimanifoldWitness
-- | A fibre bundle combines points in the /base space/ @b@ with points in the /fibre/
--   @f@. The type @FibreBundle b f@ is thus isomorphic to the tuple space @(b,f)@, but
--   it can have a different topology, the prime example being 'TangentBundle', where
--   nearby points may have differently-oriented tangent spaces.
data FibreBundle b f = FibreBundle
      { baseSpace :: !b
      , fibreSpace :: !f
      } deriving (Generic, Show)

-- | Points on a manifold, combined with vectors in the respective tangent space.
type TangentBundle m = FibreBundle m (Needle m)
-- | Interpolate between points, approximately linearly. For
-- points that aren't close neighbours (i.e. lie in an almost
-- flat region), the pathway is basically undefined – save for
-- its end points.
--
-- A proper, really well-defined (on global scales) interpolation
only makes sense on a Riemannian manifold , as ' Data . Manifold . Riemannian . Geodesic ' .
-- | Linear-ish interpolation along the needle from @p₀@ to @p₁@;
--   'Nothing' when no connecting needle exists.
palerp :: ∀ x. (PseudoAffine x, VectorSpace (Needle x))
                  => x -> x -> Maybe (Scalar (Needle x) -> x)
palerp p₀ p₁ = interpolator <$> (p₁.-~.p₀)
 where interpolator v t = p₀ .+~^ t *^ v
-- | Like 'palerp', but actually restricted to the interval between the points,
-- with a signature like 'Data.Manifold.Riemannian.geodesicBetween'
-- rather than 'Data.AffineSpace.alerp'.
-- | 'palerp' restricted to the disk parameter @'D¹' [-1,1]@, mapped affinely
--   onto the @[0,1]@ interpolation range.
palerpB :: ∀ x. (PseudoAffine x, VectorSpace (Needle x), Scalar (Needle x) ~ ℝ)
                  => x -> x -> Maybe (D¹ -> x)
palerpB p₀ p₁ = along <$> (p₁.-~.p₀)
 where along v (D¹ t) = p₀ .+~^ ((t+1)/2) *^ v
-- | Like 'alerp', but actually restricted to the interval between the points.
-- | Affine interpolation over the @'D¹'@ parameter range; the difference
--   vector is computed once and shared across all evaluations.
alerpB :: ∀ x. (AffineSpace x, VectorSpace (Diff x), Scalar (Diff x) ~ ℝ)
                  => x -> x -> D¹ -> x
alerpB p1 p2 = \(D¹ t) -> p1 .+^ ((t+1)/2) *^ v
 where v = p2 .-. p1
-- Boilerplate for flat scalar types: an 'AffineSpace' is trivially a
-- semimanifold / pseudo-affine space with 'Needle' = 'Diff'.
-- (CPP macro: no comments may appear on the backslash-continued lines.)
#define deriveAffine(c,t) \
instance (c) => Semimanifold (t) where { \
  type Needle (t) = Diff (t); \
  (.+~^) = (.+^) }; \
instance (c) => PseudoAffine (t) where { \
  a.-~.b = pure (a.-.b); \
  (.-~!) = (.-.) }

deriveAffine((),Double)
deriveAffine((),Float)
deriveAffine((),Rational)
-- | The trivial zero-dimensional space: its only needle is 'Origin' itself.
instance Semimanifold (ZeroDim k) where
  type Needle (ZeroDim k) = ZeroDim k
  Origin .+~^ Origin = Origin
  Origin .-~^ Origin = Origin
instance PseudoAffine (ZeroDim k) where
  Origin .-~! Origin = Origin
  Origin .-~. Origin = pure Origin
-- | Product manifolds: needles and translations act componentwise.
instance ∀ a b . (Semimanifold a, Semimanifold b) => Semimanifold (a,b) where
  type Needle (a,b) = (Needle a, Needle b)
  (a,b).+~^(v,w) = (a.+~^v, b.+~^w)
  (a,b).-~^(v,w) = (a.-~^v, b.-~^w)
  semimanifoldWitness = case ( semimanifoldWitness :: SemimanifoldWitness a
                             , semimanifoldWitness :: SemimanifoldWitness b ) of
     (SemimanifoldWitness, SemimanifoldWitness) -> SemimanifoldWitness
instance (PseudoAffine a, PseudoAffine b) => PseudoAffine (a,b) where
  (a,b).-~.(c,d) = liftA2 (,) (a.-~.c) (b.-~.d)
  (a,b).-~!(c,d) = (a.-~!c, b.-~!d)
  pseudoAffineWitness = case ( pseudoAffineWitness :: PseudoAffineWitness a
                             , pseudoAffineWitness :: PseudoAffineWitness b ) of
     ( PseudoAffineWitness (SemimanifoldWitness)
      ,PseudoAffineWitness (SemimanifoldWitness) )
       ->PseudoAffineWitness (SemimanifoldWitness)
instance ∀ a b c . (Semimanifold a, Semimanifold b, Semimanifold c)
                      => Semimanifold (a,b,c) where
  type Needle (a,b,c) = (Needle a, Needle b, Needle c)
  (a,b,c).+~^(v,w,x) = (a.+~^v, b.+~^w, c.+~^x)
  (a,b,c).-~^(v,w,x) = (a.-~^v, b.-~^w, c.-~^x)
  semimanifoldWitness = case ( semimanifoldWitness :: SemimanifoldWitness a
                             , semimanifoldWitness :: SemimanifoldWitness b
                             , semimanifoldWitness :: SemimanifoldWitness c ) of
     ( SemimanifoldWitness, SemimanifoldWitness, SemimanifoldWitness )
      -> SemimanifoldWitness
instance (PseudoAffine a, PseudoAffine b, PseudoAffine c) => PseudoAffine (a,b,c) where
  (a,b,c).-~!(d,e,f) = (a.-~!d, b.-~!e, c.-~!f)
  (a,b,c).-~.(d,e,f) = liftA3 (,,) (a.-~.d) (b.-~.e) (c.-~.f)
  pseudoAffineWitness = case ( pseudoAffineWitness :: PseudoAffineWitness a
                             , pseudoAffineWitness :: PseudoAffineWitness b
                             , pseudoAffineWitness :: PseudoAffineWitness c ) of
     ( PseudoAffineWitness SemimanifoldWitness
      ,PseudoAffineWitness SemimanifoldWitness
      ,PseudoAffineWitness SemimanifoldWitness )
       ->PseudoAffineWitness SemimanifoldWitness
-- | Zero-dimensional real projective space: a single point, trivial needles.
instance Semimanifold (ℝP⁰_ r) where
  type Needle (ℝP⁰_ r) = ZeroDim r
  p .+~^ Origin = p
  p .-~^ Origin = p
instance PseudoAffine (ℝP⁰_ r) where
  ℝPZero .-~! ℝPZero = Origin
  ℝPZero .-~. ℝPZero = pure Origin
-- | The real projective line, parameterised by an angle that 'toℝP¹range'
--   wraps into the ±π/2 range.
instance ℝeal r => Semimanifold (ℝP¹_ r) where
  type Needle (ℝP¹_ r) = r
  HemisphereℝP¹Polar r₀ .+~^ δr = HemisphereℝP¹Polar . toℝP¹range $ r₀ + δr
instance ℝeal r => PseudoAffine (ℝP¹_ r) where
  p.-~.q = pure (p.-~!q)
  HemisphereℝP¹Polar φ₁ .-~! HemisphereℝP¹Polar φ₀
     -- pick the representative of the angular difference closest to zero
     | δφ > pi/2     = δφ - pi
     | δφ < (-pi/2)  = δφ + pi
     | otherwise     = δφ
   where δφ = φ₁ - φ₀
-- | The full circle constant: τ = 2·π.
tau :: RealFloat r => r
tau = 2 * pi
-- | Wrap an angle into the half-open range [−π, π).
toS¹range :: RealFloat r => r -> r
toS¹range angle = shifted - pi
 where shifted = (angle + pi) `mod'` (2*pi)
-- | Wrap an angle into the half-open range [−π/2, π/2).
toℝP¹range :: RealFloat r => r -> r
toℝP¹range angle = shifted - pi/2
 where shifted = (angle + pi/2) `mod'` pi
-- | Wrap a number into the half-open range [−1, 1).
toUnitrange :: RealFloat r => r -> r
toUnitrange x = shifted - 1
 where shifted = (x + 1) `mod'` 2
-- | Needle type of a generic product: a strict pair of the factors' needles.
--   The empty instance bodies below rely on the respective classes'
--   default method implementations (available via the 'Generic' instance).
data NeedleProductSpace f g p = NeedleProductSpace
       !(Needle (f p)) !(Needle (g p)) deriving (Generic)
instance (Semimanifold (f p), Semimanifold (g p))
    => AdditiveGroup (NeedleProductSpace f g p)
instance ( Semimanifold (f p), Semimanifold (g p)
         , VectorSpace (Needle (f p)), VectorSpace (Needle (g p))
         , Scalar (Needle (f p)) ~ Scalar (Needle (g p)) )
    => VectorSpace (NeedleProductSpace f g p)
instance ( Semimanifold (f p), Semimanifold (g p)
         , InnerSpace (Needle (f p)), InnerSpace (Needle (g p))
         , Scalar (Needle (f p)) ~ Scalar (Needle (g p))
         , Num (Scalar (Needle (f p))) )
    => InnerSpace (NeedleProductSpace f g p)
instance (Semimanifold (f p), Semimanifold (g p))
    => AffineSpace (NeedleProductSpace f g p) where
  type Diff (NeedleProductSpace f g p) = NeedleProductSpace f g p
  (.+^) = (^+^)
  (.-.) = (^-^)
instance (Semimanifold (f p), Semimanifold (g p))
    => Semimanifold (NeedleProductSpace f g p) where
  type Needle (NeedleProductSpace f g p) = NeedleProductSpace f g p
  (.+~^) = (^+^)
instance (PseudoAffine (f p), PseudoAffine (g p))
    => PseudoAffine (NeedleProductSpace f g p) where
  p.-~.q = Just $ p.-.q
  (.-~!) = (.-.)
instance ( Semimanifold (f p), Semimanifold (g p)
         , HasBasis (Needle (f p)), HasBasis (Needle (g p))
         , Scalar (Needle (f p)) ~ Scalar (Needle (g p)) )
    => HasBasis (NeedleProductSpace f g p) where
  -- Basis vectors are tagged by which factor they come from.
  type Basis (NeedleProductSpace f g p) = Either (Basis (Needle (f p)))
                                                 (Basis (Needle (g p)))
  basisValue (Left bf) = NeedleProductSpace (basisValue bf) zeroV
  basisValue (Right bg) = NeedleProductSpace zeroV (basisValue bg)
  decompose (NeedleProductSpace vf vg)
      = map (first Left) (decompose vf) ++ map (first Right) (decompose vg)
  decompose' (NeedleProductSpace vf _) (Left bf) = decompose' vf bf
  decompose' (NeedleProductSpace _ vg) (Right bg) = decompose' vg bg
-- | Newtype wrapper serving as the default 'Needle' for 'Generic'-derived
--   'Semimanifold' instances: it carries the needle of the generic
--   representation 'VRep'; all structure is lifted through the wrapper.
newtype GenericNeedle x = GenericNeedle {getGenericNeedle :: Needle (VRep x)}
    deriving (Generic)
instance AdditiveGroup (Needle (VRep x)) => AdditiveGroup (GenericNeedle x) where
  GenericNeedle v ^+^ GenericNeedle w = GenericNeedle $ v ^+^ w
  negateV = GenericNeedle . negateV . getGenericNeedle
  zeroV = GenericNeedle zeroV
instance VectorSpace (Needle (VRep x)) => VectorSpace (GenericNeedle x) where
  type Scalar (GenericNeedle x) = Scalar (Needle (VRep x))
  (*^) μ = GenericNeedle . (*^) μ . getGenericNeedle
instance InnerSpace (Needle (VRep x)) => InnerSpace (GenericNeedle x) where
  GenericNeedle v <.> GenericNeedle w = v <.> w
instance AdditiveGroup (Needle (VRep x)) => AffineSpace (GenericNeedle x) where
  type Diff (GenericNeedle x) = GenericNeedle x
  (.-.) = (^-^)
  (.+^) = (^+^)
instance AdditiveGroup (Needle (VRep x)) => Semimanifold (GenericNeedle x) where
  type Needle (GenericNeedle x) = GenericNeedle x
  (.+~^) = (.+^)
instance AdditiveGroup (Needle (VRep x)) => PseudoAffine (GenericNeedle x) where
  GenericNeedle v .-~. GenericNeedle w = Just $ GenericNeedle (v ^-^ w)
  GenericNeedle v .-~! GenericNeedle w = GenericNeedle (v ^-^ w)
-- Semimanifold / PseudoAffine structure on the GHC.Generics representation
-- types (K1, M1, :*:), supporting the 'Generic'-based default methods above.
instance ∀ a s . Semimanifold a => Semimanifold (Gnrx.Rec0 a s) where
  type Needle (Gnrx.Rec0 a s) = Needle a
  semimanifoldWitness = case semimanifoldWitness :: SemimanifoldWitness a of
     SemimanifoldWitness
         -> SemimanifoldWitness
  Gnrx.K1 p .+~^ v = Gnrx.K1 $ p .+~^ v
instance ∀ f p i c . Semimanifold (f p) => Semimanifold (Gnrx.M1 i c f p) where
  type Needle (Gnrx.M1 i c f p) = Needle (f p)
  semimanifoldWitness = case semimanifoldWitness :: SemimanifoldWitness (f p) of
     SemimanifoldWitness -> SemimanifoldWitness
  Gnrx.M1 p.+~^v = Gnrx.M1 $ p.+~^v
instance ∀ f g p . (Semimanifold (f p), Semimanifold (g p))
         => Semimanifold ((f :*: g) p) where
  type Needle ((f:*:g) p) = NeedleProductSpace f g p
  semimanifoldWitness = case ( semimanifoldWitness :: SemimanifoldWitness (f p)
                             , semimanifoldWitness :: SemimanifoldWitness (g p) ) of
     ( SemimanifoldWitness, SemimanifoldWitness )
          -> SemimanifoldWitness
  (p:*:q).+~^(NeedleProductSpace v w) = (p.+~^v) :*: (q.+~^w)
instance ∀ a s . PseudoAffine a => PseudoAffine (Gnrx.Rec0 a s) where
  pseudoAffineWitness = case pseudoAffineWitness :: PseudoAffineWitness a of
     PseudoAffineWitness SemimanifoldWitness
      -> PseudoAffineWitness SemimanifoldWitness
  Gnrx.K1 p .-~. Gnrx.K1 q = p .-~. q
  Gnrx.K1 p .-~! Gnrx.K1 q = p .-~! q
instance ∀ f p i c . PseudoAffine (f p) => PseudoAffine (Gnrx.M1 i c f p) where
  pseudoAffineWitness = case pseudoAffineWitness :: PseudoAffineWitness (f p) of
     PseudoAffineWitness SemimanifoldWitness
      -> PseudoAffineWitness SemimanifoldWitness
  Gnrx.M1 p .-~. Gnrx.M1 q = p .-~. q
  Gnrx.M1 p .-~! Gnrx.M1 q = p .-~! q
instance ∀ f g p . (PseudoAffine (f p), PseudoAffine (g p))
         => PseudoAffine ((f :*: g) p) where
  pseudoAffineWitness = case ( pseudoAffineWitness :: PseudoAffineWitness (f p)
                             , pseudoAffineWitness :: PseudoAffineWitness (g p) ) of
     ( PseudoAffineWitness SemimanifoldWitness
      ,PseudoAffineWitness SemimanifoldWitness )
      -> PseudoAffineWitness SemimanifoldWitness
  (pf:*:pg) .-~. (qf:*:qg) = NeedleProductSpace <$> (pf.-~.qf) <*> (pg.-~.qg)
  (pf:*:pg) .-~! (qf:*:qg) = NeedleProductSpace (pf.-~!qf) (pg.-~!qg)

-- | The 'Generic' representation of @x@, instantiated at 'Void'.
type VRep x = Gnrx.Rep x Void
-- | A (closed) cone over a space @x@ is the product of @x@ with the closed interval 'D¹'
--   of “heights”,
--   except on its “tip”: here, @x@ is smashed to a single point.
--
--   This construct becomes (homeomorphic-to-) an actual geometric cone (and to 'D²') in the
--   special case @x = 'S¹'@.
-- fix: the data declaration header and field names were destroyed by text
-- extraction (only the field comments and the closing brace survived);
-- reconstructed to match the 'Cℝay' declaration and the 'Show' instance below.
data CD¹ x = CD¹ { hParamCD¹ :: !(Scalar (Needle x)) -- ^ Range @[0, 1]@
                 , pParamCD¹ :: !x                   -- ^ Irrelevant at @h = 0@.
                 } deriving (Generic)

deriving instance (Show x, Show (Scalar (Needle x))) => Show (CD¹ x)
-- | An open cone is homeomorphic to a closed cone without the “lid”,
-- i.e. without the “last copy” of @x@, at the far end of the height
-- interval. Since that means the height does not include its supremum, it is actually
-- more natural to express it as the entire real ray, hence the name.
-- fix: the second field's declaration was destroyed by text extraction (only
-- its Haddock comment survived); restored as @pParamCℝay :: !x@, matching
-- the 'CD¹' declaration and the 'Show' instance below.
data Cℝay x = Cℝay { hParamCℝay :: !(Scalar (Needle x)) -- ^ Range @[0, ∞[@
                   , pParamCℝay :: !x                   -- ^ Irrelevant at @h = 0@.
                   } deriving (Generic)

deriving instance (Show x, Show (Scalar (Needle x))) => Show (Cℝay x)
| null |
https://raw.githubusercontent.com/leftaroundabout/manifolds/55330e7760fa8ea8948988a10a06bbf19e69b5f5/manifolds-core/Math/Manifold/Core/PseudoAffine.hs
|
haskell
|
|
Module : Math.Manifold.Core.PseudoAffine
License : GPL v3
Maintainer : (@) jsag $ hvl.no
Stability : experimental
Portability : portable
# LANGUAGE TypeFamilies #
# LANGUAGE GADTs #
# LANGUAGE DeriveGeneric #
# LANGUAGE StandaloneDeriving #
# LANGUAGE EmptyCase #
# LANGUAGE ConstraintKinds #
# LANGUAGE ScopedTypeVariables #
# LANGUAGE TypeOperators #
# LANGUAGE CPP #
| This is the reified form of the property that the interior of a semimanifold
is a manifold. These constraints would ideally be expressed directly as
superclass constraints, but that would require the @UndecidableSuperclasses@
extension, which is not reliable yet.
complicated types like @'Needle' ('Needle' ('Needle' x))@, which is
the same as just @'Needle' x@.
| The space of “ways” starting from some reference point
and going to some particular target point. Hence,
the name: like a compass needle, but also with an actual length.
For affine spaces, 'Needle' is simply the space of
This space should be isomorphic to the tangent space (and in fact
serves an in many ways similar role), however whereas the tangent space
of a manifold is really infinitesimally small, needles actually allow
macroscopic displacements.
| Generalisation of the translation operation '.+^' to possibly non-flat
manifolds, instead of affine spaces.
| Shorthand for @\\p v -> p .+~^ 'negateV' v@, which should obey the /asymptotic/ law
@
p .-~^ v .+~^ v ≅ p
@
the difference @(p.-~^v.+~^v) .-~. p@ should eventually scale down even faster:
as /O/ (/η/²). For large vectors, it may however behave differently,
instance).
| This is the class underlying what we understand as manifolds.
The interface is almost identical to the better-known
That innocent-looking change makes the class applicable to vastly more general types:
while an affine space is basically nothing but a vector space without particularly
designated origin, a pseudo-affine space can have nontrivial topology on the global
scale, and yet be used in practically the same way as an affine space. At least the
usual spheres and tori make good instances, perhaps the class is in fact equivalent to
manifolds in their usual maths definition (with an atlas of charts: a family of
overlapping regions of the topological space, each homeomorphic to the 'Needle'
vector space or some simply-connected subset thereof).
or empty-instantiated based on 'Generic' for product types (including newtypes) of
@
@
is equivalent to
@
type Needle Cylinder = CylinderNeedle
= CylinderPolarNeedle <$> z₁.-~.z₀ <*> φ₁.-~.φ₀
@
Should only yield 'Nothing' if the points are on disjoint segments
of a non–path-connected space.
For a connected manifold, you may define this method as
@
p.-~.q = pure (p.-~!q)
@
the behaviour is undefined.
@
p .+~^ (q.-~.p) ≡ q
@
should hold (up to possible floating point rounding etc.).
Meanwhile, you will in general have
@
(p.+~^v).-~^v ≠ p
@
# INLINE (.-~!) #
nearby points may have differently-oriented tangent spaces.
| Points on a manifold, combined with vectors in the respective tangent space.
| Interpolate between points, approximately linearly. For
points that aren't close neighbours (i.e. lie in an almost
flat region), the pathway is basically undefined – save for
its end points.
A proper, really well-defined (on global scales) interpolation
| Like 'palerp', but actually restricted to the interval between the points,
with a signature like 'Data.Manifold.Riemannian.geodesicBetween'
rather than 'Data.AffineSpace.alerp'.
| Like 'alerp', but actually restricted to the interval between the points.
| A (closed) cone over a space @x@ is the product of @x@ with the closed interval 'D¹'
of “heights”,
except on its “tip”: here, @x@ is smashed to a single point.
| An open cone is homeomorphic to a closed cone without the “lid”,
i.e. without the “last copy” of @x@, at the far end of the height
interval. Since that means the height does not include its supremum, it is actually
more natural to express it as the entire real ray, hence the name.
^ Range @[0, ∞[@
|
Copyright : ( c ) 2016
# LANGUAGE FlexibleInstances #
# LANGUAGE UndecidableInstances #
# LANGUAGE FlexibleContexts #
# LANGUAGE DefaultSignatures #
# LANGUAGE UnicodeSyntax #
module Math.Manifold.Core.PseudoAffine where
import Data.VectorSpace
import Data.AffineSpace
import Data.Basis
import Data.Fixed (mod')
import Data.Void
import Math.Manifold.Core.Types.Internal
import Math.Manifold.VectorSpace.ZeroDimensional
import Control.Applicative
import Control.Arrow
import qualified GHC.Generics as Gnrx
import GHC.Generics (Generic, (:*:)(..))
import Data.CallStack (HasCallStack)
type ℝeal r = (RealFloat r, PseudoAffine r, Semimanifold r, Needle r ~ r)
Also , if all those equality constraints are in scope , GHC tends to infer needlessly
data SemimanifoldWitness x where
SemimanifoldWitness ::
( Semimanifold (Needle x)
, Needle (Needle x) ~ Needle x )
=> SemimanifoldWitness x
data PseudoAffineWitness x where
PseudoAffineWitness :: PseudoAffine (Needle x)
=> SemimanifoldWitness x -> PseudoAffineWitness x
infix 6 .-~., .-~!
infixl 6 .+~^, .-~^
class AdditiveGroup (Needle x) => Semimanifold x where
line segments ( aka vectors ) between two points , i.e. the same as ' Diff ' .
The ' AffineManifold ' constraint makes that requirement explicit .
type Needle x :: *
type Needle x = GenericNeedle x
(.+~^) :: x -> Needle x -> x
default (.+~^) :: ( Generic x, Semimanifold (VRep x)
, Needle x ~ GenericNeedle x )
=> x -> Needle x -> x
p.+~^GenericNeedle v = Gnrx.to (Gnrx.from p.+~^v :: Gnrx.Rep x Void)
Meaning : if @v@ is scaled down with sufficiently small factors /η/ , then
except in flat spaces ( where all this should be equivalent to the ' AffineSpace '
(.-~^) :: x -> Needle x -> x
p .-~^ v = p .+~^ negateV v
semimanifoldWitness :: SemimanifoldWitness x
default semimanifoldWitness ::
( Semimanifold (Needle x), Needle (Needle x) ~ Needle x )
=> SemimanifoldWitness x
semimanifoldWitness = SemimanifoldWitness
' AffineSpace ' class , but we do n't require associativity of ' .+~^ ' with ' ^+^ '
& # x2013 ; except in an /asymptotic sense/ for small vectors .
The ' Semimanifold ' and ' PseudoAffine ' classes can be @anyclass@-derived
existing ' PseudoAffine ' instances . For example , the definition
data Cylinder = CylinderPolar { zCyl : : ! D¹ , : : ! S¹ }
deriving ( Generic , , PseudoAffine )
data Cylinder = CylinderPolar { zCyl : : ! D¹ , : : ! S¹ }
data CylinderNeedle = CylinderPolarNeedle { δzCyl : : ! ( Needle D¹ ) , : : ! ( Needle S¹ ) }
instance where
z φ .+~^ CylinderPolarNeedle δz δφ
= ( z.+~^δz ) ( )
instance PseudoAffine Cylinder where
z₁ φ₁ .-~. CylinderPolar z₀ φ₀
z₁ φ₁ .-~ ! CylinderPolar z₀ φ₀
= CylinderPolarNeedle ( z₁.-~!z₀ ) ( φ₁.-~.φ₀ )
class Semimanifold x => PseudoAffine x where
| The path reaching from one point to another .
(.-~.) :: x -> x -> Maybe (Needle x)
default (.-~.) :: ( Generic x, PseudoAffine (VRep x)
, Needle x ~ GenericNeedle x )
=> x -> x -> Maybe (Needle x)
p.-~.q = GenericNeedle <$> Gnrx.from p .-~. (Gnrx.from q :: Gnrx.Rep x Void)
| Unsafe version of ' .-~. ' . If the two points lie in disjoint regions ,
Whenever @p@ and lie in a connected region , the identity
( though in many instances this is at least for sufficiently small @v@ approximately equal ) .
(.-~!) :: HasCallStack => x -> x -> Needle x
default (.-~!) :: ( Generic x, PseudoAffine (VRep x)
, Needle x ~ GenericNeedle x )
=> x -> x -> Needle x
p.-~!q = GenericNeedle $ Gnrx.from p .-~! (Gnrx.from q :: Gnrx.Rep x Void)
pseudoAffineWitness :: PseudoAffineWitness x
default pseudoAffineWitness ::
PseudoAffine (Needle x)
=> PseudoAffineWitness x
pseudoAffineWitness = PseudoAffineWitness semimanifoldWitness
| A fibre bundle combines points in the /base space/ @b@ with points in the /fibre/
@f@. The type @FibreBundle b f@ is thus isomorphic to the tuple space @(b , f)@ , but
it can have a different topology , the prime example being ' TangentBundle ' , where
data FibreBundle b f = FibreBundle
{ baseSpace :: !b
, fibreSpace :: !f
} deriving (Generic, Show)
type TangentBundle m = FibreBundle m (Needle m)
only makes sense on a Riemannian manifold , as ' Data . Manifold . Riemannian . Geodesic ' .
palerp :: ∀ x. (PseudoAffine x, VectorSpace (Needle x))
=> x -> x -> Maybe (Scalar (Needle x) -> x)
palerp p₀ p₁ = case p₁.-~.p₀ of
Just v -> return $ \t -> p₀ .+~^ t *^ v
_ -> Nothing
palerpB :: ∀ x. (PseudoAffine x, VectorSpace (Needle x), Scalar (Needle x) ~ ℝ)
=> x -> x -> Maybe (D¹ -> x)
palerpB p₀ p₁ = case p₁.-~.p₀ of
Just v -> return $ \(D¹ t) -> p₀ .+~^ ((t+1)/2) *^ v
_ -> Nothing
alerpB :: ∀ x. (AffineSpace x, VectorSpace (Diff x), Scalar (Diff x) ~ ℝ)
=> x -> x -> D¹ -> x
alerpB p1 p2 = case p2 .-. p1 of
v -> \(D¹ t) -> p1 .+^ ((t+1)/2) *^ v
#define deriveAffine(c,t) \
instance (c) => Semimanifold (t) where { \
type Needle (t) = Diff (t); \
(.+~^) = (.+^) }; \
instance (c) => PseudoAffine (t) where { \
a.-~.b = pure (a.-.b); \
(.-~!) = (.-.) }
deriveAffine((),Double)
deriveAffine((),Float)
deriveAffine((),Rational)
instance Semimanifold (ZeroDim k) where
type Needle (ZeroDim k) = ZeroDim k
Origin .+~^ Origin = Origin
Origin .-~^ Origin = Origin
instance PseudoAffine (ZeroDim k) where
Origin .-~! Origin = Origin
Origin .-~. Origin = pure Origin
instance ∀ a b . (Semimanifold a, Semimanifold b) => Semimanifold (a,b) where
type Needle (a,b) = (Needle a, Needle b)
(a,b).+~^(v,w) = (a.+~^v, b.+~^w)
(a,b).-~^(v,w) = (a.-~^v, b.-~^w)
semimanifoldWitness = case ( semimanifoldWitness :: SemimanifoldWitness a
, semimanifoldWitness :: SemimanifoldWitness b ) of
(SemimanifoldWitness, SemimanifoldWitness) -> SemimanifoldWitness
instance (PseudoAffine a, PseudoAffine b) => PseudoAffine (a,b) where
(a,b).-~.(c,d) = liftA2 (,) (a.-~.c) (b.-~.d)
(a,b).-~!(c,d) = (a.-~!c, b.-~!d)
pseudoAffineWitness = case ( pseudoAffineWitness :: PseudoAffineWitness a
, pseudoAffineWitness :: PseudoAffineWitness b ) of
( PseudoAffineWitness (SemimanifoldWitness)
, PseudoAffineWitness (SemimanifoldWitness) )
->PseudoAffineWitness (SemimanifoldWitness)
instance ∀ a b c . (Semimanifold a, Semimanifold b, Semimanifold c)
=> Semimanifold (a,b,c) where
type Needle (a,b,c) = (Needle a, Needle b, Needle c)
(a,b,c).+~^(v,w,x) = (a.+~^v, b.+~^w, c.+~^x)
(a,b,c).-~^(v,w,x) = (a.-~^v, b.-~^w, c.-~^x)
semimanifoldWitness = case ( semimanifoldWitness :: SemimanifoldWitness a
, semimanifoldWitness :: SemimanifoldWitness b
, semimanifoldWitness :: SemimanifoldWitness c ) of
( SemimanifoldWitness, SemimanifoldWitness, SemimanifoldWitness )
-> SemimanifoldWitness
instance (PseudoAffine a, PseudoAffine b, PseudoAffine c) => PseudoAffine (a,b,c) where
(a,b,c).-~!(d,e,f) = (a.-~!d, b.-~!e, c.-~!f)
(a,b,c).-~.(d,e,f) = liftA3 (,,) (a.-~.d) (b.-~.e) (c.-~.f)
pseudoAffineWitness = case ( pseudoAffineWitness :: PseudoAffineWitness a
, pseudoAffineWitness :: PseudoAffineWitness b
, pseudoAffineWitness :: PseudoAffineWitness c ) of
( PseudoAffineWitness SemimanifoldWitness
, PseudoAffineWitness SemimanifoldWitness
, PseudoAffineWitness SemimanifoldWitness )
->PseudoAffineWitness SemimanifoldWitness
instance Semimanifold (ℝP⁰_ r) where
type Needle (ℝP⁰_ r) = ZeroDim r
p .+~^ Origin = p
p .-~^ Origin = p
instance PseudoAffine (ℝP⁰_ r) where
ℝPZero .-~! ℝPZero = Origin
ℝPZero .-~. ℝPZero = pure Origin
instance ℝeal r => Semimanifold (ℝP¹_ r) where
type Needle (ℝP¹_ r) = r
HemisphereℝP¹Polar r₀ .+~^ δr = HemisphereℝP¹Polar . toℝP¹range $ r₀ + δr
instance ℝeal r => PseudoAffine (ℝP¹_ r) where
p.-~.q = pure (p.-~!q)
HemisphereℝP¹Polar φ₁ .-~! HemisphereℝP¹Polar φ₀
| δφ > pi/2 = δφ - pi
| δφ < (-pi/2) = δφ + pi
| otherwise = δφ
where δφ = φ₁ - φ₀
tau :: RealFloat r => r
tau = 2 * pi
toS¹range :: RealFloat r => r -> r
toS¹range φ = (φ+pi)`mod'`tau - pi
toℝP¹range :: RealFloat r => r -> r
toℝP¹range φ = (φ+pi/2)`mod'`pi - pi/2
toUnitrange :: RealFloat r => r -> r
toUnitrange φ = (φ+1)`mod'`2 - 1
data NeedleProductSpace f g p = NeedleProductSpace
!(Needle (f p)) !(Needle (g p)) deriving (Generic)
instance (Semimanifold (f p), Semimanifold (g p))
=> AdditiveGroup (NeedleProductSpace f g p)
instance ( Semimanifold (f p), Semimanifold (g p)
, VectorSpace (Needle (f p)), VectorSpace (Needle (g p))
, Scalar (Needle (f p)) ~ Scalar (Needle (g p)) )
=> VectorSpace (NeedleProductSpace f g p)
instance ( Semimanifold (f p), Semimanifold (g p)
, InnerSpace (Needle (f p)), InnerSpace (Needle (g p))
, Scalar (Needle (f p)) ~ Scalar (Needle (g p))
, Num (Scalar (Needle (f p))) )
=> InnerSpace (NeedleProductSpace f g p)
instance (Semimanifold (f p), Semimanifold (g p))
=> AffineSpace (NeedleProductSpace f g p) where
type Diff (NeedleProductSpace f g p) = NeedleProductSpace f g p
(.+^) = (^+^)
(.-.) = (^-^)
instance (Semimanifold (f p), Semimanifold (g p))
=> Semimanifold (NeedleProductSpace f g p) where
type Needle (NeedleProductSpace f g p) = NeedleProductSpace f g p
(.+~^) = (^+^)
instance (PseudoAffine (f p), PseudoAffine (g p))
=> PseudoAffine (NeedleProductSpace f g p) where
p.-~.q = Just $ p.-.q
(.-~!) = (.-.)
instance ( Semimanifold (f p), Semimanifold (g p)
, HasBasis (Needle (f p)), HasBasis (Needle (g p))
, Scalar (Needle (f p)) ~ Scalar (Needle (g p)) )
=> HasBasis (NeedleProductSpace f g p) where
type Basis (NeedleProductSpace f g p) = Either (Basis (Needle (f p)))
(Basis (Needle (g p)))
basisValue (Left bf) = NeedleProductSpace (basisValue bf) zeroV
basisValue (Right bg) = NeedleProductSpace zeroV (basisValue bg)
decompose (NeedleProductSpace vf vg)
= map (first Left) (decompose vf) ++ map (first Right) (decompose vg)
decompose' (NeedleProductSpace vf _) (Left bf) = decompose' vf bf
decompose' (NeedleProductSpace _ vg) (Right bg) = decompose' vg bg
newtype GenericNeedle x = GenericNeedle {getGenericNeedle :: Needle (VRep x)}
deriving (Generic)
instance AdditiveGroup (Needle (VRep x)) => AdditiveGroup (GenericNeedle x) where
GenericNeedle v ^+^ GenericNeedle w = GenericNeedle $ v ^+^ w
negateV = GenericNeedle . negateV . getGenericNeedle
zeroV = GenericNeedle zeroV
instance VectorSpace (Needle (VRep x)) => VectorSpace (GenericNeedle x) where
type Scalar (GenericNeedle x) = Scalar (Needle (VRep x))
(*^) μ = GenericNeedle . (*^) μ . getGenericNeedle
instance InnerSpace (Needle (VRep x)) => InnerSpace (GenericNeedle x) where
GenericNeedle v <.> GenericNeedle w = v <.> w
instance AdditiveGroup (Needle (VRep x)) => AffineSpace (GenericNeedle x) where
type Diff (GenericNeedle x) = GenericNeedle x
(.-.) = (^-^)
(.+^) = (^+^)
instance AdditiveGroup (Needle (VRep x)) => Semimanifold (GenericNeedle x) where
type Needle (GenericNeedle x) = GenericNeedle x
(.+~^) = (.+^)
instance AdditiveGroup (Needle (VRep x)) => PseudoAffine (GenericNeedle x) where
GenericNeedle v .-~. GenericNeedle w = Just $ GenericNeedle (v ^-^ w)
GenericNeedle v .-~! GenericNeedle w = GenericNeedle (v ^-^ w)
instance ∀ a s . Semimanifold a => Semimanifold (Gnrx.Rec0 a s) where
type Needle (Gnrx.Rec0 a s) = Needle a
semimanifoldWitness = case semimanifoldWitness :: SemimanifoldWitness a of
SemimanifoldWitness
-> SemimanifoldWitness
Gnrx.K1 p .+~^ v = Gnrx.K1 $ p .+~^ v
instance ∀ f p i c . Semimanifold (f p) => Semimanifold (Gnrx.M1 i c f p) where
type Needle (Gnrx.M1 i c f p) = Needle (f p)
semimanifoldWitness = case semimanifoldWitness :: SemimanifoldWitness (f p) of
SemimanifoldWitness -> SemimanifoldWitness
Gnrx.M1 p.+~^v = Gnrx.M1 $ p.+~^v
instance ∀ f g p . (Semimanifold (f p), Semimanifold (g p))
=> Semimanifold ((f :*: g) p) where
type Needle ((f:*:g) p) = NeedleProductSpace f g p
semimanifoldWitness = case ( semimanifoldWitness :: SemimanifoldWitness (f p)
, semimanifoldWitness :: SemimanifoldWitness (g p) ) of
( SemimanifoldWitness, SemimanifoldWitness )
-> SemimanifoldWitness
(p:*:q).+~^(NeedleProductSpace v w) = (p.+~^v) :*: (q.+~^w)
instance ∀ a s . PseudoAffine a => PseudoAffine (Gnrx.Rec0 a s) where
pseudoAffineWitness = case pseudoAffineWitness :: PseudoAffineWitness a of
PseudoAffineWitness SemimanifoldWitness
-> PseudoAffineWitness SemimanifoldWitness
Gnrx.K1 p .-~. Gnrx.K1 q = p .-~. q
Gnrx.K1 p .-~! Gnrx.K1 q = p .-~! q
instance ∀ f p i c . PseudoAffine (f p) => PseudoAffine (Gnrx.M1 i c f p) where
pseudoAffineWitness = case pseudoAffineWitness :: PseudoAffineWitness (f p) of
PseudoAffineWitness SemimanifoldWitness
-> PseudoAffineWitness SemimanifoldWitness
Gnrx.M1 p .-~. Gnrx.M1 q = p .-~. q
Gnrx.M1 p .-~! Gnrx.M1 q = p .-~! q
instance ∀ f g p . (PseudoAffine (f p), PseudoAffine (g p))
=> PseudoAffine ((f :*: g) p) where
pseudoAffineWitness = case ( pseudoAffineWitness :: PseudoAffineWitness (f p)
, pseudoAffineWitness :: PseudoAffineWitness (g p) ) of
( PseudoAffineWitness SemimanifoldWitness
,PseudoAffineWitness SemimanifoldWitness )
-> PseudoAffineWitness SemimanifoldWitness
(pf:*:pg) .-~. (qf:*:qg) = NeedleProductSpace <$> (pf.-~.qf) <*> (pg.-~.qg)
(pf:*:pg) .-~! (qf:*:qg) = NeedleProductSpace (pf.-~!qf) (pg.-~!qg)
type VRep x = Gnrx.Rep x Void
This construct becomes ( homeomorphic - to- ) an actual geometric cone ( and to ' D² ' ) in the
special case @x = ' S¹'@.
^ Range @[0 , 1]@
^ Irrelevant at @h = 0@.
} deriving (Generic)
deriving instance (Show x, Show (Scalar (Needle x))) => Show (CD¹ x)
^ Irrelevant at @h = 0@.
} deriving (Generic)
deriving instance (Show x, Show (Scalar (Needle x))) => Show (Cℝay x)
|
23418110addb0bacccfcfb542b14d64467815eda88b5dc0876c7e4e4bece8843
|
facebookarchive/pfff
|
visitor_php.mli
|
(*s: visitor_php.mli *)
open Ast_php
s : type visitor_in
(* the hooks *)
type visitor_in = {
kexpr: (expr -> unit) * visitor_out -> expr -> unit;
kstmt: (stmt -> unit) * visitor_out -> stmt -> unit;
ktop: (toplevel -> unit) * visitor_out -> toplevel -> unit;
kconstant: (constant -> unit) * visitor_out -> constant -> unit;
kscalar: (scalar -> unit) * visitor_out -> scalar -> unit;
kencaps: (encaps -> unit) * visitor_out -> encaps -> unit;
kclass_stmt: (class_stmt -> unit) * visitor_out -> class_stmt -> unit;
kparameter: (parameter -> unit) * visitor_out -> parameter -> unit;
kargument: (argument -> unit) * visitor_out -> argument -> unit;
kcatch: (catch -> unit) * visitor_out -> catch -> unit;
kfinally: (finally -> unit) * visitor_out -> finally -> unit;
xhp :
kxhp_html: (xhp_html -> unit) * visitor_out -> xhp_html -> unit;
kxhp_tag: (xhp_tag wrap -> unit) * visitor_out -> xhp_tag wrap -> unit;
kxhp_attribute:
(xhp_attribute -> unit) * visitor_out -> xhp_attribute -> unit;
kxhp_attr_decl:
(xhp_attribute_decl -> unit) * visitor_out -> xhp_attribute_decl -> unit;
kxhp_children_decl:
(xhp_children_decl -> unit) * visitor_out -> xhp_children_decl -> unit;
kfunc_def helps abstracting away whether a function / class ... is defined
* in a nested way or at the toplevel ( e.g. FuncDefNested vs FuncDef ) .
* Note that kfunc_def is also run for methods now . Look in
* def.f_type to decide what to do if you want to filter methods .
* ! note ! short lambdas are currently not in func_def , so take care
* to visit also this case in kexpr .
* in a nested way or at the toplevel (e.g. FuncDefNested vs FuncDef).
* Note that kfunc_def is also run for methods now. Look in
* def.f_type to decide what to do if you want to filter methods.
* !note! short lambdas are currently not in func_def, so take care
* to visit also this case in kexpr.
*)
kfunc_def: (func_def -> unit) * visitor_out -> func_def -> unit;
kclass_def: (class_def -> unit) * visitor_out -> class_def -> unit;
kmethod_def: (method_def -> unit) * visitor_out -> method_def -> unit;
(* Helps intercepting all the new blocks that in a real language should
* defined a new scope
*)
kstmt_and_def_list_scope:
(stmt_and_def list -> unit) * visitor_out -> stmt_and_def list -> unit;
kname: (name -> unit) * visitor_out -> name -> unit;
khint_type: (hint_type -> unit) * visitor_out -> hint_type -> unit;
ktparam: (type_param -> unit) * visitor_out -> type_param -> unit;
karray_pair: (array_pair -> unit) * visitor_out -> array_pair -> unit;
karguments: (argument comma_list paren -> unit) * visitor_out ->
argument comma_list paren -> unit;
kcomma: (tok -> unit) * visitor_out -> tok -> unit;
kinfo: (tok -> unit) * visitor_out -> tok -> unit;
}
e : type visitor_in
(*s: type visitor_out *)
and visitor_out = any -> unit
(*e: type visitor_out *)
(*s: visitor functions *)
val default_visitor : visitor_in
(*x: visitor functions *)
val mk_visitor: visitor_in -> visitor_out
(*x: visitor functions *)
val do_visit_with_ref:
('a list ref -> visitor_in) -> any -> 'a list
(*e: visitor functions *)
(*e: visitor_php.mli *)
| null |
https://raw.githubusercontent.com/facebookarchive/pfff/ec21095ab7d445559576513a63314e794378c367/lang_php/parsing/visitor_php.mli
|
ocaml
|
s: visitor_php.mli
the hooks
Helps intercepting all the new blocks that in a real language should
* defined a new scope
s: type visitor_out
e: type visitor_out
s: visitor functions
x: visitor functions
x: visitor functions
e: visitor functions
e: visitor_php.mli
|
open Ast_php
s : type visitor_in
type visitor_in = {
kexpr: (expr -> unit) * visitor_out -> expr -> unit;
kstmt: (stmt -> unit) * visitor_out -> stmt -> unit;
ktop: (toplevel -> unit) * visitor_out -> toplevel -> unit;
kconstant: (constant -> unit) * visitor_out -> constant -> unit;
kscalar: (scalar -> unit) * visitor_out -> scalar -> unit;
kencaps: (encaps -> unit) * visitor_out -> encaps -> unit;
kclass_stmt: (class_stmt -> unit) * visitor_out -> class_stmt -> unit;
kparameter: (parameter -> unit) * visitor_out -> parameter -> unit;
kargument: (argument -> unit) * visitor_out -> argument -> unit;
kcatch: (catch -> unit) * visitor_out -> catch -> unit;
kfinally: (finally -> unit) * visitor_out -> finally -> unit;
xhp :
kxhp_html: (xhp_html -> unit) * visitor_out -> xhp_html -> unit;
kxhp_tag: (xhp_tag wrap -> unit) * visitor_out -> xhp_tag wrap -> unit;
kxhp_attribute:
(xhp_attribute -> unit) * visitor_out -> xhp_attribute -> unit;
kxhp_attr_decl:
(xhp_attribute_decl -> unit) * visitor_out -> xhp_attribute_decl -> unit;
kxhp_children_decl:
(xhp_children_decl -> unit) * visitor_out -> xhp_children_decl -> unit;
kfunc_def helps abstracting away whether a function / class ... is defined
* in a nested way or at the toplevel ( e.g. FuncDefNested vs FuncDef ) .
* Note that kfunc_def is also run for methods now . Look in
* def.f_type to decide what to do if you want to filter methods .
* ! note ! short lambdas are currently not in func_def , so take care
* to visit also this case in kexpr .
* in a nested way or at the toplevel (e.g. FuncDefNested vs FuncDef).
* Note that kfunc_def is also run for methods now. Look in
* def.f_type to decide what to do if you want to filter methods.
* !note! short lambdas are currently not in func_def, so take care
* to visit also this case in kexpr.
*)
kfunc_def: (func_def -> unit) * visitor_out -> func_def -> unit;
kclass_def: (class_def -> unit) * visitor_out -> class_def -> unit;
kmethod_def: (method_def -> unit) * visitor_out -> method_def -> unit;
kstmt_and_def_list_scope:
(stmt_and_def list -> unit) * visitor_out -> stmt_and_def list -> unit;
kname: (name -> unit) * visitor_out -> name -> unit;
khint_type: (hint_type -> unit) * visitor_out -> hint_type -> unit;
ktparam: (type_param -> unit) * visitor_out -> type_param -> unit;
karray_pair: (array_pair -> unit) * visitor_out -> array_pair -> unit;
karguments: (argument comma_list paren -> unit) * visitor_out ->
argument comma_list paren -> unit;
kcomma: (tok -> unit) * visitor_out -> tok -> unit;
kinfo: (tok -> unit) * visitor_out -> tok -> unit;
}
e : type visitor_in
and visitor_out = any -> unit
val default_visitor : visitor_in
val mk_visitor: visitor_in -> visitor_out
val do_visit_with_ref:
('a list ref -> visitor_in) -> any -> 'a list
|
b4a5f9b7ff9cd8c4b2f99c9f8f93eedf53a19f7ff886cfa298e37bd802195dad
|
HuwCampbell/grenade
|
Relu.hs
|
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE TypeOperators #-}
# LANGUAGE TypeFamilies #
# LANGUAGE MultiParamTypeClasses #
|
Module : Grenade . Layers . Relu
Description : Rectifying linear unit layer
Copyright : ( c ) , 2016 - 2017
License : BSD2
Stability : experimental
Module : Grenade.Layers.Relu
Description : Rectifying linear unit layer
Copyright : (c) Huw Campbell, 2016-2017
License : BSD2
Stability : experimental
-}
module Grenade.Layers.Relu (
Relu (..)
) where
import Data.Serialize
import GHC.TypeLits
import Grenade.Core
import qualified Numeric.LinearAlgebra.Static as LAS
| A rectifying linear unit .
-- A layer which can act between any shape of the same dimension, acting as a
-- diode on every neuron individually.
data Relu = Relu
deriving Show
instance UpdateLayer Relu where
type Gradient Relu = ()
runUpdate _ _ _ = Relu
createRandom = return Relu
instance Serialize Relu where
put _ = return ()
get = return Relu
instance ( KnownNat i) => Layer Relu ('D1 i) ('D1 i) where
type Tape Relu ('D1 i) ('D1 i) = S ('D1 i)
runForwards _ (S1D y) = (S1D y, S1D (relu y))
where
relu = LAS.dvmap (\a -> if a <= 0 then 0 else a)
runBackwards _ (S1D y) (S1D dEdy) = ((), S1D (relu' y * dEdy))
where
relu' = LAS.dvmap (\a -> if a <= 0 then 0 else 1)
instance (KnownNat i, KnownNat j) => Layer Relu ('D2 i j) ('D2 i j) where
type Tape Relu ('D2 i j) ('D2 i j) = S ('D2 i j)
runForwards _ (S2D y) = (S2D y, S2D (relu y))
where
relu = LAS.dmmap (\a -> if a <= 0 then 0 else a)
runBackwards _ (S2D y) (S2D dEdy) = ((), S2D (relu' y * dEdy))
where
relu' = LAS.dmmap (\a -> if a <= 0 then 0 else 1)
instance (KnownNat i, KnownNat j, KnownNat k) => Layer Relu ('D3 i j k) ('D3 i j k) where
type Tape Relu ('D3 i j k) ('D3 i j k) = S ('D3 i j k)
runForwards _ (S3D y) = (S3D y, S3D (relu y))
where
relu = LAS.dmmap (\a -> if a <= 0 then 0 else a)
runBackwards _ (S3D y) (S3D dEdy) = ((), S3D (relu' y * dEdy))
where
relu' = LAS.dmmap (\a -> if a <= 0 then 0 else 1)
| null |
https://raw.githubusercontent.com/HuwCampbell/grenade/5206c95c423d9755e620f41576470a281ba59c89/src/Grenade/Layers/Relu.hs
|
haskell
|
# LANGUAGE DataKinds #
# LANGUAGE TypeOperators #
A layer which can act between any shape of the same dimension, acting as a
diode on every neuron individually.
|
# LANGUAGE TypeFamilies #
# LANGUAGE MultiParamTypeClasses #
|
Module : Grenade . Layers . Relu
Description : Rectifying linear unit layer
Copyright : ( c ) , 2016 - 2017
License : BSD2
Stability : experimental
Module : Grenade.Layers.Relu
Description : Rectifying linear unit layer
Copyright : (c) Huw Campbell, 2016-2017
License : BSD2
Stability : experimental
-}
module Grenade.Layers.Relu (
Relu (..)
) where
import Data.Serialize
import GHC.TypeLits
import Grenade.Core
import qualified Numeric.LinearAlgebra.Static as LAS
| A rectifying linear unit .
data Relu = Relu
deriving Show
instance UpdateLayer Relu where
type Gradient Relu = ()
runUpdate _ _ _ = Relu
createRandom = return Relu
instance Serialize Relu where
put _ = return ()
get = return Relu
instance ( KnownNat i) => Layer Relu ('D1 i) ('D1 i) where
type Tape Relu ('D1 i) ('D1 i) = S ('D1 i)
runForwards _ (S1D y) = (S1D y, S1D (relu y))
where
relu = LAS.dvmap (\a -> if a <= 0 then 0 else a)
runBackwards _ (S1D y) (S1D dEdy) = ((), S1D (relu' y * dEdy))
where
relu' = LAS.dvmap (\a -> if a <= 0 then 0 else 1)
instance (KnownNat i, KnownNat j) => Layer Relu ('D2 i j) ('D2 i j) where
type Tape Relu ('D2 i j) ('D2 i j) = S ('D2 i j)
runForwards _ (S2D y) = (S2D y, S2D (relu y))
where
relu = LAS.dmmap (\a -> if a <= 0 then 0 else a)
runBackwards _ (S2D y) (S2D dEdy) = ((), S2D (relu' y * dEdy))
where
relu' = LAS.dmmap (\a -> if a <= 0 then 0 else 1)
instance (KnownNat i, KnownNat j, KnownNat k) => Layer Relu ('D3 i j k) ('D3 i j k) where
type Tape Relu ('D3 i j k) ('D3 i j k) = S ('D3 i j k)
runForwards _ (S3D y) = (S3D y, S3D (relu y))
where
relu = LAS.dmmap (\a -> if a <= 0 then 0 else a)
runBackwards _ (S3D y) (S3D dEdy) = ((), S3D (relu' y * dEdy))
where
relu' = LAS.dmmap (\a -> if a <= 0 then 0 else 1)
|
e86995788b9f6339647945798baf4dee38e592da1312ae9cf3ad88803cbd67bb
|
ghc/packages-dph
|
Tuple.hs
|
-- | Closure converted tuple data constructors used by the vectoriser.
module Data.Array.Parallel.Prelude.Tuple
(tup2, tup3, tup4, tup5)
where
import Data.Array.Parallel.Lifted.Closure
import Data.Array.Parallel.PArray.PRepr
import qualified Data.Array.Parallel.PArray as PA
tup2 :: (PA a, PA b)
=> a :-> b :-> (a, b)
tup2 = closure2' (,) PA.zip
# INLINE tup2 #
tup3 :: (PA a, PA b, PA c)
=> a :-> b :-> c :-> (a, b, c)
tup3 = closure3' (,,) PA.zip3
# INLINE tup3 #
tup4 :: (PA a, PA b, PA c, PA d)
=> a :-> b :-> c :-> d :-> (a, b, c, d)
tup4 = closure4' (,,,) PA.zip4
# INLINE tup4 #
tup5 :: (PA a, PA b, PA c, PA d, PA e)
=> a :-> b :-> c :-> d :-> e :-> (a, b, c, d, e)
tup5 = closure5' (,,,,) PA.zip5
# INLINE tup5 #
| null |
https://raw.githubusercontent.com/ghc/packages-dph/64eca669f13f4d216af9024474a3fc73ce101793/dph-lifted-vseg/Data/Array/Parallel/Prelude/Tuple.hs
|
haskell
|
| Closure converted tuple data constructors used by the vectoriser.
|
module Data.Array.Parallel.Prelude.Tuple
(tup2, tup3, tup4, tup5)
where
import Data.Array.Parallel.Lifted.Closure
import Data.Array.Parallel.PArray.PRepr
import qualified Data.Array.Parallel.PArray as PA
tup2 :: (PA a, PA b)
=> a :-> b :-> (a, b)
tup2 = closure2' (,) PA.zip
# INLINE tup2 #
tup3 :: (PA a, PA b, PA c)
=> a :-> b :-> c :-> (a, b, c)
tup3 = closure3' (,,) PA.zip3
# INLINE tup3 #
tup4 :: (PA a, PA b, PA c, PA d)
=> a :-> b :-> c :-> d :-> (a, b, c, d)
tup4 = closure4' (,,,) PA.zip4
# INLINE tup4 #
tup5 :: (PA a, PA b, PA c, PA d, PA e)
=> a :-> b :-> c :-> d :-> e :-> (a, b, c, d, e)
tup5 = closure5' (,,,,) PA.zip5
# INLINE tup5 #
|
0a3014a2ca7acd9dbb2c97e40dd2d514aff81eb5bbf3cbfa9f249559100d706f
|
vito/atomo
|
Method.hs
|
module Atomo.Method
( addMethod
, elemsMap
, emptyMap
, insertMethod
, insertMap
, lookupMap
, memberMap
, noMethods
, nullMap
, toMethods
) where
import Data.List (elemIndices)
import Data.Maybe (isJust)
import qualified Data.IntMap as M
import Atomo.Types
-- referring to the left side:
-- LT = is higher-precision
GT = is lower - precision
comparePrecision :: Pattern -> Pattern -> Ordering
comparePrecision (PNamed _ a) (PNamed _ b) =
comparePrecision a b
comparePrecision (PNamed _ a) b = comparePrecision a b
comparePrecision a (PNamed _ b) = comparePrecision a b
comparePrecision PAny PAny = EQ
comparePrecision PThis PThis = EQ
comparePrecision (PMatch a@(Object {})) (PMatch b@(Object {}))
| delegatesTo a b = LT
| delegatesTo a b = GT
| otherwise = EQ
comparePrecision (PMatch _) (PMatch _) = EQ
comparePrecision (PList as) (PList bs) =
comparePrecisions as bs
comparePrecision (PPMKeyword _ as) (PPMKeyword _ bs) =
comparePrecisions as bs
comparePrecision (PHeadTail ah at) (PHeadTail bh bt) =
comparePrecisions [ah, at] [bh, bt]
comparePrecision (PMessage (Single { mTarget = at })) (PMessage (Single { mTarget = bt })) =
comparePrecision at bt
comparePrecision (PMessage (Keyword { mTargets = as })) (PMessage (Keyword { mTargets = bs })) =
compareHeads as bs
comparePrecision (PObject _) (PObject _) = EQ
comparePrecision PAny _ = GT
comparePrecision _ PAny = LT
comparePrecision PThis (PMatch (Object {})) = LT
comparePrecision (PMatch (Object {})) PThis = GT
comparePrecision (PMatch _) _ = LT
comparePrecision _ (PMatch _) = GT
comparePrecision (PExpr a) (PExpr b) = exprPrecision 0 a b
comparePrecision (PExpr _) _ = LT
comparePrecision _ (PExpr _) = GT
comparePrecision (PList _) _ = LT
comparePrecision _ (PList _) = GT
comparePrecision (PPMKeyword _ _) _ = LT
comparePrecision _ (PPMKeyword _ _) = GT
comparePrecision (PHeadTail _ _) _ = LT
comparePrecision _ (PHeadTail _ _) = GT
comparePrecision PThis _ = LT
comparePrecision _ PThis = GT
comparePrecision (PObject _) _ = LT
comparePrecision _ (PObject _) = GT
comparePrecision _ _ = GT
compareHeads :: [Pattern] -> [Pattern] -> Ordering
compareHeads [a] [b] = comparePrecision a b
compareHeads (a:as) (b:bs) =
case comparePrecision a b of
EQ -> compareHeads as bs
x -> x
compareHeads a b = error $ "impossible: compareHeads on " ++ show (a, b)
comparePrecisions :: [Pattern] -> [Pattern] -> Ordering
comparePrecisions = comparePrecisionsWith comparePrecision
comparePrecisionsWith :: (a -> a -> Ordering) -> [a] -> [a] -> Ordering
comparePrecisionsWith cmp as bs =
compare gt lt
where
compared = zipWith cmp as bs
gt = length $ elemIndices GT compared
lt = length $ elemIndices LT compared
delegatesTo :: Value -> Value -> Bool
delegatesTo (Object { oDelegates = ds }) t =
t `elem` ds || any (`delegatesTo` t) ds
delegatesTo _ _ = False
exprPrecision :: Int -> Expr -> Expr -> Ordering
exprPrecision 0 (EUnquote {}) (EUnquote {}) = EQ
exprPrecision 0 (EUnquote {}) _ = GT
exprPrecision 0 _ (EUnquote {}) = LT
exprPrecision n (EDefine { eExpr = a }) (EDefine { eExpr = b }) =
exprPrecision n a b
exprPrecision n (ESet { eExpr = a }) (ESet { eExpr = b }) =
exprPrecision n a b
exprPrecision n (EDispatch { eMessage = am@(Keyword {}) }) (EDispatch { eMessage = bm@(Keyword {}) }) =
comparePrecisionsWith (exprPrecision n) (mTargets am) (mTargets bm)
exprPrecision n (EDispatch { eMessage = am@(Single {}) }) (EDispatch { eMessage = bm@(Single {}) }) =
exprPrecision n (mTarget am) (mTarget bm)
exprPrecision n (EBlock { eContents = as }) (EBlock { eContents = bs }) =
comparePrecisionsWith (exprPrecision n) as bs
exprPrecision n (EList { eContents = as }) (EList { eContents = bs }) =
comparePrecisionsWith (exprPrecision n) as bs
exprPrecision n (EMacro { eExpr = a }) (EMacro { eExpr = b }) =
exprPrecision n a b
exprPrecision n (EParticle { eParticle = ap' }) (EParticle { eParticle = bp }) =
case (ap', bp) of
(Keyword { mTargets = ames }, Keyword { mTargets = bmes }) ->
comparePrecisionsWith (exprPrecision n) (firsts ames bmes) (seconds ames bmes)
_ -> EQ
where
pairs ames bmes = map (\(Just a, Just b) -> (a, b)) $ filter (\(a, b) -> isJust a && isJust b) $ zip ames bmes
firsts ames = fst . unzip . pairs ames
seconds ames = fst . unzip . pairs ames
exprPrecision n (EQuote { eExpr = a }) (EQuote { eExpr = b }) =
exprPrecision (n + 1) a b
exprPrecision _ _ _ = EQ
| Insert a method into a MethodMap based on its pattern 's ID and precision .
addMethod :: Method -> MethodMap -> MethodMap
addMethod m mm =
M.insertWith (\[m'] ms -> insertMethod m' ms) key [m] mm
where
key = mID $ mPattern m
| Insert a method into a list of existing methods most precise goes first ,
-- equivalent patterns are replaced.
insertMethod :: Method -> [Method] -> [Method]
insertMethod x [] = [x]
insertMethod x (y:ys)
| mPattern x `samePattern` mPattern y = x : ys
| otherwise =
case comparePrecision (PMessage (mPattern x)) (PMessage (mPattern y)) of
stop at LT so it 's after all of the definitons before this one
LT -> x : y : ys
keep looking if we 're EQ or GT
_ -> y : insertMethod x ys
-- | Like ==, but ignore optionals.
samePattern :: Message Pattern -> Message Pattern -> Bool
samePattern (Single { mID = ai, mTarget = at })
(Single { mID = bi, mTarget = bt }) =
ai == bi && at == bt
samePattern (Keyword { mID = ai, mTargets = ats })
(Keyword { mID = bi, mTargets = bts }) =
ai == bi && ats == bts
samePattern _ _ = False
| Convert a list of slots to a MethodMap .
toMethods :: [(Message Pattern, Value)] -> MethodMap
toMethods = foldl (\ss (p, v) -> addMethod (Slot p v) ss) emptyMap
| A pair of two empty MethodMaps ; one for single methods and one for keyword
-- methods.
noMethods :: (MethodMap, MethodMap)
noMethods = (M.empty, M.empty)
-- | An empty MethodMap.
emptyMap :: MethodMap
emptyMap = M.empty
-- | Find methods in a MethodMap by the pattern ID.
lookupMap :: Int -> MethodMap -> Maybe [Method]
lookupMap = M.lookup
-- | Is a MethodMap empty?.
nullMap :: MethodMap -> Bool
nullMap = M.null
| All of the methods in a MethodMap .
elemsMap :: MethodMap -> [[Method]]
elemsMap = M.elems
-- | Is a key set in a map?
memberMap :: Int -> MethodMap -> Bool
memberMap = M.member
| Insert a method into a MethodMap , replacing all other methods with the
-- same ID.
insertMap :: Method -> MethodMap -> MethodMap
insertMap m mm = M.insert key [m] mm
where
key = mID $ mPattern m
| null |
https://raw.githubusercontent.com/vito/atomo/df22fcb3fbe80abb30b9ab3c6f5d50d3b8477f90/src/Atomo/Method.hs
|
haskell
|
referring to the left side:
LT = is higher-precision
equivalent patterns are replaced.
| Like ==, but ignore optionals.
methods.
| An empty MethodMap.
| Find methods in a MethodMap by the pattern ID.
| Is a MethodMap empty?.
| Is a key set in a map?
same ID.
|
module Atomo.Method
( addMethod
, elemsMap
, emptyMap
, insertMethod
, insertMap
, lookupMap
, memberMap
, noMethods
, nullMap
, toMethods
) where
import Data.List (elemIndices)
import Data.Maybe (isJust)
import qualified Data.IntMap as M
import Atomo.Types
GT = is lower - precision
-- | Order two patterns by match precision, referring to the left side:
-- LT means it is higher-precision (more specific), GT lower-precision.
-- Used to keep method lists sorted most-precise-first.
comparePrecision :: Pattern -> Pattern -> Ordering
comparePrecision (PNamed _ a) (PNamed _ b) =
    comparePrecision a b
comparePrecision (PNamed _ a) b = comparePrecision a b
comparePrecision a (PNamed _ b) = comparePrecision a b
comparePrecision PAny PAny = EQ
comparePrecision PThis PThis = EQ
comparePrecision (PMatch a@(Object {})) (PMatch b@(Object {}))
    -- a delegating to b means a is the more refined object, so a wins.
    | delegatesTo a b = LT
    -- BUGFIX: this guard read `delegatesTo a b` — an unreachable
    -- duplicate of the guard above; the symmetric check was intended.
    | delegatesTo b a = GT
    | otherwise = EQ
comparePrecision (PMatch _) (PMatch _) = EQ
comparePrecision (PList as) (PList bs) =
    comparePrecisions as bs
comparePrecision (PPMKeyword _ as) (PPMKeyword _ bs) =
    comparePrecisions as bs
comparePrecision (PHeadTail ah at) (PHeadTail bh bt) =
    comparePrecisions [ah, at] [bh, bt]
comparePrecision (PMessage (Single { mTarget = at })) (PMessage (Single { mTarget = bt })) =
    comparePrecision at bt
comparePrecision (PMessage (Keyword { mTargets = as })) (PMessage (Keyword { mTargets = bs })) =
    compareHeads as bs
comparePrecision (PObject _) (PObject _) = EQ
-- Cross-constructor cases: fixed precedence ladder.
comparePrecision PAny _ = GT
comparePrecision _ PAny = LT
comparePrecision PThis (PMatch (Object {})) = LT
comparePrecision (PMatch (Object {})) PThis = GT
comparePrecision (PMatch _) _ = LT
comparePrecision _ (PMatch _) = GT
comparePrecision (PExpr a) (PExpr b) = exprPrecision 0 a b
comparePrecision (PExpr _) _ = LT
comparePrecision _ (PExpr _) = GT
comparePrecision (PList _) _ = LT
comparePrecision _ (PList _) = GT
comparePrecision (PPMKeyword _ _) _ = LT
comparePrecision _ (PPMKeyword _ _) = GT
comparePrecision (PHeadTail _ _) _ = LT
comparePrecision _ (PHeadTail _ _) = GT
comparePrecision PThis _ = LT
comparePrecision _ PThis = GT
comparePrecision (PObject _) _ = LT
comparePrecision _ (PObject _) = GT
comparePrecision _ _ = GT
-- | Compare keyword-message target lists: the first position decides
-- unless it is EQ, in which case the remaining positions are consulted.
-- Both lists must be non-empty and of equal length.
compareHeads :: [Pattern] -> [Pattern] -> Ordering
compareHeads ps qs =
    case (ps, qs) of
        ([a], [b]) -> comparePrecision a b
        (a : as, b : bs) ->
            case comparePrecision a b of
                EQ -> compareHeads as bs
                ord -> ord
        _ -> error $ "impossible: compareHeads on " ++ show (ps, qs)
-- | Compare two pattern lists pairwise, aggregating the per-position
-- results via 'comparePrecisionsWith'.
comparePrecisions :: [Pattern] -> [Pattern] -> Ordering
comparePrecisions = comparePrecisionsWith comparePrecision
-- | Compare two lists pairwise with @cmp@ and aggregate: whichever side
-- wins more pairwise comparisons wins overall (count of GT vs count of
-- LT; EQ pairs contribute nothing).
comparePrecisionsWith :: (a -> a -> Ordering) -> [a] -> [a] -> Ordering
comparePrecisionsWith cmp as bs = compare numGT numLT
  where
    results = zipWith cmp as bs
    numGT = length (filter (== GT) results)
    numLT = length (filter (== LT) results)
-- | Does the first object (transitively) delegate to the second?
-- Non-object values delegate to nothing.
delegatesTo :: Value -> Value -> Bool
delegatesTo (Object { oDelegates = ds }) target = any reaches ds
  where
    reaches d = d == target || d `delegatesTo` target
delegatesTo _ _ = False
-- | Precision ordering for expression (quote) patterns.  The Int is the
-- quasiquote nesting depth: unquotes act as wildcards only at depth 0,
-- and 'EQuote' recursion increases the depth.
exprPrecision :: Int -> Expr -> Expr -> Ordering
exprPrecision 0 (EUnquote {}) (EUnquote {}) = EQ
exprPrecision 0 (EUnquote {}) _ = GT
exprPrecision 0 _ (EUnquote {}) = LT
exprPrecision n (EDefine { eExpr = a }) (EDefine { eExpr = b }) =
    exprPrecision n a b
exprPrecision n (ESet { eExpr = a }) (ESet { eExpr = b }) =
    exprPrecision n a b
exprPrecision n (EDispatch { eMessage = am@(Keyword {}) }) (EDispatch { eMessage = bm@(Keyword {}) }) =
    comparePrecisionsWith (exprPrecision n) (mTargets am) (mTargets bm)
exprPrecision n (EDispatch { eMessage = am@(Single {}) }) (EDispatch { eMessage = bm@(Single {}) }) =
    exprPrecision n (mTarget am) (mTarget bm)
exprPrecision n (EBlock { eContents = as }) (EBlock { eContents = bs }) =
    comparePrecisionsWith (exprPrecision n) as bs
exprPrecision n (EList { eContents = as }) (EList { eContents = bs }) =
    comparePrecisionsWith (exprPrecision n) as bs
exprPrecision n (EMacro { eExpr = a }) (EMacro { eExpr = b }) =
    exprPrecision n a b
exprPrecision n (EParticle { eParticle = ap' }) (EParticle { eParticle = bp }) =
    case (ap', bp) of
        (Keyword { mTargets = ames }, Keyword { mTargets = bmes }) ->
            comparePrecisionsWith (exprPrecision n) (firsts ames bmes) (seconds ames bmes)
        _ -> EQ
  where
    -- Pair up only the positions where *both* particles have a target.
    pairs ames bmes = map (\(Just a, Just b) -> (a, b)) $ filter (\(a, b) -> isJust a && isJust b) $ zip ames bmes
    firsts ames = fst . unzip . pairs ames
    -- BUGFIX: this read `fst . unzip . pairs ames`, making both
    -- projections yield the left-hand expressions, so particle targets
    -- were compared against themselves (always EQ).
    seconds ames = snd . unzip . pairs ames
exprPrecision n (EQuote { eExpr = a }) (EQuote { eExpr = b }) =
    exprPrecision (n + 1) a b
exprPrecision _ _ _ = EQ
-- | Insert a method into a MethodMap, keyed by its pattern's ID and
-- kept in precision order among methods sharing that ID.
addMethod :: Method -> MethodMap -> MethodMap
addMethod m mm = M.insertWith merge (mID (mPattern m)) [m] mm
  where
    -- insertWith always receives the singleton we just built as its
    -- first ("new") argument, so this match is total in practice.
    merge [new] existing = insertMethod new existing
-- | Insert a method into a list of existing methods: most precise goes
-- first, and a method with an equivalent pattern replaces the old one.
insertMethod :: Method -> [Method] -> [Method]
insertMethod x = go
  where
    go [] = [x]
    go (y : ys)
        | mPattern x `samePattern` mPattern y = x : ys
        | otherwise =
            case comparePrecision (PMessage (mPattern x)) (PMessage (mPattern y)) of
                -- stop at LT so x lands after all earlier definitions
                LT -> x : y : ys
                -- keep looking while we're EQ or GT
                _ -> y : go ys
-- | Like ==, but ignore optionals: two message patterns are "the same"
-- when their IDs and target pattern(s) agree.
samePattern :: Message Pattern -> Message Pattern -> Bool
samePattern (Single { mID = ai, mTarget = at })
            (Single { mID = bi, mTarget = bt }) =
    ai == bi && at == bt
samePattern (Keyword { mID = ai, mTargets = ats })
            (Keyword { mID = bi, mTargets = bts }) =
    ai == bi && ats == bts
samePattern _ _ = False
-- | Convert a list of slots to a MethodMap, inserting left to right.
toMethods :: [(Message Pattern, Value)] -> MethodMap
toMethods = foldl (\ss (p, v) -> addMethod (Slot p v) ss) emptyMap
-- | A pair of two empty MethodMaps; one for single methods and one for
-- keyword methods.
noMethods :: (MethodMap, MethodMap)
noMethods = (M.empty, M.empty)
-- | An empty MethodMap.
emptyMap :: MethodMap
emptyMap = M.empty
-- | Find methods in a MethodMap by the pattern ID.
lookupMap :: Int -> MethodMap -> Maybe [Method]
lookupMap = M.lookup
-- | Is a MethodMap empty?
nullMap :: MethodMap -> Bool
nullMap = M.null
-- | All of the method lists in a MethodMap.
elemsMap :: MethodMap -> [[Method]]
elemsMap = M.elems
-- | Is a pattern ID present in the map?
memberMap :: Int -> MethodMap -> Bool
memberMap = M.member
-- | Insert a method into a MethodMap, replacing all other methods with
-- the same pattern ID.
insertMap :: Method -> MethodMap -> MethodMap
insertMap m mm = M.insert key [m] mm
  where
    key = mID $ mPattern m
|
e8f10ff5e400d686570be6f2e21c9fbf31cb4e266933688adae3bd51702b097a
|
svenssonjoel/Obsidian
|
CompileIM.hs
|
# LANGUAGE QuasiQuotes #
{-# LANGUAGE PackageImports #-}
# LANGUAGE GeneralizedNewtypeDeriving #
Joel Svensson 2013 .. 2017
Joel Svensson 2013..2017
-}
module Obsidian.CodeGen.CompileIM where
import Language.C.Quote.CUDA hiding (Block)
import qualified Language.C.Quote.OpenCL as CL
import qualified "language-c-quote" Language.C.Syntax as C
import Obsidian.Exp (IExp(..),IBinOp(..),IUnOp(..))
import Obsidian.Types as T
import Obsidian.DimSpec
import Obsidian.CodeGen.Program
import Data.Word
Notes:
2017-04-22: Generate only CUDA
* TODO: Make sure tid always has correct Value
2017-04-22: Generate only CUDA
* TODO: Make sure tid always has correct Value
-}
---------------------------------------------------------------------------
-- Config
---------------------------------------------------------------------------
-- | Static kernel-generation parameters.
data Config = Config { configThreadsPerBlock :: Word32  -- ^ threads launched per block
                     , configSharedMem :: Word32         -- ^ bytes of shared memory to declare
                     }
---------------------------------------------------------------------------
-- compileExp (maybe a bad name)
---------------------------------------------------------------------------
-- | Translate an Obsidian IExp into a C expression via language-c-quote
-- antiquotation.  Literal, index, conditional, binary, unary, call and
-- cast forms are handled; vector literals are not (no CUDA syntax for
-- them), and unknown forms fall through to 'error'.
compileExp :: IExp -> C.Exp
compileExp (IVar name t) = [cexp| $id:name |]
-- TODO: Fix all this!
-- compileExp (IBlockIdx X) = [cexp| $id:("bid")|] -- [cexp| $id:("blockIdx.x") |]
-- compileExp (IBlockIdx Y) = [cexp| $id:("blockIdx.y") |]
-- NOTE(review): the five equations below arrived garbled (stray spaces
-- inside the quasiquotes); they look like de-commented members of the
-- same commented-out family as the surrounding lines — verify against
-- the upstream file before relying on them.
compileExp ( IBlockIdx Z ) = [ cexp| $ id:("blockIdx.z " ) | ]
compileExp ( IThreadIdx X ) = [ cexp| $ id:("threadIdx.x " ) | ]
compileExp ( IThreadIdx Y ) = [ cexp| $ id:("threadIdx.y " ) | ]
compileExp ( IThreadIdx Z ) = [ cexp| $ id:("threadIdx.z " ) | ]
compileExp ( IBlockDim X ) = [ cexp| $ id:("blockDim.x " ) | ]
-- compileExp (IBlockDim Y) = [cexp| $id:("blockDim.y") |]
-- compileExp (IBlockDim Z) = [cexp| $id:("blockDim.z") |]
-- compileExp (IGridDim X) = [cexp| $id:("GridDim.x") |]
-- compileExp (IGridDim Y) = [cexp| $id:("GridDim.y") |]
-- compileExp (IGridDim Z) = [cexp| $id:("GridDim.z") |]
-- Booleans become 0/1 integers.
compileExp (IBool True) = [cexp|1|]
compileExp (IBool False) = [cexp|0|]
-- Scalar literals, widened through Integer for the antiquoters.
compileExp (IInt8 n) = [cexp| $int:(toInteger n) |]
compileExp (IInt16 n) = [cexp| $int:(toInteger n) |]
compileExp (IInt32 n) = [cexp| $int:(toInteger n) |]
compileExp (IInt64 n) = [cexp| $lint:(toInteger n) |]
compileExp (IWord8 n) = [cexp| $uint:(toInteger n) |]
compileExp (IWord16 n) = [cexp| $uint:(toInteger n) |]
compileExp (IWord32 n) = [cexp| $uint:(toInteger n) |]
compileExp (IWord64 n) = [cexp| $ulint:(toInteger n) |]
compileExp (IFloat n) = [cexp| $float:(n) |]
compileExp (IDouble n) = [cexp| $double:(n) |]
-- Implementing these may be a bit awkward
-- given there are no vector literals in cuda.
compileExp (IFloat2 n m) = error "IFloat2 unhandled"
compileExp (IFloat3 n m l) = error "IFloat3 unhandled"
compileExp (IFloat4 n m l k) = error "IFloat4 unhandled"
compileExp (IDouble2 n m) = error "IDouble2 unhandled"
compileExp (IInt8_2 n m) = error "FIXME"
compileExp (IInt8_3 n m k) = error "FIXME"
compileExp (IInt8_4 n m k l) = error "FIXME"
compileExp (IInt16_2 n m ) = error "FIXME"
compileExp (IInt16_3 n m k) = error "FIXME"
compileExp (IInt16_4 n m k l) = error "FIXME"
compileExp (IInt32_2 n m) = error "FIXME"
compileExp (IInt32_3 n m k) = error "FIXME"
compileExp (IInt32_4 n m k l) = error "FIXME"
compileExp (IInt64_2 n m) = error "FIXME"
compileExp (IInt64_3 n m k) = error "FIXME"
compileExp (IInt64_4 n m k l) = error "FIXME"
compileExp (IWord8_2 n m) = error "FIXME"
compileExp (IWord8_3 n m k) = error "FIXME"
compileExp (IWord8_4 n m k l) = error "FIXME"
compileExp (IWord16_2 n m ) = error "FIXME"
compileExp (IWord16_3 n m k) = error "FIXME"
compileExp (IWord16_4 n m k l) = error "FIXME"
compileExp (IWord32_2 n m) = error "FIXME"
compileExp (IWord32_3 n m k) = error "FIXME"
compileExp (IWord32_4 n m k l) = error "FIXME"
compileExp (IWord64_2 n m) = error "FIXME"
compileExp (IWord64_3 n m k) = error "FIXME"
compileExp (IWord64_4 n m k l) = error "FIXME"
-- Only single-subscript indexing is representable.
compileExp (IIndex (i1,[e]) t) = [cexp| $(compileExp i1)[$(compileExp e)] |]
compileExp a@(IIndex (_,_) _) = error $ "compileExp: Malformed index expression " ++ show a
compileExp (ICond e1 e2 e3 t) = [cexp| $(compileExp e1) ? $(compileExp e2) : $(compileExp e3) |]
compileExp (IBinOp op e1 e2 t) = go op
  where
    x = compileExp e1
    y = compileExp e2
    go IAdd = [cexp| $x + $y |]
    go ISub = [cexp| $x - $y |]
    go IMul = [cexp| $x * $y |]
    go IDiv = [cexp| $x / $y |]
    go IFDiv = [cexp| $x / $y |]
    go IMod = [cexp| $x % $y |]
    go IEq = [cexp| $x == $y |]
    go INotEq = [cexp| $x != $y |]
    go ILt = [cexp| $x < $y |]
    go IGt = [cexp| $x > $y |]
    go IGEq = [cexp| $x >= $y |]
    go ILEq = [cexp| $x <= $y |]
    go IAnd = [cexp| $x && $y |]
    go IOr = [cexp| $x || $y |]
    -- Pow picks the single- or double-precision CUDA intrinsic by type.
    go IPow = case t of
                Float -> [cexp|powf($x,$y) |]
                Double -> [cexp|pow($x,$y) |]
                _ -> error $ "IPow applied at wrong type"
    go IBitwiseAnd = [cexp| $x & $y |]
    go IBitwiseOr = [cexp| $x | $y |]
    go IBitwiseXor = [cexp| $x ^ $y |]
    go IShiftL = [cexp| $x << $y |]
    go IShiftR = [cexp| $x >> $y |]
compileExp (IUnOp op e t) = go op
  where
    x = compileExp e
    go IBitwiseNeg = [cexp| ~$x|]
    go INot = [cexp| !$x|]
    -- Vector component accessors (.x/.y/.z/.w).
    go IGetX = [cexp| $x.x|]
    go IGetY = [cexp| $x.y|]
    go IGetZ = [cexp| $x.z|]
    go IGetW = [cexp| $x.w|]
compileExp (IFunCall name es t) = [cexp| $fc |]
  where
    es' = map compileExp es
    fc = [cexp| $id:(name)($args:(es')) |]
compileExp (ICast e t) = [cexp| ($ty:(compileType t)) $e' |]
  where
    e' = compileExp e
compileExp any = error $ show any
-- | Map an Obsidian scalar/vector type to the corresponding CUDA C
-- type.  Scalars use the fixed-width <stdint.h> typedefs; vectors use
-- CUDA's built-in N-component vector types.
compileType :: T.Type -> C.Type
compileType (Int8) = [cty| typename int8_t |]
compileType (Int16) = [cty| typename int16_t |]
compileType (Int32) = [cty| typename int32_t |]
compileType (Int64) = [cty| typename int64_t |]
compileType (Word8) = [cty| typename uint8_t |]
compileType (Word16) = [cty| typename uint16_t |]
compileType (Word32) = [cty| typename uint32_t |]
compileType (Word64) = [cty| typename uint64_t |]
compileType (Float) = [cty| float |]
compileType (Double) = [cty| double |]
-- BUGFIX: Vec2 Float and Vec4 Float were swapped (Vec2 -> float4,
-- Vec4 -> float2); every other VecN mapping below pairs N with the
-- N-component CUDA type.
compileType (Vec2 Float) = [cty| float2|]
compileType (Vec3 Float) = [cty| float3|]
compileType (Vec4 Float) = [cty| float4|]
compileType (Vec2 Double) = [cty| double2|]
-- How does this interplay with my use of uint8_t etc. Here it is char!
compileType (Vec2 Int8) = [cty| char2|]
compileType (Vec3 Int8) = [cty| char3|]
compileType (Vec4 Int8) = [cty| char4|]
compileType (Vec2 Int16) = [cty| short2|]
compileType (Vec3 Int16) = [cty| short3|]
compileType (Vec4 Int16) = [cty| short4|]
compileType (Vec2 Int32) = [cty| int2|]
compileType (Vec3 Int32) = [cty| int3|]
compileType (Vec4 Int32) = [cty| int4|]
compileType (Vec2 Word8) = [cty| uchar2|]
compileType (Vec3 Word8) = [cty| uchar3|]
compileType (Vec4 Word8) = [cty| uchar4|]
compileType (Vec2 Word16) = [cty| ushort2|]
compileType (Vec3 Word16) = [cty| ushort3|]
compileType (Vec4 Word16) = [cty| ushort4|]
compileType (Vec2 Word32) = [cty| uint2|]
compileType (Vec3 Word32) = [cty| uint3|]
compileType (Vec4 Word32) = [cty| uint4|]
-- Qualified/derived types recurse on the element type.
compileType (Shared t) = [cty| __shared__ $ty:(compileType t) |]
compileType (Pointer t) = [cty| $ty:(compileType t)* |]
compileType (Volatile t) = [cty| volatile $ty:(compileType t)|]
compileType t = error $ "compileType: Not implemented " ++ show t
---------------------------------------------------------------------------
-- Statement t to Stm
---------------------------------------------------------------------------
-- | Compile one IM statement to a list of C statements.  Allocation and
-- declaration statements produce no inline code here ('SDeclare' is
-- hoisted to the kernel top by 'declares'; 'SAllocate' is resolved into
-- sbase offsets elsewhere).
compileStm :: Config -> Statement t -> [C.Stm]
compileStm c (SAssign name [] e) =
    [[cstm| $(compileExp name) = $(compileExp e);|]]
compileStm c (SAssign name [ix] e) =
    [[cstm| $(compileExp name)[$(compileExp ix)] = $(compileExp e); |]]
compileStm c (SAtomicOp name ix atop) =
    case atop of
        AtInc -> [[cstm| atomicInc(&$(compileExp name)[$(compileExp ix)],0xFFFFFFFF); |]]
        AtAdd e -> [[cstm| atomicAdd(&$(compileExp name)[$(compileExp ix)],$(compileExp e));|]]
        AtSub e -> [[cstm| atomicSub(&$(compileExp name)[$(compileExp ix)],$(compileExp e));|]]
        AtExch e -> [[cstm| atomicExch(&$(compileExp name)[$(compileExp ix)],$(compileExp e));|]]
compileStm c (SCond be im) = [[cstm| if ($(compileExp be)) { $stms:body } |]]
  where
    -- BUGFIX: this binding was garbled to "( compileIM p c i m )" in the
    -- source, leaving `body` undefined; restored to match the sibling
    -- SSeqFor/SSeqWhile branches below.
    body = compileIM c im
compileStm c (SSeqFor loopVar n im) =
    [[cstm| for (int $id:loopVar = 0; $id:loopVar < $(compileExp n); ++$id:loopVar)
              { $stms:body } |]]
    -- end a sequential for loop with a sync (or begin).
    -- Maybe only if the loop is on block level (that is across all threads)
    -- __syncthreads();} |]]
  where
    body = compileIM c im
-- Just relay to the specific compile function.
compileStm c a@(SForAll lvl n im) = compileForAll c a
compileStm c a@(SDistrPar lvl n im) = compileDistr c a
compileStm c (SSeqWhile b im) =
    [[cstm| while ($(compileExp b)) { $stms:body}|]]
  where
    body = compileIM c im
compileStm c SSynchronize = [[cstm| __syncthreads(); |]]
compileStm _ (SAllocate _ _ _) = []
compileStm _ (SDeclare name t) = []
compileStm _ a = error $ "compileStm: missing case "
---------------------------------------------------------------------------
DistrPar
---------------------------------------------------------------------------
-- | Compile a DistrPar statement: distribute the body over blocks or
-- warps, virtualising when the requested count n exceeds the physical
-- resources (a quotient loop plus a remainder guard).
compileDistr :: Config -> Statement t -> [C.Stm]
compileDistr c (SDistrPar Block n im) = codeQ ++ codeR
  -- New here is BLOCK virtualisation
  where
    cim = compileIM c im -- ++ [[cstm| __syncthreads();|]]
    numBlocks = [cexp| $id:("gridDim.x") |]
    blocksQ = [cexp| $exp:(compileExp n) / $exp:numBlocks|]
    blocksR = [cexp| $exp:(compileExp n) % $exp:numBlocks|]
    codeQ = [[cstm| for (int b = 0; b < $exp:blocksQ; ++b) { $stms:bodyQ }|]]
    bodyQ = [cstm| $id:("bid") = blockIdx.x * $exp:blocksQ + b;|] : cim ++
            [[cstm| bid = blockIdx.x;|],
             [cstm| __syncthreads();|]] -- yes no ?
    codeR = [[cstm| bid = ($exp:numBlocks * $exp:blocksQ) + blockIdx.x;|],
             [cstm| if (blockIdx.x < $exp:blocksR) { $stms:cim }|],
             [cstm| bid = blockIdx.x;|],
             [cstm| __syncthreads();|]] -- yes no ?
-- Can I be absolutely sure that 'n' here is statically known ?
-- I must look over the functions that can potentially create this IM.
-- (BUGFIX: the line above had lost its comment marker, breaking the parse.)
-- Can make a separate case for unknown 'n' but generate worse code.
-- (That is true for all levels)
compileDistr c (SDistrPar Warp (IWord32 n) im) = codeQ ++ codeR
  -- Here the 'im' should be distributed over 'n' warps.
  -- 'im' uses a warpID variable to identify what warp it is.
  -- 'n' may be higher than the actual number of warps we have!
  -- So GPU warp virtualisation is needed.
  where
    cim = compileIM c im
    nWarps = fromIntegral $ configThreadsPerBlock c `div` 32
    numWarps = [cexp| $int:nWarps|]
    (wq, wr) = (n `div` nWarps, n `mod` nWarps)
    warpsQ = [cexp| $int:wq|]
    warpsR = [cexp| $int:wr|]
    codeQ = [[cstm| for (int w = 0; w < $exp:warpsQ; ++w) { $stms:bodyQ } |]]
    bodyQ = [cstm| warpID = (threadIdx.x / 32) * $exp:warpsQ + w;|] : cim ++
            --[cstm| warpID = w * $exp:warpsQ + (threadIdx.x / 32);|] : cim ++
            [[cstm| warpID = threadIdx.x / 32;|]]
    codeR = case (n `mod` nWarps) of
              0 -> []
              n -> [[cstm| warpID = ($exp:numWarps * $exp:warpsQ)+ (threadIdx.x / 32);|],
                    [cstm| if (threadIdx.x / 32 < $exp:warpsR) { $stms:cim } |],
                    [cstm| warpID = threadIdx.x / 32; |],
                    [cstm| __syncthreads();|]]
---------------------------------------------------------------------------
ForAll is compiled differently for different platforms
---------------------------------------------------------------------------
-- | Compile a ForAll over the lanes of a warp or the threads of a
-- block, virtualising (quotient loop plus remainder guard) when the
-- iteration space n exceeds the physical thread count.
compileForAll :: Config -> Statement t -> [C.Stm]
compileForAll c (SForAll Warp (IWord32 n) im) = codeQ ++ codeR
  where
    nt = 32
    q = n `div` nt
    r = n `mod` nt
    cim = compileIM c im
    codeQ =
      case q of
        0 -> []
        1 -> cim
        n -> [[cstm| for ( int vw = 0; vw < $int:q; ++vw) { $stms:body } |],
              [cstm| $id:("warpIx") = threadIdx.x % 32; |]]
              -- [cstm| __syncthreads();|]]
          where
            body = [cstm|$id:("warpIx") = vw*$int:nt + (threadIdx.x % 32); |] : cim
            -- body = [cstm|$id:("warpIx") = (threadIdx.x % 32) * q + vw; |] : cim
            -- (NOTE(review): the alternative above stood de-commented in
            -- the garbled source, duplicating the binding.)
    q32 = q * 32 -- break out because: parseExp: cannot parse 'q*32'
    codeR =
      case r of
        0 -> []
        n -> [[cstm| if ((threadIdx.x % 32) < $int:r) {
                         $id:("warpIx") = $int:(q32) + (threadIdx.x % 32);
                         $stms:cim } |],
              -- [cstm| __syncthreads();|],
              [cstm| $id:("warpIx") = threadIdx.x % 32; |]]
compileForAll c (SForAll Block (IWord32 n) im) = goQ ++ goR
  where
    cim = compileIM c im -- ++ [[cstm| __syncthreads();|]]
    nt = configThreadsPerBlock c
    q = n `quot` nt
    r = n `rem` nt
    -- q is the number of full "passes" needed to cover the iteration
    -- space given we have nt threads.
    goQ =
      case q of
        0 -> []
        -- NOTE(review): garbled, commented-out remnants of an old
        -- monadic single-pass variant stood here in the source:
        --   [cstm|$id:loopVar = threadIdx.x; ...
        --   do
        --     stm <- updateTid [cexp| threadIdx.x |]
        --     return $ [cstm| $id:loopVar = threadIdx.x; |] : cim
        -- The general n-case below also covers q == 1.
        n -> [[cstm| for ( int i = 0; i < $int:q; ++i) { $stms:body } |],
              -- __syncthreads(); } |],
              [cstm| $id:("tid") = threadIdx.x; |]]
              -- [cstm| __syncthreads();|]]
          where
            body = [cstm|$id:("tid") = i*$int:nt + threadIdx.x; |] : cim
    -- r is the number of elements left.
    -- This generates code for when fewer threads are
    -- needed than available. (some threads shut down due to the conditional).
    -- BUGFIX: this binding was swallowed by a garbled comment line in
    -- the source although goR references qnt below; restored to mirror
    -- q32 in the Warp case.
    qnt = q * nt -- break out because: parseExp: cannot parse 'q*nt'
    goR =
      case (r,q) of
        (0,_) -> []
        --(n,0) -> [[cstm| if (threadIdx.x < $int:n) {
        --             $stms:cim } |]]
        (n,m) -> [[cstm| if (threadIdx.x < $int:n) {
                             $id:("tid") = $int:(qnt) + threadIdx.x;
                             $stms:cim } |],
                  [cstm| $id:("tid") = threadIdx.x; |]]
compileForAll c (SForAll Grid n im) = error "compileForAll: Grid" -- cim
-- The grid case is special. May need more thought.
--
-- The problem with this case is that
-- I need to come up with a blocksize (but without any guidance)
-- from the programmer.
-- Though! There is no way the programmer could provide any
-- such info ...
-- where
--   cim = compileIM c im
-- compileForAll PlatformC c (SForAll lvl (IWord32 n) im) = go
--   where
--     body = compileIM PlatformC c im
--     go = [[cstm| for (int i = 0; i < $int:n; ++i) { $stms:body } |]]
---------------------------------------------------------------------------
-- CompileIM to list of Stm
---------------------------------------------------------------------------
-- | Compile a whole IM program (a list of annotated statements) to C
-- statements, discarding the per-statement annotations.
compileIM :: Config -> IMList a -> [C.Stm]
compileIM conf prog = concat [ compileStm conf stm | (stm, _) <- prog ]
---------------------------------------------------------------------------
Generate entire
---------------------------------------------------------------------------
-- | Kernel formal parameters: (name, Obsidian type) pairs.
type Parameters = [(String,T.Type)]
-- | Generate an entire CUDA @extern "C" __global__@ kernel definition
-- from compiled parameters and an IM program.  Shared memory (sbase)
-- and the gid/bid/tid/warp index variables are declared only when the
-- program actually uses them.
compile :: Config -> String -> (Parameters,IMList a) -> C.Definition
compile config kname (params,im)
  = go
  where
    stms = compileIM config im
    ps = compileParams params
    go = [cedecl| extern "C" __global__ void $id:kname($params:ps) {$items:cudabody} |]
    cudabody = (if (configSharedMem config > 0)
                -- then [BlockDecl [cdecl| extern volatile __shared__ typename uint8_t sbase[]; |]]
                then [C.BlockDecl [cdecl| __shared__ typename uint8_t sbase[$uint:(configSharedMem config)] ; |]]
                else []) ++
               --[BlockDecl [cdecl| typename uint32_t tid = threadIdx.x; |]] ++
               --[BlockDecl [cdecl| typename uint32_t warpID = threadIdx.x / 32; |],
               -- BlockDecl [cdecl| typename uint32_t warpIx = threadIdx.x % 32; |]] ++
               --[BlockDecl [cdecl| typename uint32_t bid = blockIdx.x; |]] ++
               -- (BUGFIX: the two lines above stood de-commented and
               -- garbled in the source; they duplicate the conditional
               -- declarations below and could not parse.)
               (if (usesGid im)
                then [C.BlockDecl [cdecl| typename uint32_t gid = blockIdx.x * blockDim.x + threadIdx.x; |]]
                else []) ++
               (if (usesBid im)
                then [C.BlockDecl [cdecl| typename uint32_t bid = blockIdx.x; |]]
                else []) ++
               (if (usesTid im)
                then [C.BlockDecl [cdecl| typename uint32_t tid = threadIdx.x; |]]
                else []) ++
               (if (usesWarps im)
                then [C.BlockDecl [cdecl| typename uint32_t warpID = threadIdx.x / 32; |],
                      C.BlockDecl [cdecl| typename uint32_t warpIx = threadIdx.x % 32; |]]
                else []) ++
               -- All variables used will be unique and can be declared
               -- at the top level
               concatMap declares im ++
               -- Not sure if I am using language.C correctly.
               -- Maybe compileSTM should create BlockStms?
               -- TODO: look how does it.
               map C.BlockStm stms
    cbody = -- add memory allocation
            map C.BlockStm stms
-- Declare variables.
-- | Collect the C declaration for every SDeclare in a statement,
-- recursing into nested bodies, so they can be emitted once at the top
-- of the kernel.
declares :: (Statement t,t) -> [C.BlockItem]
declares (SDeclare name t,_) = [C.BlockDecl [cdecl| $ty:(compileType t) $id:name;|]]
declares (SCond _ im,_) = concatMap declares im
declares (SSeqWhile _ im,_) = concatMap declares im
declares (SForAll _ _ im,_) = concatMap declares im
declares (SDistrPar _ _ im,_) = concatMap declares im
declares (SSeqFor _ _ im,_) = concatMap declares im
declares _ = []
---------------------------------------------------------------------------
-- Parameter lists for functions (kernel head)
---------------------------------------------------------------------------
-- | Render each (name, type) kernel parameter as a C formal parameter.
compileParams :: Parameters -> [C.Param]
compileParams params =
    [ [cparam| $ty:(compileType t) $id:name |] | (name, t) <- params ]
---------------------------------------------------------------------------
-- Compile with shared memory arrays declared at top
---------------------------------------------------------------------------
-- CODE DUPLICATION FOR NOW
-- CODE DUPLICATION FOR NOW
-- | Like 'compile', but additionally declares the given shared-memory
-- arrays as typed pointers into sbase at the top of the kernel.
-- Kept in sync with 'compile' by hand.
compileDeclsTop :: Config -> [(String,((Word32,Word32),T.Type))] -> String -> (Parameters,IMList a) -> C.Definition
compileDeclsTop config toplevelarrs kname (params,im)
  = go
  where
    stms = compileIM config im
    ps = compileParams params
    go = [cedecl| extern "C" __global__ void $id:kname($params:ps) {$items:cudabody} |]
    cudabody = (if (configSharedMem config > 0)
                -- then [BlockDecl [cdecl| extern volatile __shared__ typename uint8_t sbase[]; |]]
                then [C.BlockDecl [cdecl| __shared__ typename uint8_t sbase[$uint:(configSharedMem config)]; |]]
                else []) ++
               --[BlockDecl [cdecl| typename uint32_t tid = threadIdx.x; |]] ++
               --[BlockDecl [cdecl| typename uint32_t warpID = threadIdx.x / 32; |],
               -- BlockDecl [cdecl| typename uint32_t warpIx = threadIdx.x % 32; |]] ++
               --[BlockDecl [cdecl| typename uint32_t bid = blockIdx.x; |]] ++
               -- (BUGFIX: the two lines above stood de-commented and
               -- garbled in the source; they duplicate the conditional
               -- declarations below and could not parse.)
               (if (usesGid im)
                then [C.BlockDecl [cdecl| typename uint32_t gid = blockIdx.x * blockDim.x + threadIdx.x; |]]
                else []) ++
               (if (usesBid im)
                then [C.BlockDecl [cdecl| typename uint32_t bid = blockIdx.x; |]]
                else []) ++
               (if (usesTid im)
                then [C.BlockDecl [cdecl| typename uint32_t tid = threadIdx.x; |]]
                else []) ++
               (if (usesWarps im)
                then [C.BlockDecl [cdecl| typename uint32_t warpID = threadIdx.x / 32; |],
                      C.BlockDecl [cdecl| typename uint32_t warpIx = threadIdx.x % 32; |]]
                else []) ++
               -- declare all arrays used
               concatMap declareArr toplevelarrs ++
               -- All variables used will be unique and can be declared
               -- at the top level
               concatMap declares im ++
               -- Not sure if I am using language.C correctly.
               -- Maybe compileSTM should create BlockStms?
               -- TODO: look how does it.
               map C.BlockStm stms
    cbody = -- add memory allocation
            map C.BlockStm stms
    -- Pointer into the shared-memory arena at the array's byte offset.
    declareArr :: (String, ((Word32,Word32),T.Type)) -> [C.BlockItem]
    declareArr (arr,((_,addr),t)) =
      [C.BlockDecl [cdecl| $ty:(compileType t) $id:arr = ($ty:(compileType t))(sbase + $int:addr);|]]
| null |
https://raw.githubusercontent.com/svenssonjoel/Obsidian/90886a0ef513cdaa58bb63765c230fa193d1ef10/Obsidian/CodeGen/CompileIM.hs
|
haskell
|
# LANGUAGE PackageImports #
-------------------------------------------------------------------------
Config
-------------------------------------------------------------------------
-------------------------------------------------------------------------
compileExp (maybe a bad name)
-------------------------------------------------------------------------
TODO: Fix all this!
compileExp (IBlockIdx X) = [cexp| $id:("bid")|] -- [cexp| $id:("blockIdx.x") |]
compileExp (IBlockIdx Y) = [cexp| $id:("blockIdx.y") |]
compileExp (IBlockDim Y) = [cexp| $id:("blockDim.y") |]
compileExp (IBlockDim Z) = [cexp| $id:("blockDim.z") |]
compileExp (IGridDim X) = [cexp| $id:("GridDim.x") |]
compileExp (IGridDim Y) = [cexp| $id:("GridDim.y") |]
compileExp (IGridDim Z) = [cexp| $id:("GridDim.z") |]
Implementing these may be a bit awkward
given there are no vector literals in cuda.
How does this interplay with my use of uint8_t etc. Here it is char!
-------------------------------------------------------------------------
Statement t to Stm
-------------------------------------------------------------------------
end a sequential for loop with a sync (or begin).
Maybe only if the loop is on block level (that is across all threads)
__syncthreads();} |]]
Just relay to specific compileFunction
-------------------------------------------------------------------------
-------------------------------------------------------------------------
New here is BLOCK virtualisation
++ [[cstm| __syncthreads();|]]
yes no ?
yes no ?
Can I be absolutely sure that 'n' here is statically known ?
Can make a separate case for unknown 'n' but generate worse code.
(That is true for all levels)
Here the 'im' should be distributed over 'n'warps.
'im' uses a warpID variable to identify what warp it is.
'n' may be higher than the actual number of warps we have!
So GPU warp virtualisation is needed.
[cstm| warpID = w * $exp:warpsQ + (threadIdx.x / 32);|] : cim ++
-------------------------------------------------------------------------
-------------------------------------------------------------------------
[cstm| __syncthreads();|]]
break out because: parseExp: cannot parse 'q*32'
[cstm| __syncthreads();|],
++ [[cstm| __syncthreads();|]]
q is the number full "passes" needed to cover the iteration
space given we have nt threads.
do
stm <- updateTid [cexp| threadIdx.x |]
__syncthreads(); } |],
[cstm| __syncthreads();|]]
r is the number of elements left.
This generates code for when fewer threads are
needed than available. (some threads shut down due to the conditional).
(n,0) -> [[cstm| if (threadIdx.x < $int:n) {
$stms:cim } |]]
cim
The problem with this case is that
I need to come up with a blocksize (but without any guidance)
from the programmer.
Though! There is no way the programmer could provide any
such info ...
where
cim = compileIM c im
where
-------------------------------------------------------------------------
CompileIM to list of Stm
-------------------------------------------------------------------------
-------------------------------------------------------------------------
-------------------------------------------------------------------------
then [BlockDecl [cdecl| extern volatile __shared__ typename uint8_t sbase[]; |]]
[BlockDecl [cdecl| typename uint32_t tid = threadIdx.x; |]] ++
[BlockDecl [cdecl| typename uint32_t warpID = threadIdx.x / 32; |],
All variables used will be unique and can be declared
at the top level
Not sure if I am using language.C correctly.
add memory allocation
Declare variables.
-------------------------------------------------------------------------
Parameter lists for functions (kernel head)
-------------------------------------------------------------------------
-------------------------------------------------------------------------
Compile with shared memory arrays declared at top
-------------------------------------------------------------------------
CODE DUPLICATION FOR NOW
then [BlockDecl [cdecl| extern volatile __shared__ typename uint8_t sbase[]; |]]
[BlockDecl [cdecl| typename uint32_t tid = threadIdx.x; |]] ++
[BlockDecl [cdecl| typename uint32_t warpID = threadIdx.x / 32; |],
declare all arrays used
All variables used will be unique and can be declared
at the top level
Not sure if I am using language.C correctly.
add memory allocation
|
# LANGUAGE QuasiQuotes #
# LANGUAGE GeneralizedNewtypeDeriving #
Joel Svensson 2013 .. 2017
Joel Svensson 2013..2017
-}
module Obsidian.CodeGen.CompileIM where
import Language.C.Quote.CUDA hiding (Block)
import qualified Language.C.Quote.OpenCL as CL
import qualified "language-c-quote" Language.C.Syntax as C
import Obsidian.Exp (IExp(..),IBinOp(..),IUnOp(..))
import Obsidian.Types as T
import Obsidian.DimSpec
import Obsidian.CodeGen.Program
import Data.Word
Notes :
2017 - 04 - 22 : Generate only CUDA
* TODO : Make sure tid always has correct Value
2017-04-22: Generate only CUDA
* TODO: Make sure tid always has correct Value
-}
data Config = Config { configThreadsPerBlock :: Word32,
configSharedMem :: Word32}
compileExp :: IExp -> C.Exp
compileExp (IVar name t) = [cexp| $id:name |]
compileExp ( IBlockIdx Z ) = [ cexp| $ id:("blockIdx.z " ) | ]
compileExp ( IThreadIdx X ) = [ cexp| $ id:("threadIdx.x " ) | ]
compileExp ( IThreadIdx Y ) = [ cexp| $ id:("threadIdx.y " ) | ]
compileExp ( IThreadIdx Z ) = [ cexp| $ id:("threadIdx.z " ) | ]
compileExp ( IBlockDim X ) = [ cexp| $ id:("blockDim.x " ) | ]
compileExp (IBool True) = [cexp|1|]
compileExp (IBool False) = [cexp|0|]
compileExp (IInt8 n) = [cexp| $int:(toInteger n) |]
compileExp (IInt16 n) = [cexp| $int:(toInteger n) |]
compileExp (IInt32 n) = [cexp| $int:(toInteger n) |]
compileExp (IInt64 n) = [cexp| $lint:(toInteger n) |]
compileExp (IWord8 n) = [cexp| $uint:(toInteger n) |]
compileExp (IWord16 n) = [cexp| $uint:(toInteger n) |]
compileExp (IWord32 n) = [cexp| $uint:(toInteger n) |]
compileExp (IWord64 n) = [cexp| $ulint:(toInteger n) |]
compileExp (IFloat n) = [cexp| $float:(n) |]
compileExp (IDouble n) = [cexp| $double:(n) |]
compileExp (IFloat2 n m) = error "IFloat2 unhandled"
compileExp (IFloat3 n m l) = error "IFloat3 unhandled"
compileExp (IFloat4 n m l k) = error "IFloat4 unhandled"
compileExp (IDouble2 n m) = error "IDouble2 unhandled"
compileExp (IInt8_2 n m) = error "FIXME"
compileExp (IInt8_3 n m k) = error "FIXME"
compileExp (IInt8_4 n m k l) = error "FIXME"
compileExp (IInt16_2 n m ) = error "FIXME"
compileExp (IInt16_3 n m k) = error "FIXME"
compileExp (IInt16_4 n m k l) = error "FIXME"
compileExp (IInt32_2 n m) = error "FIXME"
compileExp (IInt32_3 n m k) = error "FIXME"
compileExp (IInt32_4 n m k l) = error "FIXME"
compileExp (IInt64_2 n m) = error "FIXME"
compileExp (IInt64_3 n m k) = error "FIXME"
compileExp (IInt64_4 n m k l) = error "FIXME"
compileExp (IWord8_2 n m) = error "FIXME"
compileExp (IWord8_3 n m k) = error "FIXME"
compileExp (IWord8_4 n m k l) = error "FIXME"
compileExp (IWord16_2 n m ) = error "FIXME"
compileExp (IWord16_3 n m k) = error "FIXME"
compileExp (IWord16_4 n m k l) = error "FIXME"
compileExp (IWord32_2 n m) = error "FIXME"
compileExp (IWord32_3 n m k) = error "FIXME"
compileExp (IWord32_4 n m k l) = error "FIXME"
compileExp (IWord64_2 n m) = error "FIXME"
compileExp (IWord64_3 n m k) = error "FIXME"
compileExp (IWord64_4 n m k l) = error "FIXME"
compileExp (IIndex (i1,[e]) t) = [cexp| $(compileExp i1)[$(compileExp e)] |]
compileExp a@(IIndex (_,_) _) = error $ "compileExp: Malformed index expression " ++ show a
compileExp (ICond e1 e2 e3 t) = [cexp| $(compileExp e1) ? $(compileExp e2) : $(compileExp e3) |]
compileExp (IBinOp op e1 e2 t) = go op
where
x = compileExp e1
y = compileExp e2
go IAdd = [cexp| $x + $y |]
go ISub = [cexp| $x - $y |]
go IMul = [cexp| $x * $y |]
go IDiv = [cexp| $x / $y |]
go IFDiv = [cexp| $x / $y |]
go IMod = [cexp| $x % $y |]
go IEq = [cexp| $x == $y |]
go INotEq = [cexp| $x != $y |]
go ILt = [cexp| $x < $y |]
go IGt = [cexp| $x > $y |]
go IGEq = [cexp| $x >= $y |]
go ILEq = [cexp| $x <= $y |]
go IAnd = [cexp| $x && $y |]
go IOr = [cexp| $x || $y |]
go IPow = case t of
Float -> [cexp|powf($x,$y) |]
Double -> [cexp|pow($x,$y) |]
_ -> error $ "IPow applied at wrong type"
go IBitwiseAnd = [cexp| $x & $y |]
go IBitwiseOr = [cexp| $x | $y |]
go IBitwiseXor = [cexp| $x ^ $y |]
go IShiftL = [cexp| $x << $y |]
go IShiftR = [cexp| $x >> $y |]
compileExp (IUnOp op e t) = go op
where
x = compileExp e
go IBitwiseNeg = [cexp| ~$x|]
go INot = [cexp| !$x|]
go IGetX = [cexp| $x.x|]
go IGetY = [cexp| $x.y|]
go IGetZ = [cexp| $x.z|]
go IGetW = [cexp| $x.w|]
compileExp (IFunCall name es t) = [cexp| $fc |]
where
es' = map compileExp es
fc = [cexp| $id:(name)($args:(es')) |]
compileExp (ICast e t) = [cexp| ($ty:(compileType t)) $e' |]
where
e' = compileExp e
compileExp any = error $ show any
compileType :: T.Type -> C.Type
compileType (Int8) = [cty| typename int8_t |]
compileType (Int16) = [cty| typename int16_t |]
compileType (Int32) = [cty| typename int32_t |]
compileType (Int64) = [cty| typename int64_t |]
compileType (Word8) = [cty| typename uint8_t |]
compileType (Word16) = [cty| typename uint16_t |]
compileType (Word32) = [cty| typename uint32_t |]
compileType (Word64) = [cty| typename uint64_t |]
compileType (Float) = [cty| float |]
compileType (Double) = [cty| double |]
compileType (Vec2 Float) = [cty| float4|]
compileType (Vec3 Float) = [cty| float3|]
compileType (Vec4 Float) = [cty| float2|]
compileType (Vec2 Double) = [cty| double2|]
compileType (Vec2 Int8) = [cty| char2|]
compileType (Vec3 Int8) = [cty| char3|]
compileType (Vec4 Int8) = [cty| char4|]
compileType (Vec2 Int16) = [cty| short2|]
compileType (Vec3 Int16) = [cty| short3|]
compileType (Vec4 Int16) = [cty| short4|]
compileType (Vec2 Int32) = [cty| int2|]
compileType (Vec3 Int32) = [cty| int3|]
compileType (Vec4 Int32) = [cty| int4|]
compileType (Vec2 Word8) = [cty| uchar2|]
compileType (Vec3 Word8) = [cty| uchar3|]
compileType (Vec4 Word8) = [cty| uchar4|]
compileType (Vec2 Word16) = [cty| ushort2|]
compileType (Vec3 Word16) = [cty| ushort3|]
compileType (Vec4 Word16) = [cty| ushort4|]
compileType (Vec2 Word32) = [cty| uint2|]
compileType (Vec3 Word32) = [cty| uint3|]
compileType (Vec4 Word32) = [cty| uint4|]
compileType (Shared t) = [cty| __shared__ $ty:(compileType t) |]
compileType (Pointer t) = [cty| $ty:(compileType t)* |]
compileType (Volatile t) = [cty| volatile $ty:(compileType t)|]
compileType t = error $ "compileType: Not implemented " ++ show t
compileStm :: Config -> Statement t -> [C.Stm]
compileStm c (SAssign name [] e) =
[[cstm| $(compileExp name) = $(compileExp e);|]]
compileStm c (SAssign name [ix] e) =
[[cstm| $(compileExp name)[$(compileExp ix)] = $(compileExp e); |]]
compileStm c (SAtomicOp name ix atop) =
case atop of
AtInc -> [[cstm| atomicInc(&$(compileExp name)[$(compileExp ix)],0xFFFFFFFF); |]]
AtAdd e -> [[cstm| atomicAdd(&$(compileExp name)[$(compileExp ix)],$(compileExp e));|]]
AtSub e -> [[cstm| atomicSub(&$(compileExp name)[$(compileExp ix)],$(compileExp e));|]]
AtExch e -> [[cstm| atomicExch(&$(compileExp name)[$(compileExp ix)],$(compileExp e));|]]
compileStm c (SCond be im) = [[cstm| if ($(compileExp be)) { $stms:body } |]]
where
( compileIM p c i m )
compileStm c (SSeqFor loopVar n im) =
[[cstm| for (int $id:loopVar = 0; $id:loopVar < $(compileExp n); ++$id:loopVar)
{ $stms:body } |]]
where
body = compileIM c im
compileStm c a@(SForAll lvl n im) = compileForAll c a
compileStm c a@(SDistrPar lvl n im) = compileDistr c a
compileStm c (SSeqWhile b im) =
[[cstm| while ($(compileExp b)) { $stms:body}|]]
where
body = compileIM c im
compileStm c SSynchronize = [[cstm| __syncthreads(); |]]
compileStm _ (SAllocate _ _ _) = []
compileStm _ (SDeclare name t) = []
compileStm _ a = error $ "compileStm: missing case "
DistrPar
compileDistr :: Config -> Statement t -> [C.Stm]
compileDistr c (SDistrPar Block n im) = codeQ ++ codeR
where
numBlocks = [cexp| $id:("gridDim.x") |]
blocksQ = [cexp| $exp:(compileExp n) / $exp:numBlocks|]
blocksR = [cexp| $exp:(compileExp n) % $exp:numBlocks|]
codeQ = [[cstm| for (int b = 0; b < $exp:blocksQ; ++b) { $stms:bodyQ }|]]
bodyQ = [cstm| $id:("bid") = blockIdx.x * $exp:blocksQ + b;|] : cim ++
[[cstm| bid = blockIdx.x;|],
codeR = [[cstm| bid = ($exp:numBlocks * $exp:blocksQ) + blockIdx.x;|],
[cstm| if (blockIdx.x < $exp:blocksR) { $stms:cim }|],
[cstm| bid = blockIdx.x;|],
I must look over the functions that can potentially create this IM .
compileDistr c (SDistrPar Warp (IWord32 n) im) = codeQ ++ codeR
where
cim = compileIM c im
nWarps = fromIntegral $ configThreadsPerBlock c `div` 32
numWarps = [cexp| $int:nWarps|]
(wq, wr) = (n `div` nWarps, n `mod` nWarps)
warpsQ = [cexp| $int:wq|]
warpsR = [cexp| $int:wr|]
codeQ = [[cstm| for (int w = 0; w < $exp:warpsQ; ++w) { $stms:bodyQ } |]]
bodyQ = [cstm| warpID = (threadIdx.x / 32) * $exp:warpsQ + w;|] : cim ++
[[cstm| warpID = threadIdx.x / 32;|]]
codeR = case (n `mod` nWarps) of
0 -> []
n -> [[cstm| warpID = ($exp:numWarps * $exp:warpsQ)+ (threadIdx.x / 32);|],
[cstm| if (threadIdx.x / 32 < $exp:warpsR) { $stms:cim } |],
[cstm| warpID = threadIdx.x / 32; |],
[cstm| __syncthreads();|]]
ForAll is compiled differently for different platforms
compileForAll :: Config -> Statement t -> [C.Stm]
compileForAll c (SForAll Warp (IWord32 n) im) = codeQ ++ codeR
where
nt = 32
q = n `div` nt
r = n `mod` nt
cim = compileIM c im
codeQ =
case q of
0 -> []
1 -> cim
n -> [[cstm| for ( int vw = 0; vw < $int:q; ++vw) { $stms:body } |],
[cstm| $id:("warpIx") = threadIdx.x % 32; |]]
where
body = [cstm|$id:("warpIx") = vw*$int:nt + (threadIdx.x % 32); |] : cim
body = [ cstm|$id:("warpIx " ) = ( threadIdx.x % 32 ) * q + vw ; | ] : cim
codeR =
case r of
0 -> []
n -> [[cstm| if ((threadIdx.x % 32) < $int:r) {
$id:("warpIx") = $int:(q32) + (threadIdx.x % 32);
$stms:cim } |],
[cstm| $id:("warpIx") = threadIdx.x % 32; |]]
compileForAll c (SForAll Block (IWord32 n) im) = goQ ++ goR
where
nt = configThreadsPerBlock c
q = n `quot` nt
r = n `rem` nt
goQ =
case q of
0 -> []
[ cstm|$id : loopVar = threadIdx.x ;
return $ [ cstm| $ id : loopVar = threadIdx.x ; | ] : cim
n -> [[cstm| for ( int i = 0; i < $int:q; ++i) { $stms:body } |],
[cstm| $id:("tid") = threadIdx.x; |]]
where
body = [cstm|$id:("tid") = i*$int:nt + threadIdx.x; |] : cim
break out because : parseExp : can not parse ' '
goR =
case (r,q) of
(0,_) -> []
(n,m) -> [[cstm| if (threadIdx.x < $int:n) {
$id:("tid") = $int:(qnt) + threadIdx.x;
$stms:cim } |],
[cstm| $id:("tid") = threadIdx.x; |]]
The grid case is special . May need more thought
compileForAll PlatformC c ( SForAll lvl ( IWord32 n ) i m ) = go
body = compileIM PlatformC c i m
go = [ [ cstm| for ( int i = 0 ; i < $ int : n ; + + i ) { $ stms : body } | ] ]
compileIM :: Config -> IMList a -> [C.Stm]
compileIM conf im = concatMap ((compileStm conf) . fst) im
Generate entire
type Parameters = [(String,T.Type)]
compile :: Config -> String -> (Parameters,IMList a) -> C.Definition
compile config kname (params,im)
= go
where
stms = compileIM config im
ps = compileParams params
go = [cedecl| extern "C" __global__ void $id:kname($params:ps) {$items:cudabody} |]
cudabody = (if (configSharedMem config > 0)
then [C.BlockDecl [cdecl| __shared__ typename uint8_t sbase[$uint:(configSharedMem config)] ; |]]
else []) ++
BlockDecl [ cdecl| typename uint32_t warpIx = threadIdx.x % 32 ; | ] ] + +
[ BlockDecl [ cdecl| typename uint32_t bid = blockIdx.x ; | ] ] + +
(if (usesGid im)
then [C.BlockDecl [cdecl| typename uint32_t gid = blockIdx.x * blockDim.x + threadIdx.x; |]]
else []) ++
(if (usesBid im)
then [C.BlockDecl [cdecl| typename uint32_t bid = blockIdx.x; |]]
else []) ++
(if (usesTid im)
then [C.BlockDecl [cdecl| typename uint32_t tid = threadIdx.x; |]]
else []) ++
(if (usesWarps im)
then [C.BlockDecl [cdecl| typename uint32_t warpID = threadIdx.x / 32; |],
C.BlockDecl [cdecl| typename uint32_t warpIx = threadIdx.x % 32; |]]
else []) ++
concatMap declares im ++
Maybe compileSTM should create BlockStms ?
TODO : look how does it .
map C.BlockStm stms
map C.BlockStm stms
declares :: (Statement t,t) -> [C.BlockItem]
declares (SDeclare name t,_) = [C.BlockDecl [cdecl| $ty:(compileType t) $id:name;|]]
declares (SCond _ im,_) = concatMap declares im
declares (SSeqWhile _ im,_) = concatMap declares im
declares (SForAll _ _ im,_) = concatMap declares im
declares (SDistrPar _ _ im,_) = concatMap declares im
declares (SSeqFor _ _ im,_) = concatMap declares im
declares _ = []
compileParams :: Parameters -> [C.Param]
compileParams = map go
where
go (name,t) = [cparam| $ty:(compileType t) $id:name |]
compileDeclsTop :: Config -> [(String,((Word32,Word32),T.Type))] -> String -> (Parameters,IMList a) -> C.Definition
compileDeclsTop config toplevelarrs kname (params,im)
= go
where
stms = compileIM config im
ps = compileParams params
go = [cedecl| extern "C" __global__ void $id:kname($params:ps) {$items:cudabody} |]
cudabody = (if (configSharedMem config > 0)
then [C.BlockDecl [cdecl| __shared__ typename uint8_t sbase[$uint:(configSharedMem config)]; |]]
else []) ++
BlockDecl [ cdecl| typename uint32_t warpIx = threadIdx.x % 32 ; | ] ] + +
[ BlockDecl [ cdecl| typename uint32_t bid = blockIdx.x ; | ] ] + +
(if (usesGid im)
then [C.BlockDecl [cdecl| typename uint32_t gid = blockIdx.x * blockDim.x + threadIdx.x; |]]
else []) ++
(if (usesBid im)
then [C.BlockDecl [cdecl| typename uint32_t bid = blockIdx.x; |]]
else []) ++
(if (usesTid im)
then [C.BlockDecl [cdecl| typename uint32_t tid = threadIdx.x; |]]
else []) ++
(if (usesWarps im)
then [C.BlockDecl [cdecl| typename uint32_t warpID = threadIdx.x / 32; |],
C.BlockDecl [cdecl| typename uint32_t warpIx = threadIdx.x % 32; |]]
else []) ++
concatMap declareArr toplevelarrs ++
concatMap declares im ++
Maybe compileSTM should create BlockStms ?
TODO : look how does it .
map C.BlockStm stms
map C.BlockStm stms
declareArr :: (String, ((Word32,Word32),T.Type)) -> [C.BlockItem]
declareArr (arr,((_,addr),t)) =
[C.BlockDecl [cdecl| $ty:(compileType t) $id:arr = ($ty:(compileType t))(sbase + $int:addr);|]]
|
7a8f8d48c0d493d8e3e52db099d5c7864fc83aaeb2adfa7c825cd8eb24a80104
|
racket/typed-racket
|
predicate.rkt
|
#;
(exn-pred #rx"could not be converted")
#lang typed/racket/optional
(define-predicate p? (All (A) (Listof A)))
| null |
https://raw.githubusercontent.com/racket/typed-racket/1dde78d165472d67ae682b68622d2b7ee3e15e1e/typed-racket-test/fail/optional/predicate.rkt
|
racket
|
(exn-pred #rx"could not be converted")
#lang typed/racket/optional
(define-predicate p? (All (A) (Listof A)))
|
|
3dd046deddd8672312f418dc768735b1b9b6704e2ab86fd3ba3bd0ff36c2d022
|
TyOverby/mono
|
private_ssl.real.ml
|
open Core
open Async
open Async_ssl
let verify_certificate connection =
match Ssl.Connection.peer_certificate connection with
| None -> return false
| Some (Error _) -> return false
| Some (Ok _) -> return true
let teardown_connection r w =
Writer.close ~force_close:(Clock.after (sec 30.)) w >>= fun () ->
Reader.close r
One needs to be careful around Async Readers and Writers that share the same underyling
file descriptor , which is something that happens when they 're used for sockets .
Closing the Reader before the Writer will cause the Writer to throw and complain about
its underlying file descriptor being closed . This is why instead of using Reader.pipe
directly below , we write out an equivalent version which will first close the Writer
before closing the Reader once the input pipe is fully consumed .
Additionally , [ Writer.pipe ] will not close the writer if the pipe is closed , so in
order to avoid leaking file descriptors , we allow the pipe 30 seconds to flush before
closing the writer .
file descriptor, which is something that happens when they're used for sockets.
Closing the Reader before the Writer will cause the Writer to throw and complain about
its underlying file descriptor being closed. This is why instead of using Reader.pipe
directly below, we write out an equivalent version which will first close the Writer
before closing the Reader once the input pipe is fully consumed.
Additionally, [Writer.pipe] will not close the writer if the pipe is closed, so in
order to avoid leaking file descriptors, we allow the pipe 30 seconds to flush before
closing the writer. *)
let reader_writer_pipes r w =
let reader_pipe_r, reader_pipe_w = Pipe.create () in
let writer_pipe = Writer.pipe w in
upon (Reader.transfer r reader_pipe_w) (fun () ->
teardown_connection r w >>> fun () -> Pipe.close reader_pipe_w);
upon (Pipe.closed writer_pipe) (fun () ->
Deferred.choose
[
Deferred.choice (Clock.after (sec 30.)) (fun () -> ());
Deferred.choice (Pipe.downstream_flushed writer_pipe)
(fun (_ : Pipe.Flushed_result.t) -> ());
]
>>> fun () -> don't_wait_for (teardown_connection r w));
(reader_pipe_r, writer_pipe)
(* [Reader.of_pipe] will not close the pipe when the returned [Reader] is closed, so we
manually do that ourselves.
[Writer.of_pipe] will create a writer that will raise once the pipe is closed, so we
set [raise_when_consumer_leaves] to false. *)
let reader_writer_of_pipes app_rd app_wr =
Reader.of_pipe (Info.of_string "async_conduit_ssl_reader") app_rd
>>= fun app_reader ->
upon (Reader.close_finished app_reader) (fun () -> Pipe.close_read app_rd);
Writer.of_pipe (Info.of_string "async_conduit_ssl_writer") app_wr
>>| fun (app_writer, _) ->
Writer.set_raise_when_consumer_leaves app_writer false;
(app_reader, app_writer)
module V1 = struct
module Ssl = struct
module Config = struct
type t = {
version : Ssl.Version.t option;
name : string option;
ca_file : string option;
ca_path : string option;
session : (Ssl.Session.t[@sexp.opaque]) option;
verify : (Ssl.Connection.t -> bool Deferred.t) option;
}
[@@deriving sexp]
let verify_certificate = verify_certificate
let create ?version ?name ?ca_file ?ca_path ?session ?verify () =
{ version; name; ca_file; ca_path; session; verify }
end
let connect cfg r w =
let { Config.version; name; ca_file; ca_path; session; verify } = cfg in
let net_to_ssl, ssl_to_net = reader_writer_pipes r w in
let app_to_ssl, app_wr = Pipe.create () in
let app_rd, ssl_to_app = Pipe.create () in
let verify_connection =
match verify with None -> Fn.const (return true) | Some f -> f
in
Ssl.client ?version ?name ?ca_file ?ca_path ?session ~app_to_ssl
~ssl_to_app ~net_to_ssl ~ssl_to_net ()
>>= function
| Error error -> teardown_connection r w >>= fun () -> Error.raise error
| Ok conn -> (
verify_connection conn >>= function
| false ->
teardown_connection r w >>= fun () ->
failwith "Connection verification failed."
| true ->
reader_writer_of_pipes app_rd app_wr
>>| fun (app_reader, app_writer) -> (app_reader, app_writer))
let listen ?(version = Ssl.Version.Tlsv1_2) ?ca_file ?ca_path ~crt_file
~key_file r w =
let net_to_ssl, ssl_to_net = reader_writer_pipes r w in
let app_to_ssl, app_wr = Pipe.create () in
let app_rd, ssl_to_app = Pipe.create () in
Ssl.server ?ca_file ?ca_path ~version ~crt_file ~key_file ~app_to_ssl
~ssl_to_app ~net_to_ssl ~ssl_to_net ()
>>= function
| Error error -> teardown_connection r w >>= fun () -> Error.raise error
| Ok _ ->
reader_writer_of_pipes app_rd app_wr
>>| fun (app_reader, app_writer) -> (app_reader, app_writer)
type session = (Ssl.Session.t[@sexp.opaque]) [@@deriving sexp]
type version = Ssl.Version.t [@@deriving sexp]
type connection = (Ssl.Connection.t[@sexp.opaque]) [@@deriving sexp]
end
end
module V2 = struct
module Ssl = struct
type allowed_ciphers = [ `Only of string list | `Openssl_default | `Secure ]
[@@deriving sexp]
module Config = struct
type t = {
version : Ssl.Version.t option;
options : Ssl.Opt.t list option;
name : string option;
hostname : string option;
allowed_ciphers : allowed_ciphers option;
ca_file : string option;
ca_path : string option;
crt_file : string option;
key_file : string option;
session : (Ssl.Session.t[@sexp.opaque]) option;
verify_modes : (Verify_mode.t[@sexp.opaque]) list option;
verify : (Ssl.Connection.t -> bool Deferred.t) option;
}
[@@deriving sexp_of]
let verify_certificate = verify_certificate
let create ?version ?options ?name ?hostname ?allowed_ciphers ?ca_file
?ca_path ?crt_file ?key_file ?session ?verify_modes ?verify () =
{
version;
options;
name;
hostname;
allowed_ciphers;
ca_file;
ca_path;
crt_file;
key_file;
session;
verify_modes;
verify;
}
end
let connect ?(cfg = Config.create ()) r w =
let {
Config.version;
options;
name;
hostname;
allowed_ciphers;
ca_file;
ca_path;
crt_file;
key_file;
session;
verify_modes;
verify;
} =
cfg
in
let net_to_ssl, ssl_to_net = reader_writer_pipes r w in
let app_to_ssl, app_wr = Pipe.create () in
let app_rd, ssl_to_app = Pipe.create () in
let verify_connection =
match verify with None -> Fn.const (return true) | Some f -> f
in
Ssl.client ?version ?options ?name ?hostname ?allowed_ciphers ?ca_file
?ca_path ?crt_file ?key_file ?session ?verify_modes ~app_to_ssl
~ssl_to_app ~net_to_ssl ~ssl_to_net ()
>>= function
| Error error -> teardown_connection r w >>= fun () -> Error.raise error
| Ok conn -> (
verify_connection conn >>= function
| false ->
teardown_connection r w >>= fun () ->
failwith "Connection verification failed."
| true ->
reader_writer_of_pipes app_rd app_wr
>>| fun (app_reader, app_writer) -> (app_reader, app_writer))
let listen
{
Config.version;
options;
name;
allowed_ciphers;
ca_file;
ca_path;
crt_file;
key_file;
verify_modes;
_;
} r w =
let crt_file, key_file =
match (crt_file, key_file) with
| Some crt_file, Some key_file -> (crt_file, key_file)
| _ ->
invalid_arg
"Conduit_async_ssl.ssl_listen: crt_file and key_file must be \
specified in cfg."
in
let net_to_ssl, ssl_to_net = reader_writer_pipes r w in
let app_to_ssl, app_wr = Pipe.create () in
let app_rd, ssl_to_app = Pipe.create () in
Ssl.server ?version ?options ?name ?allowed_ciphers ?ca_file ?ca_path
~crt_file ~key_file ?verify_modes ~app_to_ssl ~ssl_to_app ~net_to_ssl
~ssl_to_net ()
>>= function
| Error error -> teardown_connection r w >>= fun () -> Error.raise error
| Ok _ ->
reader_writer_of_pipes app_rd app_wr
>>| fun (app_reader, app_writer) -> (app_reader, app_writer)
type verify_mode = Ssl.Verify_mode.t [@@deriving sexp_of]
type session = (Ssl.Session.t[@sexp.opaque]) [@@deriving sexp_of]
type version = Ssl.Version.t [@@deriving sexp]
type connection = Ssl.Connection.t [@@deriving sexp_of]
type opt = Ssl.Opt.t [@@deriving sexp]
end
end
| null |
https://raw.githubusercontent.com/TyOverby/mono/8d6b3484d5db63f2f5472c7367986ea30290764d/vendor/mirage-ocaml-conduit/src/conduit-async/private_ssl.real.ml
|
ocaml
|
[Reader.of_pipe] will not close the pipe when the returned [Reader] is closed, so we
manually do that ourselves.
[Writer.of_pipe] will create a writer that will raise once the pipe is closed, so we
set [raise_when_consumer_leaves] to false.
|
open Core
open Async
open Async_ssl
let verify_certificate connection =
match Ssl.Connection.peer_certificate connection with
| None -> return false
| Some (Error _) -> return false
| Some (Ok _) -> return true
let teardown_connection r w =
Writer.close ~force_close:(Clock.after (sec 30.)) w >>= fun () ->
Reader.close r
One needs to be careful around Async Readers and Writers that share the same underyling
file descriptor , which is something that happens when they 're used for sockets .
Closing the Reader before the Writer will cause the Writer to throw and complain about
its underlying file descriptor being closed . This is why instead of using Reader.pipe
directly below , we write out an equivalent version which will first close the Writer
before closing the Reader once the input pipe is fully consumed .
Additionally , [ Writer.pipe ] will not close the writer if the pipe is closed , so in
order to avoid leaking file descriptors , we allow the pipe 30 seconds to flush before
closing the writer .
file descriptor, which is something that happens when they're used for sockets.
Closing the Reader before the Writer will cause the Writer to throw and complain about
its underlying file descriptor being closed. This is why instead of using Reader.pipe
directly below, we write out an equivalent version which will first close the Writer
before closing the Reader once the input pipe is fully consumed.
Additionally, [Writer.pipe] will not close the writer if the pipe is closed, so in
order to avoid leaking file descriptors, we allow the pipe 30 seconds to flush before
closing the writer. *)
let reader_writer_pipes r w =
let reader_pipe_r, reader_pipe_w = Pipe.create () in
let writer_pipe = Writer.pipe w in
upon (Reader.transfer r reader_pipe_w) (fun () ->
teardown_connection r w >>> fun () -> Pipe.close reader_pipe_w);
upon (Pipe.closed writer_pipe) (fun () ->
Deferred.choose
[
Deferred.choice (Clock.after (sec 30.)) (fun () -> ());
Deferred.choice (Pipe.downstream_flushed writer_pipe)
(fun (_ : Pipe.Flushed_result.t) -> ());
]
>>> fun () -> don't_wait_for (teardown_connection r w));
(reader_pipe_r, writer_pipe)
let reader_writer_of_pipes app_rd app_wr =
Reader.of_pipe (Info.of_string "async_conduit_ssl_reader") app_rd
>>= fun app_reader ->
upon (Reader.close_finished app_reader) (fun () -> Pipe.close_read app_rd);
Writer.of_pipe (Info.of_string "async_conduit_ssl_writer") app_wr
>>| fun (app_writer, _) ->
Writer.set_raise_when_consumer_leaves app_writer false;
(app_reader, app_writer)
module V1 = struct
module Ssl = struct
module Config = struct
type t = {
version : Ssl.Version.t option;
name : string option;
ca_file : string option;
ca_path : string option;
session : (Ssl.Session.t[@sexp.opaque]) option;
verify : (Ssl.Connection.t -> bool Deferred.t) option;
}
[@@deriving sexp]
let verify_certificate = verify_certificate
let create ?version ?name ?ca_file ?ca_path ?session ?verify () =
{ version; name; ca_file; ca_path; session; verify }
end
let connect cfg r w =
let { Config.version; name; ca_file; ca_path; session; verify } = cfg in
let net_to_ssl, ssl_to_net = reader_writer_pipes r w in
let app_to_ssl, app_wr = Pipe.create () in
let app_rd, ssl_to_app = Pipe.create () in
let verify_connection =
match verify with None -> Fn.const (return true) | Some f -> f
in
Ssl.client ?version ?name ?ca_file ?ca_path ?session ~app_to_ssl
~ssl_to_app ~net_to_ssl ~ssl_to_net ()
>>= function
| Error error -> teardown_connection r w >>= fun () -> Error.raise error
| Ok conn -> (
verify_connection conn >>= function
| false ->
teardown_connection r w >>= fun () ->
failwith "Connection verification failed."
| true ->
reader_writer_of_pipes app_rd app_wr
>>| fun (app_reader, app_writer) -> (app_reader, app_writer))
let listen ?(version = Ssl.Version.Tlsv1_2) ?ca_file ?ca_path ~crt_file
~key_file r w =
let net_to_ssl, ssl_to_net = reader_writer_pipes r w in
let app_to_ssl, app_wr = Pipe.create () in
let app_rd, ssl_to_app = Pipe.create () in
Ssl.server ?ca_file ?ca_path ~version ~crt_file ~key_file ~app_to_ssl
~ssl_to_app ~net_to_ssl ~ssl_to_net ()
>>= function
| Error error -> teardown_connection r w >>= fun () -> Error.raise error
| Ok _ ->
reader_writer_of_pipes app_rd app_wr
>>| fun (app_reader, app_writer) -> (app_reader, app_writer)
type session = (Ssl.Session.t[@sexp.opaque]) [@@deriving sexp]
type version = Ssl.Version.t [@@deriving sexp]
type connection = (Ssl.Connection.t[@sexp.opaque]) [@@deriving sexp]
end
end
module V2 = struct
module Ssl = struct
type allowed_ciphers = [ `Only of string list | `Openssl_default | `Secure ]
[@@deriving sexp]
module Config = struct
type t = {
version : Ssl.Version.t option;
options : Ssl.Opt.t list option;
name : string option;
hostname : string option;
allowed_ciphers : allowed_ciphers option;
ca_file : string option;
ca_path : string option;
crt_file : string option;
key_file : string option;
session : (Ssl.Session.t[@sexp.opaque]) option;
verify_modes : (Verify_mode.t[@sexp.opaque]) list option;
verify : (Ssl.Connection.t -> bool Deferred.t) option;
}
[@@deriving sexp_of]
let verify_certificate = verify_certificate
let create ?version ?options ?name ?hostname ?allowed_ciphers ?ca_file
?ca_path ?crt_file ?key_file ?session ?verify_modes ?verify () =
{
version;
options;
name;
hostname;
allowed_ciphers;
ca_file;
ca_path;
crt_file;
key_file;
session;
verify_modes;
verify;
}
end
let connect ?(cfg = Config.create ()) r w =
let {
Config.version;
options;
name;
hostname;
allowed_ciphers;
ca_file;
ca_path;
crt_file;
key_file;
session;
verify_modes;
verify;
} =
cfg
in
let net_to_ssl, ssl_to_net = reader_writer_pipes r w in
let app_to_ssl, app_wr = Pipe.create () in
let app_rd, ssl_to_app = Pipe.create () in
let verify_connection =
match verify with None -> Fn.const (return true) | Some f -> f
in
Ssl.client ?version ?options ?name ?hostname ?allowed_ciphers ?ca_file
?ca_path ?crt_file ?key_file ?session ?verify_modes ~app_to_ssl
~ssl_to_app ~net_to_ssl ~ssl_to_net ()
>>= function
| Error error -> teardown_connection r w >>= fun () -> Error.raise error
| Ok conn -> (
verify_connection conn >>= function
| false ->
teardown_connection r w >>= fun () ->
failwith "Connection verification failed."
| true ->
reader_writer_of_pipes app_rd app_wr
>>| fun (app_reader, app_writer) -> (app_reader, app_writer))
let listen
{
Config.version;
options;
name;
allowed_ciphers;
ca_file;
ca_path;
crt_file;
key_file;
verify_modes;
_;
} r w =
let crt_file, key_file =
match (crt_file, key_file) with
| Some crt_file, Some key_file -> (crt_file, key_file)
| _ ->
invalid_arg
"Conduit_async_ssl.ssl_listen: crt_file and key_file must be \
specified in cfg."
in
let net_to_ssl, ssl_to_net = reader_writer_pipes r w in
let app_to_ssl, app_wr = Pipe.create () in
let app_rd, ssl_to_app = Pipe.create () in
Ssl.server ?version ?options ?name ?allowed_ciphers ?ca_file ?ca_path
~crt_file ~key_file ?verify_modes ~app_to_ssl ~ssl_to_app ~net_to_ssl
~ssl_to_net ()
>>= function
| Error error -> teardown_connection r w >>= fun () -> Error.raise error
| Ok _ ->
reader_writer_of_pipes app_rd app_wr
>>| fun (app_reader, app_writer) -> (app_reader, app_writer)
type verify_mode = Ssl.Verify_mode.t [@@deriving sexp_of]
type session = (Ssl.Session.t[@sexp.opaque]) [@@deriving sexp_of]
type version = Ssl.Version.t [@@deriving sexp]
type connection = Ssl.Connection.t [@@deriving sexp_of]
type opt = Ssl.Opt.t [@@deriving sexp]
end
end
|
f59dce790c970573e2a02c13956bbf0abfc250281ba64df2ed4f0ac137d74c56
|
spurious/sagittarius-scheme-mirror
|
all-tests.scm
|
;; all tests for extensions
(add-load-path ".")
(add-load-path "../lib")
(add-load-path "../sitelib")
(cond-expand
(sagittarius.os.windows
(add-dynamic-load-path "../build/modules"))
(else
(add-dynamic-load-path "../build")))
(import (rnrs) (util file) (core errors) (srfi :39 parameters)
(srfi :64 testing))
(define-constant resource-file ".sagittarius-exttestrc")
(define search-path #f)
(if (file-exists? resource-file)
(call-with-input-file resource-file
(lambda (p)
(let ((line (get-line p)))
(unless (eof-object? line)
(set! search-path line)
(do ((path (get-line p) (get-line p)))
((eof-object? path))
(add-load-path path))))))
to avoid to use installed time library . for ext / thread
(add-load-path "./time"))
(define (test-on-test-end-detail runner)
(define (%test-write-result1 pair port)
(display " " port)
(display (car pair) port)
(display ": " port)
(write (cdr pair) port)
(newline port))
(let ((log (test-runner-aux-value runner))
(kind (test-result-ref runner 'result-kind)))
(when (memq kind '(fail))
(let* ((results (test-result-alist runner))
(source-file (assq 'source-file results))
(source-line (assq 'source-line results))
(test-name (assq 'test-name results)))
(when (or source-file source-line)
(if source-file (display (cdr source-file)))
(display ":")
(if source-line (display (cdr source-line)))
(display ":"))
(display (if (eq? kind 'xpass) "XPASS" "FAIL"))
(when test-name
(display " ")(display (cdr test-name)))
(newline))
(let ((expected (test-result-ref runner 'expected-value))
(actual (test-result-ref runner 'actual-value)))
(display #\tab)(display "expected value: ")(display expected)(newline)
(display #\tab)(display " actual value: ")(display actual)(newline)))
(when (output-port? log)
(display "Test end:" log)
(newline log)
(let loop ((list (test-result-alist runner)))
(if (pair? list)
(let ((pair (car list)))
;; Write out properties not written out by on-test-begin.
(if (not (memq (car pair)
'(test-name source-file source-line source-form)))
(%test-write-result1 pair log))
(loop (cdr list))))))))
(define (test-runner-detail)
(let ((runner (test-runner-simple)))
(test-runner-on-test-end! runner test-on-test-end-detail)
runner))
(let* ((files (find-files (or search-path ".") :pattern "^test.scm$"))
(thunks (map (lambda (file) (lambda () (load file))) files)))
(for-each (lambda (file thunk)
(parameterize ((test-runner-factory test-runner-detail))
(parameterize ((test-runner-current (test-runner-create)))
(guard (e (else (report-error e)))
(thunk))))
(newline))
files thunks))
| null |
https://raw.githubusercontent.com/spurious/sagittarius-scheme-mirror/53f104188934109227c01b1e9a9af5312f9ce997/ext/all-tests.scm
|
scheme
|
all tests for extensions
Write out properties not written out by on-test-begin.
|
(add-load-path ".")
(add-load-path "../lib")
(add-load-path "../sitelib")
(cond-expand
(sagittarius.os.windows
(add-dynamic-load-path "../build/modules"))
(else
(add-dynamic-load-path "../build")))
(import (rnrs) (util file) (core errors) (srfi :39 parameters)
(srfi :64 testing))
(define-constant resource-file ".sagittarius-exttestrc")
(define search-path #f)
(if (file-exists? resource-file)
(call-with-input-file resource-file
(lambda (p)
(let ((line (get-line p)))
(unless (eof-object? line)
(set! search-path line)
(do ((path (get-line p) (get-line p)))
((eof-object? path))
(add-load-path path))))))
to avoid to use installed time library . for ext / thread
(add-load-path "./time"))
(define (test-on-test-end-detail runner)
(define (%test-write-result1 pair port)
(display " " port)
(display (car pair) port)
(display ": " port)
(write (cdr pair) port)
(newline port))
(let ((log (test-runner-aux-value runner))
(kind (test-result-ref runner 'result-kind)))
(when (memq kind '(fail))
(let* ((results (test-result-alist runner))
(source-file (assq 'source-file results))
(source-line (assq 'source-line results))
(test-name (assq 'test-name results)))
(when (or source-file source-line)
(if source-file (display (cdr source-file)))
(display ":")
(if source-line (display (cdr source-line)))
(display ":"))
(display (if (eq? kind 'xpass) "XPASS" "FAIL"))
(when test-name
(display " ")(display (cdr test-name)))
(newline))
(let ((expected (test-result-ref runner 'expected-value))
(actual (test-result-ref runner 'actual-value)))
(display #\tab)(display "expected value: ")(display expected)(newline)
(display #\tab)(display " actual value: ")(display actual)(newline)))
(when (output-port? log)
(display "Test end:" log)
(newline log)
(let loop ((list (test-result-alist runner)))
(if (pair? list)
(let ((pair (car list)))
(if (not (memq (car pair)
'(test-name source-file source-line source-form)))
(%test-write-result1 pair log))
(loop (cdr list))))))))
(define (test-runner-detail)
(let ((runner (test-runner-simple)))
(test-runner-on-test-end! runner test-on-test-end-detail)
runner))
(let* ((files (find-files (or search-path ".") :pattern "^test.scm$"))
(thunks (map (lambda (file) (lambda () (load file))) files)))
(for-each (lambda (file thunk)
(parameterize ((test-runner-factory test-runner-detail))
(parameterize ((test-runner-current (test-runner-create)))
(guard (e (else (report-error e)))
(thunk))))
(newline))
files thunks))
|
8344ebb45863d3cf71f0f1dbac4c4c25b7812e390a50fe113fa7f94e784cbc79
|
Viasat/halite
|
data_err_maps.clj
|
Copyright ( c ) 2022 Viasat , Inc.
Licensed under the MIT license
(ns com.viasat.halite.doc.data-err-maps)
(set! *warn-on-reflection* true)
(def err-maps
{'h-err/abs-failure {:doc "The way the number space is divided the value of zero comes out of the positive number space. This means there is one more negative number than there are positive numbers. So there is one negative number whose absolute value cannot be represented. That negative number is the most negative value."}
'h-err/accumulator-target-must-be-bare-symbol {:doc "In 'reduce', it is necesary to define a symbol to reference the accumulated value of the reduction. This symbol must not include a namespace."
:err-ref ['h-err/element-binding-target-must-be-bare-symbol]}
'h-err/arg-type-mismatch {:doc "A relatively generic exception that indicates the operator being invoked cannot operate on the type of value provided."}
'h-err/not-both-vectors {:doc "When the first argument to 'concat' is a vector, the second must also be a vector. A vector can be concated onto a set, but a set cannot be concated onto a vector."}
'h-err/argument-empty {:doc "The 'first' operation cannot be invoked on an empty collection."}
'h-err/argument-not-set-or-vector {:doc "The operation must be invoked a collection."}
'h-err/argument-not-vector {:doc "The operation can only be invoked on a vector."}
'h-err/argument-not-collection {:doc "The operation can only be invoked on a collection."}
'h-err/arguments-not-sets {:doc "The operation can only be invoked on set arguments."}
'h-err/not-set-with-single-value {:doc "The operation cannot be invoked on a set if the set contains more than one value. This applies to the 'first' operation which can only be used to retrieve an item from a set with a single value."}
'h-err/binding-target-must-be-bare-symbol {:doc "In binding forms, the first value of each pair must be a symbol without a namespace. This symbol is an identifier that will be bound to the value of the second item in the pair."}
'h-err/cannot-bind-reserved-word {:doc "There are a small number of symbols that are reserved for system use and cannot be used by users in bindings."}
'h-err/cannot-conj-unset {:doc "Only actual values can be added into collections. Specifically 'unset' cannot be added into a collection."}
'h-err/comprehend-binding-wrong-count {:doc "Collection comprehensions require a single binding that defines the symbol to be bound to the elements of the collection."}
'h-err/comprehend-collection-invalid-type {:doc "Collection comprehensions can only be applied to collections, i.e. vectors or sets."}
'h-err/divide-by-zero {:doc "Division by zero, whether directly or indirectly via modulus cannot be performed."}
'h-err/element-accumulator-same-symbol {:doc "The 'reduce' operation requires distinct symbols for referring to the accumulator and the collection element."}
'h-err/element-binding-target-must-be-bare-symbol {:doc "In 'reduce', it is necesary to define a symbol without a namepsace which is used to hold each element of the collection."
:err-ref ['h-err/accumulator-target-must-be-bare-symbol]}
'h-err/get-in-path-must-be-vector-literal {:doc "The path to navigate in 'get-in' must be a literal, i.e. it cannot be an expression to compute a vector."}
'h-err/if-value-must-be-bare-symbol {:doc "The 'if-value' operator can only be applied to a bare symbol that is already bound to an optional value."}
'h-err/index-out-of-bounds {:doc "The index falls outside of the bounds of the vector. A way to avoid this is to first test the length of the vector."}
'h-err/invalid-exponent {:doc "The exponent cannot be negative."}
'h-err/invalid-expression {:doc "The expression itself was not recognized as a value that could be evaluated."}
'h-err/field-value-of-wrong-type {:doc "The value did not match the type of the spec field."}
'h-err/value-of-wrong-type {:doc "The value did not match the expected type for this symbol in the context."}
'h-err/invalid-instance {:doc "An attempt was made to create an instance that violated a spec constraint."}
'h-err/invalid-instance-index {:doc "An attempt was made to a read a value from an instance, but a field name was not provided as an index, instead a value such as an integer was provided."}
'h-err/invalid-keyword-char {:doc "Only certain characters, in certain sequences are allowed to appear in keywords."}
'h-err/invalid-keyword-length {:doc "The length of keywords is limited. The supplied keyword exceeded the limit."}
'h-err/invalid-lookup-target {:doc "An attempt was made to retrieve a field from an instance but the value was not known to be an instance of a specific spec. For example, the value may have been missing, as in the case of an optional field. Or perhaps the instance was a result of an expression and the result could have been an instance of many alternative specs."}
'h-err/invalid-refinement-expression {:doc "A refinement expression must produce an instance whose :$type matches the type that is declared on the refinement."
:doc-j "A refinement expression must produce an instance whose $type matches the type that is declared on the refinement."}
'h-err/invalid-refines-to-bound {:doc "Propagate cannot use the given bounds because it refers to a refinement path that doesn't exist."}
'h-err/invalid-refines-to-bound-conflict {:doc "Propagate cannot use the given bounds because a $refines-to bound specifies a target spec must be :Unset by one entry, but required by another."}
'h-err/invalid-symbol-char {:doc "Only certain characters, in certain sequences are allowed to appear in symbols."}
'h-err/invalid-symbol-length {:doc "The length of symbols is limited. The supplied symbol exceeded the limit."}
'h-err/invalid-type-value {:doc "The value of the :$type field in an instance must be a keyword that includes a '/' separator."
:doc-j "The value of the $type field in an instance must be a symbol that includes a '/' separator."}
'h-err/invalid-collection-type {:doc "This indicates that a collection value was provided, but the collection is not of a type that is supported."}
'h-err/invalid-value {:doc "A value was supplied, but the type of the value is not recognized."}
'h-err/invalid-vector-index {:doc "An index was supplied to lookup a value in a vector, but the index was not an integer."}
'h-err/let-bindings-odd-count {:doc "A 'let' expression included an odd number of elements in the binding vector. This is invalid as the bindings are to be a sequence of pairs."}
'h-err/let-needs-bare-symbol {:doc "In a 'let' expression, the first item in each pair of items must be a symbol."}
'h-err/limit-exceeded {:doc "There are various, context specific, limits that are enforced. e.g. limits on how deeply expressions may be nested. One of these limits was violated. See the exception data for more details."}
'h-err/literal-must-evaluate-to-value {:doc "All of the expressions that appear as elements in a collection literal, must be guaranteed to evaluate to values, i.e. they must never evaluate to 'unset'."}
'h-err/missing-required-vars {:doc "An attempt was made to construct an instance of a spec, without all of its mandatory fields being assigned values."}
'h-err/missing-type-field {:doc "An attempt was made to construct an instance without providing a value for the :$type field."
:doc-j "An attempt was made to construct an instance without providing a value for the $type field."}
'h-err/must-produce-value {:doc "When using 'map', the expression being evaluated on each element, must produce an actual value, i.e. it must be never produce 'unset'."}
'h-err/no-abstract {:doc "An attempt was made to construct a concrete instance with a field whose value is an instance of an abstract spec. Any instances used to compose a concrete instance, must themselves be concrete."}
'h-err/no-refinement-path {:doc "There was no refinement path found to convert a specific instance to a target spec type. There may have been a conditional refinement that did not match the instance, or perhaps there is no refinement path at all."}
'h-err/no-matching-signature {:doc "An attempt was made to invoke an operation, but either the number of arguments or the types of the arguments was not valid."}
'h-err/not-boolean-body {:doc "Either an 'any?', 'every?', or 'filter' call was attempted but the expression to evaluate for each element in the collection did not produce a boolean value."}
'h-err/not-boolean-constraint {:doc "All constraint expressions on specs must produce boolean values. The constraints are predicates which evaluate to true or false to indicate whether the constraint has been met by the instance state."}
'h-err/not-sortable-body {:doc "When using 'sort-by', the expression used for sorting must produce a value that can be sorted."}
'h-err/overflow {:doc "The mathematical operation resulted in a number that is too large to fit in the bytes alloted for the numeric type."}
'h-err/reduce-not-vector {:doc "The 'reduce' operation can only be applied to vectors. Specifically, sets cannot be reduced."}
'h-err/refinement-error {:doc "An unanticipated error condition was encountered while computing the refinement of an instance."}
'h-err/resource-spec-not-found {:doc "The spec identifier provided did not correspond to a known spec."}
'h-err/size-exceeded {:doc "There are various, context specific, limits that are enforced. e.g. limits on the lengths of strings. One of these limits was violated. See the exception data for more details."}
'h-err/sort-value-collision {:doc "When sorting a collection with 'sort-by', the sort expression must produce a unique value for each element in the collection."}
'h-err/spec-threw {:doc "This error can occur in two situations. First, it occurs from an explicit invocation of the 'error' operation was encountered in a spec. This indicates that the spec author considers it not possible to proceed in the encountered situation. See the error string in the exception detail for details. Second, this error is produced when a spec if being type checked at runtime."}
'h-err/instance-threw {:doc "An instance literal produced errors. The specific errors are included in the error data."}
'h-err/symbol-undefined {:doc "An unbound symbol was referenced in an expression at evaluation time."}
'h-err/symbols-not-bound {:doc "Unbound symbols are referenced in an expression at type-check time."}
'h-err/syntax-error {:doc "An object appeared in an expression that is not one of the expected values for the language."}
'h-err/undefined-symbol {:doc "An unbound symbol was referenced in an expression at type-check time."}
'h-err/unknown-function-or-operator {:doc "The operator being invoked is not recognized as a valid operation."}
'h-err/field-name-not-in-spec {:doc "The field name is not valid for the spec. The field name was provided to either define a field value in an instance or to lookup a field in an instance."}
'h-err/wrong-arg-count {:doc "The number of arguments provided to the operation did not match what was expected."}
'h-err/wrong-arg-count-min {:doc "The operation expected at least a certain number of arguments. This minimum was not met."}
'h-err/wrong-arg-count-odd {:doc "The operation expected an odd number of arguments."}
'h-err/spec-cycle-runtime {:doc "Specs cannot be defined to refine to themselves either directly or transitively. At execution time, this was violated."}
'h-err/refinement-diamond {:doc "Spec refinements cannot be defined that allow multiple refinement paths between the same two specs."}
'h-err/spec-cycle {:doc "Dependencies between specs cannot form a cycle."}
'h-err/spec-map-needed {:doc "This is a low-level exception indicating that an operation was invoked that provided an interface to retreive specs, rather than a literal spec-map."}
'h-err/unknown-type-collection {:doc "Collections of heterogenous types are not allowed. Similarly collections whose element type cannot be statically determined are not allowed."}
'l-err/binding-expression-not-optional {:doc "The expression being tested in an 'if-value-let' statement must optionally produce a value."}
'l-err/binding-target-invalid-symbol {:doc "The symbols to be bound are not to start with a '$'."}
'l-err/cannot-bind-nothing {:doc "It is not permitted to bind a symbol to 'nothing'."}
'l-err/cannot-bind-unset {:doc "It is not permitted to rebind the symbol used to represent 'unset'. Instead of defining a symbol for this, consider using '$no-value'."}
'l-err/disallowed-nothing {:doc "An expression was encountered that does not have a value, but it was used in a place where a value is required. Examples of expressions that do not have values are an invocation of 'error' and the binding of a symbol to an element in an empty collection."}
'l-err/first-argument-not-optional {:doc "The value being tested in an 'if-value' statement must be optional."}
'l-err/get-in-path-empty {:doc "A path must be provided to the 'get-in' operation."}
'l-err/let-bindings-empty {:doc "The bindings form of the 'let' cannot be empty. If there is nothing to bind, then the 'let' can be omitted."}
'l-err/result-always-known {:doc "The result of the equality check is always the same and can be known in advance, so a check is not needed."}
'l-err/disallowed-unset-variable {:doc "It is not allowed to bind 'unset' to symbols other than the built-in '$no-value'."}})
| null |
https://raw.githubusercontent.com/Viasat/halite/1145fdf49b5148acb389dd5100059b0d2ef959e1/src/com/viasat/halite/doc/data_err_maps.clj
|
clojure
|
Copyright ( c ) 2022 Viasat , Inc.
Licensed under the MIT license
(ns com.viasat.halite.doc.data-err-maps)
(set! *warn-on-reflection* true)
(def err-maps
{'h-err/abs-failure {:doc "The way the number space is divided the value of zero comes out of the positive number space. This means there is one more negative number than there are positive numbers. So there is one negative number whose absolute value cannot be represented. That negative number is the most negative value."}
'h-err/accumulator-target-must-be-bare-symbol {:doc "In 'reduce', it is necesary to define a symbol to reference the accumulated value of the reduction. This symbol must not include a namespace."
:err-ref ['h-err/element-binding-target-must-be-bare-symbol]}
'h-err/arg-type-mismatch {:doc "A relatively generic exception that indicates the operator being invoked cannot operate on the type of value provided."}
'h-err/not-both-vectors {:doc "When the first argument to 'concat' is a vector, the second must also be a vector. A vector can be concated onto a set, but a set cannot be concated onto a vector."}
'h-err/argument-empty {:doc "The 'first' operation cannot be invoked on an empty collection."}
'h-err/argument-not-set-or-vector {:doc "The operation must be invoked a collection."}
'h-err/argument-not-vector {:doc "The operation can only be invoked on a vector."}
'h-err/argument-not-collection {:doc "The operation can only be invoked on a collection."}
'h-err/arguments-not-sets {:doc "The operation can only be invoked on set arguments."}
'h-err/not-set-with-single-value {:doc "The operation cannot be invoked on a set if the set contains more than one value. This applies to the 'first' operation which can only be used to retrieve an item from a set with a single value."}
'h-err/binding-target-must-be-bare-symbol {:doc "In binding forms, the first value of each pair must be a symbol without a namespace. This symbol is an identifier that will be bound to the value of the second item in the pair."}
'h-err/cannot-bind-reserved-word {:doc "There are a small number of symbols that are reserved for system use and cannot be used by users in bindings."}
'h-err/cannot-conj-unset {:doc "Only actual values can be added into collections. Specifically 'unset' cannot be added into a collection."}
'h-err/comprehend-binding-wrong-count {:doc "Collection comprehensions require a single binding that defines the symbol to be bound to the elements of the collection."}
'h-err/comprehend-collection-invalid-type {:doc "Collection comprehensions can only be applied to collections, i.e. vectors or sets."}
'h-err/divide-by-zero {:doc "Division by zero, whether directly or indirectly via modulus cannot be performed."}
'h-err/element-accumulator-same-symbol {:doc "The 'reduce' operation requires distinct symbols for referring to the accumulator and the collection element."}
'h-err/element-binding-target-must-be-bare-symbol {:doc "In 'reduce', it is necesary to define a symbol without a namepsace which is used to hold each element of the collection."
:err-ref ['h-err/accumulator-target-must-be-bare-symbol]}
'h-err/get-in-path-must-be-vector-literal {:doc "The path to navigate in 'get-in' must be a literal, i.e. it cannot be an expression to compute a vector."}
'h-err/if-value-must-be-bare-symbol {:doc "The 'if-value' operator can only be applied to a bare symbol that is already bound to an optional value."}
'h-err/index-out-of-bounds {:doc "The index falls outside of the bounds of the vector. A way to avoid this is to first test the length of the vector."}
'h-err/invalid-exponent {:doc "The exponent cannot be negative."}
'h-err/invalid-expression {:doc "The expression itself was not recognized as a value that could be evaluated."}
'h-err/field-value-of-wrong-type {:doc "The value did not match the type of the spec field."}
'h-err/value-of-wrong-type {:doc "The value did not match the expected type for this symbol in the context."}
'h-err/invalid-instance {:doc "An attempt was made to create an instance that violated a spec constraint."}
'h-err/invalid-instance-index {:doc "An attempt was made to a read a value from an instance, but a field name was not provided as an index, instead a value such as an integer was provided."}
'h-err/invalid-keyword-char {:doc "Only certain characters, in certain sequences are allowed to appear in keywords."}
'h-err/invalid-keyword-length {:doc "The length of keywords is limited. The supplied keyword exceeded the limit."}
'h-err/invalid-lookup-target {:doc "An attempt was made to retrieve a field from an instance but the value was not known to be an instance of a specific spec. For example, the value may have been missing, as in the case of an optional field. Or perhaps the instance was a result of an expression and the result could have been an instance of many alternative specs."}
'h-err/invalid-refinement-expression {:doc "A refinement expression must produce an instance whose :$type matches the type that is declared on the refinement."
:doc-j "A refinement expression must produce an instance whose $type matches the type that is declared on the refinement."}
'h-err/invalid-refines-to-bound {:doc "Propagate cannot use the given bounds because it refers to a refinement path that doesn't exist."}
'h-err/invalid-refines-to-bound-conflict {:doc "Propagate cannot use the given bounds because a $refines-to bound specifies a target spec must be :Unset by one entry, but required by another."}
'h-err/invalid-symbol-char {:doc "Only certain characters, in certain sequences are allowed to appear in symbols."}
'h-err/invalid-symbol-length {:doc "The length of symbols is limited. The supplied symbol exceeded the limit."}
'h-err/invalid-type-value {:doc "The value of the :$type field in an instance must be a keyword that includes a '/' separator."
:doc-j "The value of the $type field in an instance must be a symbol that includes a '/' separator."}
'h-err/invalid-collection-type {:doc "This indicates that a collection value was provided, but the collection is not of a type that is supported."}
'h-err/invalid-value {:doc "A value was supplied, but the type of the value is not recognized."}
'h-err/invalid-vector-index {:doc "An index was supplied to lookup a value in a vector, but the index was not an integer."}
'h-err/let-bindings-odd-count {:doc "A 'let' expression included an odd number of elements in the binding vector. This is invalid as the bindings are to be a sequence of pairs."}
'h-err/let-needs-bare-symbol {:doc "In a 'let' expression, the first item in each pair of items must be a symbol."}
'h-err/limit-exceeded {:doc "There are various, context specific, limits that are enforced. e.g. limits on how deeply expressions may be nested. One of these limits was violated. See the exception data for more details."}
'h-err/literal-must-evaluate-to-value {:doc "All of the expressions that appear as elements in a collection literal, must be guaranteed to evaluate to values, i.e. they must never evaluate to 'unset'."}
'h-err/missing-required-vars {:doc "An attempt was made to construct an instance of a spec, without all of its mandatory fields being assigned values."}
'h-err/missing-type-field {:doc "An attempt was made to construct an instance without providing a value for the :$type field."
:doc-j "An attempt was made to construct an instance without providing a value for the $type field."}
'h-err/must-produce-value {:doc "When using 'map', the expression being evaluated on each element, must produce an actual value, i.e. it must be never produce 'unset'."}
'h-err/no-abstract {:doc "An attempt was made to construct a concrete instance with a field whose value is an instance of an abstract spec. Any instances used to compose a concrete instance, must themselves be concrete."}
'h-err/no-refinement-path {:doc "There was no refinement path found to convert a specific instance to a target spec type. There may have been a conditional refinement that did not match the instance, or perhaps there is no refinement path at all."}
'h-err/no-matching-signature {:doc "An attempt was made to invoke an operation, but either the number of arguments or the types of the arguments was not valid."}
'h-err/not-boolean-body {:doc "Either an 'any?', 'every?', or 'filter' call was attempted but the expression to evaluate for each element in the collection did not produce a boolean value."}
'h-err/not-boolean-constraint {:doc "All constraint expressions on specs must produce boolean values. The constraints are predicates which evaluate to true or false to indicate whether the constraint has been met by the instance state."}
'h-err/not-sortable-body {:doc "When using 'sort-by', the expression used for sorting must produce a value that can be sorted."}
'h-err/overflow {:doc "The mathematical operation resulted in a number that is too large to fit in the bytes alloted for the numeric type."}
'h-err/reduce-not-vector {:doc "The 'reduce' operation can only be applied to vectors. Specifically, sets cannot be reduced."}
'h-err/refinement-error {:doc "An unanticipated error condition was encountered while computing the refinement of an instance."}
'h-err/resource-spec-not-found {:doc "The spec identifier provided did not correspond to a known spec."}
'h-err/size-exceeded {:doc "There are various, context specific, limits that are enforced. e.g. limits on the lengths of strings. One of these limits was violated. See the exception data for more details."}
'h-err/sort-value-collision {:doc "When sorting a collection with 'sort-by', the sort expression must produce a unique value for each element in the collection."}
'h-err/spec-threw {:doc "This error can occur in two situations. First, it occurs from an explicit invocation of the 'error' operation was encountered in a spec. This indicates that the spec author considers it not possible to proceed in the encountered situation. See the error string in the exception detail for details. Second, this error is produced when a spec if being type checked at runtime."}
'h-err/instance-threw {:doc "An instance literal produced errors. The specific errors are included in the error data."}
'h-err/symbol-undefined {:doc "An unbound symbol was referenced in an expression at evaluation time."}
'h-err/symbols-not-bound {:doc "Unbound symbols are referenced in an expression at type-check time."}
'h-err/syntax-error {:doc "An object appeared in an expression that is not one of the expected values for the language."}
'h-err/undefined-symbol {:doc "An unbound symbol was referenced in an expression at type-check time."}
'h-err/unknown-function-or-operator {:doc "The operator being invoked is not recognized as a valid operation."}
'h-err/field-name-not-in-spec {:doc "The field name is not valid for the spec. The field name was provided to either define a field value in an instance or to lookup a field in an instance."}
'h-err/wrong-arg-count {:doc "The number of arguments provided to the operation did not match what was expected."}
'h-err/wrong-arg-count-min {:doc "The operation expected at least a certain number of arguments. This minimum was not met."}
'h-err/wrong-arg-count-odd {:doc "The operation expected an odd number of arguments."}
'h-err/spec-cycle-runtime {:doc "Specs cannot be defined to refine to themselves either directly or transitively. At execution time, this was violated."}
'h-err/refinement-diamond {:doc "Spec refinements cannot be defined that allow multiple refinement paths between the same two specs."}
'h-err/spec-cycle {:doc "Dependencies between specs cannot form a cycle."}
'h-err/spec-map-needed {:doc "This is a low-level exception indicating that an operation was invoked that provided an interface to retreive specs, rather than a literal spec-map."}
'h-err/unknown-type-collection {:doc "Collections of heterogenous types are not allowed. Similarly collections whose element type cannot be statically determined are not allowed."}
'l-err/binding-expression-not-optional {:doc "The expression being tested in an 'if-value-let' statement must optionally produce a value."}
'l-err/binding-target-invalid-symbol {:doc "The symbols to be bound are not to start with a '$'."}
'l-err/cannot-bind-nothing {:doc "It is not permitted to bind a symbol to 'nothing'."}
'l-err/cannot-bind-unset {:doc "It is not permitted to rebind the symbol used to represent 'unset'. Instead of defining a symbol for this, consider using '$no-value'."}
'l-err/disallowed-nothing {:doc "An expression was encountered that does not have a value, but it was used in a place where a value is required. Examples of expressions that do not have values are an invocation of 'error' and the binding of a symbol to an element in an empty collection."}
'l-err/first-argument-not-optional {:doc "The value being tested in an 'if-value' statement must be optional."}
'l-err/get-in-path-empty {:doc "A path must be provided to the 'get-in' operation."}
'l-err/let-bindings-empty {:doc "The bindings form of the 'let' cannot be empty. If there is nothing to bind, then the 'let' can be omitted."}
'l-err/result-always-known {:doc "The result of the equality check is always the same and can be known in advance, so a check is not needed."}
'l-err/disallowed-unset-variable {:doc "It is not allowed to bind 'unset' to symbols other than the built-in '$no-value'."}})
|
|
29b75af9836473089cc88d77fa441cfd7789039b7bcb42b7ffc76dbfaec98dcc
|
fukamachi/integral
|
connection.lisp
|
(in-package :cl-user)

;; Test package for integral's connection handling.  Imports the
;; internal CONNECTION-HANDLE accessor so the tests can inspect
;; whether the underlying database handle has been created.
(defpackage integral-test.connection
  (:use :cl
        :integral
        :integral-test.init
        :prove)
  (:import-from :integral.connection
                :connection-handle))
(in-package :integral-test.connection)

;; Two checks: the handle is absent right after MAKE-CONNECTION and
;; present after GET-CONNECTION.
(plan 2)

(let ((*db* (make-connection :mysql
                             :database-name "integral_test"
                             :username "root")))
  ;; MAKE-CONNECTION alone has not produced a handle yet.
  (ok (not (connection-handle *db*)))
  ;; GET-CONNECTION establishes the actual connection for *DB*.
  (get-connection)
  (ok (connection-handle *db*)))

(finalize)
| null |
https://raw.githubusercontent.com/fukamachi/integral/5fdf506233e9f6aa814a2da529bed0a551183110/t/connection.lisp
|
lisp
|
(in-package :cl-user)
(defpackage integral-test.connection
(:use :cl
:integral
:integral-test.init
:prove)
(:import-from :integral.connection
:connection-handle))
(in-package :integral-test.connection)
(plan 2)
(let ((*db* (make-connection :mysql
:database-name "integral_test"
:username "root")))
(ok (not (connection-handle *db*)))
(get-connection)
(ok (connection-handle *db*)))
(finalize)
|
|
9f3f06619a18a674a86c4644864bb4dbe38ae93bc8e0fbcc57256360dda46bb6
|
techascent/tvm-clj
|
project.clj
|
;; Leiningen build definition for tvm-clj, Clojure bindings for the
;; TVM compiler library.
(defproject tvm-clj "6.00-beta-1-SNAPSHOT"
  :description "Clojure bindings and exploration of the tvm library"
  ;; NOTE(review): the :url and :license URL strings below look
  ;; truncated by text extraction -- confirm against the upstream repo.
  :url "-clj"
  :license {:name "Eclipse Public License"
            :url "-v10.html"}
  :dependencies [[org.clojure/clojure "1.10.2-alpha1"]
                 [cnuernber/dtype-next "6.00-beta-5"]
                 [techascent/tech.jna "4.05"]]
  ;; Java sources compiled alongside the Clojure code.
  :java-source-paths ["java"]
  :profiles {;; dev profile adds the criterium benchmarking library.
             :dev {:dependencies [[criterium "0.4.5"]]}
             ;; codox profile builds API docs into docs/ with the rdash
             ;; theme; only the :namespaces listed below are documented.
             :codox
             {:dependencies [[codox-theme-rdash "0.1.2"]]
              :plugins [[lein-codox "0.10.7"]]
              :codox {:project {:name "tvm-clj"}
                      :metadata {:doc/format :markdown}
                      :themes [:rdash]
                      :source-paths ["src"]
                      :output-path "docs"
                      :doc-paths ["topics"]
                      :source-uri "-clj/blob/master/{filepath}#L{line}"
                      :namespaces [tvm-clj.ast
                                   tvm-clj.schedule
                                   tvm-clj.compiler
                                   tvm-clj.module
                                   tvm-clj.device
                                   tvm-clj.application.image
                                   tvm-clj.application.kmeans]}}}
  ;; `lein codox` runs the codox task with both profiles active.
  :aliases {"codox" ["with-profile" "codox,dev" "codox"]})
| null |
https://raw.githubusercontent.com/techascent/tvm-clj/1088845bd613b4ba14b00381ffe3cdbd3d8b639e/project.clj
|
clojure
|
(defproject tvm-clj "6.00-beta-1-SNAPSHOT"
:description "Clojure bindings and exploration of the tvm library"
:url "-clj"
:license {:name "Eclipse Public License"
:url "-v10.html"}
:dependencies [[org.clojure/clojure "1.10.2-alpha1"]
[cnuernber/dtype-next "6.00-beta-5"]
[techascent/tech.jna "4.05"]]
:java-source-paths ["java"]
:profiles {:dev {:dependencies [[criterium "0.4.5"]]}
:codox
{:dependencies [[codox-theme-rdash "0.1.2"]]
:plugins [[lein-codox "0.10.7"]]
:codox {:project {:name "tvm-clj"}
:metadata {:doc/format :markdown}
:themes [:rdash]
:source-paths ["src"]
:output-path "docs"
:doc-paths ["topics"]
:source-uri "-clj/blob/master/{filepath}#L{line}"
:namespaces [tvm-clj.ast
tvm-clj.schedule
tvm-clj.compiler
tvm-clj.module
tvm-clj.device
tvm-clj.application.image
tvm-clj.application.kmeans]}}}
:aliases {"codox" ["with-profile" "codox,dev" "codox"]})
|
|
6d8fa97a5cce75d9c2802e5cc2eb386961ad3aafe00cb7c556d00ced4096fbea
|
ghc/nofib
|
TG_iter.hs
|
Taylor - Galerkin / Pressure - correction algorithm .
Solving four increamental matrix equations iteratively :
const1*M(U'-U ) = rh1(U , P )
const2*M(U''-U ) = rh1(U',P )
const3*K(P'-P ) = rh2(U ' )
const4*M(U'''-U '' ) = rh3(P'-P )
The 3rd equation is solved by using the Choleski
decomposition menthod and the rest are solved by using
the iteration method .
XZ , 24/10/91
Taylor-Galerkin/Pressure-correction algorithm.
Solving four increamental matrix equations iteratively:
const1*M(U'-U) = rh1(U,P)
const2*M(U''-U) = rh1(U',P)
const3*K(P'-P) = rh2(U')
const4*M(U'''-U'') = rh3(P'-P)
The 3rd equation is solved by using the Choleski
decomposition menthod and the rest are solved by using
the Jacobi iteration method.
XZ, 24/10/91
-}
Modified to adopt S_arrays .
Evaluations are forced by using normalize_obj .
( This is currently necessary for running the whole
program on a 200 element problem ) .
XZ , 19/2/92
Modified to adopt S_arrays.
Evaluations are forced by using normalize_obj.
(This is currently necessary for running the whole
program on a 200 element problem).
XZ, 19/2/92
-}
Iteration along time - step implemented
XZ , 25/2/92
Iteration along time-step implemented
XZ, 25/2/92
-}
module TG_iter ( tg_iter ) where

import Defs
import S_Array -- not needed w/ proper module handling
import Norm    -- ditto
import Asb_routs
import Rhs_Asb_routs
import Jcb_method
import Chl_method
import Tol_cal

-----------------------------------------------------------
-- Iterative TG algorithm.                                --
-- Functions called :                                     --
--   for equation solving: "chl_method" and "jcb_method"  --
--   for RHS assembling  : "get_rh1", "get_rh2" and       --
--                         "get_rh3"                      --
-----------------------------------------------------------

-- | Run the Taylor-Galerkin / pressure-correction scheme for at most
-- m_iter time steps, stopping early once the change in the solution
-- drops below m_toler, and return a textual report of the run.
-- (The "step 1a/1b/2/3" markers below were comments in the original
-- source whose "--" markers had been stripped by text extraction,
-- leaving the where-block syntactically invalid; they are restored
-- here as comments.)
tg_iter
  :: Bool -> Int -> Float -> Int -> Float -> Float -> Float
  -> (S_array (Float, ((Float, Float, Float), (Float, Float, Float))))
  -> (S_array [(Int, Int)])
  -> (S_array [(Int, Int)])
  -> (S_array [Int])
  -> (S_array [Int])
  -> (S_array Bool, (S_array Bool, S_array Bool))
  -> [Int]
  -> (S_array (S_array Float, S_array (Int, [Float])), S_array Int)
  -> (S_array Float, (S_array Float, S_array Float))
  -> String
tg_iter
  mon m_iter m_toler max_jcb_iter jcb_toler
  relax dlt_t el_det_fac v_asb_table p_asb_table
  v_steer p_steer bry_nodes p_fixed chl_fac (p,u) =
  do_tg_iter m_iter (pack_obj p,pack_obj u) []
  where
    do_tg_iter n (old_p,old_u) res =
      -- Stop at the last step or on convergence; abort on divergence.
      if (n<=1) || (max_tol<m_toler)
      then new_res ++ show_final
      else
        if max_tol>large
        then error "main iteration: overflow!"
        else do_tg_iter (n-1) (new_p,new_u) new_res
      where
        -- Report text for this time step (verbose when mon is set).
        new_res =
          res ++
          "at time step " ++ (shows (m_iter-n+1) ":\n\n") ++
          (if mon
           then
             "initial presure:\n" ++ (shows (retrieve_obj old_p) "\n") ++
             "inititial velocities:\n" ++ (shows (retrieve_obj old_u) "\n") ++
             "rh1a:\n" ++ (shows rh1a "\n") ++
             "velocities after the 1st half of step 1:\n" ++ (shows (retrieve_obj u1a) "\n") ++
             "rh1b:\n" ++ (shows rh1b "\n") ++
             "velocities after the 2nd half of step 1:\n" ++ (shows (retrieve_obj u1b) "\n") ++
             "rh2:\n" ++ (shows (retrieve_obj rh2) "\n") ++
             "rh3:\n" ++ (shows rh3 "\n")
           else "")
        show_final =
          "presure:\n" ++ (shows (retrieve_obj new_p) "\n") ++
          "velocities:\n" ++ (shows (retrieve_obj new_u) "\n")
        -- Evaluation is forced via normalize_obj guards (see module
        -- history note): each binding demands its packed argument.
        tmp_p | normalize_obj new_p = retrieve_obj new_p
        (tmp_u_x,tmp_u_y) | normalize_obj new_u = retrieve_obj new_u
        -- Convergence measure: worst of velocity and pressure change.
        max_tol = max tol_u tol_p
        tol_u =
          tol_cal ((s_elems tmp_u_x)++(s_elems tmp_u_y))
                  ((subs tmp_u_x old_u_x) ++
                   (subs tmp_u_y old_u_y))
                  False
          where
            (old_u_x,old_u_y) | normalize_obj old_u = retrieve_obj old_u
        tol_p | normalize_obj old_p =
          tol_cal (s_elems tmp_p)
                  (subs tmp_p (retrieve_obj old_p)) False
        -- step 1a
        rh1a | normalize_obj old_p && normalize_obj old_u =
          get_rh1 el_det_fac v_asb_table v_steer p_steer
                  (retrieve_obj old_p,retrieve_obj old_u)
        del_u1a =
          jcb_method 1 el_det_fac v_asb_table v_steer
                     bry_nodes rh1a (2/dlt_t) max_jcb_iter jcb_toler relax
        u1a = pack_obj (add_u (retrieve_obj old_u) del_u1a)
        -- step 1b
        rh1b | normalize_obj u1a =
          get_rh1 el_det_fac v_asb_table v_steer p_steer
                  (retrieve_obj old_p,retrieve_obj u1a)
        del_u1b =
          jcb_method 1 el_det_fac v_asb_table v_steer
                     bry_nodes rh1b (1/dlt_t) max_jcb_iter jcb_toler relax
        u1b = pack_obj (add_u (retrieve_obj old_u) del_u1b)
        -- step 2
        rh2 | normalize_obj u1b =
          pack_obj
            (get_rh2 el_det_fac p_asb_table v_steer p_fixed
                     (retrieve_obj u1b))
        -- The pressure equation is solved with the precomputed
        -- Choleski factorisation chl_fac.
        del_p | normalize_obj rh2 =
          pack_obj (chl_method chl_fac (retrieve_obj rh2) (2/dlt_t))
        new_p =
          pack_obj (add_mat (retrieve_obj old_p) (retrieve_obj del_p))
        -- step 3
        rh3 | normalize_obj del_p =
          get_rh3 el_det_fac v_asb_table p_steer (retrieve_obj del_p)
        del_u3 =
          jcb_method 3 el_det_fac v_asb_table v_steer
                     bry_nodes rh3 (2/dlt_t) max_jcb_iter jcb_toler relax
        new_u = pack_obj (add_u (retrieve_obj u1b) del_u3)
        -- Element-wise difference of two S_arrays.
        subs = \x' x -> zipWith (-) (s_elems x') (s_elems x)
| null |
https://raw.githubusercontent.com/ghc/nofib/f34b90b5a6ce46284693119a06d1133908b11856/real/fluid/TG_iter.hs
|
haskell
|
not needed w/ proper module handling
ditto
---------------------------------------------------------
Iterative TG algorithm. --
Functions called : --
for equation solving: "chl_method" and "jcb_method" --
for RHS assembling : "get_rh1", "get_rh2" and --
"get_rh3" --
---------------------------------------------------------
|
Taylor - Galerkin / Pressure - correction algorithm .
Solving four increamental matrix equations iteratively :
const1*M(U'-U ) = rh1(U , P )
const2*M(U''-U ) = rh1(U',P )
const3*K(P'-P ) = rh2(U ' )
const4*M(U'''-U '' ) = rh3(P'-P )
The 3rd equation is solved by using the Choleski
decomposition menthod and the rest are solved by using
the iteration method .
XZ , 24/10/91
Taylor-Galerkin/Pressure-correction algorithm.
Solving four increamental matrix equations iteratively:
const1*M(U'-U) = rh1(U,P)
const2*M(U''-U) = rh1(U',P)
const3*K(P'-P) = rh2(U')
const4*M(U'''-U'') = rh3(P'-P)
The 3rd equation is solved by using the Choleski
decomposition menthod and the rest are solved by using
the Jacobi iteration method.
XZ, 24/10/91
-}
Modified to adopt S_arrays .
Evaluations are forced by using normalize_obj .
( This is currently necessary for running the whole
program on a 200 element problem ) .
XZ , 19/2/92
Modified to adopt S_arrays.
Evaluations are forced by using normalize_obj.
(This is currently necessary for running the whole
program on a 200 element problem).
XZ, 19/2/92
-}
Iteration along time - step implemented
XZ , 25/2/92
Iteration along time-step implemented
XZ, 25/2/92
-}
module TG_iter ( tg_iter ) where
import Defs
import Asb_routs
import Rhs_Asb_routs
import Jcb_method
import Chl_method
import Tol_cal
tg_iter
:: Bool -> Int -> Float -> Int -> Float -> Float -> Float
-> (S_array (Float, ((Float, Float, Float), (Float, Float, Float))))
-> (S_array [(Int, Int)])
-> (S_array [(Int, Int)])
-> (S_array [Int])
-> (S_array [Int])
-> (S_array Bool, (S_array Bool, S_array Bool))
-> [Int]
-> (S_array (S_array Float, S_array (Int, [Float])), S_array Int)
-> (S_array Float, (S_array Float, S_array Float))
-> String
tg_iter
mon m_iter m_toler max_jcb_iter jcb_toler
relax dlt_t el_det_fac v_asb_table p_asb_table
v_steer p_steer bry_nodes p_fixed chl_fac (p,u) =
do_tg_iter m_iter (pack_obj p,pack_obj u) []
where
do_tg_iter n (old_p,old_u) res =
if (n<=1) || (max_tol<m_toler)
then new_res ++ show_final
else
if max_tol>large
then error "main iteration: overflow!"
else do_tg_iter (n-1) (new_p,new_u) new_res
where
new_res =
res ++
"at time step " ++ (shows (m_iter-n+1) ":\n\n") ++
(if mon
then
"initial presure:\n" ++ (shows (retrieve_obj old_p) "\n") ++
"inititial velocities:\n" ++ (shows (retrieve_obj old_u) "\n") ++
"rh1a:\n" ++ (shows rh1a "\n") ++
"velocities after the 1st half of step 1:\n" ++ (shows (retrieve_obj u1a) "\n") ++
"rh1b:\n" ++ (shows rh1b "\n") ++
"velocities after the 2nd half of step 1:\n" ++ (shows (retrieve_obj u1b) "\n") ++
"rh2:\n" ++ (shows (retrieve_obj rh2) "\n") ++
"rh3:\n" ++ (shows rh3 "\n")
else "")
show_final =
"presure:\n" ++ (shows (retrieve_obj new_p) "\n") ++
"velocities:\n" ++ (shows (retrieve_obj new_u) "\n")
tmp_p | normalize_obj new_p = retrieve_obj new_p
(tmp_u_x,tmp_u_y) | normalize_obj new_u = retrieve_obj new_u
max_tol = max tol_u tol_p
tol_u =
tol_cal ((s_elems tmp_u_x)++(s_elems tmp_u_y))
((subs tmp_u_x old_u_x) ++
(subs tmp_u_y old_u_y))
False
where
(old_u_x,old_u_y) | normalize_obj old_u = retrieve_obj old_u
tol_p | normalize_obj old_p =
tol_cal (s_elems tmp_p)
(subs tmp_p (retrieve_obj old_p)) False
step 1a
rh1a | normalize_obj old_p && normalize_obj old_u =
get_rh1 el_det_fac v_asb_table v_steer p_steer
(retrieve_obj old_p,retrieve_obj old_u)
del_u1a =
jcb_method 1 el_det_fac v_asb_table v_steer
bry_nodes rh1a (2/dlt_t) max_jcb_iter jcb_toler relax
u1a = pack_obj (add_u (retrieve_obj old_u) del_u1a)
step 1b
rh1b | normalize_obj u1a =
get_rh1 el_det_fac v_asb_table v_steer p_steer
(retrieve_obj old_p,retrieve_obj u1a)
del_u1b =
jcb_method 1 el_det_fac v_asb_table v_steer
bry_nodes rh1b (1/dlt_t) max_jcb_iter jcb_toler relax
u1b = pack_obj (add_u (retrieve_obj old_u) del_u1b)
step 2
rh2 | normalize_obj u1b =
pack_obj
(get_rh2 el_det_fac p_asb_table v_steer p_fixed
(retrieve_obj u1b))
del_p | normalize_obj rh2 =
pack_obj (chl_method chl_fac (retrieve_obj rh2) (2/dlt_t))
new_p =
pack_obj (add_mat (retrieve_obj old_p) (retrieve_obj del_p))
step 3
rh3 | normalize_obj del_p =
get_rh3 el_det_fac v_asb_table p_steer (retrieve_obj del_p)
del_u3 =
jcb_method 3 el_det_fac v_asb_table v_steer
bry_nodes rh3 (2/dlt_t) max_jcb_iter jcb_toler relax
new_u = pack_obj (add_u (retrieve_obj u1b) del_u3)
subs = \x' x -> zipWith (-) (s_elems x') (s_elems x)
|
4805051e6a139918aa321dd414a70dc56edd814e42f74a75a803e9d2daf45084
|
tlikonen/gpg-utilities
|
string-io.lisp
|
Author : < >
;;
;; License: Creative Commons CC0 (public domain dedication)
;;
;; STRING-IO: utilities for parsing quoted words from strings/streams,
;; escaping and quoting strings (including SQL string syntax), and
;; unescaping C-style backslash sequences.
(defpackage #:string-io
  (:use #:cl)
  (:export #:parse-quoted-word
           #:current-string #:new-string
           #:closing-quote-missing
           #:parsed-string
           #:escape-characters
           #:quote-string
           #:sql-string
           #:sql-escape-like
           #:unescape-c-string))
(in-package #:string-io)
;; Signalled when input ends while a quote is still open.  A subtype
;; of CL:END-OF-FILE: the stream is available via
;; CL:STREAM-ERROR-STREAM and the partially parsed text via the
;; PARSED-STRING reader.
(define-condition closing-quote-missing (end-of-file)
  ((string :reader parsed-string :initarg :string))
  (:report "The end of file was reached before closing quote character."))

;; Return an empty adjustable character vector with a fill pointer,
;; pre-sized to LENGTH, for VECTOR-PUSH-EXTEND accumulation.
(defun make-adjustable-string (length)
  (make-array length :element-type 'character
                     :adjustable t :fill-pointer 0))
(defun parse-quoted-word (string-or-stream
                          &key output-stream
                            (quote-start-char #\") (quote-end-char #\")
                            (escape-char #\\)
                            (separator-chars '(#\space #\tab))
                            (unescape t))
  "Parse STRING-OR-STREAM and return the next word separated by
SEPARATOR-CHARS. If nothing could be parsed return NIL. If OUTPUT-STREAM
is a stream write also output to that stream.

If QUOTE-START-CHAR and QUOTE-END-CHAR are characters then all
characters surrounded by them are part of a word. Also, if ESCAPE-CHAR
is non-NIL then characters that come right after ESCAPE-CHAR are part of
a word (ESCAPE-CHAR protects a char from having a special meaning).

If UNESCAPE is non-NIL interpret and remove ESCAPE-CHARs (when they
are effective); if UNESCAPE is NIL interpret ESCAPE-CHARs but don't
remove them.

If there is opening quote in the input but the closing quote is missing
signal CLOSING-QUOTE-MISSING condition (subtype of CL:END-OF-FILE). The
condition object contains the stream and it can be read with
CL:STREAM-ERROR-STREAM function. The currently parsed string can be read
with PARSED-STRING function. There are two restarts available. When
CURRENT-STRING restart is invoked the function continues and accepts the
currently parsed string (if any). Another restart NEW-STRING must be
invoked with a new string value which is then used and returned."
  (check-type output-stream (or stream null))
  (check-type quote-start-char (or character null))
  (check-type quote-end-char (or character null))
  (assert (or (and quote-start-char quote-end-char)
              (and (null quote-start-char)
                   (null quote-end-char)))
          nil "Must define both or neither of QUOTE-START-CHAR and ~
QUOTE-END-CHAR.")
  (check-type escape-char (or character null))
  (assert (or (null escape-char)
              (and escape-char
                   (not (eql escape-char quote-start-char))
                   (not (eql escape-char quote-end-char))))
          nil "ESCAPE-CHAR (if defined) must not be same character as ~
QUOTE-START-CHAR or QUOTE-END-CHAR.")
  (check-type separator-chars sequence)
  (labels ((separator-char-p (char)
             (find char separator-chars))
           (parse-stream (in out)
             ;; QUOTE is true while we are between an opening and a
             ;; closing quote character.
             (let ((quote nil))
               (handler-case
                   (loop
                     ;; CONTENT: at least one word character consumed.
                     ;; ESC: previous character was an effective
                     ;; ESCAPE-CHAR.
                     :with content := nil
                     :with esc := nil
                     :for char := (read-char in)
                     :do (cond
                           ;; Skip separators before the word starts.
                           ((and (separator-char-p char)
                                 (not content)
                                 (not quote)
                                 (not esc)))
                           ;; An unquoted, unescaped separator after
                           ;; content terminates the word.
                           ((and (separator-char-p char)
                                 content
                                 (not quote)
                                 (not esc))
                            (return))
                           ;; Effective escape char: set ESC; kept in
                           ;; the output only when UNESCAPE is NIL.
                           ((and escape-char
                                 (char= escape-char char)
                                 (not esc))
                            (setf esc t)
                            (unless unescape
                              (vector-push-extend char out)))
                           ;; Unescaped opening quote: enter quote mode
                           ;; (the quote char itself is dropped).
                           ((and quote-start-char
                                 quote-end-char
                                 (char= quote-start-char char)
                                 (not quote)
                                 (not esc))
                            (setf quote t))
                           ;; Unescaped closing quote: leave quote mode.
                           ((and quote-start-char
                                 quote-end-char
                                 (char= quote-end-char char)
                                 quote
                                 (not esc))
                            (setf quote nil))
                           ;; Ordinary word character.
                           (t (vector-push-extend char out)
                              (setf content t)
                              (setf esc nil))))
                 (end-of-file (c)
                   ;; EOF inside quotes is an error; EOF elsewhere
                   ;; simply ends the word.
                   (if quote (error 'closing-quote-missing
                                    :stream (stream-error-stream c)
                                    :string out)))))))
    (let ((output (make-adjustable-string 20)))
      (restart-case
          (etypecase string-or-stream
            (string (with-input-from-string (s string-or-stream)
                      (parse-stream s output)))
            (stream (parse-stream string-or-stream output)))
        (current-string ()
          :report "Continue and accept the current string.")
        (new-string (new)
          :report "Supply a new string and continue."
          (check-type new string)
          (setf output new)))
      ;; Return NIL (not "") when nothing was parsed; otherwise echo
      ;; to OUTPUT-STREAM (if any) and return the word.
      (when (plusp (length output))
        (when output-stream
          (princ output output-stream))
        output))))
(defun escape-characters (string needs-escaping escape-char)
  "Return a copy of STRING in which ESCAPE-CHAR is inserted before
every character that occurs in the NEEDS-ESCAPING sequence."
  (check-type string string)
  (check-type needs-escaping sequence)
  (check-type escape-char character)
  ;; Stream-based accumulation instead of an explicit adjustable
  ;; vector; the resulting string content is identical.
  (with-output-to-string (result)
    (loop :for ch :across string
          :when (find ch needs-escaping)
            :do (write-char escape-char result)
          :do (write-char ch result))))
(defun quote-string (string &key output-stream
                              (quote-start-char #\") (quote-end-char #\")
                              (escape-char #\\)
                              (needs-escaping (list quote-start-char
                                                    quote-end-char
                                                    escape-char)))
  "Return STRING wrapped in QUOTE-START-CHAR and QUOTE-END-CHAR.
When ESCAPE-CHAR is non-NIL, put it before every character of STRING
that occurs in NEEDS-ESCAPING.  When OUTPUT-STREAM is a stream, also
write the result to that stream."
  (check-type string string)
  (check-type output-stream (or stream null))
  (check-type quote-start-char character)
  (check-type quote-end-char character)
  (check-type escape-char (or character null))
  (check-type needs-escaping sequence)
  ;; Escape the body first (when requested), then assemble
  ;; quote + body + quote in a single FORMAT call.
  (let* ((body (if escape-char
                   (escape-characters string needs-escaping escape-char)
                   string))
         (output (format nil "~C~A~C" quote-start-char body quote-end-char)))
    (when output-stream
      (princ output output-stream))
    output))
;; Thin wrapper over QUOTE-STRING configured for SQL string literals:
;; single-quote delimiters, and each embedded ' escaped by doubling
;; (the escape char is ' itself).
(defun sql-string (string &key output-stream)
  "Return STRING as an SQL language string ('...') and escape all '
characters (each ' becomes '' inside the literal).

If OUTPUT-STREAM is a stream write the output to that stream."
  (quote-string string :output-stream output-stream
                       :quote-start-char #\'
                       :quote-end-char #\'
                       :escape-char #\'
                       :needs-escaping "'"))
(defun sql-escape-like (string &key output-stream wild-before wild-after)
  "Return STRING as an SQL language string ('...') and escape all '
characters as well as all special characters of SQL LIKE operator. If
WILD-BEFORE or WILD-AFTER is non-NIL put LIKE operator's % wild card
character at the beginning or the end of the output string,
respectively. This function's output is only useful with SQL LIKE
operator.

If OUTPUT-STREAM is a stream write the output to that stream too."
  (check-type output-stream (or stream null))
  ;; Escape LIKE's pattern characters (_ and %) and the backslash
  ;; itself with a backslash, wrap the pattern as an SQL string, then
  ;; append an ESCAPE '\' clause declaring that escape character to
  ;; the SQL engine.  Note: (if wild-before "%") yields NIL when
  ;; false, which CONCATENATE treats as an empty sequence.
  (let ((output (concatenate
                 'string
                 (sql-string (concatenate
                              'string
                              (if wild-before "%")
                              (escape-characters string "_%\\" #\\)
                              (if wild-after "%"))
                             :output-stream nil)
                 " ESCAPE '\\'")))
    (when output-stream
      (princ output output-stream))
    output))
(defun unescape-c-string (string-or-stream &key output-stream)
  "Parse a C language STRING and unescape \"\\\" escape sequences.
Return a new string.

If OUTPUT-STREAM is a stream write the output to the stream too.

The function assumes that the Common Lisp implementation uses Unicode
character codes."
  (check-type string-or-stream (or string stream))
  (check-type output-stream (or stream null))
  (labels
      (;; Consume at most MAX characters from IN while PREDICATE holds
       ;; and return them as a string.  Stops without consuming the
       ;; first non-matching char; EOF here signals END-OF-FILE, which
       ;; the caller's HANDLER-CASE absorbs.
       (read-chars (in predicate max)
         (let ((chars (make-adjustable-string max)))
           (loop :repeat max
                 :for c := (peek-char nil in)
                 :do (if (funcall predicate c)
                         (vector-push-extend (read-char in) chars)
                         (loop-finish)))
           chars))
       (hex-char-p (char)
         (digit-char-p char 16))
       (octal-char-p (char)
         (digit-char-p char 8))
       (parse-int-base (base string)
         (parse-integer string :radix base))
       (parse-stream (in out)
         (loop
           :for char := (read-char in)
           :if (char= char #\\) :do
             (let ((sub-char (read-char in)))
               (case sub-char
                 ;; Single-character escapes.  NOTE(review): #\Bel,
                 ;; #\Esc and #\Vt are semi-standard character names --
                 ;; assumed supported by the host implementation.
                 (#\a (vector-push-extend #\Bel out))
                 (#\b (vector-push-extend #\Backspace out))
                 (#\e (vector-push-extend #\Esc out))
                 (#\f (vector-push-extend #\Page out))
                 (#\n (vector-push-extend #\Newline out))
                 (#\r (vector-push-extend #\Return out))
                 (#\t (vector-push-extend #\Tab out))
                 (#\v (vector-push-extend #\Vt out))
                 (#\\ (vector-push-extend #\\ out))
                 (#\' (vector-push-extend #\' out))
                 (#\" (vector-push-extend #\" out))
                 (#\? (vector-push-extend #\? out))
                 ;; \uXXXX -- exactly 4 hex digits required; shorter
                 ;; runs are silently dropped.
                 (#\u
                  (let ((cp (read-chars in #'hex-char-p 4)))
                    (when (= 4 (length cp))
                      (vector-push-extend (code-char (parse-int-base 16 cp))
                                          out))))
                 ;; \UXXXXXXXX -- exactly 8 hex digits required.
                 (#\U
                  (let ((cp (read-chars in #'hex-char-p 8)))
                    (when (= 8 (length cp))
                      (vector-push-extend (code-char (parse-int-base 16 cp))
                                          out))))
                 ;; \xN or \xNN -- one or two hex digits.
                 (#\x
                  (let ((cp (read-chars in #'hex-char-p 2)))
                    (when (plusp (length cp))
                      (vector-push-extend (code-char (parse-int-base 16 cp))
                                          out))))
                 ;; Anything else: unread SUB-CHAR and try an octal
                 ;; escape of up to three digits (\N \NN \NNN).  With
                 ;; no octal digits the backslash is simply dropped and
                 ;; SUB-CHAR is handled as an ordinary character on the
                 ;; next iteration.
                 (t
                  (unread-char sub-char in)
                  (let ((cp (read-chars in #'octal-char-p 3)))
                    (when (plusp (length cp))
                      (vector-push-extend (code-char (parse-int-base 8 cp))
                                          out))))))
           :else :do (vector-push-extend char out))))
    (let ((output (make-adjustable-string 15)))
      ;; End of input anywhere (including mid-escape) just stops
      ;; parsing; the partial result is still returned.
      (handler-case (etypecase string-or-stream
                      (string (with-input-from-string (s string-or-stream)
                                (parse-stream s output)))
                      (stream (parse-stream string-or-stream output)))
        (end-of-file () nil))
      (when output-stream
        (princ output output-stream))
      output)))
| null |
https://raw.githubusercontent.com/tlikonen/gpg-utilities/8971c814dbcbced3ed05f2024b29fd9e5096e96d/src/string-io.lisp
|
lisp
|
License: Creative Commons CC0 (public domain dedication)
|
Author : < >
(defpackage #:string-io
(:use #:cl)
(:export #:parse-quoted-word
#:current-string #:new-string
#:closing-quote-missing
#:parsed-string
#:escape-characters
#:quote-string
#:sql-string
#:sql-escape-like
#:unescape-c-string))
(in-package #:string-io)
(define-condition closing-quote-missing (end-of-file)
((string :reader parsed-string :initarg :string))
(:report "The end of file was reached before closing quote character."))
(defun make-adjustable-string (length)
(make-array length :element-type 'character
:adjustable t :fill-pointer 0))
(defun parse-quoted-word (string-or-stream
&key output-stream
(quote-start-char #\") (quote-end-char #\")
(escape-char #\\)
(separator-chars '(#\space #\tab))
(unescape t))
"Parse STRING-OR-STREAM and return the next word separated by
SEPARATOR-CHARS. If nothing could be parsed return NIL. If OUTPUT-STREAM
is a stream write also output to that stream.
If QUOTE-START-CHAR and QUOTE-END-CHAR are characters then all
characters surrounded by them are part of a word. Also, if ESCAPE-CHAR
is non-NIL then characters that come right after ESCAPE-CHAR are part of
a word (ESCAPE-CHAR protects a char from having a special meaning).
If UNESCAPE is non-NIL interpret and remove ESCAPE-CHARs (when they
are effective); if UNESCAPE is NIL interpret
ESCAPE-CHARs but don't remove them.
If there is opening quote in the input but the closing quote is missing
signal CLOSING-QUOTE-MISSING condition (subtype of CL:END-OF-FILE). The
condition object contains the stream and it can be read with
CL:STREAM-ERROR-STREAM function. The currently parsed string can be read
with PARSED-STRING function. There are two restarts available. When
CURRENT-STRING restart is invoked the function continues and accepts the
currently parsed string (if any). Another restart NEW-STRING must be
invoked with a new string value which is then used and returned."
(check-type output-stream (or stream null))
(check-type quote-start-char (or character null))
(check-type quote-end-char (or character null))
(assert (or (and quote-start-char quote-end-char)
(and (null quote-start-char)
(null quote-end-char)))
nil "Must define both or neither of QUOTE-START-CHAR and ~
QUOTE-END-CHAR.")
(check-type escape-char (or character null))
(assert (or (null escape-char)
(and escape-char
(not (eql escape-char quote-start-char))
(not (eql escape-char quote-end-char))))
nil "ESCAPE-CHAR (if defined) must not be same character as ~
QUOTE-START-CHAR or QUOTE-END-CHAR.")
(check-type separator-chars sequence)
(labels ((separator-char-p (char)
(find char separator-chars))
(parse-stream (in out)
(let ((quote nil))
(handler-case
(loop
:with content := nil
:with esc := nil
:for char := (read-char in)
:do (cond
((and (separator-char-p char)
(not content)
(not quote)
(not esc)))
((and (separator-char-p char)
content
(not quote)
(not esc))
(return))
((and escape-char
(char= escape-char char)
(not esc))
(setf esc t)
(unless unescape
(vector-push-extend char out)))
((and quote-start-char
quote-end-char
(char= quote-start-char char)
(not quote)
(not esc))
(setf quote t))
((and quote-start-char
quote-end-char
(char= quote-end-char char)
quote
(not esc))
(setf quote nil))
(t (vector-push-extend char out)
(setf content t)
(setf esc nil))))
(end-of-file (c)
(if quote (error 'closing-quote-missing
:stream (stream-error-stream c)
:string out)))))))
(let ((output (make-adjustable-string 20)))
(restart-case
(etypecase string-or-stream
(string (with-input-from-string (s string-or-stream)
(parse-stream s output)))
(stream (parse-stream string-or-stream output)))
(current-string ()
:report "Continue and accept the current string.")
(new-string (new)
:report "Supply a new string and continue."
(check-type new string)
(setf output new)))
(when (plusp (length output))
(when output-stream
(princ output output-stream))
output))))
(defun escape-characters (string needs-escaping escape-char)
"Return STRING which has the ESCAPE-CHAR before every character in
NEEDS-ESCAPING sequence."
(check-type string string)
(check-type needs-escaping sequence)
(check-type escape-char character)
(let ((out (make-adjustable-string (length string))))
(loop :for char :across string
:do (when (find char needs-escaping)
(vector-push-extend escape-char out))
(vector-push-extend char out))
out))
(defun quote-string (string &key output-stream
(quote-start-char #\") (quote-end-char #\")
(escape-char #\\)
(needs-escaping (list quote-start-char
quote-end-char
escape-char)))
"Put STRING inside quotes (QUOTE-START-CHAR, QUOTE-END-CHAR).
If ESCAPE-CHAR is a non-NIL put it before every character in
NEEDS-ESCAPING sequence.
When OUTPUT-STREAM is a stream write the output to that stream."
(check-type string string)
(check-type output-stream (or stream null))
(check-type quote-start-char character)
(check-type quote-end-char character)
(check-type escape-char (or character null))
(check-type needs-escaping sequence)
(let ((output
(concatenate 'string
(string quote-start-char)
(if escape-char
(escape-characters string needs-escaping
escape-char)
string)
(string quote-end-char))))
(when output-stream (princ output output-stream))
output))
(defun sql-string (string &key output-stream)
"Return STRING as an SQL language string ('...') and escape all '
characters.
If OUTPUT-STREAM is a stream write the output to that stream."
(quote-string string :output-stream output-stream
:quote-start-char #\'
:quote-end-char #\'
:escape-char #\'
:needs-escaping "'"))
(defun sql-escape-like (string &key output-stream wild-before wild-after)
"Return STRING as an SQL language string ('...') and escape all '
characters as well as all special characters of SQL LIKE operator. If
WILD-BEFORE or WILD-AFTER is non-NIL put LIKE operator's % wild card
character at the beginning or the end of the output string,
respectively. This function's output is only useful with SQL LIKE
operator.
If OUTPUT-STREAM is a stream write the output to that stream too."
(check-type output-stream (or stream null))
(let ((output (concatenate
'string
(sql-string (concatenate
'string
(if wild-before "%")
(escape-characters string "_%\\" #\\)
(if wild-after "%"))
:output-stream nil)
" ESCAPE '\\'")))
(when output-stream
(princ output output-stream))
output))
(defun unescape-c-string (string-or-stream &key output-stream)
"Parse a C language STRING and unescape \"\\\" escape sequences.
Return a new string.
If OUTPUT-STREAM is a stream write the output to the stream too.
The function assumes that the Common Lisp implementation uses Unicode
character codes."
(check-type string-or-stream (or string stream))
(check-type output-stream (or stream null))
(labels
((read-chars (in predicate max)
(let ((chars (make-adjustable-string max)))
(loop :repeat max
:for c := (peek-char nil in)
:do (if (funcall predicate c)
(vector-push-extend (read-char in) chars)
(loop-finish)))
chars))
(hex-char-p (char)
(digit-char-p char 16))
(octal-char-p (char)
(digit-char-p char 8))
(parse-int-base (base string)
(parse-integer string :radix base))
(parse-stream (in out)
(loop
:for char := (read-char in)
:if (char= char #\\) :do
(let ((sub-char (read-char in)))
(case sub-char
(#\a (vector-push-extend #\Bel out))
(#\b (vector-push-extend #\Backspace out))
(#\e (vector-push-extend #\Esc out))
(#\f (vector-push-extend #\Page out))
(#\n (vector-push-extend #\Newline out))
(#\r (vector-push-extend #\Return out))
(#\t (vector-push-extend #\Tab out))
(#\v (vector-push-extend #\Vt out))
(#\\ (vector-push-extend #\\ out))
(#\' (vector-push-extend #\' out))
(#\" (vector-push-extend #\" out))
(#\? (vector-push-extend #\? out))
(#\u
(let ((cp (read-chars in #'hex-char-p 4)))
(when (= 4 (length cp))
(vector-push-extend (code-char (parse-int-base 16 cp))
out))))
(#\U
(let ((cp (read-chars in #'hex-char-p 8)))
(when (= 8 (length cp))
(vector-push-extend (code-char (parse-int-base 16 cp))
out))))
(#\x
(let ((cp (read-chars in #'hex-char-p 2)))
(when (plusp (length cp))
(vector-push-extend (code-char (parse-int-base 16 cp))
out))))
(t
(unread-char sub-char in)
(let ((cp (read-chars in #'octal-char-p 3)))
(when (plusp (length cp))
(vector-push-extend (code-char (parse-int-base 8 cp))
out))))))
:else :do (vector-push-extend char out))))
(let ((output (make-adjustable-string 15)))
(handler-case (etypecase string-or-stream
(string (with-input-from-string (s string-or-stream)
(parse-stream s output)))
(stream (parse-stream string-or-stream output)))
(end-of-file () nil))
(when output-stream
(princ output output-stream))
output)))
|
0f080e415efed98a332994b68fe0294b24df427f4a9bf09ac448e741ff620caa
|
solita/mnt-teet
|
log.clj
|
(ns teet.log
  (:require [taoensso.timbre :as timbre]))

;;
;; Re-export selected timbre functions
;;
;; Each INTERN below copies the value of the corresponding timbre
;; macro var into this namespace under the same symbol, with
;; {:macro true} metadata so it is treated as a macro here.  Callers
;; can then use teet.log/info etc. without requiring timbre directly.

(intern 'teet.log
        (with-meta 'debug {:macro true})
        @#'timbre/debug)
(intern 'teet.log
        (with-meta 'info {:macro true})
        @#'timbre/info)
(intern 'teet.log
        (with-meta 'warn {:macro true})
        @#'timbre/warn)
(intern 'teet.log
        (with-meta 'error {:macro true})
        @#'timbre/error)
(intern 'teet.log
        (with-meta 'fatal {:macro true})
        @#'timbre/fatal)
(intern 'teet.log
        (with-meta 'spy {:macro true})
        @#'timbre/spy)
| null |
https://raw.githubusercontent.com/solita/mnt-teet/7a5124975ce1c7f3e7a7c55fe23257ca3f7b6411/app/frontend/src/cljs/teet/log.clj
|
clojure
|
Re-export selected timbre functions
|
(ns teet.log
(:require [taoensso.timbre :as timbre]))
(intern 'teet.log
(with-meta 'debug {:macro true})
@#'timbre/debug)
(intern 'teet.log
(with-meta 'info {:macro true})
@#'timbre/info)
(intern 'teet.log
(with-meta 'warn {:macro true})
@#'timbre/warn)
(intern 'teet.log
(with-meta 'error {:macro true})
@#'timbre/error)
(intern 'teet.log
(with-meta 'fatal {:macro true})
@#'timbre/fatal)
(intern 'teet.log
(with-meta 'spy {:macro true})
@#'timbre/spy)
|
9b9dc60805ba475017c4d6642fe197d99c7df046b1d889e86ad90bbc5c67e82a
|
esl/MongooseIM
|
mongoose_graphql_stanza_admin_subscription.erl
|
-module(mongoose_graphql_stanza_admin_subscription).
-behaviour(mongoose_graphql).
-import(mongoose_graphql_helper, [format_result/2]).
-export([execute/4]).
-ignore_xref([execute/4]).
-include("../mongoose_graphql_types.hrl").
execute(Ctx, _Obj, <<"subscribeForMessages">>, Args) ->
subscribe_for_messages(Ctx, Args).
subscribe_for_messages(#{event := terminate, stream := Session}, _) ->
mongoose_stanza_api:close_session(Session),
{ok, null, [{stream, closed}]};
subscribe_for_messages(#{event := Event}, _) ->
mongoose_graphql_stanza_helper:handle_event(Event);
subscribe_for_messages(_Ctx, #{<<"caller">> := Jid}) ->
case mongoose_stanza_api:open_session(Jid, true) of
{ok, Stream} ->
{ok, null, [{stream, Stream}]};
Error ->
format_result(Error, #{caller => Jid})
end.
| null |
https://raw.githubusercontent.com/esl/MongooseIM/7c7419889d3babba1a842903fe515c8f61752e7d/src/graphql/admin/mongoose_graphql_stanza_admin_subscription.erl
|
erlang
|
-module(mongoose_graphql_stanza_admin_subscription).
-behaviour(mongoose_graphql).
-import(mongoose_graphql_helper, [format_result/2]).
-export([execute/4]).
-ignore_xref([execute/4]).
-include("../mongoose_graphql_types.hrl").
execute(Ctx, _Obj, <<"subscribeForMessages">>, Args) ->
subscribe_for_messages(Ctx, Args).
subscribe_for_messages(#{event := terminate, stream := Session}, _) ->
mongoose_stanza_api:close_session(Session),
{ok, null, [{stream, closed}]};
subscribe_for_messages(#{event := Event}, _) ->
mongoose_graphql_stanza_helper:handle_event(Event);
subscribe_for_messages(_Ctx, #{<<"caller">> := Jid}) ->
case mongoose_stanza_api:open_session(Jid, true) of
{ok, Stream} ->
{ok, null, [{stream, Stream}]};
Error ->
format_result(Error, #{caller => Jid})
end.
|
|
4936f0776297ba6630b63132ac5b2ae6b4f9b6bae341ccd9cebc6216383c0022
|
tarleb/jira-wiki-markup
|
Printer.hs
|
# LANGUAGE CPP #
# LANGUAGE LambdaCase #
|
Module : Text . Jira . Parser
Copyright : © 2019–2023 : MIT
Maintainer :
Stability : alpha
Portability : portable
wiki markup text from an abstract syntax tree .
Module : Text.Jira.Parser
Copyright : © 2019–2023 Albert Krewinkel
License : MIT
Maintainer : Albert Krewinkel <>
Stability : alpha
Portability : portable
Generate Jira wiki markup text from an abstract syntax tree.
-}
module Text.Jira.Printer
( pretty
, renderBlock
, renderInline
, prettyBlocks
, prettyInlines
, JiraPrinter
, PrinterState (..)
, startState
, withDefault
) where
import Data.Char (isAlphaNum)
#if !MIN_VERSION_base(4,11,0)
import Data.Monoid ((<>))
#endif
import Control.Monad ((<=<))
import Control.Monad.Reader (Reader, runReader, asks, local)
import Data.Text (Text)
import Text.Jira.Markup
import qualified Data.Text as T
| document as formatted text .
pretty :: Doc -> Text
pretty (Doc blks) = prettyBlocks blks
-- | Render a list of Jira blocks as Jira wiki formatted text.
prettyBlocks :: [Block] -> Text
prettyBlocks blks = runReader (renderBlocks blks) startState
-- | Renders a list of Jira inline markup elements.
prettyInlines :: [Inline] -> Text
prettyInlines = \case
[] ->
""
s@Str{} : Styled style inlns : rest ->
renderInline s <> renderStyledSafely style inlns <> prettyInlines rest
Styled style inlns : s@(Str t) : rest | startsWithAlphaNum t ->
renderStyledSafely style inlns <> renderInline s <> prettyInlines rest
-- Most special chars don't need escaping when surrounded by spaces or within
-- a word. Braces are the exception, they should always be escaped.
s@Str{} : SpecialChar c : rest@(Str {}:_) | not (isBrace c) ->
(renderInline s `T.snoc` c) <> prettyInlines rest
s@Space : SpecialChar c : rest@(Space {}:_) | not (isBrace c) ->
(renderInline s `T.snoc` c) <> prettyInlines rest
s@Linebreak : SpecialChar c : rest@(Space {}:_) | not (isBrace c) ->
(renderInline s `T.snoc` c) <> prettyInlines rest
-- Colon and semicolon only need escaping if they could otherwise
-- become part of a smiley.
SpecialChar c : rest@(x : _) | c `elem` [':', ';'] && not (isSmileyStr x) ->
T.singleton c <> prettyInlines rest
[SpecialChar c] | c `elem` [':', ';'] ->
T.singleton c
do n't have to be escaped unless in groups of two
SpecialChar '?' : rest | not (startsWithQuestionMark rest) ->
"?" <> prettyInlines rest
(x:xs) ->
renderInline x <> prettyInlines xs
where
isBrace = \case
'{' -> True
'}' -> True
_ -> False
startsWithAlphaNum t = case T.uncons t of
Just (c, _) -> isAlphaNum c
_ -> False
isSmileyStr = \case
Str x | x `elem` ["D", ")", "(", "P"] -> True
_ -> False
startsWithQuestionMark = \case
SpecialChar '?' : _ -> True
_ -> False
-- | Internal state used by the printer.
data PrinterState = PrinterState
{ stateInTable :: Bool
, stateListLevel :: Text
}
type JiraPrinter a = Reader PrinterState a
-- | Run with default state.
withDefault :: JiraPrinter a -> a
withDefault = flip runReader startState
-- | Default start state of the printer.
startState :: PrinterState
startState = PrinterState
{ stateInTable = False
, stateListLevel = ""
}
-- | Render a block as Jira wiki format.
renderBlocks :: [Block] -> JiraPrinter Text
renderBlocks = concatBlocks <=< mapM renderBlock
-- | Combine the texts produced from rendering a list of blocks.
concatBlocks :: [Text] -> JiraPrinter Text
concatBlocks = return . T.intercalate "\n"
-- | Add a newline character unless we are within a list or table.
appendNewline :: Text -> JiraPrinter Text
appendNewline text = do
listLevel <- asks stateListLevel
inTable <- asks stateInTable
return $
-- add final newline only if we are neither within a table nor a list.
if inTable || not (T.null listLevel)
then text
else text <> "\n"
-- | Render a block as Jira wiki format.
renderBlock :: Block -> JiraPrinter Text
renderBlock = \case
Code lang params content -> return $ T.concat
[ "{code:"
, T.intercalate "|"
(renderLang lang : map renderParam params)
, "}\n"
, content
, "\n{code}"
]
Color colorName blocks -> renderBlocks blocks >>= \blks -> return $ T.concat
[ "{color:", colorText colorName, "}\n"
, blks
, "{color}"
]
BlockQuote [Para xs] | Linebreak `notElem` xs
-> return $ "bq. " <> prettyInlines xs
BlockQuote blocks -> renderBlocks blocks >>= \blks -> return $ T.concat
[ "{quote}\n"
, blks
, "{quote}"]
Header lvl inlines -> return $ T.concat
[ "h", T.pack (show lvl), ". "
, prettyInlines inlines
]
HorizontalRule -> return "----"
List style items -> listWithMarker items (styleChar style) >>=
appendNewline
NoFormat params content -> return $ T.concat
[ "{noformat"
, renderBlockParams params
, "}\n"
, content
, "{noformat}"
]
Panel params blocks -> renderBlocks blocks >>= \blks ->
return $ T.concat
[ "{panel"
, renderBlockParams params
, "}\n"
, blks
, "{panel}"
]
Para inlines -> appendNewline $ prettyInlines inlines
Table rows ->
local (\st -> st { stateInTable = True }) $
fmap T.unlines (mapM renderRow rows)
-- | Returns the ext representation of a color
colorText :: ColorName -> Text
colorText (ColorName c) = c
renderLang :: Language -> Text
renderLang (Language lang) = lang
renderBlockParams :: [Parameter] -> Text
renderBlockParams = \case
[] -> mempty
xs -> T.cons ':' (renderParams xs)
renderParams :: [Parameter] -> Text
renderParams = T.intercalate "|" . map renderParam
renderParam :: Parameter -> Text
renderParam (Parameter key value) = key <> "=" <> value
renderRow :: Row -> JiraPrinter Text
renderRow (Row cells) = do
rendered <- mapM renderCell cells
let closing = if all isHeaderCell cells then " ||" else " |"
return $ T.unwords rendered <> closing
where
isHeaderCell HeaderCell {} = True
isHeaderCell BodyCell {} = False
renderCell :: Cell -> JiraPrinter Text
renderCell cell = let (cellStart, blocks) = case cell of
(HeaderCell bs) -> ("|| ", bs)
(BodyCell bs) -> ("| ", bs)
in (cellStart <>) <$> renderBlocks blocks
styleChar :: ListStyle -> Char
styleChar = \case
CircleBullets -> '*'
SquareBullets -> '-'
Enumeration -> '#'
-- | Create a list using the given character as bullet item marker.
listWithMarker :: [[Block]]
-> Char
-> JiraPrinter Text
listWithMarker items marker = do
let addItem s = s { stateListLevel = stateListLevel s `T.snoc` marker }
renderedBlocks <- local addItem $ mapM listItemToJira items
return $ T.intercalate "\n" renderedBlocks
| Convert bullet or ordered list item ( list of blocks ) to Jira .
listItemToJira :: [Block]
-> JiraPrinter Text
listItemToJira items = do
contents <- renderBlocks items
marker <- asks stateListLevel
return $ case items of
List{} : _ -> contents
_ -> marker <> " " <> contents
-- | Renders a single inline item as Jira markup.
renderInline :: Inline -> Text
renderInline = \case
Anchor name -> "{anchor:" <> name <> "}"
AutoLink url -> fromURL url
Citation ils -> "??" <> prettyInlines ils <> "??"
ColorInline color ils -> "{color:" <> colorText color <> "}" <>
prettyInlines ils <> "{color}"
Emoji icon -> iconText icon
Entity entity -> "&" <> entity <> ";"
Image ps url -> "!" <> fromURL url <> renderImageParams ps <> "!"
Linebreak -> "\n"
Link lt ils url -> renderLink lt ils url
Monospaced inlines -> "{{" <> prettyInlines inlines <> "}}"
Space -> " "
SpecialChar c -> case c of
-- backslash is unescapable, render as entity
'\\' -> "\"
_ -> "\\" `T.snoc` c
Str txt -> txt
Styled style inlines -> renderWrapped (delimiterChar style) inlines
renderStyledSafely :: InlineStyle -> [Inline] -> Text
renderStyledSafely style =
let delim = T.pack ['{', delimiterChar style, '}']
in (delim <>) . (<> delim) . prettyInlines
renderLink :: LinkType -> [Inline] -> URL -> Text
renderLink linkType inlines url = case linkType of
Attachment -> "[" <> prettyInlines inlines <> "^" <> fromURL url <> "]"
Email -> link' $ "mailto:" <> fromURL url
External -> link' $ fromURL url
SmartCard -> smartLink (fromURL url) "smart-card"
SmartLink -> smartLink (fromURL url) "smart-link"
User -> link' $ "~" <> fromURL url
where
link' urlText = case inlines of
[] -> "[" <> urlText <> "]"
_ -> "[" <> prettyInlines inlines <> "|" <> urlText <> "]"
smartLink urlText smartType =
"[" <> prettyInlines inlines <> "|" <> urlText <> "|" <> smartType <> "]"
delimiterChar :: InlineStyle -> Char
delimiterChar = \case
Emphasis -> '_'
Insert -> '+'
Strong -> '*'
Strikeout -> '-'
Subscript -> '~'
Superscript -> '^'
-- | Render image parameters (i.e., separate by comma).
renderImageParams :: [Parameter] -> Text
renderImageParams = \case
[] -> ""
ps | "thumbnail" `elem` map parameterKey ps -> "|thumbnail"
ps -> "|" <> T.intercalate ", " (map renderParam ps)
renderWrapped :: Char -> [Inline] -> Text
renderWrapped c = T.cons c . flip T.snoc c . prettyInlines
| null |
https://raw.githubusercontent.com/tarleb/jira-wiki-markup/553d525355f75314d0c439cc77e11118e8736a73/src/Text/Jira/Printer.hs
|
haskell
|
| Render a list of Jira blocks as Jira wiki formatted text.
| Renders a list of Jira inline markup elements.
Most special chars don't need escaping when surrounded by spaces or within
a word. Braces are the exception, they should always be escaped.
Colon and semicolon only need escaping if they could otherwise
become part of a smiley.
| Internal state used by the printer.
| Run with default state.
| Default start state of the printer.
| Render a block as Jira wiki format.
| Combine the texts produced from rendering a list of blocks.
| Add a newline character unless we are within a list or table.
add final newline only if we are neither within a table nor a list.
| Render a block as Jira wiki format.
| Returns the ext representation of a color
| Create a list using the given character as bullet item marker.
| Renders a single inline item as Jira markup.
backslash is unescapable, render as entity
| Render image parameters (i.e., separate by comma).
|
# LANGUAGE CPP #
# LANGUAGE LambdaCase #
|
Module : Text . Jira . Parser
Copyright : © 2019–2023 : MIT
Maintainer :
Stability : alpha
Portability : portable
wiki markup text from an abstract syntax tree .
Module : Text.Jira.Parser
Copyright : © 2019–2023 Albert Krewinkel
License : MIT
Maintainer : Albert Krewinkel <>
Stability : alpha
Portability : portable
Generate Jira wiki markup text from an abstract syntax tree.
-}
module Text.Jira.Printer
( pretty
, renderBlock
, renderInline
, prettyBlocks
, prettyInlines
, JiraPrinter
, PrinterState (..)
, startState
, withDefault
) where
import Data.Char (isAlphaNum)
#if !MIN_VERSION_base(4,11,0)
import Data.Monoid ((<>))
#endif
import Control.Monad ((<=<))
import Control.Monad.Reader (Reader, runReader, asks, local)
import Data.Text (Text)
import Text.Jira.Markup
import qualified Data.Text as T
| document as formatted text .
pretty :: Doc -> Text
pretty (Doc blks) = prettyBlocks blks
prettyBlocks :: [Block] -> Text
prettyBlocks blks = runReader (renderBlocks blks) startState
prettyInlines :: [Inline] -> Text
prettyInlines = \case
[] ->
""
s@Str{} : Styled style inlns : rest ->
renderInline s <> renderStyledSafely style inlns <> prettyInlines rest
Styled style inlns : s@(Str t) : rest | startsWithAlphaNum t ->
renderStyledSafely style inlns <> renderInline s <> prettyInlines rest
s@Str{} : SpecialChar c : rest@(Str {}:_) | not (isBrace c) ->
(renderInline s `T.snoc` c) <> prettyInlines rest
s@Space : SpecialChar c : rest@(Space {}:_) | not (isBrace c) ->
(renderInline s `T.snoc` c) <> prettyInlines rest
s@Linebreak : SpecialChar c : rest@(Space {}:_) | not (isBrace c) ->
(renderInline s `T.snoc` c) <> prettyInlines rest
SpecialChar c : rest@(x : _) | c `elem` [':', ';'] && not (isSmileyStr x) ->
T.singleton c <> prettyInlines rest
[SpecialChar c] | c `elem` [':', ';'] ->
T.singleton c
do n't have to be escaped unless in groups of two
SpecialChar '?' : rest | not (startsWithQuestionMark rest) ->
"?" <> prettyInlines rest
(x:xs) ->
renderInline x <> prettyInlines xs
where
isBrace = \case
'{' -> True
'}' -> True
_ -> False
startsWithAlphaNum t = case T.uncons t of
Just (c, _) -> isAlphaNum c
_ -> False
isSmileyStr = \case
Str x | x `elem` ["D", ")", "(", "P"] -> True
_ -> False
startsWithQuestionMark = \case
SpecialChar '?' : _ -> True
_ -> False
data PrinterState = PrinterState
{ stateInTable :: Bool
, stateListLevel :: Text
}
type JiraPrinter a = Reader PrinterState a
withDefault :: JiraPrinter a -> a
withDefault = flip runReader startState
startState :: PrinterState
startState = PrinterState
{ stateInTable = False
, stateListLevel = ""
}
renderBlocks :: [Block] -> JiraPrinter Text
renderBlocks = concatBlocks <=< mapM renderBlock
concatBlocks :: [Text] -> JiraPrinter Text
concatBlocks = return . T.intercalate "\n"
appendNewline :: Text -> JiraPrinter Text
appendNewline text = do
listLevel <- asks stateListLevel
inTable <- asks stateInTable
return $
if inTable || not (T.null listLevel)
then text
else text <> "\n"
renderBlock :: Block -> JiraPrinter Text
renderBlock = \case
Code lang params content -> return $ T.concat
[ "{code:"
, T.intercalate "|"
(renderLang lang : map renderParam params)
, "}\n"
, content
, "\n{code}"
]
Color colorName blocks -> renderBlocks blocks >>= \blks -> return $ T.concat
[ "{color:", colorText colorName, "}\n"
, blks
, "{color}"
]
BlockQuote [Para xs] | Linebreak `notElem` xs
-> return $ "bq. " <> prettyInlines xs
BlockQuote blocks -> renderBlocks blocks >>= \blks -> return $ T.concat
[ "{quote}\n"
, blks
, "{quote}"]
Header lvl inlines -> return $ T.concat
[ "h", T.pack (show lvl), ". "
, prettyInlines inlines
]
HorizontalRule -> return "----"
List style items -> listWithMarker items (styleChar style) >>=
appendNewline
NoFormat params content -> return $ T.concat
[ "{noformat"
, renderBlockParams params
, "}\n"
, content
, "{noformat}"
]
Panel params blocks -> renderBlocks blocks >>= \blks ->
return $ T.concat
[ "{panel"
, renderBlockParams params
, "}\n"
, blks
, "{panel}"
]
Para inlines -> appendNewline $ prettyInlines inlines
Table rows ->
local (\st -> st { stateInTable = True }) $
fmap T.unlines (mapM renderRow rows)
colorText :: ColorName -> Text
colorText (ColorName c) = c
renderLang :: Language -> Text
renderLang (Language lang) = lang
renderBlockParams :: [Parameter] -> Text
renderBlockParams = \case
[] -> mempty
xs -> T.cons ':' (renderParams xs)
renderParams :: [Parameter] -> Text
renderParams = T.intercalate "|" . map renderParam
renderParam :: Parameter -> Text
renderParam (Parameter key value) = key <> "=" <> value
renderRow :: Row -> JiraPrinter Text
renderRow (Row cells) = do
rendered <- mapM renderCell cells
let closing = if all isHeaderCell cells then " ||" else " |"
return $ T.unwords rendered <> closing
where
isHeaderCell HeaderCell {} = True
isHeaderCell BodyCell {} = False
renderCell :: Cell -> JiraPrinter Text
renderCell cell = let (cellStart, blocks) = case cell of
(HeaderCell bs) -> ("|| ", bs)
(BodyCell bs) -> ("| ", bs)
in (cellStart <>) <$> renderBlocks blocks
styleChar :: ListStyle -> Char
styleChar = \case
CircleBullets -> '*'
SquareBullets -> '-'
Enumeration -> '#'
listWithMarker :: [[Block]]
-> Char
-> JiraPrinter Text
listWithMarker items marker = do
let addItem s = s { stateListLevel = stateListLevel s `T.snoc` marker }
renderedBlocks <- local addItem $ mapM listItemToJira items
return $ T.intercalate "\n" renderedBlocks
| Convert bullet or ordered list item ( list of blocks ) to Jira .
listItemToJira :: [Block]
-> JiraPrinter Text
listItemToJira items = do
contents <- renderBlocks items
marker <- asks stateListLevel
return $ case items of
List{} : _ -> contents
_ -> marker <> " " <> contents
renderInline :: Inline -> Text
renderInline = \case
Anchor name -> "{anchor:" <> name <> "}"
AutoLink url -> fromURL url
Citation ils -> "??" <> prettyInlines ils <> "??"
ColorInline color ils -> "{color:" <> colorText color <> "}" <>
prettyInlines ils <> "{color}"
Emoji icon -> iconText icon
Entity entity -> "&" <> entity <> ";"
Image ps url -> "!" <> fromURL url <> renderImageParams ps <> "!"
Linebreak -> "\n"
Link lt ils url -> renderLink lt ils url
Monospaced inlines -> "{{" <> prettyInlines inlines <> "}}"
Space -> " "
SpecialChar c -> case c of
'\\' -> "\"
_ -> "\\" `T.snoc` c
Str txt -> txt
Styled style inlines -> renderWrapped (delimiterChar style) inlines
renderStyledSafely :: InlineStyle -> [Inline] -> Text
renderStyledSafely style =
let delim = T.pack ['{', delimiterChar style, '}']
in (delim <>) . (<> delim) . prettyInlines
renderLink :: LinkType -> [Inline] -> URL -> Text
renderLink linkType inlines url = case linkType of
Attachment -> "[" <> prettyInlines inlines <> "^" <> fromURL url <> "]"
Email -> link' $ "mailto:" <> fromURL url
External -> link' $ fromURL url
SmartCard -> smartLink (fromURL url) "smart-card"
SmartLink -> smartLink (fromURL url) "smart-link"
User -> link' $ "~" <> fromURL url
where
link' urlText = case inlines of
[] -> "[" <> urlText <> "]"
_ -> "[" <> prettyInlines inlines <> "|" <> urlText <> "]"
smartLink urlText smartType =
"[" <> prettyInlines inlines <> "|" <> urlText <> "|" <> smartType <> "]"
delimiterChar :: InlineStyle -> Char
delimiterChar = \case
Emphasis -> '_'
Insert -> '+'
Strong -> '*'
Strikeout -> '-'
Subscript -> '~'
Superscript -> '^'
renderImageParams :: [Parameter] -> Text
renderImageParams = \case
[] -> ""
ps | "thumbnail" `elem` map parameterKey ps -> "|thumbnail"
ps -> "|" <> T.intercalate ", " (map renderParam ps)
renderWrapped :: Char -> [Inline] -> Text
renderWrapped c = T.cons c . flip T.snoc c . prettyInlines
|
d52fffedd25e939dbae99411c0bf809025fb78f3d13b5a9e7e6f148c84228f21
|
avsm/melange
|
spl_cfg.mli
|
* Copyright ( c ) 2005 < >
*
* Permission to use , copy , modify , and distribute this software for any
* purpose with or without fee is hereby granted , provided that the above
* copyright notice and this permission notice appear in all copies .
*
* THE SOFTWARE IS PROVIDED " AS IS " AND THE AUTHOR DISCLAIMS ALL WARRANTIES
* WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
* MERCHANTABILITY AND FITNESS . IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
* ANY SPECIAL , DIRECT , INDIRECT , OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
* WHATSOEVER RESULTING FROM LOSS OF USE , DATA OR PROFITS , WHETHER IN AN
* ACTION OF CONTRACT , NEGLIGENCE OR OTHER TORTIOUS ACTION , ARISING OUT OF
* OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE .
*
* $ I d : spl_cfg.mli , v 1.6 2006/02/09 17:44:52 avsm Exp $
* Copyright (c) 2005 Anil Madhavapeddy <>
*
* Permission to use, copy, modify, and distribute this software for any
* purpose with or without fee is hereby granted, provided that the above
* copyright notice and this permission notice appear in all copies.
*
* THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
* WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
* MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
* ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
* WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
* ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
* OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
*
* $Id: spl_cfg.mli,v 1.6 2006/02/09 17:44:52 avsm Exp $
*)
type transition_method =
| Condition of Spl_syntaxtree.expr
| Message of Spl_syntaxtree.id
| Assignment of (Spl_syntaxtree.id * Spl_syntaxtree.expr)
| Terminate
type transition_class =
| T_handle
| T_normal
type state = {
label : string;
mutable edges : transition list;
}
and transition = {
t : transition_method;
target : state ref;
cl : transition_class;
loc : Spl_location.t option;
}
type env = {
func_name : string;
initial_state : state option ref;
final_state : state option ref;
blocks : (string, state) Hashtbl.t;
registers : (Spl_syntaxtree.var_type, unit) Hashtbl.t;
functions_called : (string, unit) Hashtbl.t;
}
type compiled_functions = (string, env * Spl_syntaxtree.func) Hashtbl.t
type global_env = {
filename : string;
functions : compiled_functions;
counter : int ref;
mutable webpage: string;
}
exception Block_not_unique of string
exception Unknown_variable of string
exception Unknown_function of string
exception Type_checking_invariant_failure
val string_of_transition_class : transition_class -> string
val initial_state_of_env : env -> state
val final_state_of_env : env -> state
val list_of_registers : env -> Spl_syntaxtree.var_type list
val blocks_of_function : env -> compiled_functions -> state list
val generate_states : string -> Spl_syntaxtree.func list -> global_env
| null |
https://raw.githubusercontent.com/avsm/melange/e92240e6dc8a440cafa91488a1fc367e2ba57de1/tools/spl/spl_cfg.mli
|
ocaml
|
* Copyright ( c ) 2005 < >
*
* Permission to use , copy , modify , and distribute this software for any
* purpose with or without fee is hereby granted , provided that the above
* copyright notice and this permission notice appear in all copies .
*
* THE SOFTWARE IS PROVIDED " AS IS " AND THE AUTHOR DISCLAIMS ALL WARRANTIES
* WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
* MERCHANTABILITY AND FITNESS . IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
* ANY SPECIAL , DIRECT , INDIRECT , OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
* WHATSOEVER RESULTING FROM LOSS OF USE , DATA OR PROFITS , WHETHER IN AN
* ACTION OF CONTRACT , NEGLIGENCE OR OTHER TORTIOUS ACTION , ARISING OUT OF
* OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE .
*
* $ I d : spl_cfg.mli , v 1.6 2006/02/09 17:44:52 avsm Exp $
* Copyright (c) 2005 Anil Madhavapeddy <>
*
* Permission to use, copy, modify, and distribute this software for any
* purpose with or without fee is hereby granted, provided that the above
* copyright notice and this permission notice appear in all copies.
*
* THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
* WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
* MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
* ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
* WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
* ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
* OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
*
* $Id: spl_cfg.mli,v 1.6 2006/02/09 17:44:52 avsm Exp $
*)
type transition_method =
| Condition of Spl_syntaxtree.expr
| Message of Spl_syntaxtree.id
| Assignment of (Spl_syntaxtree.id * Spl_syntaxtree.expr)
| Terminate
type transition_class =
| T_handle
| T_normal
type state = {
label : string;
mutable edges : transition list;
}
and transition = {
t : transition_method;
target : state ref;
cl : transition_class;
loc : Spl_location.t option;
}
type env = {
func_name : string;
initial_state : state option ref;
final_state : state option ref;
blocks : (string, state) Hashtbl.t;
registers : (Spl_syntaxtree.var_type, unit) Hashtbl.t;
functions_called : (string, unit) Hashtbl.t;
}
type compiled_functions = (string, env * Spl_syntaxtree.func) Hashtbl.t
type global_env = {
filename : string;
functions : compiled_functions;
counter : int ref;
mutable webpage: string;
}
exception Block_not_unique of string
exception Unknown_variable of string
exception Unknown_function of string
exception Type_checking_invariant_failure
val string_of_transition_class : transition_class -> string
val initial_state_of_env : env -> state
val final_state_of_env : env -> state
val list_of_registers : env -> Spl_syntaxtree.var_type list
val blocks_of_function : env -> compiled_functions -> state list
val generate_states : string -> Spl_syntaxtree.func list -> global_env
|
|
427492c76487e6362c9d639ef87e98f34f59d6782e221e12f2dd70f648f50818
|
DanielG/ghc-mod
|
ShellParse.hs
|
ghc - mod : Happy Haskell Hacking
Copyright ( C ) 2015 < >
--
-- This program is free software: you can redistribute it and/or modify
it under the terms of the GNU Affero General Public License as published by
the Free Software Foundation , either version 3 of the License , or
-- (at your option) any later version.
--
-- This program is distributed in the hope that it will be useful,
-- but WITHOUT ANY WARRANTY; without even the implied warranty of
-- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-- GNU Affero General Public License for more details.
--
You should have received a copy of the GNU Affero General Public License
-- along with this program. If not, see </>.
module GhcMod.Exe.Options.ShellParse (parseCmdLine) where
import Data.Char
import Data.List
go :: String -> String -> [String] -> Bool -> [String]
-- result
go [] curarg accargs _ = reverse $ reverse curarg : accargs
go (c:cl) curarg accargs quotes
-- open quotes
| c == '\STX', not quotes
= go cl curarg accargs True
-- close quotes
| c == '\ETX', quotes
= go cl curarg accargs False
-- space separates arguments outside quotes
| isSpace c, not quotes
= if null curarg
then go cl curarg accargs quotes
else go cl [] (reverse curarg : accargs) quotes
-- general character
| otherwise = go cl (c:curarg) accargs quotes
parseCmdLine :: String -> [String]
parseCmdLine comline'
| Just comline <- stripPrefix "ascii-escape " $ dropWhile isSpace comline'
= go (dropWhile isSpace comline) [] [] False
parseCmdLine [] = [""]
parseCmdLine comline = words comline
| null |
https://raw.githubusercontent.com/DanielG/ghc-mod/391e187a5dfef4421aab2508fa6ff7875cc8259d/src/GhcMod/Exe/Options/ShellParse.hs
|
haskell
|
This program is free software: you can redistribute it and/or modify
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU Affero General Public License for more details.
along with this program. If not, see </>.
result
open quotes
close quotes
space separates arguments outside quotes
general character
|
ghc - mod : Happy Haskell Hacking
Copyright ( C ) 2015 < >
it under the terms of the GNU Affero General Public License as published by
the Free Software Foundation , either version 3 of the License , or
You should have received a copy of the GNU Affero General Public License
module GhcMod.Exe.Options.ShellParse (parseCmdLine) where
import Data.Char
import Data.List
go :: String -> String -> [String] -> Bool -> [String]
go [] curarg accargs _ = reverse $ reverse curarg : accargs
go (c:cl) curarg accargs quotes
| c == '\STX', not quotes
= go cl curarg accargs True
| c == '\ETX', quotes
= go cl curarg accargs False
| isSpace c, not quotes
= if null curarg
then go cl curarg accargs quotes
else go cl [] (reverse curarg : accargs) quotes
| otherwise = go cl (c:curarg) accargs quotes
parseCmdLine :: String -> [String]
parseCmdLine comline'
| Just comline <- stripPrefix "ascii-escape " $ dropWhile isSpace comline'
= go (dropWhile isSpace comline) [] [] False
parseCmdLine [] = [""]
parseCmdLine comline = words comline
|
ee4350ea9c21aa7c940a6969563fb1b532aa8ffb4e3147d577d064267859910c
|
chef/chef-server
|
chef_wm_depsolver.erl
|
-*- erlang - indent - level : 4;indent - tabs - mode : nil ; fill - column : 92 -*-
%% ex: ts=4 sw=4 et
@author < >
@author < >
@doc Resource module for Chef Depsolver endpoint
Copyright 2012 - 2014 Chef Software , Inc. All Rights Reserved .
%%
This file is provided to you under the Apache License ,
%% Version 2.0 (the "License"); you may not use this file
except in compliance with the License . You may obtain
%% a copy of the License at
%%
%% -2.0
%%
%% Unless required by applicable law or agreed to in writing,
software distributed under the License is distributed on an
" AS IS " BASIS , WITHOUT WARRANTIES OR CONDITIONS OF ANY
%% KIND, either express or implied. See the License for the
%% specific language governing permissions and limitations
%% under the License.
%%
-module(chef_wm_depsolver).
%% chef_wm behaviour callbacks
-include("oc_chef_wm.hrl").
-behaviour(chef_wm).
-export([auth_info/2,
init/1,
init_resource_state/1,
malformed_request_message/3,
request_type/0,
validate_request/3]).
-mixin([{oc_chef_wm_base, [forbidden/2,
is_authorized/2,
service_available/2,
content_types_accepted/2,
content_types_provided/2,
finish_request/2,
malformed_request/2,
ping/2]}]).
-export([allowed_methods/2,
post_is_create/2,
process_post/2]).
%% Internal types
-type cookbook_with_version() :: binary() | {binary(), binary()}.
-define(CACHE_RETRY_INTERVAL, 200).
-define(CACHE_MAX_RETRIES, 10).
init(Config) ->
oc_chef_wm_base:init(?MODULE, Config).
init_resource_state(_Config) ->
{ok, #depsolver_state{}}.
request_type() ->
"depsolver".
allowed_methods(Req, State) ->
{['POST'], Req, State}.
malformed_request_message(Any, _Req, _State) ->
error({unexpected_malformed_request_message, Any}).
validate_request('POST', Req, #base_state{resource_state=DepsolverState}=State) ->
Body = wrq:req_body(Req),
{ok, JsonBody} = chef_depsolver:parse_binary_json(Body),
Runlist = ej:get({<<"run_list">>}, JsonBody),
CookbookList = cookbooks_for_runlist(Runlist),
EnvName = chef_wm_util:object_name(environment, Req),
State1 = State#base_state{resource_state = DepsolverState#depsolver_state{run_list_cookbooks = CookbookList,
environment_name = EnvName}},
{Req, State1}.
auth_info(Req, #base_state{chef_db_context = DbContext,
organization_guid = OrgId,
resource_state = #depsolver_state{environment_name = EnvName}} = State) ->
Environment = chef_db:fetch(#chef_environment{org_id = OrgId, name = EnvName}, DbContext),
forbidden_for_environment(Environment, Req, State).
@doc helper function for auth_info/2 which when given the output of chef_db : fetch_environment ,
%% checks the permissions of the requestor against the environment and cookbook container
forbidden_for_environment(not_found, Req,
                          #base_state{resource_state = #depsolver_state{environment_name = EnvName}} = State) ->
    %% Unknown environment: halt with 404 and a descriptive error body.
    {{halt, 404},
     chef_wm_util:with_error_body(Req, not_found_message(environment, EnvName)),
     State#base_state{log_msg = environment_not_found}};
forbidden_for_environment(#chef_environment{authz_id = EnvAuthzId} = Env, Req,
                          #base_state{resource_state = ResourceState} = State) ->
    %% Set this here before passing it out; downstream functions will need it
    State1 = State#base_state{resource_state = ResourceState#depsolver_state{chef_environment = Env}},
    %% Require read on the cookbook container and read on the environment object.
    {[{container, cookbook, read}, {object, EnvAuthzId, read}], Req, State1}.
%% @doc Webmachine callback: this POST computes a result; it does not create
%% a new resource, so process_post/2 handles the request.
post_is_create(Req, State) ->
    {false, Req, State}.
%% @doc Handle the depsolve POST: load all cookbook version dependencies for
%% the org, verify every run-list cookbook exists, run the dependency solver
%% under the environment's version constraints, and hand the outcome to
%% handle_depsolver_results/4 for response formatting.
process_post(Req, #base_state{reqid = ReqId,
                              chef_db_context = DbContext,
                              organization_name = OrgName,
                              organization_guid = OrgId,
                              resource_state = #depsolver_state{run_list_cookbooks = Cookbooks,
                                                                environment_name = EnvName,
                                                                chef_environment = Env}} = State) ->
    EnvConstraints = chef_object_base:depsolver_constraints(Env),
    case chef_db:fetch_all_cookbook_version_dependencies(DbContext, OrgId) of
        {error, Error} ->
            %% Database failure while loading the dependency universe: 500.
            lager:error("Dependency retrieval failure for org ~p with environment ~p: ~p~n",
                        [OrgName, EnvName, Error]),
            server_error(Req, State, <<"Dependency retrieval failed">>, dep_retrieval_failure);
        AllVersions ->
            case not_found_cookbooks(AllVersions, Cookbooks) of
                ok ->
                    Deps = ?SH_TIME(ReqId, chef_depsolver, solve_dependencies,
                                    (AllVersions, EnvConstraints, Cookbooks)),
                    handle_depsolver_results(ok, Deps, Req, State);
                NotFound ->
                    %% We ignore result if expanded run list contains missing
                    %% cookbooks, so no need to call depsolver at all.
                    handle_depsolver_results(NotFound, ignore, Req, State)
            end
    end.
%%------------------------------------------------------------------------------
%% Internal Functions
%%------------------------------------------------------------------------------
%% @doc We are supplied with a list of recipes. chef-client 0.10 has
%% already expanded roles before passing them to us. We can have bare
%% recipes (== foo), default recipes (== foo::default) or named
%% recipes (== foo::bar). We also can have these three variants with
%% a version appended (== foo::bar@1.0.0)
%%
%% We expand the runlist to a set of cookbooks with dups removed. If
%% a versioned recipe is provided in the runlist we return it as tuple
%% of {cookbook_name, version}
-spec cookbooks_for_runlist(Runlist::[binary()]) -> [cookbook_with_version()].
cookbooks_for_runlist(Runlist) ->
    %% Map every run-list item to its owning cookbook (keeping any version
    %% qualifier), then drop duplicates preserving first-occurrence order.
    remove_dups(lists:map(fun(Item) -> cookbook_for_recipe(split_version(Item)) end,
                          Runlist)).
-spec split_version(Recipe::binary()) -> cookbook_with_version().
%% Split off a trailing "@Version" qualifier, if present.
split_version(Recipe) when is_binary(Recipe) ->
    Parts = re:split(Recipe, <<"@">>),
    case Parts of
        [BareName] ->
            BareName;
        [RecipeName, Version] ->
            {RecipeName, Version}
    end.
%% @doc helper function which translates a full recipe names to the
%% name of the cookbook which contains the recipe.
%%
%% If a version is specified in the recipe it is retained in the
%% cookbook tuple
-spec cookbook_for_recipe(cookbook_with_version()) -> cookbook_with_version().
cookbook_for_recipe({Qualified, Version}) ->
    %% Versioned entry: resolve the name part, keep the version untouched.
    {cookbook_for_recipe(Qualified), Version};
cookbook_for_recipe(Qualified) ->
    %% <<"cookbook::recipe">> yields <<"cookbook">>; bare names pass through.
    case re:split(Qualified, <<"::">>) of
        [CookbookName, _RecipeName] ->
            CookbookName;
        [BareCookbook] ->
            BareCookbook
    end.
-spec remove_dups([cookbook_with_version()]) -> [cookbook_with_version()].
%% Drop duplicates while keeping the first occurrence of each element.
%% Run lists are short, so the O(n^2) membership scan is acceptable here.
remove_dups(Items) ->
    lists:reverse(
      lists:foldl(fun(Item, Seen) ->
                          case lists:member(Item, Seen) of
                              true  -> Seen;
                              false -> [Item | Seen]
                          end
                  end, [], Items)).
%% @doc given a map of cookbook names to versions and a list of
%% cookbook versions in the run_list, return the list of cookbook
%% which are not in the database.
%% @end
%%
%% TODO - look at the nested loops and complexity of this operation -
%% cookbook_missing calls proplists:is_defined/2 which will traverse
the AllVersions structure for each cookbook lookup . It might be
%% better to loop over the list of Cookbooks instead
-spec not_found_cookbooks(AllVersions :: [chef_depsolver:dependency_set()],
                          Cookbooks :: [cookbook_with_version()]) ->
                                 ok | {not_found, [binary(),...]}.
not_found_cookbooks(AllVersions, Cookbooks) ->
    %% Collect the bare names of run-list cookbooks absent from AllVersions.
    case [cookbook_name(CB) || CB <- Cookbooks, cookbook_missing(CB, AllVersions)] of
        [] ->
            ok;
        Missing ->
            {not_found, Missing}
    end.
%% @doc helper function to return the name of a cookbook that is in a
%% processed run_list where it could be either a name or a {Name,
%% Version} tuple
-spec cookbook_name(cookbook_with_version()) -> binary().
cookbook_name({Name, _Version}) ->
    %% Versioned run-list entry: discard the version qualifier.
    Name;
cookbook_name(Name) when is_binary(Name) ->
    Name.
%% @doc helper function to check if a (possibly versioned) cookbook is in the
%% set of all cookbook versions.
%%
%% In order to work in the same manner as the ruby code it will only check for a
%% cookbook name in the list of all cookbook version. This means if any version of a cookbook
%% exists it returns false
-spec cookbook_missing(CB::cookbook_with_version(),
                       AllVersions::[chef_depsolver:dependency_set()]) -> boolean().
cookbook_missing({Name, _Version}, AllVersions) ->
    %% A versioned entry is "missing" only if no version of it exists at all,
    %% matching the behavior of the ruby implementation.
    cookbook_missing(Name, AllVersions);
cookbook_missing(Name, AllVersions) when is_binary(Name) ->
    not proplists:is_defined(Name, AllVersions).
%% @doc Given the output from not_found_cookbooks/2 and
%% chef_depsolver:solve_dependencies/3, format an appropriate response
%% document
%% Run-list references cookbooks that do not exist at any version: 412.
handle_depsolver_results({not_found, CookbookNames}, _Deps, Req, State) when is_list(CookbookNames)->
    precondition_failed(Req, State,
                        not_found_message(cookbook_version, CookbookNames),
                        cookbook_version_not_found);
%% Solver exceeded its time budget: 412 with a timeout body.
handle_depsolver_results(ok, {error, resolution_timeout}, Req, State) ->
    precondition_failed(Req, State,
                        timeout_message(),
                        {timeout, depsolver});
%% No solver workers available: 503 so the client retries later.
handle_depsolver_results(ok, {error, no_depsolver_workers}, Req, State) ->
    wm_halt(503,
            Req,
            State,
            <<"Dependency solver overloaded. Try again later.">>,
            no_depsolver_workers);
%% log the exception and return a 500
handle_depsolver_results(ok, {error, exception, Message, Backtrace}, Req, State) ->
    lager:error([{module, ?MODULE},
                 {error_type, depsolver_ruby_exception},
                 {message, Message},
                 {backtrace, Backtrace}]),
    server_error(Req, State, <<"Dependency solver exception.">>, depsolver_ruby_exception);
%% Constraints reference missing cookbooks or unsatisfiable versions: 412.
handle_depsolver_results(ok, {error, invalid_constraints, Detail}, Req, State) ->
    precondition_failed(Req, State,
                        invalid_constraints_message(Detail),
                        invalid_constraints);
%% No version assignment satisfies the constraint set; Detail is the solver's
%% explanation, wrapped in a tuple to form the EJSON body: 412.
handle_depsolver_results(ok, {error, no_solution, Detail}, Req, State) ->
    precondition_failed(Req, State, {Detail}, no_solution);
%% A dependency points at a cookbook that does not exist: 412.
handle_depsolver_results(ok, {error, {unreachable_package, Unreachable}}, Req, State) ->
    precondition_failed(Req, State,
                        not_reachable_message(Unreachable),
                        unreachable_dep);
%% Success: fetch the solved cookbook versions and build the response map.
handle_depsolver_results(ok, {ok, Cookbooks}, Req, #base_state{reqid = _ReqId,
                                                               chef_db_context = DbContext,
                                                               organization_guid = OrgId } = State) ->
    %% TODO - helper function to deal with the call and match on a chef_cookbook version
    assemble_response(Req, State,
                      chef_db:bulk_fetch_minimal_cookbook_versions(DbContext, OrgId, Cookbooks)).
%% @doc Utility: halt with 412 Precondition Failed and the given error body.
precondition_failed(Req, State, ErrorData, LogMsg) ->
    wm_halt(412, Req, State, ErrorData, LogMsg).
%% @doc Utility: halt with 403 Forbidden and the given error body. Note that
%% this is specific to Chef, and has absolutely nothing to do with the
%% Webmachine forbidden/2 callback.
forbid(Req, State, ErrorData, LogMsg) ->
    wm_halt(403, Req, State, ErrorData, LogMsg).
%% @doc Utility: halt with 500 Internal Server Error and the given error body.
server_error(Req, State, ErrorData, LogMsg) ->
    wm_halt(500, Req, State, ErrorData, LogMsg).
%% @doc Halt request processing with the given HTTP status code, attaching
%% ErrorData as the response body and recording LogMsg in the request log.
wm_halt(Code, Req, State, ErrorData, LogMsg) ->
    {{halt, Code},
     chef_wm_util:with_error_body(Req, ErrorData),
     State#base_state{log_msg = LogMsg}}.
%% @doc Assemble a JSON response object which is a map of cookbook
%% name to cookbook object for all cookbook versions which have been
%% found by depsolving.
%%
%% Note the cookbook object we return back is a stripped-down version,
%% removing large fields such as long_description and attributes in
%% the metadata that are not required by chef-client
assemble_response(Req, #base_state{organization_guid = OrgId, server_api_version = ApiVersion} = State, CookbookVersions) ->
    %% Only build (possibly cached) cookbook JSON after authz has succeeded.
    case oc_chef_wm_base:check_cookbook_authz(CookbookVersions, Req, State) of
        ok ->
            case make_json_list(OrgId, CookbookVersions, chef_wm_util:base_uri(Req), ApiVersion) of
                {error, busy} ->
                    %% Force backoff until the cache catches up with demand. Occurs when caching is enabled
                    %% and the cache message queue is overloaded. Also occurs when we give up on waiting for
                    %% another process that has claimed a given cache key to complete its work.
                    wm_halt(503, Req, State, <<"cookbook versions cache unavailable. Try again shortly.">>, cbv_cache_timeout);
                JsonList ->
                    {true, wrq:append_to_response_body(chef_json:encode(JsonList), Req), State}
            end;
        {error, Msg} ->
            forbid(Req, State, Msg, {forbidden, read})
    end.
%% @doc Build (or fetch from the chef_cbv_cache) the cookbook-name-to-ejson
%% structure for the solved cookbook versions. The cache key combines the org
%% id with a hash of the version list; retries start at attempt 0.
make_json_list(OrgId, CookbookVersions, URI, ApiVersion) ->
    %% 134217728 = 2^27 bounds the phash2 range; a collision would alias
    %% cache entries within the same org. NOTE(review): assumed acceptable
    %% by the original authors -- confirm.
    Hash = erlang:phash2(CookbookVersions, 134217728),
    make_json_list(CookbookVersions, URI, ApiVersion, {OrgId, Hash}, 0).

make_json_list(_CookbookVersions, _URI, _ApiVersion, Key, ?CACHE_MAX_RETRIES) ->
    %% Waiting is good, but let's not hang the client up forever.
    lager:info("chef_wm_depsolver:make_json_list ~p - forcing retry after giving up on ~p", [self(), Key]),
    {error, busy};
make_json_list(CookbookVersions, URI, ApiVersion, Key, NumAttempts) ->
    case chef_cbv_cache:get(Key) of
        {error, retry} ->
            %% Someone else is calculating this, pause to let them finish and try again
            timer:sleep(?CACHE_RETRY_INTERVAL),
            make_json_list(CookbookVersions, URI, ApiVersion, Key, NumAttempts + 1);
        undefined ->
            %% It is not in the cache and nobody is working on it. Stake our claim and
            %% do the work.
            case chef_cbv_cache:claim(Key) of
                Response when Response =:= undefined orelse Response =:= ok->
                    %% We iterate over the list again since we only want to construct the s3urls
                    %% if the authz check has succeeded (in caller). We respond with a minimal version of the
                    %% cookbook which has just enough information for chef-client to run.
                    %% Note that it is possible for the final result to contain thousands of entries.
                    Result = {
                      [ { CBV#chef_cookbook_version.name,
                          chef_cookbook_version:minimal_cookbook_ejson(CBV, URI, ApiVersion) }
                        || CBV <- CookbookVersions ]
                     },
                    chef_cbv_cache:put(Key, Result),
                    Result;
                {error, retry} ->
                    %% Someone snuck in and claimed it at around the same time as us. They got
                    %% there first, so we'll wait and retry the get.
                    timer:sleep(?CACHE_RETRY_INTERVAL),
                    make_json_list(CookbookVersions, URI, ApiVersion, Key, NumAttempts + 1);
                {error, busy} ->
                    %% cache service is overloaded, we're done here.
                    {error, busy}
            end;
        Result ->
            %% Cache hit: return the previously computed structure.
            Result
    end.
%%------------------------------------------------------------------------------
%% Message Functions
%%------------------------------------------------------------------------------
%% @doc Build the EJSON error body describing why the run list's constraints
%% could not be satisfied (missing cookbooks and/or unmet version constraints).
invalid_constraints_message(ErrorDetails) ->
    Missing = proplists:get_value(non_existent_cookbooks, ErrorDetails),
    Unmet = proplists:get_value(constraints_not_met, ErrorDetails),
    Detail = build_constraints_message(
               build_constraints_message(<<"">>, non_existent_cookbooks, Missing),
               constraints_not_met, Unmet),
    {[{<<"message">>, iolist_to_binary(["Run list contains invalid items: ", Detail, "."])},
      {<<"non_existent_cookbooks">>, Missing},
      {<<"cookbooks_with_no_versions">>, Unmet}]}.
%% Accumulate message fragments; the first non-empty fragment stands alone,
%% later ones are appended with a "; " separator.
build_constraints_message(<<"">>, Part, Data) ->
    build_constraints_message_part(Part, Data);
build_constraints_message(SoFar, Part, Data) ->
    iolist_to_binary([SoFar, <<"; ">>, build_constraints_message_part(Part, Data)]).
%% TODO: refactor common parts into sub-function
%% OR: handle the error reporting on the ruby side
%% @doc Render one fragment of the invalid-run-list message; returns <<"">>
%% when there is nothing to report for the given category.
build_constraints_message_part(non_existent_cookbooks, []) ->
    <<"">>;
build_constraints_message_part(non_existent_cookbooks, [Cookbook]) ->
    %% Typo fix: this message previously read "no such cokbook".
    iolist_to_binary(["no such cookbook ", Cookbook]);
build_constraints_message_part(non_existent_cookbooks, Cookbooks) ->
    iolist_to_binary(["no such cookbooks ", bin_str_join(Cookbooks, <<", ">>)]);
build_constraints_message_part(constraints_not_met, []) ->
    <<"">>;
build_constraints_message_part(constraints_not_met, [Constraint]) ->
    iolist_to_binary(["no versions match the constraints on cookbook ", Constraint]);
build_constraints_message_part(constraints_not_met, Constraints) ->
    iolist_to_binary(["no versions match the constraints on cookbooks ",
                      bin_str_join(Constraints, <<", ">>)]).
-spec not_found_message(environment | cookbook_version,
                        EnvironmentName :: binary() | [CookbookName :: binary()]) ->
                               Message :: binary() | {[{Key :: binary(), Message :: binary() |
                                                        [CookbookName :: binary()]}]}.
not_found_message(environment, EnvName) ->
    %% Plain binary; the caller wraps it into an error body.
    iolist_to_binary(["environment '", EnvName, "' not found"]);
not_found_message(cookbook_version, [Name]) when is_binary(Name) ->
    %% Exactly one missing cookbook.
    cookbook_not_found_body(["no such cookbook ", Name], [Name]);
not_found_message(cookbook_version, Names) ->
    %% Several missing cookbooks, comma-separated.
    cookbook_not_found_body(["no such cookbooks ", bin_str_join(Names, <<", ">>)], Names).

%% Shared EJSON skeleton for the cookbook_version clauses above.
cookbook_not_found_body(Detail, Names) ->
    {[{<<"message">>, iolist_to_binary(["Run list contains invalid items: ", Detail, "."])},
      {<<"non_existent_cookbooks">>, Names},
      {<<"cookbooks_with_no_versions">>, []}]}.
%% @doc EJSON body for a dependency on a cookbook that does not exist.
not_reachable_message(CookbookName) ->
    Message = iolist_to_binary(["Unable to satisfy constraints on cookbook ",
                                CookbookName, ", which does not exist."]),
    {[{<<"message">>, Message},
      {<<"non_existent_cookbooks">>, [CookbookName]},
      {<<"most_constrained_cookbooks">>, []}]}.
%% @doc EJSON body returned when dependency resolution exceeds its time budget.
timeout_message() ->
    %% Typo fix: "allotted" was previously misspelled "alotted".
    {[{<<"message">>, <<"unable to solve dependencies in allotted time">>},
      {<<"non_existent_cookbooks">>, []},
      {<<"most_constrained_cookbooks">>,[]}]}.
%%------------------------------------------------------------------------------
%% Miscellaneous Utilities
%%------------------------------------------------------------------------------
%% Helpers to construct pieces of error messages from lists of
%% cookbook names
%% Accumulator-style join over a pre-reversed list of binaries.
-spec bin_str_join(Names::[binary()],
                   Sep::<<_:8,_:_*8>>,
                   Acc::[binary()]) -> [binary()].
bin_str_join([], _Sep, Acc) ->
    Acc;
bin_str_join([Last], _Sep, Acc) ->
    [Last | Acc];
bin_str_join([Name | More], Sep, Acc) ->
    bin_str_join(More, Sep, [Sep, Name | Acc]).

%% Join Names with Sep into a single binary, e.g. [<<"a">>, <<"b">>] -> <<"a, b">>.
-spec bin_str_join(Names::[binary()], Sep::<<_:8,_:_*8>>) -> binary().
bin_str_join(Names, Sep) ->
    iolist_to_binary(bin_str_join(lists:reverse(Names), Sep, [])).
| null |
https://raw.githubusercontent.com/chef/chef-server/7cdc962f8c4fb77a3d59cb02c8beb4339ba218b1/src/oc_erchef/apps/oc_chef_wm/src/chef_wm_depsolver.erl
|
erlang
|
ex: ts=4 sw=4 et
Version 2.0 (the "License"); you may not use this file
a copy of the License at
-2.0
Unless required by applicable law or agreed to in writing,
KIND, either express or implied. See the License for the
specific language governing permissions and limitations
under the License.
chef_wm behaviour callbacks
Internal types
checks the permissions of the requestor against the environment and cookbook container
Set this here before passing it out; downstream functions will need it
------------------------------------------------------------------------------
Internal Functions
------------------------------------------------------------------------------
already expanded roles before passing them to us. We can have bare
recipes (== foo), default recipes ( == foo::default) or named
a version appended (== foo::[email protected])
We expand the runlist to a set of cookbooks with dups removed. If
a versioned recipe is provided in the runlist we return it as tuple
of {cookbook_name, version}
@doc helper function which translates a full recipe names to the
name of the cookbook which contains the recipe.
If a version is specified in the recipe it is retained in the
cookbook tuple
@doc given a map of cookbook names to versions and a list of
cookbook versions in the run_list, return the list of cookbook
which are not in the database.
@end
TODO - look at the nested loops and complexity of this operation -
cookbook_missing calls proplists:is_defined/2 which will traverse
better to loop over the list of Cookbooks instead
@doc helper function to return the name of a cookbook that is in a
processed run_list where it could be either a name or a {Name,
Version} tuple
@doc helper function to check if a (possibly versioned) cookbook is in the
set of all cookbook versions.
In order to work in the same manner as the ruby code it will only check for a
cookbook name in the list of all cookbook version. This means if any version of a cookbook
exists it returns false
@doc Given the output from not_found_cookbooks/2 and
document
TODO - helper function to deal with the call and match on a chef_cookbook version
@doc Utility function to remove some of the verbosity
@doc Utility function to remove some of the verbosity. Note that
this is specific to Chef, and has absolutely nothing to do with the
@doc Assemble a JSON response object which is a map of cookbook
name to cookbook object for all cookbook versions which have been
found by depsolving.
Note the cookbook object we return back is a stripped-down version,
removing large fields such as long_description and attributes in
the metadata that are not required by chef-client
and the cache message queue is overloaded. Also occurs when we give up on waiting for
another process that has claimed a given cache key to complete its work.
Waiting is good, but let's not hang the client up forever.
Someone else is calculating this, pause to let them finish and try again
It is not in the cache and nobody is working on it. Stake our claim and
do the work.
We iterate over the list again since we only want to construct the s3urls
if the authz check has succeeded (in caller). We respond with a minimal version of the
cookbook which has just enough information for chef-client to run
Someone snuck in and claimed it at around the same time as us. They got
cache service is overloaded, we're done here.
------------------------------------------------------------------------------
Message Functions
------------------------------------------------------------------------------
TODO: refactor common parts into sub-function
OR: handle the error reporting on the ruby side
------------------------------------------------------------------------------
Miscellaneous Utilities
------------------------------------------------------------------------------
Helpers to construct pieces of error messages from lists of
cookbook names
|
-*- erlang - indent - level : 4;indent - tabs - mode : nil ; fill - column : 92 -*-
@author < >
@author < >
@doc Resource module for Chef Depsolver endpoint
Copyright 2012 - 2014 Chef Software , Inc. All Rights Reserved .
This file is provided to you under the Apache License ,
except in compliance with the License . You may obtain
software distributed under the License is distributed on an
" AS IS " BASIS , WITHOUT WARRANTIES OR CONDITIONS OF ANY
-module(chef_wm_depsolver).
-include("oc_chef_wm.hrl").
-behaviour(chef_wm).
-export([auth_info/2,
init/1,
init_resource_state/1,
malformed_request_message/3,
request_type/0,
validate_request/3]).
-mixin([{oc_chef_wm_base, [forbidden/2,
is_authorized/2,
service_available/2,
content_types_accepted/2,
content_types_provided/2,
finish_request/2,
malformed_request/2,
ping/2]}]).
-export([allowed_methods/2,
post_is_create/2,
process_post/2]).
-type cookbook_with_version() :: binary() | {binary(), binary()}.
-define(CACHE_RETRY_INTERVAL, 200).
-define(CACHE_MAX_RETRIES, 10).
init(Config) ->
oc_chef_wm_base:init(?MODULE, Config).
init_resource_state(_Config) ->
{ok, #depsolver_state{}}.
request_type() ->
"depsolver".
allowed_methods(Req, State) ->
{['POST'], Req, State}.
malformed_request_message(Any, _Req, _State) ->
error({unexpected_malformed_request_message, Any}).
validate_request('POST', Req, #base_state{resource_state=DepsolverState}=State) ->
Body = wrq:req_body(Req),
{ok, JsonBody} = chef_depsolver:parse_binary_json(Body),
Runlist = ej:get({<<"run_list">>}, JsonBody),
CookbookList = cookbooks_for_runlist(Runlist),
EnvName = chef_wm_util:object_name(environment, Req),
State1 = State#base_state{resource_state = DepsolverState#depsolver_state{run_list_cookbooks = CookbookList,
environment_name = EnvName}},
{Req, State1}.
auth_info(Req, #base_state{chef_db_context = DbContext,
organization_guid = OrgId,
resource_state = #depsolver_state{environment_name = EnvName}} = State) ->
Environment = chef_db:fetch(#chef_environment{org_id = OrgId, name = EnvName}, DbContext),
forbidden_for_environment(Environment, Req, State).
@doc helper function for auth_info/2 which when given the output of chef_db : fetch_environment ,
forbidden_for_environment(not_found, Req,
#base_state{resource_state = #depsolver_state{environment_name = EnvName}} = State) ->
{{halt, 404},
chef_wm_util:with_error_body(Req, not_found_message(environment, EnvName)),
State#base_state{log_msg = environment_not_found}};
forbidden_for_environment(#chef_environment{authz_id = EnvAuthzId} = Env, Req,
#base_state{resource_state = ResourceState} = State) ->
State1 = State#base_state{resource_state = ResourceState#depsolver_state{chef_environment = Env}},
{[{container, cookbook, read}, {object, EnvAuthzId, read}], Req, State1}.
post_is_create(Req, State) ->
{false, Req, State}.
process_post(Req, #base_state{reqid = ReqId,
chef_db_context = DbContext,
organization_name = OrgName,
organization_guid = OrgId,
resource_state = #depsolver_state{run_list_cookbooks = Cookbooks,
environment_name = EnvName,
chef_environment = Env}} = State) ->
EnvConstraints = chef_object_base:depsolver_constraints(Env),
case chef_db:fetch_all_cookbook_version_dependencies(DbContext, OrgId) of
{error, Error} ->
lager:error("Dependency retrieval failure for org ~p with environment ~p: ~p~n",
[OrgName, EnvName, Error]),
server_error(Req, State, <<"Dependency retrieval failed">>, dep_retrieval_failure);
AllVersions ->
case not_found_cookbooks(AllVersions, Cookbooks) of
ok ->
Deps = ?SH_TIME(ReqId, chef_depsolver, solve_dependencies,
(AllVersions, EnvConstraints, Cookbooks)),
handle_depsolver_results(ok, Deps, Req, State);
NotFound ->
We ignore result if expanded run list contains missing
cookbooks , so no need to call depsolver at all .
handle_depsolver_results(NotFound, ignore, Req, State)
end
end.
@doc We are supplied with a list of recipes . chef - client 0.10 has
recipes (= = foo::bar ) . We also can have these three variants with
-spec cookbooks_for_runlist(Runlist::[binary()]) -> [cookbook_with_version()].
cookbooks_for_runlist(Runlist) ->
Cookbooks = [ cookbook_for_recipe(split_version(Item)) || Item <- Runlist ],
remove_dups(Cookbooks).
-spec split_version(Recipe::binary()) -> cookbook_with_version().
split_version(Recipe) when is_binary(Recipe) ->
case re:split(Recipe, <<"@">>) of
[Name] ->
Name;
[Name, Version] ->
{Name, Version}
end.
-spec cookbook_for_recipe(cookbook_with_version()) -> cookbook_with_version().
cookbook_for_recipe({Recipe, Version}) ->
{cookbook_for_recipe(Recipe), Version};
cookbook_for_recipe(Recipe) ->
case re:split(Recipe, <<"::">>) of
[Cookbook, _Recipe] ->
Cookbook;
[Cookbook] ->
Cookbook
end.
-spec remove_dups([cookbook_with_version()]) -> [cookbook_with_version()].
remove_dups(L) ->
WithIdx = lists:zip(L, lists:seq(1, length(L))),
[ Elt || {Elt, _} <- lists:ukeysort(2, lists:ukeysort(1, WithIdx)) ].
the AllVersions structure for each cookbook lookup . It might be
-spec not_found_cookbooks(AllVersions :: [chef_depsolver:dependency_set()],
Cookbooks :: [cookbook_with_version()]) ->
ok | {not_found, [binary(),...]}.
not_found_cookbooks(AllVersions, Cookbooks) ->
NotFound = [ cookbook_name(Cookbook) || Cookbook <- Cookbooks, cookbook_missing(Cookbook, AllVersions)],
case NotFound of
[] -> ok;
_ -> {not_found, NotFound}
end.
-spec cookbook_name(cookbook_with_version()) -> binary().
cookbook_name(Cookbook) when is_binary(Cookbook) ->
Cookbook;
cookbook_name({Name, _Version}) ->
Name.
-spec cookbook_missing(CB::cookbook_with_version(),
AllVersions::[chef_depsolver:dependency_set()]) -> boolean().
cookbook_missing(CB, AllVersions) when is_binary(CB) ->
not proplists:is_defined(CB, AllVersions);
cookbook_missing({Name, _Version}, AllVersions) ->
cookbook_missing(Name, AllVersions).
chef_depsolver : , format an appropriate response
handle_depsolver_results({not_found, CookbookNames}, _Deps, Req, State) when is_list(CookbookNames)->
precondition_failed(Req, State,
not_found_message(cookbook_version, CookbookNames),
cookbook_version_not_found);
handle_depsolver_results(ok, {error, resolution_timeout}, Req, State) ->
precondition_failed(Req, State,
timeout_message(),
{timeout, depsolver});
handle_depsolver_results(ok, {error, no_depsolver_workers}, Req, State) ->
wm_halt(503,
Req,
State,
<<"Dependency solver overloaded. Try again later.">>,
no_depsolver_workers);
log the exception and return a 500
handle_depsolver_results(ok, {error, exception, Message, Backtrace}, Req, State) ->
lager:error([{module, ?MODULE},
{error_type, depsolver_ruby_exception},
{message, Message},
{backtrace, Backtrace}]),
server_error(Req, State, <<"Dependency solver exception.">>, depsolver_ruby_exception);
handle_depsolver_results(ok, {error, invalid_constraints, Detail}, Req, State) ->
precondition_failed(Req, State,
invalid_constraints_message(Detail),
invalid_constraints);
handle_depsolver_results(ok, {error, no_solution, Detail}, Req, State) ->
precondition_failed(Req, State, {Detail}, no_solution);
handle_depsolver_results(ok, {error, {unreachable_package, Unreachable}}, Req, State) ->
precondition_failed(Req, State,
not_reachable_message(Unreachable),
unreachable_dep);
handle_depsolver_results(ok, {ok, Cookbooks}, Req, #base_state{reqid = _ReqId,
chef_db_context = DbContext,
organization_guid = OrgId } = State) ->
assemble_response(Req, State,
chef_db:bulk_fetch_minimal_cookbook_versions(DbContext, OrgId, Cookbooks)).
precondition_failed(Req, State, ErrorData, LogMsg) ->
wm_halt(412, Req, State, ErrorData, LogMsg).
Webmachine callback .
forbid(Req, State, ErrorData, LogMsg) ->
wm_halt(403, Req, State, ErrorData, LogMsg).
server_error(Req, State, ErrorData, LogMsg) ->
wm_halt(500, Req, State, ErrorData, LogMsg).
wm_halt(Code, Req, State, ErrorData, LogMsg) ->
{{halt, Code},
chef_wm_util:with_error_body(Req, ErrorData),
State#base_state{log_msg = LogMsg}}.
assemble_response(Req, #base_state{organization_guid = OrgId, server_api_version = ApiVersion} = State, CookbookVersions) ->
case oc_chef_wm_base:check_cookbook_authz(CookbookVersions, Req, State) of
ok ->
case make_json_list(OrgId, CookbookVersions, chef_wm_util:base_uri(Req), ApiVersion) of
{error, busy} ->
Force backoff until the cache catches up with demand . Occurs when caching is enabled
wm_halt(503, Req, State, <<"cookbook versions cache unavailable. Try again shortly.">>, cbv_cache_timeout);
JsonList ->
{true, wrq:append_to_response_body(chef_json:encode(JsonList), Req), State}
end;
{error, Msg} ->
forbid(Req, State, Msg, {forbidden, read})
end.
make_json_list(OrgId, CookbookVersions, URI, ApiVersion) ->
Hash = erlang:phash2(CookbookVersions, 134217728),
make_json_list(CookbookVersions, URI, ApiVersion, {OrgId, Hash}, 0).
make_json_list(_CookbookVersions, _URI, _ApiVersion, Key, ?CACHE_MAX_RETRIES) ->
lager:info("chef_wm_depsolver:make_json_list ~p - forcing retry after giving up on ~p", [self(), Key]),
{error, busy};
make_json_list(CookbookVersions, URI, ApiVersion, Key, NumAttempts) ->
case chef_cbv_cache:get(Key) of
{error, retry} ->
timer:sleep(?CACHE_RETRY_INTERVAL),
make_json_list(CookbookVersions, URI, ApiVersion, Key, NumAttempts + 1);
undefined ->
case chef_cbv_cache:claim(Key) of
Response when Response =:= undefined orelse Response =:= ok->
Note that it is possible for the final result to contain thousands of entries .
Result = {
[ { CBV#chef_cookbook_version.name,
chef_cookbook_version:minimal_cookbook_ejson(CBV, URI, ApiVersion) }
|| CBV <- CookbookVersions ]
},
chef_cbv_cache:put(Key, Result),
Result;
{error, retry} ->
there first , so we 'll wait and retry the get .
timer:sleep(?CACHE_RETRY_INTERVAL),
make_json_list(CookbookVersions, URI, ApiVersion, Key, NumAttempts + 1);
{error, busy} ->
{error, busy}
end;
Result ->
Result
end.
invalid_constraints_message(ErrorDetails) ->
NonExistentCBs = proplists:get_value(non_existent_cookbooks, ErrorDetails),
ConstraintsNotMet = proplists:get_value(constraints_not_met, ErrorDetails),
Msg1 = build_constraints_message(<<"">>, non_existent_cookbooks, NonExistentCBs),
Msg2 = build_constraints_message(Msg1, constraints_not_met, ConstraintsNotMet),
Message = iolist_to_binary(["Run list contains invalid items: ", Msg2, "."]),
{[{<<"message">>, Message},
{<<"non_existent_cookbooks">>, NonExistentCBs},
{<<"cookbooks_with_no_versions">>, ConstraintsNotMet}]}.
build_constraints_message(<<"">>, Part, Data) ->
build_constraints_message_part(Part, Data);
build_constraints_message(Message, Part, Data) ->
iolist_to_binary([Message,
<<"; ">>,
build_constraints_message_part(Part, Data)]).
build_constraints_message_part(non_existent_cookbooks, []) ->
<<"">>;
build_constraints_message_part(non_existent_cookbooks, [Cookbook]) ->
iolist_to_binary(["no such cokbook ", Cookbook]);
build_constraints_message_part(non_existent_cookbooks, Cookbooks) ->
iolist_to_binary(["no such cookbooks ", bin_str_join(Cookbooks, <<", ">>)]);
build_constraints_message_part(constraints_not_met, []) ->
<<"">>;
build_constraints_message_part(constraints_not_met, [Constraint]) ->
iolist_to_binary(["no versions match the constraints on cookbook ", Constraint]);
build_constraints_message_part(constraints_not_met, Constraints) ->
iolist_to_binary(["no versions match the constraints on cookbooks ",
bin_str_join(Constraints, <<", ">>)]).
-spec not_found_message(environment | cookbook_version,
EnvironmentName :: binary() | [CookbookName :: binary()]) ->
Message :: binary() | {[{Key :: binary(), Message :: binary() |
[CookbookName :: binary()]}]}.
not_found_message(environment, Name) ->
iolist_to_binary(["environment '", Name, "' not found"]);
not_found_message(cookbook_version, [CookbookName]) when is_binary(CookbookName) ->
{[{<<"message">>, list_to_binary(["Run list contains invalid items: no such cookbook ",
CookbookName, "."])},
{<<"non_existent_cookbooks">>, [CookbookName]},
{<<"cookbooks_with_no_versions">>, []}]};
not_found_message(cookbook_version, CookbookNames) ->
Reason = iolist_to_binary(["Run list contains invalid items: no such cookbooks ",
bin_str_join(CookbookNames, <<", ">>), "."]),
{[{<<"message">>, Reason},
{<<"non_existent_cookbooks">>, CookbookNames},
{<<"cookbooks_with_no_versions">>, []}]}.
not_reachable_message(CookbookName) ->
Reason = iolist_to_binary(["Unable to satisfy constraints on cookbook ",
CookbookName,
", which does not exist."]),
{[{<<"message">>, Reason},
{<<"non_existent_cookbooks">>, [ CookbookName ]},
{<<"most_constrained_cookbooks">>,[]}]}.
timeout_message() ->
{[{<<"message">>, <<"unable to solve dependencies in alotted time">>},
{<<"non_existent_cookbooks">>, []},
{<<"most_constrained_cookbooks">>,[]}]}.
-spec bin_str_join(Names::[binary()],
Sep::<<_:8,_:_*8>>,
Acc::[binary()]) -> [binary()].
bin_str_join([], _Sep, Acc) ->
Acc;
bin_str_join([H], _Sep, Acc) ->
[H | Acc];
bin_str_join([Name| Rest], Sep, Acc) ->
bin_str_join(Rest, Sep, [Sep , Name | Acc]).
-spec bin_str_join(Names::[binary()], Sep::<<_:8,_:_*8>>) -> binary().
bin_str_join(Names, Sep) ->
Reverse = lists:reverse(Names),
list_to_binary(bin_str_join(Reverse, Sep, [])).
|
d2aff7491e0f97acca2e86cb28e083f8d7d5097d1db3a84bae57ab1a53c9f83f
|
azimut/shiny
|
8bitmusictheory.lisp
|
(in-package :shiny)
;;
;; Gbmaj13 - Fm9
;; Dm9 - Abm6
| null |
https://raw.githubusercontent.com/azimut/shiny/774381a9bde21c4ec7e7092c7516dd13a5a50780/compositions/drafts/8bitmusictheory.lisp
|
lisp
|
Gbmaj13 - Fm9
Dm9 - Abm6
|
(in-package :shiny)
|
873986541985f4a170eb140e80afc3981b52478724bf73810341ec185c76ab12
|
haskell-mafia/projector
|
ModuleGraph.hs
|
{-# LANGUAGE GeneralizedNewtypeDeriving #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE TupleSections #-}
module Projector.Html.ModuleGraph (
ModuleGraph (..)
, buildModuleGraph
, DependencyGraph (..)
, buildDependencyGraph
, deriveImports
, deriveImportsIncremental
, dependencyOrder
, rebuildOrder
, detectCycles
, GraphError (..)
, renderGraphError
) where
import Data.Map.Strict (Map)
import qualified Data.Map.Strict as M
import qualified Data.Graph as G
import Data.Set (Set)
import qualified Data.Set as S
import qualified Data.Text as T
import qualified Data.Tree as Tree
import P
import Projector.Core
import Projector.Html.Data.Module
data GraphError
= GraphCycle [[ModuleName]]
deriving (Eq, Ord, Show)
-- | The call graph.
newtype ModuleGraph = ModuleGraph {
unModuleGraph :: Map ModuleName (Set ModuleName)
} deriving (Eq, Ord, Show)
-- | An inverted ModuleGraph.
newtype DependencyGraph = DependencyGraph {
unDependencyGraph :: Map ModuleName (Set ModuleName)
} deriving (Eq, Ord, Show)
-- | Figure out the complete set of imports for a set of modules.
-- Since we have globally-unique names (i.e. our modules are a
-- compilation detail), we can figure these out automatically.
deriveImports :: Map ModuleName (Module b l a) -> Map ModuleName (Module b l a)
deriveImports =
deriveImportsIncremental mempty
-- | Like 'deriveImports', but seeded with an externally-known map of
-- already-bound names per module (useful for incremental rebuilds).
-- Strategy: collect each module's free names, invert the bound-name map
-- so each name points at the module that binds it, then add an
-- 'OpenImport' for every module binding something this module uses.
deriveImportsIncremental :: Map ModuleName (Set Name) -> Map ModuleName (Module b l a) -> Map ModuleName (Module b l a)
deriveImportsIncremental known mods =
let modfrees :: Map ModuleName (Set Name)
modfrees = fmap moduleFree mods
modbinds :: Map ModuleName (Set Name)
modbinds = known <> fmap moduleBound mods
-- invert: name -> module that binds it (later bindings win via insert)
inverted :: Map Name ModuleName
inverted = M.foldMapWithKey (\k vs -> foldl' (\acc v -> M.insert v k acc) mempty vs) modbinds
-- mg: module -> set of modules it must import
-- NOTE(review): `with` appears to be flipped fmap from the P prelude -- confirm
mg :: Map ModuleName (Set ModuleName)
mg = with modfrees $ \frees ->
S.fromList (catMaybes (with (toList frees) (flip M.lookup inverted)))
in flip M.mapWithKey mods $ \k m@(Module typs imps exps) ->
mcase (M.lookup k mg) m $ \newimps ->
Module typs (imps <> (M.fromList (fmap (,OpenImport) (toList newimps)))) exps
-- | Construct the module graph for some set of modules.
-- One edge from a module to each module named in its import map; module
-- bodies are not inspected here.
buildModuleGraph :: Map ModuleName (Module b l a) -> ModuleGraph
buildModuleGraph mods =
ModuleGraph (with mods (\(Module _typs imps _exps) -> S.fromList (M.keys imps)))
-- | Construct the dependency graph from the call graph.
-- Inverts every edge: if A imports B, then B maps to a set containing A.
-- Seeding the fold with (fmap (const S.empty) calls) keeps modules that
-- have no dependents present in the result map.
buildDependencyGraph :: ModuleGraph -> DependencyGraph
buildDependencyGraph (ModuleGraph calls) =
DependencyGraph
(M.foldlWithKey
(\acc i call ->
foldl' (\acc' c -> M.insertWith (<>) c (S.singleton i) acc') acc call)
(fmap (const S.empty) calls)
calls)
-- | The order in which we need to typecheck / compile this set of modules.
-- The result is only correct if the graph forms a DAG.
-- TODO This should probably return a forest, would be nice to parallelise.
-- TODO we also probably want a function to do reachability -> rebuild subtree
-- Implementation: topological sort via Data.Graph, then map each vertex
-- back to its ModuleName label.
dependencyOrder :: DependencyGraph -> [ModuleName]
dependencyOrder (DependencyGraph deps) =
let (g, lv, _) = G.graphFromEdges (fmap (\(i, depends) -> (i, i, toList depends)) (M.toList deps))
in fmap (\x -> case lv x of (a,_,_) -> a) (G.topSort g)
-- | Given a list of dirty/changed modules, figure out which dependent
-- mods also need to be rebuilt, and in what order.
-- Everything reachable from a dirty module in the dependency graph is
-- itself dirty; ordering is then taken from 'dependencyOrder' so the
-- result is consistent with a full build.
rebuildOrder :: DependencyGraph -> [ModuleName] -> [ModuleName]
rebuildOrder dg@(DependencyGraph deps) dirty =
let (g, lv, vl) = G.graphFromEdges (fmap (\(i, depends) -> (i, i, toList depends)) (M.toList deps))
dirty' = S.map (\x -> case lv x of (a,_,_) -> a)
(foldMap S.fromList (fmap (maybe mempty (G.reachable g) . vl) dirty))
in filter (flip S.member dirty') (dependencyOrder dg)
-- | Report an error if the call graph does not form a DAG.
-- This does not return an error for free variables or reflexive edges.
-- Uses strongly-connected components: any SCC whose tree has a non-empty
-- subforest contains a genuine cycle; one representative path per such
-- SCC is reported in the error.
detectCycles :: ModuleGraph -> Either GraphError ()
detectCycles cg =
let (g, lv, _) =
G.graphFromEdges
. fmap (\(n, es) -> (n, n, S.toList es))
. M.toList
$ unModuleGraph cg
sccs = G.scc g
-- for each cycle, take a representative path for error reporting.
path n = Tree.rootLabel n : case Tree.subForest n of [] -> []; (x:_) -> path x
labelled = fmap ((\(a, _, _) -> a) . lv)
in case filter (not . null . Tree.subForest) sccs of
[] ->
pure ()
xs ->
Left (GraphCycle (fmap (labelled . path) xs))
-- | Render a 'GraphError' as human-readable text, one paragraph per cycle.
renderGraphError :: GraphError -> Text
renderGraphError ce =
case ce of
GraphCycle cycles ->
T.intercalate "\n\n" (fmap ppCycle cycles)
-- | Pretty-print a single cycle as a chain of "reaches" lines that closes
-- back on the first module. Empty input renders as empty text.
ppCycle :: [ModuleName] -> Text
ppCycle cycle =
case cycle of
[] ->
mempty
(x:xs) ->
mconcat (
"A cycle was detected in the module graph:\n"
: " Module " <> renderName x <> "\n"
: with xs (\y -> " reaches " <> renderName y <> "\n")
<> [" reaches " <> renderName x <> ", forming a cycle."]
)
where
renderName (ModuleName z) = "'" <> z <> "'"
| null |
https://raw.githubusercontent.com/haskell-mafia/projector/6af7c7f1e8a428b14c2c5a508f7d4a3ac2decd52/projector-html/src/Projector/Html/ModuleGraph.hs
|
haskell
|
# LANGUAGE OverloadedStrings #
| The call graph.
| An inverted ModuleGraph.
| Figure out the complete set of imports for a set of modules.
Since we have globally-unique names (i.e. our modules are a
compilation detail), we can figure these out automatically.
| Construct the module graph for some set of modules.
| Construct the dependency graph from the call graph.
| The order in which we need to typecheck / compile this set of modules.
TODO we also probably want a function to do reachability -> rebuild subtree
| Given a list of dirty/changed modules, figure out which dependent
mods also need to be rebuilt, and in what order.
This does not return an error for free variables or reflexive edges.
for each cycle, take a representative path for error reporting.
|
# LANGUAGE GeneralizedNewtypeDeriving #
# LANGUAGE NoImplicitPrelude #
# LANGUAGE TupleSections #
module Projector.Html.ModuleGraph (
ModuleGraph (..)
, buildModuleGraph
, DependencyGraph (..)
, buildDependencyGraph
, deriveImports
, deriveImportsIncremental
, dependencyOrder
, rebuildOrder
, detectCycles
, GraphError (..)
, renderGraphError
) where
import Data.Map.Strict (Map)
import qualified Data.Map.Strict as M
import qualified Data.Graph as G
import Data.Set (Set)
import qualified Data.Set as S
import qualified Data.Text as T
import qualified Data.Tree as Tree
import P
import Projector.Core
import Projector.Html.Data.Module
data GraphError
= GraphCycle [[ModuleName]]
deriving (Eq, Ord, Show)
newtype ModuleGraph = ModuleGraph {
unModuleGraph :: Map ModuleName (Set ModuleName)
} deriving (Eq, Ord, Show)
newtype DependencyGraph = DependencyGraph {
unDependencyGraph :: Map ModuleName (Set ModuleName)
} deriving (Eq, Ord, Show)
deriveImports :: Map ModuleName (Module b l a) -> Map ModuleName (Module b l a)
deriveImports =
deriveImportsIncremental mempty
deriveImportsIncremental :: Map ModuleName (Set Name) -> Map ModuleName (Module b l a) -> Map ModuleName (Module b l a)
deriveImportsIncremental known mods =
let modfrees :: Map ModuleName (Set Name)
modfrees = fmap moduleFree mods
modbinds :: Map ModuleName (Set Name)
modbinds = known <> fmap moduleBound mods
inverted :: Map Name ModuleName
inverted = M.foldMapWithKey (\k vs -> foldl' (\acc v -> M.insert v k acc) mempty vs) modbinds
mg :: Map ModuleName (Set ModuleName)
mg = with modfrees $ \frees ->
S.fromList (catMaybes (with (toList frees) (flip M.lookup inverted)))
in flip M.mapWithKey mods $ \k m@(Module typs imps exps) ->
mcase (M.lookup k mg) m $ \newimps ->
Module typs (imps <> (M.fromList (fmap (,OpenImport) (toList newimps)))) exps
buildModuleGraph :: Map ModuleName (Module b l a) -> ModuleGraph
buildModuleGraph mods =
ModuleGraph (with mods (\(Module _typs imps _exps) -> S.fromList (M.keys imps)))
buildDependencyGraph :: ModuleGraph -> DependencyGraph
buildDependencyGraph (ModuleGraph calls) =
DependencyGraph
(M.foldlWithKey
(\acc i call ->
foldl' (\acc' c -> M.insertWith (<>) c (S.singleton i) acc') acc call)
(fmap (const S.empty) calls)
calls)
The result is only correct if the graph forms a DAG .
TODO This should probably return a forest , would be nice to parallelise .
dependencyOrder :: DependencyGraph -> [ModuleName]
dependencyOrder (DependencyGraph deps) =
let (g, lv, _) = G.graphFromEdges (fmap (\(i, depends) -> (i, i, toList depends)) (M.toList deps))
in fmap (\x -> case lv x of (a,_,_) -> a) (G.topSort g)
rebuildOrder :: DependencyGraph -> [ModuleName] -> [ModuleName]
rebuildOrder dg@(DependencyGraph deps) dirty =
let (g, lv, vl) = G.graphFromEdges (fmap (\(i, depends) -> (i, i, toList depends)) (M.toList deps))
dirty' = S.map (\x -> case lv x of (a,_,_) -> a)
(foldMap S.fromList (fmap (maybe mempty (G.reachable g) . vl) dirty))
in filter (flip S.member dirty') (dependencyOrder dg)
| Report an error if the call graph does not form a DAG .
detectCycles :: ModuleGraph -> Either GraphError ()
detectCycles cg =
let (g, lv, _) =
G.graphFromEdges
. fmap (\(n, es) -> (n, n, S.toList es))
. M.toList
$ unModuleGraph cg
sccs = G.scc g
path n = Tree.rootLabel n : case Tree.subForest n of [] -> []; (x:_) -> path x
labelled = fmap ((\(a, _, _) -> a) . lv)
in case filter (not . null . Tree.subForest) sccs of
[] ->
pure ()
xs ->
Left (GraphCycle (fmap (labelled . path) xs))
renderGraphError :: GraphError -> Text
renderGraphError ce =
case ce of
GraphCycle cycles ->
T.intercalate "\n\n" (fmap ppCycle cycles)
ppCycle :: [ModuleName] -> Text
ppCycle cycle =
case cycle of
[] ->
mempty
(x:xs) ->
mconcat (
"A cycle was detected in the module graph:\n"
: " Module " <> renderName x <> "\n"
: with xs (\y -> " reaches " <> renderName y <> "\n")
<> [" reaches " <> renderName x <> ", forming a cycle."]
)
where
renderName (ModuleName z) = "'" <> z <> "'"
|
f0d1f90bb3e07be755547ad882055a45ee57d546b7cf258c2fabe969c7af048f
|
Limvot/kraken
|
fib_let.scm
|
;; Naive doubly-recursive Fibonacci with fib(0) = fib(1) = 1, applied to an
;; integer read from the first command-line argument; the result is
;; pretty-printed. NOTE(review): exponential time in n -- presumably a
;; deliberate benchmark/test driver.
(pretty-print ((letrec ((fib (lambda (n) (cond ((equal? n 0) 1)
((equal? n 1) 1)
(#t (let (
(r1 (fib (- n 1)))
(r2 (fib (- n 2)))
) (+ r1 r2)))))))
fib) (read (open-input-string (list-ref (command-line) 1)))))
| null |
https://raw.githubusercontent.com/Limvot/kraken/ca68826fbcc6abd11e2845c44092d7125ea92d04/fib_test/fib_let.scm
|
scheme
|
(pretty-print ((letrec ((fib (lambda (n) (cond ((equal? n 0) 1)
((equal? n 1) 1)
(#t (let (
(r1 (fib (- n 1)))
(r2 (fib (- n 2)))
) (+ r1 r2)))))))
fib) (read (open-input-string (list-ref (command-line) 1)))))
|
|
49b3e26ced703ae379f385b7d55fc211f46c0eff13c26f9e207b4bebb48ff3a5
|
camfort/camfort
|
InferenceBackend.hs
|
Copyright 2016 , , , , under the Apache License , Version 2.0 ( the " License " ) ;
you may not use this file except in compliance with the License .
You may obtain a copy of the License at
-2.0
Unless required by applicable law or agreed to in writing , software
distributed under the License is distributed on an " AS IS " BASIS ,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND , either express or implied .
See the License for the specific language governing permissions and
limitations under the License .
Copyright 2016, Dominic Orchard, Andrew Rice, Mistral Contrastin, Matthew Danish
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
-}
Units of measure extension to : backend
Units of measure extension to Fortran: backend
-}
{-# LANGUAGE TupleSections #-}
{-# LANGUAGE ScopedTypeVariables #-}
module Camfort.Specification.Units.InferenceBackend
( chooseImplicitNames
, criticalVariables
, inconsistentConstraints
, inferVariables
-- mainly for debugging and testing:
, shiftTerms
, flattenConstraints
, flattenUnits
, constraintsToMatrix
, constraintsToMatrices
, rref
, genUnitAssignments
, genUnitAssignments'
, provenance
, splitNormHNF
) where
import Camfort.Specification.Units.Environment
import qualified Camfort.Specification.Units.InferenceBackendFlint as Flint
import Control.Arrow (first, second, (***))
import Control.Monad
import Control.Monad.ST
import Control.Parallel.Strategies
import qualified Data.Array as A
import Data.Generics.Uniplate.Operations
(transformBi, universeBi)
import Data.Graph.Inductive hiding ((><))
import qualified Data.IntMap as IM
import qualified Data.IntSet as IS
import Data.List
((\\), findIndex, inits, nub, partition, sort, sortBy, group, tails, foldl')
import qualified Data.Map.Strict as M
import Data.Maybe (fromMaybe, mapMaybe)
import Data.Ord
import Data.Tuple (swap)
import Numeric.LinearAlgebra
( atIndex, (<>)
, rank, (?), (¿)
, rows, cols
, subMatrix, diag
, fromBlocks, ident
)
import qualified Numeric.LinearAlgebra as H
import Numeric.LinearAlgebra.Devel
( newMatrix, readMatrix
, writeMatrix, runSTMatrix
, freezeMatrix, STMatrix
)
import Prelude hiding ((<>))
-- | Returns list of formerly-undetermined variables and their units.
-- Keeps only solved assignments whose left-hand side is a single variable
-- (ordinary or abstract parametric) raised to the power 1.
inferVariables :: Constraints -> [(VV, UnitInfo)]
inferVariables cons = unitVarAssignments
where
unitAssignments = genUnitAssignments cons
-- Find the rows corresponding to the distilled "unit :: var"
-- information for ordinary (non-polymorphic) variables.
unitVarAssignments =
[ (var, units) | ([UnitPow (UnitVar var) k], units) <- unitAssignments, k `approxEq` 1 ] ++
[ (var, units) | ([UnitPow (UnitParamVarAbs (_, var)) k], units) <- unitAssignments, k `approxEq` 1 ]
-- Detect inconsistency if concrete units are assigned an implicit
-- abstract unit variable with coefficients not equal, or there are
-- monomorphic literals being given parametric polymorphic units.
-- Returns the offending assignments re-expressed as constraints; an
-- empty result means consistent.
detectInconsistency :: [([UnitInfo], UnitInfo)] -> Constraints
detectInconsistency unitAssignments = inconsist
where
ua' = map (shiftTerms . fmap flattenUnits) unitAssignments
-- implicit abstract unit equated to a power with a different exponent
badImplicits = [ fmap foldUnits a | a@([UnitPow (UnitParamImpAbs _) k1], rhs) <- ua'
, UnitPow _ k2 <- rhs
, k1 /= k2 ]
inconsist = unitAssignmentsToConstraints badImplicits ++ mustBeUnitless unitAssignments
-- Must be unitless: any assignments of parametric abstract units to
-- monomorphic literals.
-- Produces one "UnitlessLit == literal^k" constraint per offending row.
mustBeUnitless :: [([UnitInfo], UnitInfo)] -> Constraints
mustBeUnitless unitAssignments = mbu
where
mbu = [ ConEq UnitlessLit (UnitPow (UnitLiteral l) k)
| (UnitPow (UnitLiteral l) k:_, rhs) <- ua''
, any isParametric (universeBi rhs :: [UnitInfo]) ]
-- ua' = map (shiftTerms . fmap flattenUnits) unitAssignments
-- shift so literal terms sit on the left, everything else on the right
ua'' = map (shiftTermsBy isLiteral . fmap flattenUnits) unitAssignments
isLiteral UnitLiteral{} = True
isLiteral (UnitPow UnitLiteral{} _) = True
isLiteral _ = False
isParametric UnitParamVarAbs{} = True
isParametric UnitParamPosAbs{} = True
isParametric UnitParamEAPAbs{} = True
isParametric UnitParamLitAbs{} = True
isParametric UnitParamImpAbs{} = True
isParametric (UnitPow u _) = isParametric u
isParametric _ = False
-- convert the assignment format back into constraints
unitAssignmentsToConstraints :: [([UnitInfo], UnitInfo)] -> Constraints
unitAssignmentsToConstraints = map (uncurry ConEq . first foldUnits)
-- | Raw units-assignment pairs.
-- Iterates to a fixed point: if some assignments force literals to be
-- unitless, those constraints are added and the solve is re-run; an
-- inconsistent system yields the empty list.
genUnitAssignments :: Constraints -> [([UnitInfo], UnitInfo)]
genUnitAssignments cons
-- if the results include any mappings that must be forced to be unitless...
| mbu <- mustBeUnitless ua, not (null mbu) = genUnitAssignments (mbu ++ unitAssignmentsToConstraints ua)
| null (detectInconsistency ua) = ua
| otherwise = []
where
ua = genUnitAssignments' colSort cons
| Break up the problem of solving normHNF on each group of related
-- columns, then bring it all back together.
splitNormHNF :: H.Matrix Double -> (H.Matrix Double, [Int])
splitNormHNF unsolvedM = (combinedMat, allNewColIndices)
where
combinedMat = joinMat (map (first fst) solvedMs)
allNewColIndices = concatMap (snd . fst) solvedMs
inParallel = (`using` parTuple2 (parList rseq) rseq)
(solvedMs, _) = inParallel . foldl' eachResult ([], cols unsolvedM) $ map (first Flint.normHNF) (splitMat unsolvedM)
-- for each result re-number the generated columns & add mappings for each.
eachResult (ms, startI) ((m, newColIndices), origCols) = (((m, newColIndices'), origCols'):ms, endI)
where
-- produce (length newColIndices) number of mappings
endI = startI + length newColIndices
-- re-number the newColIndices according to the lookup list
newColIndices' = map (origCols !!) newColIndices
-- add columns in the (combined) matrix for the newly
generated columns from running normHNF on m.
origCols' = origCols ++ [startI .. endI-1]
genUnitAssignments' :: SortFn -> Constraints -> [([UnitInfo], UnitInfo)]
genUnitAssignments' _ [] = []
genUnitAssignments' sortfn cons
| null colList = []
| null inconsists = unitAssignments
| otherwise = []
where
(lhsM, rhsM, inconsists, lhsColA, rhsColA) = constraintsToMatrices' sortfn cons
unsolvedM | rows rhsM == 0 || cols rhsM == 0 = lhsM
| rows lhsM == 0 || cols lhsM == 0 = rhsM
| otherwise = fromBlocks [[lhsM, rhsM]]
(solvedM, newColIndices) = splitNormHNF unsolvedM
solvedM can have additional columns and rows from normHNF ;
-- cosolvedM corresponds to the original lhsM.
cosolvedM = subMatrix ( 0 , 0 ) ( rows solvedM , cols lhsM ) solvedM
-- cosolvedMrhs = subMatrix (0, cols lhsM) (rows solvedM, cols solvedM - cols lhsM) solvedM
-- generate a colList with both the original columns and new ones generated
-- if a new column generated was derived from the right-hand side then negate it
numLhsCols = 1 + snd (A.bounds lhsColA)
colList = map (1,) (A.elems lhsColA ++ A.elems rhsColA) ++ map genC newColIndices
genC n | n >= numLhsCols = (-k, UnitParamImpAbs (show u))
| otherwise = (k, UnitParamImpAbs (show u))
where (k, u) = colList !! n
-- Convert the rows of the solved matrix into flattened unit
-- expressions in the form of "unit ** k".
unitPow (k, u) x = UnitPow u (k * x)
unitPows = map (concatMap flattenUnits . zipWith unitPow colList) (H.toLists solvedM)
-- Variables to the left, unit names to the right side of the equation.
unitAssignments = map (fmap (foldUnits . map negatePosAbs) . checkSanity . partition (not . isUnitRHS')) unitPows
isUnitRHS' (UnitPow (UnitName _) _) = True
isUnitRHS' (UnitPow (UnitParamEAPAbs _) _) = True
-- Because this version of isUnitRHS different from
-- constraintsToMatrix interpretation, we need to ensure that any
-- moved ParamPosAbs units are negated, because they are
-- effectively being shifted across the equal-sign:
isUnitRHS' (UnitPow (UnitParamImpAbs _) _) = True
isUnitRHS' (UnitPow (UnitParamPosAbs (_, 0)) _) = False
isUnitRHS' (UnitPow (UnitParamPosAbs _) _) = True
isUnitRHS' _ = False
checkSanity :: ([UnitInfo], [UnitInfo]) -> ([UnitInfo], [UnitInfo])
checkSanity (u1@[UnitPow (UnitVar _) _], u2)
| or $ [ True | UnitParamPosAbs (_, _) <- universeBi u2 ]
++ [ True | UnitParamImpAbs _ <- universeBi u2 ] = (u1++u2,[])
checkSanity (u1@[UnitPow (UnitParamVarAbs (f, _)) _], u2)
| or [ True | UnitParamPosAbs (f', _) <- universeBi u2, f' /= f ] = (u1++u2,[])
checkSanity c = c
--------------------------------------------------
-- FIXME: you know better...
-- | Floating-point equality within a fixed absolute tolerance 'epsilon'.
approxEq :: Double -> Double -> Bool
approxEq a b = abs (a - b) < epsilon
-- | Negation of 'approxEq'.
notApproxEq :: Double -> Double -> Bool
notApproxEq a b = not (a `approxEq` b)
-- | Absolute tolerance used throughout the solver.
epsilon :: Double
epsilon = 0.001 -- arbitrary
--------------------------------------------------
type RowNum = Int -- ^ 'row number' of matrix
type ColNum = Int -- ^ 'column number' of matrix
-- | Represents a subproblem of AX=B where the row numbers and column
-- numbers help you re-map back to the original problem.
type Subproblem = ([RowNum], (H.Matrix Double, H.Matrix Double), [ColNum])
-- | Divide up the AX=B problem into smaller problems based on the
-- 'related columns' and their corresponding rows in the
right - hand - side of the equation . Where = A and rhsM = B. The
-- resulting list of subproblems contains the new, smaller As and Bs
-- as well as a list of original row numbers and column numbers to
aide re - mapping back to the original lhsM and rhsM.
splitMatWithRHS :: H.Matrix Double -> H.Matrix Double -> [Subproblem]
splitMatWithRHS lhsM rhsM | cols lhsM > 0 = map (eachComponent . sort) $ scc (relatedColumnsGraph lhsM)
| otherwise = []
where
-- Gets called on every strongly-connected component / related set of columns.
eachComponent cs = (rs, mats, cs)
where
-- Selected columns
lhsSelCols :: H.Matrix Double
lhsSelCols = lhsM ¿ cs
csLen = cols lhsSelCols
Find the row numbers of the ' all zero ' rows in lhsM.
lhsAllZeroRows :: [RowNum]
lhsAllZeroRows = map fst . filter (all (approxEq 0) . snd) . zip [0..] $ H.toLists lhsM
-- Find the row numbers that correspond to the non-zero co-efficients in the selected columns.
lhsNonZeroColRows :: [(RowNum, [Double])]
lhsNonZeroColRows = filter (any (notApproxEq 0) . snd) . zip [0..] . H.toLists $ lhsSelCols
List of all the row numbers and row values combined from the two above variables .
lhsNumberedRows :: [(RowNum, [Double])]
lhsNumberedRows = sortBy (comparing fst) $ lhsNonZeroColRows ++ zip lhsAllZeroRows (repeat (replicate csLen 0))
For each of the above LHS rows find a corresponding RHS row .
rhsSelRows :: [[Double]]
rhsSelRows | rows rhsM > 0 = H.toLists (rhsM ? map fst lhsNumberedRows)
| otherwise = []
reassoc (a, b) c = (a, (b, c))
notAllZero (_, (lhs, rhs)) = any (notApproxEq 0) (lhs ++ rhs)
Zip the selected LHS , RHS rows together , filter out any that are all zeroes .
numberedRows :: ([RowNum], [([Double], [Double])])
numberedRows = unzip . filter notAllZero $ zipWith reassoc lhsNumberedRows rhsSelRows
rs :: [RowNum] -- list of row numbers in the subproblem
LHS / RHS subproblem matrices
(rs, mats) = second ((H.fromLists *** H.fromLists) . unzip) numberedRows
-- | Split the lhsM/rhsM problem into subproblems and then look for
-- inconsistent rows in each subproblem, concatenating all of the
-- inconsistent row numbers found (in terms of the rows of the
original ) .
splitFindInconsistentRows :: H.Matrix Double -> H.Matrix Double -> [RowNum]
splitFindInconsistentRows lhsMat rhsMat = concatMap eachComponent $ splitMatWithRHS lhsMat rhsMat
where
eachComponent (rs, (lhsM, rhsM), _) = map (rs !!) $ findInconsistentRows lhsM augM
where
Augmented matrix is defined as the combined LHS / RHS matrices .
augM
| rows rhsM == 0 || cols rhsM == 0 = lhsM
| rows lhsM == 0 || cols lhsM == 0 = rhsM
| otherwise = fromBlocks [[lhsM, rhsM]]
-- | Break out the 'unrelated' columns in a single matrix into
-- separate matrices, along with a list of their original column
-- positions.
splitMat :: H.Matrix Double -> [(H.Matrix Double, [ColNum])]
splitMat m = map (eachComponent . sort) $ scc (relatedColumnsGraph m)
where
eachComponent cs = (H.fromLists . filter (any (/= 0)) . H.toLists $ m ¿ cs, cs)
-- | Bring together the split matrices and put the columns back in
-- their original order. Rows may not be in the same order as the
-- original, but the constraints should be equivalent.
joinMat :: [(H.Matrix Double, [Int])] -> H.Matrix Double
joinMat ms = sortedM
where
disorderedM = H.diagBlock (map fst ms)
colsWithIdx = zip (concatMap snd ms) . H.toColumns $ disorderedM
sortedM = H.fromColumns . map snd . sortBy (comparing fst) $ colsWithIdx
-- | Turn a matrix into a graph where each node represents a column
and each edge represents two columns that have non - zero
-- co-efficients in some row. Basically, 'related columns'. Also
-- includes self-refs for each node..
relatedColumnsGraph :: H.Matrix Double -> Gr () ()
relatedColumnsGraph m = mkGraph (map (,()) ns) (map (\ (a,b) -> (a,b,())) es)
where
nonZeroCols = [ [ j | j <- [0..cols m - 1], not (m `atIndex` (i, j) `approxEq` 0) ] | i <- [0..rows m - 1] ]
ns = nub $ concat nonZeroCols
es = [ (i, j) | cs <- nonZeroCols, [i, j] <- sequence [cs, cs] ]
-- Convert a set of constraints into a matrix of co-efficients, and a
-- reverse mapping of column numbers to units.
constraintsToMatrix :: Constraints -> (H.Matrix Double, [Int], A.Array Int UnitInfo)
constraintsToMatrix cons
| all null lhs = (H.ident 0, [], A.listArray (0, -1) [])
| otherwise = (augM, inconsists, A.listArray (0, length colElems - 1) colElems)
where
convert each constraint into the form ( lhs , rhs )
consPairs = filter (uncurry (/=)) $ flattenConstraints cons
-- ensure terms are on the correct side of the equal sign
shiftedCons = map shiftTerms consPairs
lhs = map fst shiftedCons
rhs = map snd shiftedCons
(lhsM, lhsCols) = flattenedToMatrix colSort lhs
(rhsM, rhsCols) = flattenedToMatrix colSort rhs
colElems = A.elems lhsCols ++ A.elems rhsCols
augM = if rows rhsM == 0 || cols rhsM == 0 then lhsM else if rows lhsM == 0 || cols lhsM == 0 then rhsM else fromBlocks [[lhsM, rhsM]]
inconsists = splitFindInconsistentRows lhsM rhsM
constraintsToMatrices :: Constraints -> (H.Matrix Double, H.Matrix Double, [Int], A.Array Int UnitInfo, A.Array Int UnitInfo)
constraintsToMatrices cons = constraintsToMatrices' colSort cons
constraintsToMatrices' :: SortFn -> Constraints -> (H.Matrix Double, H.Matrix Double, [Int], A.Array Int UnitInfo, A.Array Int UnitInfo)
constraintsToMatrices' sortfn cons
| all null lhs = (H.ident 0, H.ident 0, [], A.listArray (0, -1) [], A.listArray (0, -1) [])
| otherwise = (lhsM, rhsM, inconsists, lhsCols, rhsCols)
where
convert each constraint into the form ( lhs , rhs )
consPairs = filter (uncurry (/=)) $ flattenConstraints cons
-- ensure terms are on the correct side of the equal sign
shiftedCons = map shiftTerms consPairs
lhs = map fst shiftedCons
rhs = map snd shiftedCons
(lhsM, lhsCols) = flattenedToMatrix sortfn lhs
(rhsM, rhsCols) = flattenedToMatrix sortfn rhs
inconsists = splitFindInconsistentRows lhsM rhsM
[ [ UnitInfo ] ] is a list of flattened constraints
flattenedToMatrix :: SortFn -> [[UnitInfo]] -> (H.Matrix Double, A.Array Int UnitInfo)
flattenedToMatrix sortfn cons = (m, A.array (0, numCols - 1) (map swap uniqUnits))
where
m = runSTMatrix $ do
newM <- newMatrix 0 numRows numCols
-- loop through all constraints
forM_ (zip cons [0..]) $ \ (unitPows, row) -> do
write co - efficients for the lhs of the constraint
forM_ unitPows $ \ (UnitPow u k) -> do
case M.lookup u colMap of
Just col -> readMatrix newM row col >>= (writeMatrix newM row col . (+k))
_ -> return ()
return newM
-- identify and enumerate every unit uniquely
uniqUnits = flip zip [0..] . map head . group . sortBy sortfn $ [ u | UnitPow u _ <- concat cons ]
-- map units to their unique column number
colMap = M.fromList uniqUnits
numRows = length cons
numCols = M.size colMap
negateCons :: [UnitInfo] -> [UnitInfo]
negateCons = map (\ (UnitPow u k) -> UnitPow u (-k))
negatePosAbs :: UnitInfo -> UnitInfo
negatePosAbs (UnitPow (UnitParamPosAbs x) k) = UnitPow (UnitParamPosAbs x) (-k)
negatePosAbs (UnitPow (UnitParamImpAbs v) k) = UnitPow (UnitParamImpAbs v) (-k)
negatePosAbs u = u
--------------------------------------------------
-- Units that should appear on the right-hand-side of the matrix during solving
isUnitRHS :: UnitInfo -> Bool
isUnitRHS (UnitPow (UnitName _) _) = True
isUnitRHS (UnitPow (UnitParamEAPAbs _) _) = True
isUnitRHS _ = False
-- | Shift UnitNames/EAPAbs poly units to the RHS, and all else to the LHS.
-- Terms moved across the equal sign are negated.
shiftTerms :: ([UnitInfo], [UnitInfo]) -> ([UnitInfo], [UnitInfo])
shiftTerms (lhs, rhs) = (lhsOk ++ negateCons rhsShift, rhsOk ++ negateCons lhsShift)
where
(lhsOk, lhsShift) = partition (not . isUnitRHS) lhs
(rhsOk, rhsShift) = partition isUnitRHS rhs
-- | Shift terms based on function f (<- True, False ->).
-- Generalisation of 'shiftTerms' with an arbitrary LHS predicate.
shiftTermsBy :: (UnitInfo -> Bool) -> ([UnitInfo], [UnitInfo]) -> ([UnitInfo], [UnitInfo])
shiftTermsBy f (lhs, rhs) = (lhsOk ++ negateCons rhsShift, rhsOk ++ negateCons lhsShift)
where
(lhsOk, lhsShift) = partition f lhs
(rhsOk, rhsShift) = partition (not . f) rhs
-- | Translate all constraints into a LHS, RHS side of units.
flattenConstraints :: Constraints -> [([UnitInfo], [UnitInfo])]
flattenConstraints = map (\ (ConEq u1 u2) -> (flattenUnits u1, flattenUnits u2))
--------------------------------------------------
-- Matrix solving functions based on HMatrix
-- | Returns given matrix transformed into Reduced Row Echelon Form
rref :: H.Matrix Double -> H.Matrix Double
rref a = snd $ rrefMatrices' a 0 0 []
where
-- (a', den, r) = Flint.rref a
-- Provenance of matrices.
data RRefOp
= ElemRowSwap Int Int -- ^ swapped row with row
| ElemRowMult Int Double -- ^ scaled row by constant
| ElemRowAdds [(Int, Int)] -- ^ set of added row onto row ops
deriving (Show, Eq, Ord)
-- worker function
-- invariant: the matrix a is in rref except within the submatrix (j-k,j) to (n,n)
rrefMatrices' :: H.Matrix Double -> Int -> Int -> [(H.Matrix Double, RRefOp)] ->
([(H.Matrix Double, RRefOp)], H.Matrix Double)
rrefMatrices' a j k mats
-- Base cases:
| j - k == n = (mats, a)
| j == m = (mats, a)
When we have n't yet found the first non - zero number in the row , but we really need one :
| a `atIndex` (j - k, j) == 0 = case findIndex (/= 0) below of
this column is all 0s below current row , must move onto the next column
Nothing -> rrefMatrices' a (j + 1) (k + 1) mats
-- we've found a row that has a non-zero element that can be swapped into this row
Just i' -> rrefMatrices' (swapMat <> a) j k ((swapMat, ElemRowSwap i (j - k)):mats)
where i = j - k + i'
swapMat = elemRowSwap n i (j - k)
-- We have found a non-zero cell at (j - k, j), so transform it into
a 1 if needed using elemRowMult , and then clear out any lingering
-- non-zero values that might appear in the same column, using
-- elemRowAdd:
| otherwise = rrefMatrices' a2 (j + 1) k mats2
where
n = rows a
m = cols a
below = getColumnBelow a (j - k, j)
scale = recip (a `atIndex` (j - k, j))
erm = elemRowMult n (j - k) scale
scale the row if the cell is not already equal to 1
(a1, mats1) | a `atIndex` (j - k, j) /= 1 = (erm <> a, (erm, ElemRowMult (j - k) scale):mats)
| otherwise = (a, mats)
Locate any non - zero values in the same column as ( j - k , j ) and
-- cancel them out. Optimisation: instead of constructing a
separate elemRowAdd matrix for each cancellation that are then
-- multiplied together, simply build a single matrix that cancels
-- all of them out at the same time, using the ST Monad.
findAdds _ curM ms
| isWritten = (newMat <> curM, (newMat, ElemRowAdds matOps):ms)
| otherwise = (curM, ms)
where
(isWritten, matOps, newMat) = runST $ do
newM <- newMatrix 0 n n :: ST s (STMatrix s Double)
sequence_ [ writeMatrix newM i' i' 1 | i' <- [0 .. (n - 1)] ]
let f w o i | i >= n = return (w, o)
| i == j - k = f w o (i + 1)
| a `atIndex` (i, j) == 0 = f w o (i + 1)
| otherwise = writeMatrix newM i (j - k) (- (a `atIndex` (i, j)))
>> f True ((i, j - k):o) (i + 1)
(isW, ops) <- f False [] 0
(isW, ops,) `fmap` freezeMatrix newM
(a2, mats2) = findAdds (0::Int) a1 mats1
-- Get a list of values that occur below (i, j) in the matrix a.
getColumnBelow :: H.Matrix Double -> (Int, Int) -> [Double]
getColumnBelow a (i, j) = concat . H.toLists $ subMatrix (i, j) (n - i, 1) a
where n = rows a
-- 'Elementary row operation' matrices
elemRowMult :: Int -> Int -> Double -> H.Matrix Double
elemRowMult n i k = diag (H.fromList (replicate i 1.0 ++ [k] ++ replicate (n - i - 1) 1.0))
elemRowSwap :: Int -> Int -> Int -> H.Matrix Double
elemRowSwap n i j
| i == j = ident n
| i > j = elemRowSwap n j i
| otherwise = ident n ? ([0..i-1] ++ [j] ++ [i+1..j-1] ++ [i] ++ [j+1..n-1])
--------------------------------------------------
type GraphCol = IM.IntMap IS.IntSet -- graph from origin to dest.
type Provenance = IM.IntMap IS.IntSet -- graph from dest. to origin
-- Translate one elementary row operation into a 'GraphCol': a map from
-- each written row to the set of rows whose values flowed into it.
opToGraphCol :: RRefOp -> GraphCol
-- scaling a row introduces no cross-row data flow
opToGraphCol ElemRowMult{} = IM.empty
-- a swap makes each of the two rows depend on the other
opToGraphCol (ElemRowSwap i j) = IM.fromList [ (i, IS.singleton j), (j, IS.singleton i) ]
-- for each (i, j) addition: row i receives row j's contribution and keeps
-- its own value; row j depends only on itself
opToGraphCol (ElemRowAdds l) = IM.fromList $ concat [ [(i, IS.fromList [i,j]), (j, IS.singleton j)] | (i, j) <- l ]
-- Compose two provenance graphs: g2's edges are rewritten through g1 so
-- the result tracks data flow across both steps. On a key collision the
-- translated g2 entry wins (curry snd keeps the second argument).
graphColCombine :: GraphCol -> GraphCol -> GraphCol
graphColCombine g1 g2 = IM.unionWith (curry snd) g1 $ IM.map (IS.fromList . trans . IS.toList) g2
  where
    -- follow each node back through g1; nodes unknown to g1 map to themselves
    trans = concatMap (\ i -> [i] `fromMaybe` (IS.toList <$> IM.lookup i g1))
-- Reverse the direction of every edge in the graph, merging the
-- origin sets of edges that share a destination.
invertGraphCol :: GraphCol -> GraphCol
invertGraphCol g = IM.fromListWith IS.union invertedEdges
  where
    invertedEdges = [ (dst, IS.singleton src) | (src, dsts) <- IM.toList g, dst <- IS.toList dsts ]
provenance :: H.Matrix Double -> (H.Matrix Double, Provenance)
provenance m = (m', p)
where
(matOps, m') = rrefMatrices' m 0 0 []
p = invertGraphCol . foldl' graphColCombine IM.empty . map opToGraphCol $ map snd matOps
-- Worker functions:
-- Given the coefficient matrix coA and the augmented matrix augA, return
-- the indices of the rows implicated in an inconsistency (empty if the
-- system is consistent or trivially small).
findInconsistentRows :: H.Matrix Double -> H.Matrix Double -> [Int]
findInconsistentRows coA augA | rows augA < 2 = []
                              | otherwise = inconsistent
  where
    -- rows not in the largest consistent subset found below
    inconsistent = [0..(rows augA - 1)] \\ consistent
    consistent
      -- if the space is relatively small, try it all
      | rows augA < 16 = head (filter tryRows (powerset $ reverse [0..(rows augA - 1)]))
      -- otherwise only try suffixes of the row list (cheaper approximation),
      -- falling back to the empty selection which always succeeds
      | otherwise = head (filter tryRows (tails ( [0..(rows augA - 1)])) ++ [[]])
    powerset = filterM (const [True, False])
    -- Rouché–Capelli theorem is that if the rank of the coefficient
    -- matrix is not equal to the rank of the augmented matrix then
    -- the system of linear equations is inconsistent.
    tryRows [] = True
    tryRows ns = (rank coA' == rank augA')
      where
        coA' = coA ? ns
        augA' = augA ? ns
-- | Create unique names for all of the inferred implicit polymorphic
-- unit variables.
chooseImplicitNames :: [(VV, UnitInfo)] -> [(VV, UnitInfo)]
chooseImplicitNames vars = replaceImplicitNames nameMap vars
  where
    -- fresh unique names for every implicit unit appearing in vars
    nameMap = genImplicitNamesMap vars
-- Build a map from every positional/implicit abstract unit occurring in x
-- to a freshly generated, explicitly named polymorphic unit (UnitParamEAPAbs).
genImplicitNamesMap :: Data a => a -> M.Map UnitInfo UnitInfo
genImplicitNamesMap x = M.fromList [ (absU, UnitParamEAPAbs (newN, newN)) | (absU, newN) <- zip absUnits newNames ]
  where
    -- all positional and implicit abstract units found anywhere in x
    absUnits = nub [ u | u@(UnitParamPosAbs _) <- universeBi x ] ++
               nub [ u | u@(UnitParamImpAbs _) <- universeBi x ]
    -- names already taken by explicit polymorphic units; must not clash
    eapNames = nub $ [ n | (UnitParamEAPAbs (_, n)) <- universeBi x ] ++
                     [ n | (UnitParamEAPUse ((_, n), _)) <- universeBi x ]
    -- fresh names 'a, 'b, ..., 'z, 'aa, 'ab, ... with taken names filtered out
    newNames = filter (`notElem` eapNames) . map ('\'':) $ nameGen
    nameGen = concatMap sequence . tail . inits $ repeat ['a'..'z']
-- Substitute positional/implicit abstract units everywhere in x using the
-- supplied map; units missing from the map are left untouched.
replaceImplicitNames :: Data a => M.Map UnitInfo UnitInfo -> a -> a
replaceImplicitNames implicitMap = transformBi replace
  where
    replace u@(UnitParamPosAbs _) = fromMaybe u $ M.lookup u implicitMap
    replace u@(UnitParamImpAbs _) = fromMaybe u $ M.lookup u implicitMap
    replace u = u
-- | Identifies the variables that need to be annotated in order for
-- inference or checking to work.
criticalVariables :: Constraints -> [UnitInfo]
criticalVariables [] = []
criticalVariables cons = filter (not . isUnitRHS') $ map (colA A.!) criticalIndices
  where
    (unsolvedM, _, colA) = constraintsToMatrix cons
    solvedM = rref unsolvedM
    -- a column is "uncritical" if some row of the rref matrix has its
    -- leading non-zero entry there (the unit is determined)
    uncriticalIndices = mapMaybe (findIndex (/= 0)) $ H.toLists solvedM
    -- columns with no pivot correspond to underdetermined units
    criticalIndices = A.indices colA \\ uncriticalIndices
    -- concrete unit names are never reported as critical
    isUnitRHS' (UnitName _) = True; isUnitRHS' _ = False
-- | Returns just the list of constraints that were identified as
-- being possible candidates for inconsistency, if there is a problem.
inconsistentConstraints :: Constraints -> Maybe Constraints
inconsistentConstraints [] = Nothing
inconsistentConstraints cons
  -- inconsistencies detectable directly from the unit assignments win
  | not (null direct) = Just direct
  | null inconsists = Nothing
  -- otherwise report the constraints whose row index was implicated
  | otherwise = Just [ con | (con, i) <- zip cons [0..], i `elem` inconsists ]
  where
    -- row numbers implicated by the matrix-based consistency check
    (_, _, inconsists, _, _) = constraintsToMatrices cons
    direct = detectInconsistency $ genUnitAssignments' colSort cons
| null |
https://raw.githubusercontent.com/camfort/camfort/3421e85f6fbbcaa6503a266b3fae029a09d2ff24/src/Camfort/Specification/Units/InferenceBackend.hs
|
haskell
|
mainly for debugging and testing:
| Returns list of formerly-undetermined variables and their units.
Find the rows corresponding to the distilled "unit :: var"
information for ordinary (non-polymorphic) variables.
Detect inconsistency if concrete units are assigned an implicit
abstract unit variable with coefficients not equal, or there are
monomorphic literals being given parametric polymorphic units.
Must be unitless: any assignments of parametric abstract units to
monomorphic literals.
convert the assignment format back into constraints
| Raw units-assignment pairs.
if the results include any mappings that must be forced to be unitless...
columns, then bring it all back together.
for each result re-number the generated columns & add mappings for each.
produce (length newColIndices) number of mappings
re-number the newColIndices according to the lookup list
add columns in the (combined) matrix for the newly
cosolvedM corresponds to the original lhsM.
cosolvedMrhs = subMatrix (0, cols lhsM) (rows solvedM, cols solvedM - cols lhsM) solvedM
generate a colList with both the original columns and new ones generated
if a new column generated was derived from the right-hand side then negate it
Convert the rows of the solved matrix into flattened unit
expressions in the form of "unit ** k".
Variables to the left, unit names to the right side of the equation.
Because this version of isUnitRHS different from
constraintsToMatrix interpretation, we need to ensure that any
moved ParamPosAbs units are negated, because they are
effectively being shifted across the equal-sign:
------------------------------------------------
FIXME: you know better...
arbitrary
------------------------------------------------
^ 'row number' of matrix
^ 'column number' of matrix
| Represents a subproblem of AX=B where the row numbers and column
numbers help you re-map back to the original problem.
| Divide up the AX=B problem into smaller problems based on the
'related columns' and their corresponding rows in the
resulting list of subproblems contains the new, smaller As and Bs
as well as a list of original row numbers and column numbers to
Gets called on every strongly-connected component / related set of columns.
Selected columns
Find the row numbers that correspond to the non-zero co-efficients in the selected columns.
list of row numbers in the subproblem
| Split the lhsM/rhsM problem into subproblems and then look for
inconsistent rows in each subproblem, concatenating all of the
inconsistent row numbers found (in terms of the rows of the
| Break out the 'unrelated' columns in a single matrix into
separate matrices, along with a list of their original column
positions.
| Bring together the split matrices and put the columns back in
their original order. Rows may not be in the same order as the
original, but the constraints should be equivalent.
| Turn a matrix into a graph where each node represents a column
co-efficients in some row. Basically, 'related columns'. Also
includes self-refs for each node..
Convert a set of constraints into a matrix of co-efficients, and a
reverse mapping of column numbers to units.
ensure terms are on the correct side of the equal sign
ensure terms are on the correct side of the equal sign
loop through all constraints
identify and enumerate every unit uniquely
map units to their unique column number
------------------------------------------------
Units that should appear on the right-hand-side of the matrix during solving
| Shift terms based on function f (<- True, False ->).
------------------------------------------------
Matrix solving functions based on HMatrix
| Returns given matrix transformed into Reduced Row Echelon Form
(a', den, r) = Flint.rref a
Provenance of matrices.
^ swapped row with row
^ scaled row by constant
^ set of added row onto row ops
worker function
invariant: the matrix a is in rref except within the submatrix (j-k,j) to (n,n)
Base cases:
we've found a row that has a non-zero element that can be swapped into this row
We have found a non-zero cell at (j - k, j), so transform it into
non-zero values that might appear in the same column, using
elemRowAdd:
cancel them out. Optimisation: instead of constructing a
multiplied together, simply build a single matrix that cancels
all of them out at the same time, using the ST Monad.
Get a list of values that occur below (i, j) in the matrix a.
'Elementary row operation' matrices
------------------------------------------------
graph from origin to dest.
graph from dest. to origin
Worker functions:
if the space is relatively small, try it all
matrix is not equal to the rank of the augmented matrix then
the system of linear equations is inconsistent.
| Create unique names for all of the inferred implicit polymorphic
unit variables.
| Identifies the variables that need to be annotated in order for
inference or checking to work.
| Returns just the list of constraints that were identified as
being possible candidates for inconsistency, if there is a problem.
|
Copyright 2016 , , , , under the Apache License , Version 2.0 ( the " License " ) ;
you may not use this file except in compliance with the License .
You may obtain a copy of the License at
-2.0
Unless required by applicable law or agreed to in writing , software
distributed under the License is distributed on an " AS IS " BASIS ,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND , either express or implied .
See the License for the specific language governing permissions and
limitations under the License .
Copyright 2016, Dominic Orchard, Andrew Rice, Mistral Contrastin, Matthew Danish
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
-}
Units of measure extension to : backend
Units of measure extension to Fortran: backend
-}
# LANGUAGE TupleSections #
# LANGUAGE ScopedTypeVariables #
module Camfort.Specification.Units.InferenceBackend
( chooseImplicitNames
, criticalVariables
, inconsistentConstraints
, inferVariables
, shiftTerms
, flattenConstraints
, flattenUnits
, constraintsToMatrix
, constraintsToMatrices
, rref
, genUnitAssignments
, genUnitAssignments'
, provenance
, splitNormHNF
) where
import Camfort.Specification.Units.Environment
import qualified Camfort.Specification.Units.InferenceBackendFlint as Flint
import Control.Arrow (first, second, (***))
import Control.Monad
import Control.Monad.ST
import Control.Parallel.Strategies
import qualified Data.Array as A
import Data.Generics.Uniplate.Operations
(transformBi, universeBi)
import Data.Graph.Inductive hiding ((><))
import qualified Data.IntMap as IM
import qualified Data.IntSet as IS
import Data.List
((\\), findIndex, inits, nub, partition, sort, sortBy, group, tails, foldl')
import qualified Data.Map.Strict as M
import Data.Maybe (fromMaybe, mapMaybe)
import Data.Ord
import Data.Tuple (swap)
import Numeric.LinearAlgebra
( atIndex, (<>)
, rank, (?), (¿)
, rows, cols
, subMatrix, diag
, fromBlocks, ident
)
import qualified Numeric.LinearAlgebra as H
import Numeric.LinearAlgebra.Devel
( newMatrix, readMatrix
, writeMatrix, runSTMatrix
, freezeMatrix, STMatrix
)
import Prelude hiding ((<>))
-- | Returns list of formerly-undetermined variables and their units.
inferVariables :: Constraints -> [(VV, UnitInfo)]
inferVariables cons = unitVarAssignments
  where
    unitAssignments = genUnitAssignments cons
    -- Keep only the distilled "unit :: var" rows for ordinary
    -- (non-polymorphic) variables: assignments whose LHS is a single
    -- variable raised to (approximately) the power 1.
    unitVarAssignments =
      [ (var, units) | ([UnitPow (UnitVar var) k], units) <- unitAssignments, k `approxEq` 1 ] ++
      [ (var, units) | ([UnitPow (UnitParamVarAbs (_, var)) k], units) <- unitAssignments, k `approxEq` 1 ]
-- Detect inconsistency: concrete units assigned to an implicit abstract
-- unit variable with unequal exponents, or monomorphic literals being
-- given parametric polymorphic units (via mustBeUnitless).
detectInconsistency :: [([UnitInfo], UnitInfo)] -> Constraints
detectInconsistency unitAssignments = inconsist
  where
    ua' = map (shiftTerms . fmap flattenUnits) unitAssignments
    -- implicit abstract units whose RHS carries a different exponent k2 /= k1
    badImplicits = [ fmap foldUnits a | a@([UnitPow (UnitParamImpAbs _) k1], rhs) <- ua'
                   , UnitPow _ k2 <- rhs
                   , k1 /= k2 ]
    inconsist = unitAssignmentsToConstraints badImplicits ++ mustBeUnitless unitAssignments
mustBeUnitless :: [([UnitInfo], UnitInfo)] -> Constraints
mustBeUnitless unitAssignments = mbu
where
mbu = [ ConEq UnitlessLit (UnitPow (UnitLiteral l) k)
| (UnitPow (UnitLiteral l) k:_, rhs) <- ua''
, any isParametric (universeBi rhs :: [UnitInfo]) ]
ua ' = map ( shiftTerms . fmap flattenUnits ) unitAssignments
ua'' = map (shiftTermsBy isLiteral . fmap flattenUnits) unitAssignments
isLiteral UnitLiteral{} = True
isLiteral (UnitPow UnitLiteral{} _) = True
isLiteral _ = False
isParametric UnitParamVarAbs{} = True
isParametric UnitParamPosAbs{} = True
isParametric UnitParamEAPAbs{} = True
isParametric UnitParamLitAbs{} = True
isParametric UnitParamImpAbs{} = True
isParametric (UnitPow u _) = isParametric u
isParametric _ = False
unitAssignmentsToConstraints :: [([UnitInfo], UnitInfo)] -> Constraints
unitAssignmentsToConstraints = map (uncurry ConEq . first foldUnits)
genUnitAssignments :: Constraints -> [([UnitInfo], UnitInfo)]
genUnitAssignments cons
| mbu <- mustBeUnitless ua, not (null mbu) = genUnitAssignments (mbu ++ unitAssignmentsToConstraints ua)
| null (detectInconsistency ua) = ua
| otherwise = []
where
ua = genUnitAssignments' colSort cons
| Break up the problem of solving normHNF on each group of related
splitNormHNF :: H.Matrix Double -> (H.Matrix Double, [Int])
splitNormHNF unsolvedM = (combinedMat, allNewColIndices)
where
combinedMat = joinMat (map (first fst) solvedMs)
allNewColIndices = concatMap (snd . fst) solvedMs
inParallel = (`using` parTuple2 (parList rseq) rseq)
(solvedMs, _) = inParallel . foldl' eachResult ([], cols unsolvedM) $ map (first Flint.normHNF) (splitMat unsolvedM)
eachResult (ms, startI) ((m, newColIndices), origCols) = (((m, newColIndices'), origCols'):ms, endI)
where
endI = startI + length newColIndices
newColIndices' = map (origCols !!) newColIndices
generated columns from running normHNF on m.
origCols' = origCols ++ [startI .. endI-1]
genUnitAssignments' :: SortFn -> Constraints -> [([UnitInfo], UnitInfo)]
genUnitAssignments' _ [] = []
genUnitAssignments' sortfn cons
| null colList = []
| null inconsists = unitAssignments
| otherwise = []
where
(lhsM, rhsM, inconsists, lhsColA, rhsColA) = constraintsToMatrices' sortfn cons
unsolvedM | rows rhsM == 0 || cols rhsM == 0 = lhsM
| rows lhsM == 0 || cols lhsM == 0 = rhsM
| otherwise = fromBlocks [[lhsM, rhsM]]
(solvedM, newColIndices) = splitNormHNF unsolvedM
    -- solvedM can have additional columns and rows from normHNF;
    -- cosolvedM = subMatrix (0, 0) (rows solvedM, cols lhsM) solvedM
numLhsCols = 1 + snd (A.bounds lhsColA)
colList = map (1,) (A.elems lhsColA ++ A.elems rhsColA) ++ map genC newColIndices
genC n | n >= numLhsCols = (-k, UnitParamImpAbs (show u))
| otherwise = (k, UnitParamImpAbs (show u))
where (k, u) = colList !! n
unitPow (k, u) x = UnitPow u (k * x)
unitPows = map (concatMap flattenUnits . zipWith unitPow colList) (H.toLists solvedM)
unitAssignments = map (fmap (foldUnits . map negatePosAbs) . checkSanity . partition (not . isUnitRHS')) unitPows
isUnitRHS' (UnitPow (UnitName _) _) = True
isUnitRHS' (UnitPow (UnitParamEAPAbs _) _) = True
isUnitRHS' (UnitPow (UnitParamImpAbs _) _) = True
isUnitRHS' (UnitPow (UnitParamPosAbs (_, 0)) _) = False
isUnitRHS' (UnitPow (UnitParamPosAbs _) _) = True
isUnitRHS' _ = False
checkSanity :: ([UnitInfo], [UnitInfo]) -> ([UnitInfo], [UnitInfo])
checkSanity (u1@[UnitPow (UnitVar _) _], u2)
| or $ [ True | UnitParamPosAbs (_, _) <- universeBi u2 ]
++ [ True | UnitParamImpAbs _ <- universeBi u2 ] = (u1++u2,[])
checkSanity (u1@[UnitPow (UnitParamVarAbs (f, _)) _], u2)
| or [ True | UnitParamPosAbs (f', _) <- universeBi u2, f' /= f ] = (u1++u2,[])
checkSanity c = c
-- | True when a and b differ by less than 'epsilon'.
approxEq :: Double -> Double -> Bool
approxEq a b = abs (b - a) < epsilon

-- | Negation of 'approxEq'.
notApproxEq :: Double -> Double -> Bool
notApproxEq a b = not (approxEq a b)

-- Comparison tolerance. The binding was missing here (only the signature
-- survived, which does not compile); restored with the arbitrary small
-- value used elsewhere in Camfort.
epsilon :: Double
epsilon = 0.001
type Subproblem = ([RowNum], (H.Matrix Double, H.Matrix Double), [ColNum])
right - hand - side of the equation . Where = A and rhsM = B. The
aide re - mapping back to the original lhsM and rhsM.
splitMatWithRHS :: H.Matrix Double -> H.Matrix Double -> [Subproblem]
splitMatWithRHS lhsM rhsM | cols lhsM > 0 = map (eachComponent . sort) $ scc (relatedColumnsGraph lhsM)
| otherwise = []
where
eachComponent cs = (rs, mats, cs)
where
lhsSelCols :: H.Matrix Double
lhsSelCols = lhsM ¿ cs
csLen = cols lhsSelCols
Find the row numbers of the ' all zero ' rows in lhsM.
lhsAllZeroRows :: [RowNum]
lhsAllZeroRows = map fst . filter (all (approxEq 0) . snd) . zip [0..] $ H.toLists lhsM
lhsNonZeroColRows :: [(RowNum, [Double])]
lhsNonZeroColRows = filter (any (notApproxEq 0) . snd) . zip [0..] . H.toLists $ lhsSelCols
List of all the row numbers and row values combined from the two above variables .
lhsNumberedRows :: [(RowNum, [Double])]
lhsNumberedRows = sortBy (comparing fst) $ lhsNonZeroColRows ++ zip lhsAllZeroRows (repeat (replicate csLen 0))
For each of the above LHS rows find a corresponding RHS row .
rhsSelRows :: [[Double]]
rhsSelRows | rows rhsM > 0 = H.toLists (rhsM ? map fst lhsNumberedRows)
| otherwise = []
reassoc (a, b) c = (a, (b, c))
notAllZero (_, (lhs, rhs)) = any (notApproxEq 0) (lhs ++ rhs)
Zip the selected LHS , RHS rows together , filter out any that are all zeroes .
numberedRows :: ([RowNum], [([Double], [Double])])
numberedRows = unzip . filter notAllZero $ zipWith reassoc lhsNumberedRows rhsSelRows
LHS / RHS subproblem matrices
(rs, mats) = second ((H.fromLists *** H.fromLists) . unzip) numberedRows
original ) .
splitFindInconsistentRows :: H.Matrix Double -> H.Matrix Double -> [RowNum]
splitFindInconsistentRows lhsMat rhsMat = concatMap eachComponent $ splitMatWithRHS lhsMat rhsMat
where
eachComponent (rs, (lhsM, rhsM), _) = map (rs !!) $ findInconsistentRows lhsM augM
where
Augmented matrix is defined as the combined LHS / RHS matrices .
augM
| rows rhsM == 0 || cols rhsM == 0 = lhsM
| rows lhsM == 0 || cols lhsM == 0 = rhsM
| otherwise = fromBlocks [[lhsM, rhsM]]
splitMat :: H.Matrix Double -> [(H.Matrix Double, [ColNum])]
splitMat m = map (eachComponent . sort) $ scc (relatedColumnsGraph m)
where
eachComponent cs = (H.fromLists . filter (any (/= 0)) . H.toLists $ m ¿ cs, cs)
joinMat :: [(H.Matrix Double, [Int])] -> H.Matrix Double
joinMat ms = sortedM
where
disorderedM = H.diagBlock (map fst ms)
colsWithIdx = zip (concatMap snd ms) . H.toColumns $ disorderedM
sortedM = H.fromColumns . map snd . sortBy (comparing fst) $ colsWithIdx
and each edge represents two columns that have non - zero
relatedColumnsGraph :: H.Matrix Double -> Gr () ()
relatedColumnsGraph m = mkGraph (map (,()) ns) (map (\ (a,b) -> (a,b,())) es)
where
nonZeroCols = [ [ j | j <- [0..cols m - 1], not (m `atIndex` (i, j) `approxEq` 0) ] | i <- [0..rows m - 1] ]
ns = nub $ concat nonZeroCols
es = [ (i, j) | cs <- nonZeroCols, [i, j] <- sequence [cs, cs] ]
constraintsToMatrix :: Constraints -> (H.Matrix Double, [Int], A.Array Int UnitInfo)
constraintsToMatrix cons
| all null lhs = (H.ident 0, [], A.listArray (0, -1) [])
| otherwise = (augM, inconsists, A.listArray (0, length colElems - 1) colElems)
where
convert each constraint into the form ( lhs , rhs )
consPairs = filter (uncurry (/=)) $ flattenConstraints cons
shiftedCons = map shiftTerms consPairs
lhs = map fst shiftedCons
rhs = map snd shiftedCons
(lhsM, lhsCols) = flattenedToMatrix colSort lhs
(rhsM, rhsCols) = flattenedToMatrix colSort rhs
colElems = A.elems lhsCols ++ A.elems rhsCols
augM = if rows rhsM == 0 || cols rhsM == 0 then lhsM else if rows lhsM == 0 || cols lhsM == 0 then rhsM else fromBlocks [[lhsM, rhsM]]
inconsists = splitFindInconsistentRows lhsM rhsM
constraintsToMatrices :: Constraints -> (H.Matrix Double, H.Matrix Double, [Int], A.Array Int UnitInfo, A.Array Int UnitInfo)
constraintsToMatrices cons = constraintsToMatrices' colSort cons
constraintsToMatrices' :: SortFn -> Constraints -> (H.Matrix Double, H.Matrix Double, [Int], A.Array Int UnitInfo, A.Array Int UnitInfo)
constraintsToMatrices' sortfn cons
| all null lhs = (H.ident 0, H.ident 0, [], A.listArray (0, -1) [], A.listArray (0, -1) [])
| otherwise = (lhsM, rhsM, inconsists, lhsCols, rhsCols)
where
convert each constraint into the form ( lhs , rhs )
consPairs = filter (uncurry (/=)) $ flattenConstraints cons
shiftedCons = map shiftTerms consPairs
lhs = map fst shiftedCons
rhs = map snd shiftedCons
(lhsM, lhsCols) = flattenedToMatrix sortfn lhs
(rhsM, rhsCols) = flattenedToMatrix sortfn rhs
inconsists = splitFindInconsistentRows lhsM rhsM
[ [ UnitInfo ] ] is a list of flattened constraints
flattenedToMatrix :: SortFn -> [[UnitInfo]] -> (H.Matrix Double, A.Array Int UnitInfo)
flattenedToMatrix sortfn cons = (m, A.array (0, numCols - 1) (map swap uniqUnits))
where
m = runSTMatrix $ do
newM <- newMatrix 0 numRows numCols
forM_ (zip cons [0..]) $ \ (unitPows, row) -> do
write co - efficients for the lhs of the constraint
forM_ unitPows $ \ (UnitPow u k) -> do
case M.lookup u colMap of
Just col -> readMatrix newM row col >>= (writeMatrix newM row col . (+k))
_ -> return ()
return newM
uniqUnits = flip zip [0..] . map head . group . sortBy sortfn $ [ u | UnitPow u _ <- concat cons ]
colMap = M.fromList uniqUnits
numRows = length cons
numCols = M.size colMap
negateCons :: [UnitInfo] -> [UnitInfo]
negateCons = map (\ (UnitPow u k) -> UnitPow u (-k))
negatePosAbs :: UnitInfo -> UnitInfo
negatePosAbs (UnitPow (UnitParamPosAbs x) k) = UnitPow (UnitParamPosAbs x) (-k)
negatePosAbs (UnitPow (UnitParamImpAbs v) k) = UnitPow (UnitParamImpAbs v) (-k)
negatePosAbs u = u
isUnitRHS :: UnitInfo -> Bool
isUnitRHS (UnitPow (UnitName _) _) = True
isUnitRHS (UnitPow (UnitParamEAPAbs _) _) = True
isUnitRHS _ = False
| Shift UnitNames / EAPAbs poly units to the RHS , and all else to the LHS .
shiftTerms :: ([UnitInfo], [UnitInfo]) -> ([UnitInfo], [UnitInfo])
shiftTerms (lhs, rhs) = (lhsOk ++ negateCons rhsShift, rhsOk ++ negateCons lhsShift)
where
(lhsOk, lhsShift) = partition (not . isUnitRHS) lhs
(rhsOk, rhsShift) = partition isUnitRHS rhs
shiftTermsBy :: (UnitInfo -> Bool) -> ([UnitInfo], [UnitInfo]) -> ([UnitInfo], [UnitInfo])
shiftTermsBy f (lhs, rhs) = (lhsOk ++ negateCons rhsShift, rhsOk ++ negateCons lhsShift)
where
(lhsOk, lhsShift) = partition f lhs
(rhsOk, rhsShift) = partition (not . f) rhs
| Translate all constraints into a LHS , RHS side of units .
flattenConstraints :: Constraints -> [([UnitInfo], [UnitInfo])]
flattenConstraints = map (\ (ConEq u1 u2) -> (flattenUnits u1, flattenUnits u2))
-- | Returns given matrix transformed into Reduced Row Echelon Form.
rref :: H.Matrix Double -> H.Matrix Double
rref a = snd $ rrefMatrices' a 0 0 []

-- Provenance of matrices: the elementary row operations applied during
-- rref. The constructor list was lost in extraction (leaving a stray
-- 'where' and an empty data declaration that cannot compile); it is
-- reconstructed here from the uses ElemRowSwap i (j - k),
-- ElemRowMult (j - k) scale and ElemRowAdds matOps with
-- matOps :: [(Int, Int)] in rrefMatrices'.
data RRefOp
  = ElemRowSwap Int Int        -- ^ swapped row with row
  | ElemRowMult Int Double     -- ^ scaled row by constant
  | ElemRowAdds [(Int, Int)]   -- ^ set of added row onto row ops
  deriving (Show, Eq, Ord)
rrefMatrices' :: H.Matrix Double -> Int -> Int -> [(H.Matrix Double, RRefOp)] ->
([(H.Matrix Double, RRefOp)], H.Matrix Double)
rrefMatrices' a j k mats
| j - k == n = (mats, a)
| j == m = (mats, a)
  -- When we haven't yet found the first non-zero number in the row, but we really need one:
| a `atIndex` (j - k, j) == 0 = case findIndex (/= 0) below of
this column is all 0s below current row , must move onto the next column
Nothing -> rrefMatrices' a (j + 1) (k + 1) mats
Just i' -> rrefMatrices' (swapMat <> a) j k ((swapMat, ElemRowSwap i (j - k)):mats)
where i = j - k + i'
swapMat = elemRowSwap n i (j - k)
a 1 if needed using elemRowMult , and then clear out any lingering
| otherwise = rrefMatrices' a2 (j + 1) k mats2
where
n = rows a
m = cols a
below = getColumnBelow a (j - k, j)
scale = recip (a `atIndex` (j - k, j))
erm = elemRowMult n (j - k) scale
scale the row if the cell is not already equal to 1
(a1, mats1) | a `atIndex` (j - k, j) /= 1 = (erm <> a, (erm, ElemRowMult (j - k) scale):mats)
| otherwise = (a, mats)
Locate any non - zero values in the same column as ( j - k , j ) and
separate elemRowAdd matrix for each cancellation that are then
findAdds _ curM ms
| isWritten = (newMat <> curM, (newMat, ElemRowAdds matOps):ms)
| otherwise = (curM, ms)
where
(isWritten, matOps, newMat) = runST $ do
newM <- newMatrix 0 n n :: ST s (STMatrix s Double)
sequence_ [ writeMatrix newM i' i' 1 | i' <- [0 .. (n - 1)] ]
let f w o i | i >= n = return (w, o)
| i == j - k = f w o (i + 1)
| a `atIndex` (i, j) == 0 = f w o (i + 1)
| otherwise = writeMatrix newM i (j - k) (- (a `atIndex` (i, j)))
>> f True ((i, j - k):o) (i + 1)
(isW, ops) <- f False [] 0
(isW, ops,) `fmap` freezeMatrix newM
(a2, mats2) = findAdds (0::Int) a1 mats1
getColumnBelow :: H.Matrix Double -> (Int, Int) -> [Double]
getColumnBelow a (i, j) = concat . H.toLists $ subMatrix (i, j) (n - i, 1) a
where n = rows a
elemRowMult :: Int -> Int -> Double -> H.Matrix Double
elemRowMult n i k = diag (H.fromList (replicate i 1.0 ++ [k] ++ replicate (n - i - 1) 1.0))
elemRowSwap :: Int -> Int -> Int -> H.Matrix Double
elemRowSwap n i j
| i == j = ident n
| i > j = elemRowSwap n j i
| otherwise = ident n ? ([0..i-1] ++ [j] ++ [i+1..j-1] ++ [i] ++ [j+1..n-1])
opToGraphCol :: RRefOp -> GraphCol
opToGraphCol ElemRowMult{} = IM.empty
opToGraphCol (ElemRowSwap i j) = IM.fromList [ (i, IS.singleton j), (j, IS.singleton i) ]
opToGraphCol (ElemRowAdds l) = IM.fromList $ concat [ [(i, IS.fromList [i,j]), (j, IS.singleton j)] | (i, j) <- l ]
graphColCombine :: GraphCol -> GraphCol -> GraphCol
graphColCombine g1 g2 = IM.unionWith (curry snd) g1 $ IM.map (IS.fromList . trans . IS.toList) g2
where
trans = concatMap (\ i -> [i] `fromMaybe` (IS.toList <$> IM.lookup i g1))
invertGraphCol :: GraphCol -> GraphCol
invertGraphCol g = IM.fromListWith IS.union [ (i, IS.singleton j) | (j, jset) <- IM.toList g, i <- IS.toList jset ]
provenance :: H.Matrix Double -> (H.Matrix Double, Provenance)
provenance m = (m', p)
where
(matOps, m') = rrefMatrices' m 0 0 []
p = invertGraphCol . foldl' graphColCombine IM.empty . map opToGraphCol $ map snd matOps
findInconsistentRows :: H.Matrix Double -> H.Matrix Double -> [Int]
findInconsistentRows coA augA | rows augA < 2 = []
| otherwise = inconsistent
where
inconsistent = [0..(rows augA - 1)] \\ consistent
consistent
| rows augA < 16 = head (filter tryRows (powerset $ reverse [0..(rows augA - 1)]))
| otherwise = head (filter tryRows (tails ( [0..(rows augA - 1)])) ++ [[]])
powerset = filterM (const [True, False])
Rouché – Capelli theorem is that if the rank of the coefficient
tryRows [] = True
tryRows ns = (rank coA' == rank augA')
where
coA' = coA ? ns
augA' = augA ? ns
chooseImplicitNames :: [(VV, UnitInfo)] -> [(VV, UnitInfo)]
chooseImplicitNames vars = replaceImplicitNames (genImplicitNamesMap vars) vars
genImplicitNamesMap :: Data a => a -> M.Map UnitInfo UnitInfo
genImplicitNamesMap x = M.fromList [ (absU, UnitParamEAPAbs (newN, newN)) | (absU, newN) <- zip absUnits newNames ]
where
absUnits = nub [ u | u@(UnitParamPosAbs _) <- universeBi x ] ++
nub [ u | u@(UnitParamImpAbs _) <- universeBi x ]
eapNames = nub $ [ n | (UnitParamEAPAbs (_, n)) <- universeBi x ] ++
[ n | (UnitParamEAPUse ((_, n), _)) <- universeBi x ]
newNames = filter (`notElem` eapNames) . map ('\'':) $ nameGen
nameGen = concatMap sequence . tail . inits $ repeat ['a'..'z']
replaceImplicitNames :: Data a => M.Map UnitInfo UnitInfo -> a -> a
replaceImplicitNames implicitMap = transformBi replace
where
replace u@(UnitParamPosAbs _) = fromMaybe u $ M.lookup u implicitMap
replace u@(UnitParamImpAbs _) = fromMaybe u $ M.lookup u implicitMap
replace u = u
criticalVariables :: Constraints -> [UnitInfo]
criticalVariables [] = []
criticalVariables cons = filter (not . isUnitRHS') $ map (colA A.!) criticalIndices
where
(unsolvedM, _, colA) = constraintsToMatrix cons
solvedM = rref unsolvedM
uncriticalIndices = mapMaybe (findIndex (/= 0)) $ H.toLists solvedM
criticalIndices = A.indices colA \\ uncriticalIndices
isUnitRHS' (UnitName _) = True; isUnitRHS' _ = False
inconsistentConstraints :: Constraints -> Maybe Constraints
inconsistentConstraints [] = Nothing
inconsistentConstraints cons
| not (null direct) = Just direct
| null inconsists = Nothing
| otherwise = Just [ con | (con, i) <- zip cons [0..], i `elem` inconsists ]
where
(_, _, inconsists, _, _) = constraintsToMatrices cons
direct = detectInconsistency $ genUnitAssignments' colSort cons
|
33e962513cab1d0a698c5ed302388ed9f0fdd7805b51ef0b881f8fb9338760cf
|
UU-ComputerScience/uhc
|
Ratio.hs
|
# LANGUAGE CPP #
-----------------------------------------------------------------------------
-- |
-- Module : Data.Ratio
Copyright : ( c ) The University of Glasgow 2001
-- License : BSD-style (see the file libraries/base/LICENSE)
--
-- Maintainer :
-- Stability : stable
-- Portability : portable
--
-- Standard functions on rational numbers
--
-----------------------------------------------------------------------------
module Data.Ratio
( Ratio
, Rational
, (%) -- :: (Integral a) => a -> a -> Ratio a
, numerator -- :: (Integral a) => Ratio a -> a
, denominator -- :: (Integral a) => Ratio a -> a
: : ( RealFrac a ) = > a - > a - > Rational
-- Ratio instances:
-- (Integral a) => Eq (Ratio a)
-- (Integral a) => Ord (Ratio a)
( Integral a ) = > ( Ratio a )
-- (Integral a) => Real (Ratio a)
-- (Integral a) => Fractional (Ratio a)
-- (Integral a) => RealFrac (Ratio a)
( Integral a ) = > ( Ratio a )
-- (Read a, Integral a) => Read (Ratio a)
-- (Integral a) => Show (Ratio a)
) where
import Prelude
#ifdef __GLASGOW_HASKELL__
import GHC.Real -- The basic defns for Ratio
#endif
#ifdef __HUGS__
import Hugs.Prelude(Ratio(..), (%), numerator, denominator)
#endif
#ifdef __NHC__
import Ratio (Ratio(..), (%), numerator, denominator, approxRational)
#else
-- -----------------------------------------------------------------------------
-- approxRational
| ' approxRational ' , applied to two real fractional numbers @x@ and @epsilon@ ,
returns the simplest rational number within @epsilon@ of @x@.
-- A rational number @y@ is said to be /simpler/ than another @y'@ if
--
-- * @'abs' ('numerator' y) <= 'abs' ('numerator' y')@, and
--
-- * @'denominator' y <= 'denominator' y'@.
--
-- Any real interval contains a unique simplest rational;
in particular , note that @0\/1@ is the simplest rational of all .
-- Implementation details: Here, for simplicity, we assume a closed rational
interval . If such an interval includes at least one whole number , then
-- the simplest rational is the absolutely least whole number. Otherwise,
-- the bounds are of the form q%1 + r%d and q%1 + r'%d', where abs r < d
-- and abs r' < d', and the simplest rational is q%1 + the reciprocal of
the simplest rational between d'%r ' and d%r .
approxRational :: (RealFrac a) => a -> a -> Rational
-- Search the closed interval [rat-eps, rat+eps] for its simplest rational.
approxRational rat eps = simplest (rat-eps) (rat+eps)
 where simplest x y | y < x = simplest y x
                    | x == y = xr
                    -- interval entirely positive: recurse on numerators/denominators
                    | x > 0 = simplest' n d n' d'
                    -- interval entirely negative: mirror to the positive case
                    | y < 0 = - simplest' (-n') d' (-n) d
                    -- interval straddles zero: 0/1 is the simplest of all
                    | otherwise = 0 :% 1
         where xr = toRational x
               n = numerator xr
               d = denominator xr
               nd' = toRational y
               n' = numerator nd'
               d' = denominator nd'
       simplest' n d n' d' -- assumes 0 < n%d < n'%d'
                 -- lower bound is the whole number q: that is simplest
                 | r == 0 = q :% 1
                 -- a whole number q+1 lies inside the interval
                 | q /= q' = (q+1) :% 1
                 -- otherwise q + 1/(simplest rational between d'/r' and d/r)
                 | otherwise = (q*n''+d'') :% n''
           where (q,r) = quotRem n d
                 (q',r') = quotRem n' d'
                 nd'' = simplest' d' r' d r
                 n'' = numerator nd''
                 d'' = denominator nd''
#endif
| null |
https://raw.githubusercontent.com/UU-ComputerScience/uhc/f2b94a90d26e2093d84044b3832a9a3e3c36b129/EHC/ehclib/base/Data/Ratio.hs
|
haskell
|
---------------------------------------------------------------------------
|
Module : Data.Ratio
License : BSD-style (see the file libraries/base/LICENSE)
Maintainer :
Stability : stable
Portability : portable
Standard functions on rational numbers
---------------------------------------------------------------------------
:: (Integral a) => a -> a -> Ratio a
:: (Integral a) => Ratio a -> a
:: (Integral a) => Ratio a -> a
Ratio instances:
(Integral a) => Eq (Ratio a)
(Integral a) => Ord (Ratio a)
(Integral a) => Real (Ratio a)
(Integral a) => Fractional (Ratio a)
(Integral a) => RealFrac (Ratio a)
(Read a, Integral a) => Read (Ratio a)
(Integral a) => Show (Ratio a)
The basic defns for Ratio
-----------------------------------------------------------------------------
approxRational
A rational number @y@ is said to be /simpler/ than another @y'@ if
* @'abs' ('numerator' y) <= 'abs' ('numerator' y')@, and
* @'denominator' y <= 'denominator' y'@.
Any real interval contains a unique simplest rational;
Implementation details: Here, for simplicity, we assume a closed rational
the simplest rational is the absolutely least whole number. Otherwise,
the bounds are of the form q%1 + r%d and q%1 + r'%d', where abs r < d
and abs r' < d', and the simplest rational is q%1 + the reciprocal of
assumes 0 < n%d < n'%d'
|
# LANGUAGE CPP #
Copyright : ( c ) The University of Glasgow 2001
module Data.Ratio
( Ratio
, Rational
: : ( RealFrac a ) = > a - > a - > Rational
( Integral a ) = > ( Ratio a )
( Integral a ) = > ( Ratio a )
) where
import Prelude
#ifdef __GLASGOW_HASKELL__
#endif
#ifdef __HUGS__
import Hugs.Prelude(Ratio(..), (%), numerator, denominator)
#endif
#ifdef __NHC__
import Ratio (Ratio(..), (%), numerator, denominator, approxRational)
#else
| ' approxRational ' , applied to two real fractional numbers @x@ and @epsilon@ ,
returns the simplest rational number within @epsilon@ of @x@.
in particular , note that @0\/1@ is the simplest rational of all .
interval . If such an interval includes at least one whole number , then
the simplest rational between d'%r ' and d%r .
approxRational :: (RealFrac a) => a -> a -> Rational
approxRational rat eps = simplest (rat-eps) (rat+eps)
where simplest x y | y < x = simplest y x
| x == y = xr
| x > 0 = simplest' n d n' d'
| y < 0 = - simplest' (-n') d' (-n) d
| otherwise = 0 :% 1
where xr = toRational x
n = numerator xr
d = denominator xr
nd' = toRational y
n' = numerator nd'
d' = denominator nd'
| r == 0 = q :% 1
| q /= q' = (q+1) :% 1
| otherwise = (q*n''+d'') :% n''
where (q,r) = quotRem n d
(q',r') = quotRem n' d'
nd'' = simplest' d' r' d r
n'' = numerator nd''
d'' = denominator nd''
#endif
|
c15f5ae5641e9352bc1e9838677a0ed41d41491f9870f1e06425ce87872e55a1
|
ashinn/alschemist
|
mzscheme-301.scm
|
For mzscheme-301
(require (lib "1.ss" "srfi"))
(require (lib "9.ss" "srfi"))
(require (lib "23.ss" "srfi"))
(require (lib "60.ss" "srfi"))
(require (lib "69.ss" "srfi"))
(load "other/srfi-60-pieces.scm")
| null |
https://raw.githubusercontent.com/ashinn/alschemist/13d7105c1291a881b8d8d69c8e60e045428c04b1/jkode/sassy/inits/mzscheme-301.scm
|
scheme
|
For mzscheme-301
(require (lib "1.ss" "srfi"))
(require (lib "9.ss" "srfi"))
(require (lib "23.ss" "srfi"))
(require (lib "60.ss" "srfi"))
(require (lib "69.ss" "srfi"))
(load "other/srfi-60-pieces.scm")
|
|
3077c459ce4c266dd6b2aeaeea09894d204847824c349e0e4293d854f93fa5a0
|
alvatar/spheres
|
c-define-struct#.scm
|
;; Helper for define-c-struct and define-c-union
(define^ (%%c-define-struct-or-union struct-or-union type fields)
(let* ((type-str (symbol->string type))
(struct-type-str (string-append
(case struct-or-union
((struct) "struct ")
((union) "union ")
(else
(error "%%c-define-struct-or-union: first parameter must be 'struct or 'union")))
type-str))
(struct-type*-str (string-append struct-type-str "*"))
(release-type-str (string-append "___release_" type-str))
(type* (%%generic-symbol-append type-str "*"))
(type*/nonnull (%%generic-symbol-append type-str "*/nonnull"))
(type*/release-rc (%%generic-symbol-append type-str "*/release-rc")))
(define (field-getter-setter field-spec)
(let* ((field (car field-spec))
(field-str (symbol->string field))
(field-description (cadr field-spec)))
(if (pair? field-description)
Field is either a ' struct ' , an ' array ' or an ' array of structs '
(let* ((field-tag (car field-description))
(field-type (cadr field-description))
(field-type-str (symbol->string field-type)))
(case field-tag
;; Struct
((struct)
`((define ,(%%generic-symbol-append type-str "-" field-str)
(c-lambda (,type*/nonnull)
,(%%generic-symbol-append field-type-str "*/nonnull")
,(string-append "___result_voidstar = &___arg1->" field-str ";")))
(define ,(%%generic-symbol-append type-str "-" field-str "-set!")
(c-lambda (,type*/nonnull ,field-type)
void
,(string-append "___arg1->" field-str " = ___arg2;")))))
;; Array of fundamental type
((array)
;; generate a getter and a setter
`((define ,(%%generic-symbol-append type-str "-" field-str "-ref")
(c-lambda (,type*/nonnull int)
,field-type
,(string-append "___result = ___arg1->" field-str "[___arg2];")))
(define ,(%%generic-symbol-append type-str "-" field-str "-set!")
(c-lambda (,type*/nonnull int ,field-type)
void
,(string-append "___arg1->" field-str "[___arg2] = ___arg3;")))))
;; Array of structs
((struct-array)
;; only generate a getter returning struct address
`((define ,(%%generic-symbol-append type-str "-" field-str "-ref")
(c-lambda (,type*/nonnull int)
,(%%generic-symbol-append field-type-str "*/nonnull")
,(string-append "___result_voidstar = &___arg1->" field-str "[___arg2];")))))))
Field is fundamental type
`((define ,(%%generic-symbol-append type-str "-" field-str)
(c-lambda (,type*/nonnull)
,field-description
,(string-append "___result = ___arg1->" field-str ";")))
(define ,(%%generic-symbol-append type-str "-" field-str "-set!")
(c-lambda (,type*/nonnull ,field-description)
void
,(string-append "___arg1->" field-str " = ___arg2;")))))))
(let ((expansion
`(begin
;; Define the release function which is called when the
;; object is no longer accessible from the Scheme world.
(c-declare
,(string-append
"static ___SCMOBJ " release-type-str "( void* ptr )\n"
"{\n"
" ___EXT(___release_rc)( ptr );\n"
" return ___FIX(___NO_ERR);\n"
"}\n"))
;; Define type allocator procedure.
(define ,(%%generic-symbol-append "alloc-" type-str)
(c-lambda ()
,type*/release-rc
,(string-append "___result_voidstar = ___EXT(___alloc_rc)( sizeof( " struct-type-str " ) );")))
Dereference
(define ,(%%generic-symbol-append "*->" type-str)
(c-lambda (,type*/nonnull)
,type
,(string-append "___result_voidstar = (" type-str "*)___arg1;")))
;; Define field getters and setters.
,@(apply append (map field-getter-setter fields)))))
(if #f ;; #t for debugging
(pp `(definition:
(c-define-struct ,type ,@fields)
expansion:
,expansion)))
expansion)))
! Defines the c - define - struct macro , which extends the Gambit FFI to
;; interface to C structures.
(define-macro (c-define-struct type . fields)
(%%c-define-struct-or-union 'struct type fields))
! Defines the c - define - union macro , which extends the Gambit FFI to
;; interface to C structures.
(define-macro (c-define-union type . fields)
(%%c-define-struct-or-union 'union type fields))
| null |
https://raw.githubusercontent.com/alvatar/spheres/568836f234a469ef70c69f4a2d9b56d41c3fc5bd/spheres/gambit/ffi/c-define-struct%23.scm
|
scheme
|
Helper for define-c-struct and define-c-union
Struct
Array of fundamental type
generate a getter and a setter
Array of structs
only generate a getter returning struct address
Define the release function which is called when the
object is no longer accessible from the Scheme world.
Define type allocator procedure.
Define field getters and setters.
#t for debugging
interface to C structures.
interface to C structures.
|
(define^ (%%c-define-struct-or-union struct-or-union type fields)
(let* ((type-str (symbol->string type))
(struct-type-str (string-append
(case struct-or-union
((struct) "struct ")
((union) "union ")
(else
(error "%%c-define-struct-or-union: first parameter must be 'struct or 'union")))
type-str))
(struct-type*-str (string-append struct-type-str "*"))
(release-type-str (string-append "___release_" type-str))
(type* (%%generic-symbol-append type-str "*"))
(type*/nonnull (%%generic-symbol-append type-str "*/nonnull"))
(type*/release-rc (%%generic-symbol-append type-str "*/release-rc")))
(define (field-getter-setter field-spec)
(let* ((field (car field-spec))
(field-str (symbol->string field))
(field-description (cadr field-spec)))
(if (pair? field-description)
Field is either a ' struct ' , an ' array ' or an ' array of structs '
(let* ((field-tag (car field-description))
(field-type (cadr field-description))
(field-type-str (symbol->string field-type)))
(case field-tag
((struct)
`((define ,(%%generic-symbol-append type-str "-" field-str)
(c-lambda (,type*/nonnull)
,(%%generic-symbol-append field-type-str "*/nonnull")
,(string-append "___result_voidstar = &___arg1->" field-str ";")))
(define ,(%%generic-symbol-append type-str "-" field-str "-set!")
(c-lambda (,type*/nonnull ,field-type)
void
,(string-append "___arg1->" field-str " = ___arg2;")))))
((array)
`((define ,(%%generic-symbol-append type-str "-" field-str "-ref")
(c-lambda (,type*/nonnull int)
,field-type
,(string-append "___result = ___arg1->" field-str "[___arg2];")))
(define ,(%%generic-symbol-append type-str "-" field-str "-set!")
(c-lambda (,type*/nonnull int ,field-type)
void
,(string-append "___arg1->" field-str "[___arg2] = ___arg3;")))))
((struct-array)
`((define ,(%%generic-symbol-append type-str "-" field-str "-ref")
(c-lambda (,type*/nonnull int)
,(%%generic-symbol-append field-type-str "*/nonnull")
,(string-append "___result_voidstar = &___arg1->" field-str "[___arg2];")))))))
Field is fundamental type
`((define ,(%%generic-symbol-append type-str "-" field-str)
(c-lambda (,type*/nonnull)
,field-description
,(string-append "___result = ___arg1->" field-str ";")))
(define ,(%%generic-symbol-append type-str "-" field-str "-set!")
(c-lambda (,type*/nonnull ,field-description)
void
,(string-append "___arg1->" field-str " = ___arg2;")))))))
(let ((expansion
`(begin
(c-declare
,(string-append
"static ___SCMOBJ " release-type-str "( void* ptr )\n"
"{\n"
" ___EXT(___release_rc)( ptr );\n"
" return ___FIX(___NO_ERR);\n"
"}\n"))
(define ,(%%generic-symbol-append "alloc-" type-str)
(c-lambda ()
,type*/release-rc
,(string-append "___result_voidstar = ___EXT(___alloc_rc)( sizeof( " struct-type-str " ) );")))
Dereference
(define ,(%%generic-symbol-append "*->" type-str)
(c-lambda (,type*/nonnull)
,type
,(string-append "___result_voidstar = (" type-str "*)___arg1;")))
,@(apply append (map field-getter-setter fields)))))
(pp `(definition:
(c-define-struct ,type ,@fields)
expansion:
,expansion)))
expansion)))
! Defines the c - define - struct macro , which extends the Gambit FFI to
(define-macro (c-define-struct type . fields)
(%%c-define-struct-or-union 'struct type fields))
! Defines the c - define - union macro , which extends the Gambit FFI to
(define-macro (c-define-union type . fields)
(%%c-define-struct-or-union 'union type fields))
|
12822357cefeeac6db61b8ddaaa544537b07e35f77b6e7ec9fe8348323025156
|
jguhlin/ODG
|
project.clj
|
(defproject odg "1.1.1"
:main odg.core
:aot [odg.core]
:profiles {
:uberjar { :aot :all}}
:description "FIXME: write description"
:url ""
:jvm-opts ^:replace [
"-Xms6G"
"-Xmx6G"
" -XX : " ; May have caused a huge slowdown ? At least in eclipse ...
;"-XX:+UseParallelGC"
"-XX:+UseConcMarkSweepGC"
"-XX:+UseCondCardMark"
"-XX:+UseBiasedLocking"
"-XX:+AggressiveOpts"
"-XX:+UseCompressedOops"
"-XX:+UseFastAccessorMethods"
"-XX:+DoEscapeAnalysis"
"-Xss64M"
"-d64"
"-server"
"-Dco.paralleluniverse.fibers.detectRunawayFibers=false"
;"-Dco.paralleluniverse.fibers.verifyInstrumentation"
"-XX:-OmitStackTraceInFastThrow"]
:plugins [[codox "0.6.6"]
[lein-ring "0.8.11"]]
:ring {:handler odg.query-server/handler
:port 6789
:init odg.query-server/dev-init
:auto-reload? true
:auto-refresh? true}
:java-agents [[co.paralleluniverse/quasar-core "0.7.8"]]
:resource-paths ["resources"]
:dependencies [[org.clojure/clojure "1.8.0"]
[co.paralleluniverse/pulsar "0.7.8"]
[co.paralleluniverse/quasar-core "0.7.8"]
[co.paralleluniverse/quasar-actors "0.7.8"]
[org.neo4j/neo4j "3.2.1"]
[clojure-csv/clojure-csv "2.0.2"]
[cheshire "5.7.1"]
[org.clojure/tools.cli "0.3.1"]
[criterium "0.4.4"]
[org.clojure/math.combinatorics "0.1.4"]
[org.clojure/math.numeric-tower "0.0.4"]
[iota "1.1.3"]
[foldable-seq "0.2"]
[org.clojure/data.xml "0.0.8"]
[org.clojure/data.zip "0.1.2"]
[incanter "1.5.5"]
[digest "1.4.5"]
[biotools "0.1.1-b1"]
[com.taoensso/timbre "4.10.0"]
[me.raynes/fs "1.4.6"]
[ring/ring-core "1.6.1"] ; :exclusions [org.clojure/tools.reader]]
[ring/ring-jetty-adapter "1.6.1"]
[ring/ring-json "0.4.0"]
[liberator "0.14.1"]
[compojure "1.6.0"]
[org.clojure/core.memoize "0.5.9"]])
| null |
https://raw.githubusercontent.com/jguhlin/ODG/c8a09f273c278ba7b3acbd37155477979f8b4851/project.clj
|
clojure
|
May have caused a huge slowdown ? At least in eclipse ...
"-XX:+UseParallelGC"
"-Dco.paralleluniverse.fibers.verifyInstrumentation"
:exclusions [org.clojure/tools.reader]]
|
(defproject odg "1.1.1"
:main odg.core
:aot [odg.core]
:profiles {
:uberjar { :aot :all}}
:description "FIXME: write description"
:url ""
:jvm-opts ^:replace [
"-Xms6G"
"-Xmx6G"
"-XX:+UseConcMarkSweepGC"
"-XX:+UseCondCardMark"
"-XX:+UseBiasedLocking"
"-XX:+AggressiveOpts"
"-XX:+UseCompressedOops"
"-XX:+UseFastAccessorMethods"
"-XX:+DoEscapeAnalysis"
"-Xss64M"
"-d64"
"-server"
"-Dco.paralleluniverse.fibers.detectRunawayFibers=false"
"-XX:-OmitStackTraceInFastThrow"]
:plugins [[codox "0.6.6"]
[lein-ring "0.8.11"]]
:ring {:handler odg.query-server/handler
:port 6789
:init odg.query-server/dev-init
:auto-reload? true
:auto-refresh? true}
:java-agents [[co.paralleluniverse/quasar-core "0.7.8"]]
:resource-paths ["resources"]
:dependencies [[org.clojure/clojure "1.8.0"]
[co.paralleluniverse/pulsar "0.7.8"]
[co.paralleluniverse/quasar-core "0.7.8"]
[co.paralleluniverse/quasar-actors "0.7.8"]
[org.neo4j/neo4j "3.2.1"]
[clojure-csv/clojure-csv "2.0.2"]
[cheshire "5.7.1"]
[org.clojure/tools.cli "0.3.1"]
[criterium "0.4.4"]
[org.clojure/math.combinatorics "0.1.4"]
[org.clojure/math.numeric-tower "0.0.4"]
[iota "1.1.3"]
[foldable-seq "0.2"]
[org.clojure/data.xml "0.0.8"]
[org.clojure/data.zip "0.1.2"]
[incanter "1.5.5"]
[digest "1.4.5"]
[biotools "0.1.1-b1"]
[com.taoensso/timbre "4.10.0"]
[me.raynes/fs "1.4.6"]
[ring/ring-jetty-adapter "1.6.1"]
[ring/ring-json "0.4.0"]
[liberator "0.14.1"]
[compojure "1.6.0"]
[org.clojure/core.memoize "0.5.9"]])
|
116c910dda19a8379d96d7ad531fa8aeb241a84b1ec5392ce126d77871ebcf63
|
namanmansukhani/ocaml-coreutils
|
yes.ml
|
let program_name = "yes"
let usage_msg = "usage: yes [args ...]"
let args = ref []
let anon_fun arg = args := arg :: !args
let speclist = []
let exit_code = ref 0
let yes args =
let word = (match args with | [] -> "y" | (x::_) -> x) in
while true do
try
Printf.printf "%s\n" word
with
Sys_error (err_msg) -> (Printf.printf "%s: %s\n" program_name err_msg;exit_code:=1)
done
let main () =
Arg.parse speclist anon_fun usage_msg;
yes (List.rev !args);
exit (!exit_code)
let _ = main ()
| null |
https://raw.githubusercontent.com/namanmansukhani/ocaml-coreutils/4e19d842818c7513291b652dcf3dd913110a6de9/bin/yes.ml
|
ocaml
|
let program_name = "yes"
let usage_msg = "usage: yes [args ...]"
let args = ref []
let anon_fun arg = args := arg :: !args
let speclist = []
let exit_code = ref 0
let yes args =
let word = (match args with | [] -> "y" | (x::_) -> x) in
while true do
try
Printf.printf "%s\n" word
with
Sys_error (err_msg) -> (Printf.printf "%s: %s\n" program_name err_msg;exit_code:=1)
done
let main () =
Arg.parse speclist anon_fun usage_msg;
yes (List.rev !args);
exit (!exit_code)
let _ = main ()
|
|
3b816401014377e7967f827f70169a69e31154271f05a1aa13b6ccf45e76a2b1
|
rbkmoney/fistful-server
|
ff_deposit_revert_utils.erl
|
%%
%% Index reverts management helpers
%%
-module(ff_deposit_revert_utils).
-opaque index() :: #{
reverts := #{id() => revert()},
Стек идентифкаторов возвратов . Голова списка точно является незавершенным ревертом .
Остальные реверты могут быть как завершенными , нет . Элементы
На практике , если машина не подвергалась починке , в стеке будут идентификаторы
только активных возвратов без повторений .
active := [id()]
}.
-type wrapped_event() ::
{revert, #{
id := id(),
payload := event()
}}.
-type unknown_revert_error() :: {unknown_revert, id()}.
-export_type([index/0]).
-export_type([wrapped_event/0]).
-export_type([unknown_revert_error/0]).
%% API
-export([new_index/0]).
-export([reverts/1]).
-export([is_active/1]).
-export([is_finished/1]).
-export([get_not_finished/1]).
-export([wrap_event/2]).
-export([wrap_events/2]).
-export([unwrap_event/1]).
-export([apply_event/2]).
-export([maybe_migrate/1]).
-export([get_by_id/2]).
-export([process_reverts/1]).
%% Internal types
-type id() :: ff_adjustment:id().
-type revert() :: ff_deposit_revert:revert().
-type event() :: ff_deposit_revert:event().
-type action() :: machinery:action() | undefined.
%% API
-spec new_index() -> index().
new_index() ->
#{
reverts => #{},
active => []
}.
-spec is_active(index()) -> boolean().
is_active(Index) ->
active_revert_id(Index) =/= undefined.
-spec is_finished(index()) -> boolean().
is_finished(Index) ->
lists:all(fun ff_deposit_revert:is_finished/1, reverts(Index)).
-spec get_not_finished(index()) -> {ok, id()} | error.
get_not_finished(Index) ->
do_get_not_finished(reverts(Index)).
-spec reverts(index()) -> [revert()].
reverts(Index) ->
#{reverts := Reverts} = Index,
maps:values(Reverts).
-spec get_by_id(id(), index()) -> {ok, revert()} | {error, unknown_revert_error()}.
get_by_id(RevertID, Index) ->
#{reverts := Reverts} = Index,
case maps:find(RevertID, Reverts) of
{ok, Revert} ->
{ok, Revert};
error ->
{error, {unknown_revert, RevertID}}
end.
-spec unwrap_event(wrapped_event()) -> {id(), event()}.
unwrap_event({revert, #{id := ID, payload := Event}}) ->
{ID, Event}.
-spec wrap_event(id(), event()) -> wrapped_event().
wrap_event(ID, Event) ->
{revert, #{id => ID, payload => Event}}.
-spec wrap_events(id(), [event()]) -> [wrapped_event()].
wrap_events(ID, Events) ->
[wrap_event(ID, Ev) || Ev <- Events].
-spec apply_event(wrapped_event(), index()) -> index().
apply_event(WrappedEvent, Index0) ->
{RevertID, Event} = unwrap_event(WrappedEvent),
#{reverts := Reverts} = Index0,
Revert0 = maps:get(RevertID, Reverts, undefined),
Revert1 = ff_deposit_revert:apply_event(Event, Revert0),
Index1 = Index0#{reverts := Reverts#{RevertID => Revert1}},
Index2 = update_active(Revert1, Index1),
Index2.
-spec maybe_migrate(wrapped_event() | any()) -> wrapped_event().
maybe_migrate(Event) ->
{ID, RevertEvent} = unwrap_event(Event),
Migrated = ff_deposit_revert:maybe_migrate(RevertEvent),
wrap_event(ID, Migrated).
-spec process_reverts(index()) -> {action(), [wrapped_event()]}.
process_reverts(Index) ->
RevertID = active_revert_id(Index),
#{reverts := #{RevertID := Revert}} = Index,
{RevertAction, Events} = ff_deposit_revert:process_transfer(Revert),
WrappedEvents = wrap_events(RevertID, Events),
NextIndex = lists:foldl(fun(E, Acc) -> ff_deposit_revert_utils:apply_event(E, Acc) end, Index, WrappedEvents),
Action =
case {RevertAction, ff_deposit_revert_utils:is_active(NextIndex)} of
{undefined, true} ->
continue;
_Other ->
RevertAction
end,
{Action, WrappedEvents}.
%% Internals
-spec update_active(revert(), index()) -> index().
update_active(Revert, Index) ->
#{active := Active} = Index,
IsRevertActive = ff_deposit_revert:is_active(Revert),
RevertID = ff_deposit_revert:id(Revert),
NewActive =
case {IsRevertActive, RevertID, Active} of
{false, RevertID, [RevertID | ActiveTail]} ->
drain_inactive_revert(ActiveTail, Index);
{false, _RevertID, _} ->
Active;
{true, RevertID, [RevertID | _]} ->
Active;
{true, RevertID, _} ->
[RevertID | Active]
end,
Index#{active => NewActive}.
-spec drain_inactive_revert([id()], index()) -> [id()].
drain_inactive_revert(RevertIDs, RevertsIndex) ->
#{reverts := Reverts} = RevertsIndex,
lists:dropwhile(
fun(RevertID) ->
#{RevertID := Revert} = Reverts,
not ff_deposit_revert:is_active(Revert)
end,
RevertIDs
).
-spec active_revert_id(index()) -> id() | undefined.
active_revert_id(Index) ->
#{active := Active} = Index,
case Active of
[RevertID | _] ->
RevertID;
[] ->
undefined
end.
-spec do_get_not_finished([revert()]) -> {ok, id()} | error.
do_get_not_finished([]) ->
error;
do_get_not_finished([Revert | Tail]) ->
case ff_deposit_revert:is_finished(Revert) of
true ->
do_get_not_finished(Tail);
false ->
{ok, ff_deposit_revert:id(Revert)}
end.
| null |
https://raw.githubusercontent.com/rbkmoney/fistful-server/60b964d0e07f911c841903bc61d8d9fb20a32658/apps/ff_transfer/src/ff_deposit_revert_utils.erl
|
erlang
|
Index reverts management helpers
API
Internal types
API
Internals
|
-module(ff_deposit_revert_utils).
-opaque index() :: #{
reverts := #{id() => revert()},
Стек идентифкаторов возвратов . Голова списка точно является незавершенным ревертом .
Остальные реверты могут быть как завершенными , нет . Элементы
На практике , если машина не подвергалась починке , в стеке будут идентификаторы
только активных возвратов без повторений .
active := [id()]
}.
-type wrapped_event() ::
{revert, #{
id := id(),
payload := event()
}}.
-type unknown_revert_error() :: {unknown_revert, id()}.
-export_type([index/0]).
-export_type([wrapped_event/0]).
-export_type([unknown_revert_error/0]).
-export([new_index/0]).
-export([reverts/1]).
-export([is_active/1]).
-export([is_finished/1]).
-export([get_not_finished/1]).
-export([wrap_event/2]).
-export([wrap_events/2]).
-export([unwrap_event/1]).
-export([apply_event/2]).
-export([maybe_migrate/1]).
-export([get_by_id/2]).
-export([process_reverts/1]).
-type id() :: ff_adjustment:id().
-type revert() :: ff_deposit_revert:revert().
-type event() :: ff_deposit_revert:event().
-type action() :: machinery:action() | undefined.
-spec new_index() -> index().
new_index() ->
#{
reverts => #{},
active => []
}.
-spec is_active(index()) -> boolean().
is_active(Index) ->
active_revert_id(Index) =/= undefined.
-spec is_finished(index()) -> boolean().
is_finished(Index) ->
lists:all(fun ff_deposit_revert:is_finished/1, reverts(Index)).
-spec get_not_finished(index()) -> {ok, id()} | error.
get_not_finished(Index) ->
do_get_not_finished(reverts(Index)).
-spec reverts(index()) -> [revert()].
reverts(Index) ->
#{reverts := Reverts} = Index,
maps:values(Reverts).
-spec get_by_id(id(), index()) -> {ok, revert()} | {error, unknown_revert_error()}.
get_by_id(RevertID, Index) ->
#{reverts := Reverts} = Index,
case maps:find(RevertID, Reverts) of
{ok, Revert} ->
{ok, Revert};
error ->
{error, {unknown_revert, RevertID}}
end.
-spec unwrap_event(wrapped_event()) -> {id(), event()}.
unwrap_event({revert, #{id := ID, payload := Event}}) ->
{ID, Event}.
-spec wrap_event(id(), event()) -> wrapped_event().
wrap_event(ID, Event) ->
{revert, #{id => ID, payload => Event}}.
-spec wrap_events(id(), [event()]) -> [wrapped_event()].
wrap_events(ID, Events) ->
[wrap_event(ID, Ev) || Ev <- Events].
-spec apply_event(wrapped_event(), index()) -> index().
apply_event(WrappedEvent, Index0) ->
{RevertID, Event} = unwrap_event(WrappedEvent),
#{reverts := Reverts} = Index0,
Revert0 = maps:get(RevertID, Reverts, undefined),
Revert1 = ff_deposit_revert:apply_event(Event, Revert0),
Index1 = Index0#{reverts := Reverts#{RevertID => Revert1}},
Index2 = update_active(Revert1, Index1),
Index2.
-spec maybe_migrate(wrapped_event() | any()) -> wrapped_event().
maybe_migrate(Event) ->
{ID, RevertEvent} = unwrap_event(Event),
Migrated = ff_deposit_revert:maybe_migrate(RevertEvent),
wrap_event(ID, Migrated).
-spec process_reverts(index()) -> {action(), [wrapped_event()]}.
process_reverts(Index) ->
RevertID = active_revert_id(Index),
#{reverts := #{RevertID := Revert}} = Index,
{RevertAction, Events} = ff_deposit_revert:process_transfer(Revert),
WrappedEvents = wrap_events(RevertID, Events),
NextIndex = lists:foldl(fun(E, Acc) -> ff_deposit_revert_utils:apply_event(E, Acc) end, Index, WrappedEvents),
Action =
case {RevertAction, ff_deposit_revert_utils:is_active(NextIndex)} of
{undefined, true} ->
continue;
_Other ->
RevertAction
end,
{Action, WrappedEvents}.
-spec update_active(revert(), index()) -> index().
update_active(Revert, Index) ->
#{active := Active} = Index,
IsRevertActive = ff_deposit_revert:is_active(Revert),
RevertID = ff_deposit_revert:id(Revert),
NewActive =
case {IsRevertActive, RevertID, Active} of
{false, RevertID, [RevertID | ActiveTail]} ->
drain_inactive_revert(ActiveTail, Index);
{false, _RevertID, _} ->
Active;
{true, RevertID, [RevertID | _]} ->
Active;
{true, RevertID, _} ->
[RevertID | Active]
end,
Index#{active => NewActive}.
-spec drain_inactive_revert([id()], index()) -> [id()].
drain_inactive_revert(RevertIDs, RevertsIndex) ->
#{reverts := Reverts} = RevertsIndex,
lists:dropwhile(
fun(RevertID) ->
#{RevertID := Revert} = Reverts,
not ff_deposit_revert:is_active(Revert)
end,
RevertIDs
).
-spec active_revert_id(index()) -> id() | undefined.
active_revert_id(Index) ->
#{active := Active} = Index,
case Active of
[RevertID | _] ->
RevertID;
[] ->
undefined
end.
-spec do_get_not_finished([revert()]) -> {ok, id()} | error.
do_get_not_finished([]) ->
error;
do_get_not_finished([Revert | Tail]) ->
case ff_deposit_revert:is_finished(Revert) of
true ->
do_get_not_finished(Tail);
false ->
{ok, ff_deposit_revert:id(Revert)}
end.
|
85ecd45ad153c2f17b97ee8dc7517bbf9a6beb106791dcc15bdbb25e20141559
|
NorfairKing/validity
|
CerealSpec.hs
|
# LANGUAGE TypeApplications #
module Test.Validity.CerealSpec where
import Data.GenValidity
import Test.Hspec
import Test.Validity.Cereal
spec :: Spec
spec = do
serializeSpecOnGen (genListOf $ pure 'a') "sequence of 'a's" (const [])
serializeSpec @Double DOES NOT HOLD
serializeSpec @Rational
serializeSpec @Int
serializeSpecOnArbitrary @Int
| null |
https://raw.githubusercontent.com/NorfairKing/validity/35bc8d45b27e6c21429e4b681b16e46ccd541b3b/genvalidity-hspec-cereal/test/Test/Validity/CerealSpec.hs
|
haskell
|
# LANGUAGE TypeApplications #
module Test.Validity.CerealSpec where
import Data.GenValidity
import Test.Hspec
import Test.Validity.Cereal
spec :: Spec
spec = do
serializeSpecOnGen (genListOf $ pure 'a') "sequence of 'a's" (const [])
serializeSpec @Double DOES NOT HOLD
serializeSpec @Rational
serializeSpec @Int
serializeSpecOnArbitrary @Int
|
|
6bba21369b03a82df7b312564509e4b65f7cfae2f21b4f4c8b6dcbe2e86573d8
|
mahsu/MariOCaml
|
sprite.mli
|
open Actors
(* Represents an xy vector *)
type xy = float * float (* x, y *)
(* Inherent sprite parameters from which to create the sprite *)
type sprite_params =
{
max_frames: int;
max_ticks: int;
img_src: string;
frame_size: xy;
src_offset: xy;
bbox_offset: xy;
bbox_size: xy;
loop: bool;
}
(* Concrete sprite created to visually represent an object *)
type sprite =
{
mutable params: sprite_params;
context: Dom_html.canvasRenderingContext2D Js.t;
frame: int ref;
ticks: int ref;
mutable img: Dom_html.imageElement Js.t;
}
(* Sets up a sprite to create *)
val setup_sprite : ?loop:bool -> ?bb_off:float*float-> ?bb_sz:float*float
-> string -> int -> int -> xy -> xy
-> sprite_params
(* Creates a sprite given the actor type *)
val make : Actors.spawn_typ -> Actors.dir_1d
-> Dom_html.canvasRenderingContext2D Js.t
-> sprite
(* Make a background *)
val make_bgd : Dom_html.canvasRenderingContext2D Js.t -> sprite
(* Make a particle corresponding to the given type *)
val make_particle : Actors.part_typ
-> Dom_html.canvasRenderingContext2D Js.t -> sprite
(* Transform an enemy sprite based on direction *)
val transform_enemy : Actors.enemy_typ -> sprite -> Actors.dir_1d -> unit
(* Updates the sprite's animation *)
val update_animation : sprite -> unit
| null |
https://raw.githubusercontent.com/mahsu/MariOCaml/d2359260895b77390648ca5ad126a897e639e7e4/sprite.mli
|
ocaml
|
Represents an xy vector
x, y
Inherent sprite parameters from which to create the sprite
Concrete sprite created to visually represent an object
Sets up a sprite to create
Creates a sprite given the actor type
Make a background
Make a particle corresponding to the given type
Transform an enemy sprite based on direction
Updates the sprite's animation
|
open Actors
type sprite_params =
{
max_frames: int;
max_ticks: int;
img_src: string;
frame_size: xy;
src_offset: xy;
bbox_offset: xy;
bbox_size: xy;
loop: bool;
}
type sprite =
{
mutable params: sprite_params;
context: Dom_html.canvasRenderingContext2D Js.t;
frame: int ref;
ticks: int ref;
mutable img: Dom_html.imageElement Js.t;
}
val setup_sprite : ?loop:bool -> ?bb_off:float*float-> ?bb_sz:float*float
-> string -> int -> int -> xy -> xy
-> sprite_params
val make : Actors.spawn_typ -> Actors.dir_1d
-> Dom_html.canvasRenderingContext2D Js.t
-> sprite
val make_bgd : Dom_html.canvasRenderingContext2D Js.t -> sprite
val make_particle : Actors.part_typ
-> Dom_html.canvasRenderingContext2D Js.t -> sprite
val transform_enemy : Actors.enemy_typ -> sprite -> Actors.dir_1d -> unit
val update_animation : sprite -> unit
|
d90bbcf8ac33e3aede50c7591a52e25dc5b46f139308da0c713fbfad5621e885
|
kadena-io/pact
|
API.hs
|
# LANGUAGE DeriveGeneric #
# LANGUAGE FlexibleContexts #
# LANGUAGE GeneralizedNewtypeDeriving #
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RankNTypes #-}
# LANGUAGE RecordWildCards #
# LANGUAGE ScopedTypeVariables #
# LANGUAGE TemplateHaskell #
-- |
-- Module : Pact.Types.API
Copyright : ( C ) 2016
-- License : BSD-style (see the file LICENSE)
Maintainer : < >
--
-- Pact REST API types.
--
module Pact.Types.API
( RequestKeys(..), rkRequestKeys
, SubmitBatch(..), sbCmds
, Poll(..)
, PollResponses(..)
, ListenerRequest(..)
, ListenResponse(..)
) where
import Control.Applicative ((<|>))
import Control.Arrow
import Control.DeepSeq (NFData)
import Control.Lens hiding ((.=))
import Control.Monad
import Data.Text (Text)
import Data.Aeson hiding (Success)
import qualified Data.HashMap.Strict as HM
import Data.List.NonEmpty (NonEmpty)
import GHC.Generics
import Pact.Types.Command
import Pact.Types.Runtime
newtype RequestKeys = RequestKeys { _rkRequestKeys :: NonEmpty RequestKey }
deriving (Show, Eq, Ord, Generic, NFData)
makeLenses ''RequestKeys
instance ToJSON RequestKeys where
toJSON = lensyToJSON 3
instance FromJSON RequestKeys where
parseJSON = lensyParseJSON 3
-- | Submit new commands for execution
newtype SubmitBatch = SubmitBatch { _sbCmds :: NonEmpty (Command Text) }
deriving (Eq,Generic,Show)
makeLenses ''SubmitBatch
instance ToJSON SubmitBatch where
toJSON = lensyToJSON 3
instance FromJSON SubmitBatch where
parseJSON = lensyParseJSON 3
| Poll for results by RequestKey
newtype Poll = Poll { _pRequestKeys :: NonEmpty RequestKey }
deriving (Eq,Show,Generic)
instance ToJSON Poll where
toJSON = lensyToJSON 2
instance FromJSON Poll where
parseJSON = lensyParseJSON 2
-- | What you get back from a Poll
newtype PollResponses = PollResponses (HM.HashMap RequestKey (CommandResult Hash))
deriving (Eq, Show, Generic)
instance ToJSON PollResponses where
toJSON (PollResponses m) = object $ map (requestKeyToB16Text *** toJSON) $ HM.toList m
instance FromJSON PollResponses where
parseJSON = withObject "PollResponses" $ \o ->
(PollResponses . HM.fromList <$> forM (HM.toList o)
(\(k,v) -> (,) <$> parseJSON (String k) <*> parseJSON v))
| ListenerRequest for results by RequestKey
newtype ListenerRequest = ListenerRequest { _lrListen :: RequestKey }
deriving (Eq,Show,Generic)
instance ToJSON ListenerRequest where
toJSON (ListenerRequest r) = object ["listen" .= r]
instance FromJSON ListenerRequest where
parseJSON = withObject "ListenerRequest" $ \o -> ListenerRequest <$> o .: "listen"
data ListenResponse
= ListenTimeout Int
| ListenResponse (CommandResult Hash)
deriving (Eq,Show,Generic)
instance ToJSON ListenResponse where
toJSON (ListenResponse r) = toJSON r
toJSON (ListenTimeout i) =
object [ "status" .= ("timeout" :: String),
"timeout-micros" .= i ]
instance FromJSON ListenResponse where
parseJSON v =
(ListenResponse <$> parseJSON v) <|>
(ListenTimeout <$> parseTimeout v)
where
parseTimeout = withObject "ListenTimeout" $ \o -> do
(s :: Text) <- o .: "status"
case s of
"timeout" -> o .: "timeout-micros"
_ -> fail "Expected timeout status"
| null |
https://raw.githubusercontent.com/kadena-io/pact/4971ab6078b75eb612d83d56f1e7cd139a5a2ba8/src/Pact/Types/API.hs
|
haskell
|
# LANGUAGE OverloadedStrings #
# LANGUAGE RankNTypes #
|
Module : Pact.Types.API
License : BSD-style (see the file LICENSE)
Pact REST API types.
| Submit new commands for execution
| What you get back from a Poll
|
# LANGUAGE DeriveGeneric #
# LANGUAGE FlexibleContexts #
# LANGUAGE GeneralizedNewtypeDeriving #
# LANGUAGE RecordWildCards #
# LANGUAGE ScopedTypeVariables #
# LANGUAGE TemplateHaskell #
Copyright : ( C ) 2016
Maintainer : < >
module Pact.Types.API
( RequestKeys(..), rkRequestKeys
, SubmitBatch(..), sbCmds
, Poll(..)
, PollResponses(..)
, ListenerRequest(..)
, ListenResponse(..)
) where
import Control.Applicative ((<|>))
import Control.Arrow
import Control.DeepSeq (NFData)
import Control.Lens hiding ((.=))
import Control.Monad
import Data.Text (Text)
import Data.Aeson hiding (Success)
import qualified Data.HashMap.Strict as HM
import Data.List.NonEmpty (NonEmpty)
import GHC.Generics
import Pact.Types.Command
import Pact.Types.Runtime
-- | Non-empty list of request keys handed back by a batch submission;
-- callers use them to poll or listen for the corresponding results.
newtype RequestKeys = RequestKeys { _rkRequestKeys :: NonEmpty RequestKey }
  deriving (Show, Eq, Ord, Generic, NFData)
makeLenses ''RequestKeys
-- JSON via Pact's lensy helpers; the 3 presumably strips the 3-char
-- "_rk" field prefix — confirm against the lensyToJSON definition.
instance ToJSON RequestKeys where
  toJSON = lensyToJSON 3
instance FromJSON RequestKeys where
  parseJSON = lensyParseJSON 3
-- | Submit new commands for execution: a non-empty batch of textual
-- 'Command's posted to the /send endpoint.
newtype SubmitBatch = SubmitBatch { _sbCmds :: NonEmpty (Command Text) }
  deriving (Eq,Generic,Show)
makeLenses ''SubmitBatch
-- Lensy JSON; the 3 presumably strips the "_sb" field prefix.
instance ToJSON SubmitBatch where
  toJSON = lensyToJSON 3
instance FromJSON SubmitBatch where
  parseJSON = lensyParseJSON 3
-- | Poll for results by RequestKey
newtype Poll = Poll { _pRequestKeys :: NonEmpty RequestKey }
  deriving (Eq,Show,Generic)
-- Lensy JSON; the 2 presumably strips the "_p" field prefix.
instance ToJSON Poll where
  toJSON = lensyToJSON 2
instance FromJSON Poll where
  parseJSON = lensyParseJSON 2
-- | What a poll returns: one (hashed) command result per polled key.
newtype PollResponses = PollResponses (HM.HashMap RequestKey (CommandResult Hash))
  deriving (Eq, Show, Generic)
instance ToJSON PollResponses where
  -- Encoded as a JSON object keyed by the base16 text of each request key.
  toJSON (PollResponses m) = object $ map (requestKeyToB16Text *** toJSON) $ HM.toList m
instance FromJSON PollResponses where
  -- Each object key is parsed back into a RequestKey via its String parser.
  parseJSON = withObject "PollResponses" $ \o ->
    (PollResponses . HM.fromList <$> forM (HM.toList o)
      (\(k,v) -> (,) <$> parseJSON (String k) <*> parseJSON v))
-- | ListenerRequest for results by RequestKey
newtype ListenerRequest = ListenerRequest { _lrListen :: RequestKey }
  deriving (Eq,Show,Generic)
instance ToJSON ListenerRequest where
  -- Wire shape: {"listen": <request key>}
  toJSON (ListenerRequest r) = object ["listen" .= r]
instance FromJSON ListenerRequest where
  parseJSON = withObject "ListenerRequest" $ \o -> ListenerRequest <$> o .: "listen"
-- | Outcome of a blocking listen: either the command's result, or a
-- server-side timeout carrying the timeout duration in microseconds.
data ListenResponse
  = ListenTimeout Int
  | ListenResponse (CommandResult Hash)
  deriving (Eq,Show,Generic)
instance ToJSON ListenResponse where
  toJSON (ListenResponse r) = toJSON r
  -- Timeout wire shape: {"status":"timeout","timeout-micros":<n>}
  toJSON (ListenTimeout i) =
    object [ "status" .= ("timeout" :: String),
             "timeout-micros" .= i ]
instance FromJSON ListenResponse where
  parseJSON v =
    -- Try the result shape first; fall back to the timeout object.
    (ListenResponse <$> parseJSON v) <|>
    (ListenTimeout <$> parseTimeout v)
    where
      parseTimeout = withObject "ListenTimeout" $ \o -> do
        (s :: Text) <- o .: "status"
        case s of
          "timeout" -> o .: "timeout-micros"
          _ -> fail "Expected timeout status"
|
f4d0d32178a949ad0169bb4d0fd07ec6a52b37683f6ca6579684adc7bd48e752
|
PuercoPop/Movitz
|
movitz.lisp
|
;;;;------------------------------------------------------------------
;;;;
Copyright ( C ) 20012000 , 2002 - 2004 ,
Department of Computer Science , University of Tromso , Norway
;;;;
;;;; Filename: movitz.lisp
;;;; Description:
Author : < >
;;;; Created at: Mon Oct 9 20:52:58 2000
;;;; Distribution: See the accompanying file COPYING.
;;;;
$ I d : movitz.lisp , v 1.12 2007/03/13 20:40:10 ffjeld Exp $
;;;;
;;;;------------------------------------------------------------------
(in-package movitz)
(defvar *i* nil) ; These hold the previous built images,
(defvar *ii* nil) ; for interactive use.
(defvar *image* nil)
(define-symbol-macro *movitz-nil*
(image-nil-object *image*))
(define-unsigned lu16 2 :little-endian)
(define-unsigned lu32 4 :little-endian)
(defconstant +code-vector-word-offset+ 2)
(defconstant +code-vector-transient-word+
(ldb (byte 32 0)
(- +code-vector-word-offset+)))
(defvar +movitz-multiple-values-limit+ 63)
(defvar *bq-level* 0)
(defvar *default-image-init-file* (asdf:system-relative-pathname :movitz #P"losp/los0.lisp"))
(defvar *default-image-file* (asdf:system-relative-pathname :movitz #P"los0-image"))
(defvar *movitz-host-features* *features*
"The *features* of the host implementation.")
(defmacro with-host-environment (options &body body)
"Execute body in a `normal' host environment."
(declare (ignore options))
`(let ((*features* *movitz-host-features*))
,@body))
(defmacro print-unreadable-movitz-object ((object stream &rest key-args) &body body)
"Just like print-unreadable-object, just adorn output so as to
make clear it's a Movitz object, with extra <..>"
(let ((stream-var (gensym "unreadable-movitz-stream-")))
`(let ((,stream-var ,stream))
(print-unreadable-object (,object ,stream-var ,@key-args)
(write-char #\< ,stream-var)
,@body
(write-char #\> ,stream-var)))))
(defun movitz-syntax-sharp-dot (stream subchar arg)
(declare (ignore arg subchar))
(let ((form (read stream t nil t)))
(values (unless *read-suppress*
(eval (muerte::translate-program form :muerte.cl :cl))))))
(defmacro with-movitz-syntax (options &body body)
(declare (ignore options))
`(let ((*readtable* (copy-readtable)))
(set-dispatch-macro-character #\# #\'
(lambda (stream subchar arg)
(declare (ignore subchar arg))
(list 'muerte.common-lisp::function
(read stream t nil t))))
(set-dispatch-macro-character #\# #\{
(lambda (stream subchar arg)
(declare (ignore subchar arg))
(let ((data (read-delimited-list #\} stream)))
(make-movitz-vector (length data)
:element-type 'movitz-unboxed-integer-u8
:initial-contents data))))
(set-dispatch-macro-character #\# #\. (lambda (stream subchar arg)
(declare (ignore arg subchar))
(let ((form (read stream t nil t)))
(values (unless *read-suppress*
(eval (muerte::translate-program form :muerte.cl :cl)))))))
(set-macro-character #\` (lambda (stream char)
(declare (ignore char))
(let ((*bq-level* (1+ *bq-level*)))
(list 'muerte::movitz-backquote (read stream t nil t)))))
(set-macro-character #\, (lambda (stream char)
(declare (ignore char))
(assert (plusp *bq-level*) ()
"Comma not inside backquote.")
(let* ((next-char (read-char stream t nil t))
(comma-type (case next-char
(#\@ 'backquote-comma-at)
(#\. 'backquote-comma-dot)
(t (unread-char next-char stream)
'backquote-comma))))
(list comma-type (read stream t nil t)))))
,@body))
(defun un-backquote (form level)
"Dont ask.."
(declare (notinline un-backquote))
(assert (not (minusp level)))
(values
(typecase form
(null nil)
(list
(case (car form)
(backquote-comma
(cadr form))
(t (cons 'append
(loop for sub-form-head on form
as sub-form = (and (consp sub-form-head)
(car sub-form-head))
collecting
(cond
((atom sub-form-head)
(list 'quote sub-form-head))
((atom sub-form)
(list 'quote (list sub-form)))
(t (case (car sub-form)
(muerte::movitz-backquote
(list 'list
(list 'list (list 'quote 'muerte::movitz-backquote)
(un-backquote (cadr sub-form) (1+ level)))))
(backquote-comma
(cond
((= 0 level)
(list 'list (cadr sub-form)))
((and (listp (cadr sub-form))
(eq 'backquote-comma-at (caadr sub-form)))
(list 'append
(list 'mapcar
'(lambda (x) (list 'backquote-comma x))
(cadr (cadr sub-form)))))
(t (list 'list
(list 'list
(list 'quote 'backquote-comma)
(un-backquote (cadr sub-form) (1- level)))))))
(backquote-comma-at
(if (= 0 level)
(cadr sub-form)
(list 'list
(list 'list
(list 'quote 'backquote-comma-at)
(un-backquote (cadr sub-form) (1- level))))))
(t (list 'list (un-backquote sub-form level))))))
when (not (listp (cdr sub-form-head)))
collect (list 'quote (cdr sub-form-head)))
))))
(array
(error "Array backquote not implemented."))
(t (list 'quote form)))))
(defmacro muerte::movitz-backquote (form)
(un-backquote form 0))
#+allegro
(excl:defsystem :movitz ()
(:serial
"movitz"
"parse"
"eval"
"multiboot"
"bootblock"
"environment"
"compiler-types"
(:definitions "compiler-protocol"
"storage-types")
"image"
"stream-image"
"procfs-image"
"assembly-syntax"
(:definitions "compiler-protocol"
(:parallel "compiler" "special-operators" "special-operators-cl"))))
#+allegro
(progn
(defun muerte.common-lisp::package-name (package)
(package-name package))
(defun muerte.cl:find-package (name)
(find-package name)))
| null |
https://raw.githubusercontent.com/PuercoPop/Movitz/7ffc41896c1e054aa43f44d64bbe9eaf3fcfa777/movitz.lisp
|
lisp
|
------------------------------------------------------------------
Filename: movitz.lisp
Description:
Created at: Mon Oct 9 20:52:58 2000
Distribution: See the accompanying file COPYING.
------------------------------------------------------------------
These hold the previous built images,
for interactive use.
|
Copyright ( C ) 20012000 , 2002 - 2004 ,
Department of Computer Science , University of Tromso , Norway
Author : < >
$ I d : movitz.lisp , v 1.12 2007/03/13 20:40:10 ffjeld Exp $
(in-package movitz)
;; The Movitz image object currently being built; *movitz-nil* resolves
;; the target's NIL through it.
(defvar *image* nil)
(define-symbol-macro *movitz-nil*
    (image-nil-object *image*))
;; 2- and 4-byte little-endian unsigned binary field types.
(define-unsigned lu16 2 :little-endian)
(define-unsigned lu32 4 :little-endian)
;; NOTE(review): offset (in words) applied to code-vector pointers;
;; the transient word is its 32-bit two's-complement negation.
(defconstant +code-vector-word-offset+ 2)
(defconstant +code-vector-transient-word+
    (ldb (byte 32 0)
         (- +code-vector-word-offset+)))
;; Target's multiple-values-limit (defvar, not defconstant, so it can
;; be re-evaluated without constant-redefinition errors).
(defvar +movitz-multiple-values-limit+ 63)
;; Current backquote nesting depth for the reader macros installed by
;; WITH-MOVITZ-SYNTAX below.
(defvar *bq-level* 0)
;; Default init-file and output image locations, relative to the
;; :movitz ASDF system.
(defvar *default-image-init-file* (asdf:system-relative-pathname :movitz #P"losp/los0.lisp"))
(defvar *default-image-file* (asdf:system-relative-pathname :movitz #P"los0-image"))
(defvar *movitz-host-features* *features*
  "The *features* of the host implementation.")
(defmacro with-host-environment (options &body body)
  "Execute body in a `normal' host environment."
  ;; Rebinds *features* to the host feature list captured at load time,
  ;; so BODY's read/compile conditionals see the host, not the Movitz
  ;; target.  OPTIONS is reserved and currently ignored.
  (declare (ignore options))
  `(let ((*features* *movitz-host-features*))
     ,@body))
(defmacro print-unreadable-movitz-object ((object stream &rest key-args) &body body)
  "Just like print-unreadable-object, just adorn output so as to
make clear it's a Movitz object, with extra <..>"
  ;; STREAM is evaluated exactly once via the gensym binding; BODY's
  ;; output is wrapped in < and > inside the usual #<...> envelope.
  (let ((stream-var (gensym "unreadable-movitz-stream-")))
    `(let ((,stream-var ,stream))
       (print-unreadable-object (,object ,stream-var ,@key-args)
         (write-char #\< ,stream-var)
         ,@body
         (write-char #\> ,stream-var)))))
(defun movitz-syntax-sharp-dot (stream subchar arg)
  ;; Reader function for #. under Movitz syntax: read the next form,
  ;; translate it from the target's muerte.cl package to host CL, and
  ;; evaluate it at read time.  Honors *read-suppress* by returning no
  ;; value instead of evaluating.
  (declare (ignore arg subchar))
  (let ((form (read stream t nil t)))
    (values (unless *read-suppress*
              (eval (muerte::translate-program form :muerte.cl :cl))))))
(defmacro with-movitz-syntax (options &body body)
  ;; Installs Movitz reader syntax in a *copy* of the current readtable
  ;; for the duration of BODY:
  ;;   #'x     -> (muerte.common-lisp::function x)
  ;;   #{...}  -> a Movitz vector of unboxed u8 elements
  ;;   #.form  -> read-time eval of FORM translated to host CL
  ;;   ` and , -> the movitz-backquote representation consumed by
  ;;              UN-BACKQUOTE below (comma depth checked via *bq-level*).
  ;; OPTIONS is reserved and currently ignored.
  (declare (ignore options))
  `(let ((*readtable* (copy-readtable)))
     (set-dispatch-macro-character #\# #\'
                                   (lambda (stream subchar arg)
                                     (declare (ignore subchar arg))
                                     (list 'muerte.common-lisp::function
                                           (read stream t nil t))))
     (set-dispatch-macro-character #\# #\{
                                   (lambda (stream subchar arg)
                                     (declare (ignore subchar arg))
                                     (let ((data (read-delimited-list #\} stream)))
                                       (make-movitz-vector (length data)
                                                           :element-type 'movitz-unboxed-integer-u8
                                                           :initial-contents data))))
     (set-dispatch-macro-character #\# #\. (lambda (stream subchar arg)
                                             (declare (ignore arg subchar))
                                             (let ((form (read stream t nil t)))
                                               (values (unless *read-suppress*
                                                         (eval (muerte::translate-program form :muerte.cl :cl)))))))
     ;; Backquote increments the depth counter around the subform read.
     (set-macro-character #\` (lambda (stream char)
                                (declare (ignore char))
                                (let ((*bq-level* (1+ *bq-level*)))
                                  (list 'muerte::movitz-backquote (read stream t nil t)))))
     ;; Comma is only legal inside a backquote; ,@ and ,. get their own
     ;; marker symbols, otherwise the next char is pushed back.
     (set-macro-character #\, (lambda (stream char)
                                (declare (ignore char))
                                (assert (plusp *bq-level*) ()
                                    "Comma not inside backquote.")
                                (let* ((next-char (read-char stream t nil t))
                                       (comma-type (case next-char
                                                     (#\@ 'backquote-comma-at)
                                                     (#\. 'backquote-comma-dot)
                                                     (t (unread-char next-char stream)
                                                        'backquote-comma))))
                                  (list comma-type (read stream t nil t)))))
     ,@body))
(defun un-backquote (form level)
  "Dont ask.."
  ;; Expands the movitz-backquote reader representation (see
  ;; WITH-MOVITZ-SYNTAX) into ordinary list-building code made of
  ;; APPEND/LIST/QUOTE forms.  LEVEL is the current backquote nesting
  ;; depth: commas only substitute/splice when they match the
  ;; outermost level (0); deeper ones are re-emitted as data so a
  ;; later expansion handles them.
  (declare (notinline un-backquote))
  (assert (not (minusp level)))
  (values
   (typecase form
     (null nil)
     (list
      (case (car form)
        (backquote-comma
         (cadr form))
        ;; General list: build an APPEND whose arguments wrap each
        ;; element, walking sub-form-head so a dotted tail is caught.
        (t (cons 'append
                 (loop for sub-form-head on form
                     as sub-form = (and (consp sub-form-head)
                                        (car sub-form-head))
                     collecting
                       (cond
                        ((atom sub-form-head)
                         (list 'quote sub-form-head))
                        ((atom sub-form)
                         (list 'quote (list sub-form)))
                        (t (case (car sub-form)
                             ;; Nested backquote: keep it as data, one
                             ;; level deeper.
                             (muerte::movitz-backquote
                              (list 'list
                                    (list 'list (list 'quote 'muerte::movitz-backquote)
                                          (un-backquote (cadr sub-form) (1+ level)))))
                             (backquote-comma
                              (cond
                               ((= 0 level)
                                (list 'list (cadr sub-form)))
                               ;; ,,@x at depth: splice comma markers
                               ;; over the spliced elements.
                               ((and (listp (cadr sub-form))
                                     (eq 'backquote-comma-at (caadr sub-form)))
                                (list 'append
                                      (list 'mapcar
                                            '(lambda (x) (list 'backquote-comma x))
                                            (cadr (cadr sub-form)))))
                               (t (list 'list
                                        (list 'list
                                              (list 'quote 'backquote-comma)
                                              (un-backquote (cadr sub-form) (1- level)))))))
                             (backquote-comma-at
                              (if (= 0 level)
                                  (cadr sub-form)
                                (list 'list
                                      (list 'list
                                            (list 'quote 'backquote-comma-at)
                                            (un-backquote (cadr sub-form) (1- level))))))
                             (t (list 'list (un-backquote sub-form level))))))
                     ;; Dotted list tail: quote the final cdr as-is.
                     when (not (listp (cdr sub-form-head)))
                     collect (list 'quote (cdr sub-form-head)))
                 ))))
     (array
      (error "Array backquote not implemented."))
     (t (list 'quote form)))))
;; Macro behind the ` reader syntax installed by WITH-MOVITZ-SYNTAX;
;; delegates the whole expansion to UN-BACKQUOTE at depth 0.
(defmacro muerte::movitz-backquote (form)
  (un-backquote form 0))
;; Allegro CL only: legacy excl:defsystem definition of the Movitz
;; build order (compiler-protocol is a :definitions prerequisite of
;; the storage types and of the compiler proper).
#+allegro
(excl:defsystem :movitz ()
  (:serial
   "movitz"
   "parse"
   "eval"
   "multiboot"
   "bootblock"
   "environment"
   "compiler-types"
   (:definitions "compiler-protocol"
       "storage-types")
   "image"
   "stream-image"
   "procfs-image"
   "assembly-syntax"
   (:definitions "compiler-protocol"
       (:parallel "compiler" "special-operators" "special-operators-cl"))))
;; Allegro CL only: bridge the target's muerte package functions onto
;; the host implementations.
#+allegro
(progn
  (defun muerte.common-lisp::package-name (package)
    (package-name package))
  (defun muerte.cl:find-package (name)
    (find-package name)))
|
8680e7a94ba92993b62939650063adca3d413ab9fd886a644de32284861df2ba
|
zadean/xqerl
|
fn_contains_token_SUITE.erl
|
-module('fn_contains_token_SUITE').
-include_lib("common_test/include/ct.hrl").
-export([
all/0,
groups/0,
suite/0
]).
-export([
init_per_suite/1,
init_per_group/2,
end_per_group/2,
end_per_suite/1
]).
-export(['fn-contains-token-09'/1]).
-export(['fn-contains-token-10'/1]).
-export(['fn-contains-token-11'/1]).
-export(['fn-contains-token-12'/1]).
-export(['fn-contains-token-13'/1]).
-export(['fn-contains-token-14'/1]).
-export(['fn-contains-token-15'/1]).
-export(['fn-contains-token-16'/1]).
-export(['fn-contains-token-17'/1]).
-export(['fn-contains-token-18'/1]).
-export(['fn-contains-token-19'/1]).
-export(['fn-contains-token-20'/1]).
-export(['fn-contains-token-21'/1]).
-export(['fn-contains-token-22'/1]).
-export(['fn-contains-token-39'/1]).
-export(['fn-contains-token-40'/1]).
-export(['fn-contains-token-41'/1]).
-export(['fn-contains-token-42'/1]).
-export(['fn-contains-token-43'/1]).
-export(['fn-contains-token-44'/1]).
-export(['fn-contains-token-45'/1]).
-export(['fn-contains-token-46'/1]).
-export(['fn-contains-token-47'/1]).
-export(['fn-contains-token-48'/1]).
-export(['fn-contains-token-49'/1]).
-export(['fn-contains-token-50'/1]).
-export(['fn-contains-token-51'/1]).
-export(['fn-contains-token-52'/1]).
-export(['fn-contains-token-60'/1]).
-export(['fn-contains-token-61'/1]).
-export(['fn-contains-token-62'/1]).
-export(['fn-contains-token-63'/1]).
-export(['fn-contains-token-64'/1]).
-export(['fn-contains-token-65'/1]).
-export(['fn-contains-token-70'/1]).
-export(['fn-contains-token-71'/1]).
-export(['fn-contains-token-72'/1]).
suite() -> [{timetrap, {seconds, 180}}].
init_per_group(_, Config) -> Config.
%% Unload every query module compiled during the group, keeping
%% groups isolated from each other.
end_per_group(_, _Config) ->
    xqerl_code_server:unload(all).
%% Tighten the timetrap for cleanup and unload all compiled modules.
end_per_suite(_Config) ->
    ct:timetrap({seconds, 60}),
    xqerl_code_server:unload(all).
%% Start the xqerl application once for the whole suite and locate the
%% QT3 test-suite "fn" directory three levels above the CT data dir;
%% its path is exposed to every case as {base_dir, Dir}.
init_per_suite(Config) ->
    {ok, _} = application:ensure_all_started(xqerl),
    DD = filename:dirname(filename:dirname(filename:dirname(?config(data_dir, Config)))),
    TD = filename:join(DD, "QT3-test-suite"),
    __BaseDir = filename:join(TD, "fn"),
    [{base_dir, __BaseDir} | Config].
%% Run both generated case groups, in the original order.
all() ->
    [{group, G} || G <- [group_0, group_1]].
%% Two groups of parallel-run QT3 fn:contains-token cases.  The case
%% lists are unchanged from the generated suite; they are merely bound
%% to local names for readability.
groups() ->
    Group0 =
        ['fn-contains-token-09',
         'fn-contains-token-10',
         'fn-contains-token-11',
         'fn-contains-token-12',
         'fn-contains-token-13',
         'fn-contains-token-14',
         'fn-contains-token-15',
         'fn-contains-token-16',
         'fn-contains-token-17',
         'fn-contains-token-18',
         'fn-contains-token-19',
         'fn-contains-token-20',
         'fn-contains-token-21',
         'fn-contains-token-22',
         'fn-contains-token-39',
         'fn-contains-token-40',
         'fn-contains-token-41',
         'fn-contains-token-42',
         'fn-contains-token-43',
         'fn-contains-token-44',
         'fn-contains-token-45',
         'fn-contains-token-46',
         'fn-contains-token-47'],
    Group1 =
        ['fn-contains-token-48',
         'fn-contains-token-49',
         'fn-contains-token-50',
         'fn-contains-token-51',
         'fn-contains-token-52',
         'fn-contains-token-60',
         'fn-contains-token-61',
         'fn-contains-token-62',
         'fn-contains-token-63',
         'fn-contains-token-64',
         'fn-contains-token-65',
         'fn-contains-token-70',
         'fn-contains-token-71',
         'fn-contains-token-72'],
    [{group_0, [parallel], Group0},
     {group_1, [parallel], Group1}].
%% QT3 case fn-contains-token-09: contains-token("", "") must be false.
%% Every case in this suite follows the same generated shape: build the
%% XQuery string, compile it under the case's .xq path, run it, and
%% check the result with the expected xqerl_test assertion; any
%% compile/run crash is captured and fed to the assertion as-is.
'fn-contains-token-09'(Config) ->
    __BaseDir = ?config(base_dir, Config),
    Qry = "contains-token(\"\", \"\")",
    %% Qry1 exists because environment-dependent cases prepend a prolog;
    %% here it is the query unchanged.
    Qry1 = Qry,
    io:format("Qry1: ~p~n", [Qry1]),
    Res =
        try
            Mod = xqerl_code_server:compile(
                filename:join(__BaseDir, "fn-contains-token-09.xq"),
                Qry1
            ),
            xqerl:run(Mod)
        of
            D -> D
        catch
            _:E -> E
        end,
    Out =
        case xqerl_test:assert_false(Res) of
            true -> {comment, "Empty"};
            {false, F} -> F
        end,
    case Out of
        {comment, C} -> {comment, C};
        Err -> ct:fail(Err)
    end.
'fn-contains-token-10'(Config) ->
__BaseDir = ?config(base_dir, Config),
Qry = "contains-token((), \"\")",
Qry1 = Qry,
io:format("Qry1: ~p~n", [Qry1]),
Res =
try
Mod = xqerl_code_server:compile(
filename:join(__BaseDir, "fn-contains-token-10.xq"),
Qry1
),
xqerl:run(Mod)
of
D -> D
catch
_:E -> E
end,
Out =
case xqerl_test:assert_false(Res) of
true -> {comment, "Empty"};
{false, F} -> F
end,
case Out of
{comment, C} -> {comment, C};
Err -> ct:fail(Err)
end.
'fn-contains-token-11'(Config) ->
__BaseDir = ?config(base_dir, Config),
Qry = "contains-token(\" \", \" \")",
Qry1 = Qry,
io:format("Qry1: ~p~n", [Qry1]),
Res =
try
Mod = xqerl_code_server:compile(
filename:join(__BaseDir, "fn-contains-token-11.xq"),
Qry1
),
xqerl:run(Mod)
of
D -> D
catch
_:E -> E
end,
Out =
case xqerl_test:assert_false(Res) of
true -> {comment, "Empty"};
{false, F} -> F
end,
case Out of
{comment, C} -> {comment, C};
Err -> ct:fail(Err)
end.
'fn-contains-token-12'(Config) ->
__BaseDir = ?config(base_dir, Config),
Qry = "contains-token(codepoints-to-string((9, 10, 13, 32, 13, 10, 9)), \"\")",
Qry1 = Qry,
io:format("Qry1: ~p~n", [Qry1]),
Res =
try
Mod = xqerl_code_server:compile(
filename:join(__BaseDir, "fn-contains-token-12.xq"),
Qry1
),
xqerl:run(Mod)
of
D -> D
catch
_:E -> E
end,
Out =
case xqerl_test:assert_false(Res) of
true -> {comment, "Empty"};
{false, F} -> F
end,
case Out of
{comment, C} -> {comment, C};
Err -> ct:fail(Err)
end.
'fn-contains-token-13'(Config) ->
__BaseDir = ?config(base_dir, Config),
Qry = "contains-token(\"abc\", \"abc\")",
Qry1 = Qry,
io:format("Qry1: ~p~n", [Qry1]),
Res =
try
Mod = xqerl_code_server:compile(
filename:join(__BaseDir, "fn-contains-token-13.xq"),
Qry1
),
xqerl:run(Mod)
of
D -> D
catch
_:E -> E
end,
Out =
case xqerl_test:assert_true(Res) of
true -> {comment, "Empty"};
{false, F} -> F
end,
case Out of
{comment, C} -> {comment, C};
Err -> ct:fail(Err)
end.
'fn-contains-token-14'(Config) ->
__BaseDir = ?config(base_dir, Config),
Qry = "contains-token(\" abc \", \"abc\")",
Qry1 = Qry,
io:format("Qry1: ~p~n", [Qry1]),
Res =
try
Mod = xqerl_code_server:compile(
filename:join(__BaseDir, "fn-contains-token-14.xq"),
Qry1
),
xqerl:run(Mod)
of
D -> D
catch
_:E -> E
end,
Out =
case xqerl_test:assert_true(Res) of
true -> {comment, "Empty"};
{false, F} -> F
end,
case Out of
{comment, C} -> {comment, C};
Err -> ct:fail(Err)
end.
'fn-contains-token-15'(Config) ->
__BaseDir = ?config(base_dir, Config),
Qry = "contains-token(codepoints-to-string((9, 10, 97, 98, 99, 13, 32)), \"abc\")",
Qry1 = Qry,
io:format("Qry1: ~p~n", [Qry1]),
Res =
try
Mod = xqerl_code_server:compile(
filename:join(__BaseDir, "fn-contains-token-15.xq"),
Qry1
),
xqerl:run(Mod)
of
D -> D
catch
_:E -> E
end,
Out =
case xqerl_test:assert_true(Res) of
true -> {comment, "Empty"};
{false, F} -> F
end,
case Out of
{comment, C} -> {comment, C};
Err -> ct:fail(Err)
end.
'fn-contains-token-16'(Config) ->
__BaseDir = ?config(base_dir, Config),
Qry = "contains-token(\"abc def\", \"def\")",
Qry1 = Qry,
io:format("Qry1: ~p~n", [Qry1]),
Res =
try
Mod = xqerl_code_server:compile(
filename:join(__BaseDir, "fn-contains-token-16.xq"),
Qry1
),
xqerl:run(Mod)
of
D -> D
catch
_:E -> E
end,
Out =
case xqerl_test:assert_true(Res) of
true -> {comment, "Empty"};
{false, F} -> F
end,
case Out of
{comment, C} -> {comment, C};
Err -> ct:fail(Err)
end.
'fn-contains-token-17'(Config) ->
__BaseDir = ?config(base_dir, Config),
Qry = "contains-token(codepoints-to-string((97, 98, 99, 13, 32, 10, 100, 101, 102)), \"def\")",
Qry1 = Qry,
io:format("Qry1: ~p~n", [Qry1]),
Res =
try
Mod = xqerl_code_server:compile(
filename:join(__BaseDir, "fn-contains-token-17.xq"),
Qry1
),
xqerl:run(Mod)
of
D -> D
catch
_:E -> E
end,
Out =
case xqerl_test:assert_true(Res) of
true -> {comment, "Empty"};
{false, F} -> F
end,
case Out of
{comment, C} -> {comment, C};
Err -> ct:fail(Err)
end.
'fn-contains-token-18'(Config) ->
__BaseDir = ?config(base_dir, Config),
Qry =
"contains-token(codepoints-to-string((9, 9, 97, 98, 99, 13, 32, 10, 100, 101, 102, 10, 10)), \"abc\")",
Qry1 = Qry,
io:format("Qry1: ~p~n", [Qry1]),
Res =
try
Mod = xqerl_code_server:compile(
filename:join(__BaseDir, "fn-contains-token-18.xq"),
Qry1
),
xqerl:run(Mod)
of
D -> D
catch
_:E -> E
end,
Out =
case xqerl_test:assert_true(Res) of
true -> {comment, "Empty"};
{false, F} -> F
end,
case Out of
{comment, C} -> {comment, C};
Err -> ct:fail(Err)
end.
'fn-contains-token-19'(Config) ->
__BaseDir = ?config(base_dir, Config),
Qry = "contains-token(\" the quick brown fox jumped over the lazy dog \", 'fox')",
Qry1 = Qry,
io:format("Qry1: ~p~n", [Qry1]),
Res =
try
Mod = xqerl_code_server:compile(
filename:join(__BaseDir, "fn-contains-token-19.xq"),
Qry1
),
xqerl:run(Mod)
of
D -> D
catch
_:E -> E
end,
Out =
case xqerl_test:assert_true(Res) of
true -> {comment, "Empty"};
{false, F} -> F
end,
case Out of
{comment, C} -> {comment, C};
Err -> ct:fail(Err)
end.
%% QT3 case fn-contains-token-20 is explicitly skipped with reason
%% "xml-version:1.1" (the case depends on an XML 1.1 feature).
'fn-contains-token-20'(Config) ->
    __BaseDir = ?config(base_dir, Config),
    {skip, "xml-version:1.1"}.
'fn-contains-token-21'(Config) ->
__BaseDir = ?config(base_dir, Config),
Qry = "contains-token(codepoints-to-string((97, 98, 99, 160, 100, 101, 102)), \"abc\")",
Qry1 = Qry,
io:format("Qry1: ~p~n", [Qry1]),
Res =
try
Mod = xqerl_code_server:compile(
filename:join(__BaseDir, "fn-contains-token-21.xq"),
Qry1
),
xqerl:run(Mod)
of
D -> D
catch
_:E -> E
end,
Out =
case xqerl_test:assert_false(Res) of
true -> {comment, "Empty"};
{false, F} -> F
end,
case Out of
{comment, C} -> {comment, C};
Err -> ct:fail(Err)
end.
%% QT3 case fn-contains-token-22: applies each of contains-token#2,
%% starts-with#2 and ends-with#2 to ("abc def","def") via the simple
%% map operator and checks the result sequence deep-equals
%% (true(), false(), true()).
'fn-contains-token-22'(Config) ->
    __BaseDir = ?config(base_dir, Config),
    Qry = "(contains-token#2, starts-with#2, ends-with#2)!.(\"abc def\", \"def\")",
    Qry1 = Qry,
    io:format("Qry1: ~p~n", [Qry1]),
    Res =
        try
            Mod = xqerl_code_server:compile(
                filename:join(__BaseDir, "fn-contains-token-22.xq"),
                Qry1
            ),
            xqerl:run(Mod)
        of
            D -> D
        catch
            _:E -> E
        end,
    Out =
        case xqerl_test:assert_deep_eq(Res, "true(), false(), true()") of
            true -> {comment, "Deep equal"};
            {false, F} -> F
        end,
    case Out of
        {comment, C} -> {comment, C};
        Err -> ct:fail(Err)
    end.
'fn-contains-token-39'(Config) ->
__BaseDir = ?config(base_dir, Config),
Qry = "contains-token(\"\", \"zz\")",
Qry1 = Qry,
io:format("Qry1: ~p~n", [Qry1]),
Res =
try
Mod = xqerl_code_server:compile(
filename:join(__BaseDir, "fn-contains-token-39.xq"),
Qry1
),
xqerl:run(Mod)
of
D -> D
catch
_:E -> E
end,
Out =
case xqerl_test:assert_false(Res) of
true -> {comment, "Empty"};
{false, F} -> F
end,
case Out of
{comment, C} -> {comment, C};
Err -> ct:fail(Err)
end.
'fn-contains-token-40'(Config) ->
__BaseDir = ?config(base_dir, Config),
Qry = "contains-token((), \"zz\")",
Qry1 = Qry,
io:format("Qry1: ~p~n", [Qry1]),
Res =
try
Mod = xqerl_code_server:compile(
filename:join(__BaseDir, "fn-contains-token-40.xq"),
Qry1
),
xqerl:run(Mod)
of
D -> D
catch
_:E -> E
end,
Out =
case xqerl_test:assert_false(Res) of
true -> {comment, "Empty"};
{false, F} -> F
end,
case Out of
{comment, C} -> {comment, C};
Err -> ct:fail(Err)
end.
'fn-contains-token-41'(Config) ->
__BaseDir = ?config(base_dir, Config),
Qry = "contains-token(\" \", \"zz\")",
Qry1 = Qry,
io:format("Qry1: ~p~n", [Qry1]),
Res =
try
Mod = xqerl_code_server:compile(
filename:join(__BaseDir, "fn-contains-token-41.xq"),
Qry1
),
xqerl:run(Mod)
of
D -> D
catch
_:E -> E
end,
Out =
case xqerl_test:assert_false(Res) of
true -> {comment, "Empty"};
{false, F} -> F
end,
case Out of
{comment, C} -> {comment, C};
Err -> ct:fail(Err)
end.
'fn-contains-token-42'(Config) ->
__BaseDir = ?config(base_dir, Config),
Qry = "contains-token(codepoints-to-string((9, 10, 13, 32, 13, 10, 9)), \"zz\")",
Qry1 = Qry,
io:format("Qry1: ~p~n", [Qry1]),
Res =
try
Mod = xqerl_code_server:compile(
filename:join(__BaseDir, "fn-contains-token-42.xq"),
Qry1
),
xqerl:run(Mod)
of
D -> D
catch
_:E -> E
end,
Out =
case xqerl_test:assert_false(Res) of
true -> {comment, "Empty"};
{false, F} -> F
end,
case Out of
{comment, C} -> {comment, C};
Err -> ct:fail(Err)
end.
'fn-contains-token-43'(Config) ->
__BaseDir = ?config(base_dir, Config),
Qry = "contains-token(\"abc\", \"zz\")",
Qry1 = Qry,
io:format("Qry1: ~p~n", [Qry1]),
Res =
try
Mod = xqerl_code_server:compile(
filename:join(__BaseDir, "fn-contains-token-43.xq"),
Qry1
),
xqerl:run(Mod)
of
D -> D
catch
_:E -> E
end,
Out =
case xqerl_test:assert_false(Res) of
true -> {comment, "Empty"};
{false, F} -> F
end,
case Out of
{comment, C} -> {comment, C};
Err -> ct:fail(Err)
end.
'fn-contains-token-44'(Config) ->
__BaseDir = ?config(base_dir, Config),
Qry = "contains-token(\" abc \", \"zz\")",
Qry1 = Qry,
io:format("Qry1: ~p~n", [Qry1]),
Res =
try
Mod = xqerl_code_server:compile(
filename:join(__BaseDir, "fn-contains-token-44.xq"),
Qry1
),
xqerl:run(Mod)
of
D -> D
catch
_:E -> E
end,
Out =
case xqerl_test:assert_false(Res) of
true -> {comment, "Empty"};
{false, F} -> F
end,
case Out of
{comment, C} -> {comment, C};
Err -> ct:fail(Err)
end.
'fn-contains-token-45'(Config) ->
__BaseDir = ?config(base_dir, Config),
Qry = "contains-token(codepoints-to-string((9, 10, 97, 98, 99, 13, 32)), \"zz\")",
Qry1 = Qry,
io:format("Qry1: ~p~n", [Qry1]),
Res =
try
Mod = xqerl_code_server:compile(
filename:join(__BaseDir, "fn-contains-token-45.xq"),
Qry1
),
xqerl:run(Mod)
of
D -> D
catch
_:E -> E
end,
Out =
case xqerl_test:assert_false(Res) of
true -> {comment, "Empty"};
{false, F} -> F
end,
case Out of
{comment, C} -> {comment, C};
Err -> ct:fail(Err)
end.
'fn-contains-token-46'(Config) ->
__BaseDir = ?config(base_dir, Config),
Qry = "contains-token(\"abc def\", \"zz\")",
Qry1 = Qry,
io:format("Qry1: ~p~n", [Qry1]),
Res =
try
Mod = xqerl_code_server:compile(
filename:join(__BaseDir, "fn-contains-token-46.xq"),
Qry1
),
xqerl:run(Mod)
of
D -> D
catch
_:E -> E
end,
Out =
case xqerl_test:assert_false(Res) of
true -> {comment, "Empty"};
{false, F} -> F
end,
case Out of
{comment, C} -> {comment, C};
Err -> ct:fail(Err)
end.
'fn-contains-token-47'(Config) ->
__BaseDir = ?config(base_dir, Config),
Qry = "contains-token(codepoints-to-string((97, 98, 99, 13, 32, 10, 100, 101, 102)), \"zz\")",
Qry1 = Qry,
io:format("Qry1: ~p~n", [Qry1]),
Res =
try
Mod = xqerl_code_server:compile(
filename:join(__BaseDir, "fn-contains-token-47.xq"),
Qry1
),
xqerl:run(Mod)
of
D -> D
catch
_:E -> E
end,
Out =
case xqerl_test:assert_false(Res) of
true -> {comment, "Empty"};
{false, F} -> F
end,
case Out of
{comment, C} -> {comment, C};
Err -> ct:fail(Err)
end.
'fn-contains-token-48'(Config) ->
__BaseDir = ?config(base_dir, Config),
Qry =
"contains-token(codepoints-to-string((9, 9, 97, 98, 99, 13, 32, 10, 100, 101, 102, 10, 10)), \"zz\")",
Qry1 = Qry,
io:format("Qry1: ~p~n", [Qry1]),
Res =
try
Mod = xqerl_code_server:compile(
filename:join(__BaseDir, "fn-contains-token-48.xq"),
Qry1
),
xqerl:run(Mod)
of
D -> D
catch
_:E -> E
end,
Out =
case xqerl_test:assert_false(Res) of
true -> {comment, "Empty"};
{false, F} -> F
end,
case Out of
{comment, C} -> {comment, C};
Err -> ct:fail(Err)
end.
'fn-contains-token-49'(Config) ->
__BaseDir = ?config(base_dir, Config),
Qry = "contains-token(\" the quick brown fox jumped over the lazy dog \", 'zz')",
Qry1 = Qry,
io:format("Qry1: ~p~n", [Qry1]),
Res =
try
Mod = xqerl_code_server:compile(
filename:join(__BaseDir, "fn-contains-token-49.xq"),
Qry1
),
xqerl:run(Mod)
of
D -> D
catch
_:E -> E
end,
Out =
case xqerl_test:assert_false(Res) of
true -> {comment, "Empty"};
{false, F} -> F
end,
case Out of
{comment, C} -> {comment, C};
Err -> ct:fail(Err)
end.
'fn-contains-token-50'(Config) ->
__BaseDir = ?config(base_dir, Config),
{skip, "xml-version:1.1"}.
'fn-contains-token-51'(Config) ->
__BaseDir = ?config(base_dir, Config),
Qry = "contains-token(codepoints-to-string((97, 98, 99, 160, 100, 101, 102)), \"abc\")",
Qry1 = Qry,
io:format("Qry1: ~p~n", [Qry1]),
Res =
try
Mod = xqerl_code_server:compile(
filename:join(__BaseDir, "fn-contains-token-51.xq"),
Qry1
),
xqerl:run(Mod)
of
D -> D
catch
_:E -> E
end,
Out =
case xqerl_test:assert_false(Res) of
true -> {comment, "Empty"};
{false, F} -> F
end,
case Out of
{comment, C} -> {comment, C};
Err -> ct:fail(Err)
end.
'fn-contains-token-52'(Config) ->
__BaseDir = ?config(base_dir, Config),
Qry = "(contains-token#2, substring-before#2, substring-after#2)[1](\"abc def\", \"zz\")",
Qry1 = Qry,
io:format("Qry1: ~p~n", [Qry1]),
Res =
try
Mod = xqerl_code_server:compile(
filename:join(__BaseDir, "fn-contains-token-52.xq"),
Qry1
),
xqerl:run(Mod)
of
D -> D
catch
_:E -> E
end,
Out =
case xqerl_test:assert_false(Res) of
true -> {comment, "Empty"};
{false, F} -> F
end,
case Out of
{comment, C} -> {comment, C};
Err -> ct:fail(Err)
end.
'fn-contains-token-60'(Config) ->
__BaseDir = ?config(base_dir, Config),
Qry = "contains-token((\"abc\", \"def\"), \"\")",
Qry1 = Qry,
io:format("Qry1: ~p~n", [Qry1]),
Res =
try
Mod = xqerl_code_server:compile(
filename:join(__BaseDir, "fn-contains-token-60.xq"),
Qry1
),
xqerl:run(Mod)
of
D -> D
catch
_:E -> E
end,
Out =
case xqerl_test:assert_false(Res) of
true -> {comment, "Empty"};
{false, F} -> F
end,
case Out of
{comment, C} -> {comment, C};
Err -> ct:fail(Err)
end.
'fn-contains-token-61'(Config) ->
__BaseDir = ?config(base_dir, Config),
Qry = "contains-token((\"abc\", \"def\"), \" abc \")",
Qry1 = Qry,
io:format("Qry1: ~p~n", [Qry1]),
Res =
try
Mod = xqerl_code_server:compile(
filename:join(__BaseDir, "fn-contains-token-61.xq"),
Qry1
),
xqerl:run(Mod)
of
D -> D
catch
_:E -> E
end,
Out =
case xqerl_test:assert_true(Res) of
true -> {comment, "Empty"};
{false, F} -> F
end,
case Out of
{comment, C} -> {comment, C};
Err -> ct:fail(Err)
end.
'fn-contains-token-62'(Config) ->
__BaseDir = ?config(base_dir, Config),
Qry =
"contains-token((\"abc\", \"def\"), codepoints-to-string((9, 10, 13, 97, 98, 99, 32, 13, 10, 9)))",
Qry1 = Qry,
io:format("Qry1: ~p~n", [Qry1]),
Res =
try
Mod = xqerl_code_server:compile(
filename:join(__BaseDir, "fn-contains-token-62.xq"),
Qry1
),
xqerl:run(Mod)
of
D -> D
catch
_:E -> E
end,
Out =
case xqerl_test:assert_true(Res) of
true -> {comment, "Empty"};
{false, F} -> F
end,
case Out of
{comment, C} -> {comment, C};
Err -> ct:fail(Err)
end.
'fn-contains-token-63'(Config) ->
__BaseDir = ?config(base_dir, Config),
Qry = "contains-token((\"abc def\", \"ghi\"), \"abc\")",
Qry1 = Qry,
io:format("Qry1: ~p~n", [Qry1]),
Res =
try
Mod = xqerl_code_server:compile(
filename:join(__BaseDir, "fn-contains-token-63.xq"),
Qry1
),
xqerl:run(Mod)
of
D -> D
catch
_:E -> E
end,
Out =
case xqerl_test:assert_true(Res) of
true -> {comment, "Empty"};
{false, F} -> F
end,
case Out of
{comment, C} -> {comment, C};
Err -> ct:fail(Err)
end.
'fn-contains-token-64'(Config) ->
__BaseDir = ?config(base_dir, Config),
Qry = "contains-token((\" abc def \", \"ghi\"), \"def\")",
Qry1 = Qry,
io:format("Qry1: ~p~n", [Qry1]),
Res =
try
Mod = xqerl_code_server:compile(
filename:join(__BaseDir, "fn-contains-token-64.xq"),
Qry1
),
xqerl:run(Mod)
of
D -> D
catch
_:E -> E
end,
Out =
case xqerl_test:assert_true(Res) of
true -> {comment, "Empty"};
{false, F} -> F
end,
case Out of
{comment, C} -> {comment, C};
Err -> ct:fail(Err)
end.
'fn-contains-token-65'(Config) ->
__BaseDir = ?config(base_dir, Config),
Qry = "contains-token(codepoints-to-string((9, 10, 97, 98, 99, 13, 32)), \"abc\")",
Qry1 = Qry,
io:format("Qry1: ~p~n", [Qry1]),
Res =
try
Mod = xqerl_code_server:compile(
filename:join(__BaseDir, "fn-contains-token-65.xq"),
Qry1
),
xqerl:run(Mod)
of
D -> D
catch
_:E -> E
end,
Out =
case xqerl_test:assert_true(Res) of
true -> {comment, "Empty"};
{false, F} -> F
end,
case Out of
{comment, C} -> {comment, C};
Err -> ct:fail(Err)
end.
%% QT3 case fn-contains-token-70: case-insensitive match of "Fox" using
%% the HTML ASCII case-insensitive collation; expects true.  Unlike the
%% simpler cases, an (empty) test environment is materialized and its
%% prolog text Env is prepended to the query, with Opts passed to
%% xqerl:run/2.
%% NOTE(review): the collation string below looks truncated — the spec
%% URI is "http://www.w3.org/2005/xpath-functions/collation/..." —
%% verify against the upstream QT3 catalog.
'fn-contains-token-70'(Config) ->
    __BaseDir = ?config(base_dir, Config),
    Qry =
        "contains-token(\"the quick brown fox\", \"Fox\", \"-functions/collation/html-ascii-case-insensitive\")",
    {Env, Opts} = xqerl_test:handle_environment([
        {'decimal-formats', []},
        {sources, []},
        {collections, []},
        {'static-base-uri', []},
        {'context-item', [""]},
        {vars, []},
        {params, []},
        {namespaces, []},
        {schemas, []},
        {resources, []},
        {modules, []}
    ]),
    Qry1 = lists:flatten(Env ++ Qry),
    io:format("Qry1: ~p~n", [Qry1]),
    Res =
        try
            Mod = xqerl_code_server:compile(
                filename:join(__BaseDir, "fn-contains-token-70.xq"),
                Qry1
            ),
            xqerl:run(Mod, Opts)
        of
            D -> D
        catch
            _:E -> E
        end,
    Out =
        case xqerl_test:assert_true(Res) of
            true -> {comment, "Empty"};
            {false, F} -> F
        end,
    case Out of
        {comment, C} -> {comment, C};
        Err -> ct:fail(Err)
    end.
'fn-contains-token-71'(Config) ->
__BaseDir = ?config(base_dir, Config),
Qry =
"contains-token(\"the quick brown fox\", \" QUICK \", \"-functions/collation/html-ascii-case-insensitive\")",
{Env, Opts} = xqerl_test:handle_environment([
{'decimal-formats', []},
{sources, []},
{collections, []},
{'static-base-uri', []},
{'context-item', [""]},
{vars, []},
{params, []},
{namespaces, []},
{schemas, []},
{resources, []},
{modules, []}
]),
Qry1 = lists:flatten(Env ++ Qry),
io:format("Qry1: ~p~n", [Qry1]),
Res =
try
Mod = xqerl_code_server:compile(
filename:join(__BaseDir, "fn-contains-token-71.xq"),
Qry1
),
xqerl:run(Mod, Opts)
of
D -> D
catch
_:E -> E
end,
Out =
case xqerl_test:assert_true(Res) of
true -> {comment, "Empty"};
{false, F} -> F
end,
case Out of
{comment, C} -> {comment, C};
Err -> ct:fail(Err)
end.
'fn-contains-token-72'(Config) ->
__BaseDir = ?config(base_dir, Config),
Qry =
"contains-token(\"the quick brown fox\", \"quiçk\", \"-functions/collation/html-ascii-case-insensitive\")",
{Env, Opts} = xqerl_test:handle_environment([
{'decimal-formats', []},
{sources, []},
{collections, []},
{'static-base-uri', []},
{'context-item', [""]},
{vars, []},
{params, []},
{namespaces, []},
{schemas, []},
{resources, []},
{modules, []}
]),
Qry1 = lists:flatten(Env ++ Qry),
io:format("Qry1: ~p~n", [Qry1]),
Res =
try
Mod = xqerl_code_server:compile(
filename:join(__BaseDir, "fn-contains-token-72.xq"),
Qry1
),
xqerl:run(Mod, Opts)
of
D -> D
catch
_:E -> E
end,
Out =
case xqerl_test:assert_false(Res) of
true -> {comment, "Empty"};
{false, F} -> F
end,
case Out of
{comment, C} -> {comment, C};
Err -> ct:fail(Err)
end.
| null |
https://raw.githubusercontent.com/zadean/xqerl/1a94833e996435495922346010ce918b4b0717f2/test/fn/fn_contains_token_SUITE.erl
|
erlang
|
-module('fn_contains_token_SUITE').
-include_lib("common_test/include/ct.hrl").
-export([
all/0,
groups/0,
suite/0
]).
-export([
init_per_suite/1,
init_per_group/2,
end_per_group/2,
end_per_suite/1
]).
-export(['fn-contains-token-09'/1]).
-export(['fn-contains-token-10'/1]).
-export(['fn-contains-token-11'/1]).
-export(['fn-contains-token-12'/1]).
-export(['fn-contains-token-13'/1]).
-export(['fn-contains-token-14'/1]).
-export(['fn-contains-token-15'/1]).
-export(['fn-contains-token-16'/1]).
-export(['fn-contains-token-17'/1]).
-export(['fn-contains-token-18'/1]).
-export(['fn-contains-token-19'/1]).
-export(['fn-contains-token-20'/1]).
-export(['fn-contains-token-21'/1]).
-export(['fn-contains-token-22'/1]).
-export(['fn-contains-token-39'/1]).
-export(['fn-contains-token-40'/1]).
-export(['fn-contains-token-41'/1]).
-export(['fn-contains-token-42'/1]).
-export(['fn-contains-token-43'/1]).
-export(['fn-contains-token-44'/1]).
-export(['fn-contains-token-45'/1]).
-export(['fn-contains-token-46'/1]).
-export(['fn-contains-token-47'/1]).
-export(['fn-contains-token-48'/1]).
-export(['fn-contains-token-49'/1]).
-export(['fn-contains-token-50'/1]).
-export(['fn-contains-token-51'/1]).
-export(['fn-contains-token-52'/1]).
-export(['fn-contains-token-60'/1]).
-export(['fn-contains-token-61'/1]).
-export(['fn-contains-token-62'/1]).
-export(['fn-contains-token-63'/1]).
-export(['fn-contains-token-64'/1]).
-export(['fn-contains-token-65'/1]).
-export(['fn-contains-token-70'/1]).
-export(['fn-contains-token-71'/1]).
-export(['fn-contains-token-72'/1]).
suite() -> [{timetrap, {seconds, 180}}].
init_per_group(_, Config) -> Config.
end_per_group(_, _Config) ->
xqerl_code_server:unload(all).
end_per_suite(_Config) ->
ct:timetrap({seconds, 60}),
xqerl_code_server:unload(all).
init_per_suite(Config) ->
{ok, _} = application:ensure_all_started(xqerl),
DD = filename:dirname(filename:dirname(filename:dirname(?config(data_dir, Config)))),
TD = filename:join(DD, "QT3-test-suite"),
__BaseDir = filename:join(TD, "fn"),
[{base_dir, __BaseDir} | Config].
all() ->
[
{group, group_0},
{group, group_1}
].
groups() ->
[
{group_0, [parallel], [
'fn-contains-token-09',
'fn-contains-token-10',
'fn-contains-token-11',
'fn-contains-token-12',
'fn-contains-token-13',
'fn-contains-token-14',
'fn-contains-token-15',
'fn-contains-token-16',
'fn-contains-token-17',
'fn-contains-token-18',
'fn-contains-token-19',
'fn-contains-token-20',
'fn-contains-token-21',
'fn-contains-token-22',
'fn-contains-token-39',
'fn-contains-token-40',
'fn-contains-token-41',
'fn-contains-token-42',
'fn-contains-token-43',
'fn-contains-token-44',
'fn-contains-token-45',
'fn-contains-token-46',
'fn-contains-token-47'
]},
{group_1, [parallel], [
'fn-contains-token-48',
'fn-contains-token-49',
'fn-contains-token-50',
'fn-contains-token-51',
'fn-contains-token-52',
'fn-contains-token-60',
'fn-contains-token-61',
'fn-contains-token-62',
'fn-contains-token-63',
'fn-contains-token-64',
'fn-contains-token-65',
'fn-contains-token-70',
'fn-contains-token-71',
'fn-contains-token-72'
]}
].
'fn-contains-token-09'(Config) ->
__BaseDir = ?config(base_dir, Config),
Qry = "contains-token(\"\", \"\")",
Qry1 = Qry,
io:format("Qry1: ~p~n", [Qry1]),
Res =
try
Mod = xqerl_code_server:compile(
filename:join(__BaseDir, "fn-contains-token-09.xq"),
Qry1
),
xqerl:run(Mod)
of
D -> D
catch
_:E -> E
end,
Out =
case xqerl_test:assert_false(Res) of
true -> {comment, "Empty"};
{false, F} -> F
end,
case Out of
{comment, C} -> {comment, C};
Err -> ct:fail(Err)
end.
'fn-contains-token-10'(Config) ->
__BaseDir = ?config(base_dir, Config),
Qry = "contains-token((), \"\")",
Qry1 = Qry,
io:format("Qry1: ~p~n", [Qry1]),
Res =
try
Mod = xqerl_code_server:compile(
filename:join(__BaseDir, "fn-contains-token-10.xq"),
Qry1
),
xqerl:run(Mod)
of
D -> D
catch
_:E -> E
end,
Out =
case xqerl_test:assert_false(Res) of
true -> {comment, "Empty"};
{false, F} -> F
end,
case Out of
{comment, C} -> {comment, C};
Err -> ct:fail(Err)
end.
'fn-contains-token-11'(Config) ->
__BaseDir = ?config(base_dir, Config),
Qry = "contains-token(\" \", \" \")",
Qry1 = Qry,
io:format("Qry1: ~p~n", [Qry1]),
Res =
try
Mod = xqerl_code_server:compile(
filename:join(__BaseDir, "fn-contains-token-11.xq"),
Qry1
),
xqerl:run(Mod)
of
D -> D
catch
_:E -> E
end,
Out =
case xqerl_test:assert_false(Res) of
true -> {comment, "Empty"};
{false, F} -> F
end,
case Out of
{comment, C} -> {comment, C};
Err -> ct:fail(Err)
end.
'fn-contains-token-12'(Config) ->
__BaseDir = ?config(base_dir, Config),
Qry = "contains-token(codepoints-to-string((9, 10, 13, 32, 13, 10, 9)), \"\")",
Qry1 = Qry,
io:format("Qry1: ~p~n", [Qry1]),
Res =
try
Mod = xqerl_code_server:compile(
filename:join(__BaseDir, "fn-contains-token-12.xq"),
Qry1
),
xqerl:run(Mod)
of
D -> D
catch
_:E -> E
end,
Out =
case xqerl_test:assert_false(Res) of
true -> {comment, "Empty"};
{false, F} -> F
end,
case Out of
{comment, C} -> {comment, C};
Err -> ct:fail(Err)
end.
'fn-contains-token-13'(Config) ->
__BaseDir = ?config(base_dir, Config),
Qry = "contains-token(\"abc\", \"abc\")",
Qry1 = Qry,
io:format("Qry1: ~p~n", [Qry1]),
Res =
try
Mod = xqerl_code_server:compile(
filename:join(__BaseDir, "fn-contains-token-13.xq"),
Qry1
),
xqerl:run(Mod)
of
D -> D
catch
_:E -> E
end,
Out =
case xqerl_test:assert_true(Res) of
true -> {comment, "Empty"};
{false, F} -> F
end,
case Out of
{comment, C} -> {comment, C};
Err -> ct:fail(Err)
end.
'fn-contains-token-14'(Config) ->
__BaseDir = ?config(base_dir, Config),
Qry = "contains-token(\" abc \", \"abc\")",
Qry1 = Qry,
io:format("Qry1: ~p~n", [Qry1]),
Res =
try
Mod = xqerl_code_server:compile(
filename:join(__BaseDir, "fn-contains-token-14.xq"),
Qry1
),
xqerl:run(Mod)
of
D -> D
catch
_:E -> E
end,
Out =
case xqerl_test:assert_true(Res) of
true -> {comment, "Empty"};
{false, F} -> F
end,
case Out of
{comment, C} -> {comment, C};
Err -> ct:fail(Err)
end.
'fn-contains-token-15'(Config) ->
__BaseDir = ?config(base_dir, Config),
Qry = "contains-token(codepoints-to-string((9, 10, 97, 98, 99, 13, 32)), \"abc\")",
Qry1 = Qry,
io:format("Qry1: ~p~n", [Qry1]),
Res =
try
Mod = xqerl_code_server:compile(
filename:join(__BaseDir, "fn-contains-token-15.xq"),
Qry1
),
xqerl:run(Mod)
of
D -> D
catch
_:E -> E
end,
Out =
case xqerl_test:assert_true(Res) of
true -> {comment, "Empty"};
{false, F} -> F
end,
case Out of
{comment, C} -> {comment, C};
Err -> ct:fail(Err)
end.
'fn-contains-token-16'(Config) ->
__BaseDir = ?config(base_dir, Config),
Qry = "contains-token(\"abc def\", \"def\")",
Qry1 = Qry,
io:format("Qry1: ~p~n", [Qry1]),
Res =
try
Mod = xqerl_code_server:compile(
filename:join(__BaseDir, "fn-contains-token-16.xq"),
Qry1
),
xqerl:run(Mod)
of
D -> D
catch
_:E -> E
end,
Out =
case xqerl_test:assert_true(Res) of
true -> {comment, "Empty"};
{false, F} -> F
end,
case Out of
{comment, C} -> {comment, C};
Err -> ct:fail(Err)
end.
'fn-contains-token-17'(Config) ->
__BaseDir = ?config(base_dir, Config),
Qry = "contains-token(codepoints-to-string((97, 98, 99, 13, 32, 10, 100, 101, 102)), \"def\")",
Qry1 = Qry,
io:format("Qry1: ~p~n", [Qry1]),
Res =
try
Mod = xqerl_code_server:compile(
filename:join(__BaseDir, "fn-contains-token-17.xq"),
Qry1
),
xqerl:run(Mod)
of
D -> D
catch
_:E -> E
end,
Out =
case xqerl_test:assert_true(Res) of
true -> {comment, "Empty"};
{false, F} -> F
end,
case Out of
{comment, C} -> {comment, C};
Err -> ct:fail(Err)
end.
'fn-contains-token-18'(Config) ->
__BaseDir = ?config(base_dir, Config),
Qry =
"contains-token(codepoints-to-string((9, 9, 97, 98, 99, 13, 32, 10, 100, 101, 102, 10, 10)), \"abc\")",
Qry1 = Qry,
io:format("Qry1: ~p~n", [Qry1]),
Res =
try
Mod = xqerl_code_server:compile(
filename:join(__BaseDir, "fn-contains-token-18.xq"),
Qry1
),
xqerl:run(Mod)
of
D -> D
catch
_:E -> E
end,
Out =
case xqerl_test:assert_true(Res) of
true -> {comment, "Empty"};
{false, F} -> F
end,
case Out of
{comment, C} -> {comment, C};
Err -> ct:fail(Err)
end.
'fn-contains-token-19'(Config) ->
__BaseDir = ?config(base_dir, Config),
Qry = "contains-token(\" the quick brown fox jumped over the lazy dog \", 'fox')",
Qry1 = Qry,
io:format("Qry1: ~p~n", [Qry1]),
Res =
try
Mod = xqerl_code_server:compile(
filename:join(__BaseDir, "fn-contains-token-19.xq"),
Qry1
),
xqerl:run(Mod)
of
D -> D
catch
_:E -> E
end,
Out =
case xqerl_test:assert_true(Res) of
true -> {comment, "Empty"};
{false, F} -> F
end,
case Out of
{comment, C} -> {comment, C};
Err -> ct:fail(Err)
end.
'fn-contains-token-20'(Config) ->
__BaseDir = ?config(base_dir, Config),
{skip, "xml-version:1.1"}.
'fn-contains-token-21'(Config) ->
__BaseDir = ?config(base_dir, Config),
Qry = "contains-token(codepoints-to-string((97, 98, 99, 160, 100, 101, 102)), \"abc\")",
Qry1 = Qry,
io:format("Qry1: ~p~n", [Qry1]),
Res =
try
Mod = xqerl_code_server:compile(
filename:join(__BaseDir, "fn-contains-token-21.xq"),
Qry1
),
xqerl:run(Mod)
of
D -> D
catch
_:E -> E
end,
Out =
case xqerl_test:assert_false(Res) of
true -> {comment, "Empty"};
{false, F} -> F
end,
case Out of
{comment, C} -> {comment, C};
Err -> ct:fail(Err)
end.
'fn-contains-token-22'(Config) ->
__BaseDir = ?config(base_dir, Config),
Qry = "(contains-token#2, starts-with#2, ends-with#2)!.(\"abc def\", \"def\")",
Qry1 = Qry,
io:format("Qry1: ~p~n", [Qry1]),
Res =
try
Mod = xqerl_code_server:compile(
filename:join(__BaseDir, "fn-contains-token-22.xq"),
Qry1
),
xqerl:run(Mod)
of
D -> D
catch
_:E -> E
end,
Out =
case xqerl_test:assert_deep_eq(Res, "true(), false(), true()") of
true -> {comment, "Deep equal"};
{false, F} -> F
end,
case Out of
{comment, C} -> {comment, C};
Err -> ct:fail(Err)
end.
'fn-contains-token-39'(Config) ->
__BaseDir = ?config(base_dir, Config),
Qry = "contains-token(\"\", \"zz\")",
Qry1 = Qry,
io:format("Qry1: ~p~n", [Qry1]),
Res =
try
Mod = xqerl_code_server:compile(
filename:join(__BaseDir, "fn-contains-token-39.xq"),
Qry1
),
xqerl:run(Mod)
of
D -> D
catch
_:E -> E
end,
Out =
case xqerl_test:assert_false(Res) of
true -> {comment, "Empty"};
{false, F} -> F
end,
case Out of
{comment, C} -> {comment, C};
Err -> ct:fail(Err)
end.
'fn-contains-token-40'(Config) ->
__BaseDir = ?config(base_dir, Config),
Qry = "contains-token((), \"zz\")",
Qry1 = Qry,
io:format("Qry1: ~p~n", [Qry1]),
Res =
try
Mod = xqerl_code_server:compile(
filename:join(__BaseDir, "fn-contains-token-40.xq"),
Qry1
),
xqerl:run(Mod)
of
D -> D
catch
_:E -> E
end,
Out =
case xqerl_test:assert_false(Res) of
true -> {comment, "Empty"};
{false, F} -> F
end,
case Out of
{comment, C} -> {comment, C};
Err -> ct:fail(Err)
end.
'fn-contains-token-41'(Config) ->
__BaseDir = ?config(base_dir, Config),
Qry = "contains-token(\" \", \"zz\")",
Qry1 = Qry,
io:format("Qry1: ~p~n", [Qry1]),
Res =
try
Mod = xqerl_code_server:compile(
filename:join(__BaseDir, "fn-contains-token-41.xq"),
Qry1
),
xqerl:run(Mod)
of
D -> D
catch
_:E -> E
end,
Out =
case xqerl_test:assert_false(Res) of
true -> {comment, "Empty"};
{false, F} -> F
end,
case Out of
{comment, C} -> {comment, C};
Err -> ct:fail(Err)
end.
'fn-contains-token-42'(Config) ->
__BaseDir = ?config(base_dir, Config),
Qry = "contains-token(codepoints-to-string((9, 10, 13, 32, 13, 10, 9)), \"zz\")",
Qry1 = Qry,
io:format("Qry1: ~p~n", [Qry1]),
Res =
try
Mod = xqerl_code_server:compile(
filename:join(__BaseDir, "fn-contains-token-42.xq"),
Qry1
),
xqerl:run(Mod)
of
D -> D
catch
_:E -> E
end,
Out =
case xqerl_test:assert_false(Res) of
true -> {comment, "Empty"};
{false, F} -> F
end,
case Out of
{comment, C} -> {comment, C};
Err -> ct:fail(Err)
end.
'fn-contains-token-43'(Config) ->
__BaseDir = ?config(base_dir, Config),
Qry = "contains-token(\"abc\", \"zz\")",
Qry1 = Qry,
io:format("Qry1: ~p~n", [Qry1]),
Res =
try
Mod = xqerl_code_server:compile(
filename:join(__BaseDir, "fn-contains-token-43.xq"),
Qry1
),
xqerl:run(Mod)
of
D -> D
catch
_:E -> E
end,
Out =
case xqerl_test:assert_false(Res) of
true -> {comment, "Empty"};
{false, F} -> F
end,
case Out of
{comment, C} -> {comment, C};
Err -> ct:fail(Err)
end.
'fn-contains-token-44'(Config) ->
__BaseDir = ?config(base_dir, Config),
Qry = "contains-token(\" abc \", \"zz\")",
Qry1 = Qry,
io:format("Qry1: ~p~n", [Qry1]),
Res =
try
Mod = xqerl_code_server:compile(
filename:join(__BaseDir, "fn-contains-token-44.xq"),
Qry1
),
xqerl:run(Mod)
of
D -> D
catch
_:E -> E
end,
Out =
case xqerl_test:assert_false(Res) of
true -> {comment, "Empty"};
{false, F} -> F
end,
case Out of
{comment, C} -> {comment, C};
Err -> ct:fail(Err)
end.
'fn-contains-token-45'(Config) ->
__BaseDir = ?config(base_dir, Config),
Qry = "contains-token(codepoints-to-string((9, 10, 97, 98, 99, 13, 32)), \"zz\")",
Qry1 = Qry,
io:format("Qry1: ~p~n", [Qry1]),
Res =
try
Mod = xqerl_code_server:compile(
filename:join(__BaseDir, "fn-contains-token-45.xq"),
Qry1
),
xqerl:run(Mod)
of
D -> D
catch
_:E -> E
end,
Out =
case xqerl_test:assert_false(Res) of
true -> {comment, "Empty"};
{false, F} -> F
end,
case Out of
{comment, C} -> {comment, C};
Err -> ct:fail(Err)
end.
'fn-contains-token-46'(Config) ->
__BaseDir = ?config(base_dir, Config),
Qry = "contains-token(\"abc def\", \"zz\")",
Qry1 = Qry,
io:format("Qry1: ~p~n", [Qry1]),
Res =
try
Mod = xqerl_code_server:compile(
filename:join(__BaseDir, "fn-contains-token-46.xq"),
Qry1
),
xqerl:run(Mod)
of
D -> D
catch
_:E -> E
end,
Out =
case xqerl_test:assert_false(Res) of
true -> {comment, "Empty"};
{false, F} -> F
end,
case Out of
{comment, C} -> {comment, C};
Err -> ct:fail(Err)
end.
'fn-contains-token-47'(Config) ->
__BaseDir = ?config(base_dir, Config),
Qry = "contains-token(codepoints-to-string((97, 98, 99, 13, 32, 10, 100, 101, 102)), \"zz\")",
Qry1 = Qry,
io:format("Qry1: ~p~n", [Qry1]),
Res =
try
Mod = xqerl_code_server:compile(
filename:join(__BaseDir, "fn-contains-token-47.xq"),
Qry1
),
xqerl:run(Mod)
of
D -> D
catch
_:E -> E
end,
Out =
case xqerl_test:assert_false(Res) of
true -> {comment, "Empty"};
{false, F} -> F
end,
case Out of
{comment, C} -> {comment, C};
Err -> ct:fail(Err)
end.
'fn-contains-token-48'(Config) ->
__BaseDir = ?config(base_dir, Config),
Qry =
"contains-token(codepoints-to-string((9, 9, 97, 98, 99, 13, 32, 10, 100, 101, 102, 10, 10)), \"zz\")",
Qry1 = Qry,
io:format("Qry1: ~p~n", [Qry1]),
Res =
try
Mod = xqerl_code_server:compile(
filename:join(__BaseDir, "fn-contains-token-48.xq"),
Qry1
),
xqerl:run(Mod)
of
D -> D
catch
_:E -> E
end,
Out =
case xqerl_test:assert_false(Res) of
true -> {comment, "Empty"};
{false, F} -> F
end,
case Out of
{comment, C} -> {comment, C};
Err -> ct:fail(Err)
end.
'fn-contains-token-49'(Config) ->
__BaseDir = ?config(base_dir, Config),
Qry = "contains-token(\" the quick brown fox jumped over the lazy dog \", 'zz')",
Qry1 = Qry,
io:format("Qry1: ~p~n", [Qry1]),
Res =
try
Mod = xqerl_code_server:compile(
filename:join(__BaseDir, "fn-contains-token-49.xq"),
Qry1
),
xqerl:run(Mod)
of
D -> D
catch
_:E -> E
end,
Out =
case xqerl_test:assert_false(Res) of
true -> {comment, "Empty"};
{false, F} -> F
end,
case Out of
{comment, C} -> {comment, C};
Err -> ct:fail(Err)
end.
'fn-contains-token-50'(Config) ->
__BaseDir = ?config(base_dir, Config),
{skip, "xml-version:1.1"}.
'fn-contains-token-51'(Config) ->
__BaseDir = ?config(base_dir, Config),
Qry = "contains-token(codepoints-to-string((97, 98, 99, 160, 100, 101, 102)), \"abc\")",
Qry1 = Qry,
io:format("Qry1: ~p~n", [Qry1]),
Res =
try
Mod = xqerl_code_server:compile(
filename:join(__BaseDir, "fn-contains-token-51.xq"),
Qry1
),
xqerl:run(Mod)
of
D -> D
catch
_:E -> E
end,
Out =
case xqerl_test:assert_false(Res) of
true -> {comment, "Empty"};
{false, F} -> F
end,
case Out of
{comment, C} -> {comment, C};
Err -> ct:fail(Err)
end.
'fn-contains-token-52'(Config) ->
__BaseDir = ?config(base_dir, Config),
Qry = "(contains-token#2, substring-before#2, substring-after#2)[1](\"abc def\", \"zz\")",
Qry1 = Qry,
io:format("Qry1: ~p~n", [Qry1]),
Res =
try
Mod = xqerl_code_server:compile(
filename:join(__BaseDir, "fn-contains-token-52.xq"),
Qry1
),
xqerl:run(Mod)
of
D -> D
catch
_:E -> E
end,
Out =
case xqerl_test:assert_false(Res) of
true -> {comment, "Empty"};
{false, F} -> F
end,
case Out of
{comment, C} -> {comment, C};
Err -> ct:fail(Err)
end.
'fn-contains-token-60'(Config) ->
__BaseDir = ?config(base_dir, Config),
Qry = "contains-token((\"abc\", \"def\"), \"\")",
Qry1 = Qry,
io:format("Qry1: ~p~n", [Qry1]),
Res =
try
Mod = xqerl_code_server:compile(
filename:join(__BaseDir, "fn-contains-token-60.xq"),
Qry1
),
xqerl:run(Mod)
of
D -> D
catch
_:E -> E
end,
Out =
case xqerl_test:assert_false(Res) of
true -> {comment, "Empty"};
{false, F} -> F
end,
case Out of
{comment, C} -> {comment, C};
Err -> ct:fail(Err)
end.
'fn-contains-token-61'(Config) ->
__BaseDir = ?config(base_dir, Config),
Qry = "contains-token((\"abc\", \"def\"), \" abc \")",
Qry1 = Qry,
io:format("Qry1: ~p~n", [Qry1]),
Res =
try
Mod = xqerl_code_server:compile(
filename:join(__BaseDir, "fn-contains-token-61.xq"),
Qry1
),
xqerl:run(Mod)
of
D -> D
catch
_:E -> E
end,
Out =
case xqerl_test:assert_true(Res) of
true -> {comment, "Empty"};
{false, F} -> F
end,
case Out of
{comment, C} -> {comment, C};
Err -> ct:fail(Err)
end.
'fn-contains-token-62'(Config) ->
__BaseDir = ?config(base_dir, Config),
Qry =
"contains-token((\"abc\", \"def\"), codepoints-to-string((9, 10, 13, 97, 98, 99, 32, 13, 10, 9)))",
Qry1 = Qry,
io:format("Qry1: ~p~n", [Qry1]),
Res =
try
Mod = xqerl_code_server:compile(
filename:join(__BaseDir, "fn-contains-token-62.xq"),
Qry1
),
xqerl:run(Mod)
of
D -> D
catch
_:E -> E
end,
Out =
case xqerl_test:assert_true(Res) of
true -> {comment, "Empty"};
{false, F} -> F
end,
case Out of
{comment, C} -> {comment, C};
Err -> ct:fail(Err)
end.
'fn-contains-token-63'(Config) ->
__BaseDir = ?config(base_dir, Config),
Qry = "contains-token((\"abc def\", \"ghi\"), \"abc\")",
Qry1 = Qry,
io:format("Qry1: ~p~n", [Qry1]),
Res =
try
Mod = xqerl_code_server:compile(
filename:join(__BaseDir, "fn-contains-token-63.xq"),
Qry1
),
xqerl:run(Mod)
of
D -> D
catch
_:E -> E
end,
Out =
case xqerl_test:assert_true(Res) of
true -> {comment, "Empty"};
{false, F} -> F
end,
case Out of
{comment, C} -> {comment, C};
Err -> ct:fail(Err)
end.
'fn-contains-token-64'(Config) ->
__BaseDir = ?config(base_dir, Config),
Qry = "contains-token((\" abc def \", \"ghi\"), \"def\")",
Qry1 = Qry,
io:format("Qry1: ~p~n", [Qry1]),
Res =
try
Mod = xqerl_code_server:compile(
filename:join(__BaseDir, "fn-contains-token-64.xq"),
Qry1
),
xqerl:run(Mod)
of
D -> D
catch
_:E -> E
end,
Out =
case xqerl_test:assert_true(Res) of
true -> {comment, "Empty"};
{false, F} -> F
end,
case Out of
{comment, C} -> {comment, C};
Err -> ct:fail(Err)
end.
'fn-contains-token-65'(Config) ->
__BaseDir = ?config(base_dir, Config),
Qry = "contains-token(codepoints-to-string((9, 10, 97, 98, 99, 13, 32)), \"abc\")",
Qry1 = Qry,
io:format("Qry1: ~p~n", [Qry1]),
Res =
try
Mod = xqerl_code_server:compile(
filename:join(__BaseDir, "fn-contains-token-65.xq"),
Qry1
),
xqerl:run(Mod)
of
D -> D
catch
_:E -> E
end,
Out =
case xqerl_test:assert_true(Res) of
true -> {comment, "Empty"};
{false, F} -> F
end,
case Out of
{comment, C} -> {comment, C};
Err -> ct:fail(Err)
end.
'fn-contains-token-70'(Config) ->
__BaseDir = ?config(base_dir, Config),
Qry =
"contains-token(\"the quick brown fox\", \"Fox\", \"-functions/collation/html-ascii-case-insensitive\")",
{Env, Opts} = xqerl_test:handle_environment([
{'decimal-formats', []},
{sources, []},
{collections, []},
{'static-base-uri', []},
{'context-item', [""]},
{vars, []},
{params, []},
{namespaces, []},
{schemas, []},
{resources, []},
{modules, []}
]),
Qry1 = lists:flatten(Env ++ Qry),
io:format("Qry1: ~p~n", [Qry1]),
Res =
try
Mod = xqerl_code_server:compile(
filename:join(__BaseDir, "fn-contains-token-70.xq"),
Qry1
),
xqerl:run(Mod, Opts)
of
D -> D
catch
_:E -> E
end,
Out =
case xqerl_test:assert_true(Res) of
true -> {comment, "Empty"};
{false, F} -> F
end,
case Out of
{comment, C} -> {comment, C};
Err -> ct:fail(Err)
end.
'fn-contains-token-71'(Config) ->
__BaseDir = ?config(base_dir, Config),
Qry =
"contains-token(\"the quick brown fox\", \" QUICK \", \"-functions/collation/html-ascii-case-insensitive\")",
{Env, Opts} = xqerl_test:handle_environment([
{'decimal-formats', []},
{sources, []},
{collections, []},
{'static-base-uri', []},
{'context-item', [""]},
{vars, []},
{params, []},
{namespaces, []},
{schemas, []},
{resources, []},
{modules, []}
]),
Qry1 = lists:flatten(Env ++ Qry),
io:format("Qry1: ~p~n", [Qry1]),
Res =
try
Mod = xqerl_code_server:compile(
filename:join(__BaseDir, "fn-contains-token-71.xq"),
Qry1
),
xqerl:run(Mod, Opts)
of
D -> D
catch
_:E -> E
end,
Out =
case xqerl_test:assert_true(Res) of
true -> {comment, "Empty"};
{false, F} -> F
end,
case Out of
{comment, C} -> {comment, C};
Err -> ct:fail(Err)
end.
'fn-contains-token-72'(Config) ->
__BaseDir = ?config(base_dir, Config),
Qry =
"contains-token(\"the quick brown fox\", \"quiçk\", \"-functions/collation/html-ascii-case-insensitive\")",
{Env, Opts} = xqerl_test:handle_environment([
{'decimal-formats', []},
{sources, []},
{collections, []},
{'static-base-uri', []},
{'context-item', [""]},
{vars, []},
{params, []},
{namespaces, []},
{schemas, []},
{resources, []},
{modules, []}
]),
Qry1 = lists:flatten(Env ++ Qry),
io:format("Qry1: ~p~n", [Qry1]),
Res =
try
Mod = xqerl_code_server:compile(
filename:join(__BaseDir, "fn-contains-token-72.xq"),
Qry1
),
xqerl:run(Mod, Opts)
of
D -> D
catch
_:E -> E
end,
Out =
case xqerl_test:assert_false(Res) of
true -> {comment, "Empty"};
{false, F} -> F
end,
case Out of
{comment, C} -> {comment, C};
Err -> ct:fail(Err)
end.
|
|
ada74993739b629ce993f2174c84e89ce4984ebccea21ccc238aaf3358ca758a
|
VincentToups/racket-lib
|
command-respond-window.rkt
|
#lang racket
(require racket/gui
functional/better-monads)
(define command-input%
(class text%
(init-field [command-callback (lambda (command) #t)])
(field (command #f))
(define/public (get-command)
(if command
(let ((c command))
(set! command #f)
c)
#f))
(define/public (text-substring start len)
(let loop ((acc '())
(i 0))
(if (< i len) (loop (cons (send this get-character (+ start i)) acc) (+ i 1))
(list->string (reverse acc)))))
(define/augment (after-insert start len)
(let ((string (send this text-substring start len)))
(if (string-contains-newline? string)
(let ((a-command (send this text-substring 2 (+ start len))))
(command-callback a-command)
(set! command a-command)
(send this erase)
(send this insert "> "))
#f)))
(super-new)))
(define (string-contains-newline? str)
(let loop
((chars (string->list str)))
(cond
((empty? chars) #f)
((equal? (first chars) #\newline) #t)
(#t (loop (rest chars))))))
(define (make-command-respond-window label command-handler)
(let* [(frame (new frame% [width 640]
[height 480]
[label label]))
(pane (new vertical-pane%
[min-height 480]
[min-width 480]
[parent frame]))
(output-text (new text%))
(output-editor (new editor-canvas%
[parent pane]
[style (list 'no-hscroll)]
[editor output-text]
[label "Output:"]
[min-width 640]
[min-height 480]))
(input-text (new command-input% [command-callback command-handler]))
(command-region (new editor-canvas%
[parent pane]
[style (list 'no-hscroll)]
[editor input-text]
[min-width 640]
[min-height 40]
[stretchable-height 40]
))
(interaction
(match-lambda
['command
(let loop ((command (send input-text get-command)))
(yield)
(if command command (loop (send input-text get-command))))
]
['clear (send output-text erase)]
[(? string? x) (send output-text insert x)]))]
(send frame show #t)
(send input-text insert "> ")
(send output-text insert "Output: ")
interaction))
(define (io-bind command future)
(lambda (io)
(let ((value (command io)))
((future value) io))))
(define (io-return item)
(lambda (io)
item))
(define (io-plus i1 i2)
(lambda (io) (i1 io) (i2 io)))
(define (read-command io)
(io 'command))
(define (write-output text)
(lambda (io)
(io text)
#f))
(define (clear io) (io 'clear))
(define io-monad (monad io-bind io-return io-plus #f))
(define (do-command-respond-window command)
(let ((io (make-command-respond-window "" (lambda (x) x))))
(command io)))
;(define interaction (make-command-respond-window "test" (lambda (x) x) ))
(provide make-command-respond-window do-command-respond-window); interaction)
| null |
https://raw.githubusercontent.com/VincentToups/racket-lib/d8aed0959fd148615b000ceecd7b8a6128cfcfa8/command-respond-window/command-respond-window.rkt
|
racket
|
(define interaction (make-command-respond-window "test" (lambda (x) x) ))
interaction)
|
#lang racket
(require racket/gui
functional/better-monads)
(define command-input%
(class text%
(init-field [command-callback (lambda (command) #t)])
(field (command #f))
(define/public (get-command)
(if command
(let ((c command))
(set! command #f)
c)
#f))
(define/public (text-substring start len)
(let loop ((acc '())
(i 0))
(if (< i len) (loop (cons (send this get-character (+ start i)) acc) (+ i 1))
(list->string (reverse acc)))))
(define/augment (after-insert start len)
(let ((string (send this text-substring start len)))
(if (string-contains-newline? string)
(let ((a-command (send this text-substring 2 (+ start len))))
(command-callback a-command)
(set! command a-command)
(send this erase)
(send this insert "> "))
#f)))
(super-new)))
(define (string-contains-newline? str)
(let loop
((chars (string->list str)))
(cond
((empty? chars) #f)
((equal? (first chars) #\newline) #t)
(#t (loop (rest chars))))))
(define (make-command-respond-window label command-handler)
(let* [(frame (new frame% [width 640]
[height 480]
[label label]))
(pane (new vertical-pane%
[min-height 480]
[min-width 480]
[parent frame]))
(output-text (new text%))
(output-editor (new editor-canvas%
[parent pane]
[style (list 'no-hscroll)]
[editor output-text]
[label "Output:"]
[min-width 640]
[min-height 480]))
(input-text (new command-input% [command-callback command-handler]))
(command-region (new editor-canvas%
[parent pane]
[style (list 'no-hscroll)]
[editor input-text]
[min-width 640]
[min-height 40]
[stretchable-height 40]
))
(interaction
(match-lambda
['command
(let loop ((command (send input-text get-command)))
(yield)
(if command command (loop (send input-text get-command))))
]
['clear (send output-text erase)]
[(? string? x) (send output-text insert x)]))]
(send frame show #t)
(send input-text insert "> ")
(send output-text insert "Output: ")
interaction))
(define (io-bind command future)
(lambda (io)
(let ((value (command io)))
((future value) io))))
(define (io-return item)
(lambda (io)
item))
(define (io-plus i1 i2)
(lambda (io) (i1 io) (i2 io)))
(define (read-command io)
(io 'command))
(define (write-output text)
(lambda (io)
(io text)
#f))
(define (clear io) (io 'clear))
(define io-monad (monad io-bind io-return io-plus #f))
(define (do-command-respond-window command)
(let ((io (make-command-respond-window "" (lambda (x) x))))
(command io)))
|
9357d5780dbdb898b4273d7cde9703eeb5dadfb7bfb6a31f4c172ff80e3e143d
|
PEZ/clerk
|
project.clj
|
(defproject routing-example "0.1.0-SNAPSHOT"
:description "Client side routing with bidi, accountant and clerk"
:url "-example"
:min-lein-version "2.5.3"
:dependencies [[org.clojure/clojure "1.9.0"]
[org.clojure/clojurescript "1.10.339"]
#_[org.clojure/core.async "0.3.443"
:exclusions [org.clojure/tools.reader]]
[org.clojure/core.async "0.4.474"]
[reagent "0.8.1"]
[reagent-utils "0.3.1"]
#_[pez/clerk "1.0.0-SNAPSHOT"] ;; this demo uses :source-paths instead since it lives in the clerk repo
[bidi "2.1.4"]
[venantius/accountant "0.2.4"]]
:plugins [[lein-figwheel "0.5.18"]
[lein-cljsbuild "1.1.7" :exclusions [[org.clojure/clojure]]]]
:source-paths ["src" "../../src"]
:clean-targets ^{:protect false} ["resources/public/js/compiled" "target"]
:profiles {:dev
{:source-paths ["dev"]
:dependencies [[prismatic/schema "1.1.7"]]}}
:cljsbuild {:builds
{:dev
{:source-paths ["src"]
:figwheel {:on-jsload "routing-example.core/on-js-reload"
:websocket-host :js-client-host}
:compiler {:main routing-example.core
:asset-path "/js/compiled/out"
:output-to "resources/public/js/compiled/routing_example.js"
:output-dir "resources/public/js/compiled/out"
:source-map-timestamp true}}
;; This next build is an compressed minified build for
;; production. You can build this with:
once min
:min
{:source-paths ["src"]
:compiler {:output-to "resources/public/js/compiled/routing_example.js"
:main routing-example.core
:optimizations :advanced
:pretty-print false}}}}
:figwheel {:http-server-root "public"
:server-port 4449
:server-ip "0.0.0.0"
:css-dirs ["resources/public/css"]
:ring-handler routing-example.server/handler}
:repl-options {:init-ns routing-example.user
:skip-default-init false
:nrepl-middleware [cider.piggieback/wrap-cljs-repl]})
| null |
https://raw.githubusercontent.com/PEZ/clerk/2ba48a73b8eda388979cad2ff7aa25f958005095/demo/reagent/project.clj
|
clojure
|
this demo uses :source-paths instead since it lives in the clerk repo
This next build is an compressed minified build for
production. You can build this with:
|
(defproject routing-example "0.1.0-SNAPSHOT"
  :description "Client side routing with bidi, accountant and clerk"
  :url "-example"
  :min-lein-version "2.5.3"
  :dependencies [[org.clojure/clojure "1.9.0"]
                 [org.clojure/clojurescript "1.10.339"]
                 #_[org.clojure/core.async "0.3.443"
                    :exclusions [org.clojure/tools.reader]]
                 [org.clojure/core.async "0.4.474"]
                 [reagent "0.8.1"]
                 [reagent-utils "0.3.1"]
                 [bidi "2.1.4"]
                 [venantius/accountant "0.2.4"]]
  :plugins [[lein-figwheel "0.5.18"]
            [lein-cljsbuild "1.1.7" :exclusions [[org.clojure/clojure]]]]
  ;; "../../src" pulls in the clerk sources directly, since this demo
  ;; lives inside the clerk repo instead of depending on an artifact.
  :source-paths ["src" "../../src"]
  :clean-targets ^{:protect false} ["resources/public/js/compiled" "target"]
  :profiles {:dev
             {:source-paths ["dev"]
              :dependencies [[prismatic/schema "1.1.7"]]}}
  :cljsbuild {:builds
              {:dev
               {:source-paths ["src"]
                :figwheel {:on-jsload "routing-example.core/on-js-reload"
                           :websocket-host :js-client-host}
                :compiler {:main routing-example.core
                           :asset-path "/js/compiled/out"
                           :output-to "resources/public/js/compiled/routing_example.js"
                           :output-dir "resources/public/js/compiled/out"
                           :source-map-timestamp true}}
               ;; Compressed, minified build for production.  Build with:
               ;;   lein cljsbuild once min
               ;; NOTE(review): the words "once min" previously appeared here as
               ;; bare symbols (a stripped comment), adding a bogus once->min
               ;; entry to the :builds map; restored as a comment.
               :min
               {:source-paths ["src"]
                :compiler {:output-to "resources/public/js/compiled/routing_example.js"
                           :main routing-example.core
                           :optimizations :advanced
                           :pretty-print false}}}}
  :figwheel {:http-server-root "public"
             :server-port 4449
             :server-ip "0.0.0.0"
             :css-dirs ["resources/public/css"]
             :ring-handler routing-example.server/handler}
  :repl-options {:init-ns routing-example.user
                 :skip-default-init false
                 :nrepl-middleware [cider.piggieback/wrap-cljs-repl]})
e5e46bc6d0ab4c1330f4f3f919eb2a53bcd677fdf53859c0829d97e2fdee98b0
|
janestreet/core_unix
|
bench_zone.ml
|
open! Core
module Time = Time_float_unix

(* Benchmark fixtures.  Three instants are exercised per zone: the Unix
   epoch, a winter timestamp and a summer timestamp (so zones that observe
   DST are hit on both sides of a shift). *)
let epoch = Time.epoch
let winter = Time.of_string "2000-01-01 06:00:00Z"
let summer = Time.of_string "2020-08-20 18:00:00Z"

(* Zones under test, looked up by tz-database name, plus plain UTC. *)
let hkg = Time.Zone.find_exn "Asia/Hong_Kong"
let ldn = Time.Zone.find_exn "Europe/London"
let nyc = Time.Zone.find_exn "America/New_York"
let utc = Time.Zone.utc
let next_clock_shift zone time = Time.Zone.next_clock_shift zone ~strictly_after:time
let%bench "next_clock_shift hkg epoch" = next_clock_shift hkg epoch
let%bench "next_clock_shift hkg winter" = next_clock_shift hkg winter
let%bench "next_clock_shift hkg summer" = next_clock_shift hkg summer
let%bench "next_clock_shift ldn epoch" = next_clock_shift ldn epoch
let%bench "next_clock_shift ldn winter" = next_clock_shift ldn winter
let%bench "next_clock_shift ldn summer" = next_clock_shift ldn summer
let%bench "next_clock_shift nyc epoch" = next_clock_shift nyc epoch
let%bench "next_clock_shift nyc winter" = next_clock_shift nyc winter
let%bench "next_clock_shift nyc summer" = next_clock_shift nyc summer
let%bench "next_clock_shift utc epoch" = next_clock_shift utc epoch
let%bench "next_clock_shift utc winter" = next_clock_shift utc winter
let%bench "next_clock_shift utc summer" = next_clock_shift utc summer
let prev_clock_shift zone time = Time.Zone.prev_clock_shift zone ~at_or_before:time
let%bench "prev_clock_shift hkg epoch" = prev_clock_shift hkg epoch
let%bench "prev_clock_shift hkg winter" = prev_clock_shift hkg winter
let%bench "prev_clock_shift hkg summer" = prev_clock_shift hkg summer
let%bench "prev_clock_shift ldn epoch" = prev_clock_shift ldn epoch
let%bench "prev_clock_shift ldn winter" = prev_clock_shift ldn winter
let%bench "prev_clock_shift ldn summer" = prev_clock_shift ldn summer
let%bench "prev_clock_shift nyc epoch" = prev_clock_shift nyc epoch
let%bench "prev_clock_shift nyc winter" = prev_clock_shift nyc winter
let%bench "prev_clock_shift nyc summer" = prev_clock_shift nyc summer
let%bench "prev_clock_shift utc epoch" = prev_clock_shift utc epoch
let%bench "prev_clock_shift utc winter" = prev_clock_shift utc winter
let%bench "prev_clock_shift utc summer" = prev_clock_shift utc summer
let to_date_ofday zone time = Time.to_date_ofday ~zone time
let%bench "to_date_ofday hkg epoch" = to_date_ofday hkg epoch
let%bench "to_date_ofday hkg winter" = to_date_ofday hkg winter
let%bench "to_date_ofday hkg summer" = to_date_ofday hkg summer
let%bench "to_date_ofday ldn epoch" = to_date_ofday ldn epoch
let%bench "to_date_ofday ldn winter" = to_date_ofday ldn winter
let%bench "to_date_ofday ldn summer" = to_date_ofday ldn summer
let%bench "to_date_ofday nyc epoch" = to_date_ofday nyc epoch
let%bench "to_date_ofday nyc winter" = to_date_ofday nyc winter
let%bench "to_date_ofday nyc summer" = to_date_ofday nyc summer
let%bench "to_date_ofday utc epoch" = to_date_ofday utc epoch
let%bench "to_date_ofday utc winter" = to_date_ofday utc winter
let%bench "to_date_ofday utc summer" = to_date_ofday utc summer
(* Round-trip helper: precompute the [date] and [ofday] of [time] in [zone]
   outside the benchmarked thunk, so the %bench_fun below measures only
   [Time.of_date_ofday] itself. *)
let of_date_ofday zone time =
  let date = Time.to_date ~zone time in
  let ofday = Time.to_ofday ~zone time in
  fun () -> Time.of_date_ofday ~zone date ofday
;;
let%bench_fun "of_date_ofday hkg epoch" = of_date_ofday hkg epoch
let%bench_fun "of_date_ofday hkg winter" = of_date_ofday hkg winter
let%bench_fun "of_date_ofday hkg summer" = of_date_ofday hkg summer
let%bench_fun "of_date_ofday ldn epoch" = of_date_ofday ldn epoch
let%bench_fun "of_date_ofday ldn winter" = of_date_ofday ldn winter
let%bench_fun "of_date_ofday ldn summer" = of_date_ofday ldn summer
let%bench_fun "of_date_ofday nyc epoch" = of_date_ofday nyc epoch
let%bench_fun "of_date_ofday nyc winter" = of_date_ofday nyc winter
let%bench_fun "of_date_ofday nyc summer" = of_date_ofday nyc summer
let%bench_fun "of_date_ofday utc epoch" = of_date_ofday utc epoch
let%bench_fun "of_date_ofday utc winter" = of_date_ofday utc winter
let%bench_fun "of_date_ofday utc summer" = of_date_ofday utc summer
(* Clear the global date cache and [zone]'s transition cache, so the
   "reset + ..." benchmarks below measure cold-cache performance. *)
let reset_caches zone =
  Time.reset_date_cache ();
  Time.Zone.reset_transition_cache zone
;;
(* Cold-cache variant of [next_clock_shift]: caches are reset on every call. *)
let reset_and_next_clock_shift zone time =
  reset_caches zone;
  Time.Zone.next_clock_shift zone ~strictly_after:time
;;
let%bench "reset + next_clock_shift hkg epoch" = reset_and_next_clock_shift hkg epoch
let%bench "reset + next_clock_shift hkg winter" = reset_and_next_clock_shift hkg winter
let%bench "reset + next_clock_shift hkg summer" = reset_and_next_clock_shift hkg summer
let%bench "reset + next_clock_shift ldn epoch" = reset_and_next_clock_shift ldn epoch
let%bench "reset + next_clock_shift ldn winter" = reset_and_next_clock_shift ldn winter
let%bench "reset + next_clock_shift ldn summer" = reset_and_next_clock_shift ldn summer
let%bench "reset + next_clock_shift nyc epoch" = reset_and_next_clock_shift nyc epoch
let%bench "reset + next_clock_shift nyc winter" = reset_and_next_clock_shift nyc winter
let%bench "reset + next_clock_shift nyc summer" = reset_and_next_clock_shift nyc summer
let%bench "reset + next_clock_shift utc epoch" = reset_and_next_clock_shift utc epoch
let%bench "reset + next_clock_shift utc winter" = reset_and_next_clock_shift utc winter
let%bench "reset + next_clock_shift utc summer" = reset_and_next_clock_shift utc summer
(* Cold-cache variant of [prev_clock_shift]: caches are reset on every call. *)
let reset_and_prev_clock_shift zone time =
  reset_caches zone;
  Time.Zone.prev_clock_shift zone ~at_or_before:time
;;
let%bench "reset + prev_clock_shift hkg epoch" = reset_and_prev_clock_shift hkg epoch
let%bench "reset + prev_clock_shift hkg winter" = reset_and_prev_clock_shift hkg winter
let%bench "reset + prev_clock_shift hkg summer" = reset_and_prev_clock_shift hkg summer
let%bench "reset + prev_clock_shift ldn epoch" = reset_and_prev_clock_shift ldn epoch
let%bench "reset + prev_clock_shift ldn winter" = reset_and_prev_clock_shift ldn winter
let%bench "reset + prev_clock_shift ldn summer" = reset_and_prev_clock_shift ldn summer
let%bench "reset + prev_clock_shift nyc epoch" = reset_and_prev_clock_shift nyc epoch
let%bench "reset + prev_clock_shift nyc winter" = reset_and_prev_clock_shift nyc winter
let%bench "reset + prev_clock_shift nyc summer" = reset_and_prev_clock_shift nyc summer
let%bench "reset + prev_clock_shift utc epoch" = reset_and_prev_clock_shift utc epoch
let%bench "reset + prev_clock_shift utc winter" = reset_and_prev_clock_shift utc winter
let%bench "reset + prev_clock_shift utc summer" = reset_and_prev_clock_shift utc summer
(* Cold-cache variant of [to_date_ofday]: caches are reset on every call. *)
let reset_and_to_date_ofday zone time =
  reset_caches zone;
  Time.to_date_ofday ~zone time
;;
let%bench "reset + to_date_ofday hkg epoch" = reset_and_to_date_ofday hkg epoch
let%bench "reset + to_date_ofday hkg winter" = reset_and_to_date_ofday hkg winter
let%bench "reset + to_date_ofday hkg summer" = reset_and_to_date_ofday hkg summer
let%bench "reset + to_date_ofday ldn epoch" = reset_and_to_date_ofday ldn epoch
let%bench "reset + to_date_ofday ldn winter" = reset_and_to_date_ofday ldn winter
let%bench "reset + to_date_ofday ldn summer" = reset_and_to_date_ofday ldn summer
let%bench "reset + to_date_ofday nyc epoch" = reset_and_to_date_ofday nyc epoch
let%bench "reset + to_date_ofday nyc winter" = reset_and_to_date_ofday nyc winter
let%bench "reset + to_date_ofday nyc summer" = reset_and_to_date_ofday nyc summer
let%bench "reset + to_date_ofday utc epoch" = reset_and_to_date_ofday utc epoch
let%bench "reset + to_date_ofday utc winter" = reset_and_to_date_ofday utc winter
let%bench "reset + to_date_ofday utc summer" = reset_and_to_date_ofday utc summer
(* Cold-cache variant of [of_date_ofday]: the date/ofday pair is still
   precomputed once, but the caches are reset inside the benchmarked thunk. *)
let reset_and_of_date_ofday zone time =
  let date = Time.to_date ~zone time in
  let ofday = Time.to_ofday ~zone time in
  fun () ->
    reset_caches zone;
    Time.of_date_ofday ~zone date ofday
;;
let%bench_fun "reset + of_date_ofday hkg epoch" = reset_and_of_date_ofday hkg epoch
let%bench_fun "reset + of_date_ofday hkg winter" = reset_and_of_date_ofday hkg winter
let%bench_fun "reset + of_date_ofday hkg summer" = reset_and_of_date_ofday hkg summer
let%bench_fun "reset + of_date_ofday ldn epoch" = reset_and_of_date_ofday ldn epoch
let%bench_fun "reset + of_date_ofday ldn winter" = reset_and_of_date_ofday ldn winter
let%bench_fun "reset + of_date_ofday ldn summer" = reset_and_of_date_ofday ldn summer
let%bench_fun "reset + of_date_ofday nyc epoch" = reset_and_of_date_ofday nyc epoch
let%bench_fun "reset + of_date_ofday nyc winter" = reset_and_of_date_ofday nyc winter
let%bench_fun "reset + of_date_ofday nyc summer" = reset_and_of_date_ofday nyc summer
let%bench_fun "reset + of_date_ofday utc epoch" = reset_and_of_date_ofday utc epoch
let%bench_fun "reset + of_date_ofday utc winter" = reset_and_of_date_ofday utc winter
let%bench_fun "reset + of_date_ofday utc summer" = reset_and_of_date_ofday utc summer
| null |
https://raw.githubusercontent.com/janestreet/core_unix/59d04e163b49c7eeef9d96fccb2403fd49c44505/time_float_unix/bench/bench_zone.ml
|
ocaml
|
open! Core
module Time = Time_float_unix
let epoch = Time.epoch
let winter = Time.of_string "2000-01-01 06:00:00Z"
let summer = Time.of_string "2020-08-20 18:00:00Z"
let hkg = Time.Zone.find_exn "Asia/Hong_Kong"
let ldn = Time.Zone.find_exn "Europe/London"
let nyc = Time.Zone.find_exn "America/New_York"
let utc = Time.Zone.utc
let next_clock_shift zone time = Time.Zone.next_clock_shift zone ~strictly_after:time
let%bench "next_clock_shift hkg epoch" = next_clock_shift hkg epoch
let%bench "next_clock_shift hkg winter" = next_clock_shift hkg winter
let%bench "next_clock_shift hkg summer" = next_clock_shift hkg summer
let%bench "next_clock_shift ldn epoch" = next_clock_shift ldn epoch
let%bench "next_clock_shift ldn winter" = next_clock_shift ldn winter
let%bench "next_clock_shift ldn summer" = next_clock_shift ldn summer
let%bench "next_clock_shift nyc epoch" = next_clock_shift nyc epoch
let%bench "next_clock_shift nyc winter" = next_clock_shift nyc winter
let%bench "next_clock_shift nyc summer" = next_clock_shift nyc summer
let%bench "next_clock_shift utc epoch" = next_clock_shift utc epoch
let%bench "next_clock_shift utc winter" = next_clock_shift utc winter
let%bench "next_clock_shift utc summer" = next_clock_shift utc summer
let prev_clock_shift zone time = Time.Zone.prev_clock_shift zone ~at_or_before:time
let%bench "prev_clock_shift hkg epoch" = prev_clock_shift hkg epoch
let%bench "prev_clock_shift hkg winter" = prev_clock_shift hkg winter
let%bench "prev_clock_shift hkg summer" = prev_clock_shift hkg summer
let%bench "prev_clock_shift ldn epoch" = prev_clock_shift ldn epoch
let%bench "prev_clock_shift ldn winter" = prev_clock_shift ldn winter
let%bench "prev_clock_shift ldn summer" = prev_clock_shift ldn summer
let%bench "prev_clock_shift nyc epoch" = prev_clock_shift nyc epoch
let%bench "prev_clock_shift nyc winter" = prev_clock_shift nyc winter
let%bench "prev_clock_shift nyc summer" = prev_clock_shift nyc summer
let%bench "prev_clock_shift utc epoch" = prev_clock_shift utc epoch
let%bench "prev_clock_shift utc winter" = prev_clock_shift utc winter
let%bench "prev_clock_shift utc summer" = prev_clock_shift utc summer
let to_date_ofday zone time = Time.to_date_ofday ~zone time
let%bench "to_date_ofday hkg epoch" = to_date_ofday hkg epoch
let%bench "to_date_ofday hkg winter" = to_date_ofday hkg winter
let%bench "to_date_ofday hkg summer" = to_date_ofday hkg summer
let%bench "to_date_ofday ldn epoch" = to_date_ofday ldn epoch
let%bench "to_date_ofday ldn winter" = to_date_ofday ldn winter
let%bench "to_date_ofday ldn summer" = to_date_ofday ldn summer
let%bench "to_date_ofday nyc epoch" = to_date_ofday nyc epoch
let%bench "to_date_ofday nyc winter" = to_date_ofday nyc winter
let%bench "to_date_ofday nyc summer" = to_date_ofday nyc summer
let%bench "to_date_ofday utc epoch" = to_date_ofday utc epoch
let%bench "to_date_ofday utc winter" = to_date_ofday utc winter
let%bench "to_date_ofday utc summer" = to_date_ofday utc summer
let of_date_ofday zone time =
let date = Time.to_date ~zone time in
let ofday = Time.to_ofday ~zone time in
fun () -> Time.of_date_ofday ~zone date ofday
;;
let%bench_fun "of_date_ofday hkg epoch" = of_date_ofday hkg epoch
let%bench_fun "of_date_ofday hkg winter" = of_date_ofday hkg winter
let%bench_fun "of_date_ofday hkg summer" = of_date_ofday hkg summer
let%bench_fun "of_date_ofday ldn epoch" = of_date_ofday ldn epoch
let%bench_fun "of_date_ofday ldn winter" = of_date_ofday ldn winter
let%bench_fun "of_date_ofday ldn summer" = of_date_ofday ldn summer
let%bench_fun "of_date_ofday nyc epoch" = of_date_ofday nyc epoch
let%bench_fun "of_date_ofday nyc winter" = of_date_ofday nyc winter
let%bench_fun "of_date_ofday nyc summer" = of_date_ofday nyc summer
let%bench_fun "of_date_ofday utc epoch" = of_date_ofday utc epoch
let%bench_fun "of_date_ofday utc winter" = of_date_ofday utc winter
let%bench_fun "of_date_ofday utc summer" = of_date_ofday utc summer
let reset_caches zone =
Time.reset_date_cache ();
Time.Zone.reset_transition_cache zone
;;
let reset_and_next_clock_shift zone time =
reset_caches zone;
Time.Zone.next_clock_shift zone ~strictly_after:time
;;
let%bench "reset + next_clock_shift hkg epoch" = reset_and_next_clock_shift hkg epoch
let%bench "reset + next_clock_shift hkg winter" = reset_and_next_clock_shift hkg winter
let%bench "reset + next_clock_shift hkg summer" = reset_and_next_clock_shift hkg summer
let%bench "reset + next_clock_shift ldn epoch" = reset_and_next_clock_shift ldn epoch
let%bench "reset + next_clock_shift ldn winter" = reset_and_next_clock_shift ldn winter
let%bench "reset + next_clock_shift ldn summer" = reset_and_next_clock_shift ldn summer
let%bench "reset + next_clock_shift nyc epoch" = reset_and_next_clock_shift nyc epoch
let%bench "reset + next_clock_shift nyc winter" = reset_and_next_clock_shift nyc winter
let%bench "reset + next_clock_shift nyc summer" = reset_and_next_clock_shift nyc summer
let%bench "reset + next_clock_shift utc epoch" = reset_and_next_clock_shift utc epoch
let%bench "reset + next_clock_shift utc winter" = reset_and_next_clock_shift utc winter
let%bench "reset + next_clock_shift utc summer" = reset_and_next_clock_shift utc summer
let reset_and_prev_clock_shift zone time =
reset_caches zone;
Time.Zone.prev_clock_shift zone ~at_or_before:time
;;
let%bench "reset + prev_clock_shift hkg epoch" = reset_and_prev_clock_shift hkg epoch
let%bench "reset + prev_clock_shift hkg winter" = reset_and_prev_clock_shift hkg winter
let%bench "reset + prev_clock_shift hkg summer" = reset_and_prev_clock_shift hkg summer
let%bench "reset + prev_clock_shift ldn epoch" = reset_and_prev_clock_shift ldn epoch
let%bench "reset + prev_clock_shift ldn winter" = reset_and_prev_clock_shift ldn winter
let%bench "reset + prev_clock_shift ldn summer" = reset_and_prev_clock_shift ldn summer
let%bench "reset + prev_clock_shift nyc epoch" = reset_and_prev_clock_shift nyc epoch
let%bench "reset + prev_clock_shift nyc winter" = reset_and_prev_clock_shift nyc winter
let%bench "reset + prev_clock_shift nyc summer" = reset_and_prev_clock_shift nyc summer
let%bench "reset + prev_clock_shift utc epoch" = reset_and_prev_clock_shift utc epoch
let%bench "reset + prev_clock_shift utc winter" = reset_and_prev_clock_shift utc winter
let%bench "reset + prev_clock_shift utc summer" = reset_and_prev_clock_shift utc summer
let reset_and_to_date_ofday zone time =
reset_caches zone;
Time.to_date_ofday ~zone time
;;
let%bench "reset + to_date_ofday hkg epoch" = reset_and_to_date_ofday hkg epoch
let%bench "reset + to_date_ofday hkg winter" = reset_and_to_date_ofday hkg winter
let%bench "reset + to_date_ofday hkg summer" = reset_and_to_date_ofday hkg summer
let%bench "reset + to_date_ofday ldn epoch" = reset_and_to_date_ofday ldn epoch
let%bench "reset + to_date_ofday ldn winter" = reset_and_to_date_ofday ldn winter
let%bench "reset + to_date_ofday ldn summer" = reset_and_to_date_ofday ldn summer
let%bench "reset + to_date_ofday nyc epoch" = reset_and_to_date_ofday nyc epoch
let%bench "reset + to_date_ofday nyc winter" = reset_and_to_date_ofday nyc winter
let%bench "reset + to_date_ofday nyc summer" = reset_and_to_date_ofday nyc summer
let%bench "reset + to_date_ofday utc epoch" = reset_and_to_date_ofday utc epoch
let%bench "reset + to_date_ofday utc winter" = reset_and_to_date_ofday utc winter
let%bench "reset + to_date_ofday utc summer" = reset_and_to_date_ofday utc summer
let reset_and_of_date_ofday zone time =
let date = Time.to_date ~zone time in
let ofday = Time.to_ofday ~zone time in
fun () ->
reset_caches zone;
Time.of_date_ofday ~zone date ofday
;;
let%bench_fun "reset + of_date_ofday hkg epoch" = reset_and_of_date_ofday hkg epoch
let%bench_fun "reset + of_date_ofday hkg winter" = reset_and_of_date_ofday hkg winter
let%bench_fun "reset + of_date_ofday hkg summer" = reset_and_of_date_ofday hkg summer
let%bench_fun "reset + of_date_ofday ldn epoch" = reset_and_of_date_ofday ldn epoch
let%bench_fun "reset + of_date_ofday ldn winter" = reset_and_of_date_ofday ldn winter
let%bench_fun "reset + of_date_ofday ldn summer" = reset_and_of_date_ofday ldn summer
let%bench_fun "reset + of_date_ofday nyc epoch" = reset_and_of_date_ofday nyc epoch
let%bench_fun "reset + of_date_ofday nyc winter" = reset_and_of_date_ofday nyc winter
let%bench_fun "reset + of_date_ofday nyc summer" = reset_and_of_date_ofday nyc summer
let%bench_fun "reset + of_date_ofday utc epoch" = reset_and_of_date_ofday utc epoch
let%bench_fun "reset + of_date_ofday utc winter" = reset_and_of_date_ofday utc winter
let%bench_fun "reset + of_date_ofday utc summer" = reset_and_of_date_ofday utc summer
|
|
94e4da5050179b0c2d1d75e6bfab337c4a55e3c4105d963b46eef124624bdb93
|
softwarelanguageslab/maf
|
R5RS_scp1_slide-in-1.scm
|
; Mutation-testing variant of the "slide-in" (interleave) exercise.
; The <change> forms mark mutations: (<change> old new).
; Changes:
; * removed: 0
; * added: 0
; * swaps: 1
; * negated predicates: 0
; * swapped branches: 0
; * calls to id fun: 1
; NOTE(review): the tally lines above had lost their ';' comment markers,
; which made the file unreadable by a Scheme reader; markers restored.
(letrec ((schuif-in! (lambda (l1 l2)
                       (if (null? (cdr l1))
                           (begin
                             (<change>
                               (set-cdr! l1 l2)
                               ((lambda (x) x) (set-cdr! l1 l2)))
                             'ok)
                           (if (null? l2)
                               'ok
                               (let ((rest1 (cdr l1))
                                     (rest2 (cdr l2)))
                                 (<change>
                                   (set-cdr! l1 l2)
                                   (set-cdr! l2 rest1))
                                 (<change>
                                   (set-cdr! l2 rest1)
                                   (set-cdr! l1 l2))
                                 (schuif-in! rest1 rest2))))))
         (lijst1 (__toplevel_cons 1 (__toplevel_cons 3 (__toplevel_cons 5 ()))))
         (lijst2 (__toplevel_cons 2 (__toplevel_cons 4 (__toplevel_cons 6 (__toplevel_cons 8 ()))))))
  (schuif-in! lijst1 lijst2)
  (equal?
   lijst1
   (__toplevel_cons
    1
    (__toplevel_cons
     2
     (__toplevel_cons
      3
      (__toplevel_cons 4 (__toplevel_cons 5 (__toplevel_cons 6 (__toplevel_cons 8 ())))))))))
| null |
https://raw.githubusercontent.com/softwarelanguageslab/maf/11acedf56b9bf0c8e55ddb6aea754b6766d8bb40/test/changes/scheme/generated/R5RS_scp1_slide-in-1.scm
|
scheme
|
Changes:
* negated predicates: 0
* swapped branches: 0
|
* removed : 0
* added : 0
* swaps : 1
* calls to i d fun : 1
(letrec ((schuif-in! (lambda (l1 l2)
(if (null? (cdr l1))
(begin
(<change>
(set-cdr! l1 l2)
((lambda (x) x) (set-cdr! l1 l2)))
'ok)
(if (null? l2)
'ok
(let ((rest1 (cdr l1))
(rest2 (cdr l2)))
(<change>
(set-cdr! l1 l2)
(set-cdr! l2 rest1))
(<change>
(set-cdr! l2 rest1)
(set-cdr! l1 l2))
(schuif-in! rest1 rest2))))))
(lijst1 (__toplevel_cons 1 (__toplevel_cons 3 (__toplevel_cons 5 ()))))
(lijst2 (__toplevel_cons 2 (__toplevel_cons 4 (__toplevel_cons 6 (__toplevel_cons 8 ()))))))
(schuif-in! lijst1 lijst2)
(equal?
lijst1
(__toplevel_cons
1
(__toplevel_cons
2
(__toplevel_cons
3
(__toplevel_cons 4 (__toplevel_cons 5 (__toplevel_cons 6 (__toplevel_cons 8 ())))))))))
|
be97548a4f8f830aed391cb32a1e522eaba86e0673910fb86465abd3a80aa630
|
immutant/immutant
|
utils.clj
|
;; Copied and modified from potemkin, v0.4.3, MIT licensed.
;; Changes:
;;  - removed fast-memoize and friends to remove need for clj-tuple
;; NOTE(review): the first and third lines above had lost their ';;'
;; markers (bare text in a .clj file); restored as comments.
(ns ^:no-doc from.potemkin.utils
  (:require
   [from.potemkin.macros :refer [unify-gensyms]])
  (:import
   [java.util.concurrent
    ConcurrentHashMap]))
(defmacro fast-bound-fn
  "Creates a variant of bound-fn which doesn't assume you want a merged
  context between the source and execution environments."
  [& fn-body]
  (let [{:keys [major minor]} *clojure-version*
        ;; Choose the binding-capture API by the Clojure version compiling
        ;; this macro: pre-1.3 gets thread-binding maps; 1.3 only has
        ;; getThreadBindingFrame; 1.4+ has cloneThreadBindingFrame.
        use-thread-bindings? (and (= 1 major) (< minor 3))
        use-get-binding? (and (= 1 major) (< minor 4))]
    (if use-thread-bindings?
      ;; Legacy path: capture the binding map now and reinstate it with
      ;; with-bindings around every invocation.
      `(let [bindings# (get-thread-bindings)
             f# (fn ~@fn-body)]
         (fn [~'& args#]
           (with-bindings bindings#
             (apply f# args#))))
      ;; Frame path: capture the raw binding frame once, then swap it in
      ;; for the call and restore the caller's frame in a finally block.
      `(let [bound-frame# ~(if use-get-binding?
                             `(clojure.lang.Var/getThreadBindingFrame)
                             `(clojure.lang.Var/cloneThreadBindingFrame))
             f# (fn ~@fn-body)]
         (fn [~'& args#]
           (let [curr-frame# (clojure.lang.Var/getThreadBindingFrame)]
             (clojure.lang.Var/resetThreadBindingFrame bound-frame#)
             (try
               (apply f# args#)
               (finally
                 (clojure.lang.Var/resetThreadBindingFrame curr-frame#)))))))))
(defn fast-bound-fn*
  "Creates a function which conveys bindings, via fast-bound-fn."
  [f]
  ;; Capture the caller's bindings at construction time; the returned fn
  ;; reinstates them around every invocation of [f].
  (fast-bound-fn [& args]
    (apply f args)))
(defn retry-exception? [x]
  ;; STM retries are signaled with an internal Clojure exception type;
  ;; it is not public API, so detect it by its fully-qualified class name.
  (let [^Class c (class x)]
    (= "clojure.lang.LockingTransaction$RetryEx" (.getName c))))
(defmacro try*
  "A variant of try that is fully transparent to transaction retry exceptions"
  [& body+catch]
  (let [;; Everything before the first (catch ...) form is the body proper.
        body (take-while
              #(or (not (sequential? %)) (not (= 'catch (first %))))
              body+catch)
        catch (drop (count body) body+catch)
        ;; Rewrite a catch clause so the STM retry exception is rethrown
        ;; instead of being handled (and swallowed) by user code.
        ignore-retry (fn [x]
                       (when x
                         (let [ex (nth x 2)]
                           `(~@(take 3 x)
                             (if (from.potemkin.utils/retry-exception? ~ex)
                               (throw ~ex)
                               (do ~@(drop 3 x)))))))
        ;; Index clauses by caught class; only Throwable/Error clauses can
        ;; catch the retry exception, so only those two are rewritten.
        class->clause (-> (zipmap (map second catch) catch)
                          (update-in ['Throwable] ignore-retry)
                          (update-in ['Error] ignore-retry))]
    `(try
       ~@body
       ~@(->> class->clause vals (remove nil?)))))
(defmacro condp-case
  "A variant of condp which has case-like syntax for options. When comparing
  smaller numbers of keywords, this can be faster, sometimes significantly."
  [predicate value & cases]
  (unify-gensyms
   `(let [val## ~value
          pred## ~predicate]
      (cond
        ;; Each test/result pair becomes a cond clause; a sequential test
        ;; expands to an (or ...) over a predicate call per candidate.
        ~@(->> cases
               (partition 2)
               (map
                (fn [[vals expr]]
                  `(~(if (sequential? vals)
                       `(or ~@(map (fn [x] `(pred## val## ~x)) vals))
                       `(pred## val## ~vals))
                    ~expr)))
               (apply concat))
        :else
        ;; An odd case count means the trailing form is a default clause;
        ;; otherwise throw on no match, mirroring condp/case semantics.
        ~(if (even? (count cases))
           `(throw (IllegalArgumentException. (str "no matching clause for " (pr-str val##))))
           (last cases))))))
(defmacro doit
  "A version of doseq that doesn't emit all that inline-destroying chunked-seq code."
  [[x it] & body]
  (let [it-sym (gensym "iterable")]
    ;; Drive the host Iterable/Iterator protocol directly with loop/recur;
    ;; the ^Iterable hint avoids reflection on the .iterator call.
    `(let [~it-sym ~it
           it# (.iterator ~(with-meta it-sym {:tag "Iterable"}))]
       (loop []
         (when (.hasNext it#)
           (let [~x (.next it#)]
             ~@body)
           (recur))))))
(defmacro doary
  "An array-specific version of doseq."
  [[x ary] & body]
  (let [ary-sym (gensym "ary")]
    ;; The ^objects hint lets alength/aget compile to direct array ops
    ;; rather than reflective calls.
    `(let [~(with-meta ary-sym {:tag "objects"}) ~ary]
       (dotimes [idx# (alength ~ary-sym)]
         (let [~x (aget ~ary-sym idx#)]
           ~@body)))))
| null |
https://raw.githubusercontent.com/immutant/immutant/6ff8fa03acf73929f61f2ca75446cb559ddfc1ef/web/src/from/potemkin/utils.clj
|
clojure
|
Changes:
|
Copied and modified from potemkin , v0.4.3 ( ) , MIT licnensed , Copyright
- removed fast - memoize and friends to remove need for clj - tuple
(ns ^:no-doc from.potemkin.utils
(:require
[from.potemkin.macros :refer [unify-gensyms]])
(:import
[java.util.concurrent
ConcurrentHashMap]))
(defmacro fast-bound-fn
"Creates a variant of bound-fn which doesn't assume you want a merged
context between the source and execution environments."
[& fn-body]
(let [{:keys [major minor]} *clojure-version*
use-thread-bindings? (and (= 1 major) (< minor 3))
use-get-binding? (and (= 1 major) (< minor 4))]
(if use-thread-bindings?
`(let [bindings# (get-thread-bindings)
f# (fn ~@fn-body)]
(fn [~'& args#]
(with-bindings bindings#
(apply f# args#))))
`(let [bound-frame# ~(if use-get-binding?
`(clojure.lang.Var/getThreadBindingFrame)
`(clojure.lang.Var/cloneThreadBindingFrame))
f# (fn ~@fn-body)]
(fn [~'& args#]
(let [curr-frame# (clojure.lang.Var/getThreadBindingFrame)]
(clojure.lang.Var/resetThreadBindingFrame bound-frame#)
(try
(apply f# args#)
(finally
(clojure.lang.Var/resetThreadBindingFrame curr-frame#)))))))))
(defn fast-bound-fn*
"Creates a function which conveys bindings, via fast-bound-fn."
[f]
(fast-bound-fn [& args]
(apply f args)))
(defn retry-exception? [x]
(= "clojure.lang.LockingTransaction$RetryEx" (.getName ^Class (class x))))
(defmacro try*
"A variant of try that is fully transparent to transaction retry exceptions"
[& body+catch]
(let [body (take-while
#(or (not (sequential? %)) (not (= 'catch (first %))))
body+catch)
catch (drop (count body) body+catch)
ignore-retry (fn [x]
(when x
(let [ex (nth x 2)]
`(~@(take 3 x)
(if (from.potemkin.utils/retry-exception? ~ex)
(throw ~ex)
(do ~@(drop 3 x)))))))
class->clause (-> (zipmap (map second catch) catch)
(update-in ['Throwable] ignore-retry)
(update-in ['Error] ignore-retry))]
`(try
~@body
~@(->> class->clause vals (remove nil?)))))
(defmacro condp-case
"A variant of condp which has case-like syntax for options. When comparing
smaller numbers of keywords, this can be faster, sometimes significantly."
[predicate value & cases]
(unify-gensyms
`(let [val## ~value
pred## ~predicate]
(cond
~@(->> cases
(partition 2)
(map
(fn [[vals expr]]
`(~(if (sequential? vals)
`(or ~@(map (fn [x] `(pred## val## ~x)) vals))
`(pred## val## ~vals))
~expr)))
(apply concat))
:else
~(if (even? (count cases))
`(throw (IllegalArgumentException. (str "no matching clause for " (pr-str val##))))
(last cases))))))
(defmacro doit
"A version of doseq that doesn't emit all that inline-destroying chunked-seq code."
[[x it] & body]
(let [it-sym (gensym "iterable")]
`(let [~it-sym ~it
it# (.iterator ~(with-meta it-sym {:tag "Iterable"}))]
(loop []
(when (.hasNext it#)
(let [~x (.next it#)]
~@body)
(recur))))))
(defmacro doary
"An array-specific version of doseq."
[[x ary] & body]
(let [ary-sym (gensym "ary")]
`(let [~(with-meta ary-sym {:tag "objects"}) ~ary]
(dotimes [idx# (alength ~ary-sym)]
(let [~x (aget ~ary-sym idx#)]
~@body)))))
|
0a65071968df2d0381009ce41627ca5049fb62a3a5bf0f5409ff904dc52d37bc
|
ghc/testsuite
|
tcfail088.hs
|
{-# LANGUAGE RankNTypes, FlexibleInstances #-}
-- !!! Check that forall types can't be arguments
module ShouldFail where

data T s a = MkT s a

-- Deliberately ill-formed: a forall type in an instance head must be
-- rejected by the typechecker (this is a should-fail test).
instance Ord a => Ord (forall s. T s a)

-- A for-all should not appear as an argument to a type constructor.
-- NOTE(review): this comment line had lost its '--' marker; restored.
g :: T s (forall b.b)
g = error "urk"
| null |
https://raw.githubusercontent.com/ghc/testsuite/998a816ae89c4fd573f4abd7c6abb346cf7ee9af/tests/typecheck/should_fail/tcfail088.hs
|
haskell
|
# LANGUAGE RankNTypes, FlexibleInstances #
!!! Check that forall types can't be arguments
|
module ShouldFail where
data T s a = MkT s a
instance Ord a => Ord (forall s. T s a)
A for - all should not appear as an argument to
g :: T s (forall b.b)
g = error "urk"
|
f2d524a7ef71a00a78137229632c6c6c19f39edeb885e282e3f845a269266c42
|
qiao/sicp-solutions
|
1.39.scm
|
;; SICP exercise 1.39: Lambert's continued-fraction formula for tan x.

;; Evaluate the k-term continued fraction n1/(d1 + n2/(d2 + ...))
;; iteratively, accumulating from the innermost (k-th) term outward.
(define (cont-frac-iter n d k)
  (define (iter k acc)
    (if (= 0 k)
        acc
        (iter (- k 1) (/ (n k)
                         (+ (d k) acc)))))
  (iter k 0))

;; tan x = x / (1 - x^2 / (3 - x^2 / (5 - ...))), truncated to k terms.
;; Encoded for cont-frac-iter as n(1) = x, n(i>1) = -x^2, d(i) = 2i - 1;
;; negating the numerators turns the subtractions into additions.
(define (tan-cf x k)
  (define (n i)
    (if (= 1 i)
        x
        (- (* x x))))   ; inlined x*x: `square` is not defined in this file
  (define (d i)
    (- (* 2 i) 1))
  (cont-frac-iter n d k))
| null |
https://raw.githubusercontent.com/qiao/sicp-solutions/a2fe069ba6909710a0867bdb705b2e58b2a281af/chapter1/1.39.scm
|
scheme
|
(define (cont-frac-iter n d k)
(define (iter k acc)
(if (= 0 k)
acc
(iter (- k 1) (/ (n k)
(+ (d k) acc)))))
(iter k 0))
(define (tan-cf x k)
(define (n i)
(if (= 1 i)
x
(- (square x))))
(define (d i)
(- (* 2 i) 1))
(cont-frac-iter n d k))
|
|
539ce42f5a21b9093796653e492e14a7f55cf0265c96ea32f93fcb9913fcc322
|
OCamlPro/alt-ergo
|
models.ml
|
(******************************************************************************)
(* *)
Alt - Ergo : The SMT Solver For Software Verification
Copyright ( C ) 2020 - 2020
(* *)
(* This file is distributed under the terms of the license indicated *)
(* in the file 'License.OCamlPro'. If 'License.OCamlPro' is not *)
(* present, please contact us to clarify licensing. *)
(* *)
(******************************************************************************)
open Format
open Options
module X = Shostak.Combine
module Ac = Shostak.Ac
module Ex = Explanation
module Sy = Symbols
module E = Expr
module ME = Expr.Map
module SE = Expr.Set
module MS = Map.Make(String)
let constraints = ref MS.empty
module Pp_smtlib_term = struct
let to_string_type t =
asprintf "%a" Ty.print t
let rec print fmt t =
let {E.f;xs;ty; _} = E.term_view t in
match f, xs with
| Sy.Lit lit, xs ->
begin
match lit, xs with
| Sy.L_eq, a::l ->
if get_output_smtlib () then
fprintf fmt "(= %a%a)"
print a (fun fmt -> List.iter (fprintf fmt " %a" print)) l
else
fprintf fmt "(%a%a)"
print a (fun fmt -> List.iter (fprintf fmt " = %a" print)) l
| Sy.L_neg_eq, [a; b] ->
if get_output_smtlib () then
fprintf fmt "(not (= %a %a))" print a print b
else
fprintf fmt "(%a <> %a)" print a print b
| Sy.L_neg_eq, a::l ->
if get_output_smtlib () then
fprintf fmt "(distinct %a%a)"
print a (fun fmt -> List.iter (fprintf fmt " %a" print)) l
else
fprintf fmt "distinct(%a%a)"
print a (fun fmt -> List.iter (fprintf fmt ", %a" print)) l
| Sy.L_built Sy.LE, [a;b] ->
if get_output_smtlib () then
fprintf fmt "(<= %a %a)" print a print b
else
fprintf fmt "(%a <= %a)" print a print b
| Sy.L_built Sy.LT, [a;b] ->
if get_output_smtlib () then
fprintf fmt "(< %a %a)" print a print b
else
fprintf fmt "(%a < %a)" print a print b
| Sy.L_neg_built Sy.LE, [a; b] ->
if get_output_smtlib () then
fprintf fmt "(> %a %a)" print a print b
else
fprintf fmt "(%a > %a)" print a print b
| Sy.L_neg_built Sy.LT, [a; b] ->
if get_output_smtlib () then
fprintf fmt "(>= %a %a)" print a print b
else
fprintf fmt "(%a >= %a)" print a print b
| Sy.L_neg_pred, [a] ->
fprintf fmt "(not %a)" print a
| Sy.L_built (Sy.IsConstr hs), [e] ->
if get_output_smtlib () then
fprintf fmt "((_ is %a) %a)" Hstring.print hs print e
else
fprintf fmt "(%a ? %a)" print e Hstring.print hs
| Sy.L_neg_built (Sy.IsConstr hs), [e] ->
if get_output_smtlib () then
fprintf fmt "(not ((_ is %a) %a))" Hstring.print hs print e
else
fprintf fmt "not (%a ? %a)" print e Hstring.print hs
| (Sy.L_built (Sy.LT | Sy.LE) | Sy.L_neg_built (Sy.LT | Sy.LE)
| Sy.L_neg_pred | Sy.L_eq | Sy.L_neg_eq
| Sy.L_built (Sy.IsConstr _)
| Sy.L_neg_built (Sy.IsConstr _)) , _ ->
assert false
end
| Sy.Op Sy.Get, [e1; e2] ->
if get_output_smtlib () then
fprintf fmt "(select %a %a)" print e1 print e2
else
fprintf fmt "%a[%a]" print e1 print e2
| Sy.Op Sy.Set, [e1; e2; e3] ->
if get_output_smtlib () then
fprintf fmt "(store %a %a %a)"
print e1
print e2
print e3
else
fprintf fmt "%a[%a<-%a]" print e1 print e2 print e3
| Sy.Op Sy.Concat, [e1; e2] ->
fprintf fmt "%a@@%a" print e1 print e2
| Sy.Op Sy.Extract, [e1; e2; e3] ->
fprintf fmt "%a^{%a,%a}" print e1 print e2 print e3
| Sy.Op (Sy.Access field), [e] ->
if get_output_smtlib () then
fprintf fmt "(%s %a)" (Hstring.view field) print e
else
fprintf fmt "%a.%s" print e (Hstring.view field)
| Sy.Op (Sy.Record), _ ->
begin match ty with
| Ty.Trecord { Ty.lbs = lbs; _ } ->
assert (List.length xs = List.length lbs);
fprintf fmt "{";
ignore (List.fold_left2 (fun first (field,_) e ->
fprintf fmt "%s%s = %a" (if first then "" else "; ")
(Hstring.view field) print e;
false
) true lbs xs);
fprintf fmt "}";
| _ -> assert false
end
TODO : introduce in the future to simplify this ?
| Sy.Op op, [e1; e2] when op == Sy.Pow || op == Sy.Integer_round ||
op == Sy.Max_real || op == Sy.Max_int ||
op == Sy.Min_real || op == Sy.Min_int ->
fprintf fmt "%a(%a,%a)" Sy.print f print e1 print e2
TODO : introduce in the future to simplify this ?
| Sy.Op (Sy.Constr hs), ((_::_) as l) ->
fprintf fmt "%a(%a)" Hstring.print hs print_list l
| Sy.Op _, [e1; e2] ->
if get_output_smtlib () then
fprintf fmt "(%a %a %a)" Sy.print f print e1 print e2
else
fprintf fmt "(%a %a %a)" print e1 Sy.print f print e2
| Sy.Op Sy.Destruct (hs, grded), [e] ->
fprintf fmt "%a#%s%a"
print e (if grded then "" else "!") Hstring.print hs
| Sy.In(lb, rb), [t] ->
fprintf fmt "(%a in %a, %a)" print t Sy.print_bound lb Sy.print_bound rb
| Sy.Name (n,_), l -> begin
let constraint_name =
try let constraint_name,_,_ =
(MS.find (Hstring.view n) !constraints) in
constraint_name
with _ ->
let constraint_name = "c_"^(Hstring.view n) in
constraints := MS.add (Hstring.view n)
(constraint_name,
to_string_type (E.type_info t),
List.map (fun e -> to_string_type (E.type_info e)) l
) !constraints;
constraint_name
in
match l with
| [] -> fprintf fmt "%s" constraint_name
| l ->
fprintf fmt "(%s %a)" constraint_name (Printer.pp_list_space print) l;
end
| _, [] ->
fprintf fmt "%a" Sy.print f
| _, _ ->
if get_output_smtlib () then
fprintf fmt "(%a %a)" Sy.print f print_list xs
else
fprintf fmt "%a(%a)" Sy.print f print_list xs
and print_list_sep sep fmt = function
| [] -> ()
| [t] -> print fmt t
| t::l -> Format.fprintf fmt "%a%s%a" print t sep (print_list_sep sep) l
and print_list fmt = print_list_sep "," fmt
end
(* Printing of counter-example (model) elements in SMT-LIB syntax.
   Record values are accumulated in string maps ("records") keyed by a
   record's printed name, mapping destructor names to their printed
   representatives. *)
module SmtlibCounterExample = struct

  (* Print only the pretty-printed string of a (value, string) pair. *)
  let x_print fmt (_ , ppr) = fprintf fmt "%s" ppr

  (* Choose the term printer according to the requested output format:
     Why3 output goes through the SMT-LIB-flavoured printer above. *)
  let pp_term fmt t =
    if Options.get_output_format () == Why3 then
      Pp_smtlib_term.print fmt t
    else
      E.print fmt t

  (* Placeholder value for a type when no representative is known.
     For non-int/real/bool types a fresh name is printed instead. *)
  let dummy_value_of_type ty =
    match ty with
      Ty.Tint -> "0"
    | Ty.Treal -> "0.0"
    | Ty.Tbool -> "false"
    | _ -> asprintf "%a" pp_term (Expr.fresh_name ty)

  (* Print a dummy value, or "_" when the interpretation option asks
     for underscores instead of concrete placeholders. *)
  let pp_dummy_value_of_type fmt ty =
    if not (Options.get_interpretation_use_underscore ()) then
      let d = dummy_value_of_type ty in
      fprintf fmt "%s " d
    else
      fprintf fmt "_ "

  (* records[record_name][destr_name] <- rep (creating the inner map
     if this record has not been seen yet). *)
  let add_records_destr records record_name destr_name rep =
    let destrs =
      try MS.find record_name records
      with Not_found -> MS.empty
    in
    let destrs =
      MS.add destr_name rep destrs in
    MS.add record_name destrs records

  (* Build the "(constructor v1 v2 ...)" payload for a record value,
     taking each field's representative from [records] and falling back
     to a dummy value for fields with no recorded representative. *)
  let mk_records_constr records record_name
      { Ty.name = _n; record_constr = cstr; lbs = lbs; _} =
    let find_destrs destr destrs =
      try let rep = MS.find destr destrs in
        Some rep
      with Not_found -> None
    in
    let print_destr fmt (destrs,lbs) =
      List.iter (fun (destr, ty_destr) ->
          let destr = Hstring.view destr in
          match find_destrs destr destrs with
          | None ->
            pp_dummy_value_of_type fmt ty_destr
          | Some rep -> fprintf fmt "%s " rep
        ) lbs
    in
    let destrs =
      try MS.find (Sy.to_string record_name) records
      with Not_found -> MS.empty
    in
    asprintf "%s %a"
      (Hstring.view cstr)
      print_destr (destrs,lbs)

  (* Register every field representative of a record value: pairs each
     label of the record type with the corresponding value in
     [xs_values] (lists must have equal length). *)
  let add_record_constr records record_name
      { Ty.name = _n; record_constr = _cstr; lbs = lbs; _} xs_values =
    List.fold_left2(fun records (destr,_) (rep,_) ->
        add_records_destr
          records
          record_name
          (Hstring.view destr)
          (asprintf "%a" pp_term rep)
      ) records lbs xs_values

  (* Update [records] from one model entry: if [f] is applied to a
     single record-typed argument, treat [f] as a destructor of that
     argument; otherwise, if the result type is a record, record its
     field values. *)
  let check_records records xs_ty_named xs_values f ty rep =
    match xs_ty_named with
    | [Ty.Trecord _r, _arg] -> begin
        match xs_values with
        | [record_name,_] ->
          add_records_destr
            records
            (asprintf "%a" Expr.print record_name)
            (Sy.to_string f)
            rep
        | [] | _ -> records
      end
    | _ ->
      match ty with
      | Ty.Trecord r ->
        add_record_constr records rep r xs_values
      | _ -> records

  (* Emit one "(define-fun name (args) ty value)" line.  If the symbol
     was renamed in the global [constraints] table, the renamed value
     is used instead of [t]. *)
  let print_fun_def fmt name args ty t =
    let print_args fmt (ty,name) =
      Format.fprintf fmt "(%s %a)" name Ty.print ty in
    let defined_value =
      try
        let res,_,_ = (MS.find (Sy.to_string name) !constraints) in res
      with _ -> t
    in
    Printer.print_fmt ~flushed:false fmt
      "(define-fun %a (%a) %a %s)@ "
      Sy.print name
      (Printer.pp_list_space (print_args)) args
      Ty.print ty
      defined_value

  (* Print nullary model entries as zero-argument define-funs; record
     results are expanded into their constructor form. *)
  let output_constants_counterexample fmt records cprofs =
    ModelMap.iter
      (fun (f, xs_ty, ty) st ->
         assert (xs_ty == []);
         match ModelMap.V.elements st with
         | [[], rep] ->
           let rep = Format.asprintf "%a" x_print rep in
           let rep =
             match ty with
             | Ty.Trecord r ->
               let constr = mk_records_constr records f r in
               sprintf "(%s)" constr
             | _ -> rep
           in
           print_fun_def fmt f [] ty rep
         | _ -> assert false
      ) cprofs

  (* Print function model entries.  Entries are grouped by result
     representative; a function with several representatives becomes a
     nested (ite (or (and (= arg_i v_i) ...) ...) rep ...) cascade.
     Returns the updated records map. *)
  let output_functions_counterexample fmt records fprofs =
    let records = ref records in
    ModelMap.iter
      (fun (f, xs_ty, ty) st ->
         (* Name the formal arguments arg_0, arg_1, ... *)
         let xs_ty_named = List.mapi (fun i ty ->
             ty,(sprintf "arg_%d" i)
           ) xs_ty in
         let rep =
           (* Group argument tuples by their result representative. *)
           let representants =
             ModelMap.V.fold (fun (xs_values,(_rep,srep)) acc ->
                 assert ((List.length xs_ty_named) = (List.length xs_values));
                 records :=
                   check_records !records xs_ty_named xs_values f ty srep;
                 let reps = try MS.find srep acc with Not_found -> [] in
                 MS.add srep (xs_values :: reps) acc
               ) st MS.empty in
           let representants = MS.fold (fun srep xs_values_list acc ->
               (srep,xs_values_list) :: acc) representants [] in
           (* Conjunction "(and (= arg_i v_i) ...)" for one tuple. *)
           let rec mk_ite_and xs tys =
             match xs, tys with
             | [],[] -> assert false
             | [xs,_],[_ty,name] ->
               asprintf "(= %s %a)" name pp_term xs
             | (xs,_) :: l1, (_ty,name) :: l2 ->
               asprintf "(and (= %s %a) %s)"
                 name
                 pp_term xs
                 (mk_ite_and l1 l2)
             | _, _ -> assert false
           in
           (* Disjunction over all tuples mapped to one representative. *)
           let mk_ite_or l =
             let pp_or_list fmt xs_values =
               fprintf fmt "%s" (mk_ite_and xs_values xs_ty_named)
             in
             match l with
             | [] -> assert false
             | [xs_values] -> mk_ite_and xs_values xs_ty_named
             | xs_values :: l ->
               asprintf "(or %s %a)"
                 (mk_ite_and xs_values xs_ty_named)
                 (Printer.pp_list_space pp_or_list) l
           in
           (* Fold the groups into a nested ite; the final else-branch
              is a dummy value (or an extra ite in underscore mode). *)
           let rec reps_aux reps =
             match reps with
             | [] -> asprintf "%a" pp_dummy_value_of_type ty
             | [srep,xs_values_list] ->
               if Options.get_interpretation_use_underscore () then
                 asprintf "(ite %s %s %s)"
                   (mk_ite_or xs_values_list)
                   srep
                   (reps_aux [])
               else
                 srep
             | (srep,xs_values_list) :: l ->
               asprintf "(ite %s %s %s)"
                 (mk_ite_or xs_values_list)
                 srep
                 (reps_aux l)
           in
           if List.length representants = 1 then
             sprintf "%s" (fst (List.hd representants))
           else
             reps_aux representants
         in
         print_fun_def fmt f xs_ty_named ty rep;
      ) fprofs;
    !records

  (* Arrays are not printed yet; emit a placeholder comment. *)
  let output_arrays_counterexample fmt _arrays =
    Printer.print_fmt fmt "@ ; Arrays not yet supported@ "

end
(* of module SmtlibCounterExample *)
(* Extra output used for Why3-style models: re-declares the symbols
   collected in the global [constraints] table and re-asserts the
   propositional model, so the printed model is a self-contained
   SMT-LIB script. *)
module Why3CounterExample = struct

  (* Print one declare-const / declare-fun per collected symbol, then
     one (assert ...) per literal of the propositional model. *)
  let output_constraints fmt prop_model =
    let assertions = SE.fold (fun e acc ->
        (asprintf "%s(assert %a)@ " acc SmtlibCounterExample.pp_term e)
      ) prop_model "" in
    Printer.print_fmt ~flushed:false fmt "@ ; constants@ ";
    MS.iter (fun _ (name,ty,args_ty) ->
        match args_ty with
        | [] ->
          Printer.print_fmt ~flushed:false fmt "(declare-const %s %s)@ "
            name ty
        | l ->
          Printer.print_fmt ~flushed:false fmt "(declare-fun %s (%s) %s)@ "
            name
            (String.concat " " l)
            ty
      ) !constraints;
    Printer.print_fmt ~flushed:false fmt "@ ; assertions@ ";
    Printer.print_fmt fmt ~flushed:false "%s" assertions
end
(* of module Why3CounterExample *)
(* Print the whole concrete model (functions, constants, arrays) when
   model generation is enabled.  Emits the "unknown" status line first,
   then a "(model ...)" block; Why3 constraint declarations are printed
   only when the corresponding option is set. *)
let output_concrete_model fmt props ~functions ~constants ~arrays =
  if get_interpretation () then begin
    Printer.print_fmt ~flushed:false fmt "@[<v 0>unknown@ ";
    Printer.print_fmt ~flushed:false fmt "@[<v 2>(model@,";
    if Options.get_model_type_constraints () then begin
      Why3CounterExample.output_constraints fmt props
    end;
    Printer.print_fmt fmt "@ ; Functions@ ";
    (* Functions are printed first so the record information they
       accumulate can be reused when printing constants. *)
    let records = SmtlibCounterExample.output_functions_counterexample
        fmt MS.empty functions in
    Printer.print_fmt fmt "@ ; Constants@ ";
    SmtlibCounterExample.output_constants_counterexample
      fmt records constants;
    SmtlibCounterExample.output_arrays_counterexample fmt arrays;
    Printer.print_fmt fmt "@]@ )";
  end;
| null |
https://raw.githubusercontent.com/OCamlPro/alt-ergo/695466427b5c3d48e92e90485b12c130c2bce2c1/src/lib/frontend/models.ml
|
ocaml
|
****************************************************************************
This file is distributed under the terms of the license indicated
in the file 'License.OCamlPro'. If 'License.OCamlPro' is not
present, please contact us to clarify licensing.
****************************************************************************
of module SmtlibCounterExample
of module Why3CounterExample
|
Alt - Ergo : The SMT Solver For Software Verification
Copyright ( C ) 2020 - 2020
open Format
open Options
module X = Shostak.Combine
module Ac = Shostak.Ac
module Ex = Explanation
module Sy = Symbols
module E = Expr
module ME = Expr.Map
module SE = Expr.Set
module MS = Map.Make(String)
let constraints = ref MS.empty
module Pp_smtlib_term = struct
let to_string_type t =
asprintf "%a" Ty.print t
let rec print fmt t =
let {E.f;xs;ty; _} = E.term_view t in
match f, xs with
| Sy.Lit lit, xs ->
begin
match lit, xs with
| Sy.L_eq, a::l ->
if get_output_smtlib () then
fprintf fmt "(= %a%a)"
print a (fun fmt -> List.iter (fprintf fmt " %a" print)) l
else
fprintf fmt "(%a%a)"
print a (fun fmt -> List.iter (fprintf fmt " = %a" print)) l
| Sy.L_neg_eq, [a; b] ->
if get_output_smtlib () then
fprintf fmt "(not (= %a %a))" print a print b
else
fprintf fmt "(%a <> %a)" print a print b
| Sy.L_neg_eq, a::l ->
if get_output_smtlib () then
fprintf fmt "(distinct %a%a)"
print a (fun fmt -> List.iter (fprintf fmt " %a" print)) l
else
fprintf fmt "distinct(%a%a)"
print a (fun fmt -> List.iter (fprintf fmt ", %a" print)) l
| Sy.L_built Sy.LE, [a;b] ->
if get_output_smtlib () then
fprintf fmt "(<= %a %a)" print a print b
else
fprintf fmt "(%a <= %a)" print a print b
| Sy.L_built Sy.LT, [a;b] ->
if get_output_smtlib () then
fprintf fmt "(< %a %a)" print a print b
else
fprintf fmt "(%a < %a)" print a print b
| Sy.L_neg_built Sy.LE, [a; b] ->
if get_output_smtlib () then
fprintf fmt "(> %a %a)" print a print b
else
fprintf fmt "(%a > %a)" print a print b
| Sy.L_neg_built Sy.LT, [a; b] ->
if get_output_smtlib () then
fprintf fmt "(>= %a %a)" print a print b
else
fprintf fmt "(%a >= %a)" print a print b
| Sy.L_neg_pred, [a] ->
fprintf fmt "(not %a)" print a
| Sy.L_built (Sy.IsConstr hs), [e] ->
if get_output_smtlib () then
fprintf fmt "((_ is %a) %a)" Hstring.print hs print e
else
fprintf fmt "(%a ? %a)" print e Hstring.print hs
| Sy.L_neg_built (Sy.IsConstr hs), [e] ->
if get_output_smtlib () then
fprintf fmt "(not ((_ is %a) %a))" Hstring.print hs print e
else
fprintf fmt "not (%a ? %a)" print e Hstring.print hs
| (Sy.L_built (Sy.LT | Sy.LE) | Sy.L_neg_built (Sy.LT | Sy.LE)
| Sy.L_neg_pred | Sy.L_eq | Sy.L_neg_eq
| Sy.L_built (Sy.IsConstr _)
| Sy.L_neg_built (Sy.IsConstr _)) , _ ->
assert false
end
| Sy.Op Sy.Get, [e1; e2] ->
if get_output_smtlib () then
fprintf fmt "(select %a %a)" print e1 print e2
else
fprintf fmt "%a[%a]" print e1 print e2
| Sy.Op Sy.Set, [e1; e2; e3] ->
if get_output_smtlib () then
fprintf fmt "(store %a %a %a)"
print e1
print e2
print e3
else
fprintf fmt "%a[%a<-%a]" print e1 print e2 print e3
| Sy.Op Sy.Concat, [e1; e2] ->
fprintf fmt "%a@@%a" print e1 print e2
| Sy.Op Sy.Extract, [e1; e2; e3] ->
fprintf fmt "%a^{%a,%a}" print e1 print e2 print e3
| Sy.Op (Sy.Access field), [e] ->
if get_output_smtlib () then
fprintf fmt "(%s %a)" (Hstring.view field) print e
else
fprintf fmt "%a.%s" print e (Hstring.view field)
| Sy.Op (Sy.Record), _ ->
begin match ty with
| Ty.Trecord { Ty.lbs = lbs; _ } ->
assert (List.length xs = List.length lbs);
fprintf fmt "{";
ignore (List.fold_left2 (fun first (field,_) e ->
fprintf fmt "%s%s = %a" (if first then "" else "; ")
(Hstring.view field) print e;
false
) true lbs xs);
fprintf fmt "}";
| _ -> assert false
end
TODO : introduce in the future to simplify this ?
| Sy.Op op, [e1; e2] when op == Sy.Pow || op == Sy.Integer_round ||
op == Sy.Max_real || op == Sy.Max_int ||
op == Sy.Min_real || op == Sy.Min_int ->
fprintf fmt "%a(%a,%a)" Sy.print f print e1 print e2
TODO : introduce in the future to simplify this ?
| Sy.Op (Sy.Constr hs), ((_::_) as l) ->
fprintf fmt "%a(%a)" Hstring.print hs print_list l
| Sy.Op _, [e1; e2] ->
if get_output_smtlib () then
fprintf fmt "(%a %a %a)" Sy.print f print e1 print e2
else
fprintf fmt "(%a %a %a)" print e1 Sy.print f print e2
| Sy.Op Sy.Destruct (hs, grded), [e] ->
fprintf fmt "%a#%s%a"
print e (if grded then "" else "!") Hstring.print hs
| Sy.In(lb, rb), [t] ->
fprintf fmt "(%a in %a, %a)" print t Sy.print_bound lb Sy.print_bound rb
| Sy.Name (n,_), l -> begin
let constraint_name =
try let constraint_name,_,_ =
(MS.find (Hstring.view n) !constraints) in
constraint_name
with _ ->
let constraint_name = "c_"^(Hstring.view n) in
constraints := MS.add (Hstring.view n)
(constraint_name,
to_string_type (E.type_info t),
List.map (fun e -> to_string_type (E.type_info e)) l
) !constraints;
constraint_name
in
match l with
| [] -> fprintf fmt "%s" constraint_name
| l ->
fprintf fmt "(%s %a)" constraint_name (Printer.pp_list_space print) l;
end
| _, [] ->
fprintf fmt "%a" Sy.print f
| _, _ ->
if get_output_smtlib () then
fprintf fmt "(%a %a)" Sy.print f print_list xs
else
fprintf fmt "%a(%a)" Sy.print f print_list xs
and print_list_sep sep fmt = function
| [] -> ()
| [t] -> print fmt t
| t::l -> Format.fprintf fmt "%a%s%a" print t sep (print_list_sep sep) l
and print_list fmt = print_list_sep "," fmt
end
module SmtlibCounterExample = struct
let x_print fmt (_ , ppr) = fprintf fmt "%s" ppr
let pp_term fmt t =
if Options.get_output_format () == Why3 then
Pp_smtlib_term.print fmt t
else
E.print fmt t
let dummy_value_of_type ty =
match ty with
Ty.Tint -> "0"
| Ty.Treal -> "0.0"
| Ty.Tbool -> "false"
| _ -> asprintf "%a" pp_term (Expr.fresh_name ty)
let pp_dummy_value_of_type fmt ty =
if not (Options.get_interpretation_use_underscore ()) then
let d = dummy_value_of_type ty in
fprintf fmt "%s " d
else
fprintf fmt "_ "
let add_records_destr records record_name destr_name rep =
let destrs =
try MS.find record_name records
with Not_found -> MS.empty
in
let destrs =
MS.add destr_name rep destrs in
MS.add record_name destrs records
let mk_records_constr records record_name
{ Ty.name = _n; record_constr = cstr; lbs = lbs; _} =
let find_destrs destr destrs =
try let rep = MS.find destr destrs in
Some rep
with Not_found -> None
in
let print_destr fmt (destrs,lbs) =
List.iter (fun (destr, ty_destr) ->
let destr = Hstring.view destr in
match find_destrs destr destrs with
| None ->
pp_dummy_value_of_type fmt ty_destr
| Some rep -> fprintf fmt "%s " rep
) lbs
in
let destrs =
try MS.find (Sy.to_string record_name) records
with Not_found -> MS.empty
in
asprintf "%s %a"
(Hstring.view cstr)
print_destr (destrs,lbs)
let add_record_constr records record_name
{ Ty.name = _n; record_constr = _cstr; lbs = lbs; _} xs_values =
List.fold_left2(fun records (destr,_) (rep,_) ->
add_records_destr
records
record_name
(Hstring.view destr)
(asprintf "%a" pp_term rep)
) records lbs xs_values
let check_records records xs_ty_named xs_values f ty rep =
match xs_ty_named with
| [Ty.Trecord _r, _arg] -> begin
match xs_values with
| [record_name,_] ->
add_records_destr
records
(asprintf "%a" Expr.print record_name)
(Sy.to_string f)
rep
| [] | _ -> records
end
| _ ->
match ty with
| Ty.Trecord r ->
add_record_constr records rep r xs_values
| _ -> records
let print_fun_def fmt name args ty t =
let print_args fmt (ty,name) =
Format.fprintf fmt "(%s %a)" name Ty.print ty in
let defined_value =
try
let res,_,_ = (MS.find (Sy.to_string name) !constraints) in res
with _ -> t
in
Printer.print_fmt ~flushed:false fmt
"(define-fun %a (%a) %a %s)@ "
Sy.print name
(Printer.pp_list_space (print_args)) args
Ty.print ty
defined_value
let output_constants_counterexample fmt records cprofs =
ModelMap.iter
(fun (f, xs_ty, ty) st ->
assert (xs_ty == []);
match ModelMap.V.elements st with
| [[], rep] ->
let rep = Format.asprintf "%a" x_print rep in
let rep =
match ty with
| Ty.Trecord r ->
let constr = mk_records_constr records f r in
sprintf "(%s)" constr
| _ -> rep
in
print_fun_def fmt f [] ty rep
| _ -> assert false
) cprofs
let output_functions_counterexample fmt records fprofs =
let records = ref records in
ModelMap.iter
(fun (f, xs_ty, ty) st ->
let xs_ty_named = List.mapi (fun i ty ->
ty,(sprintf "arg_%d" i)
) xs_ty in
let rep =
let representants =
ModelMap.V.fold (fun (xs_values,(_rep,srep)) acc ->
assert ((List.length xs_ty_named) = (List.length xs_values));
records :=
check_records !records xs_ty_named xs_values f ty srep;
let reps = try MS.find srep acc with Not_found -> [] in
MS.add srep (xs_values :: reps) acc
) st MS.empty in
let representants = MS.fold (fun srep xs_values_list acc ->
(srep,xs_values_list) :: acc) representants [] in
let rec mk_ite_and xs tys =
match xs, tys with
| [],[] -> assert false
| [xs,_],[_ty,name] ->
asprintf "(= %s %a)" name pp_term xs
| (xs,_) :: l1, (_ty,name) :: l2 ->
asprintf "(and (= %s %a) %s)"
name
pp_term xs
(mk_ite_and l1 l2)
| _, _ -> assert false
in
let mk_ite_or l =
let pp_or_list fmt xs_values =
fprintf fmt "%s" (mk_ite_and xs_values xs_ty_named)
in
match l with
| [] -> assert false
| [xs_values] -> mk_ite_and xs_values xs_ty_named
| xs_values :: l ->
asprintf "(or %s %a)"
(mk_ite_and xs_values xs_ty_named)
(Printer.pp_list_space pp_or_list) l
in
let rec reps_aux reps =
match reps with
| [] -> asprintf "%a" pp_dummy_value_of_type ty
| [srep,xs_values_list] ->
if Options.get_interpretation_use_underscore () then
asprintf "(ite %s %s %s)"
(mk_ite_or xs_values_list)
srep
(reps_aux [])
else
srep
| (srep,xs_values_list) :: l ->
asprintf "(ite %s %s %s)"
(mk_ite_or xs_values_list)
srep
(reps_aux l)
in
if List.length representants = 1 then
sprintf "%s" (fst (List.hd representants))
else
reps_aux representants
in
print_fun_def fmt f xs_ty_named ty rep;
) fprofs;
!records
let output_arrays_counterexample fmt _arrays =
Printer.print_fmt fmt "@ ; Arrays not yet supported@ "
end
module Why3CounterExample = struct
let output_constraints fmt prop_model =
let assertions = SE.fold (fun e acc ->
(asprintf "%s(assert %a)@ " acc SmtlibCounterExample.pp_term e)
) prop_model "" in
Printer.print_fmt ~flushed:false fmt "@ ; constants@ ";
MS.iter (fun _ (name,ty,args_ty) ->
match args_ty with
| [] ->
Printer.print_fmt ~flushed:false fmt "(declare-const %s %s)@ "
name ty
| l ->
Printer.print_fmt ~flushed:false fmt "(declare-fun %s (%s) %s)@ "
name
(String.concat " " l)
ty
) !constraints;
Printer.print_fmt ~flushed:false fmt "@ ; assertions@ ";
Printer.print_fmt fmt ~flushed:false "%s" assertions
end
let output_concrete_model fmt props ~functions ~constants ~arrays =
if get_interpretation () then begin
Printer.print_fmt ~flushed:false fmt "@[<v 0>unknown@ ";
Printer.print_fmt ~flushed:false fmt "@[<v 2>(model@,";
if Options.get_model_type_constraints () then begin
Why3CounterExample.output_constraints fmt props
end;
Printer.print_fmt fmt "@ ; Functions@ ";
let records = SmtlibCounterExample.output_functions_counterexample
fmt MS.empty functions in
Printer.print_fmt fmt "@ ; Constants@ ";
SmtlibCounterExample.output_constants_counterexample
fmt records constants;
SmtlibCounterExample.output_arrays_counterexample fmt arrays;
Printer.print_fmt fmt "@]@ )";
end;
|
36a421caefc95a219377ab57c73a46aa18b038ca4701b56127f3b0247dc45c41
|
AndrewMagerman/wizard-book-study
|
ch4-leval.rkt
|
;;;;LAZY EVALUATOR FROM SECTION 4.2 OF
;;;; STRUCTURE AND INTERPRETATION OF COMPUTER PROGRAMS
;;;;Matches code in ch4.scm
;;;; Also includes enlarged primitive-procedures list
;;;;This file can be loaded into Scheme as a whole.
;;;;**NOTE**This file loads the metacircular evaluator of
sections 4.1.1 - 4.1.4 , since it uses the expression representation ,
;;;; environment representation, etc.
;;;; You may need to change the (load ...) expression to work in your
;;;; version of Scheme.
;;;;**WARNING: Don't load mceval twice (or you'll lose the primitives
;;;; interface, due to renamings of apply).
;;;;Then you can initialize and start the evaluator by evaluating
the two lines at the end of the file ch4-mceval.scm
;;;; (setting up the global environment and starting the driver loop).
To run without memoization , reload the first version of force - it below
;;**implementation-dependent loading of evaluator file
Note : It is loaded first so that the section 4.2 definition
of eval overrides the definition from 4.1.1
(load "ch4-mceval.rkt")
;; Table of primitives installed in the global environment: each entry
;; pairs the name seen by evaluated programs with the underlying
;; Scheme procedure implementing it.
(define primitive-procedures
  (list (list 'car car)
        (list 'cdr cdr)
        (list 'cons cons)
        (list 'null? null?)
        (list 'list list)
        (list '+ +)
        (list '- -)
        (list '* *)
        (list '/ /)
        (list '= =)
        (list 'newline newline)
        (list 'display display)
        ;; more primitives
        ))
SECTION 4.2.2
;;; Modifying the evaluator
;; Lazy-evaluator dispatch (SICP 4.2.2).  Identical to the metacircular
;; eval of 4.1.1 except for the application clause: the operator is
;; reduced with actual-value (forcing any thunk), while the operands
;; are passed on unevaluated so apply-custom can decide whether to
;; force or delay them.
(define (eval exp env)
  (cond ((self-evaluating? exp) exp)
        ((variable? exp) (lookup-variable-value exp env))
        ((quoted? exp) (text-of-quotation exp))
        ((assignment? exp) (eval-assignment exp env))
        ((definition? exp) (eval-definition exp env))
        ((if? exp) (eval-if exp env))
        ((lambda? exp)
         (make-procedure (lambda-parameters exp)
                         (lambda-body exp)
                         env))
        ((begin? exp)
         (eval-sequence (begin-actions exp) env))
        ((cond? exp) (eval (cond->if exp) env))
        ((application? exp) ; clause from book
         (apply-custom (actual-value (operator exp) env)
                       (operands exp)
                       env))
        (else
         (error "Unknown expression type -- EVAL" exp))))
;; Evaluate an expression and force the result, so callers that need a
;; genuine value (predicates, operators, primitives) never see a thunk.
(define (actual-value exp env)
  (force-it (eval exp env)))
;; Apply with lazy argument handling: primitive procedures need real
;; values, so their operands are forced; compound procedures receive
;; thunks, so each parameter is evaluated only if (and when) the body
;; actually demands it.
(define (apply-custom procedure arguments env)
  (cond ((primitive-procedure? procedure)
         (apply-primitive-procedure
          procedure
          (list-of-arg-values arguments env))) ; changed
        ((compound-procedure? procedure)
         (eval-sequence
          (procedure-body procedure)
          (extend-environment
           (procedure-parameters procedure)
           (list-of-delayed-args arguments env) ; changed
           (procedure-environment procedure))))
        (else
         (error
          "Unknown procedure type -- APPLY" procedure))))
;; Reduce every operand to an actual (forced) value, recursing down the
;; operand list.  Used when applying a primitive procedure, which
;; cannot cope with thunks.
(define (list-of-arg-values exps env)
  (cond ((no-operands? exps) '())
        (else
         (cons (actual-value (first-operand exps) env)
               (list-of-arg-values (rest-operands exps) env)))))
;; Wrap every operand in a thunk over env, recursing down the operand
;; list.  Used when applying a compound procedure, whose parameters
;; are bound lazily.
(define (list-of-delayed-args exps env)
  (cond ((no-operands? exps) '())
        (else
         (cons (delay-it (first-operand exps) env)
               (list-of-delayed-args (rest-operands exps) env)))))
;; Lazy if: the predicate must be forced to a real boolean before a
;; branch can be chosen; the chosen branch is then evaluated normally
;; (possibly yielding a thunk).
(define (eval-if exp env)
  (if (true? (actual-value (if-predicate exp) env))
      (eval (if-consequent exp) env)
      (eval (if-alternative exp) env)))
(define input-prompt ";;; L-Eval input:")
(define output-prompt ";;; L-Eval value:")
(define (driver-loop)
(prompt-for-input input-prompt)
(let ((input (read)))
(let ((output
(actual-value input the-global-environment)))
(announce-output output-prompt)
(user-print output)))
(driver-loop))
;;; Representing thunks
;; non-memoizing version of force-it
;; Non-memoizing force: re-evaluates the thunk's expression every time
;; it is forced.  NOTE: shadowed by the memoizing force-it defined
;; later in this file; reload this definition to run without
;; memoization.
(define (force-it obj)
  (if (thunk? obj)
      (actual-value (thunk-exp obj) (thunk-env obj))
      obj))
;; thunks
;; A delayed expression is represented as the tagged list
;; (thunk <exp> <env>); nothing is evaluated at construction time.
(define (delay-it exp env)
  (list 'thunk exp env))

(define (thunk? obj)
  (tagged-list? obj 'thunk))

(define (thunk-exp thunk) (cadr thunk))
(define (thunk-env thunk) (caddr thunk))

;; "thunk" that has been forced and is storing its (memoized) value
(define (evaluated-thunk? obj)
  (tagged-list? obj 'evaluated-thunk))

(define (thunk-value evaluated-thunk) (cadr evaluated-thunk))
;; Memoizing version of force-it: the first force mutates the thunk in
;; place into an evaluated-thunk holding its result (so every shared
;; reference sees the memo), and the environment pointer is dropped so
;; it can be garbage-collected.
(define (force-it obj)
  (cond ((thunk? obj)
         (let ((result (actual-value
                        (thunk-exp obj)
                        (thunk-env obj))))
           (set-car! obj 'evaluated-thunk)
           (set-car! (cdr obj) result) ; replace exp with its value
           (set-cdr! (cdr obj) '()) ; forget unneeded env
           result))
        ((evaluated-thunk? obj)
         (thunk-value obj))
        (else obj)))
;; Confirmation token printed on load, then build the initial global
;; environment and enter the lazy read-eval-print loop.
'LAZY-EVALUATOR-LOADED

(define the-global-environment (setup-environment))

(driver-loop)
| null |
https://raw.githubusercontent.com/AndrewMagerman/wizard-book-study/77bf59e606df71281b321c5a6d1e6a400916c04d/missing_files/week_14/ch4-leval.rkt
|
racket
|
LAZY EVALUATOR FROM SECTION 4.2 OF
STRUCTURE AND INTERPRETATION OF COMPUTER PROGRAMS
Matches code in ch4.scm
Also includes enlarged primitive-procedures list
This file can be loaded into Scheme as a whole.
**NOTE**This file loads the metacircular evaluator of
environment representation, etc.
You may need to change the (load ...) expression to work in your
version of Scheme.
**WARNING: Don't load mceval twice (or you'll lose the primitives
interface, due to renamings of apply).
Then you can initialize and start the evaluator by evaluating
(setting up the global environment and starting the driver loop).
**implementation-dependent loading of evaluator file
more primitives
Modifying the evaluator
clause from book
changed
changed
Representing thunks
non-memoizing version of force-it
thunks
"thunk" that has been forced and is storing its (memoized) value
replace exp with its value
forget unneeded env
|
sections 4.1.1 - 4.1.4 , since it uses the expression representation ,
the two lines at the end of the file ch4-mceval.scm
To run without memoization , reload the first version of force - it below
Note : It is loaded first so that the section 4.2 definition
of eval overrides the definition from 4.1.1
(load "ch4-mceval.rkt")
(define primitive-procedures
(list (list 'car car)
(list 'cdr cdr)
(list 'cons cons)
(list 'null? null?)
(list 'list list)
(list '+ +)
(list '- -)
(list '* *)
(list '/ /)
(list '= =)
(list 'newline newline)
(list 'display display)
))
SECTION 4.2.2
(define (eval exp env)
(cond ((self-evaluating? exp) exp)
((variable? exp) (lookup-variable-value exp env))
((quoted? exp) (text-of-quotation exp))
((assignment? exp) (eval-assignment exp env))
((definition? exp) (eval-definition exp env))
((if? exp) (eval-if exp env))
((lambda? exp)
(make-procedure (lambda-parameters exp)
(lambda-body exp)
env))
((begin? exp)
(eval-sequence (begin-actions exp) env))
((cond? exp) (eval (cond->if exp) env))
(apply-custom (actual-value (operator exp) env)
(operands exp)
env))
(else
(error "Unknown expression type -- EVAL" exp))))
(define (actual-value exp env)
(force-it (eval exp env)))
(define (apply-custom procedure arguments env)
(cond ((primitive-procedure? procedure)
(apply-primitive-procedure
procedure
((compound-procedure? procedure)
(eval-sequence
(procedure-body procedure)
(extend-environment
(procedure-parameters procedure)
(procedure-environment procedure))))
(else
(error
"Unknown procedure type -- APPLY" procedure))))
(define (list-of-arg-values exps env)
(if (no-operands? exps)
'()
(cons (actual-value (first-operand exps) env)
(list-of-arg-values (rest-operands exps)
env))))
(define (list-of-delayed-args exps env)
(if (no-operands? exps)
'()
(cons (delay-it (first-operand exps) env)
(list-of-delayed-args (rest-operands exps)
env))))
(define (eval-if exp env)
(if (true? (actual-value (if-predicate exp) env))
(eval (if-consequent exp) env)
(eval (if-alternative exp) env)))
(define input-prompt ";;; L-Eval input:")
(define output-prompt ";;; L-Eval value:")
(define (driver-loop)
(prompt-for-input input-prompt)
(let ((input (read)))
(let ((output
(actual-value input the-global-environment)))
(announce-output output-prompt)
(user-print output)))
(driver-loop))
(define (force-it obj)
(if (thunk? obj)
(actual-value (thunk-exp obj) (thunk-env obj))
obj))
(define (delay-it exp env)
(list 'thunk exp env))
(define (thunk? obj)
(tagged-list? obj 'thunk))
(define (thunk-exp thunk) (cadr thunk))
(define (thunk-env thunk) (caddr thunk))
(define (evaluated-thunk? obj)
(tagged-list? obj 'evaluated-thunk))
(define (thunk-value evaluated-thunk) (cadr evaluated-thunk))
memoizing version of force - it
(define (force-it obj)
(cond ((thunk? obj)
(let ((result (actual-value
(thunk-exp obj)
(thunk-env obj))))
(set-car! obj 'evaluated-thunk)
result))
((evaluated-thunk? obj)
(thunk-value obj))
(else obj)))
'LAZY-EVALUATOR-LOADED
(define the-global-environment (setup-environment))
(driver-loop)
|
f443614df7873294a45be961d2a5ba20fca1e5a988201a15fc4659f532c8af33
|
webnf/webnf
|
project.clj
|
;; Leiningen build definition for the webnf.deps/logback module: pulls
;; in logback-classic plus the SLF4J bridge artifacts that reroute
;; log4j, commons-logging and java.util.logging onto SLF4J.
;; The "LOGBACK" version strings are aliases (presumably resolved by
;; the lein-modules :versions map -- TODO confirm); :upgrade false
;; keeps them from being bumped independently of logback-classic.
(defproject webnf.deps/logback "0.1.19-SNAPSHOT"
  :plugins [[lein-modules "0.3.11"]]
  :description "Basic slf4j logging config in form of a default logback.xml"
  :dependencies [[ch.qos.logback/logback-classic "1.1.6"]
                 [org.slf4j/slf4j-api "LOGBACK" :upgrade false]
                 [org.slf4j/log4j-over-slf4j "LOGBACK" :upgrade false]
                 [org.slf4j/jcl-over-slf4j "LOGBACK" :upgrade false]
                 [org.slf4j/jul-to-slf4j "LOGBACK" :upgrade false]])
| null |
https://raw.githubusercontent.com/webnf/webnf/6a2ccaa755e6e40528eb13a5c36bae16ba4947e7/deps.logback/project.clj
|
clojure
|
(defproject webnf.deps/logback "0.1.19-SNAPSHOT"
:plugins [[lein-modules "0.3.11"]]
:description "Basic slf4j logging config in form of a default logback.xml"
:dependencies [[ch.qos.logback/logback-classic "1.1.6"]
[org.slf4j/slf4j-api "LOGBACK" :upgrade false]
[org.slf4j/log4j-over-slf4j "LOGBACK" :upgrade false]
[org.slf4j/jcl-over-slf4j "LOGBACK" :upgrade false]
[org.slf4j/jul-to-slf4j "LOGBACK" :upgrade false]])
|
|
250f2872174af7941fd137294aef18d03ff25ba0706223d556bd34339e151447
|
mirage/wodan
|
wodan_irmin_cli.ml
|
(********************************************************************************)
Copyright 2017 - 2019 Gabriel de Perthuis < >
(* *)
(* Permission to use, copy, modify, and/or distribute this software for any *)
(* purpose with or without fee is hereby granted, provided that the above *)
(* copyright notice and this permission notice appear in all copies. *)
(* *)
THE SOFTWARE IS PROVIDED " AS IS " AND THE AUTHOR DISCLAIMS ALL WARRANTIES
(* WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF *)
(* MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR *)
(* ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES *)
WHATSOEVER RESULTING FROM LOSS OF USE , DATA OR PROFITS , WHETHER IN AN
(* ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR *)
(* IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. *)
(* *)
(********************************************************************************)
open Irmin_unix
(* RAM-backed block device: adapts Ramdisk to the connect/discard
   signature Wodan expects; discard is a no-op success for RAM
   storage. *)
module RamBlockCon = struct
  include Ramdisk
  let connect name = Ramdisk.connect ~name
  let discard _ _ _ = Lwt.return (Ok ())
end

(* In-memory Wodan store builder with the standard superblock layout. *)
module DB_ram =
  Wodan_irmin.DB_BUILDER (RamBlockCon) (Wodan_irmin.StandardSuperblockParams)

(* File-backed block device; [connect] takes the backing file path
   (presumably the mirage Block Unix implementation -- TODO confirm). *)
module FileBlockCon = struct
  include Block
  let connect name = Block.connect name
end

(* File-backed Wodan store builder with the standard superblock layout. *)
module DB_fs =
  Wodan_irmin.DB_BUILDER (FileBlockCon) (Wodan_irmin.StandardSuperblockParams)
(* Register both wodan store flavours with irmin-unix's resolver so
   the CLI can select them by scheme name: "wodan-mem" (RAM-backed)
   and "wodan" (file-backed, the default). *)
let _ =
  Resolver.Store.add "wodan-mem"
    (Resolver.Store.Variable_hash
       (fun hash contents ->
          Resolver.Store.v ?remote:None
            (module Wodan_irmin.KV (DB_ram) ((val hash)) ((val contents))
            : Irmin.S)));
  Resolver.Store.add "wodan" ~default:true
    (Resolver.Store.Variable_hash
       (fun hash contents ->
          Resolver.Store.v ?remote:None
            (module Wodan_irmin.KV (DB_fs) ((val hash)) ((val contents))
            : Irmin.S)))

(* Entry point: run the stock irmin CLI with its default command set. *)
let () = Cli.(run ~default commands)
| null |
https://raw.githubusercontent.com/mirage/wodan/fd70abdb45fa176557178435217e0ab114e4e4d0/src/wodan-irmin/bin/wodan_irmin_cli.ml
|
ocaml
|
******************************************************************************
Permission to use, copy, modify, and/or distribute this software for any
purpose with or without fee is hereby granted, provided that the above
copyright notice and this permission notice appear in all copies.
WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR
IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
******************************************************************************
|
Copyright 2017 - 2019 Gabriel de Perthuis < >
THE SOFTWARE IS PROVIDED " AS IS " AND THE AUTHOR DISCLAIMS ALL WARRANTIES
WHATSOEVER RESULTING FROM LOSS OF USE , DATA OR PROFITS , WHETHER IN AN
open Irmin_unix
module RamBlockCon = struct
include Ramdisk
let connect name = Ramdisk.connect ~name
let discard _ _ _ = Lwt.return (Ok ())
end
module DB_ram =
Wodan_irmin.DB_BUILDER (RamBlockCon) (Wodan_irmin.StandardSuperblockParams)
module FileBlockCon = struct
include Block
let connect name = Block.connect name
end
module DB_fs =
Wodan_irmin.DB_BUILDER (FileBlockCon) (Wodan_irmin.StandardSuperblockParams)
let _ =
Resolver.Store.add "wodan-mem"
(Resolver.Store.Variable_hash
(fun hash contents ->
Resolver.Store.v ?remote:None
(module Wodan_irmin.KV (DB_ram) ((val hash)) ((val contents))
: Irmin.S)));
Resolver.Store.add "wodan" ~default:true
(Resolver.Store.Variable_hash
(fun hash contents ->
Resolver.Store.v ?remote:None
(module Wodan_irmin.KV (DB_fs) ((val hash)) ((val contents))
: Irmin.S)))
let () = Cli.(run ~default commands)
|
76ca4e5c7e48154d979ed573a05ef1573fe204f07ce8ffa01529344faef288ee
|
bmourad01/ocamlchess
|
uci.ml
|
module M = Mutex
module T = Thread
open Core_kernel [@@warning "-D"]
open Chess
open Bap_future.Std
open Monads.Std
module Child = Position.Child
module Histogram = Position.Histogram
module Line = Search.Result.Line
module State = struct
module T = struct
type t = {
pos : Position.t;
histogram : Position.histogram;
tt : Search.tt;
stop : unit promise option;
ponder : unit promise option;
debug : bool;
book : Book.t option;
} [@@deriving fields]
end
include T
include Monad.State.Make(T)(Monad.Ident)
include Monad.State.T1(T)(Monad.Ident)
Update the position and histogram . If this is a new game , then
clear the histogram and TT .
clear the histogram and TT. *)
  (* Replace the current position with [pos] and count it in the
     repetition histogram. When [new_game] is set, the histogram is
     restarted from empty and the transposition table is cleared. *)
  let set_position ?(new_game = false) pos = update @@ fun st ->
    let histogram =
      (* Start from a fresh histogram on a new game, otherwise extend
         the one accumulated so far. *)
      let h = if new_game then Histogram.empty else st.histogram in
      Histogram.incr h pos in
    if new_game then Search.Tt.clear st.tt;
    {st with pos; histogram}
  (* Apply move [m] to the current position and store the resulting
     child position. An illegal move is a protocol violation by the
     GUI, so it fails hard with a diagnostic. *)
  let play_move m = gets pos >>= fun pos ->
    match Position.make_move pos m with
    | Some m -> set_position @@ Child.self m
    | None ->
      failwithf "Received illegal move %s for position %s\n%!"
        (Move.to_string m) (Position.Fen.to_string pos) ()
let clear_tt = gets @@ fun {tt; _} -> Search.Tt.clear tt
let set_debug debug = update @@ fun st -> {st with debug}
let new_stop () =
let f, p = Future.create () in
begin update @@ fun st ->
{st with stop = Some p}
end >>| fun () -> f
let new_ponder_when = function
| false -> return None
| true ->
let f, p = Future.create () in
begin update @@ fun st ->
{st with ponder = Some p}
end >>| fun () -> Some f
end
open State.Syntax
type 'a state = 'a State.t
let return = State.return
let cont () = return true
let finish () = return false
module Options = struct
module T = Uci.Send.Option.Type
(* Artificial type to resolve ambiguity between the Combo and String
constructors. *)
type combo = {v : string} [@@unboxed]
type _ t =
| Spin : {spin : T.spin; mutable value : int} -> int t
| Check : {default : bool; mutable value : bool} -> bool t
| Combo : {combo : T.combo; mutable value : string} -> combo t
| String : {default : string; mutable value : string} -> string t
| Button : unit t
let to_uci : type a. a t -> T.t = function
| Spin {spin; _} -> T.Spin spin
| Check {default; _} -> T.Check default
| Combo {combo; _} -> T.Combo combo
| String {default; _} -> T.String default
| Button -> T.Button
type 'a callback = 'a t -> 'a -> unit state
module Callbacks = struct
let spin : int t -> int -> unit state = fun (Spin c) n ->
return (c.value <- T.clamp n c.spin)
let check : bool t -> bool -> unit state = fun (Check c) b ->
return (c.value <- b)
let combo : combo t -> combo -> unit state = fun (Combo c) {v} ->
return @@ if T.is_var v c.combo then c.value <- v
let string : string t -> string -> unit state = fun (String s) v ->
return (s.value <- v)
let button : unit state -> (unit t -> unit -> unit state) = fun x ->
fun Button () -> x
end
let parse ~name ~value ~f = match value with
| None -> failwithf "Expected value for option %s" name ()
| Some value -> try f value with _ ->
failwithf "Failed to parse value %s for option %s" value name ()
let call :
type a.
a t ->
a callback ->
name:string ->
value:string option ->
unit state = fun t callback ~name ~value -> match t with
| Spin _ -> callback t @@ parse ~name ~value ~f:Int.of_string
| Check _ -> callback t @@ parse ~name ~value ~f:Bool.of_string
| Combo _ -> callback t @@ parse ~name ~value ~f:(fun v -> {v})
| String _ -> callback t @@ parse ~name ~value ~f:Fn.id
| Button -> callback t ()
type entry = E : 'a t * 'a callback -> entry
let spin s = E (Spin {spin = s; value = s.default}, Callbacks.spin)
let check c = E (Check {default = c; value = c}, Callbacks.check)
let combo c = E (Combo {combo = c; value = c.default}, Callbacks.combo)
let string s = E (String {default = s; value = s}, Callbacks.string)
let button c = E (Button, Callbacks.button c)
module Defaults = struct
let ponder = false
let own_book = false
let book_random = false
let book_path = "book.bin"
let multi_pv = T.{default = 1; min = 1; max = 500}
end
let tbl = Hashtbl.of_alist_exn (module String) [
"MultiPV", spin Defaults.multi_pv;
"Ponder", check Defaults.ponder;
"OwnBook", check Defaults.own_book;
"BookRandom", check Defaults.book_random;
"BookPath", string Defaults.book_path;
"Clear Hash", button State.clear_tt;
]
let spin_value name = match Hashtbl.find_exn tbl name with
| E (Spin {value; _}, _) -> value
| _ -> assert false
let check_value name = match Hashtbl.find_exn tbl name with
| E (Check {value; _}, _) -> value
| _ -> assert false
let combo_value name = match Hashtbl.find_exn tbl name with
| E (Combo {value; _}, _) -> value
| _ -> assert false
let string_value name = match Hashtbl.find_exn tbl name with
| E (String {value; _}, _) -> value
| _ -> assert false
end
let info_str s = Format.printf "%a\n%!" Uci.Send.pp @@ Info [String s]
module Book = struct
  (* Just compare the file paths. We could have a stronger notion of
     equivalence, such as the md5sums of either file. *)
let same_book b path = String.(path = Book.filename b)
let load_book () =
if Options.check_value "OwnBook" then
let path = Options.string_value "BookPath" in
State.(gets book) >>= function
| Some b when same_book b path -> return @@ Some b
| Some _ | None ->
info_str "Loading Book";
match Book.create path with
| exception exn ->
failwithf "Error loading book: %s" (Exn.to_string exn) ()
| b -> State.(update @@ fun st -> {
st with book = Some b
}) >>| fun () ->
info_str "Book Loaded";
Some b
else return None
let book_move m =
let open Uci.Send in
info_str "Book Move";
Format.printf "%a\n%!" pp @@ Bestmove (Some Bestmove.{
move = Child.move m;
ponder = None
})
let run () =
load_book () >>= function
| None -> return false
| Some book ->
State.(gets pos) >>| fun pos ->
let random = Options.check_value "BookRandom" in
match Book.lookup book pos ~random with
| Ok m -> book_move m; true
| Error _ -> false
end
let uci =
let open Uci.Send in
let id = [
Id (`name (sprintf "ocamlchess v%d.%d" Version.major Version.minor));
Id (`author "Benjamin Mourad");
] in
let opt name t = Option.{name; typ = Options.to_uci t} in
fun () ->
List.iter id ~f:(fun cmd -> Format.printf "%a\n%!" pp cmd);
Hashtbl.iteri Options.tbl ~f:(fun ~key:name ~data:Options.(E (t, _)) ->
Format.printf "%a\n%!" Option.pp @@ opt name t);
Format.printf "%a\n%!" pp Uciok
let isready () =
Book.load_book () >>| fun _ ->
Format.printf "%a\n%!" Uci.Send.pp Readyok
let setoption ({name; value} : Uci.Recv.Setoption.t) =
let open Uci.Recv.Setoption in
match Hashtbl.find Options.tbl name with
| None -> return @@ Format.printf "No such option: %s\n%!" name
| Some Options.(E (t, callback)) -> Options.call t callback ~name ~value
let ucinewgame = State.set_position Position.start ~new_game:true
let position pos moves = match Position.Valid.check pos with
| Ok () ->
State.set_position pos >>= fun () ->
State.(List.iter moves ~f:play_move)
| Error err ->
failwithf "Received invalid position %s: %s\n%!"
(Position.Fen.to_string pos) (Position.Valid.Error.to_string err) ()
module Search_thread = struct
let t = Atomic.make None
let c = Condition.create ()
let m = M.create ()
let signal ps =
M.lock m;
List.iter ps ~f:(Option.iter ~f:(fun p -> Promise.fulfill p ()));
Condition.signal c;
M.unlock m
  (* Block the calling thread until [stop] is decided (or, when a
     [ponder] future is given, until either [stop] or [ponder] is
     decided). Waits on condition variable [c] under mutex [m];
     [signal] fulfills the promises and then wakes this waiter. *)
  let wait stop ponder =
    let open Future in
    (* Choose the wake-up predicate once, outside the wait loop. *)
    let cond = match ponder with
      | Some ponder -> fun () -> is_decided stop || is_decided ponder
      | None -> fun () -> is_decided stop in
    M.lock m;
    (* Re-check the predicate after every wakeup (spurious wakeups). *)
    while not @@ cond () do Condition.wait c m done;
    M.unlock m
  (* For each iteration in the search, send a UCI `info` command about
     the search. *)
let info_of_result root tt result =
let depth = Search.Result.depth result in
let time = max 1 @@ Search.Result.time result in
let nodes = Search.Result.nodes result in
let nps = (nodes * 1000) / time in
Search.Result.lines result |> List.iteri ~f:(fun i line ->
let pv = Line.pv line |> List.map ~f:Child.move in
let score = Line.score line in
let seldepth = Line.seldepth line in
Format.printf "%a\n%!" Uci.Send.pp @@ Info Uci.Send.Info.[
Uci.Send.Info.Depth depth;
Seldepth seldepth;
Multipv (i + 1);
Score score;
Nodes nodes;
Nps nps;
Time time;
Pv pv;
])
  (* Print the UCI `bestmove` line for a finished search. The move is
     the head of the principal variation; when the PV has a second
     move, it is offered as the ponder move. An absent or empty PV
     yields a `bestmove` with no move. *)
  let bestmove result =
    let make ?p m =
      let move = Child.move m in
      let ponder = Option.map p ~f:Child.move in
      Uci.Send.Bestmove.{move; ponder} in
    let bestmove =
      Search.Result.pv result |>
      Option.bind ~f:(fun line -> match Line.pv line with
        | m :: p :: _ -> Some (make m ~p)
        | [m] -> Some (make m)
        | [] -> None) in
    Format.printf "%a\n%!" Uci.Send.pp @@ Bestmove bestmove
let currmove child ~n ~depth =
let m = Child.move child in
Format.printf "%a\n%!" Uci.Send.pp @@ Info [
Depth depth;
Currmove m;
Currmovenumber n;
]
(* The main search routine, should be run in a separate thread. *)
let search ~root ~limits ~histogram ~tt ~stop ~ponder =
let result = try
let iter = info_of_result root tt in
Search.go () ~root ~limits ~histogram ~tt ~ponder ~iter ~currmove
with exn ->
Format.eprintf "Search encountered an exception: %a\n%!" Exn.pp exn;
Err.exit () in
    (* The UCI protocol says that `infinite` and `ponder` searches must
       wait for a corresponding `stop` or `ponderhit` command before
       sending `bestmove`. *)
if Search.Limits.infinite limits then wait stop ponder;
(* Output the result. *)
bestmove result;
(* Thread completed. *)
Atomic.set t None
(* Abort if there's already a thread running. *)
let check =
State.(gets stop) >>= fun stop ->
State.(gets ponder) >>| fun ponder ->
Atomic.get t |> Option.iter ~f:(fun t ->
signal [stop; ponder];
T.join t;
failwith
"Error: tried to start a new search while the previous one is \
still running")
let start ~root ~limits ~histogram ~tt ~stop ~ponder =
Atomic.set t @@ Option.return @@
T.create (fun () -> search ~root ~limits ~histogram ~tt ~stop ~ponder) ()
end
module Go = struct
type t = {
mutable infinite : bool;
mutable nodes : int option;
mutable mate : int option;
mutable depth : int option;
mutable movetime : int option;
mutable wtime : int option;
mutable btime : int option;
mutable winc : int option;
mutable binc : int option;
mutable movestogo : int option;
mutable ponder : bool;
mutable moves : Move.t list;
} [@@deriving fields]
let create () =
Fields.create
~infinite:false
~nodes:None
~mate:None
~depth:None
~movetime:None
~wtime:None
~btime:None
~winc:None
~binc:None
~movestogo:None
~ponder:false
~moves:[]
let opt t v name ~f ~g = match g t with
| Some _ -> failwithf "Error in go command: duplicate option '%s'" name ()
| None -> f t @@ Some v
let lst t v name ~f ~g = match g t with
| _ :: _ -> failwithf "Error in go command: duplicate option '%s'" name ()
| [] -> f t v
let new_limits t active stop =
Search.Limits.create () ~active ~stop
~nodes:t.nodes
~mate:t.mate
~depth:t.depth
~movetime:t.movetime
~movestogo:t.movestogo
~wtime:t.wtime
~btime:t.btime
~binc:t.binc
~infinite:t.infinite
~moves:t.moves
~multipv:(Options.spin_value "MultiPV")
let parse (g : Uci.Recv.Go.t list) =
let t = create () in
(* As a hack, ponder mode will initially be set up as an infinite search.
Then, when the ponderhit command is sent, the search can continue with
the normal limits. *)
let pondering () = t.ponder <- true; t.infinite <- true in
    (* If no parameters were given, then assume an infinite search. This
       is how Stockfish behaves. To be fair, the UCI protocol is very
       underspecified and underdocumented. It begs the question as to
       why it's still so widely supported. *)
if not @@ List.is_empty g then List.iter g ~f:(function
| Nodes n -> opt t n "nodes" ~f:set_nodes ~g:nodes
| Mate n -> opt t n "mate" ~f:set_mate ~g:mate
| Depth n -> opt t n "depth" ~f:set_depth ~g:depth
| Movetime n -> opt t n "movetime" ~f:set_movetime ~g:movetime
| Wtime n -> opt t n "wtime" ~f:set_wtime ~g:wtime
| Btime n -> opt t n "btime" ~f:set_btime ~g:btime
| Winc n -> opt t n "winc" ~f:set_winc ~g:winc
| Binc n -> opt t n "binc" ~f:set_binc ~g:binc
| Movestogo n -> opt t n "movestogo" ~f:set_movestogo ~g:movestogo
| Searchmoves l -> lst t l "searchmoves" ~f:set_moves ~g:moves
| Infinite -> t.infinite <- true
| Ponder -> pondering ())
else t.infinite <- true; t
let run g = Search_thread.check >>= Book.run >>= function
| true -> return ()
| false ->
(* Parse the arguments to the command *)
let t = parse g in
State.(gets pos) >>= fun root ->
State.new_stop () >>= fun stop ->
let limits = new_limits t (Position.active root) stop in
(* Start the search. *)
State.new_ponder_when t.ponder >>= fun ponder ->
State.(gets histogram) >>= fun histogram ->
State.(gets tt) >>| fun tt ->
Search_thread.start ~root ~limits ~histogram ~tt ~stop ~ponder
end
let stop = State.update @@ function
| {stop = (Some _ as p); _} as st ->
Search_thread.signal [p]; {st with stop = None}
| st -> st
let ponderhit = State.update @@ function
| {ponder = (Some _ as p); _} as st ->
Search_thread.signal [p]; {st with ponder = None}
| st -> st
(* This is free software, so no need to register! *)
let register _ = return ()
(* Interprets a command. Returns true if the main UCI loop shall continue. *)
let recv cmd = match (cmd : Uci.Recv.t) with
| Uci -> cont @@ uci ()
| Isready -> isready () >>= cont
| Setoption opt -> setoption opt >>= cont
| Ucinewgame -> ucinewgame >>= cont
| Position (`fen pos, moves) -> position pos moves >>= cont
| Position (`startpos, moves) -> position Position.start moves >>= cont
| Go g -> Go.run g >>= cont
| Stop -> stop >>= cont
| Quit -> finish ()
| Ponderhit -> ponderhit >>= cont
| Debug `off -> State.set_debug false >>= cont
| Debug `on -> State.set_debug true >>= cont
| Register r -> register r >>= cont
(* Main loop: read one UCI command per line from stdin until EOF or a
   command handler asks to stop (e.g. `quit`). Blank lines are ignored;
   unparsable lines are reported and skipped. *)
let rec loop () = match In_channel.(input_line stdin) with
  | None -> return ()
  | Some "" -> loop ()
  | Some line -> match Uci.Recv.of_string line with
    | None -> loop @@ Format.printf "Invalid command: %s\n%!" line
    | Some cmd -> recv cmd >>= function
      | false -> return ()
      | true -> loop ()
(* Default histogram has the starting position. *)
let histogram = Histogram.singleton Position.start
let exec () =
let st =
Monad.State.exec (loop ()) @@
State.Fields.create ~histogram
~pos:Position.start
~tt:(Search.Tt.create ())
~stop:None
~ponder:None
~debug:false
~book:None in
State.[stop st; ponder st]
(* Entry point. *)
let run () =
(* Run the main interpreter loop. *)
let ps = try exec () with Failure msg ->
Format.eprintf "%s\n%!" msg;
Err.exit () in
(* Stop the search thread. *)
Atomic.get Search_thread.t |>
Option.iter ~f:(fun t ->
Search_thread.signal ps;
T.join t);
| null |
https://raw.githubusercontent.com/bmourad01/ocamlchess/0496db8ad7dddd5a048fb4868aacff221a706238/chess/bin/uci.ml
|
ocaml
|
Artificial type to resolve ambiguity between the Combo and String
constructors.
The main search routine, should be run in a separate thread.
Output the result.
Thread completed.
Abort if there's already a thread running.
As a hack, ponder mode will initially be set up as an infinite search.
Then, when the ponderhit command is sent, the search can continue with
the normal limits.
Parse the arguments to the command
Start the search.
This is free software, so no need to register!
Main loop.
Default histogram has the starting position.
Entry point.
Run the main interpreter loop.
Stop the search thread.
|
module M = Mutex
module T = Thread
open Core_kernel [@@warning "-D"]
open Chess
open Bap_future.Std
open Monads.Std
module Child = Position.Child
module Histogram = Position.Histogram
module Line = Search.Result.Line
module State = struct
module T = struct
type t = {
pos : Position.t;
histogram : Position.histogram;
tt : Search.tt;
stop : unit promise option;
ponder : unit promise option;
debug : bool;
book : Book.t option;
} [@@deriving fields]
end
include T
include Monad.State.Make(T)(Monad.Ident)
include Monad.State.T1(T)(Monad.Ident)
  (* Update the position and histogram. If this is a new game, then
     clear the histogram and TT. *)
let set_position ?(new_game = false) pos = update @@ fun st ->
let histogram =
let h = if new_game then Histogram.empty else st.histogram in
Histogram.incr h pos in
if new_game then Search.Tt.clear st.tt;
{st with pos; histogram}
let play_move m = gets pos >>= fun pos ->
match Position.make_move pos m with
| Some m -> set_position @@ Child.self m
| None ->
failwithf "Received illegal move %s for position %s\n%!"
(Move.to_string m) (Position.Fen.to_string pos) ()
let clear_tt = gets @@ fun {tt; _} -> Search.Tt.clear tt
let set_debug debug = update @@ fun st -> {st with debug}
let new_stop () =
let f, p = Future.create () in
begin update @@ fun st ->
{st with stop = Some p}
end >>| fun () -> f
let new_ponder_when = function
| false -> return None
| true ->
let f, p = Future.create () in
begin update @@ fun st ->
{st with ponder = Some p}
end >>| fun () -> Some f
end
open State.Syntax
type 'a state = 'a State.t
let return = State.return
let cont () = return true
let finish () = return false
module Options = struct
module T = Uci.Send.Option.Type
type combo = {v : string} [@@unboxed]
type _ t =
| Spin : {spin : T.spin; mutable value : int} -> int t
| Check : {default : bool; mutable value : bool} -> bool t
| Combo : {combo : T.combo; mutable value : string} -> combo t
| String : {default : string; mutable value : string} -> string t
| Button : unit t
let to_uci : type a. a t -> T.t = function
| Spin {spin; _} -> T.Spin spin
| Check {default; _} -> T.Check default
| Combo {combo; _} -> T.Combo combo
| String {default; _} -> T.String default
| Button -> T.Button
type 'a callback = 'a t -> 'a -> unit state
module Callbacks = struct
let spin : int t -> int -> unit state = fun (Spin c) n ->
return (c.value <- T.clamp n c.spin)
let check : bool t -> bool -> unit state = fun (Check c) b ->
return (c.value <- b)
let combo : combo t -> combo -> unit state = fun (Combo c) {v} ->
return @@ if T.is_var v c.combo then c.value <- v
let string : string t -> string -> unit state = fun (String s) v ->
return (s.value <- v)
let button : unit state -> (unit t -> unit -> unit state) = fun x ->
fun Button () -> x
end
let parse ~name ~value ~f = match value with
| None -> failwithf "Expected value for option %s" name ()
| Some value -> try f value with _ ->
failwithf "Failed to parse value %s for option %s" value name ()
let call :
type a.
a t ->
a callback ->
name:string ->
value:string option ->
unit state = fun t callback ~name ~value -> match t with
| Spin _ -> callback t @@ parse ~name ~value ~f:Int.of_string
| Check _ -> callback t @@ parse ~name ~value ~f:Bool.of_string
| Combo _ -> callback t @@ parse ~name ~value ~f:(fun v -> {v})
| String _ -> callback t @@ parse ~name ~value ~f:Fn.id
| Button -> callback t ()
type entry = E : 'a t * 'a callback -> entry
let spin s = E (Spin {spin = s; value = s.default}, Callbacks.spin)
let check c = E (Check {default = c; value = c}, Callbacks.check)
let combo c = E (Combo {combo = c; value = c.default}, Callbacks.combo)
let string s = E (String {default = s; value = s}, Callbacks.string)
let button c = E (Button, Callbacks.button c)
module Defaults = struct
let ponder = false
let own_book = false
let book_random = false
let book_path = "book.bin"
let multi_pv = T.{default = 1; min = 1; max = 500}
end
let tbl = Hashtbl.of_alist_exn (module String) [
"MultiPV", spin Defaults.multi_pv;
"Ponder", check Defaults.ponder;
"OwnBook", check Defaults.own_book;
"BookRandom", check Defaults.book_random;
"BookPath", string Defaults.book_path;
"Clear Hash", button State.clear_tt;
]
let spin_value name = match Hashtbl.find_exn tbl name with
| E (Spin {value; _}, _) -> value
| _ -> assert false
let check_value name = match Hashtbl.find_exn tbl name with
| E (Check {value; _}, _) -> value
| _ -> assert false
let combo_value name = match Hashtbl.find_exn tbl name with
| E (Combo {value; _}, _) -> value
| _ -> assert false
let string_value name = match Hashtbl.find_exn tbl name with
| E (String {value; _}, _) -> value
| _ -> assert false
end
let info_str s = Format.printf "%a\n%!" Uci.Send.pp @@ Info [String s]
module Book = struct
  (* Just compare the file paths. We could have a stronger notion of
     equivalence, such as the md5sums of either file. *)
let same_book b path = String.(path = Book.filename b)
let load_book () =
if Options.check_value "OwnBook" then
let path = Options.string_value "BookPath" in
State.(gets book) >>= function
| Some b when same_book b path -> return @@ Some b
| Some _ | None ->
info_str "Loading Book";
match Book.create path with
| exception exn ->
failwithf "Error loading book: %s" (Exn.to_string exn) ()
| b -> State.(update @@ fun st -> {
st with book = Some b
}) >>| fun () ->
info_str "Book Loaded";
Some b
else return None
let book_move m =
let open Uci.Send in
info_str "Book Move";
Format.printf "%a\n%!" pp @@ Bestmove (Some Bestmove.{
move = Child.move m;
ponder = None
})
let run () =
load_book () >>= function
| None -> return false
| Some book ->
State.(gets pos) >>| fun pos ->
let random = Options.check_value "BookRandom" in
match Book.lookup book pos ~random with
| Ok m -> book_move m; true
| Error _ -> false
end
let uci =
let open Uci.Send in
let id = [
Id (`name (sprintf "ocamlchess v%d.%d" Version.major Version.minor));
Id (`author "Benjamin Mourad");
] in
let opt name t = Option.{name; typ = Options.to_uci t} in
fun () ->
List.iter id ~f:(fun cmd -> Format.printf "%a\n%!" pp cmd);
Hashtbl.iteri Options.tbl ~f:(fun ~key:name ~data:Options.(E (t, _)) ->
Format.printf "%a\n%!" Option.pp @@ opt name t);
Format.printf "%a\n%!" pp Uciok
let isready () =
Book.load_book () >>| fun _ ->
Format.printf "%a\n%!" Uci.Send.pp Readyok
let setoption ({name; value} : Uci.Recv.Setoption.t) =
let open Uci.Recv.Setoption in
match Hashtbl.find Options.tbl name with
| None -> return @@ Format.printf "No such option: %s\n%!" name
| Some Options.(E (t, callback)) -> Options.call t callback ~name ~value
let ucinewgame = State.set_position Position.start ~new_game:true
let position pos moves = match Position.Valid.check pos with
| Ok () ->
State.set_position pos >>= fun () ->
State.(List.iter moves ~f:play_move)
| Error err ->
failwithf "Received invalid position %s: %s\n%!"
(Position.Fen.to_string pos) (Position.Valid.Error.to_string err) ()
module Search_thread = struct
let t = Atomic.make None
let c = Condition.create ()
let m = M.create ()
let signal ps =
M.lock m;
List.iter ps ~f:(Option.iter ~f:(fun p -> Promise.fulfill p ()));
Condition.signal c;
M.unlock m
let wait stop ponder =
let open Future in
let cond = match ponder with
| Some ponder -> fun () -> is_decided stop || is_decided ponder
| None -> fun () -> is_decided stop in
M.lock m;
while not @@ cond () do Condition.wait c m done;
M.unlock m
  (* For each iteration in the search, send a UCI `info` command about
     the search. *)
let info_of_result root tt result =
let depth = Search.Result.depth result in
let time = max 1 @@ Search.Result.time result in
let nodes = Search.Result.nodes result in
let nps = (nodes * 1000) / time in
Search.Result.lines result |> List.iteri ~f:(fun i line ->
let pv = Line.pv line |> List.map ~f:Child.move in
let score = Line.score line in
let seldepth = Line.seldepth line in
Format.printf "%a\n%!" Uci.Send.pp @@ Info Uci.Send.Info.[
Uci.Send.Info.Depth depth;
Seldepth seldepth;
Multipv (i + 1);
Score score;
Nodes nodes;
Nps nps;
Time time;
Pv pv;
])
let bestmove result =
let make ?p m =
let move = Child.move m in
let ponder = Option.map p ~f:Child.move in
Uci.Send.Bestmove.{move; ponder} in
let bestmove =
Search.Result.pv result |>
Option.bind ~f:(fun line -> match Line.pv line with
| m :: p :: _ -> Some (make m ~p)
| [m] -> Some (make m)
| [] -> None) in
Format.printf "%a\n%!" Uci.Send.pp @@ Bestmove bestmove
let currmove child ~n ~depth =
let m = Child.move child in
Format.printf "%a\n%!" Uci.Send.pp @@ Info [
Depth depth;
Currmove m;
Currmovenumber n;
]
let search ~root ~limits ~histogram ~tt ~stop ~ponder =
let result = try
let iter = info_of_result root tt in
Search.go () ~root ~limits ~histogram ~tt ~ponder ~iter ~currmove
with exn ->
Format.eprintf "Search encountered an exception: %a\n%!" Exn.pp exn;
Err.exit () in
    (* The UCI protocol says that `infinite` and `ponder` searches must
       wait for a corresponding `stop` or `ponderhit` command before
       sending `bestmove`. *)
if Search.Limits.infinite limits then wait stop ponder;
bestmove result;
Atomic.set t None
let check =
State.(gets stop) >>= fun stop ->
State.(gets ponder) >>| fun ponder ->
Atomic.get t |> Option.iter ~f:(fun t ->
signal [stop; ponder];
T.join t;
failwith
"Error: tried to start a new search while the previous one is \
still running")
let start ~root ~limits ~histogram ~tt ~stop ~ponder =
Atomic.set t @@ Option.return @@
T.create (fun () -> search ~root ~limits ~histogram ~tt ~stop ~ponder) ()
end
module Go = struct
type t = {
mutable infinite : bool;
mutable nodes : int option;
mutable mate : int option;
mutable depth : int option;
mutable movetime : int option;
mutable wtime : int option;
mutable btime : int option;
mutable winc : int option;
mutable binc : int option;
mutable movestogo : int option;
mutable ponder : bool;
mutable moves : Move.t list;
} [@@deriving fields]
let create () =
Fields.create
~infinite:false
~nodes:None
~mate:None
~depth:None
~movetime:None
~wtime:None
~btime:None
~winc:None
~binc:None
~movestogo:None
~ponder:false
~moves:[]
let opt t v name ~f ~g = match g t with
| Some _ -> failwithf "Error in go command: duplicate option '%s'" name ()
| None -> f t @@ Some v
let lst t v name ~f ~g = match g t with
| _ :: _ -> failwithf "Error in go command: duplicate option '%s'" name ()
| [] -> f t v
let new_limits t active stop =
Search.Limits.create () ~active ~stop
~nodes:t.nodes
~mate:t.mate
~depth:t.depth
~movetime:t.movetime
~movestogo:t.movestogo
~wtime:t.wtime
~btime:t.btime
~binc:t.binc
~infinite:t.infinite
~moves:t.moves
~multipv:(Options.spin_value "MultiPV")
let parse (g : Uci.Recv.Go.t list) =
let t = create () in
let pondering () = t.ponder <- true; t.infinite <- true in
    (* If no parameters were given, then assume an infinite search. This
       is how Stockfish behaves. To be fair, the UCI protocol is very
       underspecified and underdocumented. It begs the question as to
       why it's still so widely supported. *)
if not @@ List.is_empty g then List.iter g ~f:(function
| Nodes n -> opt t n "nodes" ~f:set_nodes ~g:nodes
| Mate n -> opt t n "mate" ~f:set_mate ~g:mate
| Depth n -> opt t n "depth" ~f:set_depth ~g:depth
| Movetime n -> opt t n "movetime" ~f:set_movetime ~g:movetime
| Wtime n -> opt t n "wtime" ~f:set_wtime ~g:wtime
| Btime n -> opt t n "btime" ~f:set_btime ~g:btime
| Winc n -> opt t n "winc" ~f:set_winc ~g:winc
| Binc n -> opt t n "binc" ~f:set_binc ~g:binc
| Movestogo n -> opt t n "movestogo" ~f:set_movestogo ~g:movestogo
| Searchmoves l -> lst t l "searchmoves" ~f:set_moves ~g:moves
| Infinite -> t.infinite <- true
| Ponder -> pondering ())
else t.infinite <- true; t
let run g = Search_thread.check >>= Book.run >>= function
| true -> return ()
| false ->
let t = parse g in
State.(gets pos) >>= fun root ->
State.new_stop () >>= fun stop ->
let limits = new_limits t (Position.active root) stop in
State.new_ponder_when t.ponder >>= fun ponder ->
State.(gets histogram) >>= fun histogram ->
State.(gets tt) >>| fun tt ->
Search_thread.start ~root ~limits ~histogram ~tt ~stop ~ponder
end
let stop = State.update @@ function
| {stop = (Some _ as p); _} as st ->
Search_thread.signal [p]; {st with stop = None}
| st -> st
let ponderhit = State.update @@ function
| {ponder = (Some _ as p); _} as st ->
Search_thread.signal [p]; {st with ponder = None}
| st -> st
let register _ = return ()
(* Interprets a command. Returns true if the main UCI loop shall continue. *)
let recv cmd = match (cmd : Uci.Recv.t) with
| Uci -> cont @@ uci ()
| Isready -> isready () >>= cont
| Setoption opt -> setoption opt >>= cont
| Ucinewgame -> ucinewgame >>= cont
| Position (`fen pos, moves) -> position pos moves >>= cont
| Position (`startpos, moves) -> position Position.start moves >>= cont
| Go g -> Go.run g >>= cont
| Stop -> stop >>= cont
| Quit -> finish ()
| Ponderhit -> ponderhit >>= cont
| Debug `off -> State.set_debug false >>= cont
| Debug `on -> State.set_debug true >>= cont
| Register r -> register r >>= cont
let rec loop () = match In_channel.(input_line stdin) with
| None -> return ()
| Some "" -> loop ()
| Some line -> match Uci.Recv.of_string line with
| None -> loop @@ Format.printf "Invalid command: %s\n%!" line
| Some cmd -> recv cmd >>= function
| false -> return ()
| true -> loop ()
let histogram = Histogram.singleton Position.start
let exec () =
let st =
Monad.State.exec (loop ()) @@
State.Fields.create ~histogram
~pos:Position.start
~tt:(Search.Tt.create ())
~stop:None
~ponder:None
~debug:false
~book:None in
State.[stop st; ponder st]
let run () =
let ps = try exec () with Failure msg ->
Format.eprintf "%s\n%!" msg;
Err.exit () in
Atomic.get Search_thread.t |>
Option.iter ~f:(fun t ->
Search_thread.signal ps;
T.join t);
|
69cf62f9ab93a93118b786efcb6d75171c4e784869855e1fb5666ecc05f4849d
|
janestreet/async_smtp
|
simplemail.mli
|
open! Core
open! Async
open Async_smtp_types
module Envelope_status = Client.Envelope_status
module Expert : sig
val send_envelope
: ?log:Log.t
-> ?credentials:Credentials.t
-> ?server:Host_and_port.t
-> Smtp_envelope.t
-> Envelope_status.t Deferred.Or_error.t
val send'
: ?log:Mail_log.t
-> ?credentials:Credentials.t
-> ?server:Host_and_port.t
-> sender:Smtp_envelope.Sender.t
-> ?sender_args:Smtp_envelope.Sender_argument.t list
-> recipients:Email_address.t list
-> Email.t
-> Envelope_status.t Deferred.Or_error.t
val send
: ?log:Mail_log.t
-> ?credentials:Credentials.t
-> ?server:Host_and_port.t
-> sender:Smtp_envelope.Sender.t
-> ?sender_args:Smtp_envelope.Sender_argument.t list
-> recipients:Email_address.t list
-> Email.t
-> unit Deferred.Or_error.t
include module type of Email.Simple.Expert
end
include module type of Email.Simple with module Expert := Email.Simple.Expert
val send'
: ?log:Mail_log.t
-> ?credentials:Credentials.t
-> ?server:Host_and_port.t
-> ?from:Email_address.t (* defaults to <user@host> *)
-> ?sender_args:Smtp_envelope.Sender_argument.t list
-> to_:Email_address.t list
-> ?cc:Email_address.t list
-> ?bcc:Email_address.t list
-> ?reply_to:
Email_address.t
-> ?bounce_to:
Email_address.t
(* defaults to [from] *)
-> subject:string
-> ?id:string
-> ?in_reply_to:string
-> ?date:Time_float.t
-> ?auto_generated:unit
-> ?extra_headers:(Email_headers.Name.t * Email_headers.Value.t) list
-> ?attachments:(attachment_name * Email.Simple.Content.t) list
-> ?no_tracing_headers:[ `Because_not_using_standard_email_infra ]
-> Email.Simple.Content.t
-> Envelope_status.t Deferred.Or_error.t
val send
: ?log:Mail_log.t
-> ?credentials:Credentials.t
-> ?server:Host_and_port.t
-> ?from:Email_address.t (* defaults to <user@host> *)
-> ?sender_args:Smtp_envelope.Sender_argument.t list
-> to_:Email_address.t list
-> ?cc:Email_address.t list
-> ?bcc:Email_address.t list
-> ?reply_to:
Email_address.t
-> ?bounce_to:
Email_address.t
(* defaults to [from] *)
-> subject:string
-> ?id:string
-> ?in_reply_to:string
-> ?date:Time_float.t
-> ?auto_generated:unit
-> ?extra_headers:(Email_headers.Name.t * Email_headers.Value.t) list
-> ?attachments:(attachment_name * Email.Simple.Content.t) list
-> ?no_tracing_headers:[ `Because_not_using_standard_email_infra ]
-> Email.Simple.Content.t
-> unit Deferred.Or_error.t
| null |
https://raw.githubusercontent.com/janestreet/async_smtp/72c538d76f5c7453bbc89af44d93931cd499a912/src/simplemail.mli
|
ocaml
|
defaults to <user@host>
defaults to [from]
defaults to <user@host>
defaults to [from]
|
open! Core
open! Async
open Async_smtp_types
module Envelope_status = Client.Envelope_status
module Expert : sig
val send_envelope
: ?log:Log.t
-> ?credentials:Credentials.t
-> ?server:Host_and_port.t
-> Smtp_envelope.t
-> Envelope_status.t Deferred.Or_error.t
val send'
: ?log:Mail_log.t
-> ?credentials:Credentials.t
-> ?server:Host_and_port.t
-> sender:Smtp_envelope.Sender.t
-> ?sender_args:Smtp_envelope.Sender_argument.t list
-> recipients:Email_address.t list
-> Email.t
-> Envelope_status.t Deferred.Or_error.t
val send
: ?log:Mail_log.t
-> ?credentials:Credentials.t
-> ?server:Host_and_port.t
-> sender:Smtp_envelope.Sender.t
-> ?sender_args:Smtp_envelope.Sender_argument.t list
-> recipients:Email_address.t list
-> Email.t
-> unit Deferred.Or_error.t
include module type of Email.Simple.Expert
end
include module type of Email.Simple with module Expert := Email.Simple.Expert
val send'
: ?log:Mail_log.t
-> ?credentials:Credentials.t
-> ?server:Host_and_port.t
-> ?sender_args:Smtp_envelope.Sender_argument.t list
-> to_:Email_address.t list
-> ?cc:Email_address.t list
-> ?bcc:Email_address.t list
-> ?reply_to:
Email_address.t
-> ?bounce_to:
Email_address.t
-> subject:string
-> ?id:string
-> ?in_reply_to:string
-> ?date:Time_float.t
-> ?auto_generated:unit
-> ?extra_headers:(Email_headers.Name.t * Email_headers.Value.t) list
-> ?attachments:(attachment_name * Email.Simple.Content.t) list
-> ?no_tracing_headers:[ `Because_not_using_standard_email_infra ]
-> Email.Simple.Content.t
-> Envelope_status.t Deferred.Or_error.t
val send
: ?log:Mail_log.t
-> ?credentials:Credentials.t
-> ?server:Host_and_port.t
-> ?sender_args:Smtp_envelope.Sender_argument.t list
-> to_:Email_address.t list
-> ?cc:Email_address.t list
-> ?bcc:Email_address.t list
-> ?reply_to:
Email_address.t
-> ?bounce_to:
Email_address.t
-> subject:string
-> ?id:string
-> ?in_reply_to:string
-> ?date:Time_float.t
-> ?auto_generated:unit
-> ?extra_headers:(Email_headers.Name.t * Email_headers.Value.t) list
-> ?attachments:(attachment_name * Email.Simple.Content.t) list
-> ?no_tracing_headers:[ `Because_not_using_standard_email_infra ]
-> Email.Simple.Content.t
-> unit Deferred.Or_error.t
|
ba3c6d37ae75a55c9872b3b90a72fe52de441ecdaa1f774f611b7ce651eec9b0
|
ocamllabs/ocaml-modular-implicits
|
t140-switch-2.ml
|
open Lib;;
match 1 with
| 0 -> raise Not_found
| 1 -> ()
| _ -> raise Not_found
;;
*
0 CONSTINT 42
2 PUSHACC0
3 MAKEBLOCK1 0
5 POP 1
7
9 CONST1
10
11 SWITCH
int 0 - > 17
int 1 - > 22
15 BRANCH 25
17 GETGLOBAL Not_found
19 MAKEBLOCK1 0
21 RAISE
22 CONST0
23 BRANCH 30
25 GETGLOBAL Not_found
27 MAKEBLOCK1 0
29 RAISE
30 POP 1
32 ATOM0
33
35 STOP
*
0 CONSTINT 42
2 PUSHACC0
3 MAKEBLOCK1 0
5 POP 1
7 SETGLOBAL Lib
9 CONST1
10 PUSHACC0
11 SWITCH
int 0 -> 17
int 1 -> 22
15 BRANCH 25
17 GETGLOBAL Not_found
19 MAKEBLOCK1 0
21 RAISE
22 CONST0
23 BRANCH 30
25 GETGLOBAL Not_found
27 MAKEBLOCK1 0
29 RAISE
30 POP 1
32 ATOM0
33 SETGLOBAL T140-switch-2
35 STOP
**)
| null |
https://raw.githubusercontent.com/ocamllabs/ocaml-modular-implicits/92e45da5c8a4c2db8b2cd5be28a5bec2ac2181f1/testsuite/tests/tool-ocaml/t140-switch-2.ml
|
ocaml
|
open Lib;;
match 1 with
| 0 -> raise Not_found
| 1 -> ()
| _ -> raise Not_found
;;
*
0 CONSTINT 42
2 PUSHACC0
3 MAKEBLOCK1 0
5 POP 1
7
9 CONST1
10
11 SWITCH
int 0 - > 17
int 1 - > 22
15 BRANCH 25
17 GETGLOBAL Not_found
19 MAKEBLOCK1 0
21 RAISE
22 CONST0
23 BRANCH 30
25 GETGLOBAL Not_found
27 MAKEBLOCK1 0
29 RAISE
30 POP 1
32 ATOM0
33
35 STOP
*
0 CONSTINT 42
2 PUSHACC0
3 MAKEBLOCK1 0
5 POP 1
7 SETGLOBAL Lib
9 CONST1
10 PUSHACC0
11 SWITCH
int 0 -> 17
int 1 -> 22
15 BRANCH 25
17 GETGLOBAL Not_found
19 MAKEBLOCK1 0
21 RAISE
22 CONST0
23 BRANCH 30
25 GETGLOBAL Not_found
27 MAKEBLOCK1 0
29 RAISE
30 POP 1
32 ATOM0
33 SETGLOBAL T140-switch-2
35 STOP
**)
|
|
c4c8e3cbe0b110c0dbfc70859eda036be84a9600cfc773c6e8f32d2d8bf5de3d
|
ucsd-progsys/dsolve
|
bad-checklist.ml
|
let rec check l =
match l with
| [] -> ()
| x :: [] -> ()
| x :: y :: ys ->
assert (x <= y); check (y :: ys)
let _ = check [5; 2; 3]
| null |
https://raw.githubusercontent.com/ucsd-progsys/dsolve/bfbbb8ed9bbf352d74561e9f9127ab07b7882c0c/negtests/bad-checklist.ml
|
ocaml
|
let rec check l =
match l with
| [] -> ()
| x :: [] -> ()
| x :: y :: ys ->
assert (x <= y); check (y :: ys)
let _ = check [5; 2; 3]
|
|
c1144c0d0b322721a303f477b8fd1b294a3ebc9a1f1785cde6d799d8ebc216be
|
ocaml-multicore/ocaml-tsan
|
domain.ml
|
(**************************************************************************)
(* *)
(* OCaml *)
(* *)
, Indian Institute of Technology , Madras
, University of Cambridge
, OCaml Labs Consultancy
(* *)
Copyright 2019 Indian Institute of Technology , Madras
Copyright 2014 University of Cambridge
Copyright 2021 OCaml Labs Consultancy Ltd
(* *)
(* All rights reserved. This file is distributed under the terms of *)
the GNU Lesser General Public License version 2.1 , with the
(* special exception on linking described in the file LICENSE. *)
(* *)
(**************************************************************************)
module Raw = struct
(* Low-level primitives provided by the runtime *)
type t = private int
external spawn : (unit -> unit) -> Mutex.t -> t
= "caml_domain_spawn"
external self : unit -> t
= "caml_ml_domain_id"
external cpu_relax : unit -> unit
= "caml_ml_domain_cpu_relax"
external get_recommended_domain_count: unit -> int
= "caml_recommended_domain_count" [@@noalloc]
end
let cpu_relax () = Raw.cpu_relax ()
type id = Raw.t
type 'a state =
| Running
| Finished of ('a, exn) result
type 'a t = {
domain : Raw.t;
term_mutex: Mutex.t;
term_condition: Condition.t;
term_state: 'a state ref (* protected by [term_mutex] *)
}
module DLS = struct
type dls_state = Obj.t array
let unique_value = Obj.repr (ref 0)
external get_dls_state : unit -> dls_state = "%dls_get"
external set_dls_state : dls_state -> unit =
"caml_domain_dls_set" [@@noalloc]
let create_dls () =
let st = Array.make 8 unique_value in
set_dls_state st
let _ = create_dls ()
type 'a key = int * (unit -> 'a)
let key_counter = Atomic.make 0
type key_initializer =
KI: 'a key * ('a -> 'a) -> key_initializer
let parent_keys = Atomic.make ([] : key_initializer list)
let rec add_parent_key ki =
let l = Atomic.get parent_keys in
if not (Atomic.compare_and_set parent_keys l (ki :: l))
then add_parent_key ki
let new_key ?split_from_parent init_orphan =
let idx = Atomic.fetch_and_add key_counter 1 in
let k = (idx, init_orphan) in
begin match split_from_parent with
| None -> ()
| Some split -> add_parent_key (KI(k, split))
end;
k
(* If necessary, grow the current domain's local state array such that [idx]
* is a valid index in the array. *)
let maybe_grow idx =
let st = get_dls_state () in
let sz = Array.length st in
if idx < sz then st
else begin
let rec compute_new_size s =
if idx < s then s else compute_new_size (2 * s)
in
let new_sz = compute_new_size sz in
let new_st = Array.make new_sz unique_value in
Array.blit st 0 new_st 0 sz;
set_dls_state new_st;
new_st
end
let set (idx, _init) x =
let st = maybe_grow idx in
(* [Sys.opaque_identity] ensures that flambda does not look at the type of
* [x], which may be a [float] and conclude that the [st] is a float array.
* We do not want OCaml's float array optimisation kicking in here. *)
st.(idx) <- Obj.repr (Sys.opaque_identity x)
let get (idx, init) =
let st = maybe_grow idx in
let v = st.(idx) in
if v == unique_value then
let v' = Obj.repr (init ()) in
st.(idx) <- (Sys.opaque_identity v');
Obj.magic v'
else Obj.magic v
let get_initial_keys () : (int * Obj.t) list =
List.map
(fun (KI ((idx, _) as k, split)) ->
(idx, Obj.repr (split (get k))))
(Atomic.get parent_keys)
let set_initial_keys (l: (int * Obj.t) list) =
List.iter
(fun (idx, v) ->
let st = maybe_grow idx in st.(idx) <- v)
l
end
(******** Identity **********)
let get_id { domain; _ } = domain
let self () = Raw.self ()
let is_main_domain () = (self () :> int) = 0
(******** Callbacks **********)
first spawn , domain startup and at exit functionality
let first_domain_spawned = Atomic.make false
let first_spawn_function = ref (fun () -> ())
let before_first_spawn f =
if Atomic.get first_domain_spawned then
raise (Invalid_argument "first domain already spawned")
else begin
let old_f = !first_spawn_function in
let new_f () = old_f (); f () in
first_spawn_function := new_f
end
let do_before_first_spawn () =
if not (Atomic.get first_domain_spawned) then begin
Atomic.set first_domain_spawned true;
!first_spawn_function();
(* Release the old function *)
first_spawn_function := (fun () -> ())
end
let at_exit_key = DLS.new_key (fun () -> (fun () -> ()))
let at_exit f =
let old_exit : unit -> unit = DLS.get at_exit_key in
let new_exit () =
The domain termination callbacks ( [ at_exit ] ) are run in
last - in - first - out ( LIFO ) order in order to be symmetric with the domain
creation callbacks ( [ at_each_spawn ] ) which run in first - in - fisrt - out
( FIFO ) order .
last-in-first-out (LIFO) order in order to be symmetric with the domain
creation callbacks ([at_each_spawn]) which run in first-in-fisrt-out
(FIFO) order. *)
f (); old_exit ()
in
DLS.set at_exit_key new_exit
let do_at_exit () =
let f : unit -> unit = DLS.get at_exit_key in
f ()
let _ = Stdlib.do_domain_local_at_exit := do_at_exit
(******* Creation and Termination ********)
let spawn f =
do_before_first_spawn ();
let pk = DLS.get_initial_keys () in
(* The [term_mutex] and [term_condition] are used to
synchronize with the joining domains *)
let term_mutex = Mutex.create () in
let term_condition = Condition.create () in
let term_state = ref Running in
let body () =
let result =
match
DLS.create_dls ();
DLS.set_initial_keys pk;
let res = f () in
res
with
| x -> Ok x
| exception ex -> Error ex
in
let result' =
(* Run the [at_exit] callbacks when the domain computation either
terminates normally or exceptionally. *)
match do_at_exit () with
| () -> result
| exception ex ->
begin match result with
| Ok _ ->
(* If the domain computation terminated normally, but the
[at_exit] callbacks raised an exception, then return the
exception. *)
Error ex
| Error _ ->
(* If both the domain computation and the [at_exit] callbacks
raised exceptions, then ignore the exception from the
[at_exit] callbacks and return the original exception. *)
result
end
in
(* Synchronize with joining domains *)
Mutex.lock term_mutex;
match !term_state with
| Running ->
term_state := Finished result';
Condition.broadcast term_condition;
| Finished _ ->
failwith "internal error: Am I already finished?"
(* [term_mutex] is unlocked in the runtime after the cleanup functions on
the C side are finished. *)
in
{ domain = Raw.spawn body term_mutex;
term_mutex;
term_condition;
term_state }
let join { term_mutex; term_condition; term_state; _ } =
Mutex.lock term_mutex;
let rec loop () =
match !term_state with
| Running ->
Condition.wait term_condition term_mutex;
loop ()
| Finished res ->
Mutex.unlock term_mutex;
res
in
match loop () with
| Ok x -> x
| Error ex -> raise ex
let recommended_domain_count = Raw.get_recommended_domain_count
| null |
https://raw.githubusercontent.com/ocaml-multicore/ocaml-tsan/ae9c1502103845550162a49fcd3f76276cdfa866/stdlib/domain.ml
|
ocaml
|
************************************************************************
OCaml
All rights reserved. This file is distributed under the terms of
special exception on linking described in the file LICENSE.
************************************************************************
Low-level primitives provided by the runtime
protected by [term_mutex]
If necessary, grow the current domain's local state array such that [idx]
* is a valid index in the array.
[Sys.opaque_identity] ensures that flambda does not look at the type of
* [x], which may be a [float] and conclude that the [st] is a float array.
* We do not want OCaml's float array optimisation kicking in here.
******* Identity *********
******* Callbacks *********
Release the old function
****** Creation and Termination *******
The [term_mutex] and [term_condition] are used to
synchronize with the joining domains
Run the [at_exit] callbacks when the domain computation either
terminates normally or exceptionally.
If the domain computation terminated normally, but the
[at_exit] callbacks raised an exception, then return the
exception.
If both the domain computation and the [at_exit] callbacks
raised exceptions, then ignore the exception from the
[at_exit] callbacks and return the original exception.
Synchronize with joining domains
[term_mutex] is unlocked in the runtime after the cleanup functions on
the C side are finished.
|
, Indian Institute of Technology , Madras
, University of Cambridge
, OCaml Labs Consultancy
Copyright 2019 Indian Institute of Technology , Madras
Copyright 2014 University of Cambridge
Copyright 2021 OCaml Labs Consultancy Ltd
the GNU Lesser General Public License version 2.1 , with the
module Raw = struct
type t = private int
external spawn : (unit -> unit) -> Mutex.t -> t
= "caml_domain_spawn"
external self : unit -> t
= "caml_ml_domain_id"
external cpu_relax : unit -> unit
= "caml_ml_domain_cpu_relax"
external get_recommended_domain_count: unit -> int
= "caml_recommended_domain_count" [@@noalloc]
end
let cpu_relax () = Raw.cpu_relax ()
type id = Raw.t
type 'a state =
| Running
| Finished of ('a, exn) result
type 'a t = {
domain : Raw.t;
term_mutex: Mutex.t;
term_condition: Condition.t;
}
module DLS = struct
type dls_state = Obj.t array
let unique_value = Obj.repr (ref 0)
external get_dls_state : unit -> dls_state = "%dls_get"
external set_dls_state : dls_state -> unit =
"caml_domain_dls_set" [@@noalloc]
let create_dls () =
let st = Array.make 8 unique_value in
set_dls_state st
let _ = create_dls ()
type 'a key = int * (unit -> 'a)
let key_counter = Atomic.make 0
type key_initializer =
KI: 'a key * ('a -> 'a) -> key_initializer
let parent_keys = Atomic.make ([] : key_initializer list)
let rec add_parent_key ki =
let l = Atomic.get parent_keys in
if not (Atomic.compare_and_set parent_keys l (ki :: l))
then add_parent_key ki
let new_key ?split_from_parent init_orphan =
let idx = Atomic.fetch_and_add key_counter 1 in
let k = (idx, init_orphan) in
begin match split_from_parent with
| None -> ()
| Some split -> add_parent_key (KI(k, split))
end;
k
let maybe_grow idx =
let st = get_dls_state () in
let sz = Array.length st in
if idx < sz then st
else begin
let rec compute_new_size s =
if idx < s then s else compute_new_size (2 * s)
in
let new_sz = compute_new_size sz in
let new_st = Array.make new_sz unique_value in
Array.blit st 0 new_st 0 sz;
set_dls_state new_st;
new_st
end
let set (idx, _init) x =
let st = maybe_grow idx in
st.(idx) <- Obj.repr (Sys.opaque_identity x)
let get (idx, init) =
let st = maybe_grow idx in
let v = st.(idx) in
if v == unique_value then
let v' = Obj.repr (init ()) in
st.(idx) <- (Sys.opaque_identity v');
Obj.magic v'
else Obj.magic v
let get_initial_keys () : (int * Obj.t) list =
List.map
(fun (KI ((idx, _) as k, split)) ->
(idx, Obj.repr (split (get k))))
(Atomic.get parent_keys)
let set_initial_keys (l: (int * Obj.t) list) =
List.iter
(fun (idx, v) ->
let st = maybe_grow idx in st.(idx) <- v)
l
end
let get_id { domain; _ } = domain
let self () = Raw.self ()
let is_main_domain () = (self () :> int) = 0
first spawn , domain startup and at exit functionality
let first_domain_spawned = Atomic.make false
let first_spawn_function = ref (fun () -> ())
let before_first_spawn f =
if Atomic.get first_domain_spawned then
raise (Invalid_argument "first domain already spawned")
else begin
let old_f = !first_spawn_function in
let new_f () = old_f (); f () in
first_spawn_function := new_f
end
let do_before_first_spawn () =
if not (Atomic.get first_domain_spawned) then begin
Atomic.set first_domain_spawned true;
!first_spawn_function();
first_spawn_function := (fun () -> ())
end
let at_exit_key = DLS.new_key (fun () -> (fun () -> ()))
let at_exit f =
let old_exit : unit -> unit = DLS.get at_exit_key in
let new_exit () =
The domain termination callbacks ( [ at_exit ] ) are run in
last - in - first - out ( LIFO ) order in order to be symmetric with the domain
creation callbacks ( [ at_each_spawn ] ) which run in first - in - fisrt - out
( FIFO ) order .
last-in-first-out (LIFO) order in order to be symmetric with the domain
creation callbacks ([at_each_spawn]) which run in first-in-fisrt-out
(FIFO) order. *)
f (); old_exit ()
in
DLS.set at_exit_key new_exit
let do_at_exit () =
let f : unit -> unit = DLS.get at_exit_key in
f ()
let _ = Stdlib.do_domain_local_at_exit := do_at_exit
let spawn f =
do_before_first_spawn ();
let pk = DLS.get_initial_keys () in
let term_mutex = Mutex.create () in
let term_condition = Condition.create () in
let term_state = ref Running in
let body () =
let result =
match
DLS.create_dls ();
DLS.set_initial_keys pk;
let res = f () in
res
with
| x -> Ok x
| exception ex -> Error ex
in
let result' =
match do_at_exit () with
| () -> result
| exception ex ->
begin match result with
| Ok _ ->
Error ex
| Error _ ->
result
end
in
Mutex.lock term_mutex;
match !term_state with
| Running ->
term_state := Finished result';
Condition.broadcast term_condition;
| Finished _ ->
failwith "internal error: Am I already finished?"
in
{ domain = Raw.spawn body term_mutex;
term_mutex;
term_condition;
term_state }
let join { term_mutex; term_condition; term_state; _ } =
Mutex.lock term_mutex;
let rec loop () =
match !term_state with
| Running ->
Condition.wait term_condition term_mutex;
loop ()
| Finished res ->
Mutex.unlock term_mutex;
res
in
match loop () with
| Ok x -> x
| Error ex -> raise ex
let recommended_domain_count = Raw.get_recommended_domain_count
|
b5a56abd287ed45524cb74359c864004c30adf7c51d146804788002aeda5096c
|
jwiegley/notes
|
snippets.hs
|
splitString :: String -> String -> [String]
splitString = split' []
where split' acc s str@(x:xs)
| s `isPrefixOf` str = acc : split' [] s (drop (length s) str)
| otherwise = split' (acc ++ [x]) s xs
split' acc _ [] = [acc]
| null |
https://raw.githubusercontent.com/jwiegley/notes/24574b02bfd869845faa1521854f90e4e8bf5e9a/gists/f719a3d41696d48f6005/gists/83337/snippets.hs
|
haskell
|
splitString :: String -> String -> [String]
splitString = split' []
where split' acc s str@(x:xs)
| s `isPrefixOf` str = acc : split' [] s (drop (length s) str)
| otherwise = split' (acc ++ [x]) s xs
split' acc _ [] = [acc]
|
|
187d017d874273926a41ebe7a62b503a6cbb3fa500862a358b8c6061a4f7f35f
|
froggey/Mezzano
|
complex.lisp
|
;;;; Complex numbers
(in-package :mezzano.internals.numbers.complex)
(declaim (inline complexp))
(defun complexp (object)
(int::%object-of-type-range-p
object
int::+first-complex-object-tag+
int::+last-complex-object-tag+))
(defun complex (realpart &optional imagpart)
(check-type realpart real)
(check-type imagpart (or real null))
(unless imagpart
(setf imagpart (coerce 0 (type-of realpart))))
(cond ((or (typep realpart 'double-float)
(typep imagpart 'double-float))
(let ((result (mezzano.runtime::%allocate-object int::+object-tag-complex-double-float+ 0 2 nil)))
(setf (int::%object-ref-double-float result int::+complex-realpart+) (float realpart 0.0d0)
(int::%object-ref-double-float result int::+complex-imagpart+) (float imagpart 0.0d0))
result))
((or (typep realpart 'single-float)
(typep imagpart 'single-float))
(let ((result (mezzano.runtime::%allocate-object int::+object-tag-complex-single-float+ 0 1 nil)))
(setf (int::%object-ref-single-float result int::+complex-realpart+) (float realpart 0.0f0)
(int::%object-ref-single-float result int::+complex-imagpart+) (float imagpart 0.0f0))
result))
((or (typep realpart 'short-float)
(typep imagpart 'short-float))
(let ((result (mezzano.runtime::%allocate-object int::+object-tag-complex-short-float+ 0 1 nil)))
(setf (int::%object-ref-short-float result int::+complex-realpart+) (float realpart 0.0s0)
(int::%object-ref-short-float result int::+complex-imagpart+) (float imagpart 0.0s0))
result))
((not (zerop imagpart))
(let ((result (mezzano.runtime::%allocate-object int::+object-tag-complex-rational+ 0 2 nil)))
(setf (int::%object-ref-t result int::+complex-realpart+) realpart
(int::%object-ref-t result int::+complex-imagpart+) imagpart)
result))
(t
realpart)))
(defun realpart (number)
(cond
((int::%object-of-type-p number int::+object-tag-complex-rational+)
(int::%object-ref-t number int::+complex-realpart+))
((int::%object-of-type-p number int::+object-tag-complex-short-float+)
(int::%object-ref-short-float number int::+complex-realpart+))
((int::%object-of-type-p number int::+object-tag-complex-single-float+)
(int::%object-ref-single-float number int::+complex-realpart+))
((int::%object-of-type-p number int::+object-tag-complex-double-float+)
(int::%object-ref-double-float number int::+complex-realpart+))
(t
(check-type number number)
number)))
(defun imagpart (number)
(cond
((int::%object-of-type-p number int::+object-tag-complex-rational+)
(int::%object-ref-t number int::+complex-imagpart+))
((int::%object-of-type-p number int::+object-tag-complex-short-float+)
(int::%object-ref-short-float number int::+complex-imagpart+))
((int::%object-of-type-p number int::+object-tag-complex-single-float+)
(int::%object-ref-single-float number int::+complex-imagpart+))
((int::%object-of-type-p number int::+object-tag-complex-double-float+)
(int::%object-ref-double-float number int::+complex-imagpart+))
(t
(check-type number number)
(* 0 number))))
(declaim (inline complex-=))
(defun complex-= (x y)
(and (= (realpart x) (realpart y))
(= (imagpart x) (imagpart y))))
(defun complex-+ (x y)
(complex (+ (realpart x) (realpart y))
(+ (imagpart x) (imagpart y))))
(defun complex-- (x y)
(complex (- (realpart x) (realpart y))
(- (imagpart x) (imagpart y))))
(defun complex-* (x y)
(complex (- (* (realpart x) (realpart y))
(* (imagpart x) (imagpart y)))
(+ (* (imagpart x) (realpart y))
(* (realpart x) (imagpart y)))))
(defun complex-/ (x y)
(complex (/ (+ (* (realpart x) (realpart y))
(* (imagpart x) (imagpart y)))
(+ (expt (realpart y) 2)
(expt (imagpart y) 2)))
(/ (- (* (imagpart x) (realpart y))
(* (realpart x) (imagpart y)))
(+ (expt (realpart y) 2)
(expt (imagpart y) 2)))))
(defun complex-abs (number)
(sqrt (+ (expt (realpart number) 2)
(expt (imagpart number) 2))))
(defun complex-sqrt (number)
(exp (/ (log number) 2)))
(defun complex-sin (x)
(let ((real (realpart x))
(imag (imagpart x)))
(complex (* (sin real) (cosh imag))
(* (cos real) (sinh imag)))))
(defun complex-cos (x)
(let ((real (realpart x))
(imag (imagpart x)))
(complex (* (cos real) (cosh imag))
(- (* (sin real) (sinh imag))))))
(defun complex-log-e (number)
(complex (log (abs number)) (phase number)))
(defun complex-exp (number)
(* (exp (realpart number))
(cis (imagpart number))))
(defun cis (radians)
(complex (cos radians)
(sin radians)))
(defun conjugate (number)
(if (complexp number)
(complex (realpart number)
(- (imagpart number)))
number))
| null |
https://raw.githubusercontent.com/froggey/Mezzano/d506e0c3b9efcddd39155bfdd77d9f025c60930b/system/numbers/complex.lisp
|
lisp
|
Complex numbers
|
(in-package :mezzano.internals.numbers.complex)
(declaim (inline complexp))
(defun complexp (object)
(int::%object-of-type-range-p
object
int::+first-complex-object-tag+
int::+last-complex-object-tag+))
(defun complex (realpart &optional imagpart)
(check-type realpart real)
(check-type imagpart (or real null))
(unless imagpart
(setf imagpart (coerce 0 (type-of realpart))))
(cond ((or (typep realpart 'double-float)
(typep imagpart 'double-float))
(let ((result (mezzano.runtime::%allocate-object int::+object-tag-complex-double-float+ 0 2 nil)))
(setf (int::%object-ref-double-float result int::+complex-realpart+) (float realpart 0.0d0)
(int::%object-ref-double-float result int::+complex-imagpart+) (float imagpart 0.0d0))
result))
((or (typep realpart 'single-float)
(typep imagpart 'single-float))
(let ((result (mezzano.runtime::%allocate-object int::+object-tag-complex-single-float+ 0 1 nil)))
(setf (int::%object-ref-single-float result int::+complex-realpart+) (float realpart 0.0f0)
(int::%object-ref-single-float result int::+complex-imagpart+) (float imagpart 0.0f0))
result))
((or (typep realpart 'short-float)
(typep imagpart 'short-float))
(let ((result (mezzano.runtime::%allocate-object int::+object-tag-complex-short-float+ 0 1 nil)))
(setf (int::%object-ref-short-float result int::+complex-realpart+) (float realpart 0.0s0)
(int::%object-ref-short-float result int::+complex-imagpart+) (float imagpart 0.0s0))
result))
((not (zerop imagpart))
(let ((result (mezzano.runtime::%allocate-object int::+object-tag-complex-rational+ 0 2 nil)))
(setf (int::%object-ref-t result int::+complex-realpart+) realpart
(int::%object-ref-t result int::+complex-imagpart+) imagpart)
result))
(t
realpart)))
(defun realpart (number)
(cond
((int::%object-of-type-p number int::+object-tag-complex-rational+)
(int::%object-ref-t number int::+complex-realpart+))
((int::%object-of-type-p number int::+object-tag-complex-short-float+)
(int::%object-ref-short-float number int::+complex-realpart+))
((int::%object-of-type-p number int::+object-tag-complex-single-float+)
(int::%object-ref-single-float number int::+complex-realpart+))
((int::%object-of-type-p number int::+object-tag-complex-double-float+)
(int::%object-ref-double-float number int::+complex-realpart+))
(t
(check-type number number)
number)))
(defun imagpart (number)
(cond
((int::%object-of-type-p number int::+object-tag-complex-rational+)
(int::%object-ref-t number int::+complex-imagpart+))
((int::%object-of-type-p number int::+object-tag-complex-short-float+)
(int::%object-ref-short-float number int::+complex-imagpart+))
((int::%object-of-type-p number int::+object-tag-complex-single-float+)
(int::%object-ref-single-float number int::+complex-imagpart+))
((int::%object-of-type-p number int::+object-tag-complex-double-float+)
(int::%object-ref-double-float number int::+complex-imagpart+))
(t
(check-type number number)
(* 0 number))))
(declaim (inline complex-=))
(defun complex-= (x y)
(and (= (realpart x) (realpart y))
(= (imagpart x) (imagpart y))))
(defun complex-+ (x y)
(complex (+ (realpart x) (realpart y))
(+ (imagpart x) (imagpart y))))
(defun complex-- (x y)
(complex (- (realpart x) (realpart y))
(- (imagpart x) (imagpart y))))
(defun complex-* (x y)
(complex (- (* (realpart x) (realpart y))
(* (imagpart x) (imagpart y)))
(+ (* (imagpart x) (realpart y))
(* (realpart x) (imagpart y)))))
(defun complex-/ (x y)
(complex (/ (+ (* (realpart x) (realpart y))
(* (imagpart x) (imagpart y)))
(+ (expt (realpart y) 2)
(expt (imagpart y) 2)))
(/ (- (* (imagpart x) (realpart y))
(* (realpart x) (imagpart y)))
(+ (expt (realpart y) 2)
(expt (imagpart y) 2)))))
(defun complex-abs (number)
(sqrt (+ (expt (realpart number) 2)
(expt (imagpart number) 2))))
(defun complex-sqrt (number)
(exp (/ (log number) 2)))
(defun complex-sin (x)
(let ((real (realpart x))
(imag (imagpart x)))
(complex (* (sin real) (cosh imag))
(* (cos real) (sinh imag)))))
(defun complex-cos (x)
(let ((real (realpart x))
(imag (imagpart x)))
(complex (* (cos real) (cosh imag))
(- (* (sin real) (sinh imag))))))
(defun complex-log-e (number)
(complex (log (abs number)) (phase number)))
(defun complex-exp (number)
(* (exp (realpart number))
(cis (imagpart number))))
(defun cis (radians)
(complex (cos radians)
(sin radians)))
(defun conjugate (number)
(if (complexp number)
(complex (realpart number)
(- (imagpart number)))
number))
|
33789ec476ee5f256f5b2f04da696cf48eb822e12269ea21f7351f5979e854f4
|
pedrovgs/HaskellKatas
|
Main.hs
|
module PrimeFactors.Main where
import PrimeFactors.PrimeFactors
main = do print "Prime factors:"
let factorsOf1 = primeFactors 1
let factorsOf11 = primeFactors 11
let factorsOf66 = primeFactors 66
let factorsOfNegatie = primeFactors (-1)
print ("Prime factors of 1 = " ++ show factorsOf1)
print ("Prime factors of 11 = " ++ show factorsOf11)
print ("Prime factors of 66 = " ++ show factorsOf66)
print ("Prime factors of a negative number = " ++ show factorsOfNegatie)
| null |
https://raw.githubusercontent.com/pedrovgs/HaskellKatas/79ecaeecf80e71c44cfa06692f56dd3a6d1c7308/app/PrimeFactors/Main.hs
|
haskell
|
module PrimeFactors.Main where
import PrimeFactors.PrimeFactors
main = do print "Prime factors:"
let factorsOf1 = primeFactors 1
let factorsOf11 = primeFactors 11
let factorsOf66 = primeFactors 66
let factorsOfNegatie = primeFactors (-1)
print ("Prime factors of 1 = " ++ show factorsOf1)
print ("Prime factors of 11 = " ++ show factorsOf11)
print ("Prime factors of 66 = " ++ show factorsOf66)
print ("Prime factors of a negative number = " ++ show factorsOfNegatie)
|
|
485b38d994b0fca024da358f6f7805c166ceace7e9344255145cbb9a2e8c38c9
|
birthevdb/Latent-Effect-and-Handlers
|
CallByValue.hs
|
# LANGUAGE TypeOperators #
# LANGUAGE FlexibleContexts #
{-# LANGUAGE RankNTypes #-}
module Features.CallByValue where
import General
import Effects.Abstraction
import Prelude hiding (abs)
data LamExpr e = VarExpr Int | AbsExpr e | AppExpr e e
lamAlg :: (Abstracting v :<<: σ) => LamExpr (Tree σ Id v) -> Tree σ Id v
lamAlg (VarExpr n) = var n
lamAlg (AbsExpr e) = abs e
lamAlg (AppExpr e1 e2) = do
v1 <- e1
v2 <- e2
app v1 v2
| null |
https://raw.githubusercontent.com/birthevdb/Latent-Effect-and-Handlers/398167aa3a18572afa1ecc9ecdd6b37c97495f90/Features/CallByValue.hs
|
haskell
|
# LANGUAGE RankNTypes #
|
# LANGUAGE TypeOperators #
# LANGUAGE FlexibleContexts #
module Features.CallByValue where
import General
import Effects.Abstraction
import Prelude hiding (abs)
data LamExpr e = VarExpr Int | AbsExpr e | AppExpr e e
lamAlg :: (Abstracting v :<<: σ) => LamExpr (Tree σ Id v) -> Tree σ Id v
lamAlg (VarExpr n) = var n
lamAlg (AbsExpr e) = abs e
lamAlg (AppExpr e1 e2) = do
v1 <- e1
v2 <- e2
app v1 v2
|
9fcc4790aa713675e4e6f08b4c5328e98fd082babb8922fb2a08dc365c4af2bd
|
JonathanLorimer/weft
|
Resolve.hs
|
module Weft.Generics.Resolve
( HasResolve
, resolve
) where
import qualified Data.Map as M
import Data.Text (Text)
import GHC.Generics
import Weft.Internal.Types hiding (query)
------------------------------------------------------------------------------
-- |
type HasResolve record =
( GResolve (J record 'Resolver)
(J record 'Query)
(J record 'Response)
, Generic record
)
------------------------------------------------------------------------------
-- |
resolve
:: HasResolve record
=> JHKD record 'Resolver
-> JHKD record 'Query
-> IO (JHKD record 'Response)
resolve rv query = HKD <$> gResolve (runHKD rv) (runHKD query)
------------------------------------------------------------------------------
-- |
class GResolve (rv :: * -> *) (qu :: * -> *) (rp :: * -> *) where
gResolve :: rv x -> qu x -> IO (rp x)
instance ( GResolve rv qu rp
) => GResolve (M1 x y rv)
(M1 x y qu)
(M1 x y rp) where
gResolve (M1 rv) (M1 qu) = M1 <$> gResolve rv qu
instance ( GResolve frv fqu frp
, GResolve grv gqu grp
) => GResolve (frv :*: grv)
(fqu :*: gqu)
(frp :*: grp) where
gResolve (frv :*: grv) (fqu :*: gqu) =
(:*:) <$> gResolve frv fqu
<*> gResolve grv gqu
-- | Q, RV1, RP3
instance (ResolveField rv qu rp) =>
GResolve (K1 x rv)
(K1 x (M.Map Text qu))
(K1 x (M.Map Text rp)) where
gResolve (K1 rv) (K1 qu) = K1 <$> traverse (resolveField rv) qu
------------------------------------------------------------------------------
-- |
class ResolveField rv qu rp where
resolveField :: rv -> qu -> IO rp
-- | Base Cases
instance ResolveField (M1 _1 _2 q _3 -> IO (M1 _1 _2 rp _3))
(Args '[], M1 _1 _2 q _3)
(M1 _1 _2 rp _3) where
resolveField f (ANil, query) = f query
instance ResolveField (M1 _1 _2 q _3 -> IO [M1 _1 _2 rp _3])
(Args '[], M1 _1 _2 q _3)
[M1 _1 _2 rp _3] where
resolveField f (ANil, query) = f query
instance ResolveField (IO scalar)
(Args '[], ())
scalar where
resolveField s (ANil, ()) = s
instance ResolveField rv (Args args, ru) rp
=> ResolveField (Arg n t -> rv)
(Args ('(n, t) ': args), ru)
rp where
resolveField f (arg :@@ args, query) =
resolveField (f arg) (args, query)
-- | Q, RV1, RP3
instance GResolve (K1 x (Magic 'Resolver r))
(K1 x (Magic 'Query r))
(K1 x (Magic 'Response r)) =>
GResolve (K1 x (ToMagic 'Resolver r))
(K1 x (ToMagic 'Query r))
(K1 x (ToMagic 'Response r)) where
gResolve (K1 (ToMagic rv)) (K1 (ToMagic qu)) =
K1 . ToMagic . unK1 @x <$>
gResolve (K1 @x rv) (K1 @x qu)
| null |
https://raw.githubusercontent.com/JonathanLorimer/weft/fc0396240905ab0202c5896019cf1e482d216f8d/src/Weft/Generics/Resolve.hs
|
haskell
|
----------------------------------------------------------------------------
|
----------------------------------------------------------------------------
|
----------------------------------------------------------------------------
|
| Q, RV1, RP3
----------------------------------------------------------------------------
|
| Base Cases
| Q, RV1, RP3
|
module Weft.Generics.Resolve
( HasResolve
, resolve
) where
import qualified Data.Map as M
import Data.Text (Text)
import GHC.Generics
import Weft.Internal.Types hiding (query)
type HasResolve record =
( GResolve (J record 'Resolver)
(J record 'Query)
(J record 'Response)
, Generic record
)
-- | Resolve a query against a resolver record, producing the response.
-- Unwraps the HKD representations, dispatches to the generic 'gResolve',
-- and re-wraps the result in 'HKD'.
resolve
    :: HasResolve record
    => JHKD record 'Resolver
    -> JHKD record 'Query
    -> IO (JHKD record 'Response)
resolve rv query = HKD <$> gResolve (runHKD rv) (runHKD query)
class GResolve (rv :: * -> *) (qu :: * -> *) (rp :: * -> *) where
gResolve :: rv x -> qu x -> IO (rp x)
instance ( GResolve rv qu rp
) => GResolve (M1 x y rv)
(M1 x y qu)
(M1 x y rp) where
gResolve (M1 rv) (M1 qu) = M1 <$> gResolve rv qu
instance ( GResolve frv fqu frp
, GResolve grv gqu grp
) => GResolve (frv :*: grv)
(fqu :*: gqu)
(frp :*: grp) where
gResolve (frv :*: grv) (fqu :*: gqu) =
(:*:) <$> gResolve frv fqu
<*> gResolve grv gqu
instance (ResolveField rv qu rp) =>
GResolve (K1 x rv)
(K1 x (M.Map Text qu))
(K1 x (M.Map Text rp)) where
gResolve (K1 rv) (K1 qu) = K1 <$> traverse (resolveField rv) qu
class ResolveField rv qu rp where
resolveField :: rv -> qu -> IO rp
instance ResolveField (M1 _1 _2 q _3 -> IO (M1 _1 _2 rp _3))
(Args '[], M1 _1 _2 q _3)
(M1 _1 _2 rp _3) where
resolveField f (ANil, query) = f query
instance ResolveField (M1 _1 _2 q _3 -> IO [M1 _1 _2 rp _3])
(Args '[], M1 _1 _2 q _3)
[M1 _1 _2 rp _3] where
resolveField f (ANil, query) = f query
instance ResolveField (IO scalar)
(Args '[], ())
scalar where
resolveField s (ANil, ()) = s
instance ResolveField rv (Args args, ru) rp
=> ResolveField (Arg n t -> rv)
(Args ('(n, t) ': args), ru)
rp where
resolveField f (arg :@@ args, query) =
resolveField (f arg) (args, query)
instance GResolve (K1 x (Magic 'Resolver r))
(K1 x (Magic 'Query r))
(K1 x (Magic 'Response r)) =>
GResolve (K1 x (ToMagic 'Resolver r))
(K1 x (ToMagic 'Query r))
(K1 x (ToMagic 'Response r)) where
gResolve (K1 (ToMagic rv)) (K1 (ToMagic qu)) =
K1 . ToMagic . unK1 @x <$>
gResolve (K1 @x rv) (K1 @x qu)
|
fc81cf4b9a4e77af88ae1109cecb0478c091d944d6d9234d65419ba7cd9229cb
|
duelinmarkers/insfactor
|
zip_test.clj
|
(ns duelinmarkers.insfactor.zip-test
  (:use clojure.test duelinmarkers.insfactor.zip))
;; zipper-seq should walk a vector zipper depth-first, yielding every
;; node -- branch nodes included -- in document order.
(deftest of-zipper-seq
  (is (= [[1 [2 3] 4] 1 [2 3] 2 3 4]
         (zipper-seq (clojure.zip/vector-zip [1 [2 3] 4]))))
  ;; an empty vector zipper yields only its (empty) root node
  (is (= [[]]
         (zipper-seq (clojure.zip/vector-zip [])))))
| null |
https://raw.githubusercontent.com/duelinmarkers/insfactor/8085c4e582735f928752cbed9f5957cf95321e57/test/duelinmarkers/insfactor/zip_test.clj
|
clojure
|
(ns duelinmarkers.insfactor.zip-test
(:use clojure.test duelinmarkers.insfactor.zip))
(deftest of-zipper-seq
(is (= [[1 [2 3] 4] 1 [2 3] 2 3 4]
(zipper-seq (clojure.zip/vector-zip [1 [2 3] 4]))))
(is (= [[]]
(zipper-seq (clojure.zip/vector-zip [])))))
|
|
9de5df3b789e3c4b539796d6cb43be43a8eb3da2dee1c0119cd906c92300b13f
|
JacquesCarette/Drasil
|
ClassInterface.hs
|
# LANGUAGE TypeFamilies #
| Defines a package extension for GOOL , with functions for pairing a GOOL
-- program with auxiliary, non-source-code files.
module Language.Drasil.Code.Imperative.GOOL.ClassInterface (
ReadMeInfo(..),
Typeclasses
PackageSym(..), AuxiliarySym(..)
) where
import Language.Drasil (Expr)
import Database.Drasil (ChunkDB)
import Language.Drasil.Code.DataDesc (DataDesc)
import Language.Drasil.Mod (Name, Version)
import Language.Drasil.Choices (Comments, ImplementationType, Verbosity)
import GOOL.Drasil (ProgData, GOOLState)
import Text.PrettyPrint.HughesPJ (Doc)
-- | Members of this class must have all the information necessary for
the ' AuxiliarySym ' in addition to information necessary to create a package .
class (AuxiliarySym r) => PackageSym r where
type Package r
package :: ProgData -> [r (Auxiliary r)] -> r (Package r)
| Members of this class must have a doxygen configuration , ReadMe file ,
sample input , optimize doxygen document , information necessary for a makefile ,
-- auxiliary helper documents, and auxiliary from data documents.
class AuxiliarySym r where
type Auxiliary r
type AuxHelper r
doxConfig :: String -> GOOLState -> Verbosity -> r (Auxiliary r)
readMe :: ReadMeInfo -> r (Auxiliary r)
sampleInput :: ChunkDB -> DataDesc -> [Expr] -> r (Auxiliary r)
optimizeDox :: r (AuxHelper r)
makefile :: [FilePath] -> ImplementationType -> [Comments] -> GOOLState ->
ProgData -> r (Auxiliary r)
auxHelperDoc :: r (AuxHelper r) -> Doc
auxFromData :: FilePath -> Doc -> r (Auxiliary r)
-- | Language name.
type LangAbbrev = String
-- | Programming language version.
type LangVers = String
-- | Case name.
type CaseName = String
-- | Purpose of example
type ExamplePurpose = String
-- | Description of example
type ExampleDescr = String
-- | File contributors
type Contributor = String
-- | Holds all information needed to create a README file.
data ReadMeInfo = ReadMeInfo {
langName :: LangAbbrev,
langVersion :: LangVers,
invalidOS :: Maybe String,
implementType :: ImplementationType,
extLibNV :: [(Name,Version)],
extLibFP :: [FilePath],
contributors :: [Contributor],
configFP :: [FilePath],
caseName :: CaseName,
examplePurpose :: ExamplePurpose,
exampleDescr :: ExampleDescr
}
| null |
https://raw.githubusercontent.com/JacquesCarette/Drasil/84272acccc09574dec70d8d96c6ea994f15f8b22/code/drasil-code/lib/Language/Drasil/Code/Imperative/GOOL/ClassInterface.hs
|
haskell
|
program with auxiliary, non-source-code files.
| Members of this class must have all the information necessary for
auxiliary helper documents, and auxiliary from data documents.
| Language name.
| Programming language version.
| Case name.
| Purpose of example
| Description of example
| File contributors
| Holds all information needed to create a README file.
|
# LANGUAGE TypeFamilies #
| Defines a package extension for GOOL , with functions for pairing a GOOL
module Language.Drasil.Code.Imperative.GOOL.ClassInterface (
ReadMeInfo(..),
Typeclasses
PackageSym(..), AuxiliarySym(..)
) where
import Language.Drasil (Expr)
import Database.Drasil (ChunkDB)
import Language.Drasil.Code.DataDesc (DataDesc)
import Language.Drasil.Mod (Name, Version)
import Language.Drasil.Choices (Comments, ImplementationType, Verbosity)
import GOOL.Drasil (ProgData, GOOLState)
import Text.PrettyPrint.HughesPJ (Doc)
the ' AuxiliarySym ' in addition to information necessary to create a package .
class (AuxiliarySym r) => PackageSym r where
type Package r
package :: ProgData -> [r (Auxiliary r)] -> r (Package r)
| Members of this class must have a doxygen configuration , ReadMe file ,
sample input , optimize doxygen document , information necessary for a makefile ,
class AuxiliarySym r where
type Auxiliary r
type AuxHelper r
doxConfig :: String -> GOOLState -> Verbosity -> r (Auxiliary r)
readMe :: ReadMeInfo -> r (Auxiliary r)
sampleInput :: ChunkDB -> DataDesc -> [Expr] -> r (Auxiliary r)
optimizeDox :: r (AuxHelper r)
makefile :: [FilePath] -> ImplementationType -> [Comments] -> GOOLState ->
ProgData -> r (Auxiliary r)
auxHelperDoc :: r (AuxHelper r) -> Doc
auxFromData :: FilePath -> Doc -> r (Auxiliary r)
type LangAbbrev = String
type LangVers = String
type CaseName = String
type ExamplePurpose = String
type ExampleDescr = String
type Contributor = String
data ReadMeInfo = ReadMeInfo {
langName :: LangAbbrev,
langVersion :: LangVers,
invalidOS :: Maybe String,
implementType :: ImplementationType,
extLibNV :: [(Name,Version)],
extLibFP :: [FilePath],
contributors :: [Contributor],
configFP :: [FilePath],
caseName :: CaseName,
examplePurpose :: ExamplePurpose,
exampleDescr :: ExampleDescr
}
|
ea0f8c773c70a72fde24ff0e650fa04e7013bf4cf3df4ff22d3a3dccdb74f1ba
|
shayne-fletcher/zen
|
flows.ml
|
type flow =
{
flow_start : Dates.t ;
flow_end : Dates.t ;
flow_pay : Dates.t ;
flow_accrual : float ;
}
;;
(* Parse one [flow] from a token stream of the form
   "{ date ; date ; date ; float }" (camlp4 stream-parser syntax).
   Inverse of [string_of_flow]. *)
let rec parse_flow = parser
    [< 'Genlex.Kwd "{" ;
       s = Dates.parse_date ; 'Genlex.Kwd ";" ;
       e = Dates.parse_date ; 'Genlex.Kwd ";" ;
       p = Dates.parse_date ; 'Genlex.Kwd ";" ;
       a = (parser | [< 'Genlex.Float f >] -> f);
       'Genlex.Kwd "}" >] ->
         {flow_start=s; flow_end=e; flow_pay=p; flow_accrual=a}
;;
(* Render a flow as "{start ; end ; pay; accrual}".
   Inverse of [flow_of_string]. *)
let string_of_flow : flow -> string =
  fun f ->
    Printf.sprintf "{%s ; %s ; %s; %s}"
      (Dates.string_of_date f.flow_start)
      (Dates.string_of_date f.flow_end)
      (Dates.string_of_date f.flow_pay)
      (string_of_float f.flow_accrual)
;;
(* Parse a flow from its textual form; inverse of [string_of_flow].
   Raises [Stream.Error]/[Stream.Failure] on malformed input. *)
let flow_of_string : string -> flow =
  fun s ->
    let lexer = Genlex.make_lexer ["{"; ";"; "}"] in
    parse_flow (lexer (Stream.of_string s))
;;
type resolution = | DAY | WEEK | MONTH | YEAR ;;
(* Textual name of a resolution; inverse of [resolution_of_string]. *)
let string_of_resolution : resolution -> string =
  fun r ->
    match r with
    | DAY -> "DAY"
    | WEEK -> "WEEK"
    | MONTH -> "MONTH"
    | YEAR -> "YEAR"
;;
(* Parse a resolution name ("DAY" | "WEEK" | "MONTH" | "YEAR");
   raises [Failure] on any other string. *)
let resolution_of_string : string -> resolution =
  function
    | "DAY" -> DAY
    | "WEEK" -> WEEK
    | "MONTH" -> MONTH
    | "YEAR" -> YEAR
    (* Fixed garbled error message ("Convert convert"). *)
    | s -> failwith ("Cannot convert \""^s^"\" to a resolution")
;;
(* Parse a resolution written as a bare identifier, e.g. DAY. *)
let parse_resolution = parser
  | [< 'Genlex.Ident s >] -> resolution_of_string s
;;
(* Build a calendar period of [n] units at resolution [u],
   e.g. [make_tenor MONTH 3] is a three-month tenor. *)
let make_tenor u n =
  match u with
  | DAY -> CalendarLib.Date.Period.day n
  | WEEK -> CalendarLib.Date.Period.week n
  | MONTH -> CalendarLib.Date.Period.month n
  | YEAR -> CalendarLib.Date.Period.year n
;;
type gen_flows_param_pack =
{
gfp_start : Dates.t ;
gfp_end : Dates.t ;
gfp_period : int ;
gfp_unit : resolution ;
gfp_accrual_shift_conv : Dates.shift_convention ;
gfp_accrual_basis : Dates.day_count ;
gfp_accrual_hols : string ;
gfp_payment_delay : int ;
gfp_payment_shift_conv : Dates.shift_convention ;
gfp_payment_basis : Dates.day_count ;
gfp_payment_hols : string
}
;;
(* Render a parameter pack as "{f1;f2;...;f11}"; the two holiday-calendar
   fields are quoted.  Inverse of [gen_flows_param_pack_of_string]. *)
let string_of_gen_flows_param_pack : gen_flows_param_pack -> string =
  fun pack ->
    let fields =
      [ Dates.string_of_date pack.gfp_start
      ; Dates.string_of_date pack.gfp_end
      ; string_of_int pack.gfp_period
      ; string_of_resolution pack.gfp_unit
      ; Dates.string_of_shift_convention pack.gfp_accrual_shift_conv
      ; Dates.string_of_day_count pack.gfp_accrual_basis
      ; "\""^pack.gfp_accrual_hols^"\""
      ; string_of_int pack.gfp_payment_delay
      ; Dates.string_of_shift_convention pack.gfp_payment_shift_conv
      ; Dates.string_of_day_count pack.gfp_payment_basis
      ; "\""^pack.gfp_payment_hols^"\""
      ]
    in "{" ^ (String.concat ";" fields) ^ "}"
;;
(* Parse a [gen_flows_param_pack] from a token stream of the form
   "{start;end;period;unit;shift;basis;"hols";delay;shift;basis;"hols"}"
   (camlp4 stream-parser syntax).  Inverse of
   [string_of_gen_flows_param_pack]. *)
let rec parse_gen_flows_param_pack = parser
    [< 'Genlex.Kwd "{" ;
       start = Dates.parse_date ; 'Genlex.Kwd ";" ;
       end_ = Dates.parse_date ; 'Genlex.Kwd ";" ;
       period = (parser [<'Genlex.Int i>]->i) ; 'Genlex.Kwd ";" ;
       unit = parse_resolution ; 'Genlex.Kwd ";" ;
       acc_shift = Dates.parse_shift_convention ; 'Genlex.Kwd ";" ;
       acc_basis = Dates.parse_day_count ; 'Genlex.Kwd ";" ;
       acc_hols = (parser [<'Genlex.String s>]->s) ; 'Genlex.Kwd ";" ;
       pay_delay = (parser [<'Genlex.Int i>]->i) ; 'Genlex.Kwd ";" ;
       pay_shift = Dates.parse_shift_convention ; 'Genlex.Kwd ";" ;
       pay_basis = Dates.parse_day_count ; 'Genlex.Kwd ";" ;
       pay_hols = ((parser [<'Genlex.String s>]->s)) ;
       'Genlex.Kwd "}"
    >] ->
    {
      gfp_start=start;
      gfp_end=end_;
      gfp_period=period;
      gfp_unit=unit;
      gfp_accrual_shift_conv=acc_shift;
      gfp_accrual_basis=acc_basis;
      gfp_accrual_hols=acc_hols;
      gfp_payment_delay=pay_delay;
      gfp_payment_shift_conv=pay_shift;
      gfp_payment_basis=pay_basis;
      gfp_payment_hols=pay_hols
    }
;;
(* Parse a parameter pack from its textual form; inverse of
   [string_of_gen_flows_param_pack]. *)
let gen_flows_param_pack_of_string : string -> gen_flows_param_pack =
  fun s ->
    let lexer = Genlex.make_lexer ["{"; ";";"}"]
    in parse_gen_flows_param_pack (lexer (Stream.of_string s))
;;
(* Generate the accrual/payment schedule described by [params]: roll
   dates are laid out every [gfp_period] [gfp_unit]s from [gfp_start]
   until the unadjusted roll date reaches [gfp_end]; each roll period is
   then shifted to good business days and its accrual fraction computed.
   The schedule-invariant bindings are hoisted out of the recursion, and
   the non-function [day] binding is moved out of the [let rec] group
   (a record access is not a valid right-hand side there). *)
let gen_flows : gen_flows_param_pack -> (flow) list =
  fun params ->
    let start = params.gfp_start
    and end_ = params.gfp_end
    and u = params.gfp_unit in
    (* [day] is the unadjusted end of the previous roll period; [i] is
       the index of the roll period being generated. *)
    let rec gen_flows_ day i =
      if day >= end_ then []
      else
        let roll_start = CalendarLib.Date.add start (make_tenor u (i*params.gfp_period))
        and roll_end = CalendarLib.Date.add start (make_tenor u ((i+1)*params.gfp_period))
        in
        let acc_start = Dates.shift roll_start params.gfp_accrual_shift_conv params.gfp_accrual_hols
        and acc_end = Dates.shift roll_end params.gfp_accrual_shift_conv params.gfp_accrual_hols
        and pay_delay = make_tenor DAY params.gfp_payment_delay
        in
        let pay = Dates.shift (CalendarLib.Date.add roll_end pay_delay) params.gfp_payment_shift_conv params.gfp_payment_hols
        and alpha = Dates.year_fraction (acc_start, acc_end) params.gfp_accrual_basis
        in {flow_start=acc_start; flow_end=acc_end; flow_pay=pay; flow_accrual=alpha} :: gen_flows_ roll_end (i + 1)
    in
    gen_flows_ start 0
;;
| null |
https://raw.githubusercontent.com/shayne-fletcher/zen/10a1d0b9bf261bb133918dd62fb1593c3d4d21cb/ocaml/curve/flows.ml
|
ocaml
|
type flow =
{
flow_start : Dates.t ;
flow_end : Dates.t ;
flow_pay : Dates.t ;
flow_accrual : float ;
}
;;
let rec parse_flow = parser
[< 'Genlex.Kwd "{" ;
s = Dates.parse_date ; 'Genlex.Kwd ";" ;
e = Dates.parse_date ; 'Genlex.Kwd ";" ;
p = Dates.parse_date ; 'Genlex.Kwd ";" ;
a = (parser | [< 'Genlex.Float f >] -> f);
'Genlex.Kwd "}" >] ->
{flow_start=s; flow_end=e; flow_pay=p; flow_accrual=a}
;;
let string_of_flow : flow -> string =
fun f ->
let s = Dates.string_of_date f.flow_start
and e = Dates.string_of_date f.flow_end
and p = Dates.string_of_date f.flow_pay
and a = string_of_float f.flow_accrual
in "{"^s^" ; "^ e^" ; "^p^ "; "^a^"}"
;;
let flow_of_string : string -> flow =
fun s ->
let lexer = Genlex.make_lexer ["{"; ";"; "}"] in
parse_flow (lexer (Stream.of_string s))
;;
type resolution = | DAY | WEEK | MONTH | YEAR ;;
let string_of_resolution : resolution -> string =
function
| DAY -> "DAY"
| WEEK -> "WEEK"
| MONTH -> "MONTH"
| YEAR -> "YEAR"
;;
(* Parse a resolution name ("DAY" | "WEEK" | "MONTH" | "YEAR");
   raises [Failure] on any other string. *)
let resolution_of_string : string -> resolution =
  function
    | "DAY" -> DAY
    | "WEEK" -> WEEK
    | "MONTH" -> MONTH
    | "YEAR" -> YEAR
    (* Fixed garbled error message ("Convert convert"). *)
    | s -> failwith ("Cannot convert \""^s^"\" to a resolution")
;;
let parse_resolution = parser
| [< 'Genlex.Ident s >] -> resolution_of_string s
;;
let make_tenor u n =
match u with
| DAY -> CalendarLib.Date.Period.day n
| WEEK -> CalendarLib.Date.Period.week n
| MONTH -> CalendarLib.Date.Period.month n
| YEAR -> CalendarLib.Date.Period.year n
;;
type gen_flows_param_pack =
{
gfp_start : Dates.t ;
gfp_end : Dates.t ;
gfp_period : int ;
gfp_unit : resolution ;
gfp_accrual_shift_conv : Dates.shift_convention ;
gfp_accrual_basis : Dates.day_count ;
gfp_accrual_hols : string ;
gfp_payment_delay : int ;
gfp_payment_shift_conv : Dates.shift_convention ;
gfp_payment_basis : Dates.day_count ;
gfp_payment_hols : string
}
;;
let string_of_gen_flows_param_pack : gen_flows_param_pack -> string =
fun pack ->
"{"^
(Dates.string_of_date pack.gfp_start)^";"^
(Dates.string_of_date pack.gfp_end)^";"^
(string_of_int pack.gfp_period)^";"^
(string_of_resolution pack.gfp_unit)^";"^
(Dates.string_of_shift_convention pack.gfp_accrual_shift_conv)^";"^
(Dates.string_of_day_count pack.gfp_accrual_basis)^";"^
"\""^pack.gfp_accrual_hols^"\";"^
(string_of_int pack.gfp_payment_delay)^";"^
(Dates.string_of_shift_convention pack.gfp_payment_shift_conv)^";"^
(Dates.string_of_day_count pack.gfp_payment_basis)^";"^
"\""^pack.gfp_payment_hols^"\"}"
;;
let rec parse_gen_flows_param_pack = parser
[< 'Genlex.Kwd "{" ;
start = Dates.parse_date ; 'Genlex.Kwd ";" ;
end_ = Dates.parse_date ; 'Genlex.Kwd ";" ;
period = (parser [<'Genlex.Int i>]->i) ; 'Genlex.Kwd ";" ;
unit = parse_resolution ; 'Genlex.Kwd ";" ;
acc_shift = Dates.parse_shift_convention ; 'Genlex.Kwd ";" ;
acc_basis = Dates.parse_day_count ; 'Genlex.Kwd ";" ;
acc_hols = (parser [<'Genlex.String s>]->s) ; 'Genlex.Kwd ";" ;
pay_delay = (parser [<'Genlex.Int i>]->i) ; 'Genlex.Kwd ";" ;
pay_shift = Dates.parse_shift_convention ; 'Genlex.Kwd ";" ;
pay_basis = Dates.parse_day_count ; 'Genlex.Kwd ";" ;
pay_hols = ((parser [<'Genlex.String s>]->s)) ;
'Genlex.Kwd "}"
>] ->
{
gfp_start=start;
gfp_end=end_;
gfp_period=period;
gfp_unit=unit;
gfp_accrual_shift_conv=acc_shift;
gfp_accrual_basis=acc_basis;
gfp_accrual_hols=acc_hols;
gfp_payment_delay=pay_delay;
gfp_payment_shift_conv=pay_shift;
gfp_payment_basis=pay_basis;
gfp_payment_hols=pay_hols
}
;;
let gen_flows_param_pack_of_string : string -> gen_flows_param_pack =
fun s ->
let lexer = Genlex.make_lexer ["{"; ";";"}"]
in parse_gen_flows_param_pack (lexer (Stream.of_string s))
;;
let gen_flows : gen_flows_param_pack -> (flow) list =
fun params ->
let rec gen_flows_ =
fun params day i ->
let start = params.gfp_start
and end_ = params.gfp_end
and u = params.gfp_unit
in
if day >= end_ then []
else
let roll_start = CalendarLib.Date.add start (make_tenor u (i*params.gfp_period))
and roll_end = CalendarLib.Date.add start (make_tenor u ((i+1)*params.gfp_period))
in
let acc_start = Dates.shift roll_start params.gfp_accrual_shift_conv params.gfp_accrual_hols
and acc_end = Dates.shift roll_end params.gfp_accrual_shift_conv params.gfp_accrual_hols
and pay_delay = make_tenor DAY params.gfp_payment_delay
in
let pay = Dates.shift (CalendarLib.Date.add roll_end pay_delay) params.gfp_payment_shift_conv params.gfp_payment_hols
and alpha = Dates.year_fraction (acc_start, acc_end) params.gfp_accrual_basis
in {flow_start=acc_start; flow_end=acc_end; flow_pay=pay; flow_accrual=alpha} :: gen_flows_ params roll_end (i + 1)
and day = params.gfp_start
in
gen_flows_ params day 0
;;
|
|
b006f6ce9ef862284f164007be61d483ee04c0cc0e008b9c5bf3267944daeb86
|
antoniogarrote/levanzo
|
validation.clj
|
(ns levanzo.validation)
| null |
https://raw.githubusercontent.com/antoniogarrote/levanzo/53a8fa134099b3ad6310bfc3493cc1fb9eb38f2f/src/levanzo/validation.clj
|
clojure
|
(ns levanzo.validation)
|
|
69ea70eb538df625d9ecc48f037cecd734cd699fb13517dffe92abd791ee3a4c
|
turtlesoupy/haskakafka
|
Haskakafka.hs
|
module Haskakafka
( fetchBrokerMetadata
, withKafkaConsumer
, consumeMessage
, consumeMessageBatch
, withKafkaProducer
, produceMessage
, produceKeyedMessage
, produceMessageBatch
, storeOffset
, seekToOffset
, getAllMetadata
, getTopicMetadata
, handleProduceErr
, producePartitionInteger
, pollEvents
, pollEventsSafe
-- Internal objects
, IS.newKafka
, IS.newKafkaTopic
, IS.dumpConfFromKafka
, IS.dumpConfFromKafkaTopic
, IS.setLogLevel
, IS.hPrintSupportedKafkaConf
, IS.hPrintKafka
, rdKafkaVersionStr
-- Type re-exports
, IT.Kafka(..)
, IT.KafkaTopic(..)
, IT.KafkaOffset(..)
, IT.KafkaMessage(..)
, IT.KafkaProduceMessage(..)
, IT.KafkaProducePartition(..)
, IT.KafkaMetadata(..)
, IT.KafkaBrokerMetadata(..)
, IT.KafkaTopicMetadata(..)
, IT.KafkaPartitionMetadata(..)
, IT.KafkaLogLevel(..)
, IT.KafkaError(..)
, RDE.RdKafkaRespErrT(..)
-- Pseudo-internal
, addBrokers
, startConsuming
, stopConsuming
, drainOutQueue
) where
import Haskakafka.InternalRdKafka
import Haskakafka.InternalRdKafkaEnum
import Haskakafka.InternalSetup
import Haskakafka.InternalShared
import Haskakafka.InternalTypes
import Control.Exception
import Control.Monad
import Foreign
import Foreign.C.Error
import Foreign.C.String
import Foreign.C.Types
import qualified Data.ByteString.Internal as BSI
import qualified Haskakafka.InternalRdKafkaEnum as RDE
import qualified Haskakafka.InternalSetup as IS
import qualified Haskakafka.InternalTypes as IT
import Data.Either
-- | Adds a broker string to a given kafka instance. You
-- probably shouldn't use this directly (see 'withKafkaConsumer'
-- and 'withKafkaProducer')
addBrokers :: Kafka -> String -> IO ()
addBrokers (Kafka kptr _) brokerStr = do
    -- librdkafka reports how many brokers in the list it accepted;
    -- zero accepted brokers means the specification was unusable.
    added <- rdKafkaBrokersAdd kptr brokerStr
    if added == 0
      then throw (KafkaBadSpecification "No valid brokers specified")
      else return ()
-- | Starts consuming for a given topic. You probably do not need
-- to call this directly (it is called automatically by 'withKafkaConsumer') but
-- 'consumeMessage' won't work without it. This function is non-blocking.
-- | Begin fetching the given partition at the given offset; errors from
-- librdkafka are thrown as exceptions via 'throwOnError'.
startConsuming :: KafkaTopic -> Int -> KafkaOffset -> IO ()
startConsuming (KafkaTopic topicPtr _ _) partition offset =
    throwOnError $ rdKafkaConsumeStart topicPtr partition $ offsetToInt64 offset
-- | Stops consuming for a given topic. You probably do not need to call
-- this directly (it is called automatically when 'withKafkaConsumer' terminates).
-- | Stop fetching the given partition; errors from librdkafka are
-- thrown as exceptions via 'throwOnError'.
stopConsuming :: KafkaTopic -> Int -> IO ()
stopConsuming (KafkaTopic topicPtr _ _) partition =
    throwOnError $ rdKafkaConsumeStop topicPtr partition
-- | Consumes a single message from a topic, waiting up to a given timeout
consumeMessage :: KafkaTopic
               -> Int -- ^ partition number to consume from (must match 'withKafkaConsumer')
               -> Int -- ^ the timeout, in milliseconds (10^3 per second)
               -> IO (Either KafkaError KafkaMessage) -- ^ 'Left' on error or timeout, 'Right' for success
consumeMessage (KafkaTopic topicPtr _ _) partition timeout = do
    -- Blocks inside librdkafka for at most 'timeout' ms; a timeout is
    -- surfaced by 'fromMessagePtr' as a 'Left' value.
    ptr <- rdKafkaConsume topicPtr (fromIntegral partition) (fromIntegral timeout)
    fromMessagePtr ptr
-- | Consumes a batch of messages from a topic, waiting up to a given timeout. Partial results
-- will be returned if a timeout occurs.
consumeMessageBatch :: KafkaTopic
                    -> Int -- ^ partition number to consume from (must match 'withKafkaConsumer')
                    -> Int -- ^ timeout in milliseconds (10^3 per second)
                    -> Int -- ^ maximum number of messages to return
                    -> IO (Either KafkaError [KafkaMessage]) -- ^ Left on error, right with up to 'maxMessages' messages on success
consumeMessageBatch (KafkaTopic topicPtr _ _) partition timeout maxMessages =
  allocaArray maxMessages $ \outputPtr -> do
    numMessages <- rdKafkaConsumeBatch topicPtr (fromIntegral partition) timeout outputPtr (fromIntegral maxMessages)
    -- A negative count signals a librdkafka-level failure reported via errno.
    if numMessages < 0 then getErrno >>= return . Left . kafkaRespErr
    else do
      ms <- if numMessages /= 0 then
              forM [0..(numMessages - 1)] $ \mnum -> do
                -- Convert each raw message, then free librdkafka's copy.
                storablePtr <- peekElemOff outputPtr (fromIntegral mnum)
                storable <- peek storablePtr
                ret <- fromMessageStorable storable
                fptr <- newForeignPtr_ storablePtr
                withForeignPtr fptr $ \realPtr ->
                    rdKafkaMessageDestroy realPtr
                if (err'RdKafkaMessageT storable) /= RdKafkaRespErrNoError then
                    return $ Left $ KafkaResponseError $ err'RdKafkaMessageT storable
                else
                    return $ Right ret
            else return []
      -- Fail with the first per-message error, otherwise return them all.
      case lefts ms of
        [] -> return $ Right $ rights ms
        l -> return $ Left $ head l
-- | Reposition the consumer for one partition to the given offset,
-- waiting up to the timeout for the seek to complete.
seekToOffset :: KafkaTopic
             -> Int -- ^ partition number
             -> KafkaOffset -- ^ destination
             -> Int -- ^ timeout in milliseconds
             -> IO (Maybe KafkaError) -- ^ Nothing on success
seekToOffset (KafkaTopic ptr _ _) p ofs timeout = do
    err <- rdKafkaSeek ptr (fromIntegral p)
             (fromIntegral $ offsetToInt64 ofs) timeout
    case err of
        RdKafkaRespErrNoError -> return Nothing
        e -> return $ Just $ KafkaResponseError e
-- | Store a partition's offset in librdkafka's offset store. This function only needs to be called
-- if auto.commit.enable is false. See <>
-- for information on how to configure the offset store.
storeOffset :: KafkaTopic -> Int -> Int -> IO (Maybe KafkaError)
storeOffset (KafkaTopic topicPtr _ _) partition offset = do
    err <- rdKafkaOffsetStore topicPtr (fromIntegral partition) (fromIntegral offset)
    -- 'Nothing' on success, otherwise the wrapped librdkafka error.
    return $ case err of
      RdKafkaRespErrNoError -> Nothing
      e                     -> Just (KafkaResponseError e)
-- | Produce a single unkeyed message to either a random partition or specified partition. Since
-- librdkafka is backed by a queue, this function can return before messages are sent. See
-- 'drainOutQueue' to wait for queue to empty.
produceMessage :: KafkaTopic -- ^ topic pointer
               -> KafkaProducePartition -- ^ the partition to produce to. Specify 'KafkaUnassignedPartition' if you don't care.
               -> KafkaProduceMessage -- ^ the message to enqueue. This function is undefined for keyed messages.
               -> IO (Maybe KafkaError) -- ^ Nothing on success, error if something went wrong.
produceMessage (KafkaTopic topicPtr _ _) partition (KafkaProduceMessage payload) = do
    let (payloadFPtr, payloadOffset, payloadLength) = BSI.toForeignPtr payload

    withForeignPtr payloadFPtr $ \payloadPtr -> do
        let passedPayload = payloadPtr `plusPtr` payloadOffset

        handleProduceErr =<<
          rdKafkaProduce topicPtr (producePartitionInteger partition)
            copyMsgFlags passedPayload (fromIntegral payloadLength)
            nullPtr (CSize 0) nullPtr
-- Fail loudly for the unsupported case instead of an uninformative
-- 'undefined'; keyed messages must go through 'produceKeyedMessage'.
produceMessage _ _ (KafkaProduceKeyedMessage _ _) =
    error "produceMessage: keyed messages are not supported; use produceKeyedMessage"
-- | Produce a single keyed message. Since librdkafka is backed by a queue, this function can return
-- before messages are sent. See 'drainOutQueue' to wait for a queue to be empty
produceKeyedMessage :: KafkaTopic -- ^ topic pointer
                    -> KafkaProduceMessage -- ^ keyed message. This function is undefined for unkeyed messages.
                    -> IO (Maybe KafkaError) -- ^ Nothing on success, error if something went wrong.
-- Fail loudly for the unsupported case instead of an uninformative
-- 'undefined'; unkeyed messages must go through 'produceMessage'.
produceKeyedMessage _ (KafkaProduceMessage _) =
    error "produceKeyedMessage: unkeyed messages are not supported; use produceMessage"
produceKeyedMessage (KafkaTopic topicPtr _ _) (KafkaProduceKeyedMessage key payload) = do
    let (payloadFPtr, payloadOffset, payloadLength) = BSI.toForeignPtr payload
        (keyFPtr, keyOffset, keyLength) = BSI.toForeignPtr key

    withForeignPtr payloadFPtr $ \payloadPtr -> do
      withForeignPtr keyFPtr $ \keyPtr -> do
        let passedPayload = payloadPtr `plusPtr` payloadOffset
            passedKey = keyPtr `plusPtr` keyOffset

        handleProduceErr =<<
          rdKafkaProduce topicPtr (producePartitionInteger KafkaUnassignedPartition)
            copyMsgFlags passedPayload (fromIntegral payloadLength)
            passedKey (fromIntegral keyLength) nullPtr
-- | Produce a batch of messages. Since librdkafka is backed by a queue, this function can return
-- before messages are sent. See 'drainOutQueue' to wait for the queue to be empty.
produceMessageBatch :: KafkaTopic -- ^ topic pointer
                    -> KafkaProducePartition -- ^ partition to produce to. Specify 'KafkaUnassignedPartition' if you don't care, or you have keyed messsages.
                    -> [KafkaProduceMessage] -- ^ list of messages to enqueue.
                    -> IO ([(KafkaProduceMessage, KafkaError)]) -- ^ list of failed messages with their errors. This will be empty on success.
produceMessageBatch (KafkaTopic topicPtr _ _) partition pms = do
  -- Marshal every message into librdkafka's storable form, hand the
  -- whole array to rd_kafka_produce_batch, then pair any per-message
  -- errors back up with the original messages.
  storables <- forM pms produceMessageToMessage
  withArray storables $ \batchPtr -> do
    batchPtrF <- newForeignPtr_ batchPtr
    numRet <- rdKafkaProduceBatch topicPtr partitionInt copyMsgFlags batchPtrF (length storables)
    if numRet == (length storables) then return []
    else do
      errs <- mapM (\i -> return . err'RdKafkaMessageT =<< peekElemOff batchPtr i)
                   [0..((fromIntegral $ length storables) - 1)]
      return [(m, KafkaResponseError e) | (m, e) <- (zip pms errs), e /= RdKafkaRespErrNoError]
  where
    partitionInt = (producePartitionInteger partition)
    -- Build the storable for an unkeyed message.
    produceMessageToMessage (KafkaProduceMessage bs) = do
        let (payloadFPtr, payloadOffset, payloadLength) = BSI.toForeignPtr bs
        withForeignPtr topicPtr $ \ptrTopic -> do
          withForeignPtr payloadFPtr $ \payloadPtr -> do
            let passedPayload = payloadPtr `plusPtr` payloadOffset
            return $ RdKafkaMessageT
                { err'RdKafkaMessageT = RdKafkaRespErrNoError
                , topic'RdKafkaMessageT = ptrTopic
                , partition'RdKafkaMessageT = fromIntegral partitionInt
                , len'RdKafkaMessageT = payloadLength
                , payload'RdKafkaMessageT = passedPayload
                , offset'RdKafkaMessageT = 0
                , keyLen'RdKafkaMessageT = 0
                , key'RdKafkaMessageT = nullPtr
                , private'RdKafkaMessageT = nullPtr
                }
    -- Build the storable for a keyed message.
    produceMessageToMessage (KafkaProduceKeyedMessage kbs bs) = do
        let (payloadFPtr, payloadOffset, payloadLength) = BSI.toForeignPtr bs
            (keyFPtr, keyOffset, keyLength) = BSI.toForeignPtr kbs
        withForeignPtr topicPtr $ \ptrTopic ->
          withForeignPtr payloadFPtr $ \payloadPtr -> do
            withForeignPtr keyFPtr $ \keyPtr -> do
              let passedPayload = payloadPtr `plusPtr` payloadOffset
                  passedKey = keyPtr `plusPtr` keyOffset
              return $ RdKafkaMessageT
                  { err'RdKafkaMessageT = RdKafkaRespErrNoError
                  , topic'RdKafkaMessageT = ptrTopic
                  , partition'RdKafkaMessageT = fromIntegral partitionInt
                  , len'RdKafkaMessageT = payloadLength
                  , payload'RdKafkaMessageT = passedPayload
                  , offset'RdKafkaMessageT = 0
                  , keyLen'RdKafkaMessageT = keyLength
                  , key'RdKafkaMessageT = passedKey
                  , private'RdKafkaMessageT = nullPtr
                  }
| Connects to broker in producer mode for a given topic , taking a function
that is fed with ' ' and ' ' instances . After receiving handles you
-- should be using 'produceMessage', 'produceKeyedMessage' and 'produceMessageBatch'
-- to publish messages. This function drains the outbound queue automatically before returning.
withKafkaProducer :: ConfigOverrides -- ^ config overrides for kafka. See <>. Use an empty list if you don't care.
                  -> ConfigOverrides -- ^ config overrides for topic. See <>. Use an empty list if you don't care.
                  -> String -- ^ broker string, e.g. localhost:9092
                  -> String -- ^ topic name
                  -> (Kafka -> KafkaTopic -> IO a) -- ^ your code, fed with 'Kafka' and 'KafkaTopic' instances for subsequent interaction.
                  -> IO a -- ^ returns what your code does
withKafkaProducer configOverrides topicConfigOverrides brokerString tName cb =
    -- 'bracket' guarantees the outbound queue is drained even if the
    -- callback throws.
    bracket
      (do
        kafka <- newKafka RdKafkaProducer configOverrides
        addBrokers kafka brokerString
        topic <- newKafkaTopic kafka tName topicConfigOverrides
        return (kafka, topic)
      )
      (\(kafka, _) -> drainOutQueue kafka)
      (\(k, t) -> cb k t)
| Connects to broker in consumer mode for a specific partition ,
-- taking a function that is fed with
' ' and ' ' instances . After receiving handles , you should be using
' consumeMessage ' and ' consumeMessageBatch ' to receive messages . This function
-- automatically starts consuming before calling your code.
withKafkaConsumer :: ConfigOverrides -- ^ config overrides for kafka. See <>. Use an empty list if you don't care.
                  -> ConfigOverrides -- ^ config overrides for topic. See <>. Use an empty list if you don't care.
                  -> String -- ^ broker string, e.g. localhost:9092
                  -> String -- ^ topic name
                  -> Int -- ^ partition to consume from. Locked until the function returns.
                  -> KafkaOffset -- ^ where to begin consuming in the partition.
                  -> (Kafka -> KafkaTopic -> IO a) -- ^ your code, fed with 'Kafka' and 'KafkaTopic' instances for subsequent interaction.
                  -> IO a -- ^ returns what your code does
withKafkaConsumer configOverrides topicConfigOverrides brokerString tName partition offset cb =
    -- 'bracket' guarantees consumption is stopped (releasing the
    -- partition lock) even if the callback throws.
    bracket
      (do
        kafka <- newKafka RdKafkaConsumer configOverrides
        addBrokers kafka brokerString
        topic <- newKafkaTopic kafka tName topicConfigOverrides
        startConsuming topic partition offset
        return (kafka, topic)
      )
      (\(_, topic) -> stopConsuming topic partition)
      (\(k, t) -> cb k t)
{-# INLINE copyMsgFlags #-}
-- | librdkafka message flag asking it to copy the payload before returning.
copyMsgFlags :: Int
copyMsgFlags = rdKafkaMsgFlagCopy
{-# INLINE producePartitionInteger #-}
-- | Convert a partition specification to librdkafka's integer encoding
-- (-1 means unassigned, i.e. RD_KAFKA_PARTITION_UA).
producePartitionInteger :: KafkaProducePartition -> CInt
producePartitionInteger KafkaUnassignedPartition = -1
producePartitionInteger (KafkaSpecifiedPartition n) = fromIntegral n
{-# INLINE handleProduceErr #-}
-- | Map rd_kafka_produce's return code to a 'Maybe' error:
-- -1 converts the thread's errno, 0 is success, anything else is
-- an unexpected return value.
handleProduceErr :: Int -> IO (Maybe KafkaError)
handleProduceErr (- 1) = getErrno >>= return . Just . kafkaRespErr
handleProduceErr 0 = return $ Nothing
handleProduceErr _ = return $ Just $ KafkaInvalidReturnValue
-- | Opens a connection with brokers and returns metadata about topics, partitions and brokers.
fetchBrokerMetadata :: ConfigOverrides -- ^ connection overrides, see <>
                    -> String -- ^ broker connection string, e.g. localhost:9092
                    -> Int -- ^ timeout for the request, in milliseconds (10^3 per second)
                    -> IO (Either KafkaError KafkaMetadata) -- ^ Left on error, Right with metadata on success
fetchBrokerMetadata configOverrides brokerString timeout = do
  -- A throwaway consumer handle is enough for a metadata request.
  kafka <- newKafka RdKafkaConsumer configOverrides
  addBrokers kafka brokerString
  getAllMetadata kafka timeout
| Grabs all metadata from a given instance .
-- | Grabs all metadata from a given Kafka instance.
getAllMetadata :: Kafka
               -> Int -- ^ timeout in milliseconds (10^3 per second)
               -> IO (Either KafkaError KafkaMetadata)
getAllMetadata k timeout = getMetadata k Nothing timeout
| Grabs topic metadata from a given topic instance
-- | Grabs metadata for a single topic.
getTopicMetadata :: Kafka
                 -> KafkaTopic
                 -> Int -- ^ timeout in milliseconds (10^3 per second)
                 -> IO (Either KafkaError KafkaTopicMetadata)
getTopicMetadata k kt timeout = do
  err <- getMetadata k (Just kt) timeout
  case err of
    Left e -> return $ Left $ e
    -- A single-topic request must yield exactly one topic entry.
    Right md -> case (topics md) of
      [(Left e)] -> return $ Left e
      [(Right tmd)] -> return $ Right tmd
      _ -> return $ Left $ KafkaError "Incorrect number of topics returned"
-- | Shared worker for 'getAllMetadata' / 'getTopicMetadata': asks
-- librdkafka for metadata (all topics when 'Nothing', one topic when
-- 'Just'), converts the C structures into Haskell values, and frees
-- the C-side metadata before returning.
getMetadata :: Kafka -> Maybe KafkaTopic -> Int -> IO (Either KafkaError KafkaMetadata)
getMetadata (Kafka kPtr _) mTopic timeout = alloca $ \mdDblPtr -> do
    err <- case mTopic of
      Just (KafkaTopic kTopicPtr _ _) ->
        rdKafkaMetadata kPtr False kTopicPtr mdDblPtr timeout
      Nothing -> do
        -- all-topics request: pass a null topic and the all_topics flag
        nullTopic <- newForeignPtr_ nullPtr
        rdKafkaMetadata kPtr True nullTopic mdDblPtr timeout

    case err of
      RdKafkaRespErrNoError -> do
        mdPtr <- peek mdDblPtr
        md <- peek mdPtr
        retMd <- constructMetadata md
        -- Everything has been copied out; release the C structure.
        rdKafkaMetadataDestroy mdPtr
        return $ Right $ retMd
      e -> return $ Left $ KafkaResponseError e

    where
      -- Walk the broker and topic arrays embedded in the C struct.
      constructMetadata md = do
        let nBrokers = (brokerCnt'RdKafkaMetadataT md)
            brokersPtr = (brokers'RdKafkaMetadataT md)
            nTopics = (topicCnt'RdKafkaMetadataT md)
            topicsPtr = (topics'RdKafkaMetadataT md)
        brokerMds <- mapM (\i -> constructBrokerMetadata =<< peekElemOff brokersPtr i) [0..((fromIntegral nBrokers) - 1)]
        topicMds <- mapM (\i -> constructTopicMetadata =<< peekElemOff topicsPtr i) [0..((fromIntegral nTopics) - 1)]
        return $ KafkaMetadata brokerMds topicMds
      constructBrokerMetadata bmd = do
        hostStr <- peekCString (host'RdKafkaMetadataBrokerT bmd)
        return $ KafkaBrokerMetadata
                  (id'RdKafkaMetadataBrokerT bmd)
                  (hostStr)
                  (port'RdKafkaMetadataBrokerT bmd)
      -- Topic- and partition-level entries each carry their own error
      -- code, surfaced as 'Left' without aborting the whole request.
      constructTopicMetadata tmd = do
        case (err'RdKafkaMetadataTopicT tmd) of
          RdKafkaRespErrNoError -> do
            let nPartitions = (partitionCnt'RdKafkaMetadataTopicT tmd)
                partitionsPtr = (partitions'RdKafkaMetadataTopicT tmd)
            topicStr <- peekCString (topic'RdKafkaMetadataTopicT tmd)
            partitionsMds <- mapM (\i -> constructPartitionMetadata =<< peekElemOff partitionsPtr i) [0..((fromIntegral nPartitions) - 1)]
            return $ Right $ KafkaTopicMetadata topicStr partitionsMds
          e -> return $ Left $ KafkaResponseError e
      constructPartitionMetadata pmd = do
        case (err'RdKafkaMetadataPartitionT pmd) of
          RdKafkaRespErrNoError -> do
            let nReplicas = (replicaCnt'RdKafkaMetadataPartitionT pmd)
                replicasPtr = (replicas'RdKafkaMetadataPartitionT pmd)
                nIsrs = (isrCnt'RdKafkaMetadataPartitionT pmd)
                isrsPtr = (isrs'RdKafkaMetadataPartitionT pmd)
            replicas <- mapM (\i -> peekElemOff replicasPtr i) [0..((fromIntegral nReplicas) - 1)]
            isrs <- mapM (\i -> peekElemOff isrsPtr i) [0..((fromIntegral nIsrs) - 1)]
            return $ Right $ KafkaPartitionMetadata
              (id'RdKafkaMetadataPartitionT pmd)
              (leader'RdKafkaMetadataPartitionT pmd)
              (map fromIntegral replicas)
              (map fromIntegral isrs)
          e -> return $ Left $ KafkaResponseError e
pollEvents :: Kafka -> Int -> IO ()
pollEvents (Kafka kPtr _) timeout = rdKafkaPoll kPtr timeout >> return ()
pollEventsSafe :: Kafka -> Int -> IO ()
pollEventsSafe (Kafka kPtr _) timeout = do
_ <- withForeignPtr kPtr $ \realPtr -> do
rdKafkaPollSafe realPtr timeout
return ()
outboundQueueLength :: Kafka -> IO (Int)
outboundQueueLength (Kafka kPtr _) = rdKafkaOutqLen kPtr
-- | Drains the outbound queue for a producer. This function is called automatically at the end of
-- 'withKafkaProducer' and usually doesn't need to be called directly.
drainOutQueue :: Kafka -> IO ()
drainOutQueue k = do
pollEvents k 100
l <- outboundQueueLength k
if l == 0 then return ()
else drainOutQueue k
| null |
https://raw.githubusercontent.com/turtlesoupy/haskakafka/cae3348be1a3934629cf58d433c224d87ff59608/src/Haskakafka.hs
|
haskell
|
Internal objects
Type re-exports
Pseudo-internal
| Adds a broker string to a given kafka instance. You
probably shouldn't use this directly (see 'withKafkaConsumer'
and 'withKafkaProducer')
| Starts consuming for a given topic. You probably do not need
to call this directly (it is called automatically by 'withKafkaConsumer') but
'consumeMessage' won't work without it. This function is non-blocking.
| Stops consuming for a given topic. You probably do not need to call
this directly (it is called automatically when 'withKafkaConsumer' terminates).
^ partition number to consume from (must match 'withKafkaConsumer')
^ Left on error or timeout, right for success
will be returned if a timeout occurs.
^ partition number to consume from (must match 'withKafkaConsumer')
^ maximum number of messages to return
^ Left on error, right with up to 'maxMessages' messages on success
^ partition number
^ destination
^ timeout in milliseconds
| Store a partition's offset in librdkafka's offset store. This function only needs to be called
if auto.commit.enable is false. See <>
for information on how to configure the offset store.
librdkafka is backed by a queue, this function can return before messages are sent. See
'drainOutQueue' to wait for queue to empty.
^ topic pointer
^ the partition to produce to. Specify 'KafkaUnassignedPartition' if you don't care.
^ the message to enqueue. This function is undefined for keyed messages.
Nothing on success, error if something went wrong.
| Produce a single keyed message. Since librdkafka is backed by a queue, this function can return
before messages are sent. See 'drainOutQueue' to wait for a queue to be empty
^ topic pointer
^ Nothing on success, error if something went wrong.
| Produce a batch of messages. Since librdkafka is backed by a queue, this function can return
before messages are sent. See 'drainOutQueue' to wait for the queue to be empty.
^ topic pointer
^ list of messages to enqueue.
list of failed messages with their errors. This will be empty on success.
should be using 'produceMessage', 'produceKeyedMessage' and 'produceMessageBatch'
to publish messages. This function drains the outbound queue automatically before returning.
^ config overrides for kafka. See <>. Use an empty list if you don't care.
^ config overrides for topic. See <>. Use an empty list if you don't care.
^ broker string, e.g. localhost:9092
^ topic name
^ returns what your code does
taking a function that is fed with
automatically starts consuming before calling your code.
^ config overrides for kafka. See <>. Use an empty list if you don't care.
^ config overrides for topic. See <>. Use an empty list if you don't care.
^ broker string, e.g. localhost:9092
^ topic name
^ partition to consume from. Locked until the function returns.
^ where to begin consuming in the partition.
^ returns what your code does
| Opens a connection with brokers and returns metadata about topics, partitions and brokers.
^ connection overrides, see <>
broker connection string, e.g. localhost:9092
Left on error, Right with metadata on success
| Drains the outbound queue for a producer. This function is called automatically at the end of
'withKafkaProducer' and usually doesn't need to be called directly.
|
module Haskakafka
( fetchBrokerMetadata
, withKafkaConsumer
, consumeMessage
, consumeMessageBatch
, withKafkaProducer
, produceMessage
, produceKeyedMessage
, produceMessageBatch
, storeOffset
, seekToOffset
, getAllMetadata
, getTopicMetadata
, handleProduceErr
, producePartitionInteger
, pollEvents
, pollEventsSafe
, IS.newKafka
, IS.newKafkaTopic
, IS.dumpConfFromKafka
, IS.dumpConfFromKafkaTopic
, IS.setLogLevel
, IS.hPrintSupportedKafkaConf
, IS.hPrintKafka
, rdKafkaVersionStr
, IT.Kafka(..)
, IT.KafkaTopic(..)
, IT.KafkaOffset(..)
, IT.KafkaMessage(..)
, IT.KafkaProduceMessage(..)
, IT.KafkaProducePartition(..)
, IT.KafkaMetadata(..)
, IT.KafkaBrokerMetadata(..)
, IT.KafkaTopicMetadata(..)
, IT.KafkaPartitionMetadata(..)
, IT.KafkaLogLevel(..)
, IT.KafkaError(..)
, RDE.RdKafkaRespErrT(..)
, addBrokers
, startConsuming
, stopConsuming
, drainOutQueue
) where
import Haskakafka.InternalRdKafka
import Haskakafka.InternalRdKafkaEnum
import Haskakafka.InternalSetup
import Haskakafka.InternalShared
import Haskakafka.InternalTypes
import Control.Exception
import Control.Monad
import Foreign
import Foreign.C.Error
import Foreign.C.String
import Foreign.C.Types
import qualified Data.ByteString.Internal as BSI
import qualified Haskakafka.InternalRdKafkaEnum as RDE
import qualified Haskakafka.InternalSetup as IS
import qualified Haskakafka.InternalTypes as IT
import Data.Either
addBrokers :: Kafka -> String -> IO ()
addBrokers (Kafka kptr _) brokerStr = do
numBrokers <- rdKafkaBrokersAdd kptr brokerStr
when (numBrokers == 0)
(throw $ KafkaBadSpecification "No valid brokers specified")
startConsuming :: KafkaTopic -> Int -> KafkaOffset -> IO ()
startConsuming (KafkaTopic topicPtr _ _) partition offset =
throwOnError $ rdKafkaConsumeStart topicPtr partition $ offsetToInt64 offset
stopConsuming :: KafkaTopic -> Int -> IO ()
stopConsuming (KafkaTopic topicPtr _ _) partition =
throwOnError $ rdKafkaConsumeStop topicPtr partition
| Consumes a single message from a topic , waiting up to a given timeout
consumeMessage :: KafkaTopic
^ the timeout , in milliseconds ( 10 ^ 3 per second )
consumeMessage (KafkaTopic topicPtr _ _) partition timeout = do
ptr <- rdKafkaConsume topicPtr (fromIntegral partition) (fromIntegral timeout)
fromMessagePtr ptr
| Consumes a batch of messages from a topic , waiting up to a given timeout . Partial results
consumeMessageBatch :: KafkaTopic
^ timeout in milliseconds ( 10 ^ 3 per second )
consumeMessageBatch (KafkaTopic topicPtr _ _) partition timeout maxMessages =
allocaArray maxMessages $ \outputPtr -> do
numMessages <- rdKafkaConsumeBatch topicPtr (fromIntegral partition) timeout outputPtr (fromIntegral maxMessages)
if numMessages < 0 then getErrno >>= return . Left . kafkaRespErr
else do
ms <- if numMessages /= 0 then
forM [0..(numMessages - 1)] $ \mnum -> do
storablePtr <- peekElemOff outputPtr (fromIntegral mnum)
storable <- peek storablePtr
ret <- fromMessageStorable storable
fptr <- newForeignPtr_ storablePtr
withForeignPtr fptr $ \realPtr ->
rdKafkaMessageDestroy realPtr
if (err'RdKafkaMessageT storable) /= RdKafkaRespErrNoError then
return $ Left $ KafkaResponseError $ err'RdKafkaMessageT storable
else
return $ Right ret
else return []
case lefts ms of
[] -> return $ Right $ rights ms
l -> return $ Left $ head l
seekToOffset :: KafkaTopic
-> IO (Maybe KafkaError)
seekToOffset (KafkaTopic ptr _ _) p ofs timeout = do
err <- rdKafkaSeek ptr (fromIntegral p)
(fromIntegral $ offsetToInt64 ofs) timeout
case err of
RdKafkaRespErrNoError -> return Nothing
e -> return $ Just $ KafkaResponseError e
storeOffset :: KafkaTopic -> Int -> Int -> IO (Maybe KafkaError)
storeOffset (KafkaTopic topicPtr _ _) partition offset = do
err <- rdKafkaOffsetStore topicPtr (fromIntegral partition) (fromIntegral offset)
case err of
RdKafkaRespErrNoError -> return Nothing
e -> return $ Just $ KafkaResponseError e
| Produce a single unkeyed message to either a random partition or specified partition . Since
produceMessage (KafkaTopic topicPtr _ _) partition (KafkaProduceMessage payload) = do
let (payloadFPtr, payloadOffset, payloadLength) = BSI.toForeignPtr payload
withForeignPtr payloadFPtr $ \payloadPtr -> do
let passedPayload = payloadPtr `plusPtr` payloadOffset
handleProduceErr =<<
rdKafkaProduce topicPtr (producePartitionInteger partition)
copyMsgFlags passedPayload (fromIntegral payloadLength)
nullPtr (CSize 0) nullPtr
produceMessage _ _ (KafkaProduceKeyedMessage _ _) = undefined
^ keyed message . This function is undefined for unkeyed messages .
produceKeyedMessage _ (KafkaProduceMessage _) = undefined
produceKeyedMessage (KafkaTopic topicPtr _ _) (KafkaProduceKeyedMessage key payload) = do
let (payloadFPtr, payloadOffset, payloadLength) = BSI.toForeignPtr payload
(keyFPtr, keyOffset, keyLength) = BSI.toForeignPtr key
withForeignPtr payloadFPtr $ \payloadPtr -> do
withForeignPtr keyFPtr $ \keyPtr -> do
let passedPayload = payloadPtr `plusPtr` payloadOffset
passedKey = keyPtr `plusPtr` keyOffset
handleProduceErr =<<
rdKafkaProduce topicPtr (producePartitionInteger KafkaUnassignedPartition)
copyMsgFlags passedPayload (fromIntegral payloadLength)
passedKey (fromIntegral keyLength) nullPtr
^ partition to produce to . Specify ' KafkaUnassignedPartition ' if you do n't care , or you have keyed messsages .
produceMessageBatch (KafkaTopic topicPtr _ _) partition pms = do
storables <- forM pms produceMessageToMessage
withArray storables $ \batchPtr -> do
batchPtrF <- newForeignPtr_ batchPtr
numRet <- rdKafkaProduceBatch topicPtr partitionInt copyMsgFlags batchPtrF (length storables)
if numRet == (length storables) then return []
else do
errs <- mapM (\i -> return . err'RdKafkaMessageT =<< peekElemOff batchPtr i)
[0..((fromIntegral $ length storables) - 1)]
return [(m, KafkaResponseError e) | (m, e) <- (zip pms errs), e /= RdKafkaRespErrNoError]
where
partitionInt = (producePartitionInteger partition)
produceMessageToMessage (KafkaProduceMessage bs) = do
let (payloadFPtr, payloadOffset, payloadLength) = BSI.toForeignPtr bs
withForeignPtr topicPtr $ \ptrTopic -> do
withForeignPtr payloadFPtr $ \payloadPtr -> do
let passedPayload = payloadPtr `plusPtr` payloadOffset
return $ RdKafkaMessageT
{ err'RdKafkaMessageT = RdKafkaRespErrNoError
, topic'RdKafkaMessageT = ptrTopic
, partition'RdKafkaMessageT = fromIntegral partitionInt
, len'RdKafkaMessageT = payloadLength
, payload'RdKafkaMessageT = passedPayload
, offset'RdKafkaMessageT = 0
, keyLen'RdKafkaMessageT = 0
, key'RdKafkaMessageT = nullPtr
, private'RdKafkaMessageT = nullPtr
}
produceMessageToMessage (KafkaProduceKeyedMessage kbs bs) = do
let (payloadFPtr, payloadOffset, payloadLength) = BSI.toForeignPtr bs
(keyFPtr, keyOffset, keyLength) = BSI.toForeignPtr kbs
withForeignPtr topicPtr $ \ptrTopic ->
withForeignPtr payloadFPtr $ \payloadPtr -> do
withForeignPtr keyFPtr $ \keyPtr -> do
let passedPayload = payloadPtr `plusPtr` payloadOffset
passedKey = keyPtr `plusPtr` keyOffset
return $ RdKafkaMessageT
{ err'RdKafkaMessageT = RdKafkaRespErrNoError
, topic'RdKafkaMessageT = ptrTopic
, partition'RdKafkaMessageT = fromIntegral partitionInt
, len'RdKafkaMessageT = payloadLength
, payload'RdKafkaMessageT = passedPayload
, offset'RdKafkaMessageT = 0
, keyLen'RdKafkaMessageT = keyLength
, key'RdKafkaMessageT = passedKey
, private'RdKafkaMessageT = nullPtr
}
| Connects to broker in producer mode for a given topic , taking a function
that is fed with ' ' and ' ' instances . After receiving handles you
^ your code , fed with ' ' and ' ' instances for subsequent interaction .
withKafkaProducer configOverrides topicConfigOverrides brokerString tName cb =
bracket
(do
kafka <- newKafka RdKafkaProducer configOverrides
addBrokers kafka brokerString
topic <- newKafkaTopic kafka tName topicConfigOverrides
return (kafka, topic)
)
(\(kafka, _) -> drainOutQueue kafka)
(\(k, t) -> cb k t)
| Connects to broker in consumer mode for a specific partition ,
' ' and ' ' instances . After receiving handles , you should be using
' consumeMessage ' and ' consumeMessageBatch ' to receive messages . This function
^ your cod , fed with ' ' and ' ' instances for subsequent interaction .
withKafkaConsumer configOverrides topicConfigOverrides brokerString tName partition offset cb =
bracket
(do
kafka <- newKafka RdKafkaConsumer configOverrides
addBrokers kafka brokerString
topic <- newKafkaTopic kafka tName topicConfigOverrides
startConsuming topic partition offset
return (kafka, topic)
)
(\(_, topic) -> stopConsuming topic partition)
(\(k, t) -> cb k t)
# INLINE copyMsgFlags #
copyMsgFlags :: Int
copyMsgFlags = rdKafkaMsgFlagCopy
# INLINE producePartitionInteger #
producePartitionInteger :: KafkaProducePartition -> CInt
producePartitionInteger KafkaUnassignedPartition = -1
producePartitionInteger (KafkaSpecifiedPartition n) = fromIntegral n
# INLINE handleProduceErr #
handleProduceErr :: Int -> IO (Maybe KafkaError)
handleProduceErr (- 1) = getErrno >>= return . Just . kafkaRespErr
handleProduceErr 0 = return $ Nothing
handleProduceErr _ = return $ Just $ KafkaInvalidReturnValue
timeout for the request , in milliseconds ( 10 ^ 3 per second )
fetchBrokerMetadata configOverrides brokerString timeout = do
kafka <- newKafka RdKafkaConsumer configOverrides
addBrokers kafka brokerString
getAllMetadata kafka timeout
| Grabs all metadata from a given instance .
getAllMetadata :: Kafka
^ timeout in milliseconds ( 10 ^ 3 per second )
-> IO (Either KafkaError KafkaMetadata)
getAllMetadata k timeout = getMetadata k Nothing timeout
| Grabs topic metadata from a given topic instance
getTopicMetadata :: Kafka
-> KafkaTopic
^ timeout in milliseconds ( 10 ^ 3 per second )
-> IO (Either KafkaError KafkaTopicMetadata)
getTopicMetadata k kt timeout = do
err <- getMetadata k (Just kt) timeout
case err of
Left e -> return $ Left $ e
Right md -> case (topics md) of
[(Left e)] -> return $ Left e
[(Right tmd)] -> return $ Right tmd
_ -> return $ Left $ KafkaError "Incorrect number of topics returned"
getMetadata :: Kafka -> Maybe KafkaTopic -> Int -> IO (Either KafkaError KafkaMetadata)
getMetadata (Kafka kPtr _) mTopic timeout = alloca $ \mdDblPtr -> do
err <- case mTopic of
Just (KafkaTopic kTopicPtr _ _) ->
rdKafkaMetadata kPtr False kTopicPtr mdDblPtr timeout
Nothing -> do
nullTopic <- newForeignPtr_ nullPtr
rdKafkaMetadata kPtr True nullTopic mdDblPtr timeout
case err of
RdKafkaRespErrNoError -> do
mdPtr <- peek mdDblPtr
md <- peek mdPtr
retMd <- constructMetadata md
rdKafkaMetadataDestroy mdPtr
return $ Right $ retMd
e -> return $ Left $ KafkaResponseError e
where
constructMetadata md = do
let nBrokers = (brokerCnt'RdKafkaMetadataT md)
brokersPtr = (brokers'RdKafkaMetadataT md)
nTopics = (topicCnt'RdKafkaMetadataT md)
topicsPtr = (topics'RdKafkaMetadataT md)
brokerMds <- mapM (\i -> constructBrokerMetadata =<< peekElemOff brokersPtr i) [0..((fromIntegral nBrokers) - 1)]
topicMds <- mapM (\i -> constructTopicMetadata =<< peekElemOff topicsPtr i) [0..((fromIntegral nTopics) - 1)]
return $ KafkaMetadata brokerMds topicMds
constructBrokerMetadata bmd = do
hostStr <- peekCString (host'RdKafkaMetadataBrokerT bmd)
return $ KafkaBrokerMetadata
(id'RdKafkaMetadataBrokerT bmd)
(hostStr)
(port'RdKafkaMetadataBrokerT bmd)
constructTopicMetadata tmd = do
case (err'RdKafkaMetadataTopicT tmd) of
RdKafkaRespErrNoError -> do
let nPartitions = (partitionCnt'RdKafkaMetadataTopicT tmd)
partitionsPtr = (partitions'RdKafkaMetadataTopicT tmd)
topicStr <- peekCString (topic'RdKafkaMetadataTopicT tmd)
partitionsMds <- mapM (\i -> constructPartitionMetadata =<< peekElemOff partitionsPtr i) [0..((fromIntegral nPartitions) - 1)]
return $ Right $ KafkaTopicMetadata topicStr partitionsMds
e -> return $ Left $ KafkaResponseError e
constructPartitionMetadata pmd = do
case (err'RdKafkaMetadataPartitionT pmd) of
RdKafkaRespErrNoError -> do
let nReplicas = (replicaCnt'RdKafkaMetadataPartitionT pmd)
replicasPtr = (replicas'RdKafkaMetadataPartitionT pmd)
nIsrs = (isrCnt'RdKafkaMetadataPartitionT pmd)
isrsPtr = (isrs'RdKafkaMetadataPartitionT pmd)
replicas <- mapM (\i -> peekElemOff replicasPtr i) [0..((fromIntegral nReplicas) - 1)]
isrs <- mapM (\i -> peekElemOff isrsPtr i) [0..((fromIntegral nIsrs) - 1)]
return $ Right $ KafkaPartitionMetadata
(id'RdKafkaMetadataPartitionT pmd)
(leader'RdKafkaMetadataPartitionT pmd)
(map fromIntegral replicas)
(map fromIntegral isrs)
e -> return $ Left $ KafkaResponseError e
pollEvents :: Kafka -> Int -> IO ()
pollEvents (Kafka kPtr _) timeout = rdKafkaPoll kPtr timeout >> return ()
pollEventsSafe :: Kafka -> Int -> IO ()
pollEventsSafe (Kafka kPtr _) timeout = do
_ <- withForeignPtr kPtr $ \realPtr -> do
rdKafkaPollSafe realPtr timeout
return ()
outboundQueueLength :: Kafka -> IO (Int)
outboundQueueLength (Kafka kPtr _) = rdKafkaOutqLen kPtr
drainOutQueue :: Kafka -> IO ()
drainOutQueue k = do
pollEvents k 100
l <- outboundQueueLength k
if l == 0 then return ()
else drainOutQueue k
|
d691a3086a82cba6af209a0b0e2847d3a6e34b43f7cbb6b0a0e62a9842c4d9bf
|
jlouis/combinatorrent
|
RateCalc.hs
|
-- | Rate calculation.
{-# LANGUAGE BangPatterns #-}
module RateCalc (
-- * Types
Rate
-- * Interface
, new
, update
, extractCount
, extractRate
)
where
import Control.DeepSeq
import Data.Time.Clock
-- | A Rate is a record of information used for calculating the rate
data Rate = Rate
{ rate :: !Double -- ^ The current rate
, bytes :: !Int -- ^ The amount of bytes transferred since last rate extraction
, count :: !Int -- ^ The amount of bytes transferred since last count extraction
, lastExt :: !UTCTime -- ^ When was the last rate update
, rateSince :: !UTCTime -- ^ From where is the rate measured
}
instance NFData Rate where
rnf (Rate r b c _ _) =
rnf r `seq` rnf b `seq` rnf c
fudge :: NominalDiffTime
fudge = fromInteger 5 -- Seconds
maxRatePeriod :: NominalDiffTime
maxRatePeriod = fromInteger 20 -- Seconds
new :: UTCTime -> Rate
new t = Rate { rate = 0.0
, bytes = 0
, count = 0
, lastExt = addUTCTime (-fudge) t
, rateSince = addUTCTime (-fudge) t
}
| The call @update n rt@ updates the rate structure @rt@ with @n@ new bytes
update :: Int -> Rate -> Rate
update n rt = {-# SCC "update" #-}
rt { bytes = nb, count = nc}
where !nb = bytes rt + n
!nc = count rt + n
-- | The call @extractRate t rt@ extracts the current rate from the rate
-- structure and updates the rate structures internal book-keeping
extractRate :: UTCTime -> Rate -> (Double, Rate)
extractRate t rt = {-# SCC "extractRate" #-}
let since = rateSince rt
lext = lastExt rt
n = bytes rt
oldWindow :: Double
oldWindow = {-# SCC "diffUTC1" #-} realToFrac $ diffUTCTime lext since
newWindow :: Double
newWindow = {-# SCC "diffUTS2" #-} realToFrac $ diffUTCTime t since
!r = {-# SCC "r" #-} (rate rt * oldWindow + (fromIntegral n)) / newWindow
!nrt = {-# SCC "rt_creat" #-}
rt { rate = r
, bytes = 0
, lastExt = t
, rateSince = {-# SCC "max" #-} max since (addUTCTime (-maxRatePeriod) t)
}
in
-- Update the rate and book-keep the missing pieces. The total is simply a built-in
counter . The point where we expect the next update is pushed at most 5 seconds ahead
-- in time. But it might come earlier if the rate is high.
-- Last is updated with the current time. Finally, we move the windows earliest value
forward if it is more than 20 seconds from now .
(r, nrt)
-- | The call @extractCount rt@ extract the bytes transferred since last extraction
extractCount :: Rate -> (Int, Rate)
extractCount rt = {-# SCC "extractCount" #-} (crt, rt { count = 0 })
where crt = count rt
| null |
https://raw.githubusercontent.com/jlouis/combinatorrent/a8660bc29507f3774d79bd364b8b509cf5146282/src/RateCalc.hs
|
haskell
|
| Rate calculation.
# LANGUAGE BangPatterns #
* Types
* Interface
| A Rate is a record of information used for calculating the rate
^ The current rate
^ The amount of bytes transferred since last rate extraction
^ The amount of bytes transferred since last count extraction
^ When was the last rate update
^ From where is the rate measured
Seconds
Seconds
# SCC "update" #
| The call @extractRate t rt@ extracts the current rate from the rate
structure and updates the rate structures internal book-keeping
# SCC "extractRate" #
# SCC "diffUTC1" #
# SCC "diffUTS2" #
# SCC "r" #
# SCC "rt_creat" #
# SCC "max" #
Update the rate and book-keep the missing pieces. The total is simply a built-in
in time. But it might come earlier if the rate is high.
Last is updated with the current time. Finally, we move the windows earliest value
| The call @extractCount rt@ extract the bytes transferred since last extraction
# SCC "extractCount" #
|
module RateCalc (
Rate
, new
, update
, extractCount
, extractRate
)
where
import Control.DeepSeq
import Data.Time.Clock
data Rate = Rate
}
instance NFData Rate where
rnf (Rate r b c _ _) =
rnf r `seq` rnf b `seq` rnf c
fudge :: NominalDiffTime
maxRatePeriod :: NominalDiffTime
new :: UTCTime -> Rate
new t = Rate { rate = 0.0
, bytes = 0
, count = 0
, lastExt = addUTCTime (-fudge) t
, rateSince = addUTCTime (-fudge) t
}
| The call @update n rt@ updates the rate structure @rt@ with @n@ new bytes
update :: Int -> Rate -> Rate
rt { bytes = nb, count = nc}
where !nb = bytes rt + n
!nc = count rt + n
extractRate :: UTCTime -> Rate -> (Double, Rate)
let since = rateSince rt
lext = lastExt rt
n = bytes rt
oldWindow :: Double
newWindow :: Double
rt { rate = r
, bytes = 0
, lastExt = t
}
in
counter . The point where we expect the next update is pushed at most 5 seconds ahead
forward if it is more than 20 seconds from now .
(r, nrt)
extractCount :: Rate -> (Int, Rate)
where crt = count rt
|
18971eaed33612c03fb6a40eb87eac117c1b6f6d21f3921d6ab3252102ae0174
|
jrh13/hol-light
|
mccarthy.ml
|
* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
*
* mp.ml
*
* An HOL mechanization of the compiler correctness proof of and
* Painter from 1967 .
*
* From a HOL-4 original by and
*
* HOL Light proof by , 21st April 2004
*
* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
*
* mp.ml
*
* An HOL mechanization of the compiler correctness proof of McCarthy and
* Painter from 1967.
*
* From a HOL-4 original by Robert Bauer and Ray Toal
*
* HOL Light proof by John Harrison, 21st April 2004
*
*****************************************************************************)
(* ------------------------------------------------------------------------- *)
Define a type of strings , not already there in HOL Light .
(* We don't use any particular properties of the type in the proof below. *)
(* ------------------------------------------------------------------------- *)
let string_INDUCT,string_RECURSION =
define_type "string = String (int list)";;
(* ------------------------------------------------------------------------- *)
The definitions from 's file .
(* ------------------------------------------------------------------------- *)
* The source language
* -------------------
*
* Syntax :
*
* The language contains only expressions of three kinds : ( 1 ) simple
* numeric literals , ( 2 ) simple variables , and ( 3 ) plus expressions .
* The source language
* -------------------
*
* Syntax:
*
* The language contains only expressions of three kinds: (1) simple
* numeric literals, (2) simple variables, and (3) plus expressions.
*)
let exp_INDUCT,exp_RECURSION =
define_type "exp = Lit num
| Var string
| Plus exp exp";;
* Semantics :
*
* Expressions evaluated in a state produce a result . There are no
* side effects . A state is simply a mapping from variables to
* values . The semantic function is called
* Semantics:
*
* Expressions evaluated in a state produce a result. There are no
* side effects. A state is simply a mapping from variables to
* values. The semantic function is called E.
*)
let E_DEF = new_recursive_definition exp_RECURSION
`(E (Lit n) s = n)
/\ (E (Var v) s = s v)
/\ (E (Plus e1 e2) s = E e1 s + E e2 s)`;;
* The object language
* -------------------
*
* Syntax :
*
* The target machine has a single accumulator ( Acc ) and an infinite
* set of numbered registers ( Reg 0 , Reg 1 , Reg 2 , and so on ) . The
* accumulator and registers together are called cells . There are four
* instructions : LI ( load immediate into accumulator ) , LOAD ( load the
* contents of a numbered register into the accumulator ) , STO ( store
* the accumulator value into a numbered register ) and ADD ( add the
* contents of a numbered register into the accumulator ) .
* The object language
* -------------------
*
* Syntax:
*
* The target machine has a single accumulator (Acc) and an infinite
* set of numbered registers (Reg 0, Reg 1, Reg 2, and so on). The
* accumulator and registers together are called cells. There are four
* instructions: LI (load immediate into accumulator), LOAD (load the
* contents of a numbered register into the accumulator), STO (store
* the accumulator value into a numbered register) and ADD (add the
* contents of a numbered register into the accumulator).
*)
let cell_INDUCT,cell_RECURSION =
define_type "cell = Acc
| Reg num";;
let inst_INDUCT,inst_RECURSION =
define_type "inst = LI num
| LOAD num
| STO num
| ADD num";;
* update x z s is the state that is just like s except that x now
* maps to definition applies to any kind of state .
* update x z s is the state that is just like s except that x now
* maps to z. This definition applies to any kind of state.
*)
let update_def =
new_definition `update x z s y = if (y = x) then z else s y`;;
* Semantics :
*
* First , the semantics of the execution of a single instruction .
* The semantic function is called S. Executing an instruction in
* a machine state produces a new machine state . Here a machine
* state is a mapping from cells to values .
* Semantics:
*
* First, the semantics of the execution of a single instruction.
* The semantic function is called S. Executing an instruction in
* a machine state produces a new machine state. Here a machine
* state is a mapping from cells to values.
*)
let S_DEF = new_recursive_definition inst_RECURSION
`(S (LI n) s = update Acc n s)
/\ (S (LOAD r) s = update Acc (s (Reg r)) s)
/\ (S (STO r) s = update (Reg r) (s Acc) s)
/\ (S (ADD r) s = update Acc (s (Reg r) + s Acc) s)`;;
* Next we give the semantics of a list of instructions with the
* semantic function S ' . The execution of an intruction list
* in an initial state is given by executing the first instruction
* in the list in the initial state , which produce a new state s1 ,
* and taking the execution of the rest of the list in s1 .
* Next we give the semantics of a list of instructions with the
* semantic function S'. The execution of an intruction list
* in an initial state is given by executing the first instruction
* in the list in the initial state, which produce a new state s1,
* and taking the execution of the rest of the list in s1.
*)
let S'_DEF = new_recursive_definition list_RECURSION
`(S' [] s = s)
/\ (S' (CONS inst rest) s = S' rest (S inst s))`;;
(*
* The compiler
* ------------
*
* Each source language expression is compiled into a list of
* instructions. The compilation is done using a symbol table
* which maps source language indentifiers into target machine
* register numbers, and a parameter r which tells the next
* available free register.
*)
let C_DEF = new_recursive_definition exp_RECURSION
`(C (Lit n) map r = [LI n])
/\ (C (Var v) map r = [LOAD (map v)])
/\ (C (Plus e1 e2) map r =
APPEND
(APPEND (C e1 map r) [STO r])
(APPEND (C e2 map (r + 1)) [ADD r]))`;;
(* ------------------------------------------------------------------------- *)
My key lemmas ; UPDATE_DIFFERENT and are the same as 's .
(* ------------------------------------------------------------------------- *)
let cellth = CONJ (distinctness "cell") (injectivity "cell");;
let S'_APPEND = prove
(`!p1 p2 s. S' (APPEND p1 p2) s = S' p2 (S' p1 s)`,
LIST_INDUCT_TAC THEN ASM_SIMP_TAC[S'_DEF; APPEND]);;
let UPDATE_DIFFERENT = prove
(`!x y z s. ~(x = y) ==> (update x z s y = s y)`,
SIMP_TAC[update_def]);;
let UPDATE_SAME = prove
(`!x z s. update x z s x = z`,
SIMP_TAC[update_def]);;
* The Correctness Condition
* -------------------------
*
* The correctness condition is this :
*
* For every expression e , symbol table map , source state s ,
* target state s ' , register number r :
*
* If all source variables map to registers LESS THAN r ,
* and if the value of every variable v in s is exactly
* the same as the value in s ' of the register to which
* v is mapped by map , THEN
*
* When e is compiled with map and first free register r ,
* and then executed in the state s ' , in the resulting
* machine state S'(C e map r ):
*
* the accumulator will contain E e s and every register
* with number x less than r will have the same value as
* it does in s ' .
*
* The Proof
* ---------
*
* The proof can be done by induction and careful application of [ ]
* using the lemmas isolated above .
*
* The only " hack " is to throw in GSYM SKOLEM_THM and to dispose
* of state existence subgoals of the form ` ? s. ! v. s v = t[v ] ` , which
* otherwise would not be proven automatically by the simplifier .
* The Correctness Condition
* -------------------------
*
* The correctness condition is this:
*
* For every expression e, symbol table map, source state s,
* target state s', register number r:
*
* If all source variables map to registers LESS THAN r,
* and if the value of every variable v in s is exactly
* the same as the value in s' of the register to which
* v is mapped by map, THEN
*
* When e is compiled with map and first free register r,
* and then executed in the state s', in the resulting
* machine state S'(C e map r):
*
* the accumulator will contain E e s and every register
* with number x less than r will have the same value as
* it does in s'.
*
* The Proof
* ---------
*
* The proof can be done by induction and careful application of SIMP_TAC[]
* using the lemmas isolated above.
*
* The only "hack" is to throw in GSYM SKOLEM_THM and EXISTS_REFL to dispose
* of state existence subgoals of the form `?s. !v. s v = t[v]`, which
* otherwise would not be proven automatically by the simplifier.
*)
(* Main compiler correctness theorem: if every source variable is mapped  *)
(* to a register below r, and source state s agrees with machine state s' *)
(* on all mapped registers, then running the compiled code for e leaves   *)
(* E e s in the accumulator and preserves every register below r.         *)
let CORRECTNESS_THEOREM = prove
 (`!e map s s' r.
        (!v. map v < r) ==>
        (!v. s v = s' (Reg (map v))) ==>
        (S' (C e map r) s' Acc = E e s) /\
        (!x. (x < r) ==> (S' (C e map r) s' (Reg x) = s' (Reg x)))`,
  MATCH_MP_TAC exp_INDUCT THEN
  REWRITE_TAC[E_DEF; S_DEF; S'_DEF; update_def; C_DEF; S'_APPEND] THEN
  SIMP_TAC[ARITH_RULE `(x < y ==> x < y + 1 /\ ~(x = y)) /\ x < x + 1`; cellth;
           UPDATE_SAME; UPDATE_DIFFERENT; GSYM SKOLEM_THM; EXISTS_REFL]);;
| null |
https://raw.githubusercontent.com/jrh13/hol-light/ea44a4cacd238d7fa5a397f043f3e3321eb66543/Examples/mccarthy.ml
|
ocaml
|
-------------------------------------------------------------------------
We don't use any particular properties of the type in the proof below.
-------------------------------------------------------------------------
-------------------------------------------------------------------------
-------------------------------------------------------------------------
* The compiler
* ------------
*
* Each source language expression is compiled into a list of
* instructions. The compilation is done using a symbol table
* which maps source language indentifiers into target machine
* register numbers, and a parameter r which tells the next
* available free register.
-------------------------------------------------------------------------
-------------------------------------------------------------------------
|
* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
*
* mp.ml
*
* An HOL mechanization of the compiler correctness proof of and
* Painter from 1967 .
*
* From a HOL-4 original by and
*
* HOL Light proof by , 21st April 2004
*
* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
*
* mp.ml
*
* An HOL mechanization of the compiler correctness proof of McCarthy and
* Painter from 1967.
*
* From a HOL-4 original by Robert Bauer and Ray Toal
*
* HOL Light proof by John Harrison, 21st April 2004
*
*****************************************************************************)
Define a type of strings , not already there in HOL Light .
(* Strings are represented as lists of character codes, since stock *)
(* HOL Light has no separate string type to use for identifiers.    *)
let string_INDUCT,string_RECURSION =
  define_type "string = String (int list)";;
The definitions from 's file .
* The source language
* -------------------
*
* Syntax :
*
* The language contains only expressions of three kinds : ( 1 ) simple
* numeric literals , ( 2 ) simple variables , and ( 3 ) plus expressions .
* The source language
* -------------------
*
* Syntax:
*
* The language contains only expressions of three kinds: (1) simple
* numeric literals, (2) simple variables, and (3) plus expressions.
*)
(* Source-language syntax: numeric literals, variables, and addition. *)
let exp_INDUCT,exp_RECURSION =
  define_type "exp = Lit num
                   | Var string
                   | Plus exp exp";;
* Semantics :
*
* Expressions evaluated in a state produce a result . There are no
* side effects . A state is simply a mapping from variables to
* values . The semantic function is called
* Semantics:
*
* Expressions evaluated in a state produce a result. There are no
* side effects. A state is simply a mapping from variables to
* values. The semantic function is called E.
*)
(* Denotational semantics of expressions: E e s is the value of e in *)
(* state s, where a state maps variable names to numeric values.     *)
let E_DEF = new_recursive_definition exp_RECURSION
  `(E (Lit n) s = n)
/\ (E (Var v) s = s v)
/\ (E (Plus e1 e2) s = E e1 s + E e2 s)`;;
* The object language
* -------------------
*
* Syntax :
*
* The target machine has a single accumulator ( Acc ) and an infinite
* set of numbered registers ( Reg 0 , Reg 1 , Reg 2 , and so on ) . The
* accumulator and registers together are called cells . There are four
* instructions : LI ( load immediate into accumulator ) , LOAD ( load the
* contents of a numbered register into the accumulator ) , STO ( store
* the accumulator value into a numbered register ) and ADD ( add the
* contents of a numbered register into the accumulator ) .
* The object language
* -------------------
*
* Syntax:
*
* The target machine has a single accumulator (Acc) and an infinite
* set of numbered registers (Reg 0, Reg 1, Reg 2, and so on). The
* accumulator and registers together are called cells. There are four
* instructions: LI (load immediate into accumulator), LOAD (load the
* contents of a numbered register into the accumulator), STO (store
* the accumulator value into a numbered register) and ADD (add the
* contents of a numbered register into the accumulator).
*)
(* Target-machine storage cells: the accumulator plus numbered registers. *)
let cell_INDUCT,cell_RECURSION =
  define_type "cell = Acc
                    | Reg num";;
(* Target-machine instructions: load immediate, load register into  *)
(* accumulator, store accumulator, add register into accumulator.   *)
let inst_INDUCT,inst_RECURSION =
  define_type "inst = LI num
                    | LOAD num
                    | STO num
                    | ADD num";;
* update x z s is the state that is just like s except that x now
* maps to definition applies to any kind of state .
* update x z s is the state that is just like s except that x now
* maps to z. This definition applies to any kind of state.
*)
(* Pointwise state update: (update x z s) maps x to z and any other   *)
(* argument y to s y. Works for both source states and machine states.*)
let update_def =
  new_definition `update x z s y = if (y = x) then z else s y`;;
* Semantics :
*
* First , the semantics of the execution of a single instruction .
* The semantic function is called S. Executing an instruction in
* a machine state produces a new machine state . Here a machine
* state is a mapping from cells to values .
* Semantics:
*
* First, the semantics of the execution of a single instruction.
* The semantic function is called S. Executing an instruction in
* a machine state produces a new machine state. Here a machine
* state is a mapping from cells to values.
*)
(* Semantics of a single instruction: S i s is the machine state *)
(* reached by executing instruction i in machine state s.        *)
let S_DEF = new_recursive_definition inst_RECURSION
  `(S (LI n) s = update Acc n s)
/\ (S (LOAD r) s = update Acc (s (Reg r)) s)
/\ (S (STO r) s = update (Reg r) (s Acc) s)
/\ (S (ADD r) s = update Acc (s (Reg r) + s Acc) s)`;;
* Next we give the semantics of a list of instructions with the
* semantic function S ' . The execution of an intruction list
* in an initial state is given by executing the first instruction
* in the list in the initial state , which produce a new state s1 ,
* and taking the execution of the rest of the list in s1 .
* Next we give the semantics of a list of instructions with the
* semantic function S'. The execution of an intruction list
* in an initial state is given by executing the first instruction
* in the list in the initial state, which produce a new state s1,
* and taking the execution of the rest of the list in s1.
*)
(* Semantics of an instruction list: execute the head, then the rest. *)
let S'_DEF = new_recursive_definition list_RECURSION
  `(S' [] s = s)
/\ (S' (CONS inst rest) s = S' rest (S inst s))`;;
(* The compiler: C e map r compiles e using symbol table map          *)
(* (identifiers to register numbers) with first free register r.      *)
(* For Plus: evaluate e1, park it in register r, evaluate e2 with     *)
(* first free register r + 1, then add register r into the accumulator.*)
let C_DEF = new_recursive_definition exp_RECURSION
  `(C (Lit n) map r = [LI n])
/\ (C (Var v) map r = [LOAD (map v)])
/\ (C (Plus e1 e2) map r =
      APPEND
        (APPEND (C e1 map r) [STO r])
        (APPEND (C e2 map (r + 1)) [ADD r]))`;;
My key lemmas ; UPDATE_DIFFERENT and are the same as 's .
(* Distinctness and injectivity of cell constructors, for the simplifier. *)
let cellth = CONJ (distinctness "cell") (injectivity "cell");;
(* Running an appended code sequence = running the two pieces in order. *)
let S'_APPEND = prove
 (`!p1 p2 s. S' (APPEND p1 p2) s = S' p2 (S' p1 s)`,
  LIST_INDUCT_TAC THEN ASM_SIMP_TAC[S'_DEF; APPEND]);;
(* Updating one cell leaves the contents of any other cell unchanged. *)
let UPDATE_DIFFERENT = prove
 (`!x y z s. ~(x = y) ==> (update x z s y = s y)`,
  SIMP_TAC[update_def]);;
(* Reading back the cell just updated yields the value written. *)
let UPDATE_SAME = prove
 (`!x z s. update x z s x = z`,
  SIMP_TAC[update_def]);;
* The Correctness Condition
* -------------------------
*
* The correctness condition is this :
*
* For every expression e , symbol table map , source state s ,
* target state s ' , register number r :
*
* If all source variables map to registers LESS THAN r ,
* and if the value of every variable v in s is exactly
* the same as the value in s ' of the register to which
* v is mapped by map , THEN
*
* When e is compiled with map and first free register r ,
* and then executed in the state s ' , in the resulting
* machine state S'(C e map r ):
*
* the accumulator will contain E e s and every register
* with number x less than r will have the same value as
* it does in s ' .
*
* The Proof
* ---------
*
 * The proof can be done by induction and careful application of SIMP_TAC[]
* using the lemmas isolated above .
*
 * The only "hack" is to throw in GSYM SKOLEM_THM and EXISTS_REFL to dispose
* of state existence subgoals of the form ` ? s. ! v. s v = t[v ] ` , which
* otherwise would not be proven automatically by the simplifier .
* The Correctness Condition
* -------------------------
*
* The correctness condition is this:
*
* For every expression e, symbol table map, source state s,
* target state s', register number r:
*
* If all source variables map to registers LESS THAN r,
* and if the value of every variable v in s is exactly
* the same as the value in s' of the register to which
* v is mapped by map, THEN
*
* When e is compiled with map and first free register r,
* and then executed in the state s', in the resulting
* machine state S'(C e map r):
*
* the accumulator will contain E e s and every register
* with number x less than r will have the same value as
* it does in s'.
*
* The Proof
* ---------
*
* The proof can be done by induction and careful application of SIMP_TAC[]
* using the lemmas isolated above.
*
* The only "hack" is to throw in GSYM SKOLEM_THM and EXISTS_REFL to dispose
* of state existence subgoals of the form `?s. !v. s v = t[v]`, which
* otherwise would not be proven automatically by the simplifier.
*)
(* Main compiler correctness theorem: if every source variable is mapped  *)
(* to a register below r, and source state s agrees with machine state s' *)
(* on all mapped registers, then running the compiled code for e leaves   *)
(* E e s in the accumulator and preserves every register below r.         *)
let CORRECTNESS_THEOREM = prove
 (`!e map s s' r.
        (!v. map v < r) ==>
        (!v. s v = s' (Reg (map v))) ==>
        (S' (C e map r) s' Acc = E e s) /\
        (!x. (x < r) ==> (S' (C e map r) s' (Reg x) = s' (Reg x)))`,
  MATCH_MP_TAC exp_INDUCT THEN
  REWRITE_TAC[E_DEF; S_DEF; S'_DEF; update_def; C_DEF; S'_APPEND] THEN
  SIMP_TAC[ARITH_RULE `(x < y ==> x < y + 1 /\ ~(x = y)) /\ x < x + 1`; cellth;
           UPDATE_SAME; UPDATE_DIFFERENT; GSYM SKOLEM_THM; EXISTS_REFL]);;
|
d98e4ef1d69e4f4a014ef726671d48c02eb2067d98ae1e29879209029f8f6261
|
takikawa/racket-ppa
|
mutated.rkt
|
#lang racket/base
(require "wrap.rkt"
"match.rkt"
"known.rkt"
"import.rkt"
"simple.rkt"
"find-definition.rkt"
"struct-type-info.rkt"
"mutated-state.rkt"
"find-known.rkt"
"infer-known.rkt"
"letrec.rkt"
"id-to-var.rkt"
"aim.rkt")
(provide mutated-in-body
update-mutated-state!)
;; See "mutated-state.rkt" for information on the content of the
;; `mutated` table.
;; We don't have to worry about errors or escapes that prevent the
;; definition of an identifier, because that will abort the enclosing
;; linklet.
;; This pass is also responsible for recording when a letrec binding
;; must be mutated implicitly via `call/cc`.
(define (mutated-in-body l exports extra-variables prim-knowns knowns imports simples
unsafe-mode? target enforce-constant?)
;; Find all `set!`ed variables, and also record all bindings
;; that might be used too early
(define mutated (make-hasheq))
;; Defined names start out as 'not-ready; start with `exports`,
;; because anything exported but not defined is implicitly in an
;; undefined state and must be accessed through a `variable`:
(for ([id (in-hash-keys exports)])
(hash-set! mutated id 'undefined))
;; Find all defined variables, and find variables that are not exported:
(define unexported-ids
(for/fold ([unexported-ids '()]) ([form (in-list l)])
(match form
[`(define-values (,ids ...) ,rhs)
(for/fold ([unexported-ids unexported-ids]) ([id (in-list ids)])
(define u-id (unwrap id))
(hash-set! mutated u-id (if enforce-constant?
'not-ready
;; If constants should not be enforced, then
;; treat all variable as mutated:
'set!ed-too-early))
(if (hash-ref exports u-id #f)
unexported-ids
(cons u-id unexported-ids)))]
[`,_ unexported-ids])))
;; To support jitify, if an unexported and unmutated variable is
;; captured in a closure before it is defined, will want to reify
;; it like an export; so, set those variables to 'too-early
;; until they are really initialized
(define unexported-ready (and (pair? unexported-ids)
(aim? target 'interp)
(make-hasheq)))
(when unexported-ready
(for ([id (in-list unexported-ids)])
(hash-set! mutated id (lambda ()
(unless (or (hash-ref unexported-ready id #f)
(set!ed-mutated-state? (hash-ref mutated id #f)))
(hash-set! mutated id 'too-early))))))
;; Walk through the body:
(for/fold ([prev-knowns knowns]) ([form (in-list l)])
;; Accumulate known-binding information in this pass, because it's
;; helpful to know which variables are bound to constructors.
;; Note that we may tentatively classify a binding as a constructor
;; before discovering that its mutated via `set!`, but any use of
;; that information is correct, because it dynamically precedes
;; the `set!`
(define-values (knowns info)
(find-definitions form prim-knowns prev-knowns imports mutated simples unsafe-mode? target
#:optimize? #f))
(match form
[`(define-values (,ids ...) ,rhs)
(cond
[info
;; Look just at the "rest" part:
(for ([e (in-list (struct-type-info-rest info))]
[pos (in-naturals)])
(define prop-vals (and (= pos struct-type-info-rest-properties-list-pos)
(pure-properties-list e prim-knowns knowns imports mutated simples)))
(cond
[prop-vals
;; check individual property values using `ids`, so procedures won't
;; count as used until some instace is created
(for ([e (in-list prop-vals)])
(find-mutated! e ids prim-knowns knowns imports mutated simples unsafe-mode?))]
[else
(find-mutated! e ids prim-knowns knowns imports mutated simples unsafe-mode?)]))]
[else
(find-mutated! rhs ids prim-knowns knowns imports mutated simples unsafe-mode?)])
;; For any among `ids` that didn't get a delay and wasn't used
;; too early, the variable is now ready, so remove from
;; `mutated`
(for ([id (in-list ids)])
(let ([id (unwrap id)])
(when (eq? 'not-ready (hash-ref mutated id #f))
(hash-remove! mutated id))))]
[`,_
(find-mutated! form #f prim-knowns knowns imports mutated simples unsafe-mode?)])
knowns)
;; For definitions that are not yet used, force delays:
(for ([form (in-list l)])
(match form
[`(define-values (,ids ...) ,rhs)
(for ([id (in-list ids)])
(let ([id (unwrap id)])
(define state (hash-ref mutated id #f))
(when unexported-ready
(when (not (hash-ref exports id #f))
(hash-set! unexported-ready id #t)))
(when (delayed-mutated-state? state)
(hash-remove! mutated id)
(state))))]
[`,_ (void)]))
;; Check for unexported variables that need to be implemented like exports:
(unless (or unsafe-mode?
(aim? target 'system))
(for ([id (in-list unexported-ids)])
(define state (hash-ref mutated id #f))
(when (via-variable-mutated-state? state)
;; force creation of variable
(id-to-variable id exports extra-variables))))
;; Everything else in `mutated` is either 'set!ed, 'too-early,
;; 'undefined, or unreachable:
mutated)
;; Schemify `let-values` to `let`, etc., and
;; reorganize struct bindings.
(define (find-mutated! top-v ids prim-knowns knowns imports mutated simples unsafe-mode?)
(define (delay! ids thunk)
(define done? #f)
(define force (lambda () (unless done?
(set! done? #t)
(thunk))))
(for ([id (in-list ids)])
(let ([id (unwrap id)])
(define m (hash-ref mutated id 'not-ready))
(cond
[(eq? 'not-ready m)
(hash-set! mutated id force)]
[(procedure? m)
(hash-set! mutated id (lambda () (m) (force)))]
[else
(force)]))))
(let find-mutated! ([v top-v] [ids ids])
(define (find-mutated!* l ids)
(let loop ([l l])
(cond
[(null? l) (void)]
[(null? (cdr l)) (find-mutated! (car l) ids)]
[else (find-mutated! (car l) #f) (loop (cdr l))])))
(match v
[`(lambda ,formals ,body ...)
(if ids
(delay! ids (lambda () (find-mutated!* body #f)))
(find-mutated!* body #f))]
[`(case-lambda [,formalss ,bodys ...] ...)
(if ids
(delay! ids (lambda () (for ([body (in-list bodys)]) (find-mutated!* body #f))))
(for ([body (in-list bodys)]) (find-mutated!* body #f)))]
[`(quote ,_) (void)]
[`(let-values ([,idss ,rhss] ...) ,bodys ...)
(for ([ids (in-list idss)]
[rhs (in-list rhss)])
;; an `id` in `ids` can't be referenced too early,
;; but it might usefully be delayed
(find-mutated! rhs ids))
(find-mutated!* bodys ids)]
[`(letrec-values ([,idss ,rhss] ...) ,bodys ...)
(cond
[(letrec-splitable-values-binding? idss rhss)
(find-mutated! (letrec-split-values-binding idss rhss bodys) ids)]
[else
(for* ([ids (in-list idss)]
[id (in-wrap-list ids)])
(hash-set! mutated (unwrap id) 'not-ready))
(for/fold ([maybe-cc? #f]) ([ids (in-list idss)]
[rhs (in-list rhss)])
(find-mutated! rhs (unwrap-list ids))
(define new-maybe-cc? (or maybe-cc?
(not (simple? rhs prim-knowns knowns imports mutated simples unsafe-mode?
#:pure? #f
#:result-arity (length ids)))))
;; Each `id` in `ids` is now ready (but might also hold a delay):
(for ([id (in-wrap-list ids)])
(let ([u-id (unwrap id)])
(define state (hash-ref mutated u-id))
(define (add-too-early-name!)
(cond
[(and (eq? 'too-early state)
(wrap-property id 'undefined-error-name))
=> (lambda (name)
(hash-set! mutated u-id (too-early name #f)))]
[(and (eq? 'set!ed-too-early state)
(wrap-property id 'undefined-error-name))
=> (lambda (name)
(hash-set! mutated u-id (too-early name #t)))]))
(cond
[new-maybe-cc?
(cond
[(or (eq? 'not-ready state)
(delayed-mutated-state? state))
(hash-set! mutated u-id 'implicitly-set!ed)]
[else (add-too-early-name!)])
(when (delayed-mutated-state? state)
(state))]
[(eq? 'not-ready state)
(hash-remove! mutated u-id)]
[else (add-too-early-name!)])))
new-maybe-cc?)
(find-mutated!* bodys ids)])]
[`(if ,tst ,thn ,els)
(find-mutated! tst #f)
(find-mutated! thn #f)
(find-mutated! els #f)]
[`(with-continuation-mark ,key ,val ,body)
(find-mutated! key #f)
(find-mutated! val #f)
(find-mutated! body ids)]
[`(begin ,exps ...)
(find-mutated!* exps ids)]
[`(begin-unsafe ,exps ...)
(find-mutated!* exps ids)]
[`(begin0 ,exp ,exps ...)
(find-mutated! exp ids)
(find-mutated!* exps #f)]
[`(set! ,id ,rhs)
(let ([id (unwrap id)])
(define old-state (hash-ref mutated id #f))
(hash-set! mutated id (state->set!ed-state old-state))
(when (delayed-mutated-state? old-state)
(old-state)))
(find-mutated! rhs #f)]
[`(#%variable-reference . ,_) (void)]
[`(,rator ,exps ...)
(cond
[(and ids
(let ([rator (unwrap rator)])
(and (symbol? rator)
(let ([v (find-known rator prim-knowns knowns imports mutated)])
(and (or (known-constructor? v)
;; Some ad hoc constructors that are particularly
;; useful to struct-type properties:
(eq? rator 'cons)
(eq? rator 'list)
(eq? rator 'vector)
(eq? rator 'make-struct-type-property))
(bitwise-bit-set? (known-procedure-arity-mask v) (length exps))))
(for/and ([exp (in-list exps)])
(simple? exp prim-knowns knowns imports mutated simples unsafe-mode?
#:ordered? #t
#:succeeds? #t)))))
;; Can delay construction
(delay! ids (lambda () (find-mutated!* exps #f)))]
[else
(find-mutated! rator #f)
(find-mutated!* exps #f)])]
[`,_
(let ([v (unwrap v)])
(when (symbol? v)
(define state (hash-ref mutated v #f))
(cond
[(not-ready-mutated-state? state)
(unless unsafe-mode? ; unsafe => assume too-early won't happen
(hash-set! mutated v 'too-early))]
[(delayed-mutated-state? state)
(cond
[ids
;; Chain delays
(delay! ids (lambda ()
(when (eq? (hash-ref mutated v #f) state)
(hash-remove! mutated v))
(state)))]
[else
(hash-remove! mutated v)
(state)])])))])))
;; Advance the mutation bookkeeping as forms of `l` are consumed;
;; `mut-l` is the tail of `l` not yet processed. Returns the new tail.
(define (update-mutated-state! l mut-l mutated)
  (cond
    [(wrap-null? mut-l) '()]
    [(eq? l mut-l)
     ;; Check for function definitions at the start of `l`, because we
     ;; can mark all 'too-early variable uses as being ready from now
     ;; on
     (define new-mut-l
       (let loop ([mut-l mut-l])
         (cond
           [(wrap-null? mut-l) '()]
           [else
            (match (wrap-car mut-l)
              [`(define-values (,ids ...) ,rhs)
               (cond
                 [(lambda? rhs #:simple? #t)
                  (for ([id (in-list ids)])
                    (define u-id (unwrap id))
                    (define state (hash-ref mutated u-id #f))
                    (when (and (too-early-mutated-state? state)
                               (not (set!ed-mutated-state? state)))
                      (hash-set! mutated u-id 'too-early/ready)))
                  (loop (wrap-cdr mut-l))]
                 [else mut-l])]
              [`,_ mut-l])])))
     ;; NOTE(review): `new-mut-l` is bound but not consulted below, and
     ;; `(eq? mut-l l)` is necessarily true on this branch — confirm
     ;; against upstream whether the test should use `new-mut-l`.
     (if (eq? mut-l l)
         (wrap-cdr mut-l)
         l)]
    [else mut-l]))
| null |
https://raw.githubusercontent.com/takikawa/racket-ppa/5f2031309f6359c61a8dfd1fec0b77bbf9fb78df/src/schemify/mutated.rkt
|
racket
|
See "mutated-state.rkt" for information on the content of the
`mutated` table.
We don't have to worry about errors or escapes that prevent the
definition of an identifier, because that will abort the enclosing
linklet.
This pass is also responsible for recording when a letrec binding
must be mutated implicitly via `call/cc`.
Find all `set!`ed variables, and also record all bindings
that might be used too early
Defined names start out as 'not-ready; start with `exports`,
because anything exported but not defined is implicitly in an
undefined state and must be accessed through a `variable`:
Find all defined variables, and find variables that are not exported:
If constants should not be enforced, then
treat all variable as mutated:
To support jitify, if an unexported and unmutated variable is
captured in a closure before it is defined, will want to reify
it like an export; so, set those variables to 'too-early
until they are really initialized
Walk through the body:
Accumulate known-binding information in this pass, because it's
helpful to know which variables are bound to constructors.
Note that we may tentatively classify a binding as a constructor
before discovering that its mutated via `set!`, but any use of
that information is correct, because it dynamically precedes
the `set!`
Look just at the "rest" part:
check individual property values using `ids`, so procedures won't
count as used until some instace is created
For any among `ids` that didn't get a delay and wasn't used
too early, the variable is now ready, so remove from
`mutated`
For definitions that are not yet used, force delays:
Check for unexported variables that need to be implemented like exports:
force creation of variable
Everything else in `mutated` is either 'set!ed, 'too-early,
'undefined, or unreachable:
Schemify `let-values` to `let`, etc., and
reorganize struct bindings.
an `id` in `ids` can't be referenced too early,
but it might usefully be delayed
Each `id` in `ids` is now ready (but might also hold a delay):
Some ad hoc constructors that are particularly
useful to struct-type properties:
Can delay construction
unsafe => assume too-early won't happen
Chain delays
Check for function definitions at the start of `l`, because we
can mark all 'too-early variable uses as being ready from now
on
|
#lang racket/base
(require "wrap.rkt"
"match.rkt"
"known.rkt"
"import.rkt"
"simple.rkt"
"find-definition.rkt"
"struct-type-info.rkt"
"mutated-state.rkt"
"find-known.rkt"
"infer-known.rkt"
"letrec.rkt"
"id-to-var.rkt"
"aim.rkt")
(provide mutated-in-body
update-mutated-state!)
(define (mutated-in-body l exports extra-variables prim-knowns knowns imports simples
unsafe-mode? target enforce-constant?)
(define mutated (make-hasheq))
(for ([id (in-hash-keys exports)])
(hash-set! mutated id 'undefined))
(define unexported-ids
(for/fold ([unexported-ids '()]) ([form (in-list l)])
(match form
[`(define-values (,ids ...) ,rhs)
(for/fold ([unexported-ids unexported-ids]) ([id (in-list ids)])
(define u-id (unwrap id))
(hash-set! mutated u-id (if enforce-constant?
'not-ready
'set!ed-too-early))
(if (hash-ref exports u-id #f)
unexported-ids
(cons u-id unexported-ids)))]
[`,_ unexported-ids])))
(define unexported-ready (and (pair? unexported-ids)
(aim? target 'interp)
(make-hasheq)))
(when unexported-ready
(for ([id (in-list unexported-ids)])
(hash-set! mutated id (lambda ()
(unless (or (hash-ref unexported-ready id #f)
(set!ed-mutated-state? (hash-ref mutated id #f)))
(hash-set! mutated id 'too-early))))))
(for/fold ([prev-knowns knowns]) ([form (in-list l)])
(define-values (knowns info)
(find-definitions form prim-knowns prev-knowns imports mutated simples unsafe-mode? target
#:optimize? #f))
(match form
[`(define-values (,ids ...) ,rhs)
(cond
[info
(for ([e (in-list (struct-type-info-rest info))]
[pos (in-naturals)])
(define prop-vals (and (= pos struct-type-info-rest-properties-list-pos)
(pure-properties-list e prim-knowns knowns imports mutated simples)))
(cond
[prop-vals
(for ([e (in-list prop-vals)])
(find-mutated! e ids prim-knowns knowns imports mutated simples unsafe-mode?))]
[else
(find-mutated! e ids prim-knowns knowns imports mutated simples unsafe-mode?)]))]
[else
(find-mutated! rhs ids prim-knowns knowns imports mutated simples unsafe-mode?)])
(for ([id (in-list ids)])
(let ([id (unwrap id)])
(when (eq? 'not-ready (hash-ref mutated id #f))
(hash-remove! mutated id))))]
[`,_
(find-mutated! form #f prim-knowns knowns imports mutated simples unsafe-mode?)])
knowns)
(for ([form (in-list l)])
(match form
[`(define-values (,ids ...) ,rhs)
(for ([id (in-list ids)])
(let ([id (unwrap id)])
(define state (hash-ref mutated id #f))
(when unexported-ready
(when (not (hash-ref exports id #f))
(hash-set! unexported-ready id #t)))
(when (delayed-mutated-state? state)
(hash-remove! mutated id)
(state))))]
[`,_ (void)]))
(unless (or unsafe-mode?
(aim? target 'system))
(for ([id (in-list unexported-ids)])
(define state (hash-ref mutated id #f))
(when (via-variable-mutated-state? state)
(id-to-variable id exports extra-variables))))
mutated)
;; Find all `set!`ed variables in `top-v`, and record bindings that
;; might be referenced too early. When `ids` is non-#f, the scan of
;; `top-v` can be delayed until one of `ids` is actually demanded.
;; Fix: the `[(not-ready-mutated-state? state) ...]` branch below had
;; lost its `(unless unsafe-mode? ...)` wrapper, leaving an unbalanced
;; close parenthesis and marking symbols 'too-early even in unsafe mode.
(define (find-mutated! top-v ids prim-knowns knowns imports mutated simples unsafe-mode?)
  ;; Register `thunk` to run (once) when any of `ids` is demanded:
  (define (delay! ids thunk)
    (define done? #f)
    (define force (lambda () (unless done?
                               (set! done? #t)
                               (thunk))))
    (for ([id (in-list ids)])
      (let ([id (unwrap id)])
        (define m (hash-ref mutated id 'not-ready))
        (cond
          [(eq? 'not-ready m)
           (hash-set! mutated id force)]
          [(procedure? m)
           ;; Chain onto an earlier delay registered for the same id:
           (hash-set! mutated id (lambda () (m) (force)))]
          [else
           (force)]))))
  (let find-mutated! ([v top-v] [ids ids])
    ;; Scan a sequence; only the last (result) position keeps `ids`:
    (define (find-mutated!* l ids)
      (let loop ([l l])
        (cond
          [(null? l) (void)]
          [(null? (cdr l)) (find-mutated! (car l) ids)]
          [else (find-mutated! (car l) #f) (loop (cdr l))])))
    (match v
      [`(lambda ,formals ,body ...)
       (if ids
           (delay! ids (lambda () (find-mutated!* body #f)))
           (find-mutated!* body #f))]
      [`(case-lambda [,formalss ,bodys ...] ...)
       (if ids
           (delay! ids (lambda () (for ([body (in-list bodys)]) (find-mutated!* body #f))))
           (for ([body (in-list bodys)]) (find-mutated!* body #f)))]
      [`(quote ,_) (void)]
      [`(let-values ([,idss ,rhss] ...) ,bodys ...)
       (for ([ids (in-list idss)]
             [rhs (in-list rhss)])
         ;; an `id` in `ids` can't be referenced too early,
         ;; but it might usefully be delayed
         (find-mutated! rhs ids))
       (find-mutated!* bodys ids)]
      [`(letrec-values ([,idss ,rhss] ...) ,bodys ...)
       (cond
         [(letrec-splitable-values-binding? idss rhss)
          (find-mutated! (letrec-split-values-binding idss rhss bodys) ids)]
         [else
          (for* ([ids (in-list idss)]
                 [id (in-wrap-list ids)])
            (hash-set! mutated (unwrap id) 'not-ready))
          (for/fold ([maybe-cc? #f]) ([ids (in-list idss)]
                                      [rhs (in-list rhss)])
            (find-mutated! rhs (unwrap-list ids))
            (define new-maybe-cc? (or maybe-cc?
                                      (not (simple? rhs prim-knowns knowns imports mutated simples unsafe-mode?
                                                    #:pure? #f
                                                    #:result-arity (length ids)))))
            ;; Each `id` in `ids` is now ready (but might also hold a delay):
            (for ([id (in-wrap-list ids)])
              (let ([u-id (unwrap id)])
                (define state (hash-ref mutated u-id))
                (define (add-too-early-name!)
                  (cond
                    [(and (eq? 'too-early state)
                          (wrap-property id 'undefined-error-name))
                     => (lambda (name)
                          (hash-set! mutated u-id (too-early name #f)))]
                    [(and (eq? 'set!ed-too-early state)
                          (wrap-property id 'undefined-error-name))
                     => (lambda (name)
                          (hash-set! mutated u-id (too-early name #t)))]))
                (cond
                  [new-maybe-cc?
                   (cond
                     [(or (eq? 'not-ready state)
                          (delayed-mutated-state? state))
                      (hash-set! mutated u-id 'implicitly-set!ed)]
                     [else (add-too-early-name!)])
                   (when (delayed-mutated-state? state)
                     (state))]
                  [(eq? 'not-ready state)
                   (hash-remove! mutated u-id)]
                  [else (add-too-early-name!)])))
            new-maybe-cc?)
          (find-mutated!* bodys ids)])]
      [`(if ,tst ,thn ,els)
       (find-mutated! tst #f)
       (find-mutated! thn #f)
       (find-mutated! els #f)]
      [`(with-continuation-mark ,key ,val ,body)
       (find-mutated! key #f)
       (find-mutated! val #f)
       (find-mutated! body ids)]
      [`(begin ,exps ...)
       (find-mutated!* exps ids)]
      [`(begin-unsafe ,exps ...)
       (find-mutated!* exps ids)]
      [`(begin0 ,exp ,exps ...)
       (find-mutated! exp ids)
       (find-mutated!* exps #f)]
      [`(set! ,id ,rhs)
       (let ([id (unwrap id)])
         (define old-state (hash-ref mutated id #f))
         (hash-set! mutated id (state->set!ed-state old-state))
         (when (delayed-mutated-state? old-state)
           (old-state)))
       (find-mutated! rhs #f)]
      [`(#%variable-reference . ,_) (void)]
      [`(,rator ,exps ...)
       (cond
         [(and ids
               (let ([rator (unwrap rator)])
                 (and (symbol? rator)
                      (let ([v (find-known rator prim-knowns knowns imports mutated)])
                        (and (or (known-constructor? v)
                                 ;; Some ad hoc constructors that are particularly
                                 ;; useful to struct-type properties:
                                 (eq? rator 'cons)
                                 (eq? rator 'list)
                                 (eq? rator 'vector)
                                 (eq? rator 'make-struct-type-property))
                             (bitwise-bit-set? (known-procedure-arity-mask v) (length exps))))
                      (for/and ([exp (in-list exps)])
                        (simple? exp prim-knowns knowns imports mutated simples unsafe-mode?
                                 #:ordered? #t
                                 #:succeeds? #t)))))
          ;; Construction can be delayed until some use of `ids`:
          (delay! ids (lambda () (find-mutated!* exps #f)))]
         [else
          (find-mutated! rator #f)
          (find-mutated!* exps #f)])]
      [`,_
       (let ([v (unwrap v)])
         (when (symbol? v)
           (define state (hash-ref mutated v #f))
           (cond
             [(not-ready-mutated-state? state)
              ;; unsafe mode => assume a too-early reference cannot happen
              (unless unsafe-mode?
                (hash-set! mutated v 'too-early))]
             [(delayed-mutated-state? state)
              (cond
                [ids
                 ;; Chain delays
                 (delay! ids (lambda ()
                               (when (eq? (hash-ref mutated v #f) state)
                                 (hash-remove! mutated v))
                               (state)))]
                [else
                 (hash-remove! mutated v)
                 (state)])])))])))
;; Advance the mutation bookkeeping as forms of `l` are consumed;
;; `mut-l` is the tail of `l` not yet processed. Returns the new tail.
(define (update-mutated-state! l mut-l mutated)
  (cond
    [(wrap-null? mut-l) '()]
    [(eq? l mut-l)
     ;; Check for function definitions at the start of `l`, because we
     ;; can mark all 'too-early variable uses as being ready from now on
     (define new-mut-l
       (let loop ([mut-l mut-l])
         (cond
           [(wrap-null? mut-l) '()]
           [else
            (match (wrap-car mut-l)
              [`(define-values (,ids ...) ,rhs)
               (cond
                 [(lambda? rhs #:simple? #t)
                  (for ([id (in-list ids)])
                    (define u-id (unwrap id))
                    (define state (hash-ref mutated u-id #f))
                    (when (and (too-early-mutated-state? state)
                               (not (set!ed-mutated-state? state)))
                      (hash-set! mutated u-id 'too-early/ready)))
                  (loop (wrap-cdr mut-l))]
                 [else mut-l])]
              [`,_ mut-l])])))
     ;; NOTE(review): `new-mut-l` is bound but not consulted below, and
     ;; `(eq? mut-l l)` is necessarily true on this branch — confirm
     ;; against upstream whether the test should use `new-mut-l`.
     (if (eq? mut-l l)
         (wrap-cdr mut-l)
         l)]
    [else mut-l]))
|
172833a47ba32e34b70e5aa5e1f408834af5302352391ac68422e378fce61b76
|
ocaml-sf/learn-ocaml-corpus
|
conn_other_sense.ml
|
(* A propositional variable. *)
let var x =
  FVar x

(* The constant false formula, shared so it is allocated only once. *)
let falsity =
  FConst false

(* The constant true formula, shared so it is allocated only once. *)
let truth =
  FConst true

(* The constant of the given sense, reusing the shared constants. *)
let const sense =
  if sense then truth else falsity
  (* [FConst sense] would work too, but would allocate memory *)

(* Smart negation: constants are flipped and double negation collapses. *)
let neg f =
  match f with
  | FConst sense ->
      const (not sense)
  | FNeg f ->
      f
  | _ ->
      FNeg f
(* Smart constructor for a binary connective: [sense = true] builds a
   conjunction, [sense = false] a disjunction. A constant operand of the
   opposite sense is absorbing (e.g. [false] in a conjunction yields
   [falsity]); a constant operand of the same sense is neutral and the
   other operand is returned unchanged (fixes the previously missing
   [sense = sense'] case, which built an unsimplified [FConn]). *)
let conn sense f1 f2 =
  match f1, f2 with
  | FConst sense', f
  | f, FConst sense' ->
      if sense = sense' then f else FConst sense'
  | _, _ ->
      FConn (sense, f1, f2)
let conj f1 f2 =
conn true f1 f2
let disj f1 f2 =
conn false f1 f2
| null |
https://raw.githubusercontent.com/ocaml-sf/learn-ocaml-corpus/7dcf4d72b49863a3e37e41b3c3097aa4c6101a69/exercises/fpottier/sat/wrong/conn_other_sense.ml
|
ocaml
|
[FConst sense] would work too, but would allocate memory
wrong: failure to treat the case where [sense = sense']
|
let var x =
FVar x
let falsity =
FConst false
let truth =
FConst true
let const sense =
if sense then truth else falsity
let neg f =
match f with
| FConst sense ->
const (not sense)
| FNeg f ->
f
| _ ->
FNeg f
(* Smart constructor for a binary connective: [sense = true] builds a
   conjunction, [sense = false] a disjunction. A constant operand of the
   opposite sense is absorbing; a constant operand of the same sense is
   neutral, so the other operand is returned unchanged (fixes the
   previously missing [sense = sense'] case). *)
let conn sense f1 f2 =
  match f1, f2 with
  | FConst sense', f
  | f, FConst sense' ->
      if sense = sense' then f else FConst sense'
  | _, _ ->
      FConn (sense, f1, f2)
let conj f1 f2 =
conn true f1 f2
let disj f1 f2 =
conn false f1 f2
|
2848322185ab3e5c374fc57baf84d971b798933f02be262c95370cd1e6af87e3
|
orionsbelt-battlegrounds/obb-rules
|
worm.cljc
|
(ns obb-rules.units.worm)

;; Static metadata for the "worm" unit: combat stats, point value, and
;; movement properties consumed by the battle-rules engine.
(def metadata
  {:name "worm"
   :code "w"
   :attack 1200
   :defense 1200
   :range 3
   :value 25
   :type :organic
   :category :medium
   :displacement :ground
   :movement-type :all
   :movement-cost 2})
| null |
https://raw.githubusercontent.com/orionsbelt-battlegrounds/obb-rules/97fad6506eb81142f74f4722aca58b80d618bf45/src/obb_rules/units/worm.cljc
|
clojure
|
(ns obb-rules.units.worm)
(def metadata
{:name "worm"
:code "w"
:attack 1200
:defense 1200
:range 3
:value 25
:type :organic
:category :medium
:displacement :ground
:movement-type :all
:movement-cost 2})
|
|
b1add667f04e069b5c3b8bee18e1351deab15bcb77bd06c410c5ac3a10f895cf
|
threatgrid/ctia
|
graphql_schemas.clj
|
(ns ctia.entity.tool.graphql-schemas
(:require [ctia.entity.feedback.graphql-schemas :as feedback]
[ctia.entity.relationship.graphql-schemas :as relationship]
[ctia.schemas.graphql
[flanders :as flanders]
[helpers :as g]
[pagination :as pagination]
[sorting :as sorting]]
[ctim.schemas.tool :as ctim-tool]
[flanders.utils :as fu]
[ctia.entity.tool.schemas :as ts]
[ctia.schemas.graphql.ownership :as go]))
;; GraphQL object type for Tool entities, derived from the CTIM flanders
;; schema with every field made optional, merged with the feedback,
;; relationship, and ownership fields shared across entity types.
(def ToolType
  (let [{:keys [fields name description]}
        (flanders/->graphql
         (fu/optionalize-all ctim-tool/Tool)
         {})]
    (g/new-object
     name
     description
     []
     (merge fields
            feedback/feedback-connection-field
            relationship/relatable-entity-fields
            go/graphql-ownership-fields))))

;; Order-by enum argument built from the sortable tool fields.
(def tool-order-arg
  (sorting/order-by-arg
   "ToolOrder"
   "tools"
   (into {}
         (map (juxt sorting/sorting-kw->enum-name name)
              ts/tool-fields))))

;; Paginated (connection) wrapper around ToolType.
(def ToolConnectionType
  (pagination/new-connection ToolType))
| null |
https://raw.githubusercontent.com/threatgrid/ctia/32857663cdd7ac385161103dbafa8dc4f98febf0/src/ctia/entity/tool/graphql_schemas.clj
|
clojure
|
(ns ctia.entity.tool.graphql-schemas
(:require [ctia.entity.feedback.graphql-schemas :as feedback]
[ctia.entity.relationship.graphql-schemas :as relationship]
[ctia.schemas.graphql
[flanders :as flanders]
[helpers :as g]
[pagination :as pagination]
[sorting :as sorting]]
[ctim.schemas.tool :as ctim-tool]
[flanders.utils :as fu]
[ctia.entity.tool.schemas :as ts]
[ctia.schemas.graphql.ownership :as go]))
(def ToolType
(let [{:keys [fields name description]}
(flanders/->graphql
(fu/optionalize-all ctim-tool/Tool)
{})]
(g/new-object
name
description
[]
(merge fields
feedback/feedback-connection-field
relationship/relatable-entity-fields
go/graphql-ownership-fields))))
(def tool-order-arg
(sorting/order-by-arg
"ToolOrder"
"tools"
(into {}
(map (juxt sorting/sorting-kw->enum-name name)
ts/tool-fields))))
(def ToolConnectionType
(pagination/new-connection ToolType))
|
|
68b5610cf0371e1355c17d786b069f0909482bfc76cfdea9bb35f9f703ad6dcd
|
Soostone/instrument
|
Utils.hs
|
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE NoMonomorphismRestriction #-}
module Instrument.Utils
( formatDecimal,
formatInt,
showT,
showBS,
collect,
noDots,
encodeCompress,
decodeCompress,
indefinitely,
seconds,
milliseconds,
for,
)
where
-------------------------------------------------------------------------------
import Codec.Compression.GZip
import Control.Applicative ((<|>))
import Control.Concurrent (threadDelay)
import Control.Exception (SomeException)
import Control.Monad
import Control.Monad.Catch (Handler (..))
import Control.Retry
import qualified Data.ByteString.Char8 as B
import Data.ByteString.Lazy (fromStrict, toStrict)
import qualified Data.Map as M
import qualified Data.Map.Strict as MS
import qualified Data.SafeCopy as SC
import Data.Serialize
import Data.Text (Text)
import qualified Data.Text as T
import Numeric
import System.IO
-------------------------------------------------------------------------------
-------------------------------------------------------------------------------
-- | Group a list into a Map: each element's bucket key is computed with
-- the first function, the stored value with the second.  Values within a
-- bucket keep their original list order.
collect ::
  (Ord b) =>
  [a] ->
  (a -> b) ->
  (a -> c) ->
  M.Map b [c]
collect xs mkKey mkVal =
  foldr (\x -> MS.insertWith (++) (mkKey x) [mkVal x]) M.empty xs
-------------------------------------------------------------------------------
-- | Replace every dot in a metric name with an underscore.
noDots :: Text -> Text
noDots = T.replace (T.singleton '.') (T.singleton '_')
-------------------------------------------------------------------------------
-- | Render any 'Show'-able value as strict 'Text'.
showT :: Show a => a -> Text
showT x = T.pack (show x)
-- | Render any 'Show'-able value as a strict 'ByteString'.
showBS :: Show a => a -> B.ByteString
showBS x = B.pack (show x)
-------------------------------------------------------------------------------
-- | Render a fractional number as the decimal text of its floor.
formatInt :: RealFrac a => a -> Text
formatInt x = T.pack (show (floor x :: Int))
-------------------------------------------------------------------------------
-- | Render a floating-point number with a fixed count of digits after
-- the decimal point, optionally inserting thousands separators into the
-- integral part.
formatDecimal ::
  RealFloat a =>
  -- | Digits after the decimal point
  Int ->
  -- | Insert thousands separators?
  Bool ->
  -- | Number to render
  a ->
  Text
formatDecimal n th i
  | th = addThousands rendered
  | otherwise = rendered
  where
    rendered = T.pack (showFFloat (Just n) i "")
-------------------------------------------------------------------------------
-- | Insert comma thousands separators into the integral part of a
-- decimal string, leaving the fractional part (from the first '.')
-- untouched.
addThousands :: Text -> Text
addThousands t = T.concat [groupedWhole, fractional]
  where
    (whole, fractional) = T.span (/= '.') t
    -- Group from the right by reversing, chunking in threes, and
    -- reversing back.
    groupedWhole =
      T.reverse . T.intercalate (T.singleton ',') . T.chunksOf 3 . T.reverse $ whole
-------------------------------------------------------------------------------
-- | Serialize with SafeCopy, then GZip-compress, in that order.  This is
-- the only function used for serializing to Redis.
encodeCompress :: SC.SafeCopy a => a -> B.ByteString
encodeCompress val = toStrict (compress (runPutLazy (SC.safePut val)))
-------------------------------------------------------------------------------
-- | GZip-decompress, then deserialize, in that order.  Tries SafeCopy
-- decoding first and falls back to plain Serialize to accommodate old
-- data; 'encodeCompress' only writes SafeCopy, so rewritten entries are
-- upgraded.
decodeCompress :: (SC.SafeCopy a, Serialize a) => B.ByteString -> Either String a
decodeCompress bs = decodeWithFallback (decompress (fromStrict bs))
  where
    decodeWithFallback lbs = runGetLazy SC.safeGet lbs <|> decodeLazy lbs
-------------------------------------------------------------------------------
-- | Run an IO action forever, pausing the given number of microseconds
-- between runs.  Each run is wrapped in 'logAndBackoff', so exceptions
-- are logged to stderr under the given context string and retried with
-- capped exponential backoff.
indefinitely :: String -> Int -> IO () -> IO ()
indefinitely ctx interval action =
  forever (logAndBackoff ctx action >> threadDelay interval)
-------------------------------------------------------------------------------
-- | Run an IO action, retrying on any exception with exponential backoff
-- starting at 50ms and capped at 60s between attempts.  Every caught
-- exception is logged to stderr, tagged with the given context string.
logAndBackoff :: String -> IO () -> IO ()
logAndBackoff ctx = recovering policy [h] . const
  where
    policy = capDelay (seconds 60) (exponentialBackoff (milliseconds 50))
    -- Catch-all handler: log the exception and tell 'recovering' to retry.
    h _ = Handler (\e -> logError e >> return True)
    -- 'logError' must live in this where-clause: it closes over 'ctx',
    -- which is out of scope at the top level (where the previous layout
    -- placed it, so the module could not compile).
    logError :: SomeException -> IO ()
    logError e = hPutStrLn stderr msg
      where
        msg = "Caught exception in " ++ ctx ++ ": " ++ show e ++ ". Retrying..."
-------------------------------------------------------------------------------
-- | Convert seconds to microseconds.
seconds :: Int -> Int
seconds s = s * 1000000
-------------------------------------------------------------------------------
-- | Convert milliseconds to microseconds.
milliseconds :: Int -> Int
milliseconds ms = ms * 1000
-------------------------------------------------------------------------------
-- | 'fmap' with its arguments flipped; convenient for trailing lambdas.
for :: (Functor f) => f a -> (a -> b) -> f b
for xs f = fmap f xs
| null |
https://raw.githubusercontent.com/Soostone/instrument/a82d1764aad18881c6815c2ee2a55f3f5381c8f5/instrument/src/Instrument/Utils.hs
|
haskell
|
# LANGUAGE OverloadedStrings #
-----------------------------------------------------------------------------
-----------------------------------------------------------------------------
-----------------------------------------------------------------------------
-----------------------------------------------------------------------------
-----------------------------------------------------------------------------
-----------------------------------------------------------------------------
-----------------------------------------------------------------------------
| Digits after the point
| Number
-----------------------------------------------------------------------------
-----------------------------------------------------------------------------
function we use for serializing to Redis.
-----------------------------------------------------------------------------
account for old data. Note that encodeCompress only serializes to
-----------------------------------------------------------------------------
| Run an IO repeatedly with the given delay in microseconds. If
there are exceptions in the inner loop, they are logged to stderr,
prefixed with the given string context and retried at an exponential
-----------------------------------------------------------------------------
-----------------------------------------------------------------------------
| Convert seconds to microseconds
-----------------------------------------------------------------------------
| Convert milliseconds to microseconds
-----------------------------------------------------------------------------
|
# LANGUAGE NoMonomorphismRestriction #
module Instrument.Utils
( formatDecimal,
formatInt,
showT,
showBS,
collect,
noDots,
encodeCompress,
decodeCompress,
indefinitely,
seconds,
milliseconds,
for,
)
where
import Codec.Compression.GZip
import Control.Applicative ((<|>))
import Control.Concurrent (threadDelay)
import Control.Exception (SomeException)
import Control.Monad
import Control.Monad.Catch (Handler (..))
import Control.Retry
import qualified Data.ByteString.Char8 as B
import Data.ByteString.Lazy (fromStrict, toStrict)
import qualified Data.Map as M
import qualified Data.Map.Strict as MS
import qualified Data.SafeCopy as SC
import Data.Serialize
import Data.Text (Text)
import qualified Data.Text as T
import Numeric
import System.IO
collect ::
(Ord b) =>
[a] ->
(a -> b) ->
(a -> c) ->
M.Map b [c]
collect as mkKey mkVal = foldr step M.empty as
where
step x acc = MS.insertWith (++) (mkKey x) ([mkVal x]) acc
noDots :: Text -> Text
noDots = T.intercalate "_" . T.splitOn "."
showT :: Show a => a -> Text
showT = T.pack . show
showBS :: Show a => a -> B.ByteString
showBS = B.pack . show
formatInt :: RealFrac a => a -> Text
formatInt i = showT ((floor i) :: Int)
formatDecimal ::
RealFloat a =>
Int ->
| Add thousands sep ?
Bool ->
a ->
Text
formatDecimal n th i =
let res = T.pack . showFFloat (Just n) i $ ""
in if th then addThousands res else res
addThousands :: Text -> Text
addThousands t = T.concat [n', dec]
where
(n, dec) = T.span (/= '.') t
n' = T.reverse . T.intercalate "," . T.chunksOf 3 . T.reverse $ n
| Serialize and compress with in that order . This is the only
encodeCompress :: SC.SafeCopy a => a -> B.ByteString
encodeCompress = toStrict . compress . runPutLazy . SC.safePut
| Decompress from GZip and deserialize in that order . Tries to
decode SafeCopy first and falls back to Serialize if that fails to
SafeCopy so writes will be updated .
decodeCompress :: (SC.SafeCopy a, Serialize a) => B.ByteString -> Either String a
decodeCompress = decodeWithFallback . decompress . fromStrict
where
decodeWithFallback lbs = runGetLazy SC.safeGet lbs <|> decodeLazy lbs
backoff capped at 60 seconds between .
indefinitely :: String -> Int -> IO () -> IO ()
indefinitely ctx n = forever . delayed . logAndBackoff ctx
where
delayed = (>> threadDelay n)
logAndBackoff :: String -> IO () -> IO ()
logAndBackoff ctx = recovering policy [h] . const
where
policy = capDelay (seconds 60) (exponentialBackoff (milliseconds 50))
h _ = Handler (\e -> logError e >> return True)
logError :: SomeException -> IO ()
logError e = hPutStrLn stderr msg
where
msg = "Caught exception in " ++ ctx ++ ": " ++ show e ++ ". Retrying..."
seconds :: Int -> Int
seconds = (* milliseconds 1000)
milliseconds :: Int -> Int
milliseconds = (* 1000)
for :: (Functor f) => f a -> (a -> b) -> f b
for = flip fmap
|
6d0af2da3251a5da1c0bba8deced0f06e5da36d4d8408a68fddf84bdf2bb83b6
|
uim/uim
|
test-uim-test-utils.scm
|
#!/usr/bin/env gosh
Copyright ( c ) 2003 - 2013 uim Project
;;;
;;; All rights reserved.
;;;
;;; Redistribution and use in source and binary forms, with or without
;;; modification, are permitted provided that the following conditions
;;; are met:
1 . Redistributions of source code must retain the above copyright
;;; notice, this list of conditions and the following disclaimer.
2 . Redistributions in binary form must reproduce the above copyright
;;; notice, this list of conditions and the following disclaimer in the
;;; documentation and/or other materials provided with the distribution.
3 . Neither the name of authors nor the names of its contributors
;;; may be used to endorse or promote products derived from this software
;;; without specific prior written permission.
;;;
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS ` ` AS IS '' AND
;;; ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
ARE DISCLAIMED . IN NO EVENT SHALL THE COPYRIGHT HOLDERS OR LIABLE
FOR ANY DIRECT , INDIRECT , INCIDENTAL , SPECIAL , EXEMPLARY , OR CONSEQUENTIAL
;;; DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS
;;; OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY , WHETHER IN CONTRACT , STRICT
;;; LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY
;;; OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
;;; SUCH DAMAGE.
;;;;
These tests are passed at revision 6605 ( new repository )
(use test.unit)
(require "test/uim-test-utils")
;; Sanity-check for the test harness itself: evaluating an unbound
;; symbol through the uim interpreter must raise an error, which
;; assert-error is expected to detect.
(define-uim-test-case "test uim-test-utils"
  ("test error"
   (assert-error (lambda () (uim 'unbound-symbol)))))
| null |
https://raw.githubusercontent.com/uim/uim/d1ac9d9315ff8c57c713b502544fef9b3a83b3e5/test/test-uim-test-utils.scm
|
scheme
|
All rights reserved.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions
are met:
notice, this list of conditions and the following disclaimer.
notice, this list of conditions and the following disclaimer in the
documentation and/or other materials provided with the distribution.
may be used to endorse or promote products derived from this software
without specific prior written permission.
ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS
OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY
OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
SUCH DAMAGE.
|
#!/usr/bin/env gosh
Copyright ( c ) 2003 - 2013 uim Project
1 . Redistributions of source code must retain the above copyright
2 . Redistributions in binary form must reproduce the above copyright
3 . Neither the name of authors nor the names of its contributors
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS ` ` AS IS '' AND
IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
ARE DISCLAIMED . IN NO EVENT SHALL THE COPYRIGHT HOLDERS OR LIABLE
FOR ANY DIRECT , INDIRECT , INCIDENTAL , SPECIAL , EXEMPLARY , OR CONSEQUENTIAL
HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY , WHETHER IN CONTRACT , STRICT
These tests are passed at revision 6605 ( new repository )
(use test.unit)
(require "test/uim-test-utils")
(define-uim-test-case "test uim-test-utils"
("test error"
(assert-error (lambda () (uim 'unbound-symbol)))))
|
379e8622d028b64d722e62b1b119a3721bb487327556ca6ff2e12edccdda8052
|
facebook/duckling
|
ZH_MO.hs
|
Copyright ( c ) 2016 - present , Facebook , Inc.
-- All rights reserved.
--
-- This source code is licensed under the BSD-style license found in the
-- LICENSE file in the root directory of this source tree. An additional grant
of patent rights can be found in the PATENTS file in the same directory .
-----------------------------------------------------------------
-- Auto-generated by regenClassifiers
--
-- DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
@generated
-----------------------------------------------------------------
{-# LANGUAGE OverloadedStrings #-}
module Duckling.Ranking.Classifiers.ZH_MO (classifiers) where
import Data.String
import Prelude
import qualified Data.HashMap.Strict as HashMap
import Duckling.Ranking.Types
classifiers :: Classifiers
classifiers
= HashMap.fromList
[("\25490\28783\33410",
Classifier{okData =
ClassData{prior = 0.0, unseen = -1.6094379124341003,
likelihoods = HashMap.fromList [("", 0.0)], n = 3},
koData =
ClassData{prior = -infinity, unseen = -0.6931471805599453,
likelihoods = HashMap.fromList [], n = 0}}),
("\19996\27491\25945\26837\26525\20027\26085",
Classifier{okData =
ClassData{prior = 0.0, unseen = -1.791759469228055,
likelihoods = HashMap.fromList [("", 0.0)], n = 4},
koData =
ClassData{prior = -infinity, unseen = -0.6931471805599453,
likelihoods = HashMap.fromList [], n = 0}}),
("number of 5 minutes after|past <integer> (hour-of-day)",
Classifier{okData =
ClassData{prior = -0.2231435513142097, unseen = -3.332204510175204,
likelihoods =
HashMap.fromList
[("<integer> (latent time-of-day)integer (0..10)",
-0.8979415932059586),
("hour", -0.7308875085427924),
("<integer> (latent time-of-day)<number>\20010/\20491",
-2.1972245773362196)],
n = 12},
koData =
ClassData{prior = -1.6094379124341003,
unseen = -2.3025850929940455,
likelihoods =
HashMap.fromList
[("<integer> (latent time-of-day)integer (0..10)",
-0.8109302162163288),
("hour", -0.8109302162163288)],
n = 3}}),
("\21360\24230\20016\25910\33410\31532\22235\22825",
Classifier{okData =
ClassData{prior = 0.0, unseen = -1.6094379124341003,
likelihoods = HashMap.fromList [("", 0.0)], n = 3},
koData =
ClassData{prior = -infinity, unseen = -0.6931471805599453,
likelihoods = HashMap.fromList [], n = 0}}),
("<time> timezone",
Classifier{okData =
ClassData{prior = 0.0, unseen = -1.6094379124341003,
likelihoods =
HashMap.fromList
[("<time-of-day> am|pm", -0.6931471805599453),
("hour", -0.6931471805599453)],
n = 1},
koData =
ClassData{prior = -infinity, unseen = -1.0986122886681098,
likelihoods = HashMap.fromList [], n = 0}}),
("Thursday",
Classifier{okData =
ClassData{prior = -0.4700036292457356,
unseen = -1.9459101490553135,
likelihoods = HashMap.fromList [("", 0.0)], n = 5},
koData =
ClassData{prior = -0.9808292530117262,
unseen = -1.6094379124341003,
likelihoods = HashMap.fromList [("", 0.0)], n = 3}}),
("integer (numeric)",
Classifier{okData =
ClassData{prior = -0.6931471805599453, unseen = -4.23410650459726,
likelihoods = HashMap.fromList [("", 0.0)], n = 67},
koData =
ClassData{prior = -0.6931471805599453, unseen = -4.23410650459726,
likelihoods = HashMap.fromList [("", 0.0)], n = 67}}),
("\21355\22622\33410",
Classifier{okData =
ClassData{prior = 0.0, unseen = -1.9459101490553135,
likelihoods = HashMap.fromList [("", 0.0)], n = 5},
koData =
ClassData{prior = -infinity, unseen = -0.6931471805599453,
likelihoods = HashMap.fromList [], n = 0}}),
("the day before yesterday",
Classifier{okData =
ClassData{prior = 0.0, unseen = -1.3862943611198906,
likelihoods = HashMap.fromList [("", 0.0)], n = 2},
koData =
ClassData{prior = -infinity, unseen = -0.6931471805599453,
likelihoods = HashMap.fromList [], n = 0}}),
("\22269\38469\28040\36153\32773\26435\30410\26085",
Classifier{okData =
ClassData{prior = 0.0, unseen = -1.6094379124341003,
likelihoods = HashMap.fromList [("", 0.0)], n = 3},
koData =
ClassData{prior = -infinity, unseen = -0.6931471805599453,
likelihoods = HashMap.fromList [], n = 0}}),
("\24314\20891\33410",
Classifier{okData =
ClassData{prior = 0.0, unseen = -1.6094379124341003,
likelihoods = HashMap.fromList [("", 0.0)], n = 3},
koData =
ClassData{prior = -infinity, unseen = -0.6931471805599453,
likelihoods = HashMap.fromList [], n = 0}}),
("\29369\22826\26032\24180",
Classifier{okData =
ClassData{prior = 0.0, unseen = -1.3862943611198906,
likelihoods = HashMap.fromList [("", 0.0)], n = 2},
koData =
ClassData{prior = -infinity, unseen = -0.6931471805599453,
likelihoods = HashMap.fromList [], n = 0}}),
("today",
Classifier{okData =
ClassData{prior = 0.0, unseen = -1.3862943611198906,
likelihoods = HashMap.fromList [("", 0.0)], n = 2},
koData =
ClassData{prior = -infinity, unseen = -0.6931471805599453,
likelihoods = HashMap.fromList [], n = 0}}),
("mm/dd",
Classifier{okData =
ClassData{prior = -1.6094379124341003,
unseen = -1.0986122886681098,
likelihoods = HashMap.fromList [("", 0.0)], n = 1},
koData =
ClassData{prior = -0.2231435513142097, unseen = -1.791759469228055,
likelihoods = HashMap.fromList [("", 0.0)], n = 4}}),
("absorption of , after named day",
Classifier{okData =
ClassData{prior = 0.0, unseen = -2.833213344056216,
likelihoods =
HashMap.fromList
[("day", -0.6931471805599453), ("Sunday", -0.6931471805599453)],
n = 7},
koData =
ClassData{prior = -infinity, unseen = -1.0986122886681098,
likelihoods = HashMap.fromList [], n = 0}}),
("September",
Classifier{okData =
ClassData{prior = 0.0, unseen = -1.3862943611198906,
likelihoods = HashMap.fromList [("", 0.0)], n = 2},
koData =
ClassData{prior = -infinity, unseen = -0.6931471805599453,
likelihoods = HashMap.fromList [], n = 0}}),
("tonight",
Classifier{okData =
ClassData{prior = 0.0, unseen = -1.791759469228055,
likelihoods = HashMap.fromList [("", 0.0)], n = 4},
koData =
ClassData{prior = -infinity, unseen = -0.6931471805599453,
likelihoods = HashMap.fromList [], n = 0}}),
("October",
Classifier{okData =
ClassData{prior = 0.0, unseen = -1.791759469228055,
likelihoods = HashMap.fromList [("", 0.0)], n = 4},
koData =
ClassData{prior = -infinity, unseen = -0.6931471805599453,
likelihoods = HashMap.fromList [], n = 0}}),
("month (grain)",
Classifier{okData =
ClassData{prior = -0.963437510299857, unseen = -3.4339872044851463,
likelihoods = HashMap.fromList [("", 0.0)], n = 29},
koData =
ClassData{prior = -0.48058573857627246,
unseen = -3.891820298110627,
likelihoods = HashMap.fromList [("", 0.0)], n = 47}}),
("<time-of-day> o'clock",
Classifier{okData =
ClassData{prior = -1.466337068793427, unseen = -3.044522437723423,
likelihoods =
HashMap.fromList
[("<integer> (latent time-of-day)", -0.6931471805599453),
("hour", -0.6931471805599453)],
n = 9},
koData =
ClassData{prior = -0.262364264467491, unseen = -4.143134726391533,
likelihoods =
HashMap.fromList
[("<integer> (latent time-of-day)", -0.6931471805599453),
("hour", -0.6931471805599453)],
n = 30}}),
("national day",
Classifier{okData =
ClassData{prior = -0.2231435513142097,
unseen = -2.3025850929940455,
likelihoods = HashMap.fromList [("", 0.0)], n = 8},
koData =
ClassData{prior = -1.6094379124341003,
unseen = -1.3862943611198906,
likelihoods = HashMap.fromList [("", 0.0)], n = 2}}),
("integer (20,30,40)",
Classifier{okData =
ClassData{prior = 0.0, unseen = -1.3862943611198906,
likelihoods = HashMap.fromList [("", 0.0)], n = 2},
koData =
ClassData{prior = -infinity, unseen = -0.6931471805599453,
likelihoods = HashMap.fromList [], n = 0}}),
("Wednesday",
Classifier{okData =
ClassData{prior = -5.715841383994864e-2,
unseen = -2.9444389791664407,
likelihoods = HashMap.fromList [("", 0.0)], n = 17},
koData =
ClassData{prior = -2.890371757896165, unseen = -1.0986122886681098,
likelihoods = HashMap.fromList [("", 0.0)], n = 1}}),
("\21360\24230\20016\25910\33410\31532\19977\22825",
Classifier{okData =
ClassData{prior = 0.0, unseen = -1.6094379124341003,
likelihoods = HashMap.fromList [("", 0.0)], n = 3},
koData =
ClassData{prior = -infinity, unseen = -0.6931471805599453,
likelihoods = HashMap.fromList [], n = 0}}),
("hour (grain)",
Classifier{okData =
ClassData{prior = -9.53101798043249e-2,
unseen = -3.0910424533583156,
likelihoods = HashMap.fromList [("", 0.0)], n = 20},
koData =
ClassData{prior = -2.3978952727983707,
unseen = -1.3862943611198906,
likelihoods = HashMap.fromList [("", 0.0)], n = 2}}),
("\22307\20250\33410",
Classifier{okData =
ClassData{prior = 0.0, unseen = -1.3862943611198906,
likelihoods = HashMap.fromList [("", 0.0)], n = 2},
koData =
ClassData{prior = -infinity, unseen = -0.6931471805599453,
likelihoods = HashMap.fromList [], n = 0}}),
("\20803\26086",
Classifier{okData =
ClassData{prior = -1.0986122886681098, unseen = -1.791759469228055,
likelihoods = HashMap.fromList [("", 0.0)], n = 4},
koData =
ClassData{prior = -0.40546510810816444,
unseen = -2.3025850929940455,
likelihoods = HashMap.fromList [("", 0.0)], n = 8}}),
("\32654\22269\29420\31435\26085",
Classifier{okData =
ClassData{prior = -infinity, unseen = -0.6931471805599453,
likelihoods = HashMap.fromList [], n = 0},
koData =
ClassData{prior = 0.0, unseen = -1.3862943611198906,
likelihoods = HashMap.fromList [("", 0.0)], n = 2}}),
("intersect",
Classifier{okData =
ClassData{prior = -5.694137640013845e-2,
unseen = -6.329720905522696,
likelihoods =
HashMap.fromList
[("\20799\31461\33410<part-of-day> <dim time>",
-4.718498871295094),
("year (numeric with year symbol)\20809\26126\33410",
-4.248495242049359),
("xxxx year<named-month> <day-of-month>", -4.941642422609305),
("daymonth", -4.248495242049359),
("monthday", -1.9459101490553135),
("next yearSeptember", -5.2293244950610855),
("year (numeric with year symbol)\25995\26376",
-4.941642422609305),
("year (numeric with year symbol)\20061\22812\33410",
-4.941642422609305),
("year (numeric with year symbol)February", -4.718498871295094),
("xxxx yearintersect", -4.941642422609305),
("March<time> <day-of-month>", -3.7629874262676584),
("year (numeric with year symbol)<named-month> <day-of-month>",
-3.494723439672979),
("monthhour", -3.7629874262676584),
("year (numeric with year symbol)\22320\29699\19968\23567\26102",
-5.2293244950610855),
("year (numeric with year symbol)April", -5.2293244950610855),
("dayday", -2.284885515894645),
("hourhour", -4.718498871295094),
("xxxx yearFebruary", -5.634789603169249),
("year (numeric with year symbol)March", -4.1307122063929755),
("February<dim time> <part-of-day>", -3.7629874262676584),
("hourminute", -4.718498871295094),
("April<time> <day-of-month>", -5.2293244950610855),
("February<time> <day-of-month>", -2.614364717024887),
("absorption of , after named day<named-month> <day-of-month>",
-3.619886582626985),
("year (numeric with year symbol)\22823\25995\26399",
-4.941642422609305),
("this <cycle><time> <day-of-month>", -4.941642422609305),
("year (numeric with year symbol)\22235\26092\33410",
-5.2293244950610855),
("yearmonth", -3.332204510175204),
("year (numeric with year symbol)\20303\26842\33410",
-5.2293244950610855),
("dayminute", -4.718498871295094),
("next <cycle>September", -5.634789603169249),
("intersect by \",\"<time> <day-of-month>", -3.619886582626985),
("xxxx yearMarch", -5.634789603169249),
("absorption of , after named dayintersect",
-3.619886582626985),
("intersect<time> <day-of-month>", -2.8015762591130335),
("next <cycle><time> <day-of-month>", -4.941642422609305),
("tonight<time-of-day> o'clock", -4.718498871295094),
("year (numeric with year symbol)intersect",
-3.494723439672979),
("yearday", -2.0794415416798357),
("absorption of , after named dayFebruary", -4.248495242049359),
("year (numeric with year symbol)\19971\19971\33410",
-4.248495242049359),
("year (numeric with year symbol)\36926\36234\33410",
-5.2293244950610855),
("year (numeric with year symbol)\29369\22826\26032\24180",
-5.2293244950610855),
("yearminute", -5.2293244950610855),
("<dim time> <part-of-day>relative (10-59) minutes after|past <integer> (hour-of-day)",
-4.718498871295094)],
n = 256},
koData =
ClassData{prior = -2.894068619777491, unseen = -4.3694478524670215,
likelihoods =
HashMap.fromList
[("\20799\31461\33410<part-of-day> <dim time>",
-2.159484249353372),
("dayhour", -2.7472709142554916),
("year (numeric with year symbol)Sunday", -3.6635616461296463),
("<dim time> <part-of-day><time-of-day> o'clock",
-3.258096538021482),
("hourhour", -3.258096538021482),
("hourminute", -2.7472709142554916),
("dayminute", -2.7472709142554916),
("yearday", -3.6635616461296463),
("<dim time> <part-of-day>relative (10-59) minutes after|past <integer> (hour-of-day)",
-2.7472709142554916)],
n = 15}}),
("half after|past <integer> (hour-of-day)",
Classifier{okData =
ClassData{prior = 0.0, unseen = -2.5649493574615367,
likelihoods =
HashMap.fromList
[("<integer> (latent time-of-day)", -0.6931471805599453),
("hour", -0.6931471805599453)],
n = 5},
koData =
ClassData{prior = -infinity, unseen = -1.0986122886681098,
likelihoods = HashMap.fromList [], n = 0}}),
("\20399\20029\33410",
Classifier{okData =
ClassData{prior = 0.0, unseen = -2.3978952727983707,
likelihoods = HashMap.fromList [("", 0.0)], n = 9},
koData =
ClassData{prior = -infinity, unseen = -0.6931471805599453,
likelihoods = HashMap.fromList [], n = 0}}),
("year (grain)",
Classifier{okData =
ClassData{prior = -1.625967214385311, unseen = -2.639057329615259,
likelihoods = HashMap.fromList [("", 0.0)], n = 12},
koData =
ClassData{prior = -0.21905356606268464,
unseen = -3.9318256327243257,
likelihoods = HashMap.fromList [("", 0.0)], n = 49}}),
("Saturday",
Classifier{okData =
ClassData{prior = -0.8754687373538999,
unseen = -1.9459101490553135,
likelihoods = HashMap.fromList [("", 0.0)], n = 5},
koData =
ClassData{prior = -0.5389965007326869,
unseen = -2.1972245773362196,
likelihoods = HashMap.fromList [("", 0.0)], n = 7}}),
("next <cycle>",
Classifier{okData =
ClassData{prior = -0.570544858467613, unseen = -3.4965075614664802,
likelihoods =
HashMap.fromList
[("week", -1.6739764335716716),
("month (grain)", -1.5198257537444133),
("year (grain)", -2.367123614131617),
("week (grain)", -1.6739764335716716),
("year", -2.367123614131617), ("month", -1.5198257537444133)],
n = 13},
koData =
ClassData{prior = -0.832909122935104, unseen = -3.295836866004329,
likelihoods =
HashMap.fromList
[("week", -0.8602012652231115),
("week (grain)", -0.8602012652231115)],
n = 10}}),
("last year",
Classifier{okData =
ClassData{prior = 0.0, unseen = -1.3862943611198906,
likelihoods = HashMap.fromList [("", 0.0)], n = 2},
koData =
ClassData{prior = -infinity, unseen = -0.6931471805599453,
likelihoods = HashMap.fromList [], n = 0}}),
("\22307\27583\34987\27585\26085",
Classifier{okData =
ClassData{prior = 0.0, unseen = -1.6094379124341003,
likelihoods = HashMap.fromList [("", 0.0)], n = 3},
koData =
ClassData{prior = -infinity, unseen = -0.6931471805599453,
likelihoods = HashMap.fromList [], n = 0}}),
("this <day-of-week>",
Classifier{okData =
ClassData{prior = 0.0, unseen = -3.970291913552122,
likelihoods =
HashMap.fromList
[("Wednesday", -1.8718021769015913),
("Monday", -1.8718021769015913), ("day", -0.7323678937132265),
("Tuesday", -1.5533484457830569)],
n = 24},
koData =
ClassData{prior = -infinity, unseen = -1.6094379124341003,
likelihoods = HashMap.fromList [], n = 0}}),
("\35199\36203\25176\25289\33410",
Classifier{okData =
ClassData{prior = 0.0, unseen = -2.3025850929940455,
likelihoods = HashMap.fromList [("", 0.0)], n = 8},
koData =
ClassData{prior = -infinity, unseen = -0.6931471805599453,
likelihoods = HashMap.fromList [], n = 0}}),
("yyyy-mm-dd",
Classifier{okData =
ClassData{prior = 0.0, unseen = -1.3862943611198906,
likelihoods = HashMap.fromList [("", 0.0)], n = 2},
koData =
ClassData{prior = -infinity, unseen = -0.6931471805599453,
likelihoods = HashMap.fromList [], n = 0}}),
("mm/dd/yyyy",
Classifier{okData =
ClassData{prior = 0.0, unseen = -1.791759469228055,
likelihoods = HashMap.fromList [("", 0.0)], n = 4},
koData =
ClassData{prior = -infinity, unseen = -0.6931471805599453,
likelihoods = HashMap.fromList [], n = 0}}),
("\20811\21704\29305\26222\36838\33410",
Classifier{okData =
ClassData{prior = 0.0, unseen = -1.0986122886681098,
likelihoods = HashMap.fromList [("", 0.0)], n = 1},
koData =
ClassData{prior = -infinity, unseen = -0.6931471805599453,
likelihoods = HashMap.fromList [], n = 0}}),
("\21313\32988\33410",
Classifier{okData =
ClassData{prior = 0.0, unseen = -1.6094379124341003,
likelihoods = HashMap.fromList [("", 0.0)], n = 3},
koData =
ClassData{prior = -infinity, unseen = -0.6931471805599453,
likelihoods = HashMap.fromList [], n = 0}}),
("evening|night",
Classifier{okData =
ClassData{prior = 0.0, unseen = -1.791759469228055,
likelihoods = HashMap.fromList [("", 0.0)], n = 4},
koData =
ClassData{prior = -infinity, unseen = -0.6931471805599453,
likelihoods = HashMap.fromList [], n = 0}}),
("\20303\26842\33410",
Classifier{okData =
ClassData{prior = 0.0, unseen = -1.3862943611198906,
likelihoods = HashMap.fromList [("", 0.0)], n = 2},
koData =
ClassData{prior = -infinity, unseen = -0.6931471805599453,
likelihoods = HashMap.fromList [], n = 0}}),
("\22307\19977\19968\20027\26085",
Classifier{okData =
ClassData{prior = 0.0, unseen = -1.9459101490553135,
likelihoods = HashMap.fromList [("", 0.0)], n = 5},
koData =
ClassData{prior = -infinity, unseen = -0.6931471805599453,
likelihoods = HashMap.fromList [], n = 0}}),
("\30331\38660\33410",
Classifier{okData =
ClassData{prior = 0.0, unseen = -1.3862943611198906,
likelihoods = HashMap.fromList [("", 0.0)], n = 2},
koData =
ClassData{prior = -infinity, unseen = -0.6931471805599453,
likelihoods = HashMap.fromList [], n = 0}}),
("Monday",
Classifier{okData =
ClassData{prior = -0.15415067982725836,
unseen = -3.258096538021482,
likelihoods = HashMap.fromList [("", 0.0)], n = 24},
koData =
ClassData{prior = -1.9459101490553135, unseen = -1.791759469228055,
likelihoods = HashMap.fromList [("", 0.0)], n = 4}}),
("\19971\19971\33410",
Classifier{okData =
ClassData{prior = 0.0, unseen = -2.1972245773362196,
likelihoods = HashMap.fromList [("", 0.0)], n = 7},
koData =
ClassData{prior = -infinity, unseen = -0.6931471805599453,
likelihoods = HashMap.fromList [], n = 0}}),
("yesterday",
Classifier{okData =
ClassData{prior = 0.0, unseen = -1.6094379124341003,
likelihoods = HashMap.fromList [("", 0.0)], n = 3},
koData =
ClassData{prior = -infinity, unseen = -0.6931471805599453,
likelihoods = HashMap.fromList [], n = 0}}),
("next year",
Classifier{okData =
ClassData{prior = 0.0, unseen = -1.791759469228055,
likelihoods = HashMap.fromList [("", 0.0)], n = 4},
koData =
ClassData{prior = -infinity, unseen = -0.6931471805599453,
likelihoods = HashMap.fromList [], n = 0}}),
("<time> <day-of-month>",
Classifier{okData =
ClassData{prior = -0.24946085963158313,
unseen = -4.204692619390966,
likelihoods =
HashMap.fromList
[("integer (numeric)", -1.3564413979702095),
("integer (20,30,40)", -3.0910424533583156),
("integer with consecutive unit modifiers", -1.245215762859985),
("integer (0..10)", -1.4170660197866443),
("number suffix: \21313|\25342", -2.1102132003465894),
("compose by multiplication", -3.0910424533583156)],
n = 60},
koData =
ClassData{prior = -1.5105920777974677,
unseen = -3.1780538303479458,
likelihoods =
HashMap.fromList
[("integer (0..10)", -0.3629054936893685),
("number suffix: \21313|\25342", -2.03688192726104)],
n = 17}}),
("\19996\27491\25945\22797\27963\33410",
Classifier{okData =
ClassData{prior = -infinity, unseen = -0.6931471805599453,
likelihoods = HashMap.fromList [], n = 0},
koData =
ClassData{prior = 0.0, unseen = -1.6094379124341003,
likelihoods = HashMap.fromList [("", 0.0)], n = 3}}),
("hh:mm (time-of-day)",
Classifier{okData =
ClassData{prior = 0.0, unseen = -2.833213344056216,
likelihoods = HashMap.fromList [("", 0.0)], n = 15},
koData =
ClassData{prior = -infinity, unseen = -0.6931471805599453,
likelihoods = HashMap.fromList [], n = 0}}),
("relative (1-9) minutes after|past <integer> (hour-of-day)",
Classifier{okData =
ClassData{prior = 0.0, unseen = -3.044522437723423,
likelihoods =
HashMap.fromList
[("<integer> (latent time-of-day)integer (0..10)",
-0.6931471805599453),
("hour", -0.6931471805599453)],
n = 9},
koData =
ClassData{prior = -infinity, unseen = -1.0986122886681098,
likelihoods = HashMap.fromList [], n = 0}}),
("<integer> (latent time-of-day)",
Classifier{okData =
ClassData{prior = -0.2754119798599665,
unseen = -3.8066624897703196,
likelihoods =
HashMap.fromList
[("integer (numeric)", -2.174751721484161),
("integer (0..10)", -0.1466034741918754)],
n = 41},
koData =
ClassData{prior = -1.4240346891027378, unseen = -2.833213344056216,
likelihoods =
HashMap.fromList
[("integer (numeric)", -0.4700036292457356),
("one point 2", -1.1631508098056809)],
n = 13}}),
("\36926\36234\33410",
Classifier{okData =
ClassData{prior = 0.0, unseen = -1.3862943611198906,
likelihoods = HashMap.fromList [("", 0.0)], n = 2},
koData =
ClassData{prior = -infinity, unseen = -0.6931471805599453,
likelihoods = HashMap.fromList [], n = 0}}),
("nth <time> of <time>",
Classifier{okData =
ClassData{prior = 0.0, unseen = -2.3978952727983707,
likelihoods =
HashMap.fromList
[("Octoberordinal (digits)Monday", -0.6931471805599453),
("monthday", -0.6931471805599453)],
n = 4},
koData =
ClassData{prior = -infinity, unseen = -1.0986122886681098,
likelihoods = HashMap.fromList [], n = 0}}),
("\22235\26092\33410",
Classifier{okData =
ClassData{prior = 0.0, unseen = -1.3862943611198906,
likelihoods = HashMap.fromList [("", 0.0)], n = 2},
koData =
ClassData{prior = -infinity, unseen = -0.6931471805599453,
likelihoods = HashMap.fromList [], n = 0}}),
("\19996\27491\25945\22307\21608\20845",
Classifier{okData =
ClassData{prior = 0.0, unseen = -1.9459101490553135,
likelihoods = HashMap.fromList [("", 0.0)], n = 5},
koData =
ClassData{prior = -infinity, unseen = -0.6931471805599453,
likelihoods = HashMap.fromList [], n = 0}}),
("April",
Classifier{okData =
ClassData{prior = 0.0, unseen = -1.3862943611198906,
likelihoods = HashMap.fromList [("", 0.0)], n = 2},
koData =
ClassData{prior = -infinity, unseen = -0.6931471805599453,
likelihoods = HashMap.fromList [], n = 0}}),
("\21360\24230\20016\25910\33410",
Classifier{okData =
ClassData{prior = -0.5108256237659907,
unseen = -1.6094379124341003,
likelihoods = HashMap.fromList [("", 0.0)], n = 3},
koData =
ClassData{prior = -0.916290731874155, unseen = -1.3862943611198906,
likelihoods = HashMap.fromList [("", 0.0)], n = 2}}),
("\20809\26126\33410",
Classifier{okData =
ClassData{prior = 0.0, unseen = -2.1972245773362196,
likelihoods = HashMap.fromList [("", 0.0)], n = 7},
koData =
ClassData{prior = -infinity, unseen = -0.6931471805599453,
likelihoods = HashMap.fromList [], n = 0}}),
("week (grain)",
Classifier{okData =
ClassData{prior = -0.8434293836092833,
unseen = -3.6635616461296463,
likelihoods = HashMap.fromList [("", 0.0)], n = 37},
koData =
ClassData{prior = -0.5625269981428811,
unseen = -3.9318256327243257,
likelihoods = HashMap.fromList [("", 0.0)], n = 49}}),
("relative (10-59) minutes after|past <integer> (hour-of-day)",
Classifier{okData =
ClassData{prior = -0.45198512374305727,
unseen = -4.127134385045092,
likelihoods =
HashMap.fromList
[("<integer> (latent time-of-day)compose by multiplication",
-2.164963715117998),
("<integer> (latent time-of-day)integer with consecutive unit modifiers",
-0.9753796482441617),
("hour", -0.7435780341868373)],
n = 28},
koData =
ClassData{prior = -1.0116009116784799,
unseen = -3.6375861597263857,
likelihoods =
HashMap.fromList
[("<integer> (latent time-of-day)number suffix: \21313|\25342",
-1.413693335308005),
("<integer> (latent time-of-day)integer (0..10)",
-1.413693335308005),
("hour", -0.7777045685880083)],
n = 16}}),
("year (numeric with year symbol)",
Classifier{okData =
ClassData{prior = 0.0, unseen = -3.891820298110627,
likelihoods = HashMap.fromList [("integer (numeric)", 0.0)],
n = 47},
koData =
ClassData{prior = -infinity, unseen = -0.6931471805599453,
likelihoods = HashMap.fromList [], n = 0}}),
("now",
Classifier{okData =
ClassData{prior = 0.0, unseen = -2.4849066497880004,
likelihoods = HashMap.fromList [("", 0.0)], n = 10},
koData =
ClassData{prior = -infinity, unseen = -0.6931471805599453,
likelihoods = HashMap.fromList [], n = 0}}),
("\22823\25995\26399",
Classifier{okData =
ClassData{prior = 0.0, unseen = -1.6094379124341003,
likelihoods = HashMap.fromList [("", 0.0)], n = 3},
koData =
ClassData{prior = -infinity, unseen = -0.6931471805599453,
likelihoods = HashMap.fromList [], n = 0}}),
("\24858\20154\33410",
Classifier{okData =
ClassData{prior = 0.0, unseen = -1.3862943611198906,
likelihoods = HashMap.fromList [("", 0.0)], n = 2},
koData =
ClassData{prior = -infinity, unseen = -0.6931471805599453,
likelihoods = HashMap.fromList [], n = 0}}),
("\29369\22826\26893\26641\33410",
Classifier{okData =
ClassData{prior = 0.0, unseen = -2.0794415416798357,
likelihoods = HashMap.fromList [("", 0.0)], n = 6},
koData =
ClassData{prior = -infinity, unseen = -0.6931471805599453,
likelihoods = HashMap.fromList [], n = 0}}),
("\19996\27491\25945\22797\27963\33410\26143\26399\19968",
Classifier{okData =
ClassData{prior = 0.0, unseen = -1.3862943611198906,
likelihoods = HashMap.fromList [("", 0.0)], n = 2},
koData =
ClassData{prior = -infinity, unseen = -0.6931471805599453,
likelihoods = HashMap.fromList [], n = 0}}),
("\22307\28789\33410\24198\26085",
Classifier{okData =
ClassData{prior = 0.0, unseen = -1.3862943611198906,
likelihoods = HashMap.fromList [("", 0.0)], n = 2},
koData =
ClassData{prior = -infinity, unseen = -0.6931471805599453,
likelihoods = HashMap.fromList [], n = 0}}),
("numbers prefix with -, negative or minus",
Classifier{okData =
ClassData{prior = -infinity, unseen = -0.6931471805599453,
likelihoods = HashMap.fromList [], n = 0},
koData =
ClassData{prior = 0.0, unseen = -1.791759469228055,
likelihoods = HashMap.fromList [("integer (numeric)", 0.0)],
n = 4}}),
("Friday",
Classifier{okData =
ClassData{prior = -0.6931471805599453,
unseen = -1.9459101490553135,
likelihoods = HashMap.fromList [("", 0.0)], n = 5},
koData =
ClassData{prior = -0.6931471805599453,
unseen = -1.9459101490553135,
likelihoods = HashMap.fromList [("", 0.0)], n = 5}}),
("tomorrow",
Classifier{okData =
ClassData{prior = 0.0, unseen = -1.6094379124341003,
likelihoods = HashMap.fromList [("", 0.0)], n = 3},
koData =
ClassData{prior = -infinity, unseen = -0.6931471805599453,
likelihoods = HashMap.fromList [], n = 0}}),
("this year",
Classifier{okData =
ClassData{prior = 0.0, unseen = -1.0986122886681098,
likelihoods = HashMap.fromList [("", 0.0)], n = 1},
koData =
ClassData{prior = -infinity, unseen = -0.6931471805599453,
likelihoods = HashMap.fromList [], n = 0}}),
("\22522\30563\22307\20307\33410",
Classifier{okData =
ClassData{prior = 0.0, unseen = -1.6094379124341003,
likelihoods = HashMap.fromList [("", 0.0)], n = 3},
koData =
ClassData{prior = -infinity, unseen = -0.6931471805599453,
likelihoods = HashMap.fromList [], n = 0}}),
("\28595\38376\22238\24402\32426\24565\26085",
Classifier{okData =
ClassData{prior = 0.0, unseen = -1.3862943611198906,
likelihoods = HashMap.fromList [("", 0.0)], n = 2},
koData =
ClassData{prior = -infinity, unseen = -0.6931471805599453,
likelihoods = HashMap.fromList [], n = 0}}),
("\21360\24230\20804\22969\33410",
Classifier{okData =
ClassData{prior = 0.0, unseen = -1.3862943611198906,
likelihoods = HashMap.fromList [("", 0.0)], n = 2},
koData =
ClassData{prior = -infinity, unseen = -0.6931471805599453,
likelihoods = HashMap.fromList [], n = 0}}),
("next <day-of-week>",
Classifier{okData =
ClassData{prior = 0.0, unseen = -3.1780538303479458,
likelihoods =
HashMap.fromList
[("Wednesday", -1.3437347467010947),
("day", -0.7375989431307791), ("Tuesday", -1.3437347467010947)],
n = 10},
koData =
ClassData{prior = -infinity, unseen = -1.3862943611198906,
likelihoods = HashMap.fromList [], n = 0}}),
("fractional number",
Classifier{okData =
ClassData{prior = -infinity, unseen = -0.6931471805599453,
likelihoods = HashMap.fromList [], n = 0},
koData =
ClassData{prior = 0.0, unseen = -1.9459101490553135,
likelihoods = HashMap.fromList [("", 0.0)], n = 5}}),
("Sunday",
Classifier{okData =
ClassData{prior = -4.8790164169432056e-2,
unseen = -3.0910424533583156,
likelihoods = HashMap.fromList [("", 0.0)], n = 20},
koData =
ClassData{prior = -3.044522437723423, unseen = -1.0986122886681098,
likelihoods = HashMap.fromList [("", 0.0)], n = 1}}),
("afternoon",
Classifier{okData =
ClassData{prior = 0.0, unseen = -3.6375861597263857,
likelihoods = HashMap.fromList [("", 0.0)], n = 36},
koData =
ClassData{prior = -infinity, unseen = -0.6931471805599453,
likelihoods = HashMap.fromList [], n = 0}}),
("<duration> from now",
Classifier{okData =
ClassData{prior = -infinity, unseen = -2.1972245773362196,
likelihoods = HashMap.fromList [], n = 0},
koData =
ClassData{prior = 0.0, unseen = -3.891820298110627,
likelihoods =
HashMap.fromList
[("week", -2.2617630984737906), ("second", -2.772588722239781),
("day", -2.2617630984737906), ("year", -2.772588722239781),
("<integer> <unit-of-duration>", -0.8266785731844679),
("hour", -2.2617630984737906), ("month", -2.772588722239781),
("minute", -2.772588722239781)],
n = 20}}),
("\36174\32618\26085",
Classifier{okData =
ClassData{prior = 0.0, unseen = -2.0794415416798357,
likelihoods = HashMap.fromList [("", 0.0)], n = 6},
koData =
ClassData{prior = -infinity, unseen = -0.6931471805599453,
likelihoods = HashMap.fromList [], n = 0}}),
("February",
Classifier{okData =
ClassData{prior = 0.0, unseen = -3.258096538021482,
likelihoods = HashMap.fromList [("", 0.0)], n = 24},
koData =
ClassData{prior = -infinity, unseen = -0.6931471805599453,
likelihoods = HashMap.fromList [], n = 0}}),
("this <cycle>",
Classifier{okData =
ClassData{prior = -0.8909729238898653,
unseen = -3.6635616461296463,
likelihoods =
HashMap.fromList
[("week", -1.1526795099383855),
("month (grain)", -2.2512917986064953),
("year (grain)", -2.538973871058276),
("week (grain)", -1.1526795099383855),
("year", -2.538973871058276), ("month", -2.2512917986064953)],
n = 16},
koData =
ClassData{prior = -0.5280674302004967, unseen = -3.970291913552122,
likelihoods =
HashMap.fromList
[("week", -0.7731898882334817),
("week (grain)", -0.7731898882334817)],
n = 23}}),
("minute (grain)",
Classifier{okData =
ClassData{prior = -0.4462871026284195, unseen = -2.890371757896165,
likelihoods = HashMap.fromList [("", 0.0)], n = 16},
koData =
ClassData{prior = -1.0216512475319814,
unseen = -2.3978952727983707,
likelihoods = HashMap.fromList [("", 0.0)], n = 9}}),
("xxxx year",
Classifier{okData =
ClassData{prior = 0.0, unseen = -1.3862943611198906,
likelihoods =
HashMap.fromList
[("integer (0..10)integer (0..10)integer (0..10)integer (0..10)",
0.0)],
n = 2},
koData =
ClassData{prior = -infinity, unseen = -0.6931471805599453,
likelihoods = HashMap.fromList [], n = 0}}),
("<dim time> <part-of-day>",
Classifier{okData =
ClassData{prior = -7.696104113612832e-2,
unseen = -4.6913478822291435,
likelihoods =
HashMap.fromList
[("dayhour", -0.750305594399894),
("national dayevening|night", -3.58351893845611),
("<named-month> <day-of-month>morning", -2.117181869662683),
("\24773\20154\33410evening|night", -3.58351893845611),
("\20799\31461\33410afternoon", -3.58351893845611),
("intersectmorning", -2.117181869662683),
("<time> <day-of-month>morning", -2.117181869662683),
("Mondaymorning", -2.4849066497880004)],
n = 50},
koData =
ClassData{prior = -2.6026896854443837, unseen = -2.833213344056216,
likelihoods =
HashMap.fromList
[("dayhour", -1.1631508098056809),
("<time> <day-of-month>morning", -1.1631508098056809)],
n = 4}}),
("<part-of-day> <dim time>",
Classifier{okData =
ClassData{prior = -0.7935659283069926,
unseen = -5.0369526024136295,
likelihoods =
HashMap.fromList
[("tonight<integer> (latent time-of-day)", -3.4210000089583352),
("afternoonrelative (10-59) minutes after|past <integer> (hour-of-day)",
-1.6631420914059614),
("hourhour", -2.322387720290225),
("afternoon<time-of-day> o'clock", -3.644143560272545),
("hourminute", -0.9699949108460162),
("afternoon<integer> (latent time-of-day)", -3.644143560272545),
("afternoonrelative (1-9) minutes after|past <integer> (hour-of-day)",
-2.72785282839839),
("afternoonhh:mm (time-of-day)", -3.644143560272545),
("tonight<time-of-day> o'clock", -3.4210000089583352),
("afternoonnumber of 5 minutes after|past <integer> (hour-of-day)",
-2.4654885639308985),
("afternoonhalf after|past <integer> (hour-of-day)",
-3.2386784521643803)],
n = 71},
koData =
ClassData{prior = -0.6018985090948004, unseen = -5.214935757608986,
likelihoods =
HashMap.fromList
[("afternoonrelative (10-59) minutes after|past <integer> (hour-of-day)",
-2.3762728087852047),
("hourhour", -0.9899784476653142),
("afternoon<time-of-day> o'clock", -1.7754989483562746),
("hourminute", -2.21375387928743),
("afternoon<integer> (latent time-of-day)", -1.571899993115035),
("afternoonnumber of 5 minutes after|past <integer> (hour-of-day)",
-3.82319179172153)],
n = 86}}),
("<integer> <unit-of-duration>",
Classifier{okData =
ClassData{prior = -infinity, unseen = -3.1354942159291497,
likelihoods = HashMap.fromList [], n = 0},
koData =
ClassData{prior = 0.0, unseen = -6.244166900663736,
likelihoods =
HashMap.fromList
[("number suffix: \21313|\25342month (grain)",
-4.632785353021065),
("week", -3.0233474405869645),
("integer (0..10)month (grain)", -2.745715703988685),
("integer (0..10)hour (grain)", -3.1067290495260154),
("<number>\20010/\20491week (grain)", -3.8443279926567944),
("compose by multiplicationminute (grain)", -4.45046379622711),
("second", -3.6031659358399066),
("integer (0..10)day (grain)", -3.1067290495260154),
("integer (0..10)year (grain)", -3.7573166156671647),
("<number>\20010/\20491month (grain)", -3.469634543215384),
("integer (numeric)year (grain)", -2.3710222545472743),
("integer (0..10)second (grain)", -3.6031659358399066),
("day", -3.1067290495260154), ("year", -2.1646858215494458),
("integer (0..10)minute (grain)", -2.984126727433683),
("number suffix: \21313|\25342minute (grain)",
-4.855928904335275),
("hour", -3.1067290495260154),
("integer (0..10)week (grain)", -3.534173064352955),
("month", -2.008116760857906),
("integer (numeric)month (grain)", -3.3518515075590005),
("integer with consecutive unit modifiersminute (grain)",
-4.296313116399852),
("minute", -2.553343811341229)],
n = 246}}),
("\32769\26495\33410",
Classifier{okData =
ClassData{prior = 0.0, unseen = -1.3862943611198906,
likelihoods = HashMap.fromList [("", 0.0)], n = 2},
koData =
ClassData{prior = -infinity, unseen = -0.6931471805599453,
likelihoods = HashMap.fromList [], n = 0}}),
("\31709\28779\33410",
Classifier{okData =
ClassData{prior = 0.0, unseen = -1.6094379124341003,
likelihoods = HashMap.fromList [("", 0.0)], n = 3},
koData =
ClassData{prior = -infinity, unseen = -0.6931471805599453,
likelihoods = HashMap.fromList [], n = 0}}),
("<time-of-day> am|pm",
Classifier{okData =
ClassData{prior = -0.4353180712578455, unseen = -3.295836866004329,
likelihoods =
HashMap.fromList
[("hh:mm (time-of-day)", -0.9555114450274363),
("<integer> (latent time-of-day)", -2.159484249353372),
("hour", -2.159484249353372), ("minute", -0.9555114450274363)],
n = 11},
koData =
ClassData{prior = -1.041453874828161, unseen = -2.833213344056216,
likelihoods =
HashMap.fromList
[("<integer> (latent time-of-day)", -0.8266785731844679),
("hour", -0.8266785731844679)],
n = 6}}),
("one point 2",
Classifier{okData =
ClassData{prior = -infinity, unseen = -1.9459101490553135,
likelihoods = HashMap.fromList [], n = 0},
koData =
ClassData{prior = 0.0, unseen = -3.7612001156935624,
likelihoods =
HashMap.fromList
[("integer (0..10)integer (0..10)", -0.9650808960435872),
("integer (0..10)number suffix: \21313|\25342",
-1.9459101490553135),
("integer (0..10)integer with consecutive unit modifiers",
-1.3397743454849977),
("integer (0..10)<number>\20010/\20491", -2.639057329615259),
("integer (0..10)compose by multiplication",
-2.639057329615259),
("integer (0..10)half", -2.639057329615259)],
n = 36}}),
("intersect by \",\"",
Classifier{okData =
ClassData{prior = 0.0, unseen = -4.330733340286331,
likelihoods =
HashMap.fromList
[("daymonth", -2.2380465718564744),
("Sunday<named-month> <day-of-month>", -1.6094379124341003),
("SundayFebruary", -2.2380465718564744),
("dayday", -0.9501922835498364),
("Sundayintersect", -1.6094379124341003)],
n = 35},
koData =
ClassData{prior = -infinity, unseen = -1.791759469228055,
likelihoods = HashMap.fromList [], n = 0}}),
("\26837\26525\20027\26085",
Classifier{okData =
ClassData{prior = -0.6931471805599453, unseen = -1.791759469228055,
likelihoods = HashMap.fromList [("", 0.0)], n = 4},
koData =
ClassData{prior = -0.6931471805599453, unseen = -1.791759469228055,
likelihoods = HashMap.fromList [("", 0.0)], n = 4}}),
("\38463\33298\25289\33410",
Classifier{okData =
ClassData{prior = 0.0, unseen = -1.0986122886681098,
likelihoods = HashMap.fromList [("", 0.0)], n = 1},
koData =
ClassData{prior = -infinity, unseen = -0.6931471805599453,
likelihoods = HashMap.fromList [], n = 0}}),
("integer with consecutive unit modifiers",
Classifier{okData =
ClassData{prior = -5.715841383994864e-2,
unseen = -3.6109179126442243,
likelihoods =
HashMap.fromList
[("number suffix: \21313|\25342integer (0..10)",
-0.6931471805599453),
("integer (0..10)integer (0..10)", -0.6931471805599453)],
n = 34},
koData =
ClassData{prior = -2.890371757896165, unseen = -1.6094379124341003,
likelihoods =
HashMap.fromList
[("integer (0..10)integer (0..10)", -0.2876820724517809)],
n = 2}}),
("second (grain)",
Classifier{okData =
ClassData{prior = 0.0, unseen = -2.70805020110221,
likelihoods = HashMap.fromList [("", 0.0)], n = 13},
koData =
ClassData{prior = -infinity, unseen = -0.6931471805599453,
likelihoods = HashMap.fromList [], n = 0}}),
("\19996\27491\25945\32822\31267\21463\38590\26085",
Classifier{okData =
ClassData{prior = -0.3364722366212129,
unseen = -1.9459101490553135,
likelihoods = HashMap.fromList [("", 0.0)], n = 5},
koData =
ClassData{prior = -1.252762968495368, unseen = -1.3862943611198906,
likelihoods = HashMap.fromList [("", 0.0)], n = 2}}),
("\25289\25746\36335\22307\21608\20845",
Classifier{okData =
ClassData{prior = 0.0, unseen = -1.3862943611198906,
likelihoods = HashMap.fromList [("", 0.0)], n = 2},
koData =
ClassData{prior = -infinity, unseen = -0.6931471805599453,
likelihoods = HashMap.fromList [], n = 0}}),
("<duration> ago",
Classifier{okData =
ClassData{prior = -infinity, unseen = -2.1972245773362196,
likelihoods = HashMap.fromList [], n = 0},
koData =
ClassData{prior = 0.0, unseen = -3.891820298110627,
likelihoods =
HashMap.fromList
[("week", -2.2617630984737906), ("second", -2.772588722239781),
("day", -2.2617630984737906), ("year", -2.772588722239781),
("<integer> <unit-of-duration>", -0.8266785731844679),
("hour", -2.2617630984737906), ("month", -2.772588722239781),
("minute", -2.772588722239781)],
n = 20}}),
("\22307\35806\33410",
Classifier{okData =
ClassData{prior = 0.0, unseen = -1.791759469228055,
likelihoods = HashMap.fromList [("", 0.0)], n = 4},
koData =
ClassData{prior = -infinity, unseen = -0.6931471805599453,
likelihoods = HashMap.fromList [], n = 0}}),
("last <time>",
Classifier{okData =
ClassData{prior = 0.0, unseen = -3.332204510175204,
likelihoods =
HashMap.fromList
[("day", -0.7308875085427924), ("Sunday", -1.2163953243244932),
("Tuesday", -1.5040773967762742)],
n = 12},
koData =
ClassData{prior = -infinity, unseen = -1.3862943611198906,
likelihoods = HashMap.fromList [], n = 0}}),
("\20234\26031\20848\26032\24180",
Classifier{okData =
ClassData{prior = 0.0, unseen = -1.6094379124341003,
likelihoods = HashMap.fromList [("", 0.0)], n = 3},
koData =
ClassData{prior = -infinity, unseen = -0.6931471805599453,
likelihoods = HashMap.fromList [], n = 0}}),
("March",
Classifier{okData =
ClassData{prior = 0.0, unseen = -2.833213344056216,
likelihoods = HashMap.fromList [("", 0.0)], n = 15},
koData =
ClassData{prior = -infinity, unseen = -0.6931471805599453,
likelihoods = HashMap.fromList [], n = 0}}),
("\24320\25995\33410",
Classifier{okData =
ClassData{prior = 0.0, unseen = -1.9459101490553135,
likelihoods = HashMap.fromList [("", 0.0)], n = 5},
koData =
ClassData{prior = -infinity, unseen = -0.6931471805599453,
likelihoods = HashMap.fromList [], n = 0}}),
("the day after tomorrow",
Classifier{okData =
ClassData{prior = -0.5108256237659907,
unseen = -1.6094379124341003,
likelihoods = HashMap.fromList [("", 0.0)], n = 3},
koData =
ClassData{prior = -0.916290731874155, unseen = -1.3862943611198906,
likelihoods = HashMap.fromList [("", 0.0)], n = 2}}),
("\22307\21608\20845",
Classifier{okData =
ClassData{prior = -0.6931471805599453,
unseen = -2.0794415416798357,
likelihoods = HashMap.fromList [("", 0.0)], n = 6},
koData =
ClassData{prior = -0.6931471805599453,
unseen = -2.0794415416798357,
likelihoods = HashMap.fromList [("", 0.0)], n = 6}}),
("\22919\22899\33410",
Classifier{okData =
ClassData{prior = 0.0, unseen = -1.3862943611198906,
likelihoods = HashMap.fromList [("", 0.0)], n = 2},
koData =
ClassData{prior = -infinity, unseen = -0.6931471805599453,
likelihoods = HashMap.fromList [], n = 0}}),
("\20840\29699\38738\24180\26381\21153\26085",
Classifier{okData =
ClassData{prior = 0.0, unseen = -1.3862943611198906,
likelihoods = HashMap.fromList [("", 0.0)], n = 2},
koData =
ClassData{prior = -infinity, unseen = -0.6931471805599453,
likelihoods = HashMap.fromList [], n = 0}}),
("\27431\21335\33410",
Classifier{okData =
ClassData{prior = 0.0, unseen = -1.3862943611198906,
likelihoods = HashMap.fromList [("", 0.0)], n = 2},
koData =
ClassData{prior = -infinity, unseen = -0.6931471805599453,
likelihoods = HashMap.fromList [], n = 0}}),
("\20061\22812\33410",
Classifier{okData =
ClassData{prior = 0.0, unseen = -1.6094379124341003,
likelihoods = HashMap.fromList [("", 0.0)], n = 3},
koData =
ClassData{prior = -infinity, unseen = -0.6931471805599453,
likelihoods = HashMap.fromList [], n = 0}}),
("next <time>",
Classifier{okData =
ClassData{prior = -0.6931471805599453, unseen = -2.639057329615259,
likelihoods =
HashMap.fromList
[("day", -0.7731898882334817), ("Tuesday", -0.7731898882334817)],
n = 5},
koData =
ClassData{prior = -0.6931471805599453, unseen = -2.639057329615259,
likelihoods =
HashMap.fromList
[("Wednesday", -0.7731898882334817),
("day", -0.7731898882334817)],
n = 5}}),
("last <cycle>",
Classifier{okData =
ClassData{prior = -0.8472978603872037,
unseen = -3.2188758248682006,
likelihoods =
HashMap.fromList
[("week", -1.3862943611198906),
("month (grain)", -1.791759469228055),
("year (grain)", -2.4849066497880004),
("week (grain)", -1.3862943611198906),
("year", -2.4849066497880004), ("month", -1.791759469228055)],
n = 9},
koData =
ClassData{prior = -0.5596157879354228,
unseen = -3.4339872044851463,
likelihoods =
HashMap.fromList
[("week", -0.8362480242006186),
("week (grain)", -0.8362480242006186)],
n = 12}}),
("\20197\33394\21015\29420\31435\26085",
Classifier{okData =
ClassData{prior = 0.0, unseen = -1.791759469228055,
likelihoods = HashMap.fromList [("", 0.0)], n = 4},
koData =
ClassData{prior = -infinity, unseen = -0.6931471805599453,
likelihoods = HashMap.fromList [], n = 0}}),
("next n <cycle>",
Classifier{okData =
ClassData{prior = 0.0, unseen = -4.605170185988091,
likelihoods =
HashMap.fromList
[("week", -2.3978952727983707),
("integer (0..10)hour (grain)", -2.3978952727983707),
("<number>\20010/\20491week (grain)", -2.9856819377004897),
("second", -2.803360380906535),
("integer (0..10)day (grain)", -2.515678308454754),
("integer (0..10)year (grain)", -3.2088254890146994),
("<number>\20010/\20491month (grain)", -2.803360380906535),
("integer (0..10)second (grain)", -2.803360380906535),
("day", -2.515678308454754), ("year", -3.2088254890146994),
("integer (0..10)minute (grain)", -2.649209701079277),
("hour", -2.3978952727983707),
("integer (0..10)week (grain)", -2.9856819377004897),
("month", -2.803360380906535), ("minute", -2.649209701079277)],
n = 42},
koData =
ClassData{prior = -infinity, unseen = -2.772588722239781,
likelihoods = HashMap.fromList [], n = 0}}),
("\19975\22307\33410",
Classifier{okData =
ClassData{prior = 0.0, unseen = -1.3862943611198906,
likelihoods = HashMap.fromList [("", 0.0)], n = 2},
koData =
ClassData{prior = -infinity, unseen = -0.6931471805599453,
likelihoods = HashMap.fromList [], n = 0}}),
("\21476\23572\37030\33410",
Classifier{okData =
ClassData{prior = 0.0, unseen = -1.6094379124341003,
likelihoods = HashMap.fromList [("", 0.0)], n = 3},
koData =
ClassData{prior = -infinity, unseen = -0.6931471805599453,
likelihoods = HashMap.fromList [], n = 0}}),
("number of five minutes",
Classifier{okData =
ClassData{prior = -infinity, unseen = -0.6931471805599453,
likelihoods = HashMap.fromList [], n = 0},
koData =
ClassData{prior = 0.0, unseen = -1.3862943611198906,
likelihoods = HashMap.fromList [("integer (0..10)", 0.0)],
n = 2}}),
("\20799\31461\33410",
Classifier{okData =
ClassData{prior = 0.0, unseen = -1.9459101490553135,
likelihoods = HashMap.fromList [("", 0.0)], n = 5},
koData =
ClassData{prior = -infinity, unseen = -0.6931471805599453,
likelihoods = HashMap.fromList [], n = 0}}),
("Tuesday",
Classifier{okData =
ClassData{prior = -3.922071315328127e-2,
unseen = -3.295836866004329,
likelihoods = HashMap.fromList [("", 0.0)], n = 25},
koData =
ClassData{prior = -3.258096538021482, unseen = -1.0986122886681098,
likelihoods = HashMap.fromList [("", 0.0)], n = 1}}),
("\26149\33410",
Classifier{okData =
ClassData{prior = 0.0, unseen = -2.0794415416798357,
likelihoods = HashMap.fromList [("", 0.0)], n = 6},
koData =
ClassData{prior = -infinity, unseen = -0.6931471805599453,
likelihoods = HashMap.fromList [], n = 0}}),
("number.number minutes",
Classifier{okData =
ClassData{prior = -infinity, unseen = -1.3862943611198906,
likelihoods = HashMap.fromList [], n = 0},
koData =
ClassData{prior = 0.0, unseen = -2.772588722239781,
likelihoods =
HashMap.fromList
[("integer (0..10)integer with consecutive unit modifiersminute (grain)",
-1.0986122886681098),
("integer (0..10)compose by multiplicationminute (grain)",
-1.6094379124341003),
("minute", -0.7621400520468967)],
n = 6}}),
("\32822\31267\21463\38590\26085",
Classifier{okData =
ClassData{prior = -1.0296194171811581,
unseen = -1.9459101490553135,
likelihoods = HashMap.fromList [("", 0.0)], n = 5},
koData =
ClassData{prior = -0.4418327522790392,
unseen = -2.3978952727983707,
likelihoods = HashMap.fromList [("", 0.0)], n = 9}}),
("<named-month> <day-of-month>",
Classifier{okData =
ClassData{prior = 0.0, unseen = -4.762173934797756,
likelihoods =
HashMap.fromList
[("Marchinteger (0..10)", -2.5563656137701454),
("Marchinteger (numeric)", -3.144152278672264),
("Aprilinteger (numeric)", -3.654977902438255),
("Februaryinteger (0..10)", -2.6741486494265287),
("Februarynumber suffix: \21313|\25342", -2.6741486494265287),
("month", -0.7462570058738938),
("Februaryinteger (numeric)", -2.5563656137701454),
("Februaryinteger with consecutive unit modifiers",
-1.8091512119399242)],
n = 54},
koData =
ClassData{prior = -infinity, unseen = -2.1972245773362196,
likelihoods = HashMap.fromList [], n = 0}}),
("\21171\21160\33410",
Classifier{okData =
ClassData{prior = 0.0, unseen = -1.6094379124341003,
likelihoods = HashMap.fromList [("", 0.0)], n = 3},
koData =
ClassData{prior = -infinity, unseen = -0.6931471805599453,
likelihoods = HashMap.fromList [], n = 0}}),
("\22797\27963\33410\26143\26399\19968",
Classifier{okData =
ClassData{prior = -0.6931471805599453,
unseen = -1.3862943611198906,
likelihoods = HashMap.fromList [("", 0.0)], n = 2},
koData =
ClassData{prior = -0.6931471805599453,
unseen = -1.3862943611198906,
likelihoods = HashMap.fromList [("", 0.0)], n = 2}}),
("number suffix: \19975|\33836",
Classifier{okData =
ClassData{prior = -infinity, unseen = -0.6931471805599453,
likelihoods = HashMap.fromList [], n = 0},
koData =
ClassData{prior = 0.0, unseen = -1.6094379124341003,
likelihoods = HashMap.fromList [("", 0.0)], n = 3}}),
("\22823\25995\39318\26085",
Classifier{okData =
ClassData{prior = 0.0, unseen = -1.9459101490553135,
likelihoods = HashMap.fromList [("", 0.0)], n = 5},
koData =
ClassData{prior = -infinity, unseen = -0.6931471805599453,
likelihoods = HashMap.fromList [], n = 0}}),
("half",
Classifier{okData =
ClassData{prior = -infinity, unseen = -0.6931471805599453,
likelihoods = HashMap.fromList [], n = 0},
koData =
ClassData{prior = 0.0, unseen = -1.9459101490553135,
likelihoods = HashMap.fromList [("", 0.0)], n = 5}}),
("two days after tomorrow",
Classifier{okData =
ClassData{prior = 0.0, unseen = -1.3862943611198906,
likelihoods = HashMap.fromList [("", 0.0)], n = 2},
koData =
ClassData{prior = -infinity, unseen = -0.6931471805599453,
likelihoods = HashMap.fromList [], n = 0}}),
("integer (0..10)",
Classifier{okData =
ClassData{prior = -0.5957987257888164, unseen = -5.407171771460119,
likelihoods = HashMap.fromList [("", 0.0)], n = 221},
koData =
ClassData{prior = -0.8010045764163588, unseen = -5.204006687076795,
likelihoods = HashMap.fromList [("", 0.0)], n = 180}}),
("last n <cycle>",
Classifier{okData =
ClassData{prior = 0.0, unseen = -4.605170185988091,
likelihoods =
HashMap.fromList
[("week", -2.9856819377004897),
("integer (0..10)month (grain)", -3.4965075614664802),
("integer (0..10)hour (grain)", -2.3978952727983707),
("second", -2.649209701079277),
("integer (0..10)day (grain)", -2.9856819377004897),
("integer (0..10)year (grain)", -3.4965075614664802),
("<number>\20010/\20491month (grain)", -2.3978952727983707),
("integer (0..10)second (grain)", -2.649209701079277),
("day", -2.9856819377004897), ("year", -3.4965075614664802),
("integer (0..10)minute (grain)", -2.3978952727983707),
("hour", -2.3978952727983707),
("integer (0..10)week (grain)", -2.9856819377004897),
("month", -2.1972245773362196),
("minute", -2.3978952727983707)],
n = 42},
koData =
ClassData{prior = -infinity, unseen = -2.772588722239781,
likelihoods = HashMap.fromList [], n = 0}}),
("last <duration>",
Classifier{okData =
ClassData{prior = -infinity, unseen = -2.0794415416798357,
likelihoods = HashMap.fromList [], n = 0},
koData =
ClassData{prior = 0.0, unseen = -3.912023005428146,
likelihoods =
HashMap.fromList
[("week", -2.2823823856765264), ("second", -2.505525936990736),
("day", -2.793208009442517),
("<integer> <unit-of-duration>", -0.8007778447523107),
("hour", -2.2823823856765264), ("month", -2.2823823856765264),
("minute", -2.2823823856765264)],
n = 21}}),
("ordinal (digits)",
Classifier{okData =
ClassData{prior = -1.252762968495368, unseen = -1.9459101490553135,
likelihoods =
HashMap.fromList [("<number>\20010/\20491", -0.1823215567939546)],
n = 4},
koData =
ClassData{prior = -0.3364722366212129,
unseen = -2.5649493574615367,
likelihoods =
HashMap.fromList [("integer (0..10)", -8.701137698962981e-2)],
n = 10}}),
("n <cycle> last",
Classifier{okData =
ClassData{prior = 0.0, unseen = -4.02535169073515,
likelihoods =
HashMap.fromList
[("week", -2.3978952727983707),
("integer (0..10)hour (grain)", -2.3978952727983707),
("<number>\20010/\20491week (grain)", -2.908720896564361),
("second", -2.908720896564361),
("integer (0..10)day (grain)", -2.3978952727983707),
("integer (0..10)year (grain)", -2.908720896564361),
("<number>\20010/\20491month (grain)", -2.908720896564361),
("integer (0..10)second (grain)", -2.908720896564361),
("day", -2.3978952727983707), ("year", -2.908720896564361),
("integer (0..10)minute (grain)", -2.908720896564361),
("hour", -2.3978952727983707),
("integer (0..10)week (grain)", -2.908720896564361),
("month", -2.908720896564361), ("minute", -2.908720896564361)],
n = 20},
koData =
ClassData{prior = -infinity, unseen = -2.772588722239781,
likelihoods = HashMap.fromList [], n = 0}}),
("\24527\24724\33410",
Classifier{okData =
ClassData{prior = 0.0, unseen = -1.6094379124341003,
likelihoods = HashMap.fromList [("", 0.0)], n = 3},
koData =
ClassData{prior = -infinity, unseen = -0.6931471805599453,
likelihoods = HashMap.fromList [], n = 0}}),
("morning",
Classifier{okData =
ClassData{prior = 0.0, unseen = -2.890371757896165,
likelihoods = HashMap.fromList [("", 0.0)], n = 16},
koData =
ClassData{prior = -infinity, unseen = -0.6931471805599453,
likelihoods = HashMap.fromList [], n = 0}}),
("week-end",
Classifier{okData =
ClassData{prior = 0.0, unseen = -1.3862943611198906,
likelihoods = HashMap.fromList [("", 0.0)], n = 2},
koData =
ClassData{prior = -infinity, unseen = -0.6931471805599453,
likelihoods = HashMap.fromList [], n = 0}}),
("number suffix: \21313|\25342",
Classifier{okData =
ClassData{prior = -0.1590646946296874,
unseen = -3.4339872044851463,
likelihoods = HashMap.fromList [("", 0.0)], n = 29},
koData =
ClassData{prior = -1.916922612182061, unseen = -1.9459101490553135,
likelihoods = HashMap.fromList [("", 0.0)], n = 5}}),
("\22320\29699\19968\23567\26102",
Classifier{okData =
ClassData{prior = 0.0, unseen = -1.3862943611198906,
likelihoods = HashMap.fromList [("", 0.0)], n = 2},
koData =
ClassData{prior = -infinity, unseen = -0.6931471805599453,
likelihoods = HashMap.fromList [], n = 0}}),
("day (grain)",
Classifier{okData =
ClassData{prior = -0.38299225225610584,
unseen = -2.833213344056216,
likelihoods = HashMap.fromList [("", 0.0)], n = 15},
koData =
ClassData{prior = -1.1451323043030026,
unseen = -2.1972245773362196,
likelihoods = HashMap.fromList [("", 0.0)], n = 7}}),
("\22307\32426\33410",
Classifier{okData =
ClassData{prior = 0.0, unseen = -1.3862943611198906,
likelihoods = HashMap.fromList [("", 0.0)], n = 2},
koData =
ClassData{prior = -infinity, unseen = -0.6931471805599453,
likelihoods = HashMap.fromList [], n = 0}}),
("\22797\27963\33410",
Classifier{okData =
ClassData{prior = -1.0986122886681098,
unseen = -1.6094379124341003,
likelihoods = HashMap.fromList [("", 0.0)], n = 3},
koData =
ClassData{prior = -0.40546510810816444,
unseen = -2.0794415416798357,
likelihoods = HashMap.fromList [("", 0.0)], n = 6}}),
("<number>\20010/\20491",
Classifier{okData =
ClassData{prior = -0.2006706954621511,
unseen = -3.4011973816621555,
likelihoods =
HashMap.fromList [("integer (0..10)", -3.509131981127006e-2)],
n = 27},
koData =
ClassData{prior = -1.7047480922384253,
unseen = -2.1972245773362196,
likelihoods =
HashMap.fromList
[("one point 2", -0.9808292530117262),
("integer (0..10)", -0.4700036292457356)],
n = 6}}),
("compose by multiplication",
Classifier{okData =
ClassData{prior = -0.3364722366212129,
unseen = -2.0794415416798357,
likelihoods =
HashMap.fromList
[("integer (0..10)number suffix: \21313|\25342",
-0.15415067982725836)],
n = 5},
koData =
ClassData{prior = -1.252762968495368, unseen = -1.6094379124341003,
likelihoods =
HashMap.fromList
[("one point 2number suffix: \21313|\25342",
-0.2876820724517809)],
n = 2}}),
("\24773\20154\33410",
Classifier{okData =
ClassData{prior = 0.0, unseen = -1.9459101490553135,
likelihoods = HashMap.fromList [("", 0.0)], n = 5},
koData =
ClassData{prior = -infinity, unseen = -0.6931471805599453,
likelihoods = HashMap.fromList [], n = 0}}),
("\20116\26092\33410",
Classifier{okData =
ClassData{prior = 0.0, unseen = -1.6094379124341003,
likelihoods = HashMap.fromList [("", 0.0)], n = 3},
koData =
ClassData{prior = -infinity, unseen = -0.6931471805599453,
likelihoods = HashMap.fromList [], n = 0}}),
("\31070\22307\26143\26399\22235",
Classifier{okData =
ClassData{prior = 0.0, unseen = -1.9459101490553135,
likelihoods = HashMap.fromList [("", 0.0)], n = 5},
koData =
ClassData{prior = -infinity, unseen = -0.6931471805599453,
likelihoods = HashMap.fromList [], n = 0}}),
("\25995\26376",
Classifier{okData =
ClassData{prior = 0.0, unseen = -1.6094379124341003,
likelihoods = HashMap.fromList [("", 0.0)], n = 3},
koData =
ClassData{prior = -infinity, unseen = -0.6931471805599453,
likelihoods = HashMap.fromList [], n = 0}}),
("\27861\20196\20043\22812",
Classifier{okData =
ClassData{prior = 0.0, unseen = -1.3862943611198906,
likelihoods = HashMap.fromList [("", 0.0)], n = 2},
koData =
ClassData{prior = -infinity, unseen = -0.6931471805599453,
likelihoods = HashMap.fromList [], n = 0}}),
("this <time>",
Classifier{okData =
ClassData{prior = 0.0, unseen = -4.07753744390572,
likelihoods =
HashMap.fromList
[("Wednesday", -1.9810014688665833),
("Monday", -1.9810014688665833), ("day", -0.8415671856782186),
("hour", -2.9618307218783095), ("Tuesday", -1.6625477377480489),
("week-end", -2.9618307218783095)],
n = 26},
koData =
ClassData{prior = -infinity, unseen = -1.9459101490553135,
likelihoods = HashMap.fromList [], n = 0}})]
| null |
https://raw.githubusercontent.com/facebook/duckling/72f45e8e2c7385f41f2f8b1f063e7b5daa6dca94/Duckling/Ranking/Classifiers/ZH_MO.hs
|
haskell
|
All rights reserved.
This source code is licensed under the BSD-style license found in the
LICENSE file in the root directory of this source tree. An additional grant
---------------------------------------------------------------
Auto-generated by regenClassifiers
DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
---------------------------------------------------------------
# LANGUAGE OverloadedStrings #
|
Copyright ( c ) 2016 - present , Facebook , Inc.
of patent rights can be found in the PATENTS file in the same directory .
@generated
module Duckling.Ranking.Classifiers.ZH_MO (classifiers) where
import Data.String
import Prelude
import qualified Data.HashMap.Strict as HashMap
import Duckling.Ranking.Types
classifiers :: Classifiers
classifiers
= HashMap.fromList
[("\25490\28783\33410",
Classifier{okData =
ClassData{prior = 0.0, unseen = -1.6094379124341003,
likelihoods = HashMap.fromList [("", 0.0)], n = 3},
koData =
ClassData{prior = -infinity, unseen = -0.6931471805599453,
likelihoods = HashMap.fromList [], n = 0}}),
("\19996\27491\25945\26837\26525\20027\26085",
Classifier{okData =
ClassData{prior = 0.0, unseen = -1.791759469228055,
likelihoods = HashMap.fromList [("", 0.0)], n = 4},
koData =
ClassData{prior = -infinity, unseen = -0.6931471805599453,
likelihoods = HashMap.fromList [], n = 0}}),
("number of 5 minutes after|past <integer> (hour-of-day)",
Classifier{okData =
ClassData{prior = -0.2231435513142097, unseen = -3.332204510175204,
likelihoods =
HashMap.fromList
[("<integer> (latent time-of-day)integer (0..10)",
-0.8979415932059586),
("hour", -0.7308875085427924),
("<integer> (latent time-of-day)<number>\20010/\20491",
-2.1972245773362196)],
n = 12},
koData =
ClassData{prior = -1.6094379124341003,
unseen = -2.3025850929940455,
likelihoods =
HashMap.fromList
[("<integer> (latent time-of-day)integer (0..10)",
-0.8109302162163288),
("hour", -0.8109302162163288)],
n = 3}}),
("\21360\24230\20016\25910\33410\31532\22235\22825",
Classifier{okData =
ClassData{prior = 0.0, unseen = -1.6094379124341003,
likelihoods = HashMap.fromList [("", 0.0)], n = 3},
koData =
ClassData{prior = -infinity, unseen = -0.6931471805599453,
likelihoods = HashMap.fromList [], n = 0}}),
("<time> timezone",
Classifier{okData =
ClassData{prior = 0.0, unseen = -1.6094379124341003,
likelihoods =
HashMap.fromList
[("<time-of-day> am|pm", -0.6931471805599453),
("hour", -0.6931471805599453)],
n = 1},
koData =
ClassData{prior = -infinity, unseen = -1.0986122886681098,
likelihoods = HashMap.fromList [], n = 0}}),
("Thursday",
Classifier{okData =
ClassData{prior = -0.4700036292457356,
unseen = -1.9459101490553135,
likelihoods = HashMap.fromList [("", 0.0)], n = 5},
koData =
ClassData{prior = -0.9808292530117262,
unseen = -1.6094379124341003,
likelihoods = HashMap.fromList [("", 0.0)], n = 3}}),
("integer (numeric)",
Classifier{okData =
ClassData{prior = -0.6931471805599453, unseen = -4.23410650459726,
likelihoods = HashMap.fromList [("", 0.0)], n = 67},
koData =
ClassData{prior = -0.6931471805599453, unseen = -4.23410650459726,
likelihoods = HashMap.fromList [("", 0.0)], n = 67}}),
("\21355\22622\33410",
Classifier{okData =
ClassData{prior = 0.0, unseen = -1.9459101490553135,
likelihoods = HashMap.fromList [("", 0.0)], n = 5},
koData =
ClassData{prior = -infinity, unseen = -0.6931471805599453,
likelihoods = HashMap.fromList [], n = 0}}),
("the day before yesterday",
Classifier{okData =
ClassData{prior = 0.0, unseen = -1.3862943611198906,
likelihoods = HashMap.fromList [("", 0.0)], n = 2},
koData =
ClassData{prior = -infinity, unseen = -0.6931471805599453,
likelihoods = HashMap.fromList [], n = 0}}),
("\22269\38469\28040\36153\32773\26435\30410\26085",
Classifier{okData =
ClassData{prior = 0.0, unseen = -1.6094379124341003,
likelihoods = HashMap.fromList [("", 0.0)], n = 3},
koData =
ClassData{prior = -infinity, unseen = -0.6931471805599453,
likelihoods = HashMap.fromList [], n = 0}}),
("\24314\20891\33410",
Classifier{okData =
ClassData{prior = 0.0, unseen = -1.6094379124341003,
likelihoods = HashMap.fromList [("", 0.0)], n = 3},
koData =
ClassData{prior = -infinity, unseen = -0.6931471805599453,
likelihoods = HashMap.fromList [], n = 0}}),
("\29369\22826\26032\24180",
Classifier{okData =
ClassData{prior = 0.0, unseen = -1.3862943611198906,
likelihoods = HashMap.fromList [("", 0.0)], n = 2},
koData =
ClassData{prior = -infinity, unseen = -0.6931471805599453,
likelihoods = HashMap.fromList [], n = 0}}),
("today",
Classifier{okData =
ClassData{prior = 0.0, unseen = -1.3862943611198906,
likelihoods = HashMap.fromList [("", 0.0)], n = 2},
koData =
ClassData{prior = -infinity, unseen = -0.6931471805599453,
likelihoods = HashMap.fromList [], n = 0}}),
("mm/dd",
Classifier{okData =
ClassData{prior = -1.6094379124341003,
unseen = -1.0986122886681098,
likelihoods = HashMap.fromList [("", 0.0)], n = 1},
koData =
ClassData{prior = -0.2231435513142097, unseen = -1.791759469228055,
likelihoods = HashMap.fromList [("", 0.0)], n = 4}}),
("absorption of , after named day",
Classifier{okData =
ClassData{prior = 0.0, unseen = -2.833213344056216,
likelihoods =
HashMap.fromList
[("day", -0.6931471805599453), ("Sunday", -0.6931471805599453)],
n = 7},
koData =
ClassData{prior = -infinity, unseen = -1.0986122886681098,
likelihoods = HashMap.fromList [], n = 0}}),
("September",
Classifier{okData =
ClassData{prior = 0.0, unseen = -1.3862943611198906,
likelihoods = HashMap.fromList [("", 0.0)], n = 2},
koData =
ClassData{prior = -infinity, unseen = -0.6931471805599453,
likelihoods = HashMap.fromList [], n = 0}}),
("tonight",
Classifier{okData =
ClassData{prior = 0.0, unseen = -1.791759469228055,
likelihoods = HashMap.fromList [("", 0.0)], n = 4},
koData =
ClassData{prior = -infinity, unseen = -0.6931471805599453,
likelihoods = HashMap.fromList [], n = 0}}),
("October",
Classifier{okData =
ClassData{prior = 0.0, unseen = -1.791759469228055,
likelihoods = HashMap.fromList [("", 0.0)], n = 4},
koData =
ClassData{prior = -infinity, unseen = -0.6931471805599453,
likelihoods = HashMap.fromList [], n = 0}}),
("month (grain)",
Classifier{okData =
ClassData{prior = -0.963437510299857, unseen = -3.4339872044851463,
likelihoods = HashMap.fromList [("", 0.0)], n = 29},
koData =
ClassData{prior = -0.48058573857627246,
unseen = -3.891820298110627,
likelihoods = HashMap.fromList [("", 0.0)], n = 47}}),
("<time-of-day> o'clock",
Classifier{okData =
ClassData{prior = -1.466337068793427, unseen = -3.044522437723423,
likelihoods =
HashMap.fromList
[("<integer> (latent time-of-day)", -0.6931471805599453),
("hour", -0.6931471805599453)],
n = 9},
koData =
ClassData{prior = -0.262364264467491, unseen = -4.143134726391533,
likelihoods =
HashMap.fromList
[("<integer> (latent time-of-day)", -0.6931471805599453),
("hour", -0.6931471805599453)],
n = 30}}),
("national day",
Classifier{okData =
ClassData{prior = -0.2231435513142097,
unseen = -2.3025850929940455,
likelihoods = HashMap.fromList [("", 0.0)], n = 8},
koData =
ClassData{prior = -1.6094379124341003,
unseen = -1.3862943611198906,
likelihoods = HashMap.fromList [("", 0.0)], n = 2}}),
("integer (20,30,40)",
Classifier{okData =
ClassData{prior = 0.0, unseen = -1.3862943611198906,
likelihoods = HashMap.fromList [("", 0.0)], n = 2},
koData =
ClassData{prior = -infinity, unseen = -0.6931471805599453,
likelihoods = HashMap.fromList [], n = 0}}),
("Wednesday",
Classifier{okData =
ClassData{prior = -5.715841383994864e-2,
unseen = -2.9444389791664407,
likelihoods = HashMap.fromList [("", 0.0)], n = 17},
koData =
ClassData{prior = -2.890371757896165, unseen = -1.0986122886681098,
likelihoods = HashMap.fromList [("", 0.0)], n = 1}}),
("\21360\24230\20016\25910\33410\31532\19977\22825",
Classifier{okData =
ClassData{prior = 0.0, unseen = -1.6094379124341003,
likelihoods = HashMap.fromList [("", 0.0)], n = 3},
koData =
ClassData{prior = -infinity, unseen = -0.6931471805599453,
likelihoods = HashMap.fromList [], n = 0}}),
("hour (grain)",
Classifier{okData =
ClassData{prior = -9.53101798043249e-2,
unseen = -3.0910424533583156,
likelihoods = HashMap.fromList [("", 0.0)], n = 20},
koData =
ClassData{prior = -2.3978952727983707,
unseen = -1.3862943611198906,
likelihoods = HashMap.fromList [("", 0.0)], n = 2}}),
("\22307\20250\33410",
Classifier{okData =
ClassData{prior = 0.0, unseen = -1.3862943611198906,
likelihoods = HashMap.fromList [("", 0.0)], n = 2},
koData =
ClassData{prior = -infinity, unseen = -0.6931471805599453,
likelihoods = HashMap.fromList [], n = 0}}),
("\20803\26086",
Classifier{okData =
ClassData{prior = -1.0986122886681098, unseen = -1.791759469228055,
likelihoods = HashMap.fromList [("", 0.0)], n = 4},
koData =
ClassData{prior = -0.40546510810816444,
unseen = -2.3025850929940455,
likelihoods = HashMap.fromList [("", 0.0)], n = 8}}),
("\32654\22269\29420\31435\26085",
Classifier{okData =
ClassData{prior = -infinity, unseen = -0.6931471805599453,
likelihoods = HashMap.fromList [], n = 0},
koData =
ClassData{prior = 0.0, unseen = -1.3862943611198906,
likelihoods = HashMap.fromList [("", 0.0)], n = 2}}),
("intersect",
Classifier{okData =
ClassData{prior = -5.694137640013845e-2,
unseen = -6.329720905522696,
likelihoods =
HashMap.fromList
[("\20799\31461\33410<part-of-day> <dim time>",
-4.718498871295094),
("year (numeric with year symbol)\20809\26126\33410",
-4.248495242049359),
("xxxx year<named-month> <day-of-month>", -4.941642422609305),
("daymonth", -4.248495242049359),
("monthday", -1.9459101490553135),
("next yearSeptember", -5.2293244950610855),
("year (numeric with year symbol)\25995\26376",
-4.941642422609305),
("year (numeric with year symbol)\20061\22812\33410",
-4.941642422609305),
("year (numeric with year symbol)February", -4.718498871295094),
("xxxx yearintersect", -4.941642422609305),
("March<time> <day-of-month>", -3.7629874262676584),
("year (numeric with year symbol)<named-month> <day-of-month>",
-3.494723439672979),
("monthhour", -3.7629874262676584),
("year (numeric with year symbol)\22320\29699\19968\23567\26102",
-5.2293244950610855),
("year (numeric with year symbol)April", -5.2293244950610855),
("dayday", -2.284885515894645),
("hourhour", -4.718498871295094),
("xxxx yearFebruary", -5.634789603169249),
("year (numeric with year symbol)March", -4.1307122063929755),
("February<dim time> <part-of-day>", -3.7629874262676584),
("hourminute", -4.718498871295094),
("April<time> <day-of-month>", -5.2293244950610855),
("February<time> <day-of-month>", -2.614364717024887),
("absorption of , after named day<named-month> <day-of-month>",
-3.619886582626985),
("year (numeric with year symbol)\22823\25995\26399",
-4.941642422609305),
("this <cycle><time> <day-of-month>", -4.941642422609305),
("year (numeric with year symbol)\22235\26092\33410",
-5.2293244950610855),
("yearmonth", -3.332204510175204),
("year (numeric with year symbol)\20303\26842\33410",
-5.2293244950610855),
("dayminute", -4.718498871295094),
("next <cycle>September", -5.634789603169249),
("intersect by \",\"<time> <day-of-month>", -3.619886582626985),
("xxxx yearMarch", -5.634789603169249),
("absorption of , after named dayintersect",
-3.619886582626985),
("intersect<time> <day-of-month>", -2.8015762591130335),
("next <cycle><time> <day-of-month>", -4.941642422609305),
("tonight<time-of-day> o'clock", -4.718498871295094),
("year (numeric with year symbol)intersect",
-3.494723439672979),
("yearday", -2.0794415416798357),
("absorption of , after named dayFebruary", -4.248495242049359),
("year (numeric with year symbol)\19971\19971\33410",
-4.248495242049359),
("year (numeric with year symbol)\36926\36234\33410",
-5.2293244950610855),
("year (numeric with year symbol)\29369\22826\26032\24180",
-5.2293244950610855),
("yearminute", -5.2293244950610855),
("<dim time> <part-of-day>relative (10-59) minutes after|past <integer> (hour-of-day)",
-4.718498871295094)],
n = 256},
koData =
ClassData{prior = -2.894068619777491, unseen = -4.3694478524670215,
likelihoods =
HashMap.fromList
[("\20799\31461\33410<part-of-day> <dim time>",
-2.159484249353372),
("dayhour", -2.7472709142554916),
("year (numeric with year symbol)Sunday", -3.6635616461296463),
("<dim time> <part-of-day><time-of-day> o'clock",
-3.258096538021482),
("hourhour", -3.258096538021482),
("hourminute", -2.7472709142554916),
("dayminute", -2.7472709142554916),
("yearday", -3.6635616461296463),
("<dim time> <part-of-day>relative (10-59) minutes after|past <integer> (hour-of-day)",
-2.7472709142554916)],
n = 15}}),
("half after|past <integer> (hour-of-day)",
Classifier{okData =
ClassData{prior = 0.0, unseen = -2.5649493574615367,
likelihoods =
HashMap.fromList
[("<integer> (latent time-of-day)", -0.6931471805599453),
("hour", -0.6931471805599453)],
n = 5},
koData =
ClassData{prior = -infinity, unseen = -1.0986122886681098,
likelihoods = HashMap.fromList [], n = 0}}),
("\20399\20029\33410",
Classifier{okData =
ClassData{prior = 0.0, unseen = -2.3978952727983707,
likelihoods = HashMap.fromList [("", 0.0)], n = 9},
koData =
ClassData{prior = -infinity, unseen = -0.6931471805599453,
likelihoods = HashMap.fromList [], n = 0}}),
("year (grain)",
Classifier{okData =
ClassData{prior = -1.625967214385311, unseen = -2.639057329615259,
likelihoods = HashMap.fromList [("", 0.0)], n = 12},
koData =
ClassData{prior = -0.21905356606268464,
unseen = -3.9318256327243257,
likelihoods = HashMap.fromList [("", 0.0)], n = 49}}),
("Saturday",
Classifier{okData =
ClassData{prior = -0.8754687373538999,
unseen = -1.9459101490553135,
likelihoods = HashMap.fromList [("", 0.0)], n = 5},
koData =
ClassData{prior = -0.5389965007326869,
unseen = -2.1972245773362196,
likelihoods = HashMap.fromList [("", 0.0)], n = 7}}),
("next <cycle>",
Classifier{okData =
ClassData{prior = -0.570544858467613, unseen = -3.4965075614664802,
likelihoods =
HashMap.fromList
[("week", -1.6739764335716716),
("month (grain)", -1.5198257537444133),
("year (grain)", -2.367123614131617),
("week (grain)", -1.6739764335716716),
("year", -2.367123614131617), ("month", -1.5198257537444133)],
n = 13},
koData =
ClassData{prior = -0.832909122935104, unseen = -3.295836866004329,
likelihoods =
HashMap.fromList
[("week", -0.8602012652231115),
("week (grain)", -0.8602012652231115)],
n = 10}}),
("last year",
Classifier{okData =
ClassData{prior = 0.0, unseen = -1.3862943611198906,
likelihoods = HashMap.fromList [("", 0.0)], n = 2},
koData =
ClassData{prior = -infinity, unseen = -0.6931471805599453,
likelihoods = HashMap.fromList [], n = 0}}),
("\22307\27583\34987\27585\26085",
Classifier{okData =
ClassData{prior = 0.0, unseen = -1.6094379124341003,
likelihoods = HashMap.fromList [("", 0.0)], n = 3},
koData =
ClassData{prior = -infinity, unseen = -0.6931471805599453,
likelihoods = HashMap.fromList [], n = 0}}),
("this <day-of-week>",
Classifier{okData =
ClassData{prior = 0.0, unseen = -3.970291913552122,
likelihoods =
HashMap.fromList
[("Wednesday", -1.8718021769015913),
("Monday", -1.8718021769015913), ("day", -0.7323678937132265),
("Tuesday", -1.5533484457830569)],
n = 24},
koData =
ClassData{prior = -infinity, unseen = -1.6094379124341003,
likelihoods = HashMap.fromList [], n = 0}}),
("\35199\36203\25176\25289\33410",
Classifier{okData =
ClassData{prior = 0.0, unseen = -2.3025850929940455,
likelihoods = HashMap.fromList [("", 0.0)], n = 8},
koData =
ClassData{prior = -infinity, unseen = -0.6931471805599453,
likelihoods = HashMap.fromList [], n = 0}}),
("yyyy-mm-dd",
Classifier{okData =
ClassData{prior = 0.0, unseen = -1.3862943611198906,
likelihoods = HashMap.fromList [("", 0.0)], n = 2},
koData =
ClassData{prior = -infinity, unseen = -0.6931471805599453,
likelihoods = HashMap.fromList [], n = 0}}),
("mm/dd/yyyy",
Classifier{okData =
ClassData{prior = 0.0, unseen = -1.791759469228055,
likelihoods = HashMap.fromList [("", 0.0)], n = 4},
koData =
ClassData{prior = -infinity, unseen = -0.6931471805599453,
likelihoods = HashMap.fromList [], n = 0}}),
("\20811\21704\29305\26222\36838\33410",
Classifier{okData =
ClassData{prior = 0.0, unseen = -1.0986122886681098,
likelihoods = HashMap.fromList [("", 0.0)], n = 1},
koData =
ClassData{prior = -infinity, unseen = -0.6931471805599453,
likelihoods = HashMap.fromList [], n = 0}}),
("\21313\32988\33410",
Classifier{okData =
ClassData{prior = 0.0, unseen = -1.6094379124341003,
likelihoods = HashMap.fromList [("", 0.0)], n = 3},
koData =
ClassData{prior = -infinity, unseen = -0.6931471805599453,
likelihoods = HashMap.fromList [], n = 0}}),
("evening|night",
Classifier{okData =
ClassData{prior = 0.0, unseen = -1.791759469228055,
likelihoods = HashMap.fromList [("", 0.0)], n = 4},
koData =
ClassData{prior = -infinity, unseen = -0.6931471805599453,
likelihoods = HashMap.fromList [], n = 0}}),
("\20303\26842\33410",
Classifier{okData =
ClassData{prior = 0.0, unseen = -1.3862943611198906,
likelihoods = HashMap.fromList [("", 0.0)], n = 2},
koData =
ClassData{prior = -infinity, unseen = -0.6931471805599453,
likelihoods = HashMap.fromList [], n = 0}}),
("\22307\19977\19968\20027\26085",
Classifier{okData =
ClassData{prior = 0.0, unseen = -1.9459101490553135,
likelihoods = HashMap.fromList [("", 0.0)], n = 5},
koData =
ClassData{prior = -infinity, unseen = -0.6931471805599453,
likelihoods = HashMap.fromList [], n = 0}}),
("\30331\38660\33410",
Classifier{okData =
ClassData{prior = 0.0, unseen = -1.3862943611198906,
likelihoods = HashMap.fromList [("", 0.0)], n = 2},
koData =
ClassData{prior = -infinity, unseen = -0.6931471805599453,
likelihoods = HashMap.fromList [], n = 0}}),
("Monday",
Classifier{okData =
ClassData{prior = -0.15415067982725836,
unseen = -3.258096538021482,
likelihoods = HashMap.fromList [("", 0.0)], n = 24},
koData =
ClassData{prior = -1.9459101490553135, unseen = -1.791759469228055,
likelihoods = HashMap.fromList [("", 0.0)], n = 4}}),
("\19971\19971\33410",
Classifier{okData =
ClassData{prior = 0.0, unseen = -2.1972245773362196,
likelihoods = HashMap.fromList [("", 0.0)], n = 7},
koData =
ClassData{prior = -infinity, unseen = -0.6931471805599453,
likelihoods = HashMap.fromList [], n = 0}}),
("yesterday",
Classifier{okData =
ClassData{prior = 0.0, unseen = -1.6094379124341003,
likelihoods = HashMap.fromList [("", 0.0)], n = 3},
koData =
ClassData{prior = -infinity, unseen = -0.6931471805599453,
likelihoods = HashMap.fromList [], n = 0}}),
("next year",
Classifier{okData =
ClassData{prior = 0.0, unseen = -1.791759469228055,
likelihoods = HashMap.fromList [("", 0.0)], n = 4},
koData =
ClassData{prior = -infinity, unseen = -0.6931471805599453,
likelihoods = HashMap.fromList [], n = 0}}),
("<time> <day-of-month>",
Classifier{okData =
ClassData{prior = -0.24946085963158313,
unseen = -4.204692619390966,
likelihoods =
HashMap.fromList
[("integer (numeric)", -1.3564413979702095),
("integer (20,30,40)", -3.0910424533583156),
("integer with consecutive unit modifiers", -1.245215762859985),
("integer (0..10)", -1.4170660197866443),
("number suffix: \21313|\25342", -2.1102132003465894),
("compose by multiplication", -3.0910424533583156)],
n = 60},
koData =
ClassData{prior = -1.5105920777974677,
unseen = -3.1780538303479458,
likelihoods =
HashMap.fromList
[("integer (0..10)", -0.3629054936893685),
("number suffix: \21313|\25342", -2.03688192726104)],
n = 17}}),
("\19996\27491\25945\22797\27963\33410",
Classifier{okData =
ClassData{prior = -infinity, unseen = -0.6931471805599453,
likelihoods = HashMap.fromList [], n = 0},
koData =
ClassData{prior = 0.0, unseen = -1.6094379124341003,
likelihoods = HashMap.fromList [("", 0.0)], n = 3}}),
("hh:mm (time-of-day)",
Classifier{okData =
ClassData{prior = 0.0, unseen = -2.833213344056216,
likelihoods = HashMap.fromList [("", 0.0)], n = 15},
koData =
ClassData{prior = -infinity, unseen = -0.6931471805599453,
likelihoods = HashMap.fromList [], n = 0}}),
("relative (1-9) minutes after|past <integer> (hour-of-day)",
Classifier{okData =
ClassData{prior = 0.0, unseen = -3.044522437723423,
likelihoods =
HashMap.fromList
[("<integer> (latent time-of-day)integer (0..10)",
-0.6931471805599453),
("hour", -0.6931471805599453)],
n = 9},
koData =
ClassData{prior = -infinity, unseen = -1.0986122886681098,
likelihoods = HashMap.fromList [], n = 0}}),
("<integer> (latent time-of-day)",
Classifier{okData =
ClassData{prior = -0.2754119798599665,
unseen = -3.8066624897703196,
likelihoods =
HashMap.fromList
[("integer (numeric)", -2.174751721484161),
("integer (0..10)", -0.1466034741918754)],
n = 41},
koData =
ClassData{prior = -1.4240346891027378, unseen = -2.833213344056216,
likelihoods =
HashMap.fromList
[("integer (numeric)", -0.4700036292457356),
("one point 2", -1.1631508098056809)],
n = 13}}),
("\36926\36234\33410",
Classifier{okData =
ClassData{prior = 0.0, unseen = -1.3862943611198906,
likelihoods = HashMap.fromList [("", 0.0)], n = 2},
koData =
ClassData{prior = -infinity, unseen = -0.6931471805599453,
likelihoods = HashMap.fromList [], n = 0}}),
("nth <time> of <time>",
Classifier{okData =
ClassData{prior = 0.0, unseen = -2.3978952727983707,
likelihoods =
HashMap.fromList
[("Octoberordinal (digits)Monday", -0.6931471805599453),
("monthday", -0.6931471805599453)],
n = 4},
koData =
ClassData{prior = -infinity, unseen = -1.0986122886681098,
likelihoods = HashMap.fromList [], n = 0}}),
("\22235\26092\33410",
Classifier{okData =
ClassData{prior = 0.0, unseen = -1.3862943611198906,
likelihoods = HashMap.fromList [("", 0.0)], n = 2},
koData =
ClassData{prior = -infinity, unseen = -0.6931471805599453,
likelihoods = HashMap.fromList [], n = 0}}),
("\19996\27491\25945\22307\21608\20845",
Classifier{okData =
ClassData{prior = 0.0, unseen = -1.9459101490553135,
likelihoods = HashMap.fromList [("", 0.0)], n = 5},
koData =
ClassData{prior = -infinity, unseen = -0.6931471805599453,
likelihoods = HashMap.fromList [], n = 0}}),
("April",
Classifier{okData =
ClassData{prior = 0.0, unseen = -1.3862943611198906,
likelihoods = HashMap.fromList [("", 0.0)], n = 2},
koData =
ClassData{prior = -infinity, unseen = -0.6931471805599453,
likelihoods = HashMap.fromList [], n = 0}}),
("\21360\24230\20016\25910\33410",
Classifier{okData =
ClassData{prior = -0.5108256237659907,
unseen = -1.6094379124341003,
likelihoods = HashMap.fromList [("", 0.0)], n = 3},
koData =
ClassData{prior = -0.916290731874155, unseen = -1.3862943611198906,
likelihoods = HashMap.fromList [("", 0.0)], n = 2}}),
("\20809\26126\33410",
Classifier{okData =
ClassData{prior = 0.0, unseen = -2.1972245773362196,
likelihoods = HashMap.fromList [("", 0.0)], n = 7},
koData =
ClassData{prior = -infinity, unseen = -0.6931471805599453,
likelihoods = HashMap.fromList [], n = 0}}),
("week (grain)",
Classifier{okData =
ClassData{prior = -0.8434293836092833,
unseen = -3.6635616461296463,
likelihoods = HashMap.fromList [("", 0.0)], n = 37},
koData =
ClassData{prior = -0.5625269981428811,
unseen = -3.9318256327243257,
likelihoods = HashMap.fromList [("", 0.0)], n = 49}}),
("relative (10-59) minutes after|past <integer> (hour-of-day)",
Classifier{okData =
ClassData{prior = -0.45198512374305727,
unseen = -4.127134385045092,
likelihoods =
HashMap.fromList
[("<integer> (latent time-of-day)compose by multiplication",
-2.164963715117998),
("<integer> (latent time-of-day)integer with consecutive unit modifiers",
-0.9753796482441617),
("hour", -0.7435780341868373)],
n = 28},
koData =
ClassData{prior = -1.0116009116784799,
unseen = -3.6375861597263857,
likelihoods =
HashMap.fromList
[("<integer> (latent time-of-day)number suffix: \21313|\25342",
-1.413693335308005),
("<integer> (latent time-of-day)integer (0..10)",
-1.413693335308005),
("hour", -0.7777045685880083)],
n = 16}}),
("year (numeric with year symbol)",
Classifier{okData =
ClassData{prior = 0.0, unseen = -3.891820298110627,
likelihoods = HashMap.fromList [("integer (numeric)", 0.0)],
n = 47},
koData =
ClassData{prior = -infinity, unseen = -0.6931471805599453,
likelihoods = HashMap.fromList [], n = 0}}),
("now",
Classifier{okData =
ClassData{prior = 0.0, unseen = -2.4849066497880004,
likelihoods = HashMap.fromList [("", 0.0)], n = 10},
koData =
ClassData{prior = -infinity, unseen = -0.6931471805599453,
likelihoods = HashMap.fromList [], n = 0}}),
("\22823\25995\26399",
Classifier{okData =
ClassData{prior = 0.0, unseen = -1.6094379124341003,
likelihoods = HashMap.fromList [("", 0.0)], n = 3},
koData =
ClassData{prior = -infinity, unseen = -0.6931471805599453,
likelihoods = HashMap.fromList [], n = 0}}),
("\24858\20154\33410",
Classifier{okData =
ClassData{prior = 0.0, unseen = -1.3862943611198906,
likelihoods = HashMap.fromList [("", 0.0)], n = 2},
koData =
ClassData{prior = -infinity, unseen = -0.6931471805599453,
likelihoods = HashMap.fromList [], n = 0}}),
("\29369\22826\26893\26641\33410",
Classifier{okData =
ClassData{prior = 0.0, unseen = -2.0794415416798357,
likelihoods = HashMap.fromList [("", 0.0)], n = 6},
koData =
ClassData{prior = -infinity, unseen = -0.6931471805599453,
likelihoods = HashMap.fromList [], n = 0}}),
("\19996\27491\25945\22797\27963\33410\26143\26399\19968",
Classifier{okData =
ClassData{prior = 0.0, unseen = -1.3862943611198906,
likelihoods = HashMap.fromList [("", 0.0)], n = 2},
koData =
ClassData{prior = -infinity, unseen = -0.6931471805599453,
likelihoods = HashMap.fromList [], n = 0}}),
("\22307\28789\33410\24198\26085",
Classifier{okData =
ClassData{prior = 0.0, unseen = -1.3862943611198906,
likelihoods = HashMap.fromList [("", 0.0)], n = 2},
koData =
ClassData{prior = -infinity, unseen = -0.6931471805599453,
likelihoods = HashMap.fromList [], n = 0}}),
("numbers prefix with -, negative or minus",
Classifier{okData =
ClassData{prior = -infinity, unseen = -0.6931471805599453,
likelihoods = HashMap.fromList [], n = 0},
koData =
ClassData{prior = 0.0, unseen = -1.791759469228055,
likelihoods = HashMap.fromList [("integer (numeric)", 0.0)],
n = 4}}),
("Friday",
Classifier{okData =
ClassData{prior = -0.6931471805599453,
unseen = -1.9459101490553135,
likelihoods = HashMap.fromList [("", 0.0)], n = 5},
koData =
ClassData{prior = -0.6931471805599453,
unseen = -1.9459101490553135,
likelihoods = HashMap.fromList [("", 0.0)], n = 5}}),
("tomorrow",
Classifier{okData =
ClassData{prior = 0.0, unseen = -1.6094379124341003,
likelihoods = HashMap.fromList [("", 0.0)], n = 3},
koData =
ClassData{prior = -infinity, unseen = -0.6931471805599453,
likelihoods = HashMap.fromList [], n = 0}}),
("this year",
Classifier{okData =
ClassData{prior = 0.0, unseen = -1.0986122886681098,
likelihoods = HashMap.fromList [("", 0.0)], n = 1},
koData =
ClassData{prior = -infinity, unseen = -0.6931471805599453,
likelihoods = HashMap.fromList [], n = 0}}),
("\22522\30563\22307\20307\33410",
Classifier{okData =
ClassData{prior = 0.0, unseen = -1.6094379124341003,
likelihoods = HashMap.fromList [("", 0.0)], n = 3},
koData =
ClassData{prior = -infinity, unseen = -0.6931471805599453,
likelihoods = HashMap.fromList [], n = 0}}),
("\28595\38376\22238\24402\32426\24565\26085",
Classifier{okData =
ClassData{prior = 0.0, unseen = -1.3862943611198906,
likelihoods = HashMap.fromList [("", 0.0)], n = 2},
koData =
ClassData{prior = -infinity, unseen = -0.6931471805599453,
likelihoods = HashMap.fromList [], n = 0}}),
("\21360\24230\20804\22969\33410",
Classifier{okData =
ClassData{prior = 0.0, unseen = -1.3862943611198906,
likelihoods = HashMap.fromList [("", 0.0)], n = 2},
koData =
ClassData{prior = -infinity, unseen = -0.6931471805599453,
likelihoods = HashMap.fromList [], n = 0}}),
("next <day-of-week>",
Classifier{okData =
ClassData{prior = 0.0, unseen = -3.1780538303479458,
likelihoods =
HashMap.fromList
[("Wednesday", -1.3437347467010947),
("day", -0.7375989431307791), ("Tuesday", -1.3437347467010947)],
n = 10},
koData =
ClassData{prior = -infinity, unseen = -1.3862943611198906,
likelihoods = HashMap.fromList [], n = 0}}),
("fractional number",
Classifier{okData =
ClassData{prior = -infinity, unseen = -0.6931471805599453,
likelihoods = HashMap.fromList [], n = 0},
koData =
ClassData{prior = 0.0, unseen = -1.9459101490553135,
likelihoods = HashMap.fromList [("", 0.0)], n = 5}}),
("Sunday",
Classifier{okData =
ClassData{prior = -4.8790164169432056e-2,
unseen = -3.0910424533583156,
likelihoods = HashMap.fromList [("", 0.0)], n = 20},
koData =
ClassData{prior = -3.044522437723423, unseen = -1.0986122886681098,
likelihoods = HashMap.fromList [("", 0.0)], n = 1}}),
("afternoon",
Classifier{okData =
ClassData{prior = 0.0, unseen = -3.6375861597263857,
likelihoods = HashMap.fromList [("", 0.0)], n = 36},
koData =
ClassData{prior = -infinity, unseen = -0.6931471805599453,
likelihoods = HashMap.fromList [], n = 0}}),
("<duration> from now",
Classifier{okData =
ClassData{prior = -infinity, unseen = -2.1972245773362196,
likelihoods = HashMap.fromList [], n = 0},
koData =
ClassData{prior = 0.0, unseen = -3.891820298110627,
likelihoods =
HashMap.fromList
[("week", -2.2617630984737906), ("second", -2.772588722239781),
("day", -2.2617630984737906), ("year", -2.772588722239781),
("<integer> <unit-of-duration>", -0.8266785731844679),
("hour", -2.2617630984737906), ("month", -2.772588722239781),
("minute", -2.772588722239781)],
n = 20}}),
("\36174\32618\26085",
Classifier{okData =
ClassData{prior = 0.0, unseen = -2.0794415416798357,
likelihoods = HashMap.fromList [("", 0.0)], n = 6},
koData =
ClassData{prior = -infinity, unseen = -0.6931471805599453,
likelihoods = HashMap.fromList [], n = 0}}),
("February",
Classifier{okData =
ClassData{prior = 0.0, unseen = -3.258096538021482,
likelihoods = HashMap.fromList [("", 0.0)], n = 24},
koData =
ClassData{prior = -infinity, unseen = -0.6931471805599453,
likelihoods = HashMap.fromList [], n = 0}}),
("this <cycle>",
Classifier{okData =
ClassData{prior = -0.8909729238898653,
unseen = -3.6635616461296463,
likelihoods =
HashMap.fromList
[("week", -1.1526795099383855),
("month (grain)", -2.2512917986064953),
("year (grain)", -2.538973871058276),
("week (grain)", -1.1526795099383855),
("year", -2.538973871058276), ("month", -2.2512917986064953)],
n = 16},
koData =
ClassData{prior = -0.5280674302004967, unseen = -3.970291913552122,
likelihoods =
HashMap.fromList
[("week", -0.7731898882334817),
("week (grain)", -0.7731898882334817)],
n = 23}}),
("minute (grain)",
Classifier{okData =
ClassData{prior = -0.4462871026284195, unseen = -2.890371757896165,
likelihoods = HashMap.fromList [("", 0.0)], n = 16},
koData =
ClassData{prior = -1.0216512475319814,
unseen = -2.3978952727983707,
likelihoods = HashMap.fromList [("", 0.0)], n = 9}}),
("xxxx year",
Classifier{okData =
ClassData{prior = 0.0, unseen = -1.3862943611198906,
likelihoods =
HashMap.fromList
[("integer (0..10)integer (0..10)integer (0..10)integer (0..10)",
0.0)],
n = 2},
koData =
ClassData{prior = -infinity, unseen = -0.6931471805599453,
likelihoods = HashMap.fromList [], n = 0}}),
("<dim time> <part-of-day>",
Classifier{okData =
ClassData{prior = -7.696104113612832e-2,
unseen = -4.6913478822291435,
likelihoods =
HashMap.fromList
[("dayhour", -0.750305594399894),
("national dayevening|night", -3.58351893845611),
("<named-month> <day-of-month>morning", -2.117181869662683),
("\24773\20154\33410evening|night", -3.58351893845611),
("\20799\31461\33410afternoon", -3.58351893845611),
("intersectmorning", -2.117181869662683),
("<time> <day-of-month>morning", -2.117181869662683),
("Mondaymorning", -2.4849066497880004)],
n = 50},
koData =
ClassData{prior = -2.6026896854443837, unseen = -2.833213344056216,
likelihoods =
HashMap.fromList
[("dayhour", -1.1631508098056809),
("<time> <day-of-month>morning", -1.1631508098056809)],
n = 4}}),
("<part-of-day> <dim time>",
Classifier{okData =
ClassData{prior = -0.7935659283069926,
unseen = -5.0369526024136295,
likelihoods =
HashMap.fromList
[("tonight<integer> (latent time-of-day)", -3.4210000089583352),
("afternoonrelative (10-59) minutes after|past <integer> (hour-of-day)",
-1.6631420914059614),
("hourhour", -2.322387720290225),
("afternoon<time-of-day> o'clock", -3.644143560272545),
("hourminute", -0.9699949108460162),
("afternoon<integer> (latent time-of-day)", -3.644143560272545),
("afternoonrelative (1-9) minutes after|past <integer> (hour-of-day)",
-2.72785282839839),
("afternoonhh:mm (time-of-day)", -3.644143560272545),
("tonight<time-of-day> o'clock", -3.4210000089583352),
("afternoonnumber of 5 minutes after|past <integer> (hour-of-day)",
-2.4654885639308985),
("afternoonhalf after|past <integer> (hour-of-day)",
-3.2386784521643803)],
n = 71},
koData =
ClassData{prior = -0.6018985090948004, unseen = -5.214935757608986,
likelihoods =
HashMap.fromList
[("afternoonrelative (10-59) minutes after|past <integer> (hour-of-day)",
-2.3762728087852047),
("hourhour", -0.9899784476653142),
("afternoon<time-of-day> o'clock", -1.7754989483562746),
("hourminute", -2.21375387928743),
("afternoon<integer> (latent time-of-day)", -1.571899993115035),
("afternoonnumber of 5 minutes after|past <integer> (hour-of-day)",
-3.82319179172153)],
n = 86}}),
("<integer> <unit-of-duration>",
Classifier{okData =
ClassData{prior = -infinity, unseen = -3.1354942159291497,
likelihoods = HashMap.fromList [], n = 0},
koData =
ClassData{prior = 0.0, unseen = -6.244166900663736,
likelihoods =
HashMap.fromList
[("number suffix: \21313|\25342month (grain)",
-4.632785353021065),
("week", -3.0233474405869645),
("integer (0..10)month (grain)", -2.745715703988685),
("integer (0..10)hour (grain)", -3.1067290495260154),
("<number>\20010/\20491week (grain)", -3.8443279926567944),
("compose by multiplicationminute (grain)", -4.45046379622711),
("second", -3.6031659358399066),
("integer (0..10)day (grain)", -3.1067290495260154),
("integer (0..10)year (grain)", -3.7573166156671647),
("<number>\20010/\20491month (grain)", -3.469634543215384),
("integer (numeric)year (grain)", -2.3710222545472743),
("integer (0..10)second (grain)", -3.6031659358399066),
("day", -3.1067290495260154), ("year", -2.1646858215494458),
("integer (0..10)minute (grain)", -2.984126727433683),
("number suffix: \21313|\25342minute (grain)",
-4.855928904335275),
("hour", -3.1067290495260154),
("integer (0..10)week (grain)", -3.534173064352955),
("month", -2.008116760857906),
("integer (numeric)month (grain)", -3.3518515075590005),
("integer with consecutive unit modifiersminute (grain)",
-4.296313116399852),
("minute", -2.553343811341229)],
n = 246}}),
("\32769\26495\33410",
Classifier{okData =
ClassData{prior = 0.0, unseen = -1.3862943611198906,
likelihoods = HashMap.fromList [("", 0.0)], n = 2},
koData =
ClassData{prior = -infinity, unseen = -0.6931471805599453,
likelihoods = HashMap.fromList [], n = 0}}),
("\31709\28779\33410",
Classifier{okData =
ClassData{prior = 0.0, unseen = -1.6094379124341003,
likelihoods = HashMap.fromList [("", 0.0)], n = 3},
koData =
ClassData{prior = -infinity, unseen = -0.6931471805599453,
likelihoods = HashMap.fromList [], n = 0}}),
("<time-of-day> am|pm",
Classifier{okData =
ClassData{prior = -0.4353180712578455, unseen = -3.295836866004329,
likelihoods =
HashMap.fromList
[("hh:mm (time-of-day)", -0.9555114450274363),
("<integer> (latent time-of-day)", -2.159484249353372),
("hour", -2.159484249353372), ("minute", -0.9555114450274363)],
n = 11},
koData =
ClassData{prior = -1.041453874828161, unseen = -2.833213344056216,
likelihoods =
HashMap.fromList
[("<integer> (latent time-of-day)", -0.8266785731844679),
("hour", -0.8266785731844679)],
n = 6}}),
("one point 2",
Classifier{okData =
ClassData{prior = -infinity, unseen = -1.9459101490553135,
likelihoods = HashMap.fromList [], n = 0},
koData =
ClassData{prior = 0.0, unseen = -3.7612001156935624,
likelihoods =
HashMap.fromList
[("integer (0..10)integer (0..10)", -0.9650808960435872),
("integer (0..10)number suffix: \21313|\25342",
-1.9459101490553135),
("integer (0..10)integer with consecutive unit modifiers",
-1.3397743454849977),
("integer (0..10)<number>\20010/\20491", -2.639057329615259),
("integer (0..10)compose by multiplication",
-2.639057329615259),
("integer (0..10)half", -2.639057329615259)],
n = 36}}),
("intersect by \",\"",
Classifier{okData =
ClassData{prior = 0.0, unseen = -4.330733340286331,
likelihoods =
HashMap.fromList
[("daymonth", -2.2380465718564744),
("Sunday<named-month> <day-of-month>", -1.6094379124341003),
("SundayFebruary", -2.2380465718564744),
("dayday", -0.9501922835498364),
("Sundayintersect", -1.6094379124341003)],
n = 35},
koData =
ClassData{prior = -infinity, unseen = -1.791759469228055,
likelihoods = HashMap.fromList [], n = 0}}),
("\26837\26525\20027\26085",
Classifier{okData =
ClassData{prior = -0.6931471805599453, unseen = -1.791759469228055,
likelihoods = HashMap.fromList [("", 0.0)], n = 4},
koData =
ClassData{prior = -0.6931471805599453, unseen = -1.791759469228055,
likelihoods = HashMap.fromList [("", 0.0)], n = 4}}),
("\38463\33298\25289\33410",
Classifier{okData =
ClassData{prior = 0.0, unseen = -1.0986122886681098,
likelihoods = HashMap.fromList [("", 0.0)], n = 1},
koData =
ClassData{prior = -infinity, unseen = -0.6931471805599453,
likelihoods = HashMap.fromList [], n = 0}}),
("integer with consecutive unit modifiers",
Classifier{okData =
ClassData{prior = -5.715841383994864e-2,
unseen = -3.6109179126442243,
likelihoods =
HashMap.fromList
[("number suffix: \21313|\25342integer (0..10)",
-0.6931471805599453),
("integer (0..10)integer (0..10)", -0.6931471805599453)],
n = 34},
koData =
ClassData{prior = -2.890371757896165, unseen = -1.6094379124341003,
likelihoods =
HashMap.fromList
[("integer (0..10)integer (0..10)", -0.2876820724517809)],
n = 2}}),
("second (grain)",
Classifier{okData =
ClassData{prior = 0.0, unseen = -2.70805020110221,
likelihoods = HashMap.fromList [("", 0.0)], n = 13},
koData =
ClassData{prior = -infinity, unseen = -0.6931471805599453,
likelihoods = HashMap.fromList [], n = 0}}),
("\19996\27491\25945\32822\31267\21463\38590\26085",
Classifier{okData =
ClassData{prior = -0.3364722366212129,
unseen = -1.9459101490553135,
likelihoods = HashMap.fromList [("", 0.0)], n = 5},
koData =
ClassData{prior = -1.252762968495368, unseen = -1.3862943611198906,
likelihoods = HashMap.fromList [("", 0.0)], n = 2}}),
("\25289\25746\36335\22307\21608\20845",
Classifier{okData =
ClassData{prior = 0.0, unseen = -1.3862943611198906,
likelihoods = HashMap.fromList [("", 0.0)], n = 2},
koData =
ClassData{prior = -infinity, unseen = -0.6931471805599453,
likelihoods = HashMap.fromList [], n = 0}}),
("<duration> ago",
Classifier{okData =
ClassData{prior = -infinity, unseen = -2.1972245773362196,
likelihoods = HashMap.fromList [], n = 0},
koData =
ClassData{prior = 0.0, unseen = -3.891820298110627,
likelihoods =
HashMap.fromList
[("week", -2.2617630984737906), ("second", -2.772588722239781),
("day", -2.2617630984737906), ("year", -2.772588722239781),
("<integer> <unit-of-duration>", -0.8266785731844679),
("hour", -2.2617630984737906), ("month", -2.772588722239781),
("minute", -2.772588722239781)],
n = 20}}),
("\22307\35806\33410",
Classifier{okData =
ClassData{prior = 0.0, unseen = -1.791759469228055,
likelihoods = HashMap.fromList [("", 0.0)], n = 4},
koData =
ClassData{prior = -infinity, unseen = -0.6931471805599453,
likelihoods = HashMap.fromList [], n = 0}}),
("last <time>",
Classifier{okData =
ClassData{prior = 0.0, unseen = -3.332204510175204,
likelihoods =
HashMap.fromList
[("day", -0.7308875085427924), ("Sunday", -1.2163953243244932),
("Tuesday", -1.5040773967762742)],
n = 12},
koData =
ClassData{prior = -infinity, unseen = -1.3862943611198906,
likelihoods = HashMap.fromList [], n = 0}}),
("\20234\26031\20848\26032\24180",
Classifier{okData =
ClassData{prior = 0.0, unseen = -1.6094379124341003,
likelihoods = HashMap.fromList [("", 0.0)], n = 3},
koData =
ClassData{prior = -infinity, unseen = -0.6931471805599453,
likelihoods = HashMap.fromList [], n = 0}}),
("March",
Classifier{okData =
ClassData{prior = 0.0, unseen = -2.833213344056216,
likelihoods = HashMap.fromList [("", 0.0)], n = 15},
koData =
ClassData{prior = -infinity, unseen = -0.6931471805599453,
likelihoods = HashMap.fromList [], n = 0}}),
("\24320\25995\33410",
Classifier{okData =
ClassData{prior = 0.0, unseen = -1.9459101490553135,
likelihoods = HashMap.fromList [("", 0.0)], n = 5},
koData =
ClassData{prior = -infinity, unseen = -0.6931471805599453,
likelihoods = HashMap.fromList [], n = 0}}),
("the day after tomorrow",
Classifier{okData =
ClassData{prior = -0.5108256237659907,
unseen = -1.6094379124341003,
likelihoods = HashMap.fromList [("", 0.0)], n = 3},
koData =
ClassData{prior = -0.916290731874155, unseen = -1.3862943611198906,
likelihoods = HashMap.fromList [("", 0.0)], n = 2}}),
("\22307\21608\20845",
Classifier{okData =
ClassData{prior = -0.6931471805599453,
unseen = -2.0794415416798357,
likelihoods = HashMap.fromList [("", 0.0)], n = 6},
koData =
ClassData{prior = -0.6931471805599453,
unseen = -2.0794415416798357,
likelihoods = HashMap.fromList [("", 0.0)], n = 6}}),
("\22919\22899\33410",
Classifier{okData =
ClassData{prior = 0.0, unseen = -1.3862943611198906,
likelihoods = HashMap.fromList [("", 0.0)], n = 2},
koData =
ClassData{prior = -infinity, unseen = -0.6931471805599453,
likelihoods = HashMap.fromList [], n = 0}}),
("\20840\29699\38738\24180\26381\21153\26085",
Classifier{okData =
ClassData{prior = 0.0, unseen = -1.3862943611198906,
likelihoods = HashMap.fromList [("", 0.0)], n = 2},
koData =
ClassData{prior = -infinity, unseen = -0.6931471805599453,
likelihoods = HashMap.fromList [], n = 0}}),
("\27431\21335\33410",
Classifier{okData =
ClassData{prior = 0.0, unseen = -1.3862943611198906,
likelihoods = HashMap.fromList [("", 0.0)], n = 2},
koData =
ClassData{prior = -infinity, unseen = -0.6931471805599453,
likelihoods = HashMap.fromList [], n = 0}}),
("\20061\22812\33410",
Classifier{okData =
ClassData{prior = 0.0, unseen = -1.6094379124341003,
likelihoods = HashMap.fromList [("", 0.0)], n = 3},
koData =
ClassData{prior = -infinity, unseen = -0.6931471805599453,
likelihoods = HashMap.fromList [], n = 0}}),
("next <time>",
Classifier{okData =
ClassData{prior = -0.6931471805599453, unseen = -2.639057329615259,
likelihoods =
HashMap.fromList
[("day", -0.7731898882334817), ("Tuesday", -0.7731898882334817)],
n = 5},
koData =
ClassData{prior = -0.6931471805599453, unseen = -2.639057329615259,
likelihoods =
HashMap.fromList
[("Wednesday", -0.7731898882334817),
("day", -0.7731898882334817)],
n = 5}}),
("last <cycle>",
Classifier{okData =
ClassData{prior = -0.8472978603872037,
unseen = -3.2188758248682006,
likelihoods =
HashMap.fromList
[("week", -1.3862943611198906),
("month (grain)", -1.791759469228055),
("year (grain)", -2.4849066497880004),
("week (grain)", -1.3862943611198906),
("year", -2.4849066497880004), ("month", -1.791759469228055)],
n = 9},
koData =
ClassData{prior = -0.5596157879354228,
unseen = -3.4339872044851463,
likelihoods =
HashMap.fromList
[("week", -0.8362480242006186),
("week (grain)", -0.8362480242006186)],
n = 12}}),
("\20197\33394\21015\29420\31435\26085",
Classifier{okData =
ClassData{prior = 0.0, unseen = -1.791759469228055,
likelihoods = HashMap.fromList [("", 0.0)], n = 4},
koData =
ClassData{prior = -infinity, unseen = -0.6931471805599453,
likelihoods = HashMap.fromList [], n = 0}}),
("next n <cycle>",
Classifier{okData =
ClassData{prior = 0.0, unseen = -4.605170185988091,
likelihoods =
HashMap.fromList
[("week", -2.3978952727983707),
("integer (0..10)hour (grain)", -2.3978952727983707),
("<number>\20010/\20491week (grain)", -2.9856819377004897),
("second", -2.803360380906535),
("integer (0..10)day (grain)", -2.515678308454754),
("integer (0..10)year (grain)", -3.2088254890146994),
("<number>\20010/\20491month (grain)", -2.803360380906535),
("integer (0..10)second (grain)", -2.803360380906535),
("day", -2.515678308454754), ("year", -3.2088254890146994),
("integer (0..10)minute (grain)", -2.649209701079277),
("hour", -2.3978952727983707),
("integer (0..10)week (grain)", -2.9856819377004897),
("month", -2.803360380906535), ("minute", -2.649209701079277)],
n = 42},
koData =
ClassData{prior = -infinity, unseen = -2.772588722239781,
likelihoods = HashMap.fromList [], n = 0}}),
("\19975\22307\33410",
Classifier{okData =
ClassData{prior = 0.0, unseen = -1.3862943611198906,
likelihoods = HashMap.fromList [("", 0.0)], n = 2},
koData =
ClassData{prior = -infinity, unseen = -0.6931471805599453,
likelihoods = HashMap.fromList [], n = 0}}),
("\21476\23572\37030\33410",
Classifier{okData =
ClassData{prior = 0.0, unseen = -1.6094379124341003,
likelihoods = HashMap.fromList [("", 0.0)], n = 3},
koData =
ClassData{prior = -infinity, unseen = -0.6931471805599453,
likelihoods = HashMap.fromList [], n = 0}}),
("number of five minutes",
Classifier{okData =
ClassData{prior = -infinity, unseen = -0.6931471805599453,
likelihoods = HashMap.fromList [], n = 0},
koData =
ClassData{prior = 0.0, unseen = -1.3862943611198906,
likelihoods = HashMap.fromList [("integer (0..10)", 0.0)],
n = 2}}),
("\20799\31461\33410",
Classifier{okData =
ClassData{prior = 0.0, unseen = -1.9459101490553135,
likelihoods = HashMap.fromList [("", 0.0)], n = 5},
koData =
ClassData{prior = -infinity, unseen = -0.6931471805599453,
likelihoods = HashMap.fromList [], n = 0}}),
("Tuesday",
Classifier{okData =
ClassData{prior = -3.922071315328127e-2,
unseen = -3.295836866004329,
likelihoods = HashMap.fromList [("", 0.0)], n = 25},
koData =
ClassData{prior = -3.258096538021482, unseen = -1.0986122886681098,
likelihoods = HashMap.fromList [("", 0.0)], n = 1}}),
("\26149\33410",
Classifier{okData =
ClassData{prior = 0.0, unseen = -2.0794415416798357,
likelihoods = HashMap.fromList [("", 0.0)], n = 6},
koData =
ClassData{prior = -infinity, unseen = -0.6931471805599453,
likelihoods = HashMap.fromList [], n = 0}}),
("number.number minutes",
Classifier{okData =
ClassData{prior = -infinity, unseen = -1.3862943611198906,
likelihoods = HashMap.fromList [], n = 0},
koData =
ClassData{prior = 0.0, unseen = -2.772588722239781,
likelihoods =
HashMap.fromList
[("integer (0..10)integer with consecutive unit modifiersminute (grain)",
-1.0986122886681098),
("integer (0..10)compose by multiplicationminute (grain)",
-1.6094379124341003),
("minute", -0.7621400520468967)],
n = 6}}),
("\32822\31267\21463\38590\26085",
Classifier{okData =
ClassData{prior = -1.0296194171811581,
unseen = -1.9459101490553135,
likelihoods = HashMap.fromList [("", 0.0)], n = 5},
koData =
ClassData{prior = -0.4418327522790392,
unseen = -2.3978952727983707,
likelihoods = HashMap.fromList [("", 0.0)], n = 9}}),
("<named-month> <day-of-month>",
Classifier{okData =
ClassData{prior = 0.0, unseen = -4.762173934797756,
likelihoods =
HashMap.fromList
[("Marchinteger (0..10)", -2.5563656137701454),
("Marchinteger (numeric)", -3.144152278672264),
("Aprilinteger (numeric)", -3.654977902438255),
("Februaryinteger (0..10)", -2.6741486494265287),
("Februarynumber suffix: \21313|\25342", -2.6741486494265287),
("month", -0.7462570058738938),
("Februaryinteger (numeric)", -2.5563656137701454),
("Februaryinteger with consecutive unit modifiers",
-1.8091512119399242)],
n = 54},
koData =
ClassData{prior = -infinity, unseen = -2.1972245773362196,
likelihoods = HashMap.fromList [], n = 0}}),
("\21171\21160\33410",
Classifier{okData =
ClassData{prior = 0.0, unseen = -1.6094379124341003,
likelihoods = HashMap.fromList [("", 0.0)], n = 3},
koData =
ClassData{prior = -infinity, unseen = -0.6931471805599453,
likelihoods = HashMap.fromList [], n = 0}}),
("\22797\27963\33410\26143\26399\19968",
Classifier{okData =
ClassData{prior = -0.6931471805599453,
unseen = -1.3862943611198906,
likelihoods = HashMap.fromList [("", 0.0)], n = 2},
koData =
ClassData{prior = -0.6931471805599453,
unseen = -1.3862943611198906,
likelihoods = HashMap.fromList [("", 0.0)], n = 2}}),
("number suffix: \19975|\33836",
Classifier{okData =
ClassData{prior = -infinity, unseen = -0.6931471805599453,
likelihoods = HashMap.fromList [], n = 0},
koData =
ClassData{prior = 0.0, unseen = -1.6094379124341003,
likelihoods = HashMap.fromList [("", 0.0)], n = 3}}),
("\22823\25995\39318\26085",
Classifier{okData =
ClassData{prior = 0.0, unseen = -1.9459101490553135,
likelihoods = HashMap.fromList [("", 0.0)], n = 5},
koData =
ClassData{prior = -infinity, unseen = -0.6931471805599453,
likelihoods = HashMap.fromList [], n = 0}}),
("half",
Classifier{okData =
ClassData{prior = -infinity, unseen = -0.6931471805599453,
likelihoods = HashMap.fromList [], n = 0},
koData =
ClassData{prior = 0.0, unseen = -1.9459101490553135,
likelihoods = HashMap.fromList [("", 0.0)], n = 5}}),
("two days after tomorrow",
Classifier{okData =
ClassData{prior = 0.0, unseen = -1.3862943611198906,
likelihoods = HashMap.fromList [("", 0.0)], n = 2},
koData =
ClassData{prior = -infinity, unseen = -0.6931471805599453,
likelihoods = HashMap.fromList [], n = 0}}),
("integer (0..10)",
Classifier{okData =
ClassData{prior = -0.5957987257888164, unseen = -5.407171771460119,
likelihoods = HashMap.fromList [("", 0.0)], n = 221},
koData =
ClassData{prior = -0.8010045764163588, unseen = -5.204006687076795,
likelihoods = HashMap.fromList [("", 0.0)], n = 180}}),
("last n <cycle>",
Classifier{okData =
ClassData{prior = 0.0, unseen = -4.605170185988091,
likelihoods =
HashMap.fromList
[("week", -2.9856819377004897),
("integer (0..10)month (grain)", -3.4965075614664802),
("integer (0..10)hour (grain)", -2.3978952727983707),
("second", -2.649209701079277),
("integer (0..10)day (grain)", -2.9856819377004897),
("integer (0..10)year (grain)", -3.4965075614664802),
("<number>\20010/\20491month (grain)", -2.3978952727983707),
("integer (0..10)second (grain)", -2.649209701079277),
("day", -2.9856819377004897), ("year", -3.4965075614664802),
("integer (0..10)minute (grain)", -2.3978952727983707),
("hour", -2.3978952727983707),
("integer (0..10)week (grain)", -2.9856819377004897),
("month", -2.1972245773362196),
("minute", -2.3978952727983707)],
n = 42},
koData =
ClassData{prior = -infinity, unseen = -2.772588722239781,
likelihoods = HashMap.fromList [], n = 0}}),
("last <duration>",
Classifier{okData =
ClassData{prior = -infinity, unseen = -2.0794415416798357,
likelihoods = HashMap.fromList [], n = 0},
koData =
ClassData{prior = 0.0, unseen = -3.912023005428146,
likelihoods =
HashMap.fromList
[("week", -2.2823823856765264), ("second", -2.505525936990736),
("day", -2.793208009442517),
("<integer> <unit-of-duration>", -0.8007778447523107),
("hour", -2.2823823856765264), ("month", -2.2823823856765264),
("minute", -2.2823823856765264)],
n = 21}}),
("ordinal (digits)",
Classifier{okData =
ClassData{prior = -1.252762968495368, unseen = -1.9459101490553135,
likelihoods =
HashMap.fromList [("<number>\20010/\20491", -0.1823215567939546)],
n = 4},
koData =
ClassData{prior = -0.3364722366212129,
unseen = -2.5649493574615367,
likelihoods =
HashMap.fromList [("integer (0..10)", -8.701137698962981e-2)],
n = 10}}),
("n <cycle> last",
Classifier{okData =
ClassData{prior = 0.0, unseen = -4.02535169073515,
likelihoods =
HashMap.fromList
[("week", -2.3978952727983707),
("integer (0..10)hour (grain)", -2.3978952727983707),
("<number>\20010/\20491week (grain)", -2.908720896564361),
("second", -2.908720896564361),
("integer (0..10)day (grain)", -2.3978952727983707),
("integer (0..10)year (grain)", -2.908720896564361),
("<number>\20010/\20491month (grain)", -2.908720896564361),
("integer (0..10)second (grain)", -2.908720896564361),
("day", -2.3978952727983707), ("year", -2.908720896564361),
("integer (0..10)minute (grain)", -2.908720896564361),
("hour", -2.3978952727983707),
("integer (0..10)week (grain)", -2.908720896564361),
("month", -2.908720896564361), ("minute", -2.908720896564361)],
n = 20},
koData =
ClassData{prior = -infinity, unseen = -2.772588722239781,
likelihoods = HashMap.fromList [], n = 0}}),
("\24527\24724\33410",
Classifier{okData =
ClassData{prior = 0.0, unseen = -1.6094379124341003,
likelihoods = HashMap.fromList [("", 0.0)], n = 3},
koData =
ClassData{prior = -infinity, unseen = -0.6931471805599453,
likelihoods = HashMap.fromList [], n = 0}}),
("morning",
Classifier{okData =
ClassData{prior = 0.0, unseen = -2.890371757896165,
likelihoods = HashMap.fromList [("", 0.0)], n = 16},
koData =
ClassData{prior = -infinity, unseen = -0.6931471805599453,
likelihoods = HashMap.fromList [], n = 0}}),
("week-end",
Classifier{okData =
ClassData{prior = 0.0, unseen = -1.3862943611198906,
likelihoods = HashMap.fromList [("", 0.0)], n = 2},
koData =
ClassData{prior = -infinity, unseen = -0.6931471805599453,
likelihoods = HashMap.fromList [], n = 0}}),
("number suffix: \21313|\25342",
Classifier{okData =
ClassData{prior = -0.1590646946296874,
unseen = -3.4339872044851463,
likelihoods = HashMap.fromList [("", 0.0)], n = 29},
koData =
ClassData{prior = -1.916922612182061, unseen = -1.9459101490553135,
likelihoods = HashMap.fromList [("", 0.0)], n = 5}}),
("\22320\29699\19968\23567\26102",
Classifier{okData =
ClassData{prior = 0.0, unseen = -1.3862943611198906,
likelihoods = HashMap.fromList [("", 0.0)], n = 2},
koData =
ClassData{prior = -infinity, unseen = -0.6931471805599453,
likelihoods = HashMap.fromList [], n = 0}}),
("day (grain)",
Classifier{okData =
ClassData{prior = -0.38299225225610584,
unseen = -2.833213344056216,
likelihoods = HashMap.fromList [("", 0.0)], n = 15},
koData =
ClassData{prior = -1.1451323043030026,
unseen = -2.1972245773362196,
likelihoods = HashMap.fromList [("", 0.0)], n = 7}}),
("\22307\32426\33410",
Classifier{okData =
ClassData{prior = 0.0, unseen = -1.3862943611198906,
likelihoods = HashMap.fromList [("", 0.0)], n = 2},
koData =
ClassData{prior = -infinity, unseen = -0.6931471805599453,
likelihoods = HashMap.fromList [], n = 0}}),
("\22797\27963\33410",
Classifier{okData =
ClassData{prior = -1.0986122886681098,
unseen = -1.6094379124341003,
likelihoods = HashMap.fromList [("", 0.0)], n = 3},
koData =
ClassData{prior = -0.40546510810816444,
unseen = -2.0794415416798357,
likelihoods = HashMap.fromList [("", 0.0)], n = 6}}),
("<number>\20010/\20491",
Classifier{okData =
ClassData{prior = -0.2006706954621511,
unseen = -3.4011973816621555,
likelihoods =
HashMap.fromList [("integer (0..10)", -3.509131981127006e-2)],
n = 27},
koData =
ClassData{prior = -1.7047480922384253,
unseen = -2.1972245773362196,
likelihoods =
HashMap.fromList
[("one point 2", -0.9808292530117262),
("integer (0..10)", -0.4700036292457356)],
n = 6}}),
("compose by multiplication",
Classifier{okData =
ClassData{prior = -0.3364722366212129,
unseen = -2.0794415416798357,
likelihoods =
HashMap.fromList
[("integer (0..10)number suffix: \21313|\25342",
-0.15415067982725836)],
n = 5},
koData =
ClassData{prior = -1.252762968495368, unseen = -1.6094379124341003,
likelihoods =
HashMap.fromList
[("one point 2number suffix: \21313|\25342",
-0.2876820724517809)],
n = 2}}),
("\24773\20154\33410",
Classifier{okData =
ClassData{prior = 0.0, unseen = -1.9459101490553135,
likelihoods = HashMap.fromList [("", 0.0)], n = 5},
koData =
ClassData{prior = -infinity, unseen = -0.6931471805599453,
likelihoods = HashMap.fromList [], n = 0}}),
("\20116\26092\33410",
Classifier{okData =
ClassData{prior = 0.0, unseen = -1.6094379124341003,
likelihoods = HashMap.fromList [("", 0.0)], n = 3},
koData =
ClassData{prior = -infinity, unseen = -0.6931471805599453,
likelihoods = HashMap.fromList [], n = 0}}),
("\31070\22307\26143\26399\22235",
Classifier{okData =
ClassData{prior = 0.0, unseen = -1.9459101490553135,
likelihoods = HashMap.fromList [("", 0.0)], n = 5},
koData =
ClassData{prior = -infinity, unseen = -0.6931471805599453,
likelihoods = HashMap.fromList [], n = 0}}),
("\25995\26376",
Classifier{okData =
ClassData{prior = 0.0, unseen = -1.6094379124341003,
likelihoods = HashMap.fromList [("", 0.0)], n = 3},
koData =
ClassData{prior = -infinity, unseen = -0.6931471805599453,
likelihoods = HashMap.fromList [], n = 0}}),
("\27861\20196\20043\22812",
Classifier{okData =
ClassData{prior = 0.0, unseen = -1.3862943611198906,
likelihoods = HashMap.fromList [("", 0.0)], n = 2},
koData =
ClassData{prior = -infinity, unseen = -0.6931471805599453,
likelihoods = HashMap.fromList [], n = 0}}),
("this <time>",
Classifier{okData =
ClassData{prior = 0.0, unseen = -4.07753744390572,
likelihoods =
HashMap.fromList
[("Wednesday", -1.9810014688665833),
("Monday", -1.9810014688665833), ("day", -0.8415671856782186),
("hour", -2.9618307218783095), ("Tuesday", -1.6625477377480489),
("week-end", -2.9618307218783095)],
n = 26},
koData =
ClassData{prior = -infinity, unseen = -1.9459101490553135,
likelihoods = HashMap.fromList [], n = 0}})]
|
9f6e5338222f129972701972b80d54a2bc195c45953c6a8c481352b0e4a25cae
|
Errorific/haskell-script-examples
|
tee.hs
|
module Main where
import Control.Monad (liftM)
import Data.Foldable (forM_)
import Options.Applicative (Parser, ParserInfo, argument, execParser, fullDesc,
header, help, helper, info, long, metavar, progDesc,
short, some, str, switch, (<*>), (<>))
import System.IO (Handle, IOMode (AppendMode), IOMode (WriteMode),
hClose, hPutStrLn, openFile, stdout)
-- tee
-- | Entry point: copy stdin, line by line, to stdout and to every
-- file named on the command line (a minimal clone of tee(1)).
main :: IO ()
main = do
  -- run the parser over the cli arguments
  opts <- execParser optsParserInfo
  -- Pick file mode based on option: -a/--append appends, default truncates
  let fileMode = if append opts then AppendMode else WriteMode
  -- Open all the mentioned output files in the chosen mode
  outputFileHandles <- mapM (`openFile` fileMode) $ filenames opts
  -- Lazily read stdin as a list of lines (idiomatic <$> instead of liftM)
  stdInLines <- lines <$> getContents
  -- for each line, run hsPutStrLn for stdout and all output files
  forM_ stdInLines $ hsPutStrLn (stdout : outputFileHandles)
  -- close all the open output files so buffered output is flushed
  mapM_ hClose outputFileHandles
-- | Write a single line to every handle in the given list.
hsPutStrLn :: [Handle] -> String -> IO ()
hsPutStrLn handles line = mapM_ (`hPutStrLn` line) handles
-- structure to hold cli arguments
data Options = Options
{ filenames :: [String]
, append :: Bool
} deriving (Show)
-- Parser for cli arguments: one or more output filenames plus an
-- optional append switch.
optsParser :: Parser Options
optsParser = Options <$> fileArguments <*> appendSwitch
  where
    -- positional arguments: at least one output file
    fileArguments =
      some (argument str (metavar "FILENAMES" <> help "Output files"))
    -- boolean flag selecting append mode
    appendSwitch =
      switch
        (long "append"
         <> short 'a'
         <> help "Append to output file rather than overwrite")
-- Adding program help text to the parser (description, --help handler)
optsParserInfo :: ParserInfo Options
optsParserInfo = info (helper <*> optsParser) infoMods
  where
    infoMods =
         fullDesc
      <> progDesc "A bad clone of tee"
      <> header "tee - a bad clone of the real tee"
| null |
https://raw.githubusercontent.com/Errorific/haskell-script-examples/0704e801183487427231e04a99e64b616beeba85/src/tee.hs
|
haskell
|
tee
run the parser over the cli arguments
Pick file mode based on option
Open all the mentioned output files
start reading lines from std in
for each line, run hsPutStrLn for stdout and all output files
close all the open output files so they flush
print a line to all file handles
structure to hold cli arguments
Adding program help text to the parser
|
module Main where
import Control.Monad (liftM)
import Data.Foldable (forM_)
import Options.Applicative (Parser, ParserInfo, argument, execParser, fullDesc,
header, help, helper, info, long, metavar, progDesc,
short, some, str, switch, (<*>), (<>))
import System.IO (Handle, IOMode (AppendMode), IOMode (WriteMode),
hClose, hPutStrLn, openFile, stdout)
main :: IO ()
main = do
opts <- execParser optsParserInfo
let fileMode = if append opts then AppendMode else WriteMode
outputFileHandles <- mapM (`openFile` fileMode) $ filenames opts
stdInLines <- liftM lines getContents
forM_ stdInLines $ hsPutStrLn (stdout : outputFileHandles)
mapM_ hClose outputFileHandles
hsPutStrLn :: [Handle] -> String -> IO ()
hsPutStrLn handles line = forM_ handles . flip hPutStrLn $ line
data Options = Options
{ filenames :: [String]
, append :: Bool
} deriving (Show)
Parser for cli arguments
optsParser :: Parser Options
optsParser = Options
<$> some (
argument str
( metavar "FILENAMES"
<> help "Output files"))
<*> switch
( long "append"
<> short 'a'
<> help "Append to output file rather than overwrite")
optsParserInfo :: ParserInfo Options
optsParserInfo = info (helper <*> optsParser)
( fullDesc
<> progDesc "A bad clone of tee"
<> header "tee - a bad clone of the real tee")
|
fa2ab796a614d076ce7f097a2b671ddac57a66bbb6511a1dd4f763e526927513
|
bmeurer/ocaml-experimental
|
builtini_GetCursor.ml
|
##ifdef CAMLTK
(* CamlTk API: convert a [color] value to the token Tcl/Tk expects. *)
let cCAMLtoTKcolor = function
    NamedColor x -> TkToken x
  | Black -> TkToken "black"
  | White -> TkToken "white"
  | Red -> TkToken "red"
  | Green -> TkToken "green"
  | Blue -> TkToken "blue"
  | Yellow -> TkToken "yellow"
;;
(* Tk reports colors back as plain strings; keep them symbolic Caml-side. *)
let cTKtoCAMLcolor = function s -> NamedColor s
;;
(* Convert a [cursor] specification to Tk syntax.  File-based cursors are
   prefixed with "@" as Tk requires; optional fore/background colors are
   appended inside a quoted token list.
   NOTE(review): [XCursortFgBg] looks like a typo for "XCursorFgBg", but
   it is part of the published variant interface — confirm before renaming. *)
let cCAMLtoTKcursor = function
    XCursor s -> TkToken s
  | XCursorFg (s,fg) ->
      TkQuote(TkTokenList [TkToken s; cCAMLtoTKcolor fg])
  | XCursortFgBg (s,fg,bg) ->
      TkQuote(TkTokenList [TkToken s; cCAMLtoTKcolor fg; cCAMLtoTKcolor bg])
  | CursorFileFg (s,fg) ->
      TkQuote(TkTokenList [TkToken ("@"^s); cCAMLtoTKcolor fg])
  | CursorMaskFile (s,m,fg,bg) ->
      TkQuote(TkTokenList [TkToken ("@"^s); TkToken m; cCAMLtoTKcolor fg; cCAMLtoTKcolor bg])
;;
##else
(* LablTk API: the same conversions over polymorphic variants. *)
let cCAMLtoTKcolor : color -> tkArgs = function
  | `Color x -> TkToken x
  | `Black -> TkToken "black"
  | `White -> TkToken "white"
  | `Red -> TkToken "red"
  | `Green -> TkToken "green"
  | `Blue -> TkToken "blue"
  | `Yellow -> TkToken "yellow"
;;
let cTKtoCAMLcolor = function s -> `Color s
;;
(* LablTk counterpart of the cursor conversion above. *)
let cCAMLtoTKcursor : cursor -> tkArgs = function
  | `Xcursor s -> TkToken s
  | `Xcursorfg (s,fg) ->
      TkQuote(TkTokenList [TkToken s; cCAMLtoTKcolor fg])
  | `Xcursorfgbg (s,fg,bg) ->
      TkQuote(TkTokenList [TkToken s; cCAMLtoTKcolor fg; cCAMLtoTKcolor bg])
  | `Cursorfilefg (s,fg) ->
      TkQuote(TkTokenList [TkToken ("@"^s); cCAMLtoTKcolor fg])
  | `Cursormaskfile (s,m,fg,bg) ->
      TkQuote(TkTokenList [TkToken ("@"^s); TkToken m; cCAMLtoTKcolor fg; cCAMLtoTKcolor bg])
;;
##endif
| null |
https://raw.githubusercontent.com/bmeurer/ocaml-experimental/fe5c10cdb0499e43af4b08f35a3248e5c1a8b541/otherlibs/labltk/builtin/builtini_GetCursor.ml
|
ocaml
|
##ifdef CAMLTK
let cCAMLtoTKcolor = function
NamedColor x -> TkToken x
| Black -> TkToken "black"
| White -> TkToken "white"
| Red -> TkToken "red"
| Green -> TkToken "green"
| Blue -> TkToken "blue"
| Yellow -> TkToken "yellow"
;;
let cTKtoCAMLcolor = function s -> NamedColor s
;;
let cCAMLtoTKcursor = function
XCursor s -> TkToken s
| XCursorFg (s,fg) ->
TkQuote(TkTokenList [TkToken s; cCAMLtoTKcolor fg])
| XCursortFgBg (s,fg,bg) ->
TkQuote(TkTokenList [TkToken s; cCAMLtoTKcolor fg; cCAMLtoTKcolor bg])
| CursorFileFg (s,fg) ->
TkQuote(TkTokenList [TkToken ("@"^s); cCAMLtoTKcolor fg])
| CursorMaskFile (s,m,fg,bg) ->
TkQuote(TkTokenList [TkToken ("@"^s); TkToken m; cCAMLtoTKcolor fg; cCAMLtoTKcolor bg])
;;
##else
let cCAMLtoTKcolor : color -> tkArgs = function
| `Color x -> TkToken x
| `Black -> TkToken "black"
| `White -> TkToken "white"
| `Red -> TkToken "red"
| `Green -> TkToken "green"
| `Blue -> TkToken "blue"
| `Yellow -> TkToken "yellow"
;;
let cTKtoCAMLcolor = function s -> `Color s
;;
let cCAMLtoTKcursor : cursor -> tkArgs = function
| `Xcursor s -> TkToken s
| `Xcursorfg (s,fg) ->
TkQuote(TkTokenList [TkToken s; cCAMLtoTKcolor fg])
| `Xcursorfgbg (s,fg,bg) ->
TkQuote(TkTokenList [TkToken s; cCAMLtoTKcolor fg; cCAMLtoTKcolor bg])
| `Cursorfilefg (s,fg) ->
TkQuote(TkTokenList [TkToken ("@"^s); cCAMLtoTKcolor fg])
| `Cursormaskfile (s,m,fg,bg) ->
TkQuote(TkTokenList [TkToken ("@"^s); TkToken m; cCAMLtoTKcolor fg; cCAMLtoTKcolor bg])
;;
##endif
|
|
ca9c95cc9f8c745751740c73241d69e80edf7b4c2a18b99f561f030508207aab
|
ertugrulcetin/procedure.async
|
common.cljc
|
(ns favorite-songs.common
(:require #?@(:cljs [[re-frame.core :refer [subscribe]]
[favorite-songs.network :refer [dispatch-pro]]
[favorite-songs.subs]]
:clj [[procedure.async :refer [reg-pro register-validation-fn!]]
[malli.core :as m]
[malli.error :as me]])))
;; Static demo table mapping a person's name to their numeric id.
(def person-name->person-id {"Michael" 1
                             "Pam" 2
                             "Oscar" 3
                             "Dwight" 4})
;; Static demo table mapping a song id to its display title.
(def song-id->title {22 "Tiny Dancer by Elton John"
                     33 "Drop It Like It's Hot by Snoop Dogg"
                     44 "Everybody Hurts by REM"
                     55 "Mambo No. 5 by Lou Bega"
                     66 "Use It by The New Pornographers"
                     77 "Sing by Travis"
                     88 "Ring Around the Rosies - Sung"
                     99 "Here I Go Again by Whitesnake"})
;; Static demo table mapping a person id to the ids of their favorite songs.
(def person-id->favorite-song-ids
  {1 [22 33]
   2 [44 55]
   3 [66 77]
   4 [88 99]})
#?(:clj
   (do
     ;; Install the Malli-based validator used by procedure.async:
     ;; returns true on success, otherwise humanized explain data.
     (register-validation-fn!
      (fn [schema data]
        (or (m/validate schema data)
            (me/humanize (m/explain schema data)))))
     ;; Each procedure below simulates a slow table fetch (Thread/sleep).
     (reg-pro
      :get-person-name->person-id-table
      (fn [_]
        (println "Fetching person-name->person-id table...")
        (Thread/sleep 10)
        person-name->person-id))
     (reg-pro
      :get-song-id->title-table
      (fn [_]
        (println "Fetching song-id->title table...")
        (Thread/sleep 30)
        song-id->title))
     (reg-pro
      :get-person-id->favorite-song-ids-table
      (fn [_]
        (println "Fetching person-id->favorite-song-ids table...")
        (Thread/sleep 100)
        person-id->favorite-song-ids))
     ;; Composite procedure: depends on the three table procedures above.
     ;; Their results arrive positionally in the handler's argument vector,
     ;; followed by the request map ({:keys [req socket data]}).
     (reg-pro
      :get-favorite-songs-by-person
      [:get-person-name->person-id-table
       :get-song-id->title-table
       :get-person-id->favorite-song-ids-table]
      ;; Malli schemas: payload carries a string person name; the response
      ;; is a vector of song-title strings.
      {:data [:map
              [:data string?]]
       :response [:map
                  [:songs [:vector string?]]]}
      (fn [[person-name->person-id-table
            song-id->title-table
            person-id->favorite-song-ids-table
            {:keys [req socket data]}]]
        (println "Payload is: " data)
        (let [person-name (:data data)
              person-id (get person-name->person-id-table person-name)
              liked-songs-ids (get person-id->favorite-song-ids-table person-id)]
          ;; Resolve each liked song id to its title.
          {:songs (mapv #(get song-id->title-table %) liked-songs-ids)})))))
#?(:cljs
   ;; Minimal UI: a person picker that dispatches the composite procedure
   ;; on change and renders the subscribed result list.
   (defn favorite-songs []
     [:<>
      [:span "Please select a person: "]
      [:select
       {:on-change #(dispatch-pro [:get-favorite-songs-by-person (.-value (.-target %))])}
       [:option {:value "Michael"} "Michael"]
       [:option {:value "Pam"} "Pam"]
       [:option {:value "Oscar"} "Oscar"]
       [:option {:value "Dwight"} "Dwight"]]
      [:br]
      [:span "Favorite songs: " @(subscribe [:favorite-songs])]]))
| null |
https://raw.githubusercontent.com/ertugrulcetin/procedure.async/6c3c42342252934224e471344b8e118f64306b84/examples/favorite-songs/src/cljc/favorite_songs/common.cljc
|
clojure
|
(ns favorite-songs.common
(:require #?@(:cljs [[re-frame.core :refer [subscribe]]
[favorite-songs.network :refer [dispatch-pro]]
[favorite-songs.subs]]
:clj [[procedure.async :refer [reg-pro register-validation-fn!]]
[malli.core :as m]
[malli.error :as me]])))
(def person-name->person-id {"Michael" 1
"Pam" 2
"Oscar" 3
"Dwight" 4})
(def song-id->title {22 "Tiny Dancer by Elton John"
33 "Drop It Like It's Hot by Snoop Dogg"
44 "Everybody Hurts by REM"
55 "Mambo No. 5 by Lou Bega"
66 "Use It by The New Pornographers"
77 "Sing by Travis"
88 "Ring Around the Rosies - Sung"
99 "Here I Go Again by Whitesnake"})
(def person-id->favorite-song-ids
{1 [22 33]
2 [44 55]
3 [66 77]
4 [88 99]})
#?(:clj
(do
(register-validation-fn!
(fn [schema data]
(or (m/validate schema data)
(me/humanize (m/explain schema data)))))
(reg-pro
:get-person-name->person-id-table
(fn [_]
(println "Fetching person-name->person-id table...")
(Thread/sleep 10)
person-name->person-id))
(reg-pro
:get-song-id->title-table
(fn [_]
(println "Fetching song-id->title table...")
(Thread/sleep 30)
song-id->title))
(reg-pro
:get-person-id->favorite-song-ids-table
(fn [_]
(println "Fetching person-id->favorite-song-ids table...")
(Thread/sleep 100)
person-id->favorite-song-ids))
(reg-pro
:get-favorite-songs-by-person
[:get-person-name->person-id-table
:get-song-id->title-table
:get-person-id->favorite-song-ids-table]
{:data [:map
[:data string?]]
:response [:map
[:songs [:vector string?]]]}
(fn [[person-name->person-id-table
song-id->title-table
person-id->favorite-song-ids-table
{:keys [req socket data]}]]
(println "Payload is: " data)
(let [person-name (:data data)
person-id (get person-name->person-id-table person-name)
liked-songs-ids (get person-id->favorite-song-ids-table person-id)]
{:songs (mapv #(get song-id->title-table %) liked-songs-ids)})))))
#?(:cljs
(defn favorite-songs []
[:<>
[:span "Please select a person: "]
[:select
{:on-change #(dispatch-pro [:get-favorite-songs-by-person (.-value (.-target %))])}
[:option {:value "Michael"} "Michael"]
[:option {:value "Pam"} "Pam"]
[:option {:value "Oscar"} "Oscar"]
[:option {:value "Dwight"} "Dwight"]]
[:br]
[:span "Favorite songs: " @(subscribe [:favorite-songs])]]))
|
|
9e0741f7c36326149e27e26f3caf025d404f4981ea1412cfe14d152a6fc68fb9
|
brendanhay/gogol
|
Getsupportedholidays.hs
|
# LANGUAGE DataKinds #
# LANGUAGE DeriveGeneric #
# LANGUAGE DerivingStrategies #
# LANGUAGE DuplicateRecordFields #
# LANGUAGE FlexibleInstances #
# LANGUAGE GeneralizedNewtypeDeriving #
# LANGUAGE LambdaCase #
{-# LANGUAGE OverloadedStrings #-}
# LANGUAGE PatternSynonyms #
# LANGUAGE RecordWildCards #
{-# LANGUAGE StrictData #-}
# LANGUAGE TypeFamilies #
# LANGUAGE TypeOperators #
# LANGUAGE NoImplicitPrelude #
# OPTIONS_GHC -fno - warn - duplicate - exports #
# OPTIONS_GHC -fno - warn - name - shadowing #
# OPTIONS_GHC -fno - warn - unused - binds #
# OPTIONS_GHC -fno - warn - unused - imports #
# OPTIONS_GHC -fno - warn - unused - matches #
-- |
Module : . ShoppingContent . Content . . Getsupportedholidays
Copyright : ( c ) 2015 - 2022
License : Mozilla Public License , v. 2.0 .
Maintainer : < brendan.g.hay+ >
-- Stability : auto-generated
Portability : non - portable ( GHC extensions )
--
-- Retrieves supported holidays for an account.
--
/See:/ < API for Shopping Reference > for @content.shippingsettings.getsupportedholidays@.
module Gogol.ShoppingContent.Content.Shippingsettings.Getsupportedholidays
( -- * Resource
ContentShippingsettingsGetsupportedholidaysResource,
-- ** Constructing a Request
ContentShippingsettingsGetsupportedholidays (..),
newContentShippingsettingsGetsupportedholidays,
)
where
import qualified Gogol.Prelude as Core
import Gogol.ShoppingContent.Types
| A resource alias for @content.shippingsettings.getsupportedholidays@ method which the
-- 'ContentShippingsettingsGetsupportedholidays' request conforms to.
type ContentShippingsettingsGetsupportedholidaysResource =
"content"
Core.:> "v2.1"
Core.:> Core.Capture "merchantId" Core.Word64
Core.:> "supportedHolidays"
Core.:> Core.QueryParam "$.xgafv" Xgafv
Core.:> Core.QueryParam "access_token" Core.Text
Core.:> Core.QueryParam "callback" Core.Text
Core.:> Core.QueryParam "uploadType" Core.Text
Core.:> Core.QueryParam "upload_protocol" Core.Text
Core.:> Core.QueryParam "alt" Core.AltJSON
Core.:> Core.Get
'[Core.JSON]
ShippingsettingsGetSupportedHolidaysResponse
-- | Retrieves supported holidays for an account.
--
-- /See:/ 'newContentShippingsettingsGetsupportedholidays' smart constructor.
data ContentShippingsettingsGetsupportedholidays = ContentShippingsettingsGetsupportedholidays
{ -- | V1 error format.
xgafv :: (Core.Maybe Xgafv),
-- | OAuth access token.
accessToken :: (Core.Maybe Core.Text),
| JSONP
callback :: (Core.Maybe Core.Text),
-- | The ID of the account for which to retrieve the supported holidays.
merchantId :: Core.Word64,
| Legacy upload protocol for media ( e.g. \"media\ " , \"multipart\ " ) .
uploadType :: (Core.Maybe Core.Text),
-- | Upload protocol for media (e.g. \"raw\", \"multipart\").
uploadProtocol :: (Core.Maybe Core.Text)
}
deriving (Core.Eq, Core.Show, Core.Generic)
-- | Creates a value of 'ContentShippingsettingsGetsupportedholidays' with the minimum fields required to make a request.
newContentShippingsettingsGetsupportedholidays ::
-- | The ID of the account for which to retrieve the supported holidays. See 'merchantId'.
Core.Word64 ->
ContentShippingsettingsGetsupportedholidays
newContentShippingsettingsGetsupportedholidays merchantId =
ContentShippingsettingsGetsupportedholidays
{ xgafv = Core.Nothing,
accessToken = Core.Nothing,
callback = Core.Nothing,
merchantId = merchantId,
uploadType = Core.Nothing,
uploadProtocol = Core.Nothing
}
instance
Core.GoogleRequest
ContentShippingsettingsGetsupportedholidays
where
type
Rs ContentShippingsettingsGetsupportedholidays =
ShippingsettingsGetSupportedHolidaysResponse
type
Scopes
ContentShippingsettingsGetsupportedholidays =
'[Content'FullControl]
requestClient
ContentShippingsettingsGetsupportedholidays {..} =
go
merchantId
xgafv
accessToken
callback
uploadType
uploadProtocol
(Core.Just Core.AltJSON)
shoppingContentService
where
go =
Core.buildClient
( Core.Proxy ::
Core.Proxy
ContentShippingsettingsGetsupportedholidaysResource
)
Core.mempty
| null |
https://raw.githubusercontent.com/brendanhay/gogol/fffd4d98a1996d0ffd4cf64545c5e8af9c976cda/lib/services/gogol-shopping-content/gen/Gogol/ShoppingContent/Content/Shippingsettings/Getsupportedholidays.hs
|
haskell
|
# LANGUAGE OverloadedStrings #
# LANGUAGE StrictData #
|
Stability : auto-generated
Retrieves supported holidays for an account.
* Resource
** Constructing a Request
'ContentShippingsettingsGetsupportedholidays' request conforms to.
| Retrieves supported holidays for an account.
/See:/ 'newContentShippingsettingsGetsupportedholidays' smart constructor.
| V1 error format.
| OAuth access token.
| The ID of the account for which to retrieve the supported holidays.
| Upload protocol for media (e.g. \"raw\", \"multipart\").
| Creates a value of 'ContentShippingsettingsGetsupportedholidays' with the minimum fields required to make a request.
| The ID of the account for which to retrieve the supported holidays. See 'merchantId'.
|
# LANGUAGE DataKinds #
# LANGUAGE DeriveGeneric #
# LANGUAGE DerivingStrategies #
# LANGUAGE DuplicateRecordFields #
# LANGUAGE FlexibleInstances #
# LANGUAGE GeneralizedNewtypeDeriving #
# LANGUAGE LambdaCase #
# LANGUAGE PatternSynonyms #
# LANGUAGE RecordWildCards #
# LANGUAGE TypeFamilies #
# LANGUAGE TypeOperators #
# LANGUAGE NoImplicitPrelude #
# OPTIONS_GHC -fno - warn - duplicate - exports #
# OPTIONS_GHC -fno - warn - name - shadowing #
# OPTIONS_GHC -fno - warn - unused - binds #
# OPTIONS_GHC -fno - warn - unused - imports #
# OPTIONS_GHC -fno - warn - unused - matches #
Module : . ShoppingContent . Content . . Getsupportedholidays
Copyright : ( c ) 2015 - 2022
License : Mozilla Public License , v. 2.0 .
Maintainer : < brendan.g.hay+ >
Portability : non - portable ( GHC extensions )
/See:/ < API for Shopping Reference > for @content.shippingsettings.getsupportedholidays@.
module Gogol.ShoppingContent.Content.Shippingsettings.Getsupportedholidays
ContentShippingsettingsGetsupportedholidaysResource,
ContentShippingsettingsGetsupportedholidays (..),
newContentShippingsettingsGetsupportedholidays,
)
where
import qualified Gogol.Prelude as Core
import Gogol.ShoppingContent.Types
| A resource alias for @content.shippingsettings.getsupportedholidays@ method which the
type ContentShippingsettingsGetsupportedholidaysResource =
"content"
Core.:> "v2.1"
Core.:> Core.Capture "merchantId" Core.Word64
Core.:> "supportedHolidays"
Core.:> Core.QueryParam "$.xgafv" Xgafv
Core.:> Core.QueryParam "access_token" Core.Text
Core.:> Core.QueryParam "callback" Core.Text
Core.:> Core.QueryParam "uploadType" Core.Text
Core.:> Core.QueryParam "upload_protocol" Core.Text
Core.:> Core.QueryParam "alt" Core.AltJSON
Core.:> Core.Get
'[Core.JSON]
ShippingsettingsGetSupportedHolidaysResponse
data ContentShippingsettingsGetsupportedholidays = ContentShippingsettingsGetsupportedholidays
xgafv :: (Core.Maybe Xgafv),
accessToken :: (Core.Maybe Core.Text),
| JSONP
callback :: (Core.Maybe Core.Text),
merchantId :: Core.Word64,
| Legacy upload protocol for media ( e.g. \"media\ " , \"multipart\ " ) .
uploadType :: (Core.Maybe Core.Text),
uploadProtocol :: (Core.Maybe Core.Text)
}
deriving (Core.Eq, Core.Show, Core.Generic)
newContentShippingsettingsGetsupportedholidays ::
Core.Word64 ->
ContentShippingsettingsGetsupportedholidays
newContentShippingsettingsGetsupportedholidays merchantId =
ContentShippingsettingsGetsupportedholidays
{ xgafv = Core.Nothing,
accessToken = Core.Nothing,
callback = Core.Nothing,
merchantId = merchantId,
uploadType = Core.Nothing,
uploadProtocol = Core.Nothing
}
instance
Core.GoogleRequest
ContentShippingsettingsGetsupportedholidays
where
type
Rs ContentShippingsettingsGetsupportedholidays =
ShippingsettingsGetSupportedHolidaysResponse
type
Scopes
ContentShippingsettingsGetsupportedholidays =
'[Content'FullControl]
requestClient
ContentShippingsettingsGetsupportedholidays {..} =
go
merchantId
xgafv
accessToken
callback
uploadType
uploadProtocol
(Core.Just Core.AltJSON)
shoppingContentService
where
go =
Core.buildClient
( Core.Proxy ::
Core.Proxy
ContentShippingsettingsGetsupportedholidaysResource
)
Core.mempty
|
7aef190cceda830eafcf568e88ca7d85849421c522ef29ac172351dc875781dc
|
namin/biohacker
|
mlang.lisp
|
;; -*- Mode: Lisp; -*-
Modeling language for TGizmo
Last Edited : 11/7/91 , by KDF
Copyright ( c ) 1991 , , Northwestern University ,
and , the Xerox Corporation .
All Rights Reserved .
;;; See the file legal.txt for a paragraph stating scope of permission
;;; and disclaimer of warranty. The above copyright notice and that
;;; paragraph must be included in any separate copy of this file.
(in-package :COMMON-LISP-USER)
(defun keywordize (stuff)
  "Return the keyword named after STUFF; for a list, use its head.
Signals an error on NIL."
  (if (null stuff)
      (error "Can't keywordize nothing.")
      (if (listp stuff)
          (keywordize (car stuff))
          (intern (format nil "~A" stuff) 'keyword))))
;; Define a named rule: when every TRIGGER is interned in the LTMS,
;; assert that their conjunction implies the conjunction of
;; CONSEQUENCES, with the keywordized NAME as the informant.
(defmacro defrule (name triggers &rest consequences)
  `(rule ,(mapcar #'(lambda (trigger)
                      `(:INTERN ,trigger)) triggers)
     (rassert! (:IMPLIES (:AND ,@ triggers)
                         (:AND ,@ consequences))
               ,(keywordize name))))
;; Define a predicate: when FORM is interned, bind ?self to it and
;; install its translated CONSEQUENCES.
(defmacro defPredicate (form &rest consequences)
  `(rule ((:INTERN ,form))
     (rlet ((?self ,form))
       ,@ (translate-relations consequences :DEFPREDICATE
                               form (keywordize form)))))
;; Define an entity: like defPredicate, but additionally asserts that
;; the entity form implies the existence of the individual it names
;; (the second element of FORM).
(defmacro defentity (form &rest consequences)
  `(rule ((:INTERN ,form))
     (rlet ((?self ,form))
       (rassert! (:IMPLIES ,form (Exists ,(cadr form)))
                 :DEFENTITY)
       ,@ (translate-relations consequences :DEFENTITY
                               form (keywordize form)))))
;; Define a QP-theory view: parse the specification fields and expand
;; into the instantiation rules (process? = NIL).
(defmacro defview (form &rest stuff)
  (multiple-value-bind (ispec pcs qcs rels infs)
      (parse-vp form stuff nil)
    (debugging-tgizmo :DOMAIN-THEORY
                      "~% Defining view ~A.." form)
    (make-vp-rules form ispec pcs qcs rels infs nil)))
;; Define a QP-theory physical process: as defview, but influences are
;; required and process-existence rules are generated (process? = T).
(defmacro defprocess (form &rest stuff)
  (multiple-value-bind (ispec pcs qcs rels infs)
      (parse-vp form stuff t)
    (debugging-tgizmo :DOMAIN-THEORY
                      "~% Defining process ~A.." form)
    (make-vp-rules form ispec pcs qcs rels infs t)))
;;;; Working with views and processes
(defun parse-vp (form stuff process?)
  "Parse the field list STUFF of a defview/defprocess FORM.
Returns five values: the individuals spec, preconditions, quantity
conditions, relations, and influences.  Signals an error if required
fields are missing, if influences appear in (or are missing from) the
wrong construct, or if any field mentions a variable not bound by the
individuals spec."
  ;; Does some extra syntactic checks
  (let ((ispec (cadr (member :INDIVIDUALS stuff)))
        (pcs (cadr (member :PRECONDITIONS stuff)))
        (qcs (cadr (member :QUANTITY-CONDITIONS stuff)))
        (rels (cadr (member :RELATIONS stuff)))
        (infs (cadr (member :INFLUENCES stuff))))
    (unless ispec (error "~A must have :INDIVIDUALS field: ~A"
                         (if process? "defprocess" "defview")
                         form))
    (unless (or pcs qcs)
      (error "~A must have :PRECONDITIONS or :QUANTITY-CONDITIONS: ~A"
             (if process? "defprocess" "defview") form))
    ;; Influences are mandatory for processes and forbidden for views.
    (cond (process?
           (unless infs (error "Physical processes must have influences: ~A" form)))
          (infs (error "Views cannot have influences: ~A" form)))
    ;;; Make sure no dangling variables
    (let ((*bound-vars* (cons '?self (pattern-free-variables ispec)))
          (floating nil))
      (when (setq floating (pattern-free-variables pcs))
        (error "Can't have free variable(s) ~A in preconditions: ~A"
               floating form))
      (when (setq floating (pattern-free-variables qcs))
        (error "Can't have free variable(s) ~A in quantity conditions: ~A"
               floating form))
      (when (setq floating (pattern-free-variables rels))
        (error "Can't have free variable(s) ~A in relations: ~A"
               floating form))
      (if process?
          (when (setq floating (pattern-free-variables infs))
            (error "Can't have free variable(s) ~A in influences : ~A"
                   floating form))))
    (values ispec pcs qcs rels infs)))
;;;; Finding and instantiating views and processes
(defun make-vp-rules (form ispec pcs qcs rels infs process?)
  "Expand a parsed view/process specification FORM into the rule that
finds and instantiates it.  ANTES collects the individual-spec
conditions; IS the individuals themselves."
  (let ((antes (apply #'append
                      (mapcar #'cdr ispec)))
        (is (mapcar #'car ispec)))
    `(rule ,(mapcar #'(lambda (ante)
                        `(:INTERN ,ante))
                    antes)
       (rlet ((?self ,form))
         (debugging-tgizmo :MODELING "~% Found ~A: ~A."
                           ,(if process? "process" "view")
                           ?self)
         ;; The ispecs imply the process instance
         (rassert! (:IMPLIES (:AND ,@ antes)
                             (,(if process? 'Process-Instance
                                   'View-Instance) ,form))
                   :CDI-IMPLIED)
         ;; The existence of the individuals implies
         ;; the existence of the process.
         ,@ (when process?
              `((rassert! (:IMPLIES (:AND ,@ (mapcar #'(lambda (i)
                                                         `(Exists ,i)) is))
                                    (Exists ,form))
                          :PROCESS-EXISTENCE)
                (rassert! (:IMPLIES (Active ,form) (Exists ,form)) :NO-GHOSTS)))
         ;; Active iff pc's and qc's hold
         (rassert! (:IFF (Active ,form)
                         (:AND ,@ pcs
                               ,@ qcs))
                   :CDI-ACTIVE-CONSTRAINT)
         ;; If active, the relations hold
         ,@ (when rels
              (translate-relations rels (if process? :PROCESS :VIEW)
                                   '(Active ?self) (keywordize form)))
         ;; If active process, influences hold
         ,@ (when infs
              (translate-relations infs (if process? :PROCESS :VIEW)
                                   '(Active ?self) (keywordize form)))))))
;;;; Parsing contents of relations fields
;;; In an ``industrial-grade'' QP theory implementation,
;;; there is typically alot more hair here. We'll do
;;; the minimum.
(defun translate-relations (conseqs context antes informant)
  "Translate the :RELATIONS/:INFLUENCES forms CONSEQS.  Explicit
consequences are grouped into a single (rassert! (:IMPLIES ANTES ...))
tagged with INFORMANT; implicit consequences are emitted as standalone
forms produced by translate-relation."
  (let ((explicit nil)
        (implicit nil))
    (dolist (c conseqs)
      (multiple-value-bind (e i)
          (translate-relation c context antes informant)
        (setq explicit (nconc e explicit))
        (setq implicit (nconc i implicit))))
    `(,@ (when explicit `((rassert! (:IMPLIES ,antes (:AND ,@ explicit))
                                    ,informant)))
      ,@ implicit)))
(defun translate-relation (form context antes informant)
  "Translate one relation FORM appearing in CONTEXT with antecedents
ANTES.  Returns two values: a list of explicit consequences (asserted
together by the caller) and a list of implicit rassert! forms."
  (cond ((not (listp form)) (values (list form) nil))
        (t (case (car form)
             ;; ONLY-DURING indicates that form holds exactly when cdi does.
             (ONLY-DURING
              (values nil
                      `((rassert! (:IFF ,antes ,(cadr form)) ,informant))))
             ;; Quantities local to a cdi only exist when it is active.
             (QUANTITY (if (member context '(:PROCESS :VIEW))
                           (values nil
                                   `((rassert! (:IFF ,antes ,form) ,informant)))
                           (values (list form) nil)))
             ;; Direct influences are only legal inside processes.
             ;; BUG FIX: the informant was nested inside the :IFF form
             ;; instead of being passed as rassert!'s informant argument
             ;; (compare the Qprop clause below).
             ((I+ I-) (unless (eq context :PROCESS)
                        (error "Can't have direct influence in ~A: ~A"
                               context antes))
                      (values nil `((rassert! (:IFF ,antes ,(append form (list '?SELF)))
                                              ,informant))))
             ((Qprop Qprop-)
              (values nil `((rassert! (:IFF ,antes ,(append form (list '?SELF)))
                                      ,informant))))
             (t (values (list form) nil))))))
| null |
https://raw.githubusercontent.com/namin/biohacker/6b5da4c51c9caa6b5e1a68b046af171708d1af64/BPS/tgizmo/mlang.lisp
|
lisp
|
-*- Mode: Lisp; -*-
See the file legal.txt for a paragraph stating scope of permission
and disclaimer of warranty. The above copyright notice and that
paragraph must be included in any separate copy of this file.
Working with views and processes
Does some extra syntactic checks
Make sure no dangling variables
Finding and instantiating views and processes
The existence of the individuals implies
the existence of the process.
If active, the relations hold
If active process, influences hold
Parsing contents of relations fields
In an ``industrial-grade'' QP theory implementation,
there is typically alot more hair here. We'll do
the minimum.
Quantities local to a cdi only exist when it is active.
|
Modeling language for TGizmo
Last Edited : 11/7/91 , by KDF
Copyright ( c ) 1991 , , Northwestern University ,
and , the Xerox Corporation .
All Rights Reserved .
(in-package :COMMON-LISP-USER)
(defun keywordize (stuff)
(cond ((null stuff) (error "Can't keywordize nothing."))
((listp stuff) (keywordize (car stuff)))
(t (intern (format nil "~A" stuff) 'keyword))))
(defmacro defrule (name triggers &rest consequences)
`(rule , (mapcar #'(lambda (trigger)
`(:INTERN ,trigger)) triggers)
(rassert! (:IMPLIES (:AND ,@ triggers)
(:AND ,@ consequences))
,(keywordize name))))
(defmacro defPredicate (form &rest consequences)
`(rule ((:INTERN ,form))
(rlet ((?self ,form))
,@ (translate-relations consequences :DEFPREDICATE
form (keywordize form)))))
(defmacro defentity (form &rest consequences)
`(rule ((:INTERN ,form))
(rlet ((?self ,form))
(rassert! (:IMPLIES ,form (Exists ,(cadr form)))
:DEFENTITY)
,@ (translate-relations consequences :DEFENTITY
form (keywordize form)))))
(defmacro defview (form &rest stuff)
(multiple-value-bind (ispec pcs qcs rels infs)
(parse-vp form stuff nil)
(debugging-tgizmo :DOMAIN-THEORY
"~% Defining view ~A.." form)
(make-vp-rules form ispec pcs qcs rels infs nil)))
(defmacro defprocess (form &rest stuff)
(multiple-value-bind (ispec pcs qcs rels infs)
(parse-vp form stuff t)
(debugging-tgizmo :DOMAIN-THEORY
"~% Defining process ~A.." form)
(make-vp-rules form ispec pcs qcs rels infs t)))
(defun parse-vp (form stuff process?)
(let ((ispec (cadr (member :INDIVIDUALS stuff)))
(pcs (cadr (member :PRECONDITIONS stuff)))
(qcs (cadr (member :QUANTITY-CONDITIONS stuff)))
(rels (cadr (member :RELATIONS stuff)))
(infs (cadr (member :INFLUENCES stuff))))
(unless ispec (error "~A must have :INDIVIDUALS field: ~A"
(if process? "defprocess" "defview")
form))
(unless (or pcs qcs)
(error "~A must have :PRECONDITIONS or :QUANTITY-CONDITIONS: ~A"
(if process? "defprocess" "defview") form))
(cond (process?
(unless infs (error "Physical processes must have influences: ~A" form)))
(infs (error "Views cannot have influences: ~A" form)))
(let ((*bound-vars* (cons '?self (pattern-free-variables ispec)))
(floating nil))
(when (setq floating (pattern-free-variables pcs))
(error "Can't have free variable(s) ~A in preconditions: ~A"
floating form))
(when (setq floating (pattern-free-variables qcs))
(error "Can't have free variable(s) ~A in quantity conditions: ~A"
floating form))
(when (setq floating (pattern-free-variables rels))
(error "Can't have free variable(s) ~A in relations: ~A"
floating form))
(if process?
(when (setq floating (pattern-free-variables infs))
(error "Can't have free variable(s) ~A in influences : ~A"
floating form))))
(values ispec pcs qcs rels infs)))
(defun make-vp-rules (form ispec pcs qcs rels infs process?)
(let ((antes (apply #'append
(mapcar #'cdr ispec)))
(is (mapcar #'car ispec)))
`(rule ,(mapcar #'(lambda (ante)
`(:INTERN ,ante))
antes)
(rlet ((?self ,form))
(debugging-tgizmo :MODELING "~% Found ~A: ~A."
,(if process? "process" "view")
?self)
The ispecs imply the process instance
(rassert! (:IMPLIES (:AND ,@ antes)
(,(if process? 'Process-Instance
'View-Instance) ,form))
:CDI-IMPLIED)
,@ (when process?
`((rassert! (:IMPLIES (:AND ,@ (mapcar #'(lambda (i)
`(Exists ,i)) is))
(Exists ,form))
:PROCESS-EXISTENCE)
(rassert! (:IMPLIES (Active ,form) (Exists ,form)) :NO-GHOSTS)))
Active iff pc 's and qc 's hold
(rassert! (:IFF (Active ,form)
(:AND ,@ pcs
,@ qcs))
:CDI-ACTIVE-CONSTRAINT)
,@ (when rels
(translate-relations rels (if process? :PROCESS :VIEW)
'(Active ?self) (keywordize form)))
,@ (when infs
(translate-relations infs (if process? :PROCESS :VIEW)
'(Active ?self) (keywordize form)))))))
(defun translate-relations (conseqs context antes informant)
(let ((explicit nil)
(implicit nil))
(dolist (c conseqs)
(multiple-value-bind (e i)
(translate-relation c context antes informant)
(setq explicit (nconc e explicit))
(setq implicit (nconc i implicit))))
`(,@ (when explicit `((rassert! (:IMPLIES ,antes (:AND ,@ explicit))
,informant)))
,@ implicit)))
(defun translate-relation (form context antes informant)
  "Translate one relation FORM appearing in CONTEXT with antecedents
ANTES.  Returns two values: a list of explicit consequences (asserted
together by the caller) and a list of implicit rassert! forms."
  (cond ((not (listp form)) (values (list form) nil))
        (t (case (car form)
             ;; ONLY-DURING indicates that form holds exactly when cdi does.
             (ONLY-DURING
              (values nil
                      `((rassert! (:IFF ,antes ,(cadr form)) ,informant))))
             ;; Quantities local to a cdi only exist when it is active.
             (QUANTITY (if (member context '(:PROCESS :VIEW))
                           (values nil
                                   `((rassert! (:IFF ,antes ,form) ,informant)))
                           (values (list form) nil)))
             ;; Direct influences are only legal inside processes.
             ;; BUG FIX: the informant was nested inside the :IFF form
             ;; instead of being passed as rassert!'s informant argument
             ;; (compare the Qprop clause below).
             ((I+ I-) (unless (eq context :PROCESS)
                        (error "Can't have direct influence in ~A: ~A"
                               context antes))
                      (values nil `((rassert! (:IFF ,antes ,(append form (list '?SELF)))
                                              ,informant))))
             ((Qprop Qprop-)
              (values nil `((rassert! (:IFF ,antes ,(append form (list '?SELF)))
                                      ,informant))))
             (t (values (list form) nil))))))
|
c0d5096363e96f365ee3a95d3a62d6be072a446c7eb2c6c2d61aa8631ad5d8d2
|
janestreet/core
|
gc.ml
|
open! Import
module Stable = struct
(* Stable (serialization-pinned) copy of the GC allocation-policy
   choice; kept separate from [Stdlib.Gc] so the wire format cannot
   drift with compiler versions. *)
module Allocation_policy = struct
  module V1 = struct
    type t =
      | Next_fit
      | First_fit
      | Best_fit
    [@@deriving bin_io, compare, equal, hash, sexp, stable_witness]
  end
end
module Stat = struct
[%%if ocaml_version < (4, 12, 0)]
module V1 = struct
type t = Stdlib.Gc.stat =
{ minor_words : float
; promoted_words : float
; major_words : float
; minor_collections : int
; major_collections : int
; heap_words : int
; heap_chunks : int
; live_words : int
; live_blocks : int
; free_words : int
; free_blocks : int
; largest_free : int
; fragments : int
; compactions : int
; top_heap_words : int
; stack_size : int
}
[@@deriving bin_io, compare, equal, hash, sexp, stable_witness]
end
module V2 = struct
type t =
{ minor_words : float
; promoted_words : float
; major_words : float
; minor_collections : int
; major_collections : int
; heap_words : int
; heap_chunks : int
; live_words : int
; live_blocks : int
; free_words : int
; free_blocks : int
; largest_free : int
; fragments : int
; compactions : int
; top_heap_words : int
; stack_size : int
; forced_major_collections : int
}
[@@deriving bin_io, compare, equal, hash, sexp, stable_witness]
end
[%%else]
module V1 = struct
type t =
{ minor_words : float
; promoted_words : float
; major_words : float
; minor_collections : int
; major_collections : int
; heap_words : int
; heap_chunks : int
; live_words : int
; live_blocks : int
; free_words : int
; free_blocks : int
; largest_free : int
; fragments : int
; compactions : int
; top_heap_words : int
; stack_size : int
}
[@@deriving bin_io, compare, equal, hash, sexp, stable_witness]
end
module V2 = struct
type t = Stdlib.Gc.stat =
{ minor_words : float
; promoted_words : float
; major_words : float
; minor_collections : int
; major_collections : int
; heap_words : int
; heap_chunks : int
; live_words : int
; live_blocks : int
; free_words : int
; free_blocks : int
; largest_free : int
; fragments : int
; compactions : int
; top_heap_words : int
; stack_size : int
; forced_major_collections : int
}
[@@deriving bin_io, compare, equal, hash, sexp, stable_witness]
end
[%%endif]
end
module Control = struct
module V1 = struct
[@@@ocaml.warning "-3"]
type t = Stdlib.Gc.control =
{ mutable minor_heap_size : int
; mutable major_heap_increment : int
; mutable space_overhead : int
; mutable verbose : int
; mutable max_overhead : int
; mutable stack_limit : int
; mutable allocation_policy : int
; window_size : int
; custom_major_ratio : int
; custom_minor_ratio : int
; custom_minor_max_size : int
}
[@@deriving bin_io, compare, equal, sexp, stable_witness]
end
end
end
include Stdlib.Gc
module Stat = struct
module T = struct
[%%if ocaml_version < (4, 12, 0)]
type t = Stdlib.Gc.stat =
{ minor_words : float
; promoted_words : float
; major_words : float
; minor_collections : int
; major_collections : int
; heap_words : int
; heap_chunks : int
; live_words : int
; live_blocks : int
; free_words : int
; free_blocks : int
; largest_free : int
; fragments : int
; compactions : int
; top_heap_words : int
; stack_size : int
}
[@@deriving compare, hash, bin_io, sexp, fields]
[%%else]
type t = Stdlib.Gc.stat =
{ minor_words : float
; promoted_words : float
; major_words : float
; minor_collections : int
; major_collections : int
; heap_words : int
; heap_chunks : int
; live_words : int
; live_blocks : int
; free_words : int
; free_blocks : int
; largest_free : int
; fragments : int
; compactions : int
; top_heap_words : int
; stack_size : int
; forced_major_collections : int
}
[@@deriving compare, hash, sexp_of, fields]
[%%endif]
end
include T
include Comparable.Make_plain (T)
[%%if ocaml_version < (4, 12, 0)]
let combine first second ~float_f ~int_f =
{ minor_words = float_f first.minor_words second.minor_words
; promoted_words = float_f first.promoted_words second.promoted_words
; major_words = float_f first.major_words second.major_words
; minor_collections = int_f first.minor_collections second.minor_collections
; major_collections = int_f first.major_collections second.major_collections
; heap_words = int_f first.heap_words second.heap_words
; heap_chunks = int_f first.heap_chunks second.heap_chunks
; live_words = int_f first.live_words second.live_words
; live_blocks = int_f first.live_blocks second.live_blocks
; free_words = int_f first.free_words second.free_words
; free_blocks = int_f first.free_blocks second.free_blocks
; largest_free = int_f first.largest_free second.largest_free
; fragments = int_f first.fragments second.fragments
; compactions = int_f first.compactions second.compactions
; top_heap_words = int_f first.top_heap_words second.top_heap_words
; stack_size = int_f first.stack_size second.stack_size
}
;;
[%%else]
let combine first second ~float_f ~int_f =
{ minor_words = float_f first.minor_words second.minor_words
; promoted_words = float_f first.promoted_words second.promoted_words
; major_words = float_f first.major_words second.major_words
; minor_collections = int_f first.minor_collections second.minor_collections
; major_collections = int_f first.major_collections second.major_collections
; heap_words = int_f first.heap_words second.heap_words
; heap_chunks = int_f first.heap_chunks second.heap_chunks
; live_words = int_f first.live_words second.live_words
; live_blocks = int_f first.live_blocks second.live_blocks
; free_words = int_f first.free_words second.free_words
; free_blocks = int_f first.free_blocks second.free_blocks
; largest_free = int_f first.largest_free second.largest_free
; fragments = int_f first.fragments second.fragments
; compactions = int_f first.compactions second.compactions
; top_heap_words = int_f first.top_heap_words second.top_heap_words
; stack_size = int_f first.stack_size second.stack_size
; forced_major_collections =
int_f first.forced_major_collections second.forced_major_collections
}
;;
[%%endif]
let add = combine ~float_f:Float.( + ) ~int_f:Int.( + )
let diff = combine ~float_f:Float.( - ) ~int_f:Int.( - )
end
module Control = struct
module T = struct
[@@@ocaml.warning "-3"]
type t = Stdlib.Gc.control =
{ mutable minor_heap_size : int
; mutable major_heap_increment : int
; mutable space_overhead : int
; mutable verbose : int
; mutable max_overhead : int
; mutable stack_limit : int
; mutable allocation_policy : int
; window_size : int
; custom_major_ratio : int
; custom_minor_ratio : int
; custom_minor_max_size : int
}
[@@deriving compare, sexp_of, fields]
end
include T
include Comparable.Make_plain (T)
end
module Allocation_policy = struct
type t = Stable.Allocation_policy.V1.t =
| Next_fit
| First_fit
| Best_fit
[@@deriving compare, equal, hash, sexp_of]
let to_int = function
| Next_fit -> 0
| First_fit -> 1
| Best_fit -> 2
;;
end
let tune
?logger
?minor_heap_size
?major_heap_increment
?space_overhead
?verbose
?max_overhead
?stack_limit
?allocation_policy
?window_size
?custom_major_ratio
?custom_minor_ratio
?custom_minor_max_size
()
=
let old_control_params = get () in
let f opt to_string field =
let old_value = Field.get field old_control_params in
match opt with
| None -> old_value
| Some new_value ->
Option.iter logger ~f:(fun f ->
Printf.ksprintf
f
"Gc.Control.%s: %s -> %s"
(Field.name field)
(to_string old_value)
(to_string new_value));
new_value
in
let allocation_policy = Option.map allocation_policy ~f:Allocation_policy.to_int in
let new_control_params =
Control.Fields.map
~minor_heap_size:(f minor_heap_size string_of_int)
~major_heap_increment:(f major_heap_increment string_of_int)
~space_overhead:(f space_overhead string_of_int)
~verbose:(f verbose string_of_int)
~max_overhead:(f max_overhead string_of_int)
~stack_limit:(f stack_limit string_of_int)
~allocation_policy:(f allocation_policy string_of_int)
~window_size:(f window_size string_of_int)
~custom_major_ratio:(f custom_major_ratio string_of_int)
~custom_minor_ratio:(f custom_minor_ratio string_of_int)
~custom_minor_max_size:(f custom_minor_max_size string_of_int)
in
set new_control_params
;;
let disable_compaction ?logger ~allocation_policy () =
let allocation_policy =
match allocation_policy with
| `Don't_change -> None
| `Set_to policy -> Some policy
in
The value 1_000_000 , according to
-ocaml-4.02/libref/Gc.html
will disable .
-ocaml-4.02/libref/Gc.html
will disable compactions.
*)
tune ?logger ?allocation_policy ~max_overhead:1_000_000 ()
;;
external minor_words : unit -> int = "core_gc_minor_words"
external major_words : unit -> int = "core_gc_major_words" [@@noalloc]
external promoted_words : unit -> int = "core_gc_promoted_words" [@@noalloc]
external minor_collections : unit -> int = "core_gc_minor_collections" [@@noalloc]
external major_collections : unit -> int = "core_gc_major_collections" [@@noalloc]
external heap_words : unit -> int = "core_gc_heap_words" [@@noalloc]
external heap_chunks : unit -> int = "core_gc_heap_chunks" [@@noalloc]
external compactions : unit -> int = "core_gc_compactions" [@@noalloc]
external top_heap_words : unit -> int = "core_gc_top_heap_words" [@@noalloc]
external major_plus_minor_words : unit -> int = "core_gc_major_plus_minor_words"
external allocated_words : unit -> int = "core_gc_allocated_words"
external run_memprof_callbacks : unit -> unit = "core_gc_run_memprof_callbacks"
let stat_size_lazy =
lazy (Obj.reachable_words (Obj.repr (Stdlib.Gc.quick_stat () : Stat.t)))
;;
let stat_size () = Lazy.force stat_size_lazy
let zero = Sys.opaque_identity (int_of_string "0")
(* The compiler won't optimize int_of_string away so it won't
perform constant folding below. *)
let rec keep_alive o = if zero <> 0 then keep_alive (Sys.opaque_identity o)
module For_testing = struct
We disable inlining for this function so the GC stats and the call to [ f ] are never
rearranged .
rearranged. *)
let[@cold] measure_internal ~on_result f =
let minor_words_before = minor_words () in
let major_words_before = major_words () in
(* We wrap [f ()] with [Sys.opaque_identity] to prevent the return value from being
optimized away. *)
let x = Sys.opaque_identity (f ()) in
let minor_words_after = minor_words () in
let major_words_after = major_words () in
let major_words_allocated = major_words_after - major_words_before in
let minor_words_allocated = minor_words_after - minor_words_before in
on_result ~major_words_allocated ~minor_words_allocated x
;;
let is_zero_alloc (type a) (f : unit -> a) =
(* Instead of using [Allocation_report.measure], and matching on the result, we use
this construction, in order to have [is_zero_alloc] not allocate itself. This
enables [is_zero_alloc] to be used in a nested way. *)
measure_internal
f
~on_result:(fun ~major_words_allocated ~minor_words_allocated value ->
ignore (Sys.opaque_identity value : a);
major_words_allocated == 0 && minor_words_allocated == 0)
;;
module Allocation_report = struct
type t =
{ major_words_allocated : int
; minor_words_allocated : int
}
[@@deriving sexp_of]
let create ~major_words_allocated ~minor_words_allocated =
{ major_words_allocated; minor_words_allocated }
;;
end
let measure_allocation f =
measure_internal f ~on_result:(fun ~major_words_allocated ~minor_words_allocated x ->
x, Allocation_report.create ~major_words_allocated ~minor_words_allocated)
;;
module Allocation_log = struct
type t =
{ size_in_words : int
; is_major : bool
; backtrace : string
}
[@@deriving sexp_of]
end
[%%if ocaml_version >= (4, 11, 0)]
let measure_and_log_allocation f =
let log : Allocation_log.t list ref = ref []
and major_allocs = ref 0
and minor_allocs = ref 0 in
let on_alloc ~is_major (info : Stdlib.Gc.Memprof.allocation) =
if is_major
then major_allocs := !major_allocs + info.n_samples
else minor_allocs := !minor_allocs + info.n_samples;
let backtrace = Stdlib.Printexc.raw_backtrace_to_string info.callstack in
(* Make backtraces easier to read by deleting everything below this function *)
let backtrace =
match String.substr_index backtrace ~pattern:"measure_and_log_allocation" with
| None ->
(* This case is possible: we may have logged allocations in another thread *)
backtrace
| Some p ->
String.sub ~pos:0 ~len:p backtrace
|> String.rstrip ~drop:(function
| '\n' -> false
| _ -> true)
in
let info : Allocation_log.t =
{ size_in_words = info.n_samples; is_major; backtrace }
in
log := info :: !log;
None
in
let tracker =
{ Stdlib.Gc.Memprof.null_tracker with
alloc_minor = on_alloc ~is_major:false
; alloc_major = on_alloc ~is_major:true
}
in
Stdlib.Gc.Memprof.start ~sampling_rate:1.0 tracker;
(* Exn.protect, manually inlined to guarantee no allocations *)
let result =
match f () with
| x ->
Memprof.stop does not guarantee that all callbacks are run ( some may be
delayed if they happened during C code and there has been no allocation since ) ,
so we explictly flush them
delayed if they happened during C code and there has been no allocation since),
so we explictly flush them *)
run_memprof_callbacks ();
Stdlib.Gc.Memprof.stop ();
x
| exception e ->
run_memprof_callbacks ();
Stdlib.Gc.Memprof.stop ();
raise e
in
( result
, Allocation_report.create
~major_words_allocated:!major_allocs
~minor_words_allocated:!minor_allocs
, List.rev !log )
;;
[%%else]
let measure_and_log_allocation f =
let x, report = measure_allocation f in
x, report, []
;;
[%%endif]
end
module Expert = struct
let add_finalizer x f =
try Stdlib.Gc.finalise (fun x -> Exn.handle_uncaught_and_exit (fun () -> f x)) x with
| Invalid_argument _ ->
The type of add_finalizer ensures that the only possible failure
is due to [ x ] being static data . In this case , we simply drop the
finalizer since static data would never have been collected by the
GC anyway .
is due to [x] being static data. In this case, we simply drop the
finalizer since static data would never have been collected by the
GC anyway. *)
()
;;
(* [add_finalizer_exn] is the same as [add_finalizer]. However, their types in
core_gc.mli are different, and the type of [add_finalizer] guarantees that it always
receives a heap block, which ensures that it will not raise, while
[add_finalizer_exn] accepts any type, and so may raise. *)
let add_finalizer_exn x f =
try Stdlib.Gc.finalise (fun x -> Exn.handle_uncaught_and_exit (fun () -> f x)) x with
| Invalid_argument _ ->
ignore (Heap_block.create x : _ Heap_block.t option);
If [ Heap_block.create ] succeeds then [ x ] is static data and so
we can simply drop the finaliser .
we can simply drop the finaliser. *)
()
;;
let add_finalizer_last x f =
try Stdlib.Gc.finalise_last (fun () -> Exn.handle_uncaught_and_exit f) x with
| Invalid_argument _ ->
The type of add_finalizer_last ensures that the only possible failure
is due to [ x ] being static data . In this case , we simply drop the
finalizer since static data would never have been collected by the
GC anyway .
is due to [x] being static data. In this case, we simply drop the
finalizer since static data would never have been collected by the
GC anyway. *)
()
;;
let add_finalizer_last_exn x f =
try Stdlib.Gc.finalise_last (fun () -> Exn.handle_uncaught_and_exit f) x with
| Invalid_argument _ ->
ignore (Heap_block.create x : _ Heap_block.t option);
If [ Heap_block.create ] succeeds then [ x ] is static data and so
we can simply drop the finaliser .
we can simply drop the finaliser. *)
()
;;
let finalize_release = Stdlib.Gc.finalise_release
module Alarm = struct
type t = alarm
let sexp_of_t _ = "<gc alarm>" |> [%sexp_of: string]
let create f = create_alarm (fun () -> Exn.handle_uncaught_and_exit f)
let delete = delete_alarm
end
end
| null |
https://raw.githubusercontent.com/janestreet/core/f382131ccdcb4a8cd21ebf9a49fa42dcf8183de6/core/src/gc.ml
|
ocaml
|
The compiler won't optimize int_of_string away so it won't
perform constant folding below.
We wrap [f ()] with [Sys.opaque_identity] to prevent the return value from being
optimized away.
Instead of using [Allocation_report.measure], and matching on the result, we use
this construction, in order to have [is_zero_alloc] not allocate itself. This
enables [is_zero_alloc] to be used in a nested way.
Make backtraces easier to read by deleting everything below this function
This case is possible: we may have logged allocations in another thread
Exn.protect, manually inlined to guarantee no allocations
[add_finalizer_exn] is the same as [add_finalizer]. However, their types in
core_gc.mli are different, and the type of [add_finalizer] guarantees that it always
receives a heap block, which ensures that it will not raise, while
[add_finalizer_exn] accepts any type, and so may raise.
|
open! Import
module Stable = struct
module Allocation_policy = struct
module V1 = struct
type t =
| Next_fit
| First_fit
| Best_fit
[@@deriving bin_io, compare, equal, hash, sexp, stable_witness]
end
end
module Stat = struct
[%%if ocaml_version < (4, 12, 0)]
module V1 = struct
type t = Stdlib.Gc.stat =
{ minor_words : float
; promoted_words : float
; major_words : float
; minor_collections : int
; major_collections : int
; heap_words : int
; heap_chunks : int
; live_words : int
; live_blocks : int
; free_words : int
; free_blocks : int
; largest_free : int
; fragments : int
; compactions : int
; top_heap_words : int
; stack_size : int
}
[@@deriving bin_io, compare, equal, hash, sexp, stable_witness]
end
module V2 = struct
type t =
{ minor_words : float
; promoted_words : float
; major_words : float
; minor_collections : int
; major_collections : int
; heap_words : int
; heap_chunks : int
; live_words : int
; live_blocks : int
; free_words : int
; free_blocks : int
; largest_free : int
; fragments : int
; compactions : int
; top_heap_words : int
; stack_size : int
; forced_major_collections : int
}
[@@deriving bin_io, compare, equal, hash, sexp, stable_witness]
end
[%%else]
module V1 = struct
type t =
{ minor_words : float
; promoted_words : float
; major_words : float
; minor_collections : int
; major_collections : int
; heap_words : int
; heap_chunks : int
; live_words : int
; live_blocks : int
; free_words : int
; free_blocks : int
; largest_free : int
; fragments : int
; compactions : int
; top_heap_words : int
; stack_size : int
}
[@@deriving bin_io, compare, equal, hash, sexp, stable_witness]
end
module V2 = struct
type t = Stdlib.Gc.stat =
{ minor_words : float
; promoted_words : float
; major_words : float
; minor_collections : int
; major_collections : int
; heap_words : int
; heap_chunks : int
; live_words : int
; live_blocks : int
; free_words : int
; free_blocks : int
; largest_free : int
; fragments : int
; compactions : int
; top_heap_words : int
; stack_size : int
; forced_major_collections : int
}
[@@deriving bin_io, compare, equal, hash, sexp, stable_witness]
end
[%%endif]
end
module Control = struct
module V1 = struct
[@@@ocaml.warning "-3"]
type t = Stdlib.Gc.control =
{ mutable minor_heap_size : int
; mutable major_heap_increment : int
; mutable space_overhead : int
; mutable verbose : int
; mutable max_overhead : int
; mutable stack_limit : int
; mutable allocation_policy : int
; window_size : int
; custom_major_ratio : int
; custom_minor_ratio : int
; custom_minor_max_size : int
}
[@@deriving bin_io, compare, equal, sexp, stable_witness]
end
end
end
include Stdlib.Gc
module Stat = struct
module T = struct
[%%if ocaml_version < (4, 12, 0)]
type t = Stdlib.Gc.stat =
{ minor_words : float
; promoted_words : float
; major_words : float
; minor_collections : int
; major_collections : int
; heap_words : int
; heap_chunks : int
; live_words : int
; live_blocks : int
; free_words : int
; free_blocks : int
; largest_free : int
; fragments : int
; compactions : int
; top_heap_words : int
; stack_size : int
}
[@@deriving compare, hash, bin_io, sexp, fields]
[%%else]
type t = Stdlib.Gc.stat =
{ minor_words : float
; promoted_words : float
; major_words : float
; minor_collections : int
; major_collections : int
; heap_words : int
; heap_chunks : int
; live_words : int
; live_blocks : int
; free_words : int
; free_blocks : int
; largest_free : int
; fragments : int
; compactions : int
; top_heap_words : int
; stack_size : int
; forced_major_collections : int
}
[@@deriving compare, hash, sexp_of, fields]
[%%endif]
end
include T
include Comparable.Make_plain (T)
[%%if ocaml_version < (4, 12, 0)]
let combine first second ~float_f ~int_f =
{ minor_words = float_f first.minor_words second.minor_words
; promoted_words = float_f first.promoted_words second.promoted_words
; major_words = float_f first.major_words second.major_words
; minor_collections = int_f first.minor_collections second.minor_collections
; major_collections = int_f first.major_collections second.major_collections
; heap_words = int_f first.heap_words second.heap_words
; heap_chunks = int_f first.heap_chunks second.heap_chunks
; live_words = int_f first.live_words second.live_words
; live_blocks = int_f first.live_blocks second.live_blocks
; free_words = int_f first.free_words second.free_words
; free_blocks = int_f first.free_blocks second.free_blocks
; largest_free = int_f first.largest_free second.largest_free
; fragments = int_f first.fragments second.fragments
; compactions = int_f first.compactions second.compactions
; top_heap_words = int_f first.top_heap_words second.top_heap_words
; stack_size = int_f first.stack_size second.stack_size
}
;;
[%%else]
let combine first second ~float_f ~int_f =
{ minor_words = float_f first.minor_words second.minor_words
; promoted_words = float_f first.promoted_words second.promoted_words
; major_words = float_f first.major_words second.major_words
; minor_collections = int_f first.minor_collections second.minor_collections
; major_collections = int_f first.major_collections second.major_collections
; heap_words = int_f first.heap_words second.heap_words
; heap_chunks = int_f first.heap_chunks second.heap_chunks
; live_words = int_f first.live_words second.live_words
; live_blocks = int_f first.live_blocks second.live_blocks
; free_words = int_f first.free_words second.free_words
; free_blocks = int_f first.free_blocks second.free_blocks
; largest_free = int_f first.largest_free second.largest_free
; fragments = int_f first.fragments second.fragments
; compactions = int_f first.compactions second.compactions
; top_heap_words = int_f first.top_heap_words second.top_heap_words
; stack_size = int_f first.stack_size second.stack_size
; forced_major_collections =
int_f first.forced_major_collections second.forced_major_collections
}
;;
[%%endif]
let add = combine ~float_f:Float.( + ) ~int_f:Int.( + )
let diff = combine ~float_f:Float.( - ) ~int_f:Int.( - )
end
module Control = struct
module T = struct
[@@@ocaml.warning "-3"]
type t = Stdlib.Gc.control =
{ mutable minor_heap_size : int
; mutable major_heap_increment : int
; mutable space_overhead : int
; mutable verbose : int
; mutable max_overhead : int
; mutable stack_limit : int
; mutable allocation_policy : int
; window_size : int
; custom_major_ratio : int
; custom_minor_ratio : int
; custom_minor_max_size : int
}
[@@deriving compare, sexp_of, fields]
end
include T
include Comparable.Make_plain (T)
end
module Allocation_policy = struct
type t = Stable.Allocation_policy.V1.t =
| Next_fit
| First_fit
| Best_fit
[@@deriving compare, equal, hash, sexp_of]
let to_int = function
| Next_fit -> 0
| First_fit -> 1
| Best_fit -> 2
;;
end
let tune
?logger
?minor_heap_size
?major_heap_increment
?space_overhead
?verbose
?max_overhead
?stack_limit
?allocation_policy
?window_size
?custom_major_ratio
?custom_minor_ratio
?custom_minor_max_size
()
=
let old_control_params = get () in
let f opt to_string field =
let old_value = Field.get field old_control_params in
match opt with
| None -> old_value
| Some new_value ->
Option.iter logger ~f:(fun f ->
Printf.ksprintf
f
"Gc.Control.%s: %s -> %s"
(Field.name field)
(to_string old_value)
(to_string new_value));
new_value
in
let allocation_policy = Option.map allocation_policy ~f:Allocation_policy.to_int in
let new_control_params =
Control.Fields.map
~minor_heap_size:(f minor_heap_size string_of_int)
~major_heap_increment:(f major_heap_increment string_of_int)
~space_overhead:(f space_overhead string_of_int)
~verbose:(f verbose string_of_int)
~max_overhead:(f max_overhead string_of_int)
~stack_limit:(f stack_limit string_of_int)
~allocation_policy:(f allocation_policy string_of_int)
~window_size:(f window_size string_of_int)
~custom_major_ratio:(f custom_major_ratio string_of_int)
~custom_minor_ratio:(f custom_minor_ratio string_of_int)
~custom_minor_max_size:(f custom_minor_max_size string_of_int)
in
set new_control_params
;;
let disable_compaction ?logger ~allocation_policy () =
let allocation_policy =
match allocation_policy with
| `Don't_change -> None
| `Set_to policy -> Some policy
in
The value 1_000_000 , according to
-ocaml-4.02/libref/Gc.html
will disable .
-ocaml-4.02/libref/Gc.html
will disable compactions.
*)
tune ?logger ?allocation_policy ~max_overhead:1_000_000 ()
;;
external minor_words : unit -> int = "core_gc_minor_words"
external major_words : unit -> int = "core_gc_major_words" [@@noalloc]
external promoted_words : unit -> int = "core_gc_promoted_words" [@@noalloc]
external minor_collections : unit -> int = "core_gc_minor_collections" [@@noalloc]
external major_collections : unit -> int = "core_gc_major_collections" [@@noalloc]
external heap_words : unit -> int = "core_gc_heap_words" [@@noalloc]
external heap_chunks : unit -> int = "core_gc_heap_chunks" [@@noalloc]
external compactions : unit -> int = "core_gc_compactions" [@@noalloc]
external top_heap_words : unit -> int = "core_gc_top_heap_words" [@@noalloc]
external major_plus_minor_words : unit -> int = "core_gc_major_plus_minor_words"
external allocated_words : unit -> int = "core_gc_allocated_words"
external run_memprof_callbacks : unit -> unit = "core_gc_run_memprof_callbacks"
let stat_size_lazy =
lazy (Obj.reachable_words (Obj.repr (Stdlib.Gc.quick_stat () : Stat.t)))
;;
let stat_size () = Lazy.force stat_size_lazy
let zero = Sys.opaque_identity (int_of_string "0")
let rec keep_alive o = if zero <> 0 then keep_alive (Sys.opaque_identity o)
module For_testing = struct
We disable inlining for this function so the GC stats and the call to [ f ] are never
rearranged .
rearranged. *)
let[@cold] measure_internal ~on_result f =
let minor_words_before = minor_words () in
let major_words_before = major_words () in
let x = Sys.opaque_identity (f ()) in
let minor_words_after = minor_words () in
let major_words_after = major_words () in
let major_words_allocated = major_words_after - major_words_before in
let minor_words_allocated = minor_words_after - minor_words_before in
on_result ~major_words_allocated ~minor_words_allocated x
;;
let is_zero_alloc (type a) (f : unit -> a) =
measure_internal
f
~on_result:(fun ~major_words_allocated ~minor_words_allocated value ->
ignore (Sys.opaque_identity value : a);
major_words_allocated == 0 && minor_words_allocated == 0)
;;
module Allocation_report = struct
type t =
{ major_words_allocated : int
; minor_words_allocated : int
}
[@@deriving sexp_of]
let create ~major_words_allocated ~minor_words_allocated =
{ major_words_allocated; minor_words_allocated }
;;
end
let measure_allocation f =
measure_internal f ~on_result:(fun ~major_words_allocated ~minor_words_allocated x ->
x, Allocation_report.create ~major_words_allocated ~minor_words_allocated)
;;
module Allocation_log = struct
type t =
{ size_in_words : int
; is_major : bool
; backtrace : string
}
[@@deriving sexp_of]
end
[%%if ocaml_version >= (4, 11, 0)]
let measure_and_log_allocation f =
let log : Allocation_log.t list ref = ref []
and major_allocs = ref 0
and minor_allocs = ref 0 in
let on_alloc ~is_major (info : Stdlib.Gc.Memprof.allocation) =
if is_major
then major_allocs := !major_allocs + info.n_samples
else minor_allocs := !minor_allocs + info.n_samples;
let backtrace = Stdlib.Printexc.raw_backtrace_to_string info.callstack in
let backtrace =
match String.substr_index backtrace ~pattern:"measure_and_log_allocation" with
| None ->
backtrace
| Some p ->
String.sub ~pos:0 ~len:p backtrace
|> String.rstrip ~drop:(function
| '\n' -> false
| _ -> true)
in
let info : Allocation_log.t =
{ size_in_words = info.n_samples; is_major; backtrace }
in
log := info :: !log;
None
in
let tracker =
{ Stdlib.Gc.Memprof.null_tracker with
alloc_minor = on_alloc ~is_major:false
; alloc_major = on_alloc ~is_major:true
}
in
Stdlib.Gc.Memprof.start ~sampling_rate:1.0 tracker;
let result =
match f () with
| x ->
Memprof.stop does not guarantee that all callbacks are run ( some may be
delayed if they happened during C code and there has been no allocation since ) ,
so we explictly flush them
delayed if they happened during C code and there has been no allocation since),
so we explictly flush them *)
run_memprof_callbacks ();
Stdlib.Gc.Memprof.stop ();
x
| exception e ->
run_memprof_callbacks ();
Stdlib.Gc.Memprof.stop ();
raise e
in
( result
, Allocation_report.create
~major_words_allocated:!major_allocs
~minor_words_allocated:!minor_allocs
, List.rev !log )
;;
[%%else]
let measure_and_log_allocation f =
let x, report = measure_allocation f in
x, report, []
;;
[%%endif]
end
module Expert = struct
let add_finalizer x f =
try Stdlib.Gc.finalise (fun x -> Exn.handle_uncaught_and_exit (fun () -> f x)) x with
| Invalid_argument _ ->
The type of add_finalizer ensures that the only possible failure
is due to [ x ] being static data . In this case , we simply drop the
finalizer since static data would never have been collected by the
GC anyway .
is due to [x] being static data. In this case, we simply drop the
finalizer since static data would never have been collected by the
GC anyway. *)
()
;;
let add_finalizer_exn x f =
try Stdlib.Gc.finalise (fun x -> Exn.handle_uncaught_and_exit (fun () -> f x)) x with
| Invalid_argument _ ->
ignore (Heap_block.create x : _ Heap_block.t option);
If [ Heap_block.create ] succeeds then [ x ] is static data and so
we can simply drop the finaliser .
we can simply drop the finaliser. *)
()
;;
let add_finalizer_last x f =
try Stdlib.Gc.finalise_last (fun () -> Exn.handle_uncaught_and_exit f) x with
| Invalid_argument _ ->
The type of add_finalizer_last ensures that the only possible failure
is due to [ x ] being static data . In this case , we simply drop the
finalizer since static data would never have been collected by the
GC anyway .
is due to [x] being static data. In this case, we simply drop the
finalizer since static data would never have been collected by the
GC anyway. *)
()
;;
let add_finalizer_last_exn x f =
try Stdlib.Gc.finalise_last (fun () -> Exn.handle_uncaught_and_exit f) x with
| Invalid_argument _ ->
ignore (Heap_block.create x : _ Heap_block.t option);
If [ Heap_block.create ] succeeds then [ x ] is static data and so
we can simply drop the finaliser .
we can simply drop the finaliser. *)
()
;;
let finalize_release = Stdlib.Gc.finalise_release
module Alarm = struct
type t = alarm
let sexp_of_t _ = "<gc alarm>" |> [%sexp_of: string]
let create f = create_alarm (fun () -> Exn.handle_uncaught_and_exit f)
let delete = delete_alarm
end
end
|
a78a3a977c9e31f215f397ea58aede05d7a8f4794a48374aadc0dab4f169df57
|
Ericson2314/lighthouse
|
Cutil_12.hs
|
# OPTIONS_GHC -cpp #
-- #hide
-----------------------------------------------------------------------------
-- Module : OS.Cutil_12
Copyright : ( c ) 2002
-- License : BSD-style
--
-- Maintainer :
-- Stability : provisional
-- Portability : portable
--
-- This module contains some additional routines required for marshalling
arguments to OS C calling routines .
--
-----------------------------------------------------------------------------
module Graphics.UI.ObjectIO.OS.Cutil_12
( addr2int, int2addr, fpeek, Storable(..), free, malloc
, module Data.Int
, module Data.Bits
, module Foreign.Ptr
, module Foreign.C.String
) where
import Foreign.Ptr
import Foreign.Storable
import Foreign.Marshal.Alloc
import Foreign.C.String
import Data.Int
import Data.Bits
import System.IO.Unsafe
import GHC.Ptr ( Ptr(..) )
import GHC.Base
-- Conversion operations:
addr2int :: Ptr a -> Int
addr2int (Ptr x) = I# (addr2Int# x)
int2addr :: Int -> Ptr a
int2addr (I# x) = Ptr (int2Addr# x)
fpeek addr first peeks addr , then frees addr :
fpeek :: (Storable a) => Ptr a -> IO a
fpeek addr
= do {
x <- peek addr;
free addr;
return x
}
| null |
https://raw.githubusercontent.com/Ericson2314/lighthouse/210078b846ebd6c43b89b5f0f735362a01a9af02/ghc-6.8.2/libraries/ObjectIO/Graphics/UI/ObjectIO/OS/Cutil_12.hs
|
haskell
|
#hide
---------------------------------------------------------------------------
Module : OS.Cutil_12
License : BSD-style
Maintainer :
Stability : provisional
Portability : portable
This module contains some additional routines required for marshalling
---------------------------------------------------------------------------
Conversion operations:
|
# OPTIONS_GHC -cpp #
Copyright : ( c ) 2002
arguments to OS C calling routines .
module Graphics.UI.ObjectIO.OS.Cutil_12
( addr2int, int2addr, fpeek, Storable(..), free, malloc
, module Data.Int
, module Data.Bits
, module Foreign.Ptr
, module Foreign.C.String
) where
import Foreign.Ptr
import Foreign.Storable
import Foreign.Marshal.Alloc
import Foreign.C.String
import Data.Int
import Data.Bits
import System.IO.Unsafe
import GHC.Ptr ( Ptr(..) )
import GHC.Base
addr2int :: Ptr a -> Int
addr2int (Ptr x) = I# (addr2Int# x)
int2addr :: Int -> Ptr a
int2addr (I# x) = Ptr (int2Addr# x)
fpeek addr first peeks addr , then frees addr :
fpeek :: (Storable a) => Ptr a -> IO a
fpeek addr
= do {
x <- peek addr;
free addr;
return x
}
|
dccf2d4452acd886fe21b21d0a1a6d055418adb292edb4d05c2dc7086e8d02ed
|
pixlsus/registry.gimp.org_static
|
clairs-obscurs.scm
|
(define (script-fu-shadows-highlights image drawable)
; create a highlights layer
(let ((highlights-layer (car (gimp-layer-copy drawable 1))))
(gimp-drawable-set-name highlights-layer "Assombrir les lumières")
(gimp-image-add-layer image highlights-layer -1)
;process shadows/highlights layer
(gimp-desaturate highlights-layer)
(gimp-invert highlights-layer)
(gimp-layer-set-mode highlights-layer 5)
(plug-in-gauss-iir2 1 image highlights-layer 25 25)
;copy highlights layer to create shadows layer
(define shadows-layer (car (gimp-layer-copy highlights-layer 1)))
(gimp-drawable-set-name shadows-layer "Éclaircir les ombres")
(gimp-image-add-layer image shadows-layer -1)
;process highlights layer
(plug-in-colortoalpha 1 image highlights-layer '(255 255 255))
(gimp-layer-set-opacity highlights-layer 0)
;process shadows layer
(plug-in-colortoalpha 1 image shadows-layer '(0 0 0))
(gimp-layer-set-opacity shadows-layer 0)
;update image window
(gimp-displays-flush)))
(script-fu-register "script-fu-shadows-highlights"
_"<Image>/Filters/Clairs & Obscurs"
"Corrige les ombres et les hautes lumières - adapté depuis "
"Arnaud Champollion - d'après Shadows and Highlights de Dennis Bond - grâce au travail de Jozef Trawinski"
"Arnaud Champollion - d'après Shadows and Highlights de Dennis Bond - grâce au travail de Jozef Trawinski"
"24 octobre 2007"
"RGB* GRAY*"
SF-IMAGE "Image" 0
SF-DRAWABLE "Drawable" 0)
| null |
https://raw.githubusercontent.com/pixlsus/registry.gimp.org_static/ffcde7400f402728373ff6579947c6ffe87d1a5e/registry.gimp.org/files/clairs-obscurs.scm
|
scheme
|
create a highlights layer
process shadows/highlights layer
copy highlights layer to create shadows layer
process highlights layer
process shadows layer
update image window
|
(define (script-fu-shadows-highlights image drawable)
(let ((highlights-layer (car (gimp-layer-copy drawable 1))))
(gimp-drawable-set-name highlights-layer "Assombrir les lumières")
(gimp-image-add-layer image highlights-layer -1)
(gimp-desaturate highlights-layer)
(gimp-invert highlights-layer)
(gimp-layer-set-mode highlights-layer 5)
(plug-in-gauss-iir2 1 image highlights-layer 25 25)
(define shadows-layer (car (gimp-layer-copy highlights-layer 1)))
(gimp-drawable-set-name shadows-layer "Éclaircir les ombres")
(gimp-image-add-layer image shadows-layer -1)
(plug-in-colortoalpha 1 image highlights-layer '(255 255 255))
(gimp-layer-set-opacity highlights-layer 0)
(plug-in-colortoalpha 1 image shadows-layer '(0 0 0))
(gimp-layer-set-opacity shadows-layer 0)
(gimp-displays-flush)))
(script-fu-register "script-fu-shadows-highlights"
_"<Image>/Filters/Clairs & Obscurs"
"Corrige les ombres et les hautes lumières - adapté depuis "
"Arnaud Champollion - d'après Shadows and Highlights de Dennis Bond - grâce au travail de Jozef Trawinski"
"Arnaud Champollion - d'après Shadows and Highlights de Dennis Bond - grâce au travail de Jozef Trawinski"
"24 octobre 2007"
"RGB* GRAY*"
SF-IMAGE "Image" 0
SF-DRAWABLE "Drawable" 0)
|
ec54658ee279f66e0bf73ead6a180d01d3b0e0c853d427dfba657775dbdb7ae9
|
8c6794b6/guile-tjit
|
transform.scm
|
( sxml transform ) -- pre- and post - order sxml transformation
;;;;
Copyright ( C ) 2009 Free Software Foundation , Inc.
Modified 2004 by < wingo at pobox dot com > .
Written 2003 by < oleg at pobox dot com > as SXML-tree-trans.scm .
;;;;
;;;; This library is free software; you can redistribute it and/or
;;;; modify it under the terms of the GNU Lesser General Public
License as published by the Free Software Foundation ; either
version 3 of the License , or ( at your option ) any later version .
;;;;
;;;; This library is distributed in the hope that it will be useful,
;;;; but WITHOUT ANY WARRANTY; without even the implied warranty of
;;;; MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
;;;; Lesser General Public License for more details.
;;;;
You should have received a copy of the GNU Lesser General Public
;;;; License along with this library; if not, write to the Free Software
Foundation , Inc. , 51 Franklin Street , Fifth Floor , Boston , USA
;;;;
;;; Commentary:
;;
;;@heading SXML expression tree transformers
;
@subheading Pre - Post - order traversal of a tree and creation of a new tree
@smallexample
;pre-post-order:: <tree> x <bindings> -> <new-tree>
;@end smallexample
; where
@smallexample
; <bindings> ::= (<binding> ...)
; <binding> ::= (<trigger-symbol> *preorder* . <handler>) |
; (<trigger-symbol> *macro* . <handler>) |
; (<trigger-symbol> <new-bindings> . <handler>) |
; (<trigger-symbol> . <handler>)
; <trigger-symbol> ::= XMLname | *text* | *default*
; <handler> :: <trigger-symbol> x [<tree>] -> <new-tree>
;@end smallexample
;
; The pre-post-order function visits the nodes and nodelists
pre - post - order ( depth - first ) . For each @code{<Node > } of the form
; @code{(@var{name} <Node> ...)}, it looks up an association with the
; given @var{name} among its @var{<bindings>}. If failed,
@code{pre - post - order } tries to locate a @code{*default * } binding . It 's
; an error if the latter attempt fails as well. Having found a binding,
the @code{pre - post - order } function first checks to see if the binding
; is of the form
@smallexample
; (<trigger-symbol> *preorder* . <handler>)
;@end smallexample
;
; If it is, the handler is 'applied' to the current node. Otherwise, the
pre - post - order function first calls itself recursively for each child
of the current node , with - bindings > } prepended to the
; @var{<bindings>} in effect. The result of these calls is passed to the
; @var{<handler>} (along with the head of the current @var{<Node>}). To
; be more precise, the handler is _applied_ to the head of the current
; node and its processed children. The result of the handler, which
; should also be a @code{<tree>}, replaces the current @var{<Node>}. If
; the current @var{<Node>} is a text string or other atom, a special
; binding with a symbol @code{*text*} is looked up.
;
; A binding can also be of a form
@smallexample
; (<trigger-symbol> *macro* . <handler>)
;@end smallexample
; This is equivalent to @code{*preorder*} described above. However, the
; result is re-processed again, with the current stylesheet.
;;
;;; Code:
(define-module (sxml transform)
#:export (SRV:send-reply
foldts
post-order
pre-post-order
replace-range))
Upstream version :
$ I d : SXML - tree - trans.scm , v 1.8 2003/04/24 19:39:53 oleg Exp oleg $
; Like let* but allowing for multiple-value bindings
(define-macro (let*-values bindings . body)
(if (null? bindings) (cons 'begin body)
(apply
(lambda (vars initializer)
(let ((cont
(cons 'let*-values
(cons (cdr bindings) body))))
(cond
((not (pair? vars)) ; regular let case, a single var
`(let ((,vars ,initializer)) ,cont))
((null? (cdr vars)) ; single var, see the prev case
`(let ((,(car vars) ,initializer)) ,cont))
(else ; the most generic case
`(call-with-values (lambda () ,initializer)
(lambda ,vars ,cont))))))
(car bindings))))
(define (SRV:send-reply . fragments)
"Output the @var{fragments} to the current output port.
The fragments are a list of strings, characters, numbers, thunks,
@code{#f}, @code{#t} -- and other fragments. The function traverses the
tree depth-first, writes out strings and characters, executes thunks,
and ignores @code{#f} and @code{'()}. The function returns @code{#t} if
anything was written at all; otherwise the result is @code{#f} If
@code{#t} occurs among the fragments, it is not written out but causes
the result of @code{SRV:send-reply} to be @code{#t}."
(let loop ((fragments fragments) (result #f))
(cond
((null? fragments) result)
((not (car fragments)) (loop (cdr fragments) result))
((null? (car fragments)) (loop (cdr fragments) result))
((eq? #t (car fragments)) (loop (cdr fragments) #t))
((pair? (car fragments))
(loop (cdr fragments) (loop (car fragments) result)))
((procedure? (car fragments))
((car fragments))
(loop (cdr fragments) #t))
(else
(display (car fragments))
(loop (cdr fragments) #t)))))
;------------------------------------------------------------------------
Traversal of an SXML tree or a grove :
a < Node > or a < Nodelist >
;
A < Node > and a < Nodelist > are mutually - recursive datatypes that
underlie the SXML tree :
< Node > : : = ( name . < Nodelist > ) | " text string "
; An (ordered) set of nodes is just a list of the constituent nodes:
; <Nodelist> ::= (<Node> ...)
Nodelists , and Nodes other than text strings are both lists . A
< Nodelist > however is either an empty list , or a list whose head is
; not a symbol (an atom in general). A symbol at the head of a node is
; either an XML name (in which case it's a tag of an XML element), or
; an administrative name such as '@'.
See SXPath.scm and for more information on SXML .
;; see the commentary for docs
(define (pre-post-order tree bindings)
(let* ((default-binding (assq '*default* bindings))
(text-binding (or (assq '*text* bindings) default-binding))
Cache default and text bindings
(and text-binding
(if (procedure? (cdr text-binding))
(cdr text-binding) (cddr text-binding)))))
(let loop ((tree tree))
(cond
((null? tree) '())
((not (pair? tree))
(let ((trigger '*text*))
(if text-handler (text-handler trigger tree)
(error "Unknown binding for " trigger " and no default"))))
((not (symbol? (car tree))) (map loop tree)) ; tree is a nodelist
tree is an SXML node
(let* ((trigger (car tree))
(binding (or (assq trigger bindings) default-binding)))
(cond
((not binding)
(error "Unknown binding for " trigger " and no default"))
((not (pair? (cdr binding))) ; must be a procedure: handler
(apply (cdr binding) trigger (map loop (cdr tree))))
((eq? '*preorder* (cadr binding))
(apply (cddr binding) tree))
((eq? '*macro* (cadr binding))
(loop (apply (cddr binding) tree)))
( cadr binding ) is a local binding
(apply (cddr binding) trigger
(pre-post-order (cdr tree) (append (cadr binding) bindings)))
))))))))
; post-order is a strict subset of pre-post-order without *preorder*
; (let alone *macro*) traversals.
; Now pre-post-order is actually faster than the old post-order.
; The function post-order is deprecated and is aliased below for
; backward compatibility.
(define post-order pre-post-order)
;------------------------------------------------------------------------
; Extended tree fold
; tree = atom | (node-name tree ...)
;
foldts fdown fup fhere seed ( Leaf str ) = fhere seed str
foldts fdown fup fhere seed ( Nd kids ) =
fup seed $ foldl ( foldts fdown fup fhere ) ( fdown seed ) kids
; procedure fhere: seed -> atom -> seed
; procedure fdown: seed -> node -> seed
; procedure fup: parent-seed -> last-kid-seed -> node -> seed
; foldts returns the final seed
(define (foldts fdown fup fhere seed tree)
(cond
((null? tree) seed)
((not (pair? tree)) ; An atom
(fhere seed tree))
(else
(let loop ((kid-seed (fdown seed tree)) (kids (cdr tree)))
(if (null? kids)
(fup seed kid-seed tree)
(loop (foldts fdown fup fhere kid-seed (car kids))
(cdr kids)))))))
;------------------------------------------------------------------------
Traverse a forest depth - first and cut / replace ranges of nodes .
;
; The nodes that define a range don't have to have the same immediate
; parent, don't have to be on the same level, and the end node of a
; range doesn't even have to exist. A replace-range procedure removes
; nodes from the beginning node of the range up to (but not including)
; the end node of the range. In addition, the beginning node of the
; range can be replaced by a node or a list of nodes. The range of
; nodes is cut while depth-first traversing the forest. If all
; branches of the node are cut a node is cut as well. The procedure
; can cut several non-overlapping ranges from a forest.
; replace-range:: BEG-PRED x END-PRED x FOREST -> FOREST
; where
type FOREST = ( NODE ... )
type NODE = Atom | ( Name . FOREST ) | FOREST
;
The range of nodes is specified by two predicates , beg - pred and end - pred .
beg - pred : : NODE - > # f | FOREST
end - pred : : NODE - > # f | FOREST
; The beg-pred predicate decides on the beginning of the range. The node
; for which the predicate yields non-#f marks the beginning of the range
; The non-#f value of the predicate replaces the node. The value can be a
; list of nodes. The replace-range procedure then traverses the tree and skips
; all the nodes, until the end-pred yields non-#f. The value of the end-pred
; replaces the end-range node. The new end node and its brothers will be
; re-scanned.
; The predicates are evaluated pre-order. We do not descend into a node that
; is marked as the beginning of the range.
(define (replace-range beg-pred end-pred forest)
; loop forest keep? new-forest
; forest is the forest to traverse
; new-forest accumulates the nodes we will keep, in the reverse
; order
; If keep? is #t, keep the curr node if atomic. If the node is not atomic,
; traverse its children and keep those that are not in the skip range.
If keep ? is # f , skip the current node if atomic . Otherwise ,
; traverse its children. If all children are skipped, skip the node
; as well.
(define (loop forest keep? new-forest)
(if (null? forest) (values (reverse new-forest) keep?)
(let ((node (car forest)))
(if keep?
(cond ; accumulate mode
((beg-pred node) => ; see if the node starts the skip range
(lambda (repl-branches) ; if so, skip/replace the node
(loop (cdr forest) #f
(append (reverse repl-branches) new-forest))))
((not (pair? node)) ; it's an atom, keep it
(loop (cdr forest) keep? (cons node new-forest)))
(else
(let*-values
(((node?) (symbol? (car node))) ; or is it a nodelist?
((new-kids keep?) ; traverse its children
(loop (if node? (cdr node) node) #t '())))
(loop (cdr forest) keep?
(cons
(if node? (cons (car node) new-kids) new-kids)
new-forest)))))
; skip mode
(cond
((end-pred node) => ; end the skip range
(lambda (repl-branches) ; repl-branches will be re-scanned
(loop (append repl-branches (cdr forest)) #t
new-forest)))
((not (pair? node)) ; it's an atom, skip it
(loop (cdr forest) keep? new-forest))
(else
(let*-values
(((node?) (symbol? (car node))) ; or is it a nodelist?
((new-kids keep?) ; traverse its children
(loop (if node? (cdr node) node) #f '())))
(loop (cdr forest) keep?
(if (or keep? (pair? new-kids))
(cons
(if node? (cons (car node) new-kids) new-kids)
new-forest)
new-forest) ; if all kids are skipped
)))))))) ; skip the node too
(let*-values (((new-forest keep?) (loop forest #t '())))
new-forest))
arch - tag : 6c814f4b-38f7 - 42c1 - b8ef - ce3447edefc7
;;; transform.scm ends here
| null |
https://raw.githubusercontent.com/8c6794b6/guile-tjit/9566e480af2ff695e524984992626426f393414f/module/sxml/transform.scm
|
scheme
|
This library is free software; you can redistribute it and/or
modify it under the terms of the GNU Lesser General Public
either
This library is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
Lesser General Public License for more details.
License along with this library; if not, write to the Free Software
Commentary:
@heading SXML expression tree transformers
pre-post-order:: <tree> x <bindings> -> <new-tree>
@end smallexample
where
<bindings> ::= (<binding> ...)
<binding> ::= (<trigger-symbol> *preorder* . <handler>) |
(<trigger-symbol> *macro* . <handler>) |
(<trigger-symbol> <new-bindings> . <handler>) |
(<trigger-symbol> . <handler>)
<trigger-symbol> ::= XMLname | *text* | *default*
<handler> :: <trigger-symbol> x [<tree>] -> <new-tree>
@end smallexample
The pre-post-order function visits the nodes and nodelists
@code{(@var{name} <Node> ...)}, it looks up an association with the
given @var{name} among its @var{<bindings>}. If failed,
an error if the latter attempt fails as well. Having found a binding,
is of the form
(<trigger-symbol> *preorder* . <handler>)
@end smallexample
If it is, the handler is 'applied' to the current node. Otherwise, the
@var{<bindings>} in effect. The result of these calls is passed to the
@var{<handler>} (along with the head of the current @var{<Node>}). To
be more precise, the handler is _applied_ to the head of the current
node and its processed children. The result of the handler, which
should also be a @code{<tree>}, replaces the current @var{<Node>}. If
the current @var{<Node>} is a text string or other atom, a special
binding with a symbol @code{*text*} is looked up.
A binding can also be of a form
(<trigger-symbol> *macro* . <handler>)
@end smallexample
This is equivalent to @code{*preorder*} described above. However, the
result is re-processed again, with the current stylesheet.
Code:
Like let* but allowing for multiple-value bindings
regular let case, a single var
single var, see the prev case
the most generic case
otherwise the result is @code{#f} If
------------------------------------------------------------------------
An (ordered) set of nodes is just a list of the constituent nodes:
<Nodelist> ::= (<Node> ...)
not a symbol (an atom in general). A symbol at the head of a node is
either an XML name (in which case it's a tag of an XML element), or
an administrative name such as '@'.
see the commentary for docs
tree is a nodelist
must be a procedure: handler
post-order is a strict subset of pre-post-order without *preorder*
(let alone *macro*) traversals.
Now pre-post-order is actually faster than the old post-order.
The function post-order is deprecated and is aliased below for
backward compatibility.
------------------------------------------------------------------------
Extended tree fold
tree = atom | (node-name tree ...)
procedure fhere: seed -> atom -> seed
procedure fdown: seed -> node -> seed
procedure fup: parent-seed -> last-kid-seed -> node -> seed
foldts returns the final seed
An atom
------------------------------------------------------------------------
The nodes that define a range don't have to have the same immediate
parent, don't have to be on the same level, and the end node of a
range doesn't even have to exist. A replace-range procedure removes
nodes from the beginning node of the range up to (but not including)
the end node of the range. In addition, the beginning node of the
range can be replaced by a node or a list of nodes. The range of
nodes is cut while depth-first traversing the forest. If all
branches of the node are cut a node is cut as well. The procedure
can cut several non-overlapping ranges from a forest.
replace-range:: BEG-PRED x END-PRED x FOREST -> FOREST
where
The beg-pred predicate decides on the beginning of the range. The node
for which the predicate yields non-#f marks the beginning of the range
The non-#f value of the predicate replaces the node. The value can be a
list of nodes. The replace-range procedure then traverses the tree and skips
all the nodes, until the end-pred yields non-#f. The value of the end-pred
replaces the end-range node. The new end node and its brothers will be
re-scanned.
The predicates are evaluated pre-order. We do not descend into a node that
is marked as the beginning of the range.
loop forest keep? new-forest
forest is the forest to traverse
new-forest accumulates the nodes we will keep, in the reverse
order
If keep? is #t, keep the curr node if atomic. If the node is not atomic,
traverse its children and keep those that are not in the skip range.
traverse its children. If all children are skipped, skip the node
as well.
accumulate mode
see if the node starts the skip range
if so, skip/replace the node
it's an atom, keep it
or is it a nodelist?
traverse its children
skip mode
end the skip range
repl-branches will be re-scanned
it's an atom, skip it
or is it a nodelist?
traverse its children
if all kids are skipped
skip the node too
transform.scm ends here
|
( sxml transform ) -- pre- and post - order sxml transformation
Copyright ( C ) 2009 Free Software Foundation , Inc.
Modified 2004 by < wingo at pobox dot com > .
Written 2003 by < oleg at pobox dot com > as SXML-tree-trans.scm .
version 3 of the License , or ( at your option ) any later version .
You should have received a copy of the GNU Lesser General Public
Foundation , Inc. , 51 Franklin Street , Fifth Floor , Boston , USA
@subheading Pre - Post - order traversal of a tree and creation of a new tree
@smallexample
@smallexample
pre - post - order ( depth - first ) . For each @code{<Node > } of the form
@code{pre - post - order } tries to locate a @code{*default * } binding . It 's
the @code{pre - post - order } function first checks to see if the binding
@smallexample
pre - post - order function first calls itself recursively for each child
of the current node , with - bindings > } prepended to the
@smallexample
(define-module (sxml transform)
#:export (SRV:send-reply
foldts
post-order
pre-post-order
replace-range))
Upstream version :
$ I d : SXML - tree - trans.scm , v 1.8 2003/04/24 19:39:53 oleg Exp oleg $
(define-macro (let*-values bindings . body)
(if (null? bindings) (cons 'begin body)
(apply
(lambda (vars initializer)
(let ((cont
(cons 'let*-values
(cons (cdr bindings) body))))
(cond
`(let ((,vars ,initializer)) ,cont))
`(let ((,(car vars) ,initializer)) ,cont))
`(call-with-values (lambda () ,initializer)
(lambda ,vars ,cont))))))
(car bindings))))
(define (SRV:send-reply . fragments)
"Output the @var{fragments} to the current output port.
The fragments are a list of strings, characters, numbers, thunks,
@code{#f}, @code{#t} -- and other fragments. The function traverses the
tree depth-first, writes out strings and characters, executes thunks,
and ignores @code{#f} and @code{'()}. The function returns @code{#t} if
@code{#t} occurs among the fragments, it is not written out but causes
the result of @code{SRV:send-reply} to be @code{#t}."
(let loop ((fragments fragments) (result #f))
(cond
((null? fragments) result)
((not (car fragments)) (loop (cdr fragments) result))
((null? (car fragments)) (loop (cdr fragments) result))
((eq? #t (car fragments)) (loop (cdr fragments) #t))
((pair? (car fragments))
(loop (cdr fragments) (loop (car fragments) result)))
((procedure? (car fragments))
((car fragments))
(loop (cdr fragments) #t))
(else
(display (car fragments))
(loop (cdr fragments) #t)))))
Traversal of an SXML tree or a grove :
a < Node > or a < Nodelist >
A < Node > and a < Nodelist > are mutually - recursive datatypes that
underlie the SXML tree :
< Node > : : = ( name . < Nodelist > ) | " text string "
Nodelists , and Nodes other than text strings are both lists . A
< Nodelist > however is either an empty list , or a list whose head is
See SXPath.scm and for more information on SXML .
(define (pre-post-order tree bindings)
(let* ((default-binding (assq '*default* bindings))
(text-binding (or (assq '*text* bindings) default-binding))
Cache default and text bindings
(and text-binding
(if (procedure? (cdr text-binding))
(cdr text-binding) (cddr text-binding)))))
(let loop ((tree tree))
(cond
((null? tree) '())
((not (pair? tree))
(let ((trigger '*text*))
(if text-handler (text-handler trigger tree)
(error "Unknown binding for " trigger " and no default"))))
tree is an SXML node
(let* ((trigger (car tree))
(binding (or (assq trigger bindings) default-binding)))
(cond
((not binding)
(error "Unknown binding for " trigger " and no default"))
(apply (cdr binding) trigger (map loop (cdr tree))))
((eq? '*preorder* (cadr binding))
(apply (cddr binding) tree))
((eq? '*macro* (cadr binding))
(loop (apply (cddr binding) tree)))
( cadr binding ) is a local binding
(apply (cddr binding) trigger
(pre-post-order (cdr tree) (append (cadr binding) bindings)))
))))))))
(define post-order pre-post-order)
foldts fdown fup fhere seed ( Leaf str ) = fhere seed str
foldts fdown fup fhere seed ( Nd kids ) =
fup seed $ foldl ( foldts fdown fup fhere ) ( fdown seed ) kids
(define (foldts fdown fup fhere seed tree)
(cond
((null? tree) seed)
(fhere seed tree))
(else
(let loop ((kid-seed (fdown seed tree)) (kids (cdr tree)))
(if (null? kids)
(fup seed kid-seed tree)
(loop (foldts fdown fup fhere kid-seed (car kids))
(cdr kids)))))))
Traverse a forest depth - first and cut / replace ranges of nodes .
type FOREST = ( NODE ... )
type NODE = Atom | ( Name . FOREST ) | FOREST
The range of nodes is specified by two predicates , beg - pred and end - pred .
beg - pred : : NODE - > # f | FOREST
end - pred : : NODE - > # f | FOREST
(define (replace-range beg-pred end-pred forest)
If keep ? is # f , skip the current node if atomic . Otherwise ,
(define (loop forest keep? new-forest)
(if (null? forest) (values (reverse new-forest) keep?)
(let ((node (car forest)))
(if keep?
(loop (cdr forest) #f
(append (reverse repl-branches) new-forest))))
(loop (cdr forest) keep? (cons node new-forest)))
(else
(let*-values
(loop (if node? (cdr node) node) #t '())))
(loop (cdr forest) keep?
(cons
(if node? (cons (car node) new-kids) new-kids)
new-forest)))))
(cond
(loop (append repl-branches (cdr forest)) #t
new-forest)))
(loop (cdr forest) keep? new-forest))
(else
(let*-values
(loop (if node? (cdr node) node) #f '())))
(loop (cdr forest) keep?
(if (or keep? (pair? new-kids))
(cons
(if node? (cons (car node) new-kids) new-kids)
new-forest)
(let*-values (((new-forest keep?) (loop forest #t '())))
new-forest))
arch - tag : 6c814f4b-38f7 - 42c1 - b8ef - ce3447edefc7
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.