| code (string, length 5 to 1.03M) | repo_name (string, length 5 to 90) | path (string, length 4 to 158) | license (string, 15 classes) | size (int64, 5 to 1.03M) | n_ast_errors (int64, 0 to 53.9k) | ast_max_depth (int64, 2 to 4.17k) | n_whitespaces (int64, 0 to 365k) | n_ast_nodes (int64, 3 to 317k) | n_ast_terminals (int64, 1 to 171k) | n_ast_nonterminals (int64, 1 to 146k) | loc (int64, -1 to 37.3k) | cycloplexity (int64, -1 to 1.31k) |
---|---|---|---|---|---|---|---|---|---|---|---|---|
{-# LANGUAGE PolyKinds #-}
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE DataKinds, KindSignatures,GADTs,
TypeOperators,
FlexibleInstances,
ScopedTypeVariables #-}
module Data.Union (
Union (..),
inject, project, wrap, unwrap,
push, pop,
swap, rotate,
conceal
) where
import Data.Type.Equality ((:~:) (..), gcastWith, testEquality)
import Data.Type.List hiding (length)
import Data.Type.List.Index (Index (..))
import qualified Data.Type.List.Index as Index
import Algebra.Absurd
----------------------------------------------------------------------------
data Union (l :: [k]) where
Union :: Index l e -> e -> Union l
instance Absurd (Union '[]) where
absurd (Union x _) = absurd x
inject :: Member e l => e -> Union l
inject = Union Index.index
project :: forall l e. Member e l => Union l -> Maybe e
project (Union i x) = fmap (\refl -> gcastWith refl x) $ testEquality i (Index.index :: Index l e)
wrap :: e -> Union (e ': l)
wrap = inject
unwrap :: Union '[e] -> e
unwrap (Union i x) = gcastWith (Index.trivial i) x
----------------------------------------------------------------------------
push :: Union l -> Union (e ': l)
push (Union i x) = Union (Index.push i) x
pop :: Union (e ': l) -> Either e (Union l)
pop (Union i x) = case Index.pop i of
Left Refl -> Left x
Right i' -> Right $ Union i' x
swap :: Union (e ': f ': l) -> Union (f ': e ': l)
swap (Union i x) = Union (Index.swap i) x
rotate :: Union (e ': f ': g ': l) -> Union (f ': g ': e ': l)
rotate (Union i x) = Union (Index.rotate i) x
----------------------------------------------------------------------------
conceal :: Member e l => Union (e ': l) -> Union l
conceal (Union i x) = Union (Index.conceal i) x
| seagull-kamome/haskell-toybox | Data/Union.hs | bsd-3-clause | 1,798 | 0 | 10 | 393 | 735 | 389 | 346 | 41 | 2 |
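A minimal usage sketch for the open-union API above (not part of the dataset sample). It assumes the `Member` constraints from `Data.Type.List` resolve for the concrete type list used here; `inject` stores a value in the union and `project` recovers it only at the matching type.

```haskell
{-# LANGUAGE DataKinds #-}
import Data.Union

demo :: (Maybe Int, Maybe Bool)
demo = (project u, project u)   -- evaluates to (Just 42, Nothing)
  where
    u :: Union '[Bool, Int]
    u = inject (42 :: Int)
```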
{-# OPTIONS_GHC -Wall #-}
module Main where
import Graphics.Rendering.Chart hiding (c)
import Graphics.Rendering.Chart.Gtk
import Data.Accessor
import Text.Printf
import Design.Config
import Design.WorkingConfig
import Aero.Drag.WettedArea
import Aero.Drag.Upsweep
import Aero.Drag.FormAndFrictional
main :: IO ()
main = do
let config :: Config Double
config = gaCruiseConfig
k = formFactorMarkup (cruise_mach config) (bodyFineness config)
cF = cF_skinFriction config
cD_formAndFrictional' = cD_formAndFrictional config
sWet = wettedArea config
sWing = exposedWingArea_ft2 config
putStrLn "-------------------------- configuration: -----------------------------"
print config
putStrLn "\n------------------------- wetted area: ------------------------------"
printWettedArea config
putStrLn "\n------------------------ upsweep drag: ------------------------------"
let cD_pUpsweep' = cD_pUpsweep config
cD_upsweep' = cD_upsweep config
_ <- cD_pUpsweep' `seq` printf "Cd upsweep referenced to fuselage: %.7f\n" cD_pUpsweep'
_ <- cD_upsweep' `seq` printf "Cd upsweep referenced to wing: %.7f\n" cD_upsweep'
putStrLn "\n---------------------- frictional drag: -----------------------------"
_ <- cF `seq` printf "skin friction coeff Cf referenced to wetted area:\t%.5f\n" cF
_ <- sWet `seq` printf "skin friction coeff Cf referenced to wing:\t\t%.5f\n\n" (cF*sWet/sWing)
putStrLn "\n------------------------- form drag: --------------------------------"
_ <- k `seq` printf "form factor k:\t\t\t\t%.5f\n" k
_ <- printf "Cd_form referenced to wetted area:\t%.5f\n" ((k-1)*cF)
_ <- printf "Cd_form referenced to wing area:\t%.5f\n" ((k-1)*cF*sWet/sWing)
putStrLn "\n----------------- frictional drag + form drag: ----------------------"
_ <- cD_formAndFrictional' `seq` printf "Cd_frictional_form referenced to wing:\t%.5f\n" cD_formAndFrictional'
putStrLn "\n---- total parasitic drag (for now: frictional + form + upsweep): ----"
_ <- printf "Cd_parasitic referenced to wing:\t%.5f\n\n" (cD_formAndFrictional' + cD_upsweep')
-- make some plots
--plotCfModel
--plotFormFactorModel
return ()
plotCfModel :: IO ()
plotCfModel = do
let line = plot_lines_values ^= [[ (LogValue re, LogValue (cfOfReynolds' re))
| y <- [5,5.1..9::Double], let re = 10**y]]
$ plot_lines_title ^= "cf"
$ defaultPlotLines
chart = layout1_title ^= "cf vs Reynolds"
$ layout1_plots ^= [Left (toPlot line)]
$ defaultLayout1
renderableToWindow (toRenderable chart) 640 480
_ <- renderableToPNGFile (toRenderable chart) 640 480 "cf_model.png"
return ()
plotFormFactorModel :: IO ()
plotFormFactorModel = do
let line = plot_lines_values ^= [[ (fr, formFactorMarkup 0.55 fr) | fr <- [4,4.1..10::Double]]]
$ plot_lines_title ^= "k"
$ defaultPlotLines
chart = layout1_title ^= "form factor markup k vs body fineness ratio (mach 0.55)"
$ layout1_plots ^= [Left (toPlot line)]
$ defaultLayout1
renderableToWindow (toRenderable chart) 640 480
_ <- renderableToPNGFile (toRenderable chart) 640 480 "k_model.png"
return ()
| ghorn/conceptual-design | ParasiticSummary.hs | bsd-3-clause | 3,282 | 0 | 20 | 657 | 762 | 382 | 380 | 64 | 1 |
-- Copyright (c) 2016-present, Facebook, Inc.
-- All rights reserved.
--
-- This source code is licensed under the BSD-style license found in the
-- LICENSE file in the root directory of this source tree.
{-# LANGUAGE OverloadedStrings #-}
module Duckling.Quantity.MN.Corpus
( corpus
) where
import Data.String
import Prelude
import Duckling.Locale
import Duckling.Resolve
import Duckling.Quantity.Types
import Duckling.Testing.Types
corpus :: Corpus
corpus = (testContext {locale = makeLocale MN Nothing}, testOptions, allExamples)
allExamples :: [Example]
allExamples = concat
[ examples (simple Pound 2 Nothing)
[ "2 фунт"
]
, examples (simple Gram 2 Nothing)
[ "2 грамм"
, "хоёр грамм"
, "2000 миллиграмм"
, "2000 мг"
]
, examples (simple Gram 1000 Nothing)
[ "килограмм"
, "кг"
]
, examples (simple Gram 2000 Nothing)
[ "2 килограмм"
, "2 кг"
, "2000 грамм"
]
, examples (simple Pound 1 Nothing)
[ "фунт"
, "1 фунт"
]
, examples (simple Ounce 2 Nothing)
[ "2 унц"
]
, examples (simple Gram 500 Nothing)
[ "500 грамм"
, "500г"
, "500 г"
, "0.5 кг"
]
]
| facebookincubator/duckling | Duckling/Quantity/MN/Corpus.hs | bsd-3-clause | 1,472 | 0 | 9 | 512 | 274 | 159 | 115 | 37 | 1 |
import Control.Applicative
data List a = Empty | Cons a (List a)
instance Functor List where
-- fmap :: (a -> b) -> [a] -> [b]
fmap f Empty = Empty
fmap f (Cons a b) = Cons (f a) (fmap f b)
instance Applicative List where
--pure :: a -> [a]
pure a = Cons a Empty
-- (<*>) :: [a -> b] -> [a] -> [b]
_ <*> Empty = Empty
Empty <*> _ = Empty
(Cons a Empty) <*> (Cons b Empty) = Cons (a b) Empty
(Cons a t) <*> (Cons b t2) = Cons (a b) (t <*> t2)
instance Alternative List where
empty = Empty
Empty <|> b = b  -- Empty is the identity for <|>, so keep the other list
(Cons a t) <|> b = (Cons a (t <|> b))
join :: List (List a) -> List a
join Empty = Empty
join (Cons a b) = a <|> join b
instance Monad List where
return = pure
a >>= f = join (fmap f a)
| gahara/parsers-for-dummies | test2.hs | bsd-3-clause | 725 | 1 | 9 | 194 | 356 | 176 | 180 | -1 | -1 |
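A small usage sketch exercising the instances above (not part of the original test file; it would need to live in the same module and relies on the corrected `Empty <|> b = b` case noted in the comment above).

```haskell
fromHs :: [a] -> List a
fromHs = foldr Cons Empty

toHs :: List a -> [a]
toHs Empty      = []
toHs (Cons x t) = x : toHs t

demo :: [Int]
demo = toHs (fromHs [1, 2, 3] >>= \x -> pure (x * 10))  -- [10,20,30]
```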
{-|
Module: FRP.Timeless.Framework.UI.Scene
Copyright: (c) 2015 Rongcui Dong
License: BSD3
Maintainer: Rongcui Dong <karl_1702@188.com>
-}
module FRP.Timeless.Framework.UI.Scene
where
import FRP.Timeless
import FRP.Timeless.Framework.UI.Events
newtype Scene = Scene {
sceneBox :: forall m s. Monad m => Signal s m UIInput ()
}
| carldong/timeless-SDL | src/FRP/Timeless/Framework/UI/Scene.hs | bsd-3-clause | 351 | 0 | 10 | 65 | 60 | 38 | 22 | -1 | -1 |
module Main where
import Ivory.Tower.Config
import Ivory.OS.FreeRTOS.Tower.STM32
import LDrive.Platforms
import LDrive.Tests.ADCMulti (app)
main :: IO ()
main = compileTowerSTM32FreeRTOS testplatform_stm32 p $
app (stm32config_clock . testplatform_stm32)
testplatform_adcs
testplatform_pwm
testplatform_uart
testplatform_leds
where
p topts = getConfig topts testPlatformParser
| sorki/odrive | test/ADCMultiTest.hs | bsd-3-clause | 441 | 0 | 8 | 101 | 91 | 51 | 40 | 13 | 1 |
{-# LANGUAGE ScopedTypeVariables #-}
module Network.HaskellNet.SMTP
( -- * Types
Command(..)
, Response(..)
, SMTPConnection
-- * Establishing Connection
, connectSMTPPort
, connectSMTP
, connectStream
-- * Operation to a Connection
, sendCommand
, closeSMTP
-- * Other Useful Operations
, sendMail
, doSMTPPort
, doSMTP
, doSMTPStream
, sendMimeMail
)
where
import Network.HaskellNet.BSStream
import Data.ByteString (ByteString)
import qualified Data.ByteString.Char8 as BS
import Network.BSD (getHostName)
import Network
import Control.Applicative ((<$>))
import Control.Exception
import Control.Monad (unless)
import Data.Char (isDigit)
import Network.HaskellNet.Auth
import System.IO
import Network.Mail.Mime
import qualified Data.ByteString.Lazy as B
import qualified Data.ByteString as S
import qualified Data.Text.Lazy as LT
import qualified Data.Text as T
import Prelude hiding (catch)
data SMTPConnection = SMTPC !BSStream ![ByteString]
data Command = HELO String
| EHLO String
| MAIL String
| RCPT String
| DATA ByteString
| EXPN String
| VRFY String
| HELP String
| AUTH AuthType UserName Password
| NOOP
| RSET
| QUIT
deriving (Show, Eq)
type ReplyCode = Int
data Response = Ok
| SystemStatus
| HelpMessage
| ServiceReady
| ServiceClosing
| UserNotLocal
| CannotVerify
| StartMailInput
| ServiceNotAvailable
| MailboxUnavailable
| ErrorInProcessing
| InsufficientSystemStorage
| SyntaxError
| ParameterError
| CommandNotImplemented
| BadSequence
| ParameterNotImplemented
| MailboxUnavailableError
| UserNotLocalError
| ExceededStorage
| MailboxNotAllowed
| TransactionFailed
deriving (Show, Eq)
-- | Connect to the SMTP server with the specified name and port number.
connectSMTPPort :: String -- ^ name of the server
-> PortNumber -- ^ port number
-> IO SMTPConnection
connectSMTPPort hostname port =
(handleToStream <$> connectTo hostname (PortNumber port))
>>= connectStream
-- | Connect to the SMTP server with the specified name, on port 25.
connectSMTP :: String -- ^ name of the server
-> IO SMTPConnection
connectSMTP = flip connectSMTPPort 25
tryCommand :: BSStream -> Command -> Int -> ReplyCode
-> IO ByteString
tryCommand st cmd tries expectedReply | tries <= 0 = do
bsClose st
fail $ "cannot execute command " ++ show cmd ++
", expected reply code " ++ show expectedReply
tryCommand st cmd tries expectedReply = do
(code, msg) <- sendCommand (SMTPC st []) cmd
if code == expectedReply then
return msg else
tryCommand st cmd (tries - 1) expectedReply
-- | Create an 'SMTPConnection' from an already-connected stream.
connectStream :: BSStream -> IO SMTPConnection
connectStream st =
do (code1, _) <- parseResponse st
unless (code1 == 220) $
do bsClose st
fail "cannot connect to the server"
senderHost <- getHostName
msg <- tryCommand st (EHLO senderHost) 3 250
return (SMTPC st (tail $ BS.lines msg))
parseResponse :: BSStream -> IO (ReplyCode, ByteString)
parseResponse st =
do (code, bdy) <- readLines
return (read $ BS.unpack code, BS.unlines bdy)
where readLines =
do l <- bsGetLine st
let (c, bdy) = BS.span isDigit l
if not (BS.null bdy) && BS.head bdy == '-'
then do (c2, ls) <- readLines
return (c2, (BS.tail bdy:ls))
else return (c, [BS.tail bdy])
-- | Send a command to the server.
sendCommand :: SMTPConnection -> Command -> IO (ReplyCode, ByteString)
sendCommand (SMTPC conn _) (DATA dat) =
do bsPutCrLf conn $ BS.pack "DATA"
(code, _) <- parseResponse conn
unless (code == 354) $ fail "this server cannot accept any data."
mapM_ sendLine $ BS.lines dat ++ [BS.pack "."]
parseResponse conn
where sendLine l = bsPutCrLf conn l
sendCommand (SMTPC conn _) (AUTH LOGIN username password) =
do bsPutCrLf conn command
(_, _) <- parseResponse conn
bsPutCrLf conn $ BS.pack userB64
(_, _) <- parseResponse conn
bsPutCrLf conn $ BS.pack passB64
parseResponse conn
where command = BS.pack $ "AUTH LOGIN"
(userB64, passB64) = login username password
sendCommand (SMTPC conn _) (AUTH at username password) =
do bsPutCrLf conn command
(code, msg) <- parseResponse conn
unless (code == 334) $ fail "authentication failed."
bsPutCrLf conn $ BS.pack $ auth at (BS.unpack msg) username password
parseResponse conn
where command = BS.pack $ unwords ["AUTH", show at]
sendCommand (SMTPC conn _) meth =
do bsPutCrLf conn $ BS.pack command
parseResponse conn
where command = case meth of
(HELO param) -> "HELO " ++ param
(EHLO param) -> "EHLO " ++ param
(MAIL param) -> "MAIL FROM:<" ++ param ++ ">"
(RCPT param) -> "RCPT TO:<" ++ param ++ ">"
(EXPN param) -> "EXPN " ++ param
(VRFY param) -> "VRFY " ++ param
(HELP msg) -> if null msg
then "HELP\r\n"
else "HELP " ++ msg
NOOP -> "NOOP"
RSET -> "RSET"
QUIT -> "QUIT"
(DATA _) ->
error "BUG: DATA pattern should be matched by sendCommand patterns"
(AUTH _ _ _) ->
error "BUG: AUTH pattern should be matched by sendCommand patterns"
-- | Close the connection. Note that, as the comment below explains, this
-- currently just closes the stream without sending the QUIT command first.
closeSMTP :: SMTPConnection -> IO ()
closeSMTP (SMTPC conn _) = bsClose conn
{-
I must be being stupid here: I can't seem to catch the exception arising
from the connection already being closed. The code below would be the
correct way to do it, but instead we're being naughty above by just
closing the connection without first sending QUIT.
closeSMTP c@(SMTPC conn _) =
do sendCommand c QUIT
bsClose conn `catch` \(_ :: IOException) -> return ()
-}
-- | Send a mail to the server. If something goes wrong, an 'IOException'
-- is raised.
sendMail :: String -- ^ sender mail
-> [String] -- ^ receivers
-> ByteString -- ^ data
-> SMTPConnection
-> IO ()
sendMail sender receivers dat conn =
catcher `handle` mainProc
where mainProc =
do (250, _) <- sendCommand conn (MAIL sender)
vals <- mapM (sendCommand conn . RCPT) receivers
unless (all ((==250) . fst) vals) $ fail "sendMail error"
(250, _) <- sendCommand conn (DATA dat)
return ()
catcher e@(PatternMatchFail _) = throwIO e
-- | doSMTPPort opens a connection, runs an IO action with the
-- connection, and then closes it.
doSMTPPort :: String -> PortNumber -> (SMTPConnection -> IO a) -> IO a
doSMTPPort host port execution =
bracket (connectSMTPPort host port) closeSMTP execution
-- | doSMTP is similar to doSMTPPort, except that it does not take a
-- port number and connects to the server on port 25.
doSMTP :: String -> (SMTPConnection -> IO a) -> IO a
doSMTP host execution = doSMTPPort host 25 execution
-- | doSMTPStream is similar to doSMTPPort, except that it takes a
-- 'BSStream' instead of a hostname and port number.
doSMTPStream :: BSStream -> (SMTPConnection -> IO a) -> IO a
doSMTPStream s execution = bracket (connectStream s) closeSMTP execution
sendMimeMail :: String -> String -> String -> LT.Text
-> LT.Text -> [(T.Text, FilePath)] -> SMTPConnection -> IO ()
sendMimeMail to from subject plainBody htmlBody attachments con = do
myMail <- simpleMail (Address Nothing $ T.pack to) (Address Nothing
$ T.pack from)
(T.pack subject) plainBody htmlBody attachments
renderedMail <- renderMail' myMail
sendMail from [to] (lazyToStrict renderedMail) con
closeSMTP con
-- haskellNet uses strict bytestrings
-- TODO: look at making haskellnet lazy
lazyToStrict :: B.ByteString -> S.ByteString
lazyToStrict = S.concat . B.toChunks
crlf :: BS.ByteString
crlf = BS.pack "\r\n"
bsPutCrLf :: BSStream -> ByteString -> IO ()
bsPutCrLf h s = bsPut h s >> bsPut h crlf >> bsFlush h
| danchoi/imapget | src/Network/HaskellNet/SMTP.hs | bsd-3-clause | 9,059 | 0 | 17 | 2,875 | 2,210 | 1,143 | 1,067 | 197 | 13 |
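A hypothetical usage sketch of the API above (the hostname and addresses are made up, and it assumes a server that accepts unauthenticated mail): `doSMTP` brackets the action with connect/close, and `sendMail` drives the MAIL/RCPT/DATA exchange.

```haskell
import qualified Data.ByteString.Char8 as BS
import Network.HaskellNet.SMTP

main :: IO ()
main = doSMTP "smtp.example.com" $ \conn ->
    sendMail "alice@example.com" ["bob@example.com"]
             (BS.pack "Subject: hello\r\n\r\nHi Bob!") conn
```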
{-# LANGUAGE ExplicitForAll #-}
-- | This game mode lets you manage your own input. Pressing ESC will not abort the program.
-- You also don't get automatic pan and zoom controls like with `displayInWindow`.
module Graphics.Gloss.Interface.IO.Game
( module Graphics.Gloss.Data.Display
, module Graphics.Gloss.Data.Picture
, module Graphics.Gloss.Data.Color
, playIO
, Event(..), Key(..), SpecialKey(..), MouseButton(..), KeyState(..), Modifiers(..))
where
import Graphics.Gloss.Data.Display
import Graphics.Gloss.Data.Picture
import Graphics.Gloss.Data.Color
import Graphics.Gloss.Internals.Interface.Game
import Graphics.Gloss.Internals.Interface.Backend
-- | Play a game in a window, using IO actions to build the pictures.
playIO :: forall world
. Display -- ^ Display mode.
-> Color -- ^ Background color.
-> Int -- ^ Number of simulation steps to take for each second of real time.
-> world -- ^ The initial world.
        -> (world -> IO Picture) -- ^ An action to convert the world to a picture.
-> (Event -> world -> IO world) -- ^ A function to handle input events.
-> (Float -> world -> IO world) -- ^ A function to step the world one iteration.
-- It is passed the period of time (in seconds) needing to be advanced.
-> IO ()
playIO display backColor simResolution
worldStart worldToPicture worldHandleEvent worldAdvance
= playWithBackendIO defaultBackendState
display backColor simResolution
worldStart worldToPicture worldHandleEvent worldAdvance
False
| gscalzo/HaskellTheHardWay | gloss-try/gloss-master/gloss/Graphics/Gloss/Interface/IO/Game.hs | mit | 1,743 | 0 | 16 | 491 | 249 | 157 | 92 | 27 | 1 |
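A minimal sketch of a `playIO` call (not from the gloss sources; the window size, colours and world type are made up): the world is a single `Float` accumulating elapsed time, rendered as text.

```haskell
import Graphics.Gloss.Interface.IO.Game

main :: IO ()
main = playIO (InWindow "clock" (300, 100) (10, 10))   -- display mode
              white                                    -- background colour
              30                                       -- simulation steps per second
              (0 :: Float)                             -- initial world
              (\t -> return (scale 0.2 0.2 (text (show (round t :: Int)))))
              (\_event t -> return t)                  -- ignore input
              (\dt t -> return (t + dt))               -- advance elapsed time
```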
-----------------------------------------------------------------------------
-- |
-- Module : Distribution.Simple.Register
-- Copyright : Isaac Jones 2003-2004
--
-- Maintainer : cabal-devel@haskell.org
-- Portability : portable
--
-- This module deals with registering and unregistering packages. There are a
-- couple ways it can do this, one is to do it directly. Another is to generate
-- a script that can be run later to do it. The idea here being that the user
-- is shielded from the details of what command to use for package registration
-- for a particular compiler. In practice this aspect was not especially
-- popular so we also provide a way to simply generate the package registration
-- file which then must be manually passed to @ghc-pkg@. It is possible to
-- generate registration information for where the package is to be installed,
-- or alternatively to register the package inplace in the build tree. The
-- latter is occasionally handy, and will become more important when we try to
-- build multi-package systems.
--
-- This module does not delegate anything to the per-compiler modules but just
-- mixes it all in in this module, which is rather unsatisfactory. The script
-- generation and the unregister feature are not well used or tested.
{- Copyright (c) 2003-2004, Isaac Jones
All rights reserved.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are
met:
* Redistributions of source code must retain the above copyright
notice, this list of conditions and the following disclaimer.
* Redistributions in binary form must reproduce the above
copyright notice, this list of conditions and the following
disclaimer in the documentation and/or other materials provided
with the distribution.
* Neither the name of Isaac Jones nor the names of other
contributors may be used to endorse or promote products derived
from this software without specific prior written permission.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. -}
module Distribution.Simple.Register (
register,
unregister,
initPackageDB,
invokeHcPkg,
registerPackage,
generateRegistrationInfo,
inplaceInstalledPackageInfo,
absoluteInstalledPackageInfo,
generalInstalledPackageInfo,
) where
import Distribution.Simple.LocalBuildInfo
( LocalBuildInfo(..), ComponentLocalBuildInfo(..)
, ComponentName(..), getComponentLocalBuildInfo
, LibraryName(..)
, InstallDirs(..), absoluteInstallDirs )
import Distribution.Simple.BuildPaths (haddockName)
import qualified Distribution.Simple.GHC as GHC
import qualified Distribution.Simple.LHC as LHC
import qualified Distribution.Simple.Hugs as Hugs
import qualified Distribution.Simple.UHC as UHC
import Distribution.Simple.Compiler
( compilerVersion, Compiler, CompilerFlavor(..), compilerFlavor
, PackageDBStack, registrationPackageDB )
import Distribution.Simple.Program
( ProgramConfiguration, ConfiguredProgram
, runProgramInvocation, requireProgram, lookupProgram
, ghcPkgProgram, lhcPkgProgram )
import Distribution.Simple.Program.Script
( invocationAsSystemScript )
import qualified Distribution.Simple.Program.HcPkg as HcPkg
import Distribution.Simple.Setup
( RegisterFlags(..), CopyDest(..)
, fromFlag, fromFlagOrDefault, flagToMaybe )
import Distribution.PackageDescription
( PackageDescription(..), Library(..), BuildInfo(..), hcOptions )
import Distribution.Package
( Package(..), packageName, InstalledPackageId(..) )
import Distribution.InstalledPackageInfo
( InstalledPackageInfo, InstalledPackageInfo_(InstalledPackageInfo)
, showInstalledPackageInfo )
import qualified Distribution.InstalledPackageInfo as IPI
import Distribution.Simple.Utils
( writeUTF8File, writeFileAtomic, setFileExecutable
, die, notice, setupMessage )
import Distribution.System
( OS(..), buildOS )
import Distribution.Text
( display )
import Distribution.Version ( Version(..) )
import Distribution.Verbosity as Verbosity
( Verbosity, normal )
import Distribution.Compat.Exception
( tryIO )
import System.FilePath ((</>), (<.>), isAbsolute)
import System.Directory
( getCurrentDirectory, removeDirectoryRecursive )
import Data.Maybe
( isJust, fromMaybe, maybeToList )
import Data.List
( partition, nub )
import qualified Data.ByteString.Lazy.Char8 as BS.Char8
-- -----------------------------------------------------------------------------
-- Registration
register :: PackageDescription -> LocalBuildInfo
-> RegisterFlags -- ^Install in the user's database?; verbose
-> IO ()
register pkg@PackageDescription { library = Just lib } lbi regFlags
= do
let clbi = getComponentLocalBuildInfo lbi CLibName
installedPkgInfo <- generateRegistrationInfo
verbosity pkg lib lbi clbi inplace distPref
-- Three different modes:
case () of
_ | modeGenerateRegFile -> writeRegistrationFile installedPkgInfo
| modeGenerateRegScript -> writeRegisterScript installedPkgInfo
| otherwise -> registerPackage verbosity
installedPkgInfo pkg lbi inplace packageDbs
where
modeGenerateRegFile = isJust (flagToMaybe (regGenPkgConf regFlags))
regFile = fromMaybe (display (packageId pkg) <.> "conf")
(fromFlag (regGenPkgConf regFlags))
modeGenerateRegScript = fromFlag (regGenScript regFlags)
inplace = fromFlag (regInPlace regFlags)
-- FIXME: there's really no guarantee this will work.
-- registering into a totally different db stack can
-- fail if dependencies cannot be satisfied.
packageDbs = nub $ withPackageDB lbi
++ maybeToList (flagToMaybe (regPackageDB regFlags))
distPref = fromFlag (regDistPref regFlags)
verbosity = fromFlag (regVerbosity regFlags)
writeRegistrationFile installedPkgInfo = do
notice verbosity ("Creating package registration file: " ++ regFile)
writeUTF8File regFile (showInstalledPackageInfo installedPkgInfo)
writeRegisterScript installedPkgInfo =
case compilerFlavor (compiler lbi) of
GHC -> do (ghcPkg, _) <- requireProgram verbosity ghcPkgProgram (withPrograms lbi)
writeHcPkgRegisterScript verbosity installedPkgInfo ghcPkg packageDbs
LHC -> do (lhcPkg, _) <- requireProgram verbosity lhcPkgProgram (withPrograms lbi)
writeHcPkgRegisterScript verbosity installedPkgInfo lhcPkg packageDbs
Hugs -> notice verbosity "Registration scripts not needed for hugs"
JHC -> notice verbosity "Registration scripts not needed for jhc"
NHC -> notice verbosity "Registration scripts not needed for nhc98"
UHC -> notice verbosity "Registration scripts not needed for uhc"
_ -> die "Registration scripts are not implemented for this compiler"
register _ _ regFlags = notice verbosity "No package to register"
where
verbosity = fromFlag (regVerbosity regFlags)
generateRegistrationInfo :: Verbosity
-> PackageDescription
-> Library
-> LocalBuildInfo
-> ComponentLocalBuildInfo
-> Bool
-> FilePath
-> IO InstalledPackageInfo
generateRegistrationInfo verbosity pkg lib lbi clbi inplace distPref = do
--TODO: eliminate pwd!
pwd <- getCurrentDirectory
--TODO: the method of setting the InstalledPackageId is compiler specific
-- this aspect should be delegated to a per-compiler helper.
let comp = compiler lbi
ipid <-
case compilerFlavor comp of
GHC | compilerVersion comp >= Version [6,11] [] -> do
s <- GHC.libAbiHash verbosity pkg lbi lib clbi
return (InstalledPackageId (display (packageId pkg) ++ '-':s))
_other -> do
return (InstalledPackageId (display (packageId pkg)))
let installedPkgInfo
| inplace = inplaceInstalledPackageInfo pwd distPref
pkg lib lbi clbi
| otherwise = absoluteInstalledPackageInfo
pkg lib lbi clbi
return installedPkgInfo{ IPI.installedPackageId = ipid }
-- | Create an empty package DB at the specified location.
initPackageDB :: Verbosity -> Compiler -> ProgramConfiguration -> FilePath
-> IO ()
initPackageDB verbosity comp conf dbPath =
case (compilerFlavor comp) of
GHC -> GHC.initPackageDB verbosity conf dbPath
_ -> die "Distribution.Simple.Register.initPackageDB: \
\not implemented for this compiler"
-- | Run @hc-pkg@ using a given package DB stack, directly forwarding the
-- provided command-line arguments to it.
invokeHcPkg :: Verbosity -> Compiler -> ProgramConfiguration -> PackageDBStack
-> [String] -> IO ()
invokeHcPkg verbosity comp conf dbStack extraArgs =
case (compilerFlavor comp) of
GHC -> GHC.invokeHcPkg verbosity conf dbStack extraArgs
_ -> die "Distribution.Simple.Register.invokeHcPkg: \
\not implemented for this compiler"
registerPackage :: Verbosity
-> InstalledPackageInfo
-> PackageDescription
-> LocalBuildInfo
-> Bool
-> PackageDBStack
-> IO ()
registerPackage verbosity installedPkgInfo pkg lbi inplace packageDbs = do
let msg = if inplace
then "In-place registering"
else "Registering"
setupMessage verbosity msg (packageId pkg)
case compilerFlavor (compiler lbi) of
GHC -> GHC.registerPackage verbosity installedPkgInfo pkg lbi inplace packageDbs
LHC -> LHC.registerPackage verbosity installedPkgInfo pkg lbi inplace packageDbs
Hugs -> Hugs.registerPackage verbosity installedPkgInfo pkg lbi inplace packageDbs
UHC -> UHC.registerPackage verbosity installedPkgInfo pkg lbi inplace packageDbs
JHC -> notice verbosity "Registering for jhc (nothing to do)"
NHC -> notice verbosity "Registering for nhc98 (nothing to do)"
_ -> die "Registering is not implemented for this compiler"
writeHcPkgRegisterScript :: Verbosity
-> InstalledPackageInfo
-> ConfiguredProgram
-> PackageDBStack
-> IO ()
writeHcPkgRegisterScript verbosity installedPkgInfo hcPkg packageDbs = do
let invocation = HcPkg.reregisterInvocation hcPkg Verbosity.normal
packageDbs (Right installedPkgInfo)
regScript = invocationAsSystemScript buildOS invocation
notice verbosity ("Creating package registration script: " ++ regScriptFileName)
writeUTF8File regScriptFileName regScript
setFileExecutable regScriptFileName
regScriptFileName :: FilePath
regScriptFileName = case buildOS of
Windows -> "register.bat"
_ -> "register.sh"
-- -----------------------------------------------------------------------------
-- Making the InstalledPackageInfo
-- | Construct 'InstalledPackageInfo' for a library in a package, given a set
-- of installation directories.
--
generalInstalledPackageInfo
:: ([FilePath] -> [FilePath]) -- ^ Translate relative include dir paths to
-- absolute paths.
-> PackageDescription
-> Library
-> ComponentLocalBuildInfo
-> InstallDirs FilePath
-> InstalledPackageInfo
generalInstalledPackageInfo adjustRelIncDirs pkg lib clbi installDirs =
InstalledPackageInfo {
--TODO: do not open-code this conversion from PackageId to InstalledPackageId
IPI.installedPackageId = InstalledPackageId (display (packageId pkg)),
IPI.sourcePackageId = packageId pkg,
IPI.license = license pkg,
IPI.copyright = copyright pkg,
IPI.maintainer = maintainer pkg,
IPI.author = author pkg,
IPI.stability = stability pkg,
IPI.homepage = homepage pkg,
IPI.pkgUrl = pkgUrl pkg,
IPI.synopsis = synopsis pkg,
IPI.description = description pkg,
IPI.category = category pkg,
IPI.exposed = libExposed lib,
IPI.exposedModules = exposedModules lib,
IPI.hiddenModules = otherModules bi,
IPI.trusted = IPI.trusted IPI.emptyInstalledPackageInfo,
IPI.importDirs = [ libdir installDirs | hasModules ],
IPI.libraryDirs = if hasLibrary
then libdir installDirs : extraLibDirs bi
else extraLibDirs bi,
IPI.hsLibraries = [ libname
| LibraryName libname <- componentLibraries clbi
, hasLibrary ],
IPI.extraLibraries = extraLibs bi,
IPI.extraGHCiLibraries = [],
IPI.includeDirs = absinc ++ adjustRelIncDirs relinc,
IPI.includes = includes bi,
IPI.depends = map fst (componentPackageDeps clbi),
IPI.hugsOptions = hcOptions Hugs bi,
IPI.ccOptions = [], -- Note. NOT ccOptions bi!
-- We don't want cc-options to be propagated
-- to C compilations in other packages.
IPI.ldOptions = ldOptions bi,
IPI.frameworkDirs = [],
IPI.frameworks = frameworks bi,
IPI.haddockInterfaces = [haddockdir installDirs </> haddockName pkg],
IPI.haddockHTMLs = [htmldir installDirs]
}
where
bi = libBuildInfo lib
(absinc, relinc) = partition isAbsolute (includeDirs bi)
hasModules = not $ null (exposedModules lib)
&& null (otherModules bi)
hasLibrary = hasModules || not (null (cSources bi))
-- | Construct 'InstalledPackageInfo' for a library that is inplace in the
-- build tree.
--
-- This function knows about the layout of inplace packages.
--
inplaceInstalledPackageInfo :: FilePath -- ^ top of the build tree
-> FilePath -- ^ location of the dist tree
-> PackageDescription
-> Library
-> LocalBuildInfo
-> ComponentLocalBuildInfo
-> InstalledPackageInfo
inplaceInstalledPackageInfo inplaceDir distPref pkg lib lbi clbi =
generalInstalledPackageInfo adjustRelativeIncludeDirs pkg lib clbi
installDirs
where
adjustRelativeIncludeDirs = map (inplaceDir </>)
installDirs =
(absoluteInstallDirs pkg lbi NoCopyDest) {
libdir = inplaceDir </> buildDir lbi,
datadir = inplaceDir,
datasubdir = distPref,
docdir = inplaceDocdir,
htmldir = inplaceHtmldir,
haddockdir = inplaceHtmldir
}
inplaceDocdir = inplaceDir </> distPref </> "doc"
inplaceHtmldir = inplaceDocdir </> "html" </> display (packageName pkg)
-- | Construct 'InstalledPackageInfo' for the final install location of a
-- library package.
--
-- This function knows about the layout of installed packages.
--
absoluteInstalledPackageInfo :: PackageDescription
-> Library
-> LocalBuildInfo
-> ComponentLocalBuildInfo
-> InstalledPackageInfo
absoluteInstalledPackageInfo pkg lib lbi clbi =
generalInstalledPackageInfo adjustReativeIncludeDirs pkg lib clbi installDirs
where
-- For installed packages we install all include files into one dir,
-- whereas in the build tree they may live in multiple local dirs.
adjustReativeIncludeDirs _
| null (installIncludes bi) = []
| otherwise = [includedir installDirs]
bi = libBuildInfo lib
installDirs = absoluteInstallDirs pkg lbi NoCopyDest
-- -----------------------------------------------------------------------------
-- Unregistration
unregister :: PackageDescription -> LocalBuildInfo -> RegisterFlags -> IO ()
unregister pkg lbi regFlags = do
let pkgid = packageId pkg
genScript = fromFlag (regGenScript regFlags)
verbosity = fromFlag (regVerbosity regFlags)
packageDb = fromFlagOrDefault (registrationPackageDB (withPackageDB lbi))
(regPackageDB regFlags)
installDirs = absoluteInstallDirs pkg lbi NoCopyDest
setupMessage verbosity "Unregistering" pkgid
case compilerFlavor (compiler lbi) of
GHC ->
let Just ghcPkg = lookupProgram ghcPkgProgram (withPrograms lbi)
invocation = HcPkg.unregisterInvocation ghcPkg Verbosity.normal
packageDb pkgid
in if genScript
then writeFileAtomic unregScriptFileName
(BS.Char8.pack $ invocationAsSystemScript buildOS invocation)
else runProgramInvocation verbosity invocation
Hugs -> do
_ <- tryIO $ removeDirectoryRecursive (libdir installDirs)
return ()
NHC -> do
_ <- tryIO $ removeDirectoryRecursive (libdir installDirs)
return ()
_ ->
die ("only unregistering with GHC and Hugs is implemented")
unregScriptFileName :: FilePath
unregScriptFileName = case buildOS of
Windows -> "unregister.bat"
_ -> "unregister.sh"
| jwiegley/ghc-release | libraries/Cabal/cabal/Distribution/Simple/Register.hs | gpl-3.0 | 18,469 | 0 | 22 | 4,859 | 3,029 | 1,620 | 1,409 | 283 | 8 |
<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE helpset PUBLIC "-//Sun Microsystems Inc.//DTD JavaHelp HelpSet Version 2.0//EN" "http://java.sun.com/products/javahelp/helpset_2_0.dtd">
<helpset version="2.0" xml:lang="tr-TR">
<title>Aktif Tarama Kuralları - Blpha | ZAP Uzantısı</title>
<maps>
<homeID>top</homeID>
<mapref location="map.jhm"/>
</maps>
<view>
<name>TOC</name>
<label>İçindekiler</label>
<type>org.zaproxy.zap.extension.help.ZapTocView</type>
<data>toc.xml</data>
</view>
<view>
<name>Index</name>
<label>İçerik</label>
<type>javax.help.IndexView</type>
<data>index.xml</data>
</view>
<view>
<name>Search</name>
<label>Arama</label>
<type>javax.help.SearchView</type>
<data engine="com.sun.java.help.search.DefaultSearchEngine">
JavaHelpSearch
</data>
</view>
<view>
<name>Favorites</name>
<label>Favoriler</label>
<type>javax.help.FavoritesView</type>
</view>
</helpset> | veggiespam/zap-extensions | addOns/simpleexample/src/main/javahelp/org/zaproxy/zap/extension/simpleexample/resources/help_tr_TR/helpset_tr_TR.hs | apache-2.0 | 1,001 | 80 | 67 | 163 | 434 | 218 | 216 | -1 | -1 |
{-# OPTIONS_HADDOCK hide #-}
{-# LANGUAGE CPP #-}
#include "fusion-phases.h"
-- | PR instance for tuples.
module Data.Array.Parallel.PArray.PData.Tuple4
( PData(..), PDatas(..)
, zip4PD)
where
import Data.Array.Parallel.Pretty
import Data.Array.Parallel.PArray.PData.Base
import Data.Array.Parallel.PArray.PData.Nested
import GHC.Exts
import Prelude hiding (zip, unzip)
import qualified Data.Typeable as T
import qualified Data.Vector as V
import qualified Data.List as P
-------------------------------------------------------------------------------
data instance PData (a, b, c, d)
= PTuple4 (PData a) (PData b) (PData c) (PData d)
data instance PDatas (a, b, c, d)
= PTuple4s (PDatas a) (PDatas b) (PDatas c) (PDatas d)
-- PR -------------------------------------------------------------------------
instance (PR a, PR b, PR c, PR d) => PR (a, b, c, d) where
{-# NOINLINE validPR #-}
validPR (PTuple4 xs ys zs ds)
= validPR xs && validPR ys && validPR zs && validPR ds
{-# NOINLINE nfPR #-}
nfPR (PTuple4 arr1 arr2 arr3 arr4)
= nfPR arr1 `seq` nfPR arr2 `seq` nfPR arr3 `seq` nfPR arr4 `seq` ()
{-# NOINLINE similarPR #-}
similarPR (x1, y1, z1, d1) (x2, y2, z2, d2)
= similarPR x1 x2
&& similarPR y1 y2
&& similarPR z1 z2
&& similarPR d1 d2
{-# NOINLINE coversPR #-}
coversPR weak (PTuple4 arr1 arr2 arr3 arr4) ix
= coversPR weak arr1 ix
&& coversPR weak arr2 ix
&& coversPR weak arr3 ix
&& coversPR weak arr4 ix
{-# NOINLINE pprpPR #-}
pprpPR (x, y, z, d)
= text "Tuple4 "
<> vcat [ pprpPR x
, pprpPR y
, pprpPR z
, pprpPR d ]
{-# NOINLINE pprpDataPR #-}
pprpDataPR (PTuple4 xs ys zs ds)
= text "PTuple4 "
<> vcat [ pprpDataPR xs
, pprpDataPR ys
, pprpDataPR zs
, pprpDataPR ds]
{-# NOINLINE typeRepPR #-}
typeRepPR x@(a, b, c, d)
= T.typeOf4 x
`T.mkAppTy` (typeRepPR a)
`T.mkAppTy` (typeRepPR b)
`T.mkAppTy` (typeRepPR c)
`T.mkAppTy` (typeRepPR d)
{-# NOINLINE typeRepDataPR #-}
typeRepDataPR (PTuple4 as bs cs ds)
= T.typeOf4 ((), (), (), ())
`T.mkAppTy` (typeRepDataPR as)
`T.mkAppTy` (typeRepDataPR bs)
`T.mkAppTy` (typeRepDataPR cs)
`T.mkAppTy` (typeRepDataPR ds)
{-# NOINLINE typeRepDatasPR #-}
typeRepDatasPR (PTuple4s as bs cs ds)
= T.typeOf4 ((), (), (), ())
`T.mkAppTy` (typeRepDatasPR as)
`T.mkAppTy` (typeRepDatasPR bs)
`T.mkAppTy` (typeRepDatasPR cs)
`T.mkAppTy` (typeRepDatasPR ds)
-- Constructors -------------------------------
{-# INLINE_PDATA emptyPR #-}
emptyPR
= PTuple4 emptyPR emptyPR emptyPR emptyPR
{-# INLINE_PDATA replicatePR #-}
replicatePR len (x, y, z, d)
= PTuple4 (replicatePR len x)
(replicatePR len y)
(replicatePR len z)
(replicatePR len d)
{-# INLINE_PDATA replicatesPR #-}
replicatesPR lens (PTuple4 arr1 arr2 arr3 arr4)
= PTuple4 (replicatesPR lens arr1)
(replicatesPR lens arr2)
(replicatesPR lens arr3)
(replicatesPR lens arr4)
{-# INLINE_PDATA appendPR #-}
appendPR (PTuple4 arr11 arr12 arr13 arr14)
(PTuple4 arr21 arr22 arr23 arr24)
= PTuple4 (arr11 `appendPR` arr21)
(arr12 `appendPR` arr22)
(arr13 `appendPR` arr23)
(arr14 `appendPR` arr24)
{-# INLINE_PDATA appendvsPR #-}
appendvsPR segdResult segd1 (PTuple4s arrs11 arrs12 arrs13 arrs14)
segd2 (PTuple4s arrs21 arrs22 arrs23 arrs24)
= PTuple4 (appendvsPR segdResult segd1 arrs11 segd2 arrs21)
(appendvsPR segdResult segd1 arrs12 segd2 arrs22)
(appendvsPR segdResult segd1 arrs13 segd2 arrs23)
(appendvsPR segdResult segd1 arrs14 segd2 arrs24)
-- Projections ---------------------------------
{-# INLINE_PDATA lengthPR #-}
lengthPR (PTuple4 arr1 _ _ _)
= lengthPR arr1
{-# INLINE_PDATA indexPR #-}
indexPR (PTuple4 arr1 arr2 arr3 arr4) ix
= ( indexPR arr1 ix
, indexPR arr2 ix
, indexPR arr3 ix
, indexPR arr4 ix)
{-# INLINE_PDATA indexsPR #-}
indexsPR (PTuple4s xs ys zs ds) srcixs
= PTuple4 (indexsPR xs srcixs)
(indexsPR ys srcixs)
(indexsPR zs srcixs)
(indexsPR ds srcixs)
{-# INLINE_PDATA indexvsPR #-}
indexvsPR (PTuple4s xs ys zs ds) vsegd srcixs
= PTuple4 (indexvsPR xs vsegd srcixs)
(indexvsPR ys vsegd srcixs)
(indexvsPR zs vsegd srcixs)
(indexvsPR ds vsegd srcixs)
{-# INLINE_PDATA extractPR #-}
extractPR (PTuple4 arr1 arr2 arr3 arr4) start len
= PTuple4 (extractPR arr1 start len)
(extractPR arr2 start len)
(extractPR arr3 start len)
(extractPR arr4 start len)
{-# INLINE_PDATA extractssPR #-}
extractssPR (PTuple4s xs ys zs ds) ussegd
= PTuple4 (extractssPR xs ussegd)
(extractssPR ys ussegd)
(extractssPR zs ussegd)
(extractssPR ds ussegd)
{-# INLINE_PDATA extractvsPR #-}
extractvsPR (PTuple4s xs ys zs ds) uvsegd
= PTuple4 (extractvsPR xs uvsegd)
(extractvsPR ys uvsegd)
(extractvsPR zs uvsegd)
(extractvsPR ds uvsegd)
-- Pack and Combine ---------------------------
{-# INLINE_PDATA packByTagPR #-}
packByTagPR (PTuple4 arr1 arr2 arr3 arr4) tags tag
= PTuple4 (packByTagPR arr1 tags tag)
(packByTagPR arr2 tags tag)
(packByTagPR arr3 tags tag)
(packByTagPR arr4 tags tag)
{-# INLINE_PDATA combine2PR #-}
combine2PR sel (PTuple4 xs1 ys1 zs1 ds1) (PTuple4 xs2 ys2 zs2 ds2)
= PTuple4 (combine2PR sel xs1 xs2)
(combine2PR sel ys1 ys2)
(combine2PR sel zs1 zs2)
(combine2PR sel ds1 ds2)
-- Conversions --------------------------------
{-# NOINLINE fromVectorPR #-}
fromVectorPR vec
= let (xs, ys, zs, ds) = V.unzip4 vec
in PTuple4 (fromVectorPR xs)
(fromVectorPR ys)
(fromVectorPR zs)
(fromVectorPR ds)
{-# NOINLINE toVectorPR #-}
toVectorPR (PTuple4 xs ys zs ds)
= V.zip4 (toVectorPR xs)
(toVectorPR ys)
(toVectorPR zs)
(toVectorPR ds)
-- PData --------------------------------------
{-# INLINE_PDATA emptydPR #-}
emptydPR
= PTuple4s emptydPR
emptydPR
emptydPR
emptydPR
{-# INLINE_PDATA singletondPR #-}
singletondPR (PTuple4 x y z d)
= PTuple4s (singletondPR x)
(singletondPR y)
(singletondPR z)
(singletondPR d)
{-# INLINE_PDATA lengthdPR #-}
lengthdPR (PTuple4s xs _ _ _)
= lengthdPR xs
{-# INLINE_PDATA indexdPR #-}
indexdPR (PTuple4s xs ys zs ds) i
= PTuple4 (indexdPR xs i)
(indexdPR ys i)
(indexdPR zs i)
(indexdPR ds i)
{-# INLINE_PDATA appenddPR #-}
appenddPR (PTuple4s xs1 ys1 zs1 ds1) (PTuple4s xs2 ys2 zs2 ds2)
= PTuple4s (appenddPR xs1 xs2)
(appenddPR ys1 ys2)
(appenddPR zs1 zs2)
(appenddPR ds1 ds2)
{-# NOINLINE fromVectordPR #-}
fromVectordPR vec
= let (xss, yss, zss, dss) = V.unzip4 $ V.map (\(PTuple4 xs ys zs ds) -> (xs, ys, zs, ds)) vec
in PTuple4s (fromVectordPR xss)
(fromVectordPR yss)
(fromVectordPR zss)
(fromVectordPR dss)
{-# NOINLINE toVectordPR #-}
toVectordPR (PTuple4s pdatas1 pdatas2 pdatas3 pdatas4)
= V.zipWith4 PTuple4
(toVectordPR pdatas1)
(toVectordPR pdatas2)
(toVectordPR pdatas3)
(toVectordPR pdatas4)
-- PD Functions ---------------------------------------------------------------
-- | O(1). Zip four arrays into an array of quadruples.
zip4PD :: PData a -> PData b -> PData c -> PData d -> PData (a, b, c, d)
zip4PD = PTuple4
{-# INLINE_PA zip4PD #-}
-- Show -----------------------------------------------------------------------
deriving instance (Show (PData a), Show (PData b), Show (PData c), Show (PData d))
=> Show (PData (a, b, c, d))
deriving instance (Show (PDatas a), Show (PDatas b), Show (PDatas c), Show (PDatas d))
=> Show (PDatas (a, b, c, d))
instance ( PR a, PR b, PR c, PR d, Show a, Show b, Show c, Show d
, PprVirtual (PData a), PprVirtual (PData b), PprVirtual (PData c), PprVirtual (PData d))
=> PprVirtual (PData (a, b, c, d)) where
pprv (PTuple4 xs ys zs ds)
= text $ show
$ P.zip4 (V.toList $ toVectorPR xs)
(V.toList $ toVectorPR ys)
(V.toList $ toVectorPR zs)
(V.toList $ toVectorPR ds)
| mainland/dph | dph-lifted-vseg/Data/Array/Parallel/PArray/PData/Tuple4.hs | bsd-3-clause | 9,606 | 0 | 15 | 3,330 | 2,760 | 1,458 | 1,302 | -1 | -1 |
-------------------------------------------------------------------------
--
-- Haskell: The Craft of Functional Programming, 3e
-- Simon Thompson
-- (c) Addison-Wesley, 1996-2011.
--
-- Case study: Parsing expressions
--
-- Note that this is not a monadic approach to parsing.
--
---------------------------------------------------------------------------
module ParsingBasics where
import Data.Char
infixr 5 >*>
--
-- Syntactic types
--
type Var = Char
data Expr = Lit Int | Var Var | Op Op Expr Expr
data Op = Add | Sub | Mul | Div | Mod
--
-- The type of parsers.
--
type Parse a b = [a] -> [(b,[a])]
--
-- Some basic parsers
--
--
-- Fail on any input.
--
none :: Parse a b
none inp = []
--
-- Succeed, returning the value supplied.
--
succeed :: b -> Parse a b
succeed val inp = [(val,inp)]
--
-- token t recognises t as the first value in the input.
--
token :: Eq a => a -> Parse a a
token t (x:xs)
| t==x = [(t,xs)]
| otherwise = []
token t [] = []
--
-- spot whether an element with a particular property is the
-- first element of input.
--
spot :: (a -> Bool) -> Parse a a
spot p (x:xs)
| p x = [(x,xs)]
| otherwise = []
spot p [] = []
--
-- Examples.
--
bracket = token '('
dig = spot isDigit
--
-- Combining parsers
--
--
-- alt p1 p2 recognises anything recognised by p1 or by p2.
--
alt :: Parse a b -> Parse a b -> Parse a b
alt p1 p2 inp = p1 inp ++ p2 inp
exam1 = (bracket `alt` dig) "234"
--
-- Apply one parser then the second to the result(s) of the first.
--
(>*>) :: Parse a b -> Parse a c -> Parse a (b,c)
--
(>*>) p1 p2 inp
= [((y,z),rem2) | (y,rem1) <- p1 inp , (z,rem2) <- p2 rem1 ]
--
-- Transform the results of the parses according to the function.
--
build :: Parse a b -> (b -> c) -> Parse a c
build p f inp = [ (f x,rem) | (x,rem) <- p inp ]
--
-- Recognise a list of objects.
--
--
list :: Parse a b -> Parse a [b]
list p = (succeed []) `alt`
((p >*> list p) `build` convert)
where
convert = uncurry (:)
--
-- From the exercises...
--
neList :: Parse a b -> Parse a [b]
neList = neList -- dummy definition
optional :: Parse a b -> Parse a [b]
optional = optional -- dummy definition
nTimes :: Int -> Parse a b -> Parse a [b]
nTimes = nTimes -- dummy definition
--
-- A parser for expressions
--
--
-- The parser has three components, corresponding to the three
-- clauses in the definition of the syntactic type.
--
parser :: Parse Char Expr
parser = (litParse `alt` varParse) `alt` opExpParse
--
-- Spotting variables.
--
varParse :: Parse Char Expr
varParse = spot isVar `build` Var
isVar :: Char -> Bool
isVar x = ('a' <= x && x <= 'z')
--
-- Parsing (fully bracketed) operator applications.
--
opExpParse
= (token '(' >*>
parser >*>
spot isOp >*>
parser >*>
token ')')
`build` makeExpr
makeExpr (_,(e1,(bop,(e2,_)))) = Op (charToOp bop) e1 e2
isOp :: Char -> Bool
isOp = isOp -- dummy definition
charToOp :: Char -> Op
charToOp = charToOp -- dummy definition
--
-- A number is a list of digits with an optional ~ at the front.
--
litParse
= ((optional (token '~')) >*>
(neList (spot isDigit)))
`build` (charlistToExpr.join)
where
join = uncurry (++)
--
-- From the exercises...
--
charlistToExpr :: [Char] -> Expr
charlistToExpr = charlistToExpr -- dummy definition
--
-- A grammar for unbracketed expressions.
--
-- eXpr ::= Int | Var | (eXpr Op eXpr) |
-- lexpr mop mexpr | mexpr aop eXpr
-- lexpr ::= Int | Var | (eXpr Op eXpr)
-- mexpr ::= Int | Var | (eXpr Op eXpr) | lexpr mop mexpr
-- mop ::= '*' | '/' | '%'
-- aop ::= '+' | '-'
--
--
-- The top-level parser
--
topLevel :: Parse a b -> [a] -> b
topLevel p inp
= case results of
[] -> error "parse unsuccessful"
_ -> head results
where
results = [ found | (found,[]) <- p inp ]
--
-- The type of commands.
--
data Command = Eval Expr | Assign Var Expr | Null
commandParse :: Parse Char Command
commandParse = commandParse -- dummy definition
--
-- From the exercises.
--
-- tokenList :: [a] -> Parse a [a]
-- spotWhile :: (a -> Bool) -> Parse a [a]
| c089/haskell-craft3e | ParsingBasics.hs | mit | 4,456 | 8 | 11 | 1,298 | 1,221 | 706 | 515 | 75 | 2 |
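A small usage sketch for the combinators above (not part of the book's file; it would be appended to the same module). `list dig` recognises a run of digits, `build` converts the result with `read`, and `topLevel` keeps only parses that consume the whole input.

```haskell
digits :: Parse Char Int
digits = list dig `build` read

demo :: Int
demo = topLevel digits "123"   -- evaluates to 123
```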
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE GeneralizedNewtypeDeriving #-}
{-# LANGUAGE LambdaCase #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TypeFamilies #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
-- Module : Network.AWS.SNS.AddPermission
-- Copyright : (c) 2013-2014 Brendan Hay <brendan.g.hay@gmail.com>
-- License : This Source Code Form is subject to the terms of
-- the Mozilla Public License, v. 2.0.
-- A copy of the MPL can be found in the LICENSE file or
-- you can obtain it at http://mozilla.org/MPL/2.0/.
-- Maintainer : Brendan Hay <brendan.g.hay@gmail.com>
-- Stability : experimental
-- Portability : non-portable (GHC extensions)
--
-- Derived from AWS service descriptions, licensed under Apache 2.0.
-- | Adds a statement to a topic's access control policy, granting access for the
-- specified AWS accounts to the specified actions.
--
-- <http://docs.aws.amazon.com/sns/latest/api/API_AddPermission.html>
module Network.AWS.SNS.AddPermission
(
-- * Request
AddPermission
-- ** Request constructor
, addPermission
-- ** Request lenses
, apAWSAccountId
, apActionName
, apLabel
, apTopicArn
-- * Response
, AddPermissionResponse
-- ** Response constructor
, addPermissionResponse
) where
import Network.AWS.Prelude
import Network.AWS.Request.Query
import Network.AWS.SNS.Types
import qualified GHC.Exts
data AddPermission = AddPermission
{ _apAWSAccountId :: List "member" Text
, _apActionName :: List "member" Text
, _apLabel :: Text
, _apTopicArn :: Text
} deriving (Eq, Ord, Read, Show)
-- | 'AddPermission' constructor.
--
-- The fields accessible through corresponding lenses are:
--
-- * 'apAWSAccountId' @::@ ['Text']
--
-- * 'apActionName' @::@ ['Text']
--
-- * 'apLabel' @::@ 'Text'
--
-- * 'apTopicArn' @::@ 'Text'
--
addPermission :: Text -- ^ 'apTopicArn'
-> Text -- ^ 'apLabel'
-> AddPermission
addPermission p1 p2 = AddPermission
{ _apTopicArn = p1
, _apLabel = p2
, _apAWSAccountId = mempty
, _apActionName = mempty
}
-- | The AWS account IDs of the users (principals) who will be given access to the
-- specified actions. The users must have AWS accounts, but do not need to be
-- signed up for this service.
apAWSAccountId :: Lens' AddPermission [Text]
apAWSAccountId = lens _apAWSAccountId (\s a -> s { _apAWSAccountId = a }) . _List
-- | The action you want to allow for the specified principal(s).
--
-- Valid values: any Amazon SNS action name.
apActionName :: Lens' AddPermission [Text]
apActionName = lens _apActionName (\s a -> s { _apActionName = a }) . _List
-- | A unique identifier for the new policy statement.
apLabel :: Lens' AddPermission Text
apLabel = lens _apLabel (\s a -> s { _apLabel = a })
-- | The ARN of the topic whose access control policy you wish to modify.
apTopicArn :: Lens' AddPermission Text
apTopicArn = lens _apTopicArn (\s a -> s { _apTopicArn = a })
data AddPermissionResponse = AddPermissionResponse
deriving (Eq, Ord, Read, Show, Generic)
-- | 'AddPermissionResponse' constructor.
addPermissionResponse :: AddPermissionResponse
addPermissionResponse = AddPermissionResponse
instance ToPath AddPermission where
toPath = const "/"
instance ToQuery AddPermission where
toQuery AddPermission{..} = mconcat
[ "AWSAccountId" =? _apAWSAccountId
, "ActionName" =? _apActionName
, "Label" =? _apLabel
, "TopicArn" =? _apTopicArn
]
instance ToHeaders AddPermission
instance AWSRequest AddPermission where
type Sv AddPermission = SNS
type Rs AddPermission = AddPermissionResponse
request = post "AddPermission"
response = nullResponse AddPermissionResponse
| kim/amazonka | amazonka-sns/gen/Network/AWS/SNS/AddPermission.hs | mpl-2.0 | 4,085 | 0 | 10 | 953 | 548 | 333 | 215 | 64 | 1 |
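A hypothetical request construction using the smart constructor and lenses above (the topic ARN, account ID and action name are made up; `&` and `.~` are the standard lens operators from the `lens` package this library builds on).

```haskell
{-# LANGUAGE OverloadedStrings #-}
import Control.Lens ((&), (.~))
import Network.AWS.SNS.AddPermission

req :: AddPermission
req = addPermission "arn:aws:sns:us-east-1:123456789012:MyTopic" "GrantPublish"
        & apAWSAccountId .~ ["123456789012"]
        & apActionName   .~ ["Publish"]
```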
-------------------------------------------------------------------------------
-- |
-- Module : System.Hardware.Haskino.ShallowDeepPlugin.RepPushPass
-- Copyright : (c) University of Kansas
-- License : BSD3
-- Stability : experimental
--
-- Rep Push Pass
-- This pass is used to transform shallow expressions into the
-- deep expression language. It uses rules like the following:
--
-- forall (b1 :: Bool) (b2 :: Bool).
-- rep_ (b1 || b2)
-- =
-- (rep_ b1) ||* (rep_ b2)
--
-- Each of the from and to operations (in the example above, '||' and
-- '||*') is specified in the xlatList data table.
-------------------------------------------------------------------------------
{-# LANGUAGE GeneralizedNewtypeDeriving #-}
{-# LANGUAGE TemplateHaskell #-}
module System.Hardware.Haskino.ShallowDeepPlugin.RepPushPass (repPushPass) where
import Control.Monad.Reader
import CoreMonad
import Data.Bits as DB
import Data.Boolean
import Data.Boolean.Numbers as BN
import Data.Boolean.Bits as BB
import Data.List
import GhcPlugins
import System.Hardware.Haskino
import System.Hardware.Haskino.ShallowDeepPlugin.Utils
data XlatEntry = XlatEntry { fromId :: BindM Id
, toId :: BindM Id
}
-- The following table defines the names of the Shallow DSL functions
-- to translate from and the Deep DSL functions to translate to.
xlatList :: [XlatEntry]
xlatList = [ XlatEntry (thNameToId 'not)
(thNameToId 'Data.Boolean.notB)
, XlatEntry (thNameToId '(||))
(thNameToId '(||*))
, XlatEntry (thNameToId '(&&))
(thNameToId '(&&*))
, XlatEntry (thNameToId '(==))
(thNameToId 'eqE)
, XlatEntry (thNameToId '(/=))
(thNameToId 'neqE)
, XlatEntry (thNameToId '(>))
(thNameToId 'greatE)
, XlatEntry (thNameToId '(<))
(thNameToId 'lessE)
, XlatEntry (thNameToId '(>=))
(thNameToId 'greateqE)
, XlatEntry (thNameToId '(<=))
(thNameToId 'lesseqE)
, XlatEntry (thNameToId '(+))
(thNameToId '(+))
, XlatEntry (thNameToId '(-))
(thNameToId '(-))
, XlatEntry (thNameToId '(/))
(thNameToId '(/))
, XlatEntry (thNameToId '(*))
(thNameToId '(*))
, XlatEntry (thNameToId 'Prelude.div)
(thNameToId 'BN.div)
, XlatEntry (thNameToId 'Prelude.rem)
(thNameToId 'BN.rem)
, XlatEntry (thNameToId 'Prelude.quot)
(thNameToId 'BN.quot)
, XlatEntry (thNameToId 'Prelude.mod)
(thNameToId 'BN.mod)
, XlatEntry (thNameToId 'negate)
(thNameToId 'negate)
, XlatEntry (thNameToId 'abs)
(thNameToId 'abs)
, XlatEntry (thNameToId 'signum)
(thNameToId 'signum)
, XlatEntry (thNameToId '(DB..&.))
(thNameToId '(BB..&.))
, XlatEntry (thNameToId '(DB..|.))
(thNameToId '(BB..|.))
, XlatEntry (thNameToId 'DB.xor)
(thNameToId 'BB.xor)
, XlatEntry (thNameToId 'DB.complement)
(thNameToId 'BB.complement)
, XlatEntry (thNameToId 'DB.setBit)
(thNameToId 'BB.setBit)
, XlatEntry (thNameToId 'DB.clearBit)
(thNameToId 'BB.clearBit)
, XlatEntry (thNameToId 'DB.testBit)
(thNameToId 'BB.testBit)
, XlatEntry (thNameToId 'DB.finiteBitSize)
(thNameToId 'BB.bitSize)
, XlatEntry (thNameToId 'DB.isSigned)
(thNameToId 'BB.isSigned)
, XlatEntry (thNameToId 'DB.shiftL)
(thNameToId 'BB.shiftL)
, XlatEntry (thNameToId 'DB.shiftR)
(thNameToId 'BB.shiftR)
, XlatEntry (thNameToId 'DB.rotateL)
(thNameToId 'BB.rotateL)
, XlatEntry (thNameToId 'DB.rotateR)
(thNameToId 'BB.rotateR)
, XlatEntry (thNameToId '(++))
(thNameToId '(++*))
, XlatEntry (thNameToId '(:))
(thNameToId '(*:))
, XlatEntry (thNameToId '(!!))
(thNameToId '(!!*))
, XlatEntry (thNameToId 'head)
(thNameToId 'headE)
, XlatEntry (thNameToId 'tail)
(thNameToId 'tailE)
, XlatEntry (thNameToId 'length)
(thNameToId 'len)
, XlatEntry (thNameToId 'drop)
(thNameToId 'dropE)
, XlatEntry (thNameToId 'take)
(thNameToId 'takeE)
, XlatEntry (thNameToId 'Data.List.reverse)
(thNameToId 'reverseE)
, XlatEntry (thNameToId 'null)
(thNameToId 'nullE)
, XlatEntry (thNameToId 'showB)
(thNameToId 'showE)
, XlatEntry (thNameToId 'fromIntegral)
(thNameToId 'fromIntegralE)
]
-- TBD add floating to above
data BindEnv
= BindEnv
{ pluginModGuts :: ModGuts
}
newtype BindM a = BindM { runBindM :: ReaderT BindEnv CoreM a }
deriving (Functor, Applicative, Monad
,MonadIO, MonadReader BindEnv)
instance PassCoreM BindM where
liftCoreM = BindM . ReaderT . const
getModGuts = BindM $ ReaderT (return . pluginModGuts)
repPushPass :: ModGuts -> CoreM ModGuts
repPushPass guts = do
bindsOnlyPass (\x -> (runReaderT (runBindM $ (mapM changeRep) x) (BindEnv guts))) guts
changeRep :: CoreBind -> BindM CoreBind
changeRep (NonRec b e) = do
let (bs, e') = collectBinders e
e'' <- changeRepExpr e'
let e''' = mkLams bs e''
return (NonRec b e''')
changeRep (Rec bs) = do
bs' <- changeRep' bs
return $ Rec bs'
changeRep' :: [(Id, CoreExpr)] -> BindM [(Id, CoreExpr)]
changeRep' [] = return []
changeRep' ((b, e) : bs) = do
let (lbs, e') = collectBinders e
e'' <- changeRepExpr e'
let e''' = mkLams lbs e''
bs' <- changeRep' bs
return $ (b, e''') : bs'
changeRepExpr :: CoreExpr -> BindM CoreExpr
changeRepExpr e = do
repId <- thNameToId repNameTH
case e of
Var v -> return $ Var v
Lit l -> return $ Lit l
Type ty -> return $ Type ty
Coercion co -> return $ Coercion co
App e1 e2 -> do
let (b, args) = collectArgs e
let defaultReturn = do
e1' <- changeRepExpr e1
e2' <- changeRepExpr e2
return $ App e1' e2'
case b of
Var v | v == repId -> do
case args of
[_ty, _dict, e'] -> do
let (b', args') = collectArgs e'
case b' of
Var v' -> do
inList <- funcInXlatList v'
case inList of
Just xe -> pushRep xe args' (exprType e)
_ -> defaultReturn
_ -> defaultReturn
_ -> defaultReturn
_ -> defaultReturn
Lam tb el -> do
e' <- changeRepExpr el
return $ Lam tb e'
Let bind body -> do
body' <- changeRepExpr body
bind' <- case bind of
(NonRec v el) -> do
e' <- changeRepExpr el
return $ NonRec v e'
(Rec rbs) -> do
rbs' <- changeRepExpr' rbs
return $ Rec rbs'
return $ Let bind' body'
Case ec tb ty alts -> do
e' <- changeRepExpr ec
alts' <- changeRepExprAlts alts
return $ Case e' tb ty alts'
Tick t et -> do
e' <- changeRepExpr et
return $ Tick t e'
Cast ec co -> do
e' <- changeRepExpr ec
return $ Cast e' co
changeRepExpr' :: [(Id, CoreExpr)] -> BindM [(Id, CoreExpr)]
changeRepExpr' [] = return []
changeRepExpr' ((b, e) : bs) = do
e' <- changeRepExpr e
bs' <- changeRepExpr' bs
return $ (b, e') : bs'
changeRepExprAlts :: [GhcPlugins.Alt CoreBndr] -> BindM [GhcPlugins.Alt CoreBndr]
changeRepExprAlts [] = return []
changeRepExprAlts ((ac, b, a) : as) = do
a' <- changeRepExpr a
bs' <- changeRepExprAlts as
return $ (ac, b, a') : bs'
funcInXlatList :: Id -> BindM (Maybe XlatEntry)
funcInXlatList idf = do
funcInXlatList' idf xlatList
where
funcInXlatList' :: Id -> [XlatEntry] -> BindM (Maybe XlatEntry)
funcInXlatList' _ [] = return Nothing
funcInXlatList' idf' (xl:xls) = do
fId <- fromId xl
if fId == idf'
then return $ Just xl
else funcInXlatList' idf' xls
pushRep :: XlatEntry -> [CoreExpr] -> Type -> BindM CoreExpr
pushRep xe args origRTy = do
fi <- fromId xe
ti <- toId xe
-- Break down the arguments from the old function
-- into foralls, args, and return types.
let (_fromForAlls, fromFuncTy) = splitForAllTys $ idType fi
let (_fromArgTys, fromRetTy) = splitFunTys fromFuncTy
let (toForAlls, toFuncTy) = splitForAllTys $ idType ti
let (toArgTys, toRetTy) = splitFunTys toFuncTy
-- Get the count of non-dictionary args in the new function
let argCount = countNonDictTypes toArgTys
let dictCount = (length toArgTys) - argCount
-- Get the original args based on the argCount
let someArgs = take ((length args) - argCount) args
let origArgs = drop ((length args) - argCount) args
let dictTys = take dictCount toArgTys
let nonDictTys = drop dictCount toArgTys
let typeArgs = genForAllArgs toForAlls nonDictTys fromRetTy origArgs
exprTypeArgs <- mapM (thNameTyToTyConApp exprTyConTH) typeArgs
let exprTypeVars = map Type exprTypeArgs
dictArgs <- genDictArgs dictTys nonDictTys fromRetTy toRetTy origArgs origRTy
repArgs <- mapM repExpr origArgs
repArgs' <- mapM changeRepExpr repArgs
return $ mkCoreApps (Var ti) (exprTypeVars ++ dictArgs ++ repArgs')
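-- Build the dictionary arguments the deep function needs, deciding
-- for each one whether it should be at type 'a' or 'Expr a'.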
genDictArgs :: [Type] -> [Type] -> Type -> Type -> [CoreExpr] -> Type -> BindM [CoreExpr]
genDictArgs [] _ _ _ _ _ = return []
genDictArgs (dty:dtys) tys frty trty args orty = do
let (tyConTy, ty') = splitAppTys dty
dictTys <- mapM findTypeMatch ty'
dict <- buildDictionaryTyConTs (tyConAppTyCon tyConTy) dictTys
dicts <- genDictArgs dtys tys frty trty args orty
return $ dict:dicts
where
findTypeMatch :: Type -> BindM Type
findTypeMatch fty =
case findIndex (typeIn fty) tys of
Just idx -> do
-- Find the index of the from function arg which matches the
-- type required by the dictionary
let fromArgTy = exprType $ args !! idx
-- Get the base type only of the from type, removing any
-- Expr if it already exists.
let dictTy = case splitTyConApp_maybe fromArgTy of
Just (_, [ty']) -> ty'
_ -> fromArgTy
-- Get the type of the to function arg at the same index
let toArgTy = tys !! idx
-- Determine if it has a type constructor
let tys_m = splitTyConApp_maybe toArgTy
case tys_m of
-- If it does, we are dealing with a function
-- of type Class a => Expr a -> ... -> retType
-- so the dictionary type is of a not Expr a
Just (_tyCon, _tys') -> return dictTy
-- If there is no TyCon, then we have a
-- function of type Class a => a -> ... -> retType
-- so the dictionary type needs to be Expr a.
Nothing -> thNameTyToTyConApp exprTyConTH dictTy
Nothing -> do
-- Determine if the return type has a type constructor
let tys_m = splitTyConApp_maybe trty
-- Get the base type only of the from type, removing any
-- Expr if it already exists.
let orty' = case splitTyConApp_maybe orty of
Just (_, [ty']) -> ty'
_ -> orty
case tys_m of
-- If it does, we are dealing with a function
-- of type Class a => Expr a -> ... -> Expr retType
-- so the dictionary type is of a not Expr a
Just (_tyCon, _tys') -> return orty'
-- If there is no TyCon, then we have a
-- function of type Class a => a -> ... -> retType
-- so the dictionary type needs to be Expr a.
Nothing -> thNameTyToTyConApp exprTyConTH orty'
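-- For each forall of the deep function, pick the type of the matching
-- original argument, falling back to the original return type.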
genForAllArgs :: [TyVar] -> [Type] -> Type -> [CoreExpr] -> [Type]
genForAllArgs [] _ _ _ = []
genForAllArgs (tv:tvs) tys rty args =
case findIndex (eqType (mkTyVarTy tv)) tys of
Just idx -> (exprType $ args !! idx) : genForAllArgs tvs tys rty args
Nothing -> rty : genForAllArgs tvs tys rty args
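-- Count the argument types that are not dictionary types.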
countNonDictTypes :: [Type] -> Int
countNonDictTypes [] = 0
countNonDictTypes (ty:tys) = if isDictTy ty
then countNonDictTypes tys
else 1 + countNonDictTypes tys
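-- Is 't' the type 'ty' itself, or the last argument of ty's
-- type constructor application?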
typeIn :: Type -> Type -> Bool
typeIn t ty =
if t `eqType` ty
then True
else
let (_, tys') = splitTyConApp ty in
t `eqType` last tys'
| ku-fpg/kansas-amber | System/Hardware/Haskino/ShallowDeepPlugin/RepPushPass.hs | bsd-3-clause | 13,617 | 0 | 33 | 4,767 | 3,701 | 1,918 | 1,783 | 275 | 15 |
{-# LANGUAGE CPP #-}
module RnSplice (
rnTopSpliceDecls,
rnSpliceType, rnSpliceExpr, rnSplicePat, rnSpliceDecl,
rnBracket,
checkThLocalName
#ifdef GHCI
, traceSplice, SpliceInfo(..)
#endif
) where
#include "HsVersions.h"
import Name
import NameSet
import HsSyn
import RdrName
import TcRnMonad
import Kind
import RnEnv
import RnSource ( rnSrcDecls, findSplice )
import RnPat ( rnPat )
import BasicTypes ( TopLevelFlag, isTopLevel )
import Outputable
import Module
import SrcLoc
import RnTypes ( rnLHsType )
import Control.Monad ( unless, when )
import {-# SOURCE #-} RnExpr ( rnLExpr )
import TcEnv ( checkWellStaged )
import THNames ( liftName )
#ifdef GHCI
import DynFlags
import FastString
import ErrUtils ( dumpIfSet_dyn_printer )
import TcEnv ( tcMetaTy )
import Hooks
import Var ( Id )
import THNames ( quoteExpName, quotePatName, quoteDecName, quoteTypeName
, decsQTyConName, expQTyConName, patQTyConName, typeQTyConName, )
import {-# SOURCE #-} TcExpr ( tcPolyExpr )
import {-# SOURCE #-} TcSplice ( runMetaD, runMetaE, runMetaP, runMetaT, tcTopSpliceExpr )
#endif
import qualified GHC.LanguageExtensions as LangExt
{-
************************************************************************
* *
Template Haskell brackets
* *
************************************************************************
-}
rnBracket :: HsExpr RdrName -> HsBracket RdrName -> RnM (HsExpr Name, FreeVars)
rnBracket e br_body
= addErrCtxt (quotationCtxtDoc br_body) $
do { -- Check that -XTemplateHaskellQuotes is enabled and available
thQuotesEnabled <- xoptM LangExt.TemplateHaskellQuotes
; unless thQuotesEnabled $
failWith ( vcat
[ text "Syntax error on" <+> ppr e
, text ("Perhaps you intended to use TemplateHaskell"
++ " or TemplateHaskellQuotes") ] )
-- Check for nested brackets
; cur_stage <- getStage
; case cur_stage of
{ Splice Typed -> checkTc (isTypedBracket br_body)
illegalUntypedBracket
; Splice Untyped -> checkTc (not (isTypedBracket br_body))
illegalTypedBracket
; Comp -> return ()
; Brack {} -> failWithTc illegalBracket
}
-- Brackets are desugared to code that mentions the TH package
; recordThUse
; case isTypedBracket br_body of
True -> do { traceRn (text "Renaming typed TH bracket")
; (body', fvs_e) <-
setStage (Brack cur_stage RnPendingTyped) $
rn_bracket cur_stage br_body
; return (HsBracket body', fvs_e) }
False -> do { traceRn (text "Renaming untyped TH bracket")
; ps_var <- newMutVar []
; (body', fvs_e) <-
setStage (Brack cur_stage (RnPendingUntyped ps_var)) $
rn_bracket cur_stage br_body
; pendings <- readMutVar ps_var
; return (HsRnBracketOut body' pendings, fvs_e) }
}
rn_bracket :: ThStage -> HsBracket RdrName -> RnM (HsBracket Name, FreeVars)
rn_bracket outer_stage br@(VarBr flg rdr_name)
= do { name <- lookupOccRn rdr_name
; this_mod <- getModule
; when (flg && nameIsLocalOrFrom this_mod name) $
-- Type variables can be quoted in TH. See #5721.
do { mb_bind_lvl <- lookupLocalOccThLvl_maybe name
; case mb_bind_lvl of
{ Nothing -> return () -- Can happen for data constructors,
-- but nothing needs to be done for them
; Just (top_lvl, bind_lvl) -- See Note [Quoting names]
| isTopLevel top_lvl
-> when (isExternalName name) (keepAlive name)
| otherwise
-> do { traceRn (text "rn_bracket VarBr" <+> ppr name <+> ppr bind_lvl <+> ppr outer_stage)
; checkTc (thLevel outer_stage + 1 == bind_lvl)
(quotedNameStageErr br) }
}
}
; return (VarBr flg name, unitFV name) }
rn_bracket _ (ExpBr e) = do { (e', fvs) <- rnLExpr e
; return (ExpBr e', fvs) }
rn_bracket _ (PatBr p) = rnPat ThPatQuote p $ \ p' -> return (PatBr p', emptyFVs)
rn_bracket _ (TypBr t) = do { (t', fvs) <- rnLHsType TypBrCtx t
; return (TypBr t', fvs) }
rn_bracket _ (DecBrL decls)
= do { group <- groupDecls decls
; gbl_env <- getGblEnv
; let new_gbl_env = gbl_env { tcg_dus = emptyDUs }
-- The emptyDUs is so that we just collect uses for this
-- group alone in the call to rnSrcDecls below
; (tcg_env, group') <- setGblEnv new_gbl_env $
rnSrcDecls group
-- Discard the tcg_env; it contains only extra info about fixity
; traceRn (text "rn_bracket dec" <+> (ppr (tcg_dus tcg_env) $$
ppr (duUses (tcg_dus tcg_env))))
; return (DecBrG group', duUses (tcg_dus tcg_env)) }
where
groupDecls :: [LHsDecl RdrName] -> RnM (HsGroup RdrName)
groupDecls decls
= do { (group, mb_splice) <- findSplice decls
; case mb_splice of
{ Nothing -> return group
; Just (splice, rest) ->
do { group' <- groupDecls rest
; let group'' = appendGroups group group'
; return group'' { hs_splcds = noLoc splice : hs_splcds group' }
}
}}
rn_bracket _ (DecBrG _) = panic "rn_bracket: unexpected DecBrG"
rn_bracket _ (TExpBr e) = do { (e', fvs) <- rnLExpr e
; return (TExpBr e', fvs) }
quotationCtxtDoc :: HsBracket RdrName -> SDoc
quotationCtxtDoc br_body
= hang (text "In the Template Haskell quotation")
2 (ppr br_body)
illegalBracket :: SDoc
illegalBracket =
text "Template Haskell brackets cannot be nested" <+>
text "(without intervening splices)"
illegalTypedBracket :: SDoc
illegalTypedBracket =
text "Typed brackets may only appear in typed splices."
illegalUntypedBracket :: SDoc
illegalUntypedBracket =
text "Untyped brackets may only appear in untyped splices."
quotedNameStageErr :: HsBracket RdrName -> SDoc
quotedNameStageErr br
= sep [ text "Stage error: the non-top-level quoted name" <+> ppr br
        , text "must be used at the same stage at which it is bound" ]
#ifndef GHCI
rnTopSpliceDecls :: HsSplice RdrName -> RnM ([LHsDecl RdrName], FreeVars)
rnTopSpliceDecls e = failTH e "Template Haskell top splice"
rnSpliceType :: HsSplice RdrName -> PostTc Name Kind
-> RnM (HsType Name, FreeVars)
rnSpliceType e _ = failTH e "Template Haskell type splice"
rnSpliceExpr :: HsSplice RdrName -> RnM (HsExpr Name, FreeVars)
rnSpliceExpr e = failTH e "Template Haskell splice"
rnSplicePat :: HsSplice RdrName -> RnM (Either (Pat RdrName) (Pat Name), FreeVars)
rnSplicePat e = failTH e "Template Haskell pattern splice"
rnSpliceDecl :: SpliceDecl RdrName -> RnM (SpliceDecl Name, FreeVars)
rnSpliceDecl e = failTH e "Template Haskell declaration splice"
#else
{-
*********************************************************
* *
Splices
* *
*********************************************************
Note [Free variables of typed splices]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Consider renaming this:
f = ...
h = ...$(thing "f")...
where the splice is a *typed* splice. The splice can expand into
literally anything, so when we do dependency analysis we must assume
that it might mention 'f'. So we simply treat all locally-defined
names as mentioned by any splice. This is terribly brutal, but I
don't see what else to do. For example, it'll mean that every
locally-defined thing will appear to be used, so no unused-binding
warnings. But if we miss the dependency, then we might typecheck 'h'
before 'f', and that will crash the type checker because 'f' isn't in
scope.
Currently, I'm not treating a splice as also mentioning every import,
which is a bit inconsistent -- but there are a lot of them. We might
thereby get some bogus unused-import warnings, but we won't crash the
type checker. Not very satisfactory really.
Note [Renamer errors]
~~~~~~~~~~~~~~~~~~~~~
It's important to wrap renamer calls in checkNoErrs, because the
renamer does not fail for out of scope variables etc. Instead it
returns a bogus term/type, so that it can report more than one error.
We don't want the type checker to see these bogus unbound variables.
-}
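-- Rename a splice of any flavour: inside a bracket it is left pending,
-- otherwise it is renamed and run right away.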
rnSpliceGen :: (HsSplice Name -> RnM (a, FreeVars)) -- Outside brackets, run splice
-> (HsSplice Name -> (PendingRnSplice, a)) -- Inside brackets, make it pending
-> HsSplice RdrName
-> RnM (a, FreeVars)
rnSpliceGen run_splice pend_splice splice
= addErrCtxt (spliceCtxt splice) $ do
{ stage <- getStage
; case stage of
Brack pop_stage RnPendingTyped
-> do { checkTc is_typed_splice illegalUntypedSplice
; (splice', fvs) <- setStage pop_stage $
rnSplice splice
; let (_pending_splice, result) = pend_splice splice'
; return (result, fvs) }
Brack pop_stage (RnPendingUntyped ps_var)
-> do { checkTc (not is_typed_splice) illegalTypedSplice
; (splice', fvs) <- setStage pop_stage $
rnSplice splice
; let (pending_splice, result) = pend_splice splice'
; ps <- readMutVar ps_var
; writeMutVar ps_var (pending_splice : ps)
; return (result, fvs) }
_ -> do { (splice', fvs1) <- checkNoErrs $
setStage (Splice splice_type) $
rnSplice splice
-- checkNoErrs: don't attempt to run the splice if
-- renaming it failed; otherwise we get a cascade of
-- errors from e.g. unbound variables
; (result, fvs2) <- run_splice splice'
; return (result, fvs1 `plusFV` fvs2) } }
where
is_typed_splice = isTypedSplice splice
splice_type = if is_typed_splice
then Typed
else Untyped
------------------
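-- Run an untyped splice: typecheck the quoted expression at the
-- appropriate Q type, run it, and trace what it generated.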
runRnSplice :: UntypedSpliceFlavour
-> (LHsExpr Id -> TcRn res)
-> (res -> SDoc) -- How to pretty-print res
-- Usually just ppr, but not for [Decl]
-> HsSplice Name -- Always untyped
-> TcRn res
runRnSplice flavour run_meta ppr_res splice
= do { splice' <- getHooked runRnSpliceHook return >>= ($ splice)
; let the_expr = case splice' of
HsUntypedSplice _ e -> e
HsQuasiQuote _ q qs str -> mkQuasiQuoteExpr flavour q qs str
HsTypedSplice {} -> pprPanic "runRnSplice" (ppr splice)
-- Typecheck the expression
; meta_exp_ty <- tcMetaTy meta_ty_name
; zonked_q_expr <- tcTopSpliceExpr Untyped $
tcPolyExpr the_expr meta_exp_ty
-- Run the expression
; result <- run_meta zonked_q_expr
; traceSplice (SpliceInfo { spliceDescription = what
, spliceIsDecl = is_decl
, spliceSource = Just the_expr
, spliceGenerated = ppr_res result })
; return result }
where
meta_ty_name = case flavour of
UntypedExpSplice -> expQTyConName
UntypedPatSplice -> patQTyConName
UntypedTypeSplice -> typeQTyConName
UntypedDeclSplice -> decsQTyConName
what = case flavour of
UntypedExpSplice -> "expression"
UntypedPatSplice -> "pattern"
UntypedTypeSplice -> "type"
UntypedDeclSplice -> "declarations"
is_decl = case flavour of
UntypedDeclSplice -> True
_ -> False
------------------
makePending :: UntypedSpliceFlavour
-> HsSplice Name
-> PendingRnSplice
makePending flavour (HsUntypedSplice n e)
= PendingRnSplice flavour n e
makePending flavour (HsQuasiQuote n quoter q_span quote)
= PendingRnSplice flavour n (mkQuasiQuoteExpr flavour quoter q_span quote)
makePending _ splice@(HsTypedSplice {})
= pprPanic "makePending" (ppr splice)
------------------
mkQuasiQuoteExpr :: UntypedSpliceFlavour -> Name -> SrcSpan -> FastString -> LHsExpr Name
-- Return the expression (quoter "...quote...")
-- which is what we must run in a quasi-quote
mkQuasiQuoteExpr flavour quoter q_span quote
= L q_span $ HsApp (L q_span $
HsApp (L q_span (HsVar (L q_span quote_selector)))
quoterExpr)
quoteExpr
where
quoterExpr = L q_span $! HsVar $! (L q_span quoter)
quoteExpr = L q_span $! HsLit $! HsString "" quote
quote_selector = case flavour of
UntypedExpSplice -> quoteExpName
UntypedPatSplice -> quotePatName
UntypedTypeSplice -> quoteTypeName
UntypedDeclSplice -> quoteDecName
---------------------
rnSplice :: HsSplice RdrName -> RnM (HsSplice Name, FreeVars)
-- Not exported...used for all
rnSplice (HsTypedSplice splice_name expr)
= do { checkTH expr "Template Haskell typed splice"
; loc <- getSrcSpanM
; n' <- newLocalBndrRn (L loc splice_name)
; (expr', fvs) <- rnLExpr expr
; return (HsTypedSplice n' expr', fvs) }
rnSplice (HsUntypedSplice splice_name expr)
= do { checkTH expr "Template Haskell untyped splice"
; loc <- getSrcSpanM
; n' <- newLocalBndrRn (L loc splice_name)
; (expr', fvs) <- rnLExpr expr
; return (HsUntypedSplice n' expr', fvs) }
rnSplice (HsQuasiQuote splice_name quoter q_loc quote)
= do { checkTH quoter "Template Haskell quasi-quote"
; loc <- getSrcSpanM
; splice_name' <- newLocalBndrRn (L loc splice_name)
-- Rename the quoter; akin to the HsVar case of rnExpr
; quoter' <- lookupOccRn quoter
; this_mod <- getModule
; when (nameIsLocalOrFrom this_mod quoter') $
checkThLocalName quoter'
; return (HsQuasiQuote splice_name' quoter' q_loc quote, unitFV quoter') }
---------------------
rnSpliceExpr :: HsSplice RdrName -> RnM (HsExpr Name, FreeVars)
rnSpliceExpr splice
= rnSpliceGen run_expr_splice pend_expr_splice splice
where
pend_expr_splice :: HsSplice Name -> (PendingRnSplice, HsExpr Name)
pend_expr_splice rn_splice
= (makePending UntypedExpSplice rn_splice, HsSpliceE rn_splice)
run_expr_splice :: HsSplice Name -> RnM (HsExpr Name, FreeVars)
run_expr_splice rn_splice
| isTypedSplice rn_splice -- Run it later, in the type checker
      = do { -- Ugh! See Note [Free variables of typed splices] above
traceRn (text "rnSpliceExpr: typed expression splice")
; lcl_rdr <- getLocalRdrEnv
; gbl_rdr <- getGlobalRdrEnv
; let gbl_names = mkNameSet [gre_name gre | gre <- globalRdrEnvElts gbl_rdr
, isLocalGRE gre]
lcl_names = mkNameSet (localRdrEnvElts lcl_rdr)
; return (HsSpliceE rn_splice, lcl_names `plusFV` gbl_names) }
| otherwise -- Run it here, see Note [Running splices in the Renamer]
= do { traceRn (text "rnSpliceExpr: untyped expression splice")
; rn_expr <- runRnSplice UntypedExpSplice runMetaE ppr rn_splice
; (lexpr3, fvs) <- checkNoErrs (rnLExpr rn_expr)
; return (HsPar lexpr3, fvs) }
{- Note [Running splices in the Renamer]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Splices used to be run in the typechecker, which led to (Trac #4364). Since the
renamer must decide which expressions depend on which others, and it cannot
reliably do this for arbitrary splices, we used to conservatively say that
splices depend on all other expressions in scope. Unfortunately, this led to
the problem of cyclic type declarations seen in (Trac #4364). Instead, by
running splices in the renamer, we side-step the problem of determining
dependencies: by the time the dependency analysis happens, any splices have
already been run, and expression dependencies can be determined as usual.
However, see (Trac #9813), for an example where we would like to run splices
*after* performing dependency analysis (that is, after renaming). It would be
desirable to typecheck "non-splicy" expressions (those expressions that do not
contain splices directly or via dependence on an expression that does) before
"splicy" expressions, such that types/expressions within the same declaration
group would be available to `reify` calls, for example consider the following:
> module M where
> data D = C
> f = 1
> g = $(mapM reify ['f, 'D, ''C] ...)
Compilation of this example fails since D/C/f are not in the type environment
and thus cannot be reified as they have not been typechecked by the time the
splice is renamed and thus run.
These requirements are at odds: we do not want to run splices in the renamer as
we wish to first determine dependencies and typecheck certain expressions,
making them available to reify, but cannot accurately determine dependencies
without running splices in the renamer!
Indeed, the conclusion of (Trac #9813) was that it is not worth the complexity
to try and
a) implement and maintain the code for renaming/typechecking non-splicy
expressions before splicy expressions,
b) explain to TH users which expressions are/not available to reify at any
given point.
-}
----------------------
rnSpliceType :: HsSplice RdrName -> PostTc Name Kind
-> RnM (HsType Name, FreeVars)
rnSpliceType splice k
= rnSpliceGen run_type_splice pend_type_splice splice
where
pend_type_splice rn_splice
= (makePending UntypedTypeSplice rn_splice, HsSpliceTy rn_splice k)
run_type_splice rn_splice
= do { traceRn (text "rnSpliceType: untyped type splice")
; hs_ty2 <- runRnSplice UntypedTypeSplice runMetaT ppr rn_splice
; (hs_ty3, fvs) <- do { let doc = SpliceTypeCtx hs_ty2
; checkNoErrs $ rnLHsType doc hs_ty2 }
-- checkNoErrs: see Note [Renamer errors]
; return (HsParTy hs_ty3, fvs) }
-- Wrap the result of the splice in parens so that we don't
-- lose the outermost location set by runQuasiQuote (#7918)
{- Note [Partial Type Splices]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Partial Type Signatures are partially supported in TH type splices: only
anonymous wild cards are allowed.
-- ToDo: SLPJ says: I don't understand all this
Normally, named wild cards are collected before renaming a (partial) type
signature. However, TH type splices are run during renaming, i.e. after the
initial traversal, leading to out of scope errors for named wild cards. We
can't just extend the initial traversal to collect the named wild cards in TH
type splices, as we'd need to expand them, which is supposed to happen only
once, during renaming.
Similarly, the extra-constraints wild card is handled right before renaming
too, and is therefore also not supported in a TH type splice. Another reason
to forbid extra-constraints wild cards in TH type splices is that a single
signature can contain many TH type splices, whereas it mustn't contain more
than one extra-constraints wild card. Enforcing this would be hard the way
things are currently organised.
Anonymous wild cards pose no problem, because they start out without names and
are given names during renaming. These names are collected right after
renaming. The names generated for anonymous wild cards in TH type splices will
thus be collected as well.
For more details about renaming wild cards, see RnTypes.rnHsSigWcType
Note that partial type signatures are fully supported in TH declaration
splices, e.g.:
[d| foo :: _ => _
foo x y = x == y |]
This is because in this case, the partial type signature can be treated as a
whole signature, instead of as an arbitrary type.
-}
----------------------
-- | Rename a splice pattern. See Note [rnSplicePat]
rnSplicePat :: HsSplice RdrName -> RnM ( Either (Pat RdrName) (Pat Name)
, FreeVars)
rnSplicePat splice
= rnSpliceGen run_pat_splice pend_pat_splice splice
where
pend_pat_splice rn_splice
= (makePending UntypedPatSplice rn_splice, Right (SplicePat rn_splice))
run_pat_splice rn_splice
= do { traceRn (text "rnSplicePat: untyped pattern splice")
; pat <- runRnSplice UntypedPatSplice runMetaP ppr rn_splice
; return (Left (ParPat pat), emptyFVs) }
-- Wrap the result of the quasi-quoter in parens so that we don't
-- lose the outermost location set by runQuasiQuote (#7918)
----------------------
rnSpliceDecl :: SpliceDecl RdrName -> RnM (SpliceDecl Name, FreeVars)
rnSpliceDecl (SpliceDecl (L loc splice) flg)
= rnSpliceGen run_decl_splice pend_decl_splice splice
where
pend_decl_splice rn_splice
= (makePending UntypedDeclSplice rn_splice, SpliceDecl (L loc rn_splice) flg)
run_decl_splice rn_splice = pprPanic "rnSpliceDecl" (ppr rn_splice)
rnTopSpliceDecls :: HsSplice RdrName -> RnM ([LHsDecl RdrName], FreeVars)
-- Declaration splice at the very top level of the module
rnTopSpliceDecls splice
= do { (rn_splice, fvs) <- setStage (Splice Untyped) $
rnSplice splice
; traceRn (text "rnTopSpliceDecls: untyped declaration splice")
; decls <- runRnSplice UntypedDeclSplice runMetaD ppr_decls rn_splice
; return (decls,fvs) }
where
ppr_decls :: [LHsDecl RdrName] -> SDoc
ppr_decls ds = vcat (map ppr ds)
{-
Note [rnSplicePat]
~~~~~~~~~~~~~~~~~~
Renaming a pattern splice is a bit tricky, because we need the variables
bound in the pattern to be in scope in the RHS of the pattern. This scope
management is effectively done by using continuation-passing style in
RnPat, through the CpsRn monad. We don't wish to be in that monad here
(it would create import cycles and generally conflict with renaming other
splices), so we really want to return a (Pat RdrName) -- the result of
running the splice -- which can then be further renamed in RnPat, in
the CpsRn monad.
The problem is that if we're renaming a splice within a bracket, we
*don't* want to run the splice now. We really do just want to rename
it to an HsSplice Name. Of course, then we can't know what variables
are bound within the splice. So we accept any unbound variables and
rename them again when the bracket is spliced in. If a variable is brought
into scope by a pattern splice all is fine. If it is not then an error is
reported.
In any case, when we're done in rnSplicePat, we'll either have a
Pat RdrName (the result of running a top-level splice) or a Pat Name
(the renamed nested splice). Thus, the awkward return type of
rnSplicePat.
-}
spliceCtxt :: HsSplice RdrName -> SDoc
spliceCtxt splice
= hang (text "In the" <+> what) 2 (ppr splice)
where
what = case splice of
HsUntypedSplice {} -> text "untyped splice:"
HsTypedSplice {} -> text "typed splice:"
HsQuasiQuote {} -> text "quasi-quotation:"
-- | The splice data to be logged
data SpliceInfo
= SpliceInfo
{ spliceDescription :: String
, spliceSource :: Maybe (LHsExpr Name) -- Nothing <=> top-level decls
-- added by addTopDecls
    , spliceIsDecl :: Bool -- True <=> put the generated code in a file
-- when -dth-dec-file is on
, spliceGenerated :: SDoc
}
-- Note that 'spliceSource' is *renamed* but not *typechecked*
-- Reason (a) less typechecking crap
-- (b) data constructors after type checking have been
-- changed to their *wrappers*, and that makes them
-- print always fully qualified
-- | outputs splice information for 2 flags which have different output formats:
-- `-ddump-splices` and `-dth-dec-file`
traceSplice :: SpliceInfo -> TcM ()
traceSplice (SpliceInfo { spliceDescription = sd, spliceSource = mb_src
, spliceGenerated = gen, spliceIsDecl = is_decl })
= do { loc <- case mb_src of
Nothing -> getSrcSpanM
Just (L loc _) -> return loc
; traceOptTcRn Opt_D_dump_splices (spliceDebugDoc loc)
; when is_decl $ -- Raw material for -dth-dec-file
do { dflags <- getDynFlags
; liftIO $ dumpIfSet_dyn_printer alwaysQualify dflags Opt_D_th_dec_file
(spliceCodeDoc loc) } }
where
-- `-ddump-splices`
spliceDebugDoc :: SrcSpan -> SDoc
spliceDebugDoc loc
= let code = case mb_src of
Nothing -> ending
Just e -> nest 2 (ppr e) : ending
ending = [ text "======>", nest 2 gen ]
in hang (ppr loc <> colon <+> text "Splicing" <+> text sd)
2 (sep code)
-- `-dth-dec-file`
spliceCodeDoc :: SrcSpan -> SDoc
spliceCodeDoc loc
= vcat [ text "--" <+> ppr loc <> colon <+> text "Splicing" <+> text sd
, gen ]
illegalTypedSplice :: SDoc
illegalTypedSplice = text "Typed splices may not appear in untyped brackets"
illegalUntypedSplice :: SDoc
illegalUntypedSplice = text "Untyped splices may not appear in typed brackets"
-- spliceResultDoc :: OutputableBndr id => LHsExpr id -> SDoc
-- spliceResultDoc expr
-- = vcat [ hang (text "In the splice:")
-- 2 (char '$' <> pprParendExpr expr)
-- , text "To see what the splice expanded to, use -ddump-splices" ]
#endif
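-- Check a use of a locally-bound name inside a TH bracket or splice:
-- ensure it is well-staged and do cross-stage lifting if needed.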
checkThLocalName :: Name -> RnM ()
checkThLocalName name
| isUnboundName name -- Do not report two errors for
= return () -- $(not_in_scope args)
| otherwise
= do { traceRn (text "checkThLocalName" <+> ppr name)
; mb_local_use <- getStageAndBindLevel name
; case mb_local_use of {
Nothing -> return () ; -- Not a locally-bound thing
Just (top_lvl, bind_lvl, use_stage) ->
do { let use_lvl = thLevel use_stage
; checkWellStaged (quotes (ppr name)) bind_lvl use_lvl
; traceRn (text "checkThLocalName" <+> ppr name <+> ppr bind_lvl <+> ppr use_stage <+> ppr use_lvl)
; checkCrossStageLifting top_lvl bind_lvl use_stage use_lvl name } } }
--------------------------------------
checkCrossStageLifting :: TopLevelFlag -> ThLevel -> ThStage -> ThLevel
-> Name -> TcM ()
-- We are inside brackets, and (use_lvl > bind_lvl)
-- Now we must check whether there's a cross-stage lift to do
-- Examples \x -> [| x |]
-- [| map |]
--
-- This code is similar to checkCrossStageLifting in TcExpr, but
-- this is only run on *untyped* brackets.
checkCrossStageLifting top_lvl bind_lvl use_stage use_lvl name
| Brack _ (RnPendingUntyped ps_var) <- use_stage -- Only for untyped brackets
, use_lvl > bind_lvl -- Cross-stage condition
= check_cross_stage_lifting top_lvl name ps_var
| otherwise
= return ()
check_cross_stage_lifting :: TopLevelFlag -> Name -> TcRef [PendingRnSplice] -> TcM ()
check_cross_stage_lifting top_lvl name ps_var
| isTopLevel top_lvl
-- Top-level identifiers in this module,
-- (which have External Names)
-- are just like the imported case:
-- no need for the 'lifting' treatment
-- E.g. this is fine:
-- f x = x
-- g y = [| f 3 |]
= when (isExternalName name) (keepAlive name)
-- See Note [Keeping things alive for Template Haskell]
| otherwise
= -- Nested identifiers, such as 'x' in
-- E.g. \x -> [| h x |]
-- We must behave as if the reference to x was
-- h $(lift x)
-- We use 'x' itself as the SplicePointName, used by
-- the desugarer to stitch it all back together.
-- If 'x' occurs many times we may get many identical
-- bindings of the same SplicePointName, but that doesn't
-- matter, although it's a mite untidy.
do { traceRn (text "checkCrossStageLifting" <+> ppr name)
-- Construct the (lift x) expression
; let lift_expr = nlHsApp (nlHsVar liftName) (nlHsVar name)
pend_splice = PendingRnSplice UntypedExpSplice name lift_expr
-- Update the pending splices
; ps <- readMutVar ps_var
; writeMutVar ps_var (pend_splice : ps) }
{-
Note [Keeping things alive for Template Haskell]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Consider
f x = x+1
g y = [| f 3 |]
Here 'f' is referred to from inside the bracket, which turns into data
and mentions only f's *name*, not 'f' itself. So we need some other
way to keep 'f' alive, lest it get dropped as dead code. That's what
keepAlive does. It puts it in the keep-alive set, which subsequently
ensures that 'f' stays as a top level binding.
This must be done by the renamer, not the type checker (as of old),
because the type checker doesn't typecheck the body of untyped
brackets (Trac #8540).
A thing can have a bind_lvl of outerLevel, but have an internal name:
foo = [d| op = 3
bop = op + 1 |]
Here the bind_lvl of 'op' is (bogusly) outerLevel, even though it is
bound inside a bracket. That is because we don't even record
binding levels for top-level things; the binding levels are in the
LocalRdrEnv.
So the occurrence of 'op' in the rhs of 'bop' looks a bit like a
cross-stage thing, but it isn't really. And in fact we never need
to do anything here for top-level bound things, so all is fine, if
a bit hacky.
For these chaps (which have Internal Names) we don't want to put
them in the keep-alive set.
Note [Quoting names]
~~~~~~~~~~~~~~~~~~~~
A quoted name 'n is a bit like a quoted expression [| n |], except that we
have no cross-stage lifting (c.f. TcExpr.thBrackId). So, after incrementing
the use-level to account for the brackets, the cases are:
bind > use Error
bind = use+1 OK
bind < use
Imported things OK
Top-level things OK
Non-top-level Error
where 'use' is the binding level of the 'n quote. (So inside the implied
bracket the level would be use+1.)
Examples:
f 'map -- OK; also for top-level defns of this module
\x. f 'x -- Not ok (bind = 1, use = 1)
-- (whereas \x. f [| x |] might have been ok, by
                       -- cross-stage lifting)
\y. [| \x. $(f 'y) |] -- Not ok (bind =1, use = 1)
[| \x. $(f 'x) |] -- OK (bind = 2, use = 1)
-}
| tjakway/ghcjvm | compiler/rename/RnSplice.hs | bsd-3-clause | 32,074 | 0 | 21 | 9,499 | 2,261 | 1,176 | 1,085 | 160 | 5 |
module Main (main) where
import qualified Distribution.ModuleName as ModuleName
import Distribution.PackageDescription
import Distribution.PackageDescription.Check hiding (doesFileExist)
import Distribution.PackageDescription.Configuration
import Distribution.PackageDescription.Parse
import Distribution.System
import Distribution.Simple
import Distribution.Simple.Configure
import Distribution.Simple.LocalBuildInfo
import Distribution.Simple.Program
import Distribution.Simple.Program.HcPkg
import Distribution.Simple.Utils (defaultPackageDesc, writeFileAtomic, toUTF8)
import Distribution.Simple.Build (writeAutogenFiles)
import Distribution.Simple.Register
import Distribution.Text
import Distribution.Verbosity
import qualified Distribution.InstalledPackageInfo as Installed
import qualified Distribution.Simple.PackageIndex as PackageIndex
import Control.Monad
import qualified Data.ByteString.Lazy.Char8 as BS
import Data.List
import Data.Maybe
import System.IO
import System.Directory
import System.Environment
import System.Exit
import System.FilePath
main :: IO ()
main = do hSetBuffering stdout LineBuffering
args <- getArgs
case args of
"hscolour" : dir : distDir : args' ->
runHsColour dir distDir args'
"check" : dir : [] ->
doCheck dir
"copy" : dir : distDir
: strip : myDestDir : myPrefix : myLibdir : myDocdir
: ghcLibWays : args' ->
doCopy dir distDir
strip myDestDir myPrefix myLibdir myDocdir
("dyn" `elem` words ghcLibWays)
args'
"register" : dir : distDir : ghc : ghcpkg : topdir
: myDestDir : myPrefix : myLibdir : myDocdir
: relocatableBuild : args' ->
doRegister dir distDir ghc ghcpkg topdir
myDestDir myPrefix myLibdir myDocdir
relocatableBuild args'
"configure" : dir : distDir : dll0Modules : config_args ->
generate dir distDir dll0Modules config_args
"sdist" : dir : distDir : [] ->
doSdist dir distDir
["--version"] ->
defaultMainArgs ["--version"]
_ -> die syntax_error
syntax_error :: [String]
syntax_error =
["syntax: ghc-cabal configure <configure-args> -- <distdir> <directory>...",
" ghc-cabal install <ghc-pkg> <directory> <distdir> <destdir> <prefix> <args>...",
" ghc-cabal hscolour <distdir> <directory> <args>..."]
die :: [String] -> IO a
die errs = do mapM_ (hPutStrLn stderr) errs
exitWith (ExitFailure 1)
-- XXX Should use bracket
withCurrentDirectory :: FilePath -> IO a -> IO a
withCurrentDirectory directory io
= do curDirectory <- getCurrentDirectory
setCurrentDirectory directory
r <- io
setCurrentDirectory curDirectory
return r
-- We need to use the autoconfUserHooks, as the packages that use
-- configure can create a .buildinfo file, and we need any info that
-- ends up in it.
userHooks :: UserHooks
userHooks = autoconfUserHooks
runDefaultMain :: IO ()
runDefaultMain
= do let verbosity = normal
gpdFile <- defaultPackageDesc verbosity
gpd <- readPackageDescription verbosity gpdFile
case buildType (flattenPackageDescription gpd) of
Just Configure -> defaultMainWithHooks autoconfUserHooks
-- time has a "Custom" Setup.hs, but it's actually Configure
-- plus a "./Setup test" hook. However, Cabal is also
-- "Custom", but doesn't have a configure script.
Just Custom ->
do configureExists <- doesFileExist "configure"
if configureExists
then defaultMainWithHooks autoconfUserHooks
else defaultMain
-- not quite right, but good enough for us:
_ -> defaultMain
doSdist :: FilePath -> FilePath -> IO ()
doSdist directory distDir
= withCurrentDirectory directory
$ withArgs (["sdist", "--builddir", distDir])
runDefaultMain
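-- Run Cabal's package-description checks in the given directory,
-- printing warnings and exiting with failure on any errors.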
doCheck :: FilePath -> IO ()
doCheck directory
= withCurrentDirectory directory
$ do let verbosity = normal
gpdFile <- defaultPackageDesc verbosity
gpd <- readPackageDescription verbosity gpdFile
case partition isFailure $ checkPackage gpd Nothing of
([], []) -> return ()
([], warnings) -> mapM_ print warnings
(errs, _) -> do mapM_ print errs
exitWith (ExitFailure 1)
where isFailure (PackageDistSuspicious {}) = False
isFailure _ = True
runHsColour :: FilePath -> FilePath -> [String] -> IO ()
runHsColour directory distdir args
= withCurrentDirectory directory
$ defaultMainArgs ("hscolour" : "--builddir" : distdir : args)
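-- Run the Cabal copy step, hiding GHC.Prim from ghc-prim's exposed
-- modules and overriding the install directories and strip program.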
doCopy :: FilePath -> FilePath
-> FilePath -> FilePath -> FilePath -> FilePath -> FilePath -> Bool
-> [String]
-> IO ()
doCopy directory distDir
strip myDestDir myPrefix myLibdir myDocdir withSharedLibs
args
= withCurrentDirectory directory $ do
let copyArgs = ["copy", "--builddir", distDir]
++ (if null myDestDir
then []
else ["--destdir", myDestDir])
++ args
copyHooks = userHooks {
copyHook = noGhcPrimHook
$ modHook False
$ copyHook userHooks
}
defaultMainWithHooksArgs copyHooks copyArgs
where
noGhcPrimHook f pd lbi us flags
= let pd'
| packageName pd == PackageName "ghc-prim" =
case library pd of
Just lib ->
let ghcPrim = fromJust (simpleParse "GHC.Prim")
ems = filter (ghcPrim /=) (exposedModules lib)
lib' = lib { exposedModules = ems }
in pd { library = Just lib' }
Nothing ->
error "Expected a library, but none found"
| otherwise = pd
in f pd' lbi us flags
modHook relocatableBuild f pd lbi us flags
= do let verbosity = normal
idts = updateInstallDirTemplates relocatableBuild
myPrefix myLibdir myDocdir
(installDirTemplates lbi)
progs = withPrograms lbi
stripProgram' = stripProgram {
programFindLocation = \_ _ -> return (Just strip) }
progs' <- configureProgram verbosity stripProgram' progs
let lbi' = lbi {
withPrograms = progs',
installDirTemplates = idts,
withSharedLib = withSharedLibs
}
f pd lbi' us flags
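-- Run the Cabal register step using the in-tree ghc and ghc-pkg,
-- pointing them at the right package database and install directories.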
doRegister :: FilePath -> FilePath -> FilePath -> FilePath
-> FilePath -> FilePath -> FilePath -> FilePath -> FilePath
-> String -> [String]
-> IO ()
doRegister directory distDir ghc ghcpkg topdir
myDestDir myPrefix myLibdir myDocdir
relocatableBuildStr args
= withCurrentDirectory directory $ do
relocatableBuild <- case relocatableBuildStr of
"YES" -> return True
"NO" -> return False
_ -> die ["Bad relocatableBuildStr: " ++
show relocatableBuildStr]
let regArgs = "register" : "--builddir" : distDir : args
regHooks = userHooks {
regHook = modHook relocatableBuild
$ regHook userHooks
}
defaultMainWithHooksArgs regHooks regArgs
where
modHook relocatableBuild f pd lbi us flags
= do let verbosity = normal
idts = updateInstallDirTemplates relocatableBuild
myPrefix myLibdir myDocdir
(installDirTemplates lbi)
progs = withPrograms lbi
ghcpkgconf = topdir </> "package.conf.d"
ghcProgram' = ghcProgram {
programPostConf = \_ cp -> return cp { programDefaultArgs = ["-B" ++ topdir] },
programFindLocation = \_ _ -> return (Just ghc) }
ghcPkgProgram' = ghcPkgProgram {
programPostConf = \_ cp -> return cp { programDefaultArgs =
["--global-package-db", ghcpkgconf]
++ ["--force" | not (null myDestDir) ] },
programFindLocation = \_ _ -> return (Just ghcpkg) }
configurePrograms ps conf = foldM (flip (configureProgram verbosity)) conf ps
progs' <- configurePrograms [ghcProgram', ghcPkgProgram'] progs
let Just ghcPkgProg = lookupProgram ghcPkgProgram' progs'
instInfos <- dump verbosity ghcPkgProg GlobalPackageDB
let installedPkgs' = PackageIndex.fromList instInfos
let updateComponentConfig (cn, clbi, deps)
= (cn, updateComponentLocalBuildInfo clbi, deps)
updateComponentLocalBuildInfo clbi
= clbi {
componentPackageDeps =
[ (fixupPackageId instInfos ipid, pid)
| (ipid,pid) <- componentPackageDeps clbi ]
}
ccs' = map updateComponentConfig (componentsConfigs lbi)
lbi' = lbi {
componentsConfigs = ccs',
installedPkgs = installedPkgs',
installDirTemplates = idts,
withPrograms = progs'
}
f pd lbi' us flags
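-- For relocatable builds everything is installed under $topdir;
-- otherwise use the prefix, libdir and docdir we were given.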
updateInstallDirTemplates :: Bool -> FilePath -> FilePath -> FilePath
-> InstallDirTemplates
-> InstallDirTemplates
updateInstallDirTemplates relocatableBuild myPrefix myLibdir myDocdir idts
= idts {
prefix = toPathTemplate $
if relocatableBuild
then "$topdir"
else myPrefix,
libdir = toPathTemplate $
if relocatableBuild
then "$topdir"
else myLibdir,
libsubdir = toPathTemplate "$pkgid",
docdir = toPathTemplate $
if relocatableBuild
then "$topdir/../doc/html/libraries/$pkgid"
else (myDocdir </> "$pkgid"),
htmldir = toPathTemplate "$docdir"
}
-- The packages are built with the package ID ending in "-inplace", but
-- when they're installed they get the package hash appended. We need to
-- fix up the package deps so that they use the hash package IDs, not
-- the inplace package IDs.
fixupPackageId :: [Installed.InstalledPackageInfo]
-> InstalledPackageId
-> InstalledPackageId
fixupPackageId _ x@(InstalledPackageId ipi)
| "builtin_" `isPrefixOf` ipi = x
fixupPackageId ipinfos (InstalledPackageId ipi)
= case stripPrefix (reverse "-inplace") $ reverse ipi of
Nothing ->
error ("Installed package ID doesn't end in -inplace: " ++ show ipi)
Just x ->
let ipi' = reverse ('-' : x)
f (ipinfo : ipinfos') = case Installed.installedPackageId ipinfo of
y@(InstalledPackageId ipinfoid)
| ipi' `isPrefixOf` ipinfoid ->
y
_ ->
f ipinfos'
f [] = error ("Installed package ID not registered: " ++ show ipi)
in f ipinfos
-- On Windows we need to split the ghc package into 2 pieces, or the
-- DLL that it makes contains too many symbols (#5987). There are
-- therefore 2 libraries, not just the 1 that Cabal assumes.
mangleLbi :: FilePath -> FilePath -> LocalBuildInfo -> LocalBuildInfo
mangleLbi "compiler" "stage2" lbi
| isWindows =
let ccs' = [ (cn, updateComponentLocalBuildInfo clbi, cns)
| (cn, clbi, cns) <- componentsConfigs lbi ]
updateComponentLocalBuildInfo clbi@(LibComponentLocalBuildInfo {})
= let cls' = concat [ [ LibraryName n, LibraryName (n ++ "-0") ]
| LibraryName n <- componentLibraries clbi ]
in clbi { componentLibraries = cls' }
updateComponentLocalBuildInfo clbi = clbi
in lbi { componentsConfigs = ccs' }
where isWindows = case hostPlatform lbi of
Platform _ Windows -> True
_ -> False
mangleLbi _ _ lbi = lbi
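-- Configure the package and emit the files the GHC build system needs:
-- package-data.mk, haddock-prologue.txt, inplace-pkg-config, etc.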
generate :: FilePath -> FilePath -> String -> [String] -> IO ()
generate directory distdir dll0Modules config_args
= withCurrentDirectory directory
$ do let verbosity = normal
-- XXX We shouldn't just configure with the default flags
-- XXX And this, and thus the "getPersistBuildConfig distdir" below,
-- aren't going to work when the deps aren't built yet
withArgs (["configure", "--distdir", distdir] ++ config_args)
runDefaultMain
lbi0 <- getPersistBuildConfig distdir
let lbi = mangleLbi directory distdir lbi0
pd0 = localPkgDescr lbi
writePersistBuildConfig distdir lbi
hooked_bi <-
if (buildType pd0 == Just Configure) || (buildType pd0 == Just Custom)
then do
maybe_infoFile <- defaultHookedPackageDesc
case maybe_infoFile of
Nothing -> return emptyHookedBuildInfo
Just infoFile -> readHookedBuildInfo verbosity infoFile
else
return emptyHookedBuildInfo
let pd = updatePackageDescription hooked_bi pd0
-- generate Paths_<pkg>.hs and cabal-macros.h
writeAutogenFiles verbosity pd lbi
-- generate inplace-pkg-config
withLibLBI pd lbi $ \lib clbi ->
do cwd <- getCurrentDirectory
let ipid = InstalledPackageId (display (packageId pd) ++ "-inplace")
let installedPkgInfo = inplaceInstalledPackageInfo cwd distdir
pd lib lbi clbi
final_ipi = installedPkgInfo {
Installed.installedPackageId = ipid,
Installed.haddockHTMLs = []
}
content = Installed.showInstalledPackageInfo final_ipi ++ "\n"
writeFileAtomic (distdir </> "inplace-pkg-config") (BS.pack $ toUTF8 content)
let
libBiModules lib = (libBuildInfo lib, libModules lib)
exeBiModules exe = (buildInfo exe, ModuleName.main : exeModules exe)
biModuless = (maybeToList $ fmap libBiModules $ library pd)
++ (map exeBiModules $ executables pd)
buildableBiModuless = filter isBuildable biModuless
where isBuildable (bi', _) = buildable bi'
(bi, modules) = case buildableBiModuless of
[] -> error "No buildable component found"
[biModules] -> biModules
_ -> error ("XXX ghc-cabal can't handle " ++
"more than one buildinfo yet")
-- XXX Another Just...
Just ghcProg = lookupProgram ghcProgram (withPrograms lbi)
dep_pkgs = PackageIndex.topologicalOrder (packageHacks (installedPkgs lbi))
forDeps f = concatMap f dep_pkgs
-- copied from Distribution.Simple.PreProcess.ppHsc2Hs
packageHacks = case compilerFlavor (compiler lbi) of
GHC -> hackRtsPackage
_ -> id
-- We don't link in the actual Haskell libraries of our
-- dependencies, so the -u flags in the ldOptions of the rts
          -- package mean linking fails on OS X (its ld is a tad
-- stricter than gnu ld). Thus we remove the ldOptions for
-- GHC's rts package:
hackRtsPackage index =
case PackageIndex.lookupPackageName index (PackageName "rts") of
[(_,[rts])] ->
PackageIndex.insert rts{
Installed.ldOptions = [],
Installed.libraryDirs = filter (not . ("gcc-lib" `isSuffixOf`)) (Installed.libraryDirs rts)} index
-- GHC <= 6.12 had $topdir/gcc-lib in their
-- library-dirs for the rts package, which causes
-- problems when we try to use the in-tree mingw,
-- due to accidentally picking up the incompatible
-- libraries there. So we filter out gcc-lib from
-- the RTS's library-dirs here.
_ -> error "No (or multiple) ghc rts package is registered!!"
dep_ids = map snd (externalPackageDeps lbi)
deps = map display dep_ids
depNames = map (display . packageName) dep_ids
transitive_dep_ids = map Installed.sourcePackageId dep_pkgs
transitiveDeps = map display transitive_dep_ids
transitiveDepNames = map (display . packageName) transitive_dep_ids
libraryDirs = forDeps Installed.libraryDirs
-- The mkLibraryRelDir function is a bit of a hack.
-- Ideally it should be handled in the makefiles instead.
mkLibraryRelDir "rts" = "rts/dist/build"
mkLibraryRelDir "ghc" = "compiler/stage2/build"
mkLibraryRelDir "Cabal" = "libraries/Cabal/Cabal/dist-install/build"
mkLibraryRelDir l = "libraries/" ++ l ++ "/dist-install/build"
libraryRelDirs = map mkLibraryRelDir transitiveDepNames
wrappedIncludeDirs <- wrap $ forDeps Installed.includeDirs
wrappedLibraryDirs <- wrap libraryDirs
let variablePrefix = directory ++ '_':distdir
mods = map display modules
otherMods = map display (otherModules bi)
allMods = mods ++ otherMods
let xs = [variablePrefix ++ "_VERSION = " ++ display (pkgVersion (package pd)),
variablePrefix ++ "_MODULES = " ++ unwords mods,
variablePrefix ++ "_HIDDEN_MODULES = " ++ unwords otherMods,
variablePrefix ++ "_SYNOPSIS =" ++ synopsis pd,
variablePrefix ++ "_HS_SRC_DIRS = " ++ unwords (hsSourceDirs bi),
variablePrefix ++ "_DEPS = " ++ unwords deps,
variablePrefix ++ "_DEP_NAMES = " ++ unwords depNames,
variablePrefix ++ "_TRANSITIVE_DEPS = " ++ unwords transitiveDeps,
variablePrefix ++ "_TRANSITIVE_DEP_NAMES = " ++ unwords transitiveDepNames,
variablePrefix ++ "_INCLUDE_DIRS = " ++ unwords (includeDirs bi),
variablePrefix ++ "_INCLUDES = " ++ unwords (includes bi),
variablePrefix ++ "_INSTALL_INCLUDES = " ++ unwords (installIncludes bi),
variablePrefix ++ "_EXTRA_LIBRARIES = " ++ unwords (extraLibs bi),
variablePrefix ++ "_EXTRA_LIBDIRS = " ++ unwords (extraLibDirs bi),
variablePrefix ++ "_C_SRCS = " ++ unwords (cSources bi),
variablePrefix ++ "_CMM_SRCS := $(addprefix cbits/,$(notdir $(wildcard " ++ directory ++ "/cbits/*.cmm)))",
variablePrefix ++ "_DATA_FILES = " ++ unwords (dataFiles pd),
-- XXX This includes things it shouldn't, like:
-- -odir dist-bootstrapping/build
variablePrefix ++ "_HC_OPTS = " ++ escape (unwords
( programDefaultArgs ghcProg
++ hcOptions GHC bi
++ languageToFlags (compiler lbi) (defaultLanguage bi)
++ extensionsToFlags (compiler lbi) (usedExtensions bi)
++ programOverrideArgs ghcProg)),
variablePrefix ++ "_CC_OPTS = " ++ unwords (ccOptions bi),
variablePrefix ++ "_CPP_OPTS = " ++ unwords (cppOptions bi),
variablePrefix ++ "_LD_OPTS = " ++ unwords (ldOptions bi),
variablePrefix ++ "_DEP_INCLUDE_DIRS_SINGLE_QUOTED = " ++ unwords wrappedIncludeDirs,
variablePrefix ++ "_DEP_CC_OPTS = " ++ unwords (forDeps Installed.ccOptions),
variablePrefix ++ "_DEP_LIB_DIRS_SINGLE_QUOTED = " ++ unwords wrappedLibraryDirs,
variablePrefix ++ "_DEP_LIB_DIRS_SEARCHPATH = " ++ mkSearchPath libraryDirs,
variablePrefix ++ "_DEP_LIB_REL_DIRS = " ++ unwords libraryRelDirs,
variablePrefix ++ "_DEP_LIB_REL_DIRS_SEARCHPATH = " ++ mkSearchPath libraryRelDirs,
variablePrefix ++ "_DEP_EXTRA_LIBS = " ++ unwords (forDeps Installed.extraLibraries),
variablePrefix ++ "_DEP_LD_OPTS = " ++ unwords (forDeps Installed.ldOptions),
variablePrefix ++ "_BUILD_GHCI_LIB = " ++ boolToYesNo (withGHCiLib lbi),
"",
-- Sometimes we need to modify the automatically-generated package-data.mk
-- bindings in a special way for the GHC build system, so allow that here:
"$(eval $(" ++ directory ++ "_PACKAGE_MAGIC))"
]
writeFile (distdir ++ "/package-data.mk") $ unlines xs
writeFile (distdir ++ "/haddock-prologue.txt") $
if null (description pd) then synopsis pd
else description pd
unless (null dll0Modules) $
do let dll0Mods = words dll0Modules
dllMods = allMods \\ dll0Mods
dllModSets = map unwords [dll0Mods, dllMods]
writeFile (distdir ++ "/dll-split") $ unlines dllModSets
where
escape = foldr (\c xs -> if c == '#' then '\\':'#':xs else c:xs) []
wrap = mapM wrap1
wrap1 s
| null s = die ["Wrapping empty value"]
| '\'' `elem` s = die ["Single quote in value to be wrapped:", s]
-- We want to be able to assume things like <space><quote> is the
-- start of a value, so check there are no spaces in confusing
-- positions
| head s == ' ' = die ["Leading space in value to be wrapped:", s]
| last s == ' ' = die ["Trailing space in value to be wrapped:", s]
| otherwise = return ("\'" ++ s ++ "\'")
mkSearchPath = intercalate [searchPathSeparator]
boolToYesNo True = "YES"
boolToYesNo False = "NO"
| ekmett/ghc | utils/ghc-cabal/Main.hs | bsd-3-clause | 23,298 | 0 | 23 | 8,374 | 4,641 | 2,366 | 2,275 | 388 | 13 |
-- |
-- Module : $Header$
-- Copyright : (c) 2013-2015 Galois, Inc.
-- License : BSD3
-- Maintainer : cryptol@galois.com
-- Stability : provisional
-- Portability : portable
{-# LANGUAGE Safe #-}
{-# LANGUAGE MultiParamTypeClasses, FunctionalDependencies #-}
{-# LANGUAGE UndecidableInstances, FlexibleInstances #-}
{-# LANGUAGE DeriveFunctor #-}
module Cryptol.TypeCheck.TypeMap
( TypeMap(..), TypesMap, TrieMap(..)
, insertTM, insertWithTM
, membersTM
, mapTM, mapWithKeyTM, mapMaybeTM
, List(..)
) where
import Cryptol.TypeCheck.AST
import qualified Data.Map as Map
import Data.Map (Map)
import Data.Maybe(fromMaybe,maybeToList)
import Control.Monad((<=<))
import Data.List(sortBy)
import Data.Maybe (isNothing)
import Data.Ord(comparing)
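-- | Trie-style finite maps keyed by a structured key type @k@.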
class TrieMap m k | m -> k where
emptyTM :: m a
nullTM :: m a -> Bool
lookupTM :: k -> m a -> Maybe a
alterTM :: k -> (Maybe a -> Maybe a) -> m a -> m a
unionTM :: (a -> a -> a) -> m a -> m a -> m a
toListTM :: m a -> [(k,a)]
mapMaybeWithKeyTM :: (k -> a -> Maybe b) -> m a -> m b
membersTM :: TrieMap m k => m a -> [a]
membersTM = map snd . toListTM
insertTM :: TrieMap m k => k -> a -> m a -> m a
insertTM t a = alterTM t (\_ -> Just a)
insertWithTM :: TrieMap m k => (a -> a -> a) -> k -> a -> m a -> m a
insertWithTM f t new = alterTM t $ \mb -> Just $ case mb of
Nothing -> new
Just old -> f old new
{-# INLINE mapTM #-}
mapTM :: TrieMap m k => (a -> b) -> m a -> m b
mapTM f = mapMaybeWithKeyTM (\ _ a -> Just (f a))
{-# INLINE mapWithKeyTM #-}
mapWithKeyTM :: TrieMap m k => (k -> a -> b) -> m a -> m b
mapWithKeyTM f = mapMaybeWithKeyTM (\ k a -> Just (f k a))
{-# INLINE mapMaybeTM #-}
mapMaybeTM :: TrieMap m k => (a -> Maybe b) -> m a -> m b
mapMaybeTM f = mapMaybeWithKeyTM (\_ -> f)
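-- | A trie keyed by lists: 'nil' holds the value for the empty list,
-- 'cons' indexes on the head element.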
data List m a = L { nil :: Maybe a
, cons :: m (List m a)
} deriving (Functor)
instance TrieMap m a => TrieMap (List m) [a] where
emptyTM = L { nil = Nothing, cons = emptyTM }
nullTM k = isNothing (nil k) && nullTM (cons k)
lookupTM k =
case k of
[] -> nil
x : xs -> lookupTM xs <=< lookupTM x . cons
alterTM k f m =
case k of
[] -> m { nil = f (nil m) }
x:xs -> m { cons = alterTM x (updSub xs f) (cons m) }
toListTM m =
[ ([], v) | v <- maybeToList (nil m) ] ++
[ (x:xs,v) | (x,m1) <- toListTM (cons m), (xs,v) <- toListTM m1 ]
unionTM f m1 m2 = L { nil = case (nil m1, nil m2) of
(Just x, Just y) -> Just (f x y)
(Just x, _) -> Just x
(_, Just y) -> Just y
_ -> Nothing
, cons = unionTM (unionTM f) (cons m1) (cons m2)
}
mapMaybeWithKeyTM f = go []
where
go acc l = L { nil = f (reverse acc) =<< nil l
, cons = mapMaybeWithKeyTM (\k a -> Just (go (k:acc) a)) (cons l)
}
instance Ord a => TrieMap (Map a) a where
emptyTM = Map.empty
nullTM = Map.null
lookupTM = Map.lookup
alterTM = flip Map.alter
toListTM = Map.toList
unionTM = Map.unionWith
mapMaybeWithKeyTM = Map.mapMaybeWithKey
type TypesMap = List TypeMap
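-- | A trie keyed by 'Type', with separate branches for type variables,
-- constructor applications, and record types.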
data TypeMap a = TM { tvar :: Map TVar a
, tcon :: Map TCon (List TypeMap a)
, trec :: Map [Name] (List TypeMap a)
} deriving (Functor)
instance TrieMap TypeMap Type where
emptyTM = TM { tvar = emptyTM, tcon = emptyTM, trec = emptyTM }
nullTM ty = and [ nullTM (tvar ty)
, nullTM (tcon ty)
, nullTM (trec ty) ]
lookupTM ty =
case ty of
TUser _ _ t -> lookupTM t
TVar x -> lookupTM x . tvar
TCon c ts -> lookupTM ts <=< lookupTM c . tcon
TRec fs -> let (xs,ts) = unzip $ sortBy (comparing fst) fs
in lookupTM ts <=< lookupTM xs . trec
alterTM ty f m =
case ty of
TUser _ _ t -> alterTM t f m
TVar x -> m { tvar = alterTM x f (tvar m) }
TCon c ts -> m { tcon = alterTM c (updSub ts f) (tcon m) }
TRec fs -> let (xs,ts) = unzip $ sortBy (comparing fst) fs
in m { trec = alterTM xs (updSub ts f) (trec m) }
toListTM m =
[ (TVar x, v) | (x,v) <- toListTM (tvar m) ] ++
[ (TCon c ts, v) | (c,m1) <- toListTM (tcon m)
, (ts,v) <- toListTM m1 ] ++
[ (TRec (zip fs ts), v) | (fs,m1) <- toListTM (trec m)
, (ts,v) <- toListTM m1 ]
unionTM f m1 m2 = TM { tvar = unionTM f (tvar m1) (tvar m2)
, tcon = unionTM (unionTM f) (tcon m1) (tcon m2)
, trec = unionTM (unionTM f) (trec m1) (trec m2)
}
mapMaybeWithKeyTM f m =
TM { tvar = mapMaybeWithKeyTM (\v -> f (TVar v)) (tvar m)
, tcon = mapWithKeyTM (\c l -> mapMaybeWithKeyTM
(\ts a -> f (TCon c ts) a) l) (tcon m)
, trec = mapWithKeyTM (\fs l -> mapMaybeWithKeyTM
(\ts a -> f (TRec (zip fs ts)) a) l) (trec m)
}
updSub :: TrieMap m k => k -> (Maybe a -> Maybe a) -> Maybe (m a) -> Maybe (m a)
updSub k f = Just . alterTM k f . fromMaybe emptyTM
instance Show a => Show (TypeMap a) where
showsPrec p xs = showsPrec p (toListTM xs)
| iblumenfeld/cryptol | src/Cryptol/TypeCheck/TypeMap.hs | bsd-3-clause | 5,626 | 0 | 18 | 2,079 | 2,373 | 1,229 | 1,144 | -1 | -1 |
<?xml version="1.0" encoding="UTF-8"?><!DOCTYPE helpset PUBLIC "-//Sun Microsystems Inc.//DTD JavaHelp HelpSet Version 2.0//EN" "http://java.sun.com/products/javahelp/helpset_2_0.dtd">
<helpset version="2.0" xml:lang="fa-IR">
<title>Custom Payloads Add-on</title>
<maps>
<homeID>custompayloads</homeID>
<mapref location="map.jhm"/>
</maps>
<view>
<name>TOC</name>
<label>Contents</label>
<type>org.zaproxy.zap.extension.help.ZapTocView</type>
<data>toc.xml</data>
</view>
<view>
<name>Index</name>
<label>Index</label>
<type>javax.help.IndexView</type>
<data>index.xml</data>
</view>
<view>
<name>Search</name>
<label>Search</label>
<type>javax.help.SearchView</type>
<data engine="com.sun.java.help.search.DefaultSearchEngine">
JavaHelpSearch
</data>
</view>
<view>
<name>Favorites</name>
<label>Favorites</label>
<type>javax.help.FavoritesView</type>
</view>
</helpset> | thc202/zap-extensions | addOns/custompayloads/src/main/javahelp/org/zaproxy/zap/extension/custompayloads/resources/help_fa_IR/helpset_fa_IR.hs | apache-2.0 | 978 | 82 | 53 | 157 | 396 | 209 | 187 | -1 | -1 |
{-# LANGUAGE CPP #-}
module CmmType
( CmmType -- Abstract
, b8, b16, b32, b64, b128, b256, b512, f32, f64, bWord, bHalfWord, gcWord
, cInt, cLong
, cmmBits, cmmFloat
, typeWidth, cmmEqType, cmmEqType_ignoring_ptrhood
, isFloatType, isGcPtrType, isWord32, isWord64, isFloat64, isFloat32
, Width(..)
, widthInBits, widthInBytes, widthInLog, widthFromBytes
, wordWidth, halfWordWidth, cIntWidth, cLongWidth
, halfWordMask
, narrowU, narrowS
, rEP_CostCentreStack_mem_alloc
, rEP_CostCentreStack_scc_count
, rEP_StgEntCounter_allocs
, rEP_StgEntCounter_allocd
, ForeignHint(..)
, Length
, vec, vec2, vec4, vec8, vec16
, vec2f64, vec2b64, vec4f32, vec4b32, vec8b16, vec16b8
, cmmVec
, vecLength, vecElemType
, isVecType
)
where
#include "HsVersions.h"
import DynFlags
import FastString
import Outputable
import Data.Word
import Data.Int
-----------------------------------------------------------------------------
-- CmmType
-----------------------------------------------------------------------------
-- NOTE: CmmType is an abstract type, not exported from this
-- module so you can easily change its representation
--
-- However Width is exported in a concrete way,
-- and is used extensively in pattern-matching
data CmmType -- The important one!
= CmmType CmmCat Width
data CmmCat -- "Category" (not exported)
= GcPtrCat -- GC pointer
| BitsCat -- Non-pointer
| FloatCat -- Float
| VecCat Length CmmCat -- Vector
deriving( Eq )
-- See Note [Signed vs unsigned] at the end
instance Outputable CmmType where
ppr (CmmType cat wid) = ppr cat <> ppr (widthInBits wid)
instance Outputable CmmCat where
ppr FloatCat = text "F"
ppr GcPtrCat = text "P"
ppr BitsCat = text "I"
ppr (VecCat n cat) = ppr cat <> text "x" <> ppr n <> text "V"
-- Why is CmmType stratified? For native code generation,
-- most of the time you just want to know what sort of register
-- to put the thing in, and for this you need to know how
-- many bits thing has and whether it goes in a floating-point
-- register. By contrast, the distinction between GcPtr and
-- GcNonPtr is of interest to only a few parts of the code generator.
-------- Equality on CmmType --------------
-- CmmType is *not* an instance of Eq; sometimes we care about the
-- Gc/NonGc distinction, and sometimes we don't
-- So we use an explicit function to force you to think about it
cmmEqType :: CmmType -> CmmType -> Bool -- Exact equality
cmmEqType (CmmType c1 w1) (CmmType c2 w2) = c1==c2 && w1==w2
cmmEqType_ignoring_ptrhood :: CmmType -> CmmType -> Bool
-- This equality is temporary; used in CmmLint
-- but the RTS files are not yet well-typed wrt pointers
cmmEqType_ignoring_ptrhood (CmmType c1 w1) (CmmType c2 w2)
= c1 `weak_eq` c2 && w1==w2
where
weak_eq :: CmmCat -> CmmCat -> Bool
FloatCat `weak_eq` FloatCat = True
FloatCat `weak_eq` _other = False
_other `weak_eq` FloatCat = False
(VecCat l1 cat1) `weak_eq` (VecCat l2 cat2) = l1 == l2
&& cat1 `weak_eq` cat2
(VecCat {}) `weak_eq` _other = False
_other `weak_eq` (VecCat {}) = False
_word1 `weak_eq` _word2 = True -- Ignores GcPtr
--- Simple operations on CmmType -----
typeWidth :: CmmType -> Width
typeWidth (CmmType _ w) = w
cmmBits, cmmFloat :: Width -> CmmType
cmmBits = CmmType BitsCat
cmmFloat = CmmType FloatCat
-------- Common CmmTypes ------------
-- Floats and words of specific widths
b8, b16, b32, b64, b128, b256, b512, f32, f64 :: CmmType
b8 = cmmBits W8
b16 = cmmBits W16
b32 = cmmBits W32
b64 = cmmBits W64
b128 = cmmBits W128
b256 = cmmBits W256
b512 = cmmBits W512
f32 = cmmFloat W32
f64 = cmmFloat W64
-- CmmTypes of native word widths
bWord :: DynFlags -> CmmType
bWord dflags = cmmBits (wordWidth dflags)
bHalfWord :: DynFlags -> CmmType
bHalfWord dflags = cmmBits (halfWordWidth dflags)
gcWord :: DynFlags -> CmmType
gcWord dflags = CmmType GcPtrCat (wordWidth dflags)
cInt, cLong :: DynFlags -> CmmType
cInt dflags = cmmBits (cIntWidth dflags)
cLong dflags = cmmBits (cLongWidth dflags)
------------ Predicates ----------------
isFloatType, isGcPtrType :: CmmType -> Bool
isFloatType (CmmType FloatCat _) = True
isFloatType _other = False
isGcPtrType (CmmType GcPtrCat _) = True
isGcPtrType _other = False
isWord32, isWord64, isFloat32, isFloat64 :: CmmType -> Bool
-- isWord64 is true of 64-bit non-floats (both gc-ptrs and otherwise)
-- isFloat32 and 64 are obvious
isWord64 (CmmType BitsCat W64) = True
isWord64 (CmmType GcPtrCat W64) = True
isWord64 _other = False
isWord32 (CmmType BitsCat W32) = True
isWord32 (CmmType GcPtrCat W32) = True
isWord32 _other = False
isFloat32 (CmmType FloatCat W32) = True
isFloat32 _other = False
isFloat64 (CmmType FloatCat W64) = True
isFloat64 _other = False
-----------------------------------------------------------------------------
-- Width
-----------------------------------------------------------------------------
data Width = W8 | W16 | W32 | W64
| W80 -- Extended double-precision float,
-- used in x86 native codegen only.
-- (we use Ord, so it'd better be in this order)
| W128
| W256
| W512
deriving (Eq, Ord, Show)
instance Outputable Width where
ppr rep = ptext (mrStr rep)
mrStr :: Width -> LitString
mrStr W8 = sLit("W8")
mrStr W16 = sLit("W16")
mrStr W32 = sLit("W32")
mrStr W64 = sLit("W64")
mrStr W128 = sLit("W128")
mrStr W256 = sLit("W256")
mrStr W512 = sLit("W512")
mrStr W80 = sLit("W80")
-------- Common Widths ------------
wordWidth :: DynFlags -> Width
wordWidth dflags
| wORD_SIZE dflags == 4 = W32
| wORD_SIZE dflags == 8 = W64
| otherwise = panic "MachOp.wordRep: Unknown word size"
halfWordWidth :: DynFlags -> Width
halfWordWidth dflags
| wORD_SIZE dflags == 4 = W16
| wORD_SIZE dflags == 8 = W32
| otherwise = panic "MachOp.halfWordRep: Unknown word size"
halfWordMask :: DynFlags -> Integer
halfWordMask dflags
| wORD_SIZE dflags == 4 = 0xFFFF
| wORD_SIZE dflags == 8 = 0xFFFFFFFF
| otherwise = panic "MachOp.halfWordMask: Unknown word size"
-- cIntRep is the Width for a C-language 'int'
cIntWidth, cLongWidth :: DynFlags -> Width
cIntWidth dflags = case cINT_SIZE dflags of
4 -> W32
8 -> W64
s -> panic ("cIntWidth: Unknown cINT_SIZE: " ++ show s)
cLongWidth dflags = case cLONG_SIZE dflags of
                       4 -> W32
                       8 -> W64
                       s -> panic ("cLongWidth: Unknown cLONG_SIZE: " ++ show s)
widthInBits :: Width -> Int
widthInBits W8 = 8
widthInBits W16 = 16
widthInBits W32 = 32
widthInBits W64 = 64
widthInBits W128 = 128
widthInBits W256 = 256
widthInBits W512 = 512
widthInBits W80 = 80
widthInBytes :: Width -> Int
widthInBytes W8 = 1
widthInBytes W16 = 2
widthInBytes W32 = 4
widthInBytes W64 = 8
widthInBytes W128 = 16
widthInBytes W256 = 32
widthInBytes W512 = 64
widthInBytes W80 = 10
widthFromBytes :: Int -> Width
widthFromBytes 1 = W8
widthFromBytes 2 = W16
widthFromBytes 4 = W32
widthFromBytes 8 = W64
widthFromBytes 16 = W128
widthFromBytes 32 = W256
widthFromBytes 64 = W512
widthFromBytes 10 = W80
widthFromBytes n = pprPanic "no width for given number of bytes" (ppr n)
-- log_2 of the width in bytes, useful for generating shifts.
widthInLog :: Width -> Int
widthInLog W8 = 0
widthInLog W16 = 1
widthInLog W32 = 2
widthInLog W64 = 3
widthInLog W128 = 4
widthInLog W256 = 5
widthInLog W512 = 6
widthInLog W80 = panic "widthInLog: F80"
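-- For example, widthInLog W32 == 2, so scaling an index by the size of a
-- 32-bit element can be done with a left shift by 2 instead of a multiply.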
-- widening / narrowing
narrowU :: Width -> Integer -> Integer
narrowU W8 x = fromIntegral (fromIntegral x :: Word8)
narrowU W16 x = fromIntegral (fromIntegral x :: Word16)
narrowU W32 x = fromIntegral (fromIntegral x :: Word32)
narrowU W64 x = fromIntegral (fromIntegral x :: Word64)
narrowU _ _ = panic "narrowTo"
narrowS :: Width -> Integer -> Integer
narrowS W8 x = fromIntegral (fromIntegral x :: Int8)
narrowS W16 x = fromIntegral (fromIntegral x :: Int16)
narrowS W32 x = fromIntegral (fromIntegral x :: Int32)
narrowS W64 x = fromIntegral (fromIntegral x :: Int64)
narrowS _ _ = panic "narrowTo"
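-- Worked examples (two's-complement wrap-around):
--   narrowU W8 300 ==  44   (300 mod 256)
--   narrowS W8 200 == -56   (200 - 256)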
-----------------------------------------------------------------------------
-- SIMD
-----------------------------------------------------------------------------
type Length = Int
vec :: Length -> CmmType -> CmmType
vec l (CmmType cat w) = CmmType (VecCat l cat) vecw
where
vecw :: Width
vecw = widthFromBytes (l*widthInBytes w)
vec2, vec4, vec8, vec16 :: CmmType -> CmmType
vec2 = vec 2
vec4 = vec 4
vec8 = vec 8
vec16 = vec 16
vec2f64, vec2b64, vec4f32, vec4b32, vec8b16, vec16b8 :: CmmType
vec2f64 = vec 2 f64
vec2b64 = vec 2 b64
vec4f32 = vec 4 f32
vec4b32 = vec 4 b32
vec8b16 = vec 8 b16
vec16b8 = vec 16 b8
cmmVec :: Int -> CmmType -> CmmType
cmmVec n (CmmType cat w) =
CmmType (VecCat n cat) (widthFromBytes (n*widthInBytes w))
vecLength :: CmmType -> Length
vecLength (CmmType (VecCat l _) _) = l
vecLength _ = panic "vecLength: not a vector"
vecElemType :: CmmType -> CmmType
vecElemType (CmmType (VecCat l cat) w) = CmmType cat scalw
where
scalw :: Width
scalw = widthFromBytes (widthInBytes w `div` l)
vecElemType _ = panic "vecElemType: not a vector"
isVecType :: CmmType -> Bool
isVecType (CmmType (VecCat {}) _) = True
isVecType _ = False
-------------------------------------------------------------------------
-- Hints
-- Hints are extra type information we attach to the arguments and
-- results of a foreign call, where more type information is sometimes
-- needed by the ABI to make the correct kind of call.
data ForeignHint
= NoHint | AddrHint | SignedHint
deriving( Eq )
-- Used to give extra per-argument or per-result
-- information needed by foreign calling conventions
-------------------------------------------------------------------------
-- These don't really belong here, but I don't know where is best to
-- put them.
rEP_CostCentreStack_mem_alloc :: DynFlags -> CmmType
rEP_CostCentreStack_mem_alloc dflags
= cmmBits (widthFromBytes (pc_REP_CostCentreStack_mem_alloc pc))
where pc = sPlatformConstants (settings dflags)
rEP_CostCentreStack_scc_count :: DynFlags -> CmmType
rEP_CostCentreStack_scc_count dflags
= cmmBits (widthFromBytes (pc_REP_CostCentreStack_scc_count pc))
where pc = sPlatformConstants (settings dflags)
rEP_StgEntCounter_allocs :: DynFlags -> CmmType
rEP_StgEntCounter_allocs dflags
= cmmBits (widthFromBytes (pc_REP_StgEntCounter_allocs pc))
where pc = sPlatformConstants (settings dflags)
rEP_StgEntCounter_allocd :: DynFlags -> CmmType
rEP_StgEntCounter_allocd dflags
= cmmBits (widthFromBytes (pc_REP_StgEntCounter_allocd pc))
where pc = sPlatformConstants (settings dflags)
-------------------------------------------------------------------------
{- Note [Signed vs unsigned]
~~~~~~~~~~~~~~~~~~~~~~~~~
Should a CmmType include a signed vs. unsigned distinction?
This is very much like a "hint" in C-- terminology: it isn't necessary
in order to generate correct code, but it might be useful in that the
compiler can generate better code if it has access to higher-level
hints about data. This is important at call boundaries, because the
definition of a function is not visible at all of its call sites, so
the compiler cannot infer the hints.
Here in Cmm, we're taking a slightly different approach. We include
the int vs. float hint in the CmmType, because (a) the majority of
platforms have a strong distinction between float and int registers,
and (b) we don't want to do any heavyweight hint-inference in the
native code backend in order to get good code. We're treating the
hint more like a type: our Cmm is always completely consistent with
respect to hints. All coercions between float and int are explicit.
What about the signed vs. unsigned hint? This information might be
useful if we want to keep sub-word-sized values in word-size
registers, which we must do if we only have word-sized registers.
On such a system, there are two straightforward conventions for
representing sub-word-sized values:
(a) Leave the upper bits undefined. Comparison operations must
sign- or zero-extend both operands before comparing them,
depending on whether the comparison is signed or unsigned.
(b) Always keep the values sign- or zero-extended as appropriate.
Arithmetic operations must narrow the result to the appropriate
size.
A clever compiler might not use either (a) or (b) exclusively, instead
it would attempt to minimize the coercions by analysis: the same kind
of analysis that propagates hints around. In Cmm we don't want to
have to do this, so we plump for having richer types and keeping the
type information consistent.
If signed/unsigned hints are missing from CmmType, then the only
choice we have is (a), because we don't know whether the result of an
operation should be sign- or zero-extended.
Many architectures have extending load operations, which work well
with (b). To make use of them with (a), you need to know whether the
value is going to be sign- or zero-extended by an enclosing comparison
(for example), which involves knowing above the context. This is
doable but more complex.
Further complicating the issue is foreign calls: a foreign calling
convention can specify that signed 8-bit quantities are passed as
sign-extended 32 bit quantities, for example (this is the case on the
PowerPC). So we *do* need sign information on foreign call arguments.
Pros for adding signed vs. unsigned to CmmType:
- It would let us use convention (b) above, and get easier
code generation for extending loads.
- Less information required on foreign calls.
- MachOp type would be simpler
Cons:
- More complexity
- What is the CmmType for a VanillaReg? Currently it is
always wordRep, but now we have to decide whether it is
signed or unsigned. The same VanillaReg can thus have
different CmmType in different parts of the program.
- Extra coercions cluttering up expressions.
Currently for GHC, the foreign call point is moot, because we do our
own promotion of sub-word-sized values to word-sized values. The Int8
type is represented by an Int# which is kept sign-extended at all times
(this is slightly naughty, because we're making assumptions about the
C calling convention rather early on in the compiler). However, given
this, the cons outweigh the pros.
-}
| oldmanmike/ghc | compiler/cmm/CmmType.hs | bsd-3-clause | 15,086 | 0 | 11 | 3,373 | 2,796 | 1,498 | 1,298 | 238 | 7 |
module RecordIn4 where
data S = S1 { x :: Int } | S2 { x :: Int } deriving Show
{- map2 xs = map (\y -> y {x = 1}) xs -}
map2 xs = (case ((\ y -> y {x = 1}), xs) of
(f, []) -> []
(f, (x : xs)) -> (f x) : (map f xs))
| SAdams601/HaRe | old/testing/generativeFold/RecordIn4.hs | bsd-3-clause | 251 | 0 | 11 | 97 | 124 | 73 | 51 | 5 | 2 |
module Renaming.C1 where
import Renaming.D1
instance SameOrNot Double where
isSame a b = a ==b
isNotSame a b = a /=b
myFringe:: Tree a -> [a]
myFringe (Leaf x ) = [x]
myFringe (Branch left right) = myFringe left
| mpickering/HaRe | test/testdata/Renaming/C1.hs | bsd-3-clause | 229 | 0 | 7 | 56 | 98 | 51 | 47 | 8 | 1 |
{-| Scans page of Markdown looking for http links. When it finds them, it submits them
to webcitation.org / https://secure.wikimedia.org/wikipedia/en/wiki/WebCite
(It will also submit them to Alexa (the source for the Internet Archive), but Alexa says that
its bots take weeks to visit and may not ever.)
This module employs the archiver daemon <http://hackage.haskell.org/package/archiver> as a library; `cabal install archiver` will install it.
Limitations:
* Only parses Markdown, not ReST or any other format; this is because 'readMarkdown'
is hardwired into it.
* No rate limitation or choking; will fire off all requests as fast as possible.
If pages have more than 20 external links or so, this may result in your IP being temporarily
banned by WebCite. To avoid this, you can use WebArchiverBot.hs instead, which will parse & dump
URLs into a file processed by the archiver daemon (which *is* rate-limited).
By: Gwern Branwen; placed in the public domain -}
module WebArchiver (plugin) where
import Control.Concurrent (forkIO)
import Network.URL.Archiver as A (checkArchive)
import Network.Gitit.Interface (askUser, bottomUpM, liftIO, uEmail, Plugin(PreCommitTransform), Inline(Link))
import Text.Pandoc (defaultParserState, readMarkdown)
plugin :: Plugin
plugin = PreCommitTransform archivePage
-- archivePage :: String -> ReaderT PluginData (StateT Context IO) String
archivePage x = do mbUser <- askUser
let email = case mbUser of
Nothing -> "nobody@mailinator.com"
Just u -> uEmail u
let p = readMarkdown defaultParserState x
-- force evaluation and archiving side-effects
_p' <- liftIO $ bottomUpM (archiveLinks email) p
return x -- note: this is read-only - don't actually change page!
archiveLinks :: String -> Inline -> IO Inline
archiveLinks e x@(Link _ (uln, _)) = forkIO (A.checkArchive e uln) >> return x
archiveLinks _ x = return x
| imuli/gitit | plugins/WebArchiver.hs | gpl-2.0 | 2,029 | 0 | 13 | 451 | 260 | 140 | 120 | 17 | 2 |
module Graphics.UI.Bottle.Widgets.Edges(
makeVertical
) where
import Control.Lens ((^.))
import Control.Monad (mplus)
import Data.List (minimumBy)
import Data.Monoid (Monoid(..))
import Data.Ord (comparing)
import Data.Vector.Vector2 (Vector2(..))
import Graphics.UI.Bottle.Direction (Direction)
import Graphics.UI.Bottle.Rect (Rect(..))
import Graphics.UI.Bottle.Widget (Widget(..))
import Graphics.UI.Bottle.Widgets.StdKeys (DirKeys(..), stdDirKeys)
import qualified Control.Lens as Lens
import qualified Graphics.UI.Bottle.Direction as Direction
import qualified Graphics.UI.Bottle.EventMap as EventMap
import qualified Graphics.UI.Bottle.Rect as Rect
import qualified Graphics.UI.Bottle.Widget as Widget
choose ::
Widget.EnterResult f -> Widget.EnterResult f ->
Direction -> Widget.EnterResult f
choose x _ Direction.Outside = x
choose x y (Direction.PrevFocalArea rect) = chooseRect x y rect
choose x y (Direction.Point pt) = chooseRect x y $ Rect pt 0
chooseRect :: Widget.EnterResult f -> Widget.EnterResult f -> Rect -> Widget.EnterResult f
chooseRect x y rect =
minimumOn (Rect.distance rect . (^. Widget.enterResultRect)) [x, y]
where
minimumOn = minimumBy . comparing
makeVertical :: Widget.Size -> Widget f -> Widget f -> Widget f
makeVertical size top unTranslatedBottom = Widget
{ _wIsFocused = _wIsFocused top || _wIsFocused bottom
, _wSize = size
, _wFrame = _wFrame top `mappend` _wFrame bottom
, _wMaybeEnter = mEnter (_wMaybeEnter top) (_wMaybeEnter bottom)
, _wEventMap = eventMap
, _wFocalArea = maybe (Rect 0 0) _wFocalArea selectedWidget
}
where
mEnter (Just enterTop) (Just enterBottom) =
Just $ \dir -> choose (enterTop dir) (enterBottom dir) dir
mEnter x y = x `mplus` y
selectedWidget
| _wIsFocused top = Just $ addTo (EventMap.Doc ["Navigation", "Move", "down"]) (keysDown stdDirKeys) bottom top
| _wIsFocused bottom = Just $ addTo (EventMap.Doc ["Navigation", "Move", "up"]) (keysUp stdDirKeys) top bottom
| otherwise = Nothing
mkKeys = map $ EventMap.ModKey EventMap.noMods
eventMap = maybe mempty _wEventMap selectedWidget
addTo doc ks other me =
maybe id
(Widget.weakerEvents . mkEventMap me doc (mkKeys ks))
(_wMaybeEnter other) me
mkEventMap me doc keys enterOther =
EventMap.keyPresses keys doc . (^. Widget.enterResultEvent) . enterOther .
Direction.PrevFocalArea $ _wFocalArea me
bottom = Widget.translate (Vector2 0 (max topHeight bottomsTop)) unTranslatedBottom
topHeight = _wSize top ^. Lens._2
bottomHeight = _wSize unTranslatedBottom ^. Lens._2
bottomsTop = size ^. Lens._2 - bottomHeight
| sinelaw/lamdu | bottlelib/Graphics/UI/Bottle/Widgets/Edges.hs | gpl-3.0 | 2,665 | 0 | 12 | 463 | 893 | 487 | 406 | 55 | 2 |
{-# LANGUAGE ForeignFunctionInterface, CPP #-}
-- Test the LANGUAGE pragma
module ShouldCompile where
#if 1
foreign import ccall "foo" foo :: Int -> IO Int
#endif
| wxwxwwxxx/ghc | testsuite/tests/parser/should_compile/read039.hs | bsd-3-clause | 164 | 0 | 7 | 27 | 27 | 17 | 10 | 3 | 0 |
{-# LANGUAGE MultiParamTypeClasses, FlexibleInstances #-}
-- !!! One method class from Sergey Mechveliani
-- showed up problematic newtype dict rep.
module Main where
import Data.Ratio
class MBConvertible a b where cm :: a -> b -> Maybe b
c :: MBConvertible a b => a -> b -> b
c a b = case cm a b
of
Just b' -> b'
_ -> error "c a b failed"
instance MBConvertible Int Int where cm a _ = Just a
instance (MBConvertible a b,Integral b) => MBConvertible a (Ratio b)
where
cm a f = case cm a (numerator f) of Just a' -> Just (a'%1)
_ -> Nothing
main = let f = 1%1 :: Ratio Int
n2 = 2::Int
g = (c n2 f) + f
in
putStr (shows g "\n")
| urbanslug/ghc | testsuite/tests/typecheck/should_run/tcrun003.hs | bsd-3-clause | 902 | 0 | 11 | 409 | 266 | 134 | 132 | 16 | 2 |
{-|
Module: Flaw.UI.DefaultStyle.Data
Description: Embedded data for default style.
License: MIT
-}
{-# LANGUAGE TemplateHaskell #-}
module Flaw.UI.DefaultStyle.Data
( loadFontData
) where
import qualified Data.ByteString as B
import qualified Data.ByteString.Lazy as BL
import Flaw.Build
loadFontData :: IO B.ByteString
loadFontData = $(embedIOExp =<< BL.toStrict <$> loadFile "src/DejaVuSans.ttf")
| quyse/flaw | flaw-ui-default-style-data/Flaw/UI/DefaultStyle/Data.hs | mit | 409 | 0 | 9 | 55 | 69 | 43 | 26 | 8 | 1 |
{-# htermination (fromIntegral :: MyInt -> Float) #-}
import qualified Prelude
data MyBool = MyTrue | MyFalse
data List a = Cons a (List a) | Nil
data Float = Float MyInt MyInt ;
data Integer = Integer MyInt ;
data MyInt = Pos Nat | Neg Nat ;
data Nat = Succ Nat | Zero ;
primIntToFloat :: MyInt -> Float;
primIntToFloat x = Float x (Pos (Succ Zero));
primIntegerToFloat :: Integer -> Float;
primIntegerToFloat (Integer x) = primIntToFloat x;
fromIntegerFloat :: Integer -> Float
fromIntegerFloat = primIntegerToFloat;
pt :: (c -> b) -> (a -> c) -> a -> b;
pt f g x = f (g x);
toIntegerMyInt :: MyInt -> Integer
toIntegerMyInt x = Integer x;
fromIntegral = pt fromIntegerFloat toIntegerMyInt;
| ComputationWithBoundedResources/ara-inference | doc/tpdb_trs/Haskell/basic_haskell/fromIntegral_2.hs | mit | 729 | 0 | 9 | 161 | 252 | 141 | 111 | 18 | 1 |
{-# LANGUAGE TemplateHaskell
, TypeFamilies
, OverloadedStrings #-}
module FirewallModel (
Rule(..)
, View(..)
) where
import Generics.BiGUL.TH
import GHC.Generics
import Data.Aeson
data View = View {
rules :: [Rule]
} deriving (Show, Eq)
data Rule = Rule {
ruleID :: String
, securityGroupRefFrom :: String
, securityGroupRefTo :: String
, port :: String
, protocol :: String
} deriving (Show, Eq)
instance FromJSON View where
parseJSON (Object v) = View <$>
v .: "rules"
-- A non-Object value is of the wrong type, so fail.
parseJSON _ = mempty
instance ToJSON View where
-- this generates a Value
toJSON (View rules) =
object ["rules" .= rules]
instance FromJSON Rule where
parseJSON (Object v) = Rule <$>
v .: "ruleID" <*>
v .: "securityGroupRefFrom" <*>
v .: "securityGroupRefTo" <*>
v .: "port" <*>
v .: "protocol"
-- A non-Object value is of the wrong type, so fail.
parseJSON _ = mempty
instance ToJSON Rule where
-- this generates a Value
toJSON (Rule ruleID securityGroupRefFrom securityGroupRefTo port protocol) =
object ["ruleID" .= ruleID
, "securityGroupRefFrom" .= securityGroupRefFrom
, "securityGroupRefTo" .= securityGroupRefTo
, "port" .= port
, "protocol" .= protocol]
deriveBiGULGeneric ''View
deriveBiGULGeneric ''Rule
| prl-tokyo/MAPE-knowledge-base | Haskell/views/FirewallModel.hs | mit | 1,572 | 0 | 15 | 523 | 344 | 189 | 155 | -1 | -1 |
-- Caffeine Script
-- http://www.codewars.com/kata/5434283682b0fdb0420000e6
module Codewars.Kata.Caffeine where
caffeineBuzz :: Integer -> String
caffeineBuzz n | n `mod` 12 == 0 = "CoffeeScript"
| n `mod` 6 == 0 = "JavaScript"
| n `mod` 3 == 0 = "Java"
| otherwise = "mocha_missing!"
| gafiatulin/codewars | src/7 kyu/Caffeine.hs | mit | 334 | 0 | 9 | 92 | 89 | 48 | 41 | 6 | 1 |
{-# LANGUAGE DeriveFunctor #-}
{-# LANGUAGE ViewPatterns #-}
module Snipcheck where
import Control.Monad
import Control.Exception
import Control.Monad.IO.Class
import Data.Char (isSpace)
import Data.List (dropWhileEnd)
import Data.Maybe
import System.Process(readCreateProcess, shell)
import Text.Pandoc (Block(..))
import qualified Data.Text.IO as Text
import qualified Data.Map as Map
import qualified Text.Pandoc as Pandoc
data Sloppy a = Skip | Must a deriving (Show, Functor)
sloppyString :: String -> Sloppy String
sloppyString "..." = Skip
sloppyString str = Must str
checkSloppy :: Eq a => [a] -> [Sloppy a] -> Bool
checkSloppy (a:as) (Must a':as')
| a == a' = checkSloppy as as'
| otherwise = False
checkSloppy (a:as) as'@(Skip:Must a':as'')
| a == a' = checkSloppy as as''
| otherwise = checkSloppy as as'
checkSloppy as (Skip:Skip:as') = checkSloppy as (Skip:as')
checkSloppy [] (Must{}:_) = False
checkSloppy [] (Skip:as') = checkSloppy [] as'
checkSloppy [] [] = True
checkSloppy (_:_) [] = False
checkSloppy _ [Skip] = True
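-- A couple of worked examples of the sloppy matching:
--   checkSloppy ["a","b","c"] [Must "a", Skip, Must "c"] == True
--   checkSloppy ["a","b"]     [Must "a", Must "c"]       == False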
checkMarkdownFile :: FilePath -> IO ()
checkMarkdownFile fp = do
content <- Text.readFile fp
eres <- Pandoc.runIO $ do
Pandoc.Pandoc meta blocks <- Pandoc.readMarkdown Pandoc.def content
let
sections = findSections meta
blocks' =
if null sections
then blocks
else filterBlocksBySectionName sections blocks
forM_ blocks' check
case eres of
Right () -> pure ()
Left e -> throwIO $ userError $ show e
data AcceptSection
= GoodSection
| BadSection
| Dunno
filterBlocksBySectionName :: [String] -> [Pandoc.Block] -> [Pandoc.Block]
filterBlocksBySectionName secs = skipThese
where
skipThese, keepThese :: [Pandoc.Block] -> [Pandoc.Block]
skipThese (b:bs) =
case acceptSection b of
GoodSection -> keepThese bs
_ -> skipThese bs
skipThese [] = []
keepThese (b:bs) = b : case acceptSection b of
BadSection -> skipThese bs
_ -> keepThese bs
keepThese [] = []
acceptSection :: Pandoc.Block -> AcceptSection
acceptSection (Pandoc.Header _ (hName,_,_) _)
| hName `elem` secs = GoodSection
| otherwise = BadSection
acceptSection _ = Dunno
findSections :: Pandoc.Meta -> [String]
findSections (Pandoc.unMeta -> meta) =
case Map.lookup "sc_check-sections" meta of
Just (Pandoc.MetaList ss) -> join $ unMetaString <$> ss
_ -> []
where
unMetaString :: Pandoc.MetaValue -> [String]
unMetaString (Pandoc.MetaString s) =[s]
unMetaString (Pandoc.MetaInlines is) = mapMaybe unMetaStr is
unMetaString _ = []
unMetaStr :: Pandoc.Inline -> Maybe String
unMetaStr (Pandoc.Str s) = Just s
unMetaStr _ = Nothing
trim :: String -> String
trim = dropWhile isSpace . dropWhileEnd isSpace
check :: MonadIO m => Pandoc.Block -> m ()
check (CodeBlock (typ, classes, kvs) content)
| "shell" `elem` classes = do
let Right cmds = extractCommands content
forM_ cmds $ \(cmd, expected) -> do
actual <- (fmap trim . lines) <$> liftIO (readCreateProcess (shell cmd) "")
let expected' = (sloppyString . trim) <$> expected
unless (checkSloppy actual expected') $ error $ mconcat
[ "Couldnt match expected ", show expected'
, " with " <> show actual
]
| otherwise = liftIO $ print (typ, classes, kvs)
check _ = return ()
extractCommands :: String -> Either String [(String, [String])]
extractCommands str = go (lines str)
where
go :: [String] -> Either String [(String, [String])]
go (l:ls) | Just cmd <- toCommand l =
let (output, rest) = break isCommand ls
in ((cmd,output):) <$> go rest
| otherwise = Left $ "Expected a command, got " <> l
go [] = Right []
toCommand :: String -> Maybe String
toCommand ('$':cmd) = Just cmd
toCommand _ = Nothing
isCommand :: String -> Bool
isCommand = isJust . toCommand
someFunc :: IO ()
someFunc = putStrLn "someFunc"
| nmattia/snipcheck | src/Snipcheck.hs | mit | 4,018 | 0 | 18 | 932 | 1,521 | 779 | 742 | 108 | 6 |
import Avus.Scan
import Criterion.Types
import Criterion.Main
noopProcessData :: FilePath -> IO ()
noopProcessData fp = processData (Just fp) (Just "null") $
processVuln (\_ b -> return b) -- noop update functions
(\_ t -> return t)
(\_ e -> return e)
main :: IO ()
main = defaultMainWith
(defaultConfig {reportFile = Just "noop-criterion.html"})
[ bgroup "noop"
[ bench "noopProcessData 20" $ nfIO (noopProcessData "benchmark/sample.csv")
, bench "noopProcessData 200" $ nfIO (noopProcessData "benchmark/sample200.csv")
]
]
| srenatus/avus | benchmark/AvusBenchmark.hs | mit | 576 | 0 | 12 | 125 | 186 | 95 | 91 | 14 | 1 |
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE TemplateHaskell #-}
module Main where
import Control.Applicative ((<|>))
import Text.Trifecta
import Data.ByteString (ByteString)
import qualified Data.ByteString.Char8 as BC
import Data.FileEmbed (embedFile)
input :: ByteString
input = $(embedFile "input.txt")
data Input
= Plain ByteString
| Compressed { _reps :: Int
, _string :: ByteString}
aXb :: Parser (Int, Int)
aXb =
parens $
do a <- natural
_ <- char 'x'
b <- natural
return (fromIntegral a, fromIntegral b)
compressed1 :: Parser Int
compressed1 = do
(c,r) <- aXb
s <- count c anyChar
return $ r * length s
plain :: Parser Int
plain = length <$> some (notChar '(')
parse :: Parser Int -> Parser Int -> ByteString -> [Int]
parse f g b =
case parseByteString (many $ f <|> g) mempty b of
Success is -> is
Failure ei -> fail . show $ ei
decompress1 :: ByteString -> Int
decompress1 = sum . parse compressed1 plain
test1in :: [ByteString]
test1in =
[ "ADVENT"
, "A(1x5)BC"
, "(3x3)XYZ"
, "A(2x2)BCD(2x2)EFG"
, "(6x1)(1x3)A"
, "X(8x2)(3x3)ABCY"]
test1out :: [ByteString]
test1out =
[ "ADVENT"
, "ABBBBBC"
, "XYZXYZXYZ"
, "ABCBCDEFEFG"
, "(1x3)A"
, "X(3x3)ABC(3x3)ABCY"]
test1 :: Bool
test1 = and $ zipWith (==) (map decompress1 test1in) (map BC.length test1out)
part1 :: Int
part1 = decompress1 input
compressed2 :: Parser Int
compressed2 = do
(c,r) <- aXb
s <- count c anyChar
return $ r * (sum . parse compressed2 plain $ BC.pack s)
decompress2 :: ByteString -> Int
decompress2 = sum . parse compressed2 plain
test2in :: [ByteString]
test2in =
[ "(3x3)XYZ"
, "X(8x2)(3x3)ABCY"
, "(27x12)(20x12)(13x14)(7x10)(1x12)A"
, "(25x3)(3x3)ABC(2x3)XY(5x2)PQRSTX(18x9)(3x2)TWO(5x7)SEVEN"]
test2out :: [Int]
test2out =
[BC.length "XYZXYZXYZ", BC.length "XABCABCABCABCABCABCY", 241920, 445]
test2 :: Bool
test2 = and $ zipWith (==) (map decompress2 test2in) test2out
part2 :: Int
part2 = decompress2 input
main :: IO ()
main = do
print test1
print part1
print test2
print part2
| genos/online_problems | advent_of_code_2016/day9/src/Main.hs | mit | 2,172 | 0 | 11 | 512 | 699 | 374 | 325 | 81 | 2 |
{-# LANGUAGE LambdaCase, NamedFieldPuns, OverloadedStrings #-}
module Main (main) where
import Devil.Config
import Devil.Daemons
import Options.Applicative
import qualified Data.Text as T
import qualified Devil.Log as Log
data Params = Params {
configFile :: String
} deriving (Show,Eq)
params' :: Parser Params
params'
= Params
<$> strOption
( long "config"
<> short 'c'
<> metavar "CONFIG"
<> help "Config file")
params :: ParserInfo Params
params = info (helper <*> params')
( progDesc "Small (and silly!) `daemon` manager"
)
main :: IO ()
main = do
Log.logThread
go =<< execParser params where
go (Params{configFile}) =
loadConfig configFile >>= \case
Left err -> do
Log.error_d "CONFIG" (T.pack $ show err)
Right cfg ->
runDaemons cfg
| EXio4/devil | src/Main.hs | mit | 961 | 0 | 16 | 333 | 246 | 128 | 118 | 31 | 2 |
{-# LANGUAGE QuasiQuotes #-}
module Minesweeper where
import Prelude hiding (map, zipWith)
import Data.Maybe (fromJust)
import qualified Data.Vector as V
import Data.Array.Repa as R hiding ((++))
import Data.Array.Repa.Repr.Vector
import Data.Array.Repa.Stencil
import Data.Array.Repa.Stencil.Dim2
import Data.Array.Repa.Algorithms.Randomish
import System.Random
-- the danger rating of a minesweeper square
-- I think that the Danger type is isomorphic to Maybe Int
-- if it helps me to have a monad/functor/applicative instance
-- for danger, I can just use Maybe's implementation and it should
-- work.
data Danger = Mine | Danger Int deriving Eq
-- a square keeps track of if it is revealed, as well as the danger rating
-- of the location it is in
data Square = Square { getDanger :: Danger
, isRevealed :: Bool
} deriving Eq
emptySquare = Square (Danger 0) False
-- For minesweeper, there are a couple of things that we need to keep track of
-- First, we need the locations of the mines
-- We also need which squares are revealed at any given time
-- the numbers are calculable, but I think that we should calculate them
-- once at the beginning and then store them
-- the field is row-major
-- note: with the new repa representation, the width and height of the array
-- are encoded in the array as the shape
data Field = Field { getField :: Array V DIM2 Square }
instance Show Danger where
show Mine = "*"
  -- any danger level is guaranteed to be 1 digit, because the most neighbors
-- any square can have is 8
--show (Danger 0) = " "
show (Danger d) = show d
instance Show Square where
show (Square d True) = show d
show (Square _ False) = " "
--debug show
--show (Square d _) = show d
instance Show Field where
show (Field f) = let (Z :. x :. y) = extent f in
V.foldl1' (++)
$ V.imap (\i a -> if i /= 0 then (if mod i y == 0 then '\n' else ' ') : show a else show a)
$ toVector f
-- take n unique values from a list; takeUnique' threads the already-seen
-- values as its first argument
takeUnique :: (Eq n, Eq a, Num n) => n -> [a] -> [a]
takeUnique = takeUnique' []
takeUnique' :: (Eq n, Eq a, Num n) => [a] -> n -> [a] -> [a]
takeUnique' seen 0 _ = seen
takeUnique' seen n (x:xs) | x `notElem` seen = takeUnique' (x:seen) (n-1) xs
| otherwise = takeUnique' seen n xs
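-- e.g. takeUnique 2 [1,1,2,3] == [2,1]; results come back most-recently-seen
-- first, because the accumulator is built by consing.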
-- generate a new field by drawing unique flat (row-major) indices from a single rng
generateField :: (RandomGen g)
=> g -- random number generator
-> Int -- height of minefield
-> Int -- width of minefield
-> Int -- Number of mines
-> Field
generateField g x y n = updateDangers $ Field f
where rs = takeUnique n $ randomRs (0, (x * y) - 1) g
fv = V.replicate (x * y) emptySquare V.// zip rs (repeat $ Square Mine False)
f = fromVector (Z :. x :. y) fv
updateDangers :: Field -> Field
updateDangers (Field f) = Field
. computeVectorS -- convert from delayed array back to vector array
. zipWith updateDanger f -- update the field with the new dangers we found
. mapStencil2 (BoundConst 0) stencil -- perform the convolution
. map (\s->if isMine s then 1 else 0) -- turn mines into 1s and others into 0s
$ f
where stencil = [stencil2|1 1 1
1 0 1
1 1 1|]
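-- For example, a single mine in the centre of a 3x3 field convolves to a
-- neighbour count of 1 in each of the eight surrounding squares; the mine
-- square itself is left untouched because updateDanger ignores mines.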
updateDanger :: Square -> Int -> Square
updateDanger (Square (Danger _) r) d = Square (Danger d) r
updateDanger s _ = s
isMine :: Square -> Bool
isMine (Square Mine _) = True
isMine _ = False
isRevealedMine :: Square -> Bool
isRevealedMine (Square Mine True) = True
isRevealedMine _ = False
isMineOrRevealed :: Square -> Bool
isMineOrRevealed (Square Mine False) = True
isMineOrRevealed (Square (Danger _) True) = True
isMineOrRevealed _ = False
-- reveal spot
-- coordinates are measured from top left of grid
-- the basic reveal just sets the revealed flag for that spot to True and
-- returns the updated Field; whether a mine was hit is checked separately
-- with hitMine below.
-- Past that, you could reasonably flood-fill the revealed area when there
-- are no mines in the surrounding squares, as most playable implementations
-- do, but we leave that task to the consumer of the game and keep this
-- module a "recorder of events" of sorts.
reveal :: Int -- x value of spot to reveal
-> Int -- y value of spot to reveal
-> Field -- field to reveal spot on
-> Field -- no computation is done to imply whether or not a mine was hit
reveal x y (Field f) = Field (fromJust (computeP (R.traverse f id (update x y))))
where update x y square s@(Z :. sx :. sy) | x == sx && y == sy = (square s){isRevealed = True}
update x y square s = square s
hitMine :: Field -> Bool
hitMine = fromJust . foldAllP (||) False . R.map isRevealedMine . getField
solved :: Field -> Bool
solved = fromJust . foldAllP (&&) True . R.map isMineOrRevealed . getField
| sdemos/minesweeper | src/Minesweeper.hs | mit | 5,242 | 0 | 18 | 1,505 | 1,229 | 673 | 556 | 76 | 2 |
sumtorial :: Integer -> Integer
sumtorial 0 = 0
sumtorial n = n + sumtorial (n - 1)
| martindavid/code-sandbox | haskell/exercises/exercise1.hs | mit | 84 | 0 | 8 | 18 | 40 | 20 | 20 | 3 | 1 |
module Main where
import Test.Framework (defaultMain)
import Language.Swift.Tests (tests)
main :: IO ()
main = defaultMain
[ tests
]
| CodaFi/language-swift | tests/Tests.hs | mit | 144 | 0 | 6 | 30 | 46 | 27 | 19 | 6 | 1 |
module Reader where
import Control.Applicative ((<$>), (<*>))
import Text.ParserCombinators.Parsec
import Text.ParserCombinators.Parsec.Language
import Text.ParserCombinators.Parsec.Token
import LispData
import Numbers
-- | parses lisp code from a string and returns either the parsed code or an error message
reader :: String -> Either String LispVal
reader str = case parse parser "Lisp" str of
Left err -> Left (show err)
Right res -> Right res
where parser = const . makeForms <$> many lispParser <*> eof
makeForms [form] = form
makeForms forms = List (Symbol "begin" : forms)
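-- An illustrative example (assumed behaviour, not exercised in this module):
--   reader "(+ 1 2)" should yield
--     Right (List [Symbol "+", Number (LispInt 1), Number (LispInt 2)])
-- while several top-level forms are wrapped in a (begin ...) list.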
-- | token-parser (lexer) configuration for Lisp source
lexer :: TokenParser ()
lexer = makeTokenParser LanguageDef
{ commentStart = "#|"
, commentEnd = "|#"
, commentLine = ";"
, nestedComments = True
, identStart = symChar
, identLetter = symChar
, opStart = oneOf ""
, opLetter = oneOf ""
, reservedNames = []
, reservedOpNames = []
, caseSensitive = False
}
where symChar = alphaNum <|> oneOf "?+*~#-_.:=&%$!^<>|/"
-- | parses lisp
lispParser :: Parser LispVal
lispParser = do
whiteSpace lexer
val <- try numberParser <|>
try boolParser <|>
symbolParser <|>
stringParser <|>
listParser <|>
quoteParser <|>
quasiQuoteParser <|>
unquoteParser
whiteSpace lexer
return val
symbolParser :: Parser LispVal
symbolParser = do
name <- identifier lexer
return $ if name == "nil"
then Nil
else Symbol name
-- TODO: nice literals for rationals and complex numbers
numberParser :: Parser LispVal
numberParser = do
sign <- optionMaybe (oneOf "-+")
let mySign = case sign of
Just '-' -> -1
_ -> 1
number <- naturalOrFloat lexer
return $ Number $ case number of
Left i -> LispInt (mySign * i)
Right f -> LispFloat (fromIntegral mySign * f)
stringParser :: Parser LispVal
stringParser = LispString <$> stringLiteral lexer
listParser :: Parser LispVal
listParser = (\_ content _ -> List content) <$> char '(' <*> many lispParser <*> char ')'
quoteParser :: Parser LispVal
quoteParser = (\_ val -> List [Symbol "quote", val]) <$> char '\'' <*> lispParser
quasiQuoteParser :: Parser LispVal
quasiQuoteParser = (\_ val -> List [Symbol "quasiquote", val]) <$> char '`' <*> lispParser
unquoteParser :: Parser LispVal
unquoteParser = (\_ val -> List [Symbol "unquote", val]) <$> char ',' <*> lispParser
boolParser :: Parser LispVal
boolParser = char '#' >> ((char 't' >> return (Boolean True)) <|> (char 'f' >> return (Boolean False)))
| orion-42/my-lisp | Reader.hs | mit | 2,630 | 0 | 15 | 648 | 792 | 408 | 384 | 69 | 3 |
{-# LANGUAGE DeriveDataTypeable #-}
module Base.CLI (ProgramOptions(..), Action(..), usage, newCommands, standard, module System.Console.CmdArgs) where
import System.Console.CmdArgs
import Base.Common
data Action = Format | ListCommands
deriving (Show, Typeable, Data)
instance Default Action where
def = Format
data ProgramOptions = ProgramOptions
{ agda_mode :: Bool
, action :: Action
, input :: [FilePath]
, output :: FilePath
} deriving (Show, Data, Typeable)
usage :: String
usage = unlines
[ programName ++" "++ programVersion ++" - A lhs2TeX Syntax Colouring preprocessor"
, "Consult the README file for extra information or visit:\n"
, " https://github.com/spockz/lhs2texhl"
, " and "
, " http://alessandrovermeulen.me/projects/lhs2texhl\n"
, "Copyright 2010, Alessandro Vermeulen <me@alessandrovermeulen.me>" ]
newCommands :: String
newCommands = unlines [
"\\newcommand{\\lhsCHfunction}[1]{\\color{infixoperator}{{#1}}}",
"\\newcommand{\\lhsCHinfixoperator}[1]{\\color{infixoperator}{{#1}}}",
"\\newcommand{\\lhsCHprelude}[1]{\\color{prelude}{{#1}}}",
"\\newcommand{\\lhsCHkeyword}[1]{\\color{keyword}{{#1}}}",
"\\newcommand{\\lhsCHconstructor}[1]{\\color{constructor}{{#1}}}",
"\\newcommand{\\lhsCHtype}[1]{\\color{datatype}{{#1}}}",
"\\newcommand{\\lhsCHsyntax}[1]{\\color{syntax}{{#1}}}",
"\\newcommand{\\lhsCHclass}[1]{\\color{class}{{#1}}}",
"\\newcommand{\\lhsCHconstant}[1]{\\color{constant}{{#1}}}"
]
-- | Standard command line options.
--
standard = cmdArgsMode $ ProgramOptions
{
agda_mode = def &= help "Run in agda-mode!"
, action = (def &= help "What should the program do? Format|ListCommands.") &= typ "Action"
, output = (def &= help "Output file") &= typFile
, input = (def &= args )
} &= summary usage
| spockz/lhs2texhl | src/Base/CLI.hs | mit | 1,973 | 0 | 12 | 421 | 311 | 185 | 126 | 39 | 1 |
{-# LANGUAGE CPP #-}
module GHCJS.DOM.SVGFESpotLightElement (
#if (defined(ghcjs_HOST_OS) && defined(USE_JAVASCRIPTFFI)) || !defined(USE_WEBKIT)
module GHCJS.DOM.JSFFI.Generated.SVGFESpotLightElement
#else
#endif
) where
#if (defined(ghcjs_HOST_OS) && defined(USE_JAVASCRIPTFFI)) || !defined(USE_WEBKIT)
import GHCJS.DOM.JSFFI.Generated.SVGFESpotLightElement
#else
#endif
| plow-technologies/ghcjs-dom | src/GHCJS/DOM/SVGFESpotLightElement.hs | mit | 376 | 0 | 5 | 33 | 33 | 26 | 7 | 4 | 0 |
module GraphDB.Util.Prelude.TH
(
module Exports,
purify,
tryToReify,
isInstance',
isProperInstance',
)
where
import GraphDB.Util.Prelude hiding (Fixity)
import Language.Haskell.TH as Exports
import Language.Haskell.TH.Syntax as Exports
import THInstanceReification as Exports
purify :: Q a -> a
purify = unsafePerformIO . runQ
tryToReify :: Name -> Q (Maybe Info)
tryToReify n = recover (return Nothing) (fmap Just $ reify n)
isInstance' :: Name -> [Type] -> Q Bool
isInstance' name types = recover (return False) (isInstance name types)
isProperInstance' :: Name -> [Type] -> Q Bool
isProperInstance' name types = recover (return False) (isProperInstance name types)
| nikita-volkov/graph-db | library/GraphDB/Util/Prelude/TH.hs | mit | 686 | 0 | 8 | 110 | 228 | 127 | 101 | 19 | 1 |
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE ScopedTypeVariables #-}
module Server.Main where
import System.Directory
import qualified Text.Blaze.Html.Renderer.Text as H
import Text.Pandoc
import System.FilePath
import qualified Data.ByteString as B
import qualified Data.ByteString.Lazy as BL
import qualified Data.Text as T
import qualified Data.Text.Lazy as LT
import qualified Data.Text.Lazy.Encoding as LT
import qualified Data.Set as S
import Text.Pandoc.Walk
import Control.Monad.Reader
import Config
import Utils
import API
import Servant
import Control.Monad.Error.Class
import Text.Pandoc.CrossRef
import Data.Monoid ((<>))
import Text.Pandoc.Builder
import Data.Generics
import Crypto.Hash (hash, Digest, SHA1)
import Data.Char (isAlphaNum)
import Control.Exception
import System.IO.Error
mainServer :: ServerT MainAPI ConfigHandler
mainServer (_ :: User)
= listProjects
:<|> createProject
:<|> deleteProject
:<|> render
:<|> update
:<|> appendChunk
:<|> getSource
:<|> fileList
:<|> deleteFile
:<|> uploadFile
:<|> renderDocx
mdOpts :: WriterOptions
mdOpts = def {
writerSetextHeaders = False
, writerExtensions = S.delete Ext_simple_tables pandocExtensions
}
htmlOpts :: WriterOptions
htmlOpts = def{
writerHtml5 = True
, writerHTMLMathMethod = MathJax ""
}
handlePandocError :: (MonadError ServantErr m) => Either PandocError Pandoc -> m Pandoc
handlePandocError (Left err) = throwError err500{ errBody = LT.encodeUtf8 $ LT.pack $ show err }
handlePandocError (Right res) = return res
getBody :: (MonadIO m, MonadError ServantErr m, MonadReader Config m) => FilePath -> m Pandoc
getBody name = do
validateName name
dataDirectory <- asks configDataDir
everywhere (mkT splitMath) <$> (handlePandocError . readMarkdown def
=<< liftIO (readFile (dataDirectory </> name </> "index.md")))
splitMath :: [Block] -> [Block]
splitMath (Para ils:xs)
| length ils > 1 = map Para (split ils) ++ xs
where
split ys =
let bef = takeWhile (not . isMath) ys
rest = drop (length bef) ys
m = takeWhile (not . isSpace) rest
af = drop (length m) rest
in filter (not . null) [bef, m, af]
isMath (Math DisplayMath _) = True
isMath (Span _ [Math DisplayMath _]) = True
isMath _ = False
isSpace Space = True
isSpace SoftBreak = True
isSpace _ = False
splitMath xs = xs
createProject :: FilePath -> T.Text -> ConfigHandler ()
createProject name content = do
projects <- liftIO . listDirectory =<< asks configDataDir
when (name `elem` projects) $ throwError err409
unless (all isAlphaNum name) $ throwError err400
dataDir <- asks configDataDir
liftIO $ do
createDirectory (dataDir </> name)
BL.writeFile (dataDir </> name </> "index.md") $ LT.encodeUtf8 $ LT.fromStrict content
deleteProject :: FilePath -> ConfigHandler ()
deleteProject name = do
validateName name
dataDir <- asks configDataDir
liftIO $ removeDirectoryRecursive (dataDir </> name)
render :: FilePath -> ConfigHandler [Chunk]
render name = do
Pandoc meta body <- getBody name
uri <- asks configDataUri
let modImgs (Image a t (src, tit)) = Image a t (uri </> name </> src, tit)
modImgs x = x
let body' = runCrossRef (crossRefSettings <> meta) Nothing crossRefBlocks . wrapDiv $ walk modImgs body
return $ zipWith3 (mkChunk meta) body body' [0..]
where
wrapDiv = map (Div nullAttr . return)
mkChunk meta blold bl idx = Chunk {
chunkHtml = H.renderHtml $ writeHtml htmlOpts $ Pandoc meta [bl]
, chunkSrc = T.pack $ writeMarkdown mdOpts $ Pandoc meta [blold]
, chunkNum = idx
}
renderDocx :: FilePath -> ConfigHandler FileData
renderDocx name = do
validateName name
dataDirectory <- asks configDataDir
newcmdFile <- asks configNewCmdFile
newcommands <-
either (const "") id <$>
maybe (return $ Left ())
(liftIO . tryJust (guard . isDoesNotExistError) . readFile)
newcmdFile
Pandoc meta body <- handlePandocError . readMarkdown def . (newcommands <>)
=<< liftIO (readFile (dataDirectory </> name </> "index.md"))
let modImgs (Image a t (src, tit)) = Image a t (dataDirectory </> name </> src, tit)
modImgs x = x
let body' = runCrossRef (crossRefSettings <> meta) Nothing crossRefBlocks $ walk modImgs body
FileData <$> liftIO (writeDocx def (Pandoc meta body'))
crossRefSettings :: Meta
crossRefSettings =
chapters True
<> numberSections True
<> sectionsDepth "3"
<> chaptersDepth "1"
<> figureTitle (str "Рисунок")
<> tableTitle (str "Таблица")
<> listingTitle (str "Листинг")
<> figPrefix [str "рис."]
<> eqnPrefixTemplate (str "(" <> var "i" <> str ")")
<> tblPrefix [str "табл."]
<> lstPrefix [str "лист."]
<> secPrefix [str "разд."]
<> lofTitle (header 1 $ text "Список рисунков")
<> lotTitle (header 1 $ text "Список таблиц")
<> lolTitle (header 1 $ text "Список листингов")
-- <> autoEqnLabels True
<> subfigGrid True
<> linkReferences True
where var = displayMath
update :: FilePath -> Int -> T.Text -> ConfigHandler ()
update name chunk mdbody = do
Pandoc meta body <- getBody name
dataDirectory <- asks configDataDir
validateChunk chunk (length body)
Pandoc _ newChunkBody <- handlePandocError $ readMarkdown def $ T.unpack mdbody
let (b1, _:b2) = splitAt chunk body
body' = b1 ++ newChunkBody ++ b2
liftIO
$ writeFile (dataDirectory </> name </> "index.md")
$ writeMarkdown mdOpts $ Pandoc meta body'
appendChunk :: FilePath -> T.Text -> ConfigHandler ()
appendChunk name mdbody = do
Pandoc meta body <- getBody name
dataDirectory <- asks configDataDir
Pandoc _ newChunkBody <- handlePandocError $ readMarkdown def $ T.unpack mdbody
let body' = body ++ newChunkBody
liftIO
$ writeFile (dataDirectory </> name </> "index.md")
$ writeMarkdown mdOpts $ Pandoc meta body'
getSource :: FilePath -> Int -> ConfigHandler T.Text
getSource name chunk = do
Pandoc meta body <- getBody name
validateChunk chunk (length body)
let c = [body !! chunk]
return $ T.pack $ writeMarkdown mdOpts (Pandoc meta c)
listProjects :: ConfigHandler [FilePath]
listProjects = do
dataDirectory <- asks configDataDir
liftIO $ listDirectory dataDirectory
uploadFile :: FilePath -> FileData -> ConfigHandler T.Text
uploadFile name (FileData content) = do
validateName name
dataDirectory <- asks configDataDir
let content' = BL.toStrict content
filename = show (hash content' :: Digest SHA1)
liftIO $ B.writeFile (dataDirectory </> name </> filename) content'
return $ T.pack filename
fileList :: FilePath -> ConfigHandler [FileInfo]
fileList name = do
validateName name
dataDirectory <- asks configDataDir
uriBase <- asks configDataUri
map (mkFI uriBase) . filter (/= "index.md") <$> liftIO (listDirectory (dataDirectory </> name))
where
mkFI uriBase fn = FileInfo {
fileName = T.pack fn
, fileURI = T.pack $ uriBase </> name </> fn
}
deleteFile :: FilePath -> FilePath -> ConfigHandler ()
deleteFile proj fn = do
validateFile proj fn
dataDirectory <- asks configDataDir
liftIO $ removeFile (dataDirectory </> proj </> fn)
| lierdakil/markco | server/src/Server/Main.hs | mit | 7,255 | 0 | 21 | 1,428 | 2,497 | 1,237 | 1,260 | 189 | 5 |
{-# LANGUAGE OverloadedStrings #-}
module HailsRock.Views where
import Prelude hiding (div, span, head, id)
import Data.Maybe (isJust, fromJust)
import qualified Data.ByteString.Lazy.Char8 as L8
import qualified Data.Text as T
import Text.Blaze.Html5 hiding (Tag, map)
import Text.Blaze.Html5.Attributes hiding ( label, form, span
, title, style )
import qualified Text.Blaze.Html5.Attributes as A
import Text.Blaze.Html.Renderer.Utf8
import Control.Monad (forM_, when)
import Hails.Web hiding (body)
import Hails.HttpServer.Types
import HailsRock.MP
respondHtml :: Html -> Response
respondHtml content = okHtml $ renderHtml $ docTypeHtml $ do
head $ do
title "HailsRock"
meta ! charset "utf-8"
link ! rel "stylesheet"
! type_ "text/css" ! href "/static/css/bootstrap.css"
script ! src "/static/js/jquery-1.10.1.js" $ ""
script ! src "/static/js/bootstrap.js" $ ""
script ! src "/static/js/application.js" $ ""
body $ do
div ! class_ "container-fluid" $ content
welcome :: Maybe UserName -> Html
welcome Nothing = do
h1 $ "Welcome to HailsRock!"
a ! class_ "btn btn-large btn-info"
! href "/login"
$ "Login to play"
welcome (Just usr) = do
h1 $ toHtml $ "Welcome to HailsRock, " ++ T.unpack usr ++ "!"
a ! class_ "btn btn-large btn-primary"
! href "/game/new"
$ "Create a new game"
" "
a ! class_ "btn btn-large"
! href "/game"
$ "Join a game"
newGame :: UserName -> Html
newGame usr = do
h1 $ "Create a new game"
div $ do
form ! action "/game/create" ! method "POST" ! id "newGame"$ do
div $ do
input ! type_ "hidden" ! name "creator"
! value (toValue usr)
div $ do
label ! for "opponent" $ "Opponent (optional):"
input ! type_ "text"
! name "opponent" ! id "opponent"
! placeholder "rick-james"
div ! class_ "btn-group" $ do
input ! type_ "submit" ! class_ "btn" ! value "Create"
listGames :: UserName -> [Game] -> Html
listGames usr gs' = do
-- Get all the games for which the current user is not the creator;
let gs = filter ((/= usr) . creator) gs'
--
h1 $ "Available games"
div $ if null gs
then p $ "Sorry, no games ... :-("
else table ! class_ "table table-hover table-condensed" $ do
thead $ tr $ do
th $ "#"
th $ "Creator"
th $ "Private"
tbody $ do
forM_ (zip [1..] gs) $ \(nr,game) -> do
let tagUrl = "/game/" ++ show (fromJust $ gameId game)
tr ! onclick (toValue $ "location.href=" ++ show tagUrl )$ do
td $ toHtml (nr :: Int)
td $ toHtml $ creator game
td $ when (isJust $ opponent game) $ "1-vs-1"
playGame :: UserName -> Game -> Bool -> Html
playGame usr game True = do
h1 $ "You already played!"
playGame usr game False = do
h1 $ "Make your move..."
div $ do
let gid = show . fromJust . gameId $ game
form ! action (toValue $ "/game/"++gid++"/play")
! method "POST" ! id "newGame"$ do
input ! type_ "hidden" ! name "game"
! value (toValue gid)
input ! type_ "hidden" ! name "player"
! value (toValue usr)
input ! name "move"
! type_ "submit"
! class_ "btn btn-large btn-info"
! value (toValue $ show Rock)
" "
input ! name "move"
! type_ "submit"
! class_ "btn btn-large btn-primary"
! value (toValue $ show Paper)
" "
input ! name "move"
! type_ "submit"
! class_ "btn btn-large btn-inverse"
! value (toValue $ show Scissors)
showStats :: [(UserName, Outcome)] -> Html
showStats stats = do
h1 $ "Your move status"
div $ if null stats
then p $ "Sorry, nobody has played your move... :-("
else table ! class_ "table table-hover table-condensed" $ do
thead $ tr $ do
th $ "#"
th $ "Player"
th $ "Status"
tbody $ do
forM_ (zip [1..] stats) $ \(nr,(p,result)) -> do
tr $ do
td $ toHtml (nr :: Int)
td $ toHtml $ T.unpack p
td $ toHtml $ show result
| scslab/hails | examples/hails-rock/HailsRock/Views.hs | mit | 4,365 | 0 | 27 | 1,434 | 1,388 | 673 | 715 | 118 | 2 |
main :: IO()
main = putStrLn "Hello world!"
add a b = a + b
x = 10
myDrop n xs = if n <= 0 || null xs
then xs
else myDrop (n-1) (tail xs)
data BookInfo = Book Int String [String]
deriving (Show)
data MagzineInfo = Magzine Int String [String]
deriving (Show)
myInfo = Book 9780135072455 "Algebra of Programming"
["Richard Bird", "Oege de Moor"]
type CustomerID = Int
type ReviewBody = String
data BookReview = BookReview BookInfo CustomerID String
type BookRecord = (BookInfo, BookReview)
data Cartesian2D = Cartesian2D Double Double deriving (Eq, Show)
data Polar2D = Polar2D Double Double deriving (Eq, Show)
data Roygbiv = Red
| Orange
| Yellow
| Green
| Blue
| Infigo
| Violet deriving (Eq, Show)
type Vector = (Double, Double)
data Shape = Circle Vector Double
| Ploy [Vector] deriving (Show)
myNot True = False
myNot False = True
sumList (x:xs) = x + sumList xs
sumList [] = 0
third (a, b, c) = c
complicated (True, a, x:xs, 5) = (a, xs)
bookID (Book id title authors) = id
bookTitle (Book id title authors) = title
bookAuthors (Book id title authors) = authors
nicerID (Book id _ _ ) = id
nicerTitle (Book _ title _ ) = title
nicerAuthors (Book _ _ authors ) = authors
isTwo :: Int -> Bool
isTwo n = if n == 2 then True else False
month :: Int -> Int
month = undefined
days :: (Int, Int) -> Int
days (m, d) = month m + d
xxx :: (a -> [b]) -> [a] -> [b]
xxx f xs = concat $ map f xs
| dalonng/hellos | haskell.hello/hello.hs | gpl-2.0 | 1,572 | 1 | 8 | 483 | 599 | 327 | 272 | -1 | -1 |
{-
Copyright (C) 2014 Jesse Rosenthal <jrosenthal@jhu.edu>
This program is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program; if not, write to the Free Software
Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
-}
{- |
Module : Text.Pandoc.Readers.DocX.Lists
Copyright : Copyright (C) 2014 Jesse Rosenthal
License : GNU GPL, version 2 or above
Maintainer : Jesse Rosenthal <jrosenthal@jhu.edu>
Stability : alpha
Portability : portable
Functions for converting flat DocX paragraphs into nested lists.
-}
module Text.Pandoc.Readers.DocX.Lists ( blocksToBullets
, blocksToDefinitions) where
import Text.Pandoc.JSON
import Text.Pandoc.Shared (trim)
import Control.Monad
import Data.List
import Data.Maybe
isListItem :: Block -> Bool
isListItem (Div (_, classes, _) _) | "list-item" `elem` classes = True
isListItem _ = False
getLevel :: Block -> Maybe Integer
getLevel (Div (_, _, kvs) _) = liftM read $ lookup "level" kvs
getLevel _ = Nothing
getLevelN :: Block -> Integer
getLevelN b = case getLevel b of
Just n -> n
Nothing -> -1
getNumId :: Block -> Maybe Integer
getNumId (Div (_, _, kvs) _) = liftM read $ lookup "num-id" kvs
getNumId _ = Nothing
getNumIdN :: Block -> Integer
getNumIdN b = case getNumId b of
Just n -> n
Nothing -> -1
getText :: Block -> Maybe String
getText (Div (_, _, kvs) _) = lookup "text" kvs
getText _ = Nothing
data ListType = Itemized | Enumerated ListAttributes
listStyleMap :: [(String, ListNumberStyle)]
listStyleMap = [("upperLetter", UpperAlpha),
("lowerLetter", LowerAlpha),
("upperRoman", UpperRoman),
("lowerRoman", LowerRoman),
("decimal", Decimal)]
listDelimMap :: [(String, ListNumberDelim)]
listDelimMap = [("%1)", OneParen),
("(%1)", TwoParens),
("%1.", Period)]
getListType :: Block -> Maybe ListType
getListType b@(Div (_, _, kvs) _) | isListItem b =
let
start = lookup "start" kvs
frmt = lookup "format" kvs
txt = lookup "text" kvs
in
case frmt of
Just "bullet" -> Just Itemized
Just f ->
case txt of
Just t -> Just $ Enumerated (
read (fromMaybe "1" start) :: Int,
fromMaybe DefaultStyle (lookup f listStyleMap),
fromMaybe DefaultDelim (lookup t listDelimMap))
Nothing -> Nothing
_ -> Nothing
getListType _ = Nothing
listParagraphDivs :: [String]
listParagraphDivs = ["ListParagraph"]
-- This is a first stab at going through and attaching meaning to list
-- paragraphs, without an item marker, following a list item. We
-- assume that these are paragraphs in the same item.
handleListParagraphs :: [Block] -> [Block]
handleListParagraphs [] = []
handleListParagraphs (
(Div attr1@(_, classes1, _) blks1) :
(Div (ident2, classes2, kvs2) blks2) :
blks
) | "list-item" `elem` classes1 &&
not ("list-item" `elem` classes2) &&
(not . null) (listParagraphDivs `intersect` classes2) =
-- We don't want to keep this indent.
let newDiv2 =
(Div (ident2, classes2, filter (\kv -> fst kv /= "indent") kvs2) blks2)
in
handleListParagraphs ((Div attr1 (blks1 ++ [newDiv2])) : blks)
handleListParagraphs (blk:blks) = blk : (handleListParagraphs blks)
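-- For example (schematically): a Div tagged "list-item" immediately followed
-- by a "ListParagraph" Div is rewritten into a single "list-item" Div
-- containing both blocks, with the trailing paragraph's "indent" key dropped.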
separateBlocks' :: Block -> [[Block]] -> [[Block]]
separateBlocks' blk ([] : []) = [[blk]]
separateBlocks' b@(BulletList _) acc = (init acc) ++ [(last acc) ++ [b]]
separateBlocks' b@(OrderedList _ _) acc = (init acc) ++ [(last acc) ++ [b]]
-- The following is for the invisible bullet lists. This is how
-- pandoc-generated ooxml does multiparagraph item lists.
separateBlocks' b acc | liftM trim (getText b) == Just "" =
(init acc) ++ [(last acc) ++ [b]]
separateBlocks' b acc = acc ++ [[b]]
separateBlocks :: [Block] -> [[Block]]
separateBlocks blks = foldr separateBlocks' [[]] (reverse blks)
flatToBullets' :: Integer -> [Block] -> [Block]
flatToBullets' _ [] = []
flatToBullets' num xs@(b : elems)
| getLevelN b == num = b : (flatToBullets' num elems)
| otherwise =
let bNumId = getNumIdN b
bLevel = getLevelN b
(children, remaining) =
span
(\b' ->
((getLevelN b') > bLevel ||
((getLevelN b') == bLevel && (getNumIdN b') == bNumId)))
xs
in
case getListType b of
Just (Enumerated attr) ->
(OrderedList attr (separateBlocks $ flatToBullets' bLevel children)) :
(flatToBullets' num remaining)
_ ->
(BulletList (separateBlocks $ flatToBullets' bLevel children)) :
(flatToBullets' num remaining)
flatToBullets :: [Block] -> [Block]
flatToBullets elems = flatToBullets' (-1) elems
blocksToBullets :: [Block] -> [Block]
blocksToBullets blks =
-- bottomUp removeListItemDivs $
flatToBullets $ (handleListParagraphs blks)
plainParaInlines :: Block -> [Inline]
plainParaInlines (Plain ils) = ils
plainParaInlines (Para ils) = ils
plainParaInlines _ = []
blocksToDefinitions' :: [([Inline], [[Block]])] -> [Block] -> [Block] -> [Block]
blocksToDefinitions' [] acc [] = reverse acc
blocksToDefinitions' defAcc acc [] =
reverse $ (DefinitionList (reverse defAcc)) : acc
blocksToDefinitions' defAcc acc
((Div (_, classes1, _) blks1) : (Div (ident2, classes2, kvs2) blks2) : blks)
| "DefinitionTerm" `elem` classes1 && "Definition" `elem` classes2 =
let remainingAttr2 = (ident2, delete "Definition" classes2, kvs2)
pair = case remainingAttr2 == ("", [], []) of
True -> (concatMap plainParaInlines blks1, [blks2])
False -> (concatMap plainParaInlines blks1, [[Div remainingAttr2 blks2]])
in
blocksToDefinitions' (pair : defAcc) acc blks
blocksToDefinitions' defAcc acc
((Div (ident2, classes2, kvs2) blks2) : blks)
| (not . null) defAcc && "Definition" `elem` classes2 =
let remainingAttr2 = (ident2, delete "Definition" classes2, kvs2)
defItems2 = case remainingAttr2 == ("", [], []) of
True -> blks2
False -> [Div remainingAttr2 blks2]
((defTerm, defItems):defs) = defAcc
defAcc' = case null defItems of
True -> (defTerm, [defItems2]) : defs
False -> (defTerm, init defItems ++ [last defItems ++ defItems2]) : defs
in
blocksToDefinitions' defAcc' acc blks
blocksToDefinitions' [] acc (b:blks) =
blocksToDefinitions' [] (b:acc) blks
blocksToDefinitions' defAcc acc (b:blks) =
blocksToDefinitions' [] (b : (DefinitionList (reverse defAcc)) : acc) blks
blocksToDefinitions :: [Block] -> [Block]
blocksToDefinitions = blocksToDefinitions' [] []
| uws-eresearch/docx2pandoc | src/Text/Pandoc/Readers/DocX/Lists.hs | gpl-2.0 | 7,237 | 0 | 20 | 1,684 | 2,211 | 1,186 | 1,025 | 138 | 4 |
{-# LANGUAGE LambdaCase #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE TupleSections #-}
{-# OPTIONS_HADDOCK show-extensions #-}
-- |
-- Module : Yi.Keymap.Vim.Ex.Commands.Quit
-- License : GPL-2
-- Maintainer : yi-devel@googlegroups.com
-- Stability : experimental
-- Portability : portable
--
-- Implements quit commands.
module Yi.Keymap.Vim.Ex.Commands.Quit (parse) where
import Control.Applicative
import Control.Lens
import Control.Monad
import Data.Foldable (find)
import qualified Data.List.PointedList.Circular as PL
import Data.Monoid
import qualified Data.Text as T
import qualified Text.ParserCombinators.Parsec as P
import Yi.Buffer
import Yi.Core (quitEditor, errorEditor, closeWindow)
import Yi.Editor
import Yi.File
import Yi.Keymap
import Yi.Keymap.Vim.Common
import qualified Yi.Keymap.Vim.Ex.Commands.Common as Common
import Yi.Keymap.Vim.Ex.Types
import Yi.Monad
import Yi.String (showT)
import Yi.Window (bufkey)
parse :: EventString -> Maybe ExCommand
parse = Common.parse $ P.choice
[ do
_ <- (P.try ( P.string "xit") <|> P.string "x")
bangs <- P.many (P.char '!')
return (quit True (not $ null bangs) False)
, do
ws <- P.many (P.char 'w')
void $ P.try ( P.string "quit") <|> P.string "q"
as <- P.many (P.try ( P.string "all") <|> P.string "a")
bangs <- P.many (P.char '!')
return $! quit (not $ null ws) (not $ null bangs) (not $ null as)
]
quit :: Bool -> Bool -> Bool -> ExCommand
quit w f a = Common.impureExCommand {
cmdShow = (if w then "w" else "")
`T.append` "quit"
`T.append` (if a then "all" else "")
`T.append` (if f then "!" else "")
, cmdAction = YiA $ action w f a
}
action :: Bool -> Bool -> Bool -> YiM ()
action False False False = quitWindowE
action False False True = quitAllE
action True False False = viWrite >> closeWindow
action True False True = saveAndQuitAllE
action False True False = closeWindow
action False True True = quitEditor
action True True False = viWrite >> closeWindow
action True True True = saveAndQuitAllE
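-- A few concrete parses, for orientation (informal; follows from the
-- parser and the 'action' table above):
--
--   "wq" -> quit True  False False -> viWrite >> closeWindow
--   "q!" -> quit False True  False -> closeWindow
--   "qa" -> quit False False True  -> quitAllE
--   "x!" -> quit True  True  False -> viWrite >> closeWindow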
quitWindowE :: YiM ()
quitWindowE = do
nw <- gets currentBuffer >>= Common.needsSaving
ws <- withEditor $ use currentWindowA >>= windowsOnBufferE . bufkey
if length ws == 1 && nw
then errorEditor "No write since last change (add ! to override)"
else do
winCount <- withEditor $ uses windowsA PL.length
tabCount <- withEditor $ uses tabsA PL.length
if winCount == 1 && tabCount == 1
-- if its the last window, quitting will quit the editor
then quitAllE
else closeWindow
quitAllE :: YiM ()
quitAllE = do
let needsWindow b = (b,) <$> deservesSave b
bs <- readEditor bufferSet >>= mapM needsWindow
-- Vim only shows the first modified buffer in the error.
case find snd bs of
Nothing -> quitEditor
Just (b, _) -> do
bufferName <- withEditor $ withGivenBuffer (bkey b) $ gets file
errorEditor $ "No write since last change for buffer "
<> showT bufferName
<> " (add ! to override)"
saveAndQuitAllE :: YiM ()
saveAndQuitAllE = Common.forAllBuffers fwriteBufferE >> quitEditor
| atsukotakahashi/wi | src/library/Yi/Keymap/Vim/Ex/Commands/Quit.hs | gpl-2.0 | 3,438 | 0 | 17 | 953 | 969 | 513 | 456 | 77 | 4 |
-- A QDSL implementation of Carlson, Hudak, and Jones'
-- Geometric Region Servers,
-- by Josef Svenningsson
module Examples.Region where
import QHaskell
type Point = (Float,Float)
type Region = Point -> Bool
type Radius = Float
circle :: Radius -> Region
circle r = \p -> magnitude p <= r
outside :: Region -> Region
outside r = \p -> not (r p)
intersection :: Region -> Region -> Region
intersection r1 r2 = \p -> r1 p && r2 p
union :: Region -> Region -> Region
union r1 r2 = \p -> r1 p || r2 p
magnitude :: Point -> Float
magnitude (x,y) = sqrt (x ** 2 + y ** 2)
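-- Sample evaluations (informal, not part of the original example):
--
-- > circle 3 (1, 1) == True
-- >   -- magnitude (1, 1) ~ 1.41, which is <= 3
-- > (outside (circle 3) `intersection` circle 6) (4, 4) == True
-- >   -- magnitude (4, 4) ~ 5.66: outside the inner circle, inside the outer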
makeQDSL "RegionLang" ['circle,'outside,'intersection,'union,'magnitude]
{-
\begin{Code generated automatically by a Template Haskell function}
type Types = [Radius -> Region
,Region -> Region
,Region -> Region -> Region
,Region -> Region -> Region
,Point -> Float]
typeEnv :: TypeEnv Types
typeEnv = 'circle <:> 'outside <:> 'intersection <:> 'union <:>
'magnitude <:> nil
evalEnv :: EvalEnv Types
evalEnv = circle <+> outside <+> intersection <+> union <+>
magnitude <+> nil
translate :: Type a => Qt a -> ErrM (RegionLang a)
translate = tran typeEnv
evaluate :: Type a => RegionLang a -> a
evaluate = eval evalEnv
normalise :: Type a => RegionLang a -> RegionLang a
normalise = norm
type RegionLang a = Dp Types a
qqRegionLang :: Type a => Qt a -> Qt (RegionLang a)
qqRegionLang = tranQ typeEnv
\end{Code generated automatically by a Template Haskell function}
-}
type Result = Region
compile :: Type a => RegionLang a -> a
compile = evaluate
regionLang :: Qt Region -> ErrM Region
regionLang q = do d <- translate q
return (compile (normalise True d))
inRegion :: Qt Region -> Point -> ErrM Bool
inRegion q p = do f <- regionLang q
return (f p)
{-
inRegion :: Qt (Point -> Region -> Bool)
inRegion = [|| \ p r -> r p ||]
-}
test :: ErrM Bool
test = inRegion [|| outside (circle 3) `intersection` circle 6 ||]
(4,4)
| shayan-najd/QHaskell | Examples/Region.hs | gpl-3.0 | 2,038 | 2 | 11 | 500 | 428 | 227 | 201 | -1 | -1 |
module Lamdu.GUI.TagPane
( make
) where
import qualified Control.Lens as Lens
import Data.Binary.Extended (encodeS)
import qualified Data.Char as Char
import Data.Property (Property(..), pVal)
import qualified Data.Property as Property
import qualified Data.Set as Set
import qualified Data.Text as Text
import qualified GUI.Momentu as M
import GUI.Momentu.Align (TextWidget, Aligned(..), WithTextPos(..))
import qualified GUI.Momentu.Align as Align
import qualified GUI.Momentu.Element as Element
import qualified GUI.Momentu.EventMap as E
import GUI.Momentu.Glue ((/-/), (/|/), hbox)
import qualified GUI.Momentu.I18N as MomentuTexts
import GUI.Momentu.ModKey (noMods)
import qualified GUI.Momentu.ModKey as ModKey
import qualified GUI.Momentu.State as GuiState
import GUI.Momentu.Widget (Widget)
import qualified GUI.Momentu.Widget as Widget
import qualified GUI.Momentu.Widgets.DropDownList as DropDownList
import qualified GUI.Momentu.Widgets.FocusDelegator as FocusDelegator
import qualified GUI.Momentu.Widgets.Grid as Grid
import qualified GUI.Momentu.Widgets.Spacer as Spacer
import qualified GUI.Momentu.Widgets.TextEdit as TextEdit
import qualified GUI.Momentu.Widgets.TextEdit.Property as TextEdits
import qualified GUI.Momentu.Widgets.TextView as TextView
import qualified Lamdu.CharClassification as Chars
import qualified Lamdu.Config as Config
import qualified Lamdu.Config.Theme.TextColors as TextColors
import Lamdu.Data.Tag (TextsInLang(..))
import qualified Lamdu.Data.Tag as Tag
import Lamdu.Formatting (Format(..))
import Lamdu.GUI.Styled (addValFrame, label, info, withColor)
import qualified Lamdu.I18N.CodeUI as Texts
import Lamdu.I18N.LangId (LangId(..), _LangId)
import qualified Lamdu.Sugar.Types as Sugar
import Lamdu.Prelude
tagRenameId :: Widget.Id -> Widget.Id
tagRenameId = (`Widget.joinId` ["rename"])
disallowedNameChars :: Set Char
disallowedNameChars = Set.fromList ",[]\\`()"
makeTagNameEdit :: _ => Property f Text -> Widget.Id -> m (TextWidget f)
makeTagNameEdit prop myId =
TextEdits.makeWordEdit
?? pure " "
?? prop
?? tagRenameId myId
<&> Align.tValue . Widget.eventMapMaker . Lens.mapped %~ E.filterChars (`Set.notMember` disallowedNameChars)
makeSymbolNameEdit :: _ => Property f Text -> Widget.Id -> m (TextWidget f)
makeSymbolNameEdit prop myId =
TextEdits.makeWordEdit
<*> (Lens.view (has . Texts.typeOperatorHere) <&> pure)
?? prop
?? tagRenameId myId
<&> Align.tValue . Widget.eventMapMaker . Lens.mapped %~ E.filterChars allowedSymbolChars
where
allowedSymbolChars =
Set.member
?? Set.fromList Chars.operator `Set.difference` disallowedNameChars
makeFocusableTagNameEdit :: _ => Widget.Id -> Property o Text -> m (TextWidget o)
makeFocusableTagNameEdit myId prop =
do
env <- Lens.view id
let fdConfig =
FocusDelegator.Config
{ FocusDelegator.focusChildKeys = env ^. has . Config.jumpToDefinitionKeys
, FocusDelegator.focusChildDoc =
E.toDoc env
[ has . MomentuTexts.edit
, has . Texts.tag
, has . Texts.renameTag
]
, FocusDelegator.focusParentKeys =
[ noMods ModKey.Key'Escape
, noMods ModKey.Key'Enter
]
, FocusDelegator.focusParentDoc =
E.toDoc env
[ has . MomentuTexts.edit
, has . Texts.tag
, has . Texts.stopEditing
]
}
(FocusDelegator.make ?? fdConfig ?? FocusDelegator.FocusEntryParent ?? myId
<&> (Align.tValue %~))
<*> makeTagNameEdit prop myId
makeLanguageTitle :: _ => Widget.Id -> LangId -> m (TextWidget o)
makeLanguageTitle myId lang =
TextView.make
<*> (Lens.view has <&> getLang)
<*> pure (Widget.toAnimId myId <> ["lang-title"])
<&> Align.tValue %~ Widget.fromView
where
getLang :: Map LangId Text -> Text
getLang x =
x ^. Lens.at lang
& fromMaybe (lang ^. _LangId & Lens.ix 0 %~ Char.toUpper)
data TextsRow a = TextsRow
{ _language :: a
, _space0 :: a
, _name :: a
, _space1 :: a
, _abbreviation :: a
, _space2 :: a
, _disambig :: a
} deriving (Functor, Foldable, Traversable)
langWidgetId :: Widget.Id -> LangId -> Widget.Id
langWidgetId parentId lang =
parentId `Widget.joinId` [encodeS lang]
nameId :: Widget.Id -> Widget.Id
nameId = (`Widget.joinId` ["name"])
hspace :: _ => m (TextWidget f)
hspace = Spacer.stdHSpace <&> Widget.fromView <&> WithTextPos 0
hspaceOf :: Widget.R -> TextWidget f
hspaceOf w = Spacer.makeHorizontal w & Widget.fromView & WithTextPos 0
textsRow ::
_ =>
m (TextWidget f) ->
m (TextWidget f) ->
m (TextWidget f) ->
m (TextWidget f) ->
m (TextsRow (Aligned (Widget f)))
textsRow lang name abbrev disambig =
TextsRow lang hspace name hspace abbrev hspace disambig
& sequenceA
<&> Lens.mapped %~ Align.fromWithTextPos 0
makeLangRow ::
_ =>
Widget.Id -> (LangId -> TextsInLang -> o ()) -> LangId -> TextsInLang ->
m (TextsRow (Aligned (Widget o)))
makeLangRow parentId setName lang langNames =
textsRow
(makeLanguageTitle langId lang & info)
(makeFocusableTagNameEdit (nameId langId) nameProp)
(mkProp Tag.abbreviation & makeFocusableTagNameEdit (mkId "abbr"))
(mkProp Tag.disambiguationText & makeFocusableTagNameEdit (mkId "disamb"))
where
mkId suffix = langId `Widget.joinId` [suffix]
langId = langWidgetId parentId lang
nameProp =
setName lang . (\x -> langNames & Tag.name .~ x)
& Property (langNames ^. Tag.name)
mkProp l =
setName lang .
(\x -> langNames & Lens.cloneLens l .~ if x == "" then Nothing else Just x)
& Property (langNames ^. Lens.cloneLens l . Lens._Just)
makeMissingLangRow ::
_ =>
Widget.Id -> (LangId -> TextsInLang -> o ()) -> LangId ->
m (TextsRow (Aligned (Widget o)))
makeMissingLangRow parentId setName lang =
textsRow
(makeLanguageTitle langId lang & info)
(makeFocusableTagNameEdit (nameId langId) nameProp)
(pure Element.empty)
(pure Element.empty)
where
langId = langWidgetId parentId lang
nameProp =
setName lang . (\x -> TextsInLang x Nothing Nothing)
& Property ""
makeLangsTable ::
(MonadReader env m, _) =>
Widget.Id -> Map LangId TextsInLang ->
(LangId -> TextsInLang -> o ()) -> m (Widget o)
makeLangsTable myId tagTexts setName =
do
lang <- Lens.view has
let currentLang =
case tagTexts ^. Lens.at lang of
Nothing -> makeMissingLangRow myId setName lang
Just cur -> makeLangRow myId setName lang cur
let editOtherLangs =
tagTexts ^@.. Lens.itraversed
& filter ((/= lang) . fst)
<&> uncurry (makeLangRow myId setName)
Grid.make <*>
sequence
(heading : currentLang : editOtherLangs)
<&> snd
where
      -- the type of Styled.label is RankN, so we duplicate a bit of
      -- code here to avoid overcomplicating the abstraction
toWidget = fmap Widget.fromView
heading =
textsRow
(label MomentuTexts.language <&> toWidget)
(label Texts.name <&> toWidget)
(label Texts.abbreviation <&> toWidget)
(label Texts.disambiguationText <&> toWidget)
& info
data SymType = NoSymbol | UniversalSymbol | DirectionalSymbol
deriving Eq
makeSymbol ::
_ => Widget.Id -> Property o Tag.Symbol -> m (TextWidget o, TextWidget o)
makeSymbol myId symProp =
case symProp ^. pVal of
Tag.NoSymbol ->
flip (,) Element.empty <$> makeDropDownList NoSymbol (toSym "" "")
Tag.UniversalSymbol text ->
(,)
<$> makeDropDownList UniversalSymbol (toSym text text)
<*> nameEdit (Property text (set . Tag.UniversalSymbol)) "universal"
Tag.DirectionalSymbol (Tag.DirOp ltr rtl) ->
(,)
<$> makeDropDownList DirectionalSymbol (toSym ltr rtl)
<*>
( (label Texts.leftToRightSymbol & info <&> fmap Widget.fromView)
/|/ hspace /|/ nameEdit (Property ltr (`setDirectional` rtl)) "ltr"
/|/ hspace /|/ info (label Texts.rightToLeftSymbol)
/|/ hspace /|/ nameEdit (Property rtl (setDirectional ltr)) "rtl"
)
where
set = void . Property.set symProp
setDirectional ltr rtl = Tag.DirOp ltr rtl & Tag.DirectionalSymbol & set
toSym _ _ NoSymbol = Tag.NoSymbol
toSym "" rtl UniversalSymbol = Tag.UniversalSymbol rtl
toSym ltr _ UniversalSymbol = Tag.UniversalSymbol ltr
toSym ltr rtl DirectionalSymbol = Tag.DirectionalSymbol (Tag.DirOp ltr rtl)
mkId suffix = myId `Widget.joinId` [suffix]
nameEdit prop = makeSymbolNameEdit prop . mkId
focusableLabel l suffix =
TextView.makeFocusable <*> Lens.view (has . l) ?? mkId suffix
makeDropDownList curType toTagSym =
do
noSymLabel <- focusableLabel Texts.noSymbol "nosym"
uniLabel <- focusableLabel Texts.symbol "unisym"
dirLabel <- focusableLabel Texts.directionalSymbol "dirsym"
defConf <- DropDownList.defaultConfig <*> Lens.view (has . Texts.symbolType)
DropDownList.make ?? Property curType (set . toTagSym)
?? [ (NoSymbol, noSymLabel)
, (UniversalSymbol, uniLabel)
, (DirectionalSymbol, dirLabel)
]
?? defConf ?? mkId "symType"
& withColor TextColors.actionTextColor
parseInt :: Text -> Maybe Int
parseInt newText
| newText /= Text.strip newText = Nothing
| newText == "" = Just 0
| otherwise = tryParse newText
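-- Expected behaviour of 'parseInt' (informal sketch; it assumes that
-- 'tryParse' from Lamdu.Formatting reads a plain decimal Int):
--
--   parseInt "7"  == Just 7
--   parseInt ""   == Just 0   -- an empty edit field counts as zero
--   parseInt " 7" == Nothing  -- surrounding whitespace is rejected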
makeIntEdit :: _ => Widget.Id -> Property o Int -> m (TextWidget o)
makeIntEdit myId prop =
do
text <-
M.readWidgetState myId
<&> (^? Lens._Just . Lens.filtered ((== Just prevVal) . parseInt))
<&> fromMaybe prevValStr
TextEdit.make ?? TextEdit.Modes "0" "0" ?? text ?? myId
<&> Align.tValue . Widget.eventMapMaker . Lens.mapped %~
-- Avoid taking keys that don't belong to us,
-- so weakerEvents with them will work.
E.filter (Lens.has Lens._Just . parseInt . fst)
<&> Align.tValue . Widget.updates %~
\(newText, eventRes) ->
eventRes <> GuiState.updateWidgetState myId newText
<$ (parseInt newText & parseAssert & Property.set prop)
where
parseAssert = error "parsing int failed" & fromMaybe
prevVal = Property.value prop
prevValStr = show prevVal & Text.pack
makeOrderEdit :: _ => Widget.Id -> Property o Int -> m (TextWidget o)
makeOrderEdit tagPaneId prop =
info (label Texts.order) /|/ hspace /|/
makeIntEdit orderEditId prop
where
orderEditId = tagPaneId `Widget.joinId` ["tagOrder"]
make :: _ => Sugar.TagPane o -> M.WidgetId -> m (Widget o)
make tagPane myId =
Lens.view has
>>= \lang ->
addValFrame <*>
do
(symbol, nextLine) <- makeSymbol myId symbolProp
langsTable <-
makeLangsTable myId
(tagPane ^. Sugar.tpTagData . Tag.tagTexts) (tagPane ^. Sugar.tpSetTexts)
orderEdit <- makeOrderEdit myId orderProp
let totalWidth =
max (nextLine ^. Element.width) (langsTable ^. Element.width)
let gap = totalWidth - (symbol ^. Element.width + orderEdit ^. Element.width)
pure langsTable
/-/ (hbox ?? [symbol, hspaceOf gap, orderEdit] <&> (^. Align.tValue))
/-/ pure (nextLine ^. Align.tValue)
& local (Element.animIdPrefix .~ Widget.toAnimId myId)
& GuiState.assignCursor myId (nameId (langWidgetId myId lang))
where
prop lens setterLens =
Property
(tagPane ^. Sugar.tpTagData . lens)
(tagPane ^. setterLens)
orderProp = prop Tag.tagOrder Sugar.tpSetOrder
symbolProp = prop Tag.tagSymbol Sugar.tpSetSymbol
| Peaker/lamdu | src/Lamdu/GUI/TagPane.hs | gpl-3.0 | 12,550 | 0 | 20 | 3,527 | 3,632 | 1,913 | 1,719 | -1 | -1 |
module Stats (Stats,
initialStats,
levelInStats,
linesInStats,
scoreInStats,
addLinesToStats) where
data Stats = Stats { linesCount :: Int, score :: Int }
initialStats :: Stats
initialStats = Stats 0 0
levelInStats :: Stats -> Int
levelInStats stats = div (linesCount stats) 5
linesInStats :: Stats -> Int
linesInStats = linesCount
scoreInStats :: Stats -> Int
scoreInStats = score
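-- A quick worked example of the scoring implemented by 'addLinesToStats'
-- below (informal, not part of the original module):
--
-- > let s = addLinesToStats 4 (addLinesToStats 1 initialStats)
-- > linesInStats s == 5
-- > scoreInStats s == 900   -- 100 + 800
-- > levelInStats s == 1     -- one level per five cleared lines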
addLinesToStats :: Int -> Stats -> Stats
addLinesToStats n (Stats l s) = Stats (l + n) (s + scoreForLines n)
where scoreForLines :: Int -> Int
scoreForLines 0 = 0
scoreForLines 1 = 100
scoreForLines 2 = 200
scoreForLines 3 = 400
scoreForLines 4 = 800
scoreForLines _ = error "Wrong number of lines" | pavelfatin/haskell-blocks | src/Stats.hs | gpl-3.0 | 820 | 0 | 8 | 251 | 231 | 125 | 106 | 24 | 6 |
-- Author: Viacheslav Lotsmanov
-- License: GPLv3 https://raw.githubusercontent.com/unclechu/xlib-keys-hack/master/LICENSE
module Utils.Sugar (spec) where
import "hspec" Test.Hspec (Spec, describe, it, shouldBe)
-- local imports
import "xlib-keys-hack" Utils.Sugar
( (.>), (|?|), (?)
, applyIf, applyUnless
)
spec :: Spec
spec = do
describe "Piping operators" $
it "(.>) flipped version of composition operator" $ do
(subtract 6 . (*2) . subtract 2) 15 `shouldBe` (20 :: Int)
(subtract 2 .> (*2) .> subtract 6) 15 `shouldBe` (20 :: Int)
describe "Boolean operators" $ do
it "(|?|) flipped `bool` function (A value if True or B value if False)" $ do
(10 |?| 20) True `shouldBe` (10 :: Int)
(10 |?| 20) False `shouldBe` (20 :: Int)
it "(?) if-condition operator" $ do
(True ? 10 $ 20) `shouldBe` (10 :: Int)
(False ? 10 $ 20) `shouldBe` (20 :: Int)
it "(?) if-condition operator chaining" $
(False ? 10 $ False ? 20 $ True ? 30 $ 40) `shouldBe` (30 :: Int)
describe "Function applying helpers" $ do
    it "`applyIf` applies the given function if the condition is True,\
       \ otherwise returns the original value" $ do
applyIf (+5) True 10 `shouldBe` (15 :: Int)
applyIf (+5) False 10 `shouldBe` (10 :: Int)
    it "`applyUnless` applies the given function if the condition is False,\
       \ otherwise returns the original value" $ do
applyUnless (+5) False 10 `shouldBe` (15 :: Int)
applyUnless (+5) True 10 `shouldBe` (10 :: Int)
| unclechu/xlib-keys-hack | test/Utils/Sugar.hs | gpl-3.0 | 1,530 | 0 | 18 | 372 | 483 | 267 | 216 | -1 | -1 |
module KRPCHS.RemoteTech
( Target(..)
, Antenna
, Comms
, antenna
, antennaStream
, antennaStreamReq
, getAntennaHasConnection
, getAntennaHasConnectionStream
, getAntennaHasConnectionStreamReq
, getAntennaPart
, getAntennaPartStream
, getAntennaPartStreamReq
, getAntennaTarget
, getAntennaTargetStream
, getAntennaTargetStreamReq
, getAntennaTargetBody
, getAntennaTargetBodyStream
, getAntennaTargetBodyStreamReq
, getAntennaTargetGroundStation
, getAntennaTargetGroundStationStream
, getAntennaTargetGroundStationStreamReq
, getAntennaTargetVessel
, getAntennaTargetVesselStream
, getAntennaTargetVesselStreamReq
, setAntennaTarget
, setAntennaTargetBody
, setAntennaTargetGroundStation
, setAntennaTargetVessel
, comms
, commsStream
, commsStreamReq
, commsSignalDelayToVessel
, commsSignalDelayToVesselStream
, commsSignalDelayToVesselStreamReq
, getCommsAntennas
, getCommsAntennasStream
, getCommsAntennasStreamReq
, getCommsHasConnection
, getCommsHasConnectionStream
, getCommsHasConnectionStreamReq
, getCommsHasConnectionToGroundStation
, getCommsHasConnectionToGroundStationStream
, getCommsHasConnectionToGroundStationStreamReq
, getCommsHasFlightComputer
, getCommsHasFlightComputerStream
, getCommsHasFlightComputerStreamReq
, getCommsHasLocalControl
, getCommsHasLocalControlStream
, getCommsHasLocalControlStreamReq
, getCommsSignalDelay
, getCommsSignalDelayStream
, getCommsSignalDelayStreamReq
, getCommsSignalDelayToGroundStation
, getCommsSignalDelayToGroundStationStream
, getCommsSignalDelayToGroundStationStreamReq
, getCommsVessel
, getCommsVesselStream
, getCommsVesselStreamReq
, getAvailable
, getAvailableStream
, getAvailableStreamReq
, getGroundStations
, getGroundStationsStream
, getGroundStationsStreamReq
) where
import qualified Data.Text
import qualified KRPCHS.SpaceCenter
import KRPCHS.Internal.Requests
import KRPCHS.Internal.SerializeUtils
{-
- A RemoteTech antenna. Obtained by calling <see cref="M:RemoteTech.Comms.Antennas" /> or <see cref="M:RemoteTech.Antenna" />.
-}
newtype Antenna = Antenna { antennaId :: Int }
deriving (Show, Eq, Ord)
instance PbSerializable Antenna where
encodePb = encodePb . antennaId
decodePb b = Antenna <$> decodePb b
instance KRPCResponseExtractable Antenna
{-
- Communications for a vessel.
-}
newtype Comms = Comms { commsId :: Int }
deriving (Show, Eq, Ord)
instance PbSerializable Comms where
encodePb = encodePb . commsId
decodePb b = Comms <$> decodePb b
instance KRPCResponseExtractable Comms
{-
 - The type of object an antenna is targeting.
- See <see cref="M:RemoteTech.Antenna.Target" />.
-}
data Target
= Target'ActiveVessel
| Target'CelestialBody
| Target'GroundStation
| Target'Vessel
| Target'None
deriving (Show, Eq, Ord, Enum)
instance PbSerializable Target where
encodePb = encodePb . fromEnum
decodePb b = toEnum <$> decodePb b
instance KRPCResponseExtractable Target
{-
- Get the antenna object for a particular part.
-}
antenna :: KRPCHS.SpaceCenter.Part -> RPCContext (KRPCHS.RemoteTech.Antenna)
antenna partArg = do
let r = makeRequest "RemoteTech" "Antenna" [makeArgument 0 partArg]
res <- sendRequest r
processResponse res
antennaStreamReq :: KRPCHS.SpaceCenter.Part -> KRPCStreamReq (KRPCHS.RemoteTech.Antenna)
antennaStreamReq partArg =
let req = makeRequest "RemoteTech" "Antenna" [makeArgument 0 partArg]
in makeStream req
antennaStream :: KRPCHS.SpaceCenter.Part -> RPCContext (KRPCStream (KRPCHS.RemoteTech.Antenna))
antennaStream partArg = requestStream $ antennaStreamReq partArg
{-
- Whether the antenna has a connection.
-}
getAntennaHasConnection :: KRPCHS.RemoteTech.Antenna -> RPCContext (Bool)
getAntennaHasConnection thisArg = do
let r = makeRequest "RemoteTech" "Antenna_get_HasConnection" [makeArgument 0 thisArg]
res <- sendRequest r
processResponse res
getAntennaHasConnectionStreamReq :: KRPCHS.RemoteTech.Antenna -> KRPCStreamReq (Bool)
getAntennaHasConnectionStreamReq thisArg =
let req = makeRequest "RemoteTech" "Antenna_get_HasConnection" [makeArgument 0 thisArg]
in makeStream req
getAntennaHasConnectionStream :: KRPCHS.RemoteTech.Antenna -> RPCContext (KRPCStream (Bool))
getAntennaHasConnectionStream thisArg = requestStream $ getAntennaHasConnectionStreamReq thisArg
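{-
 - Typical usage sketch (not part of the generated bindings; 'somePart' is
 - hypothetical and would be obtained via KRPCHS.SpaceCenter, and the block
 - runs in the RPCContext monad):
 -
 -   do ant <- antenna somePart
 -      ok  <- getAntennaHasConnection ant
 -      ...
 -}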
{-
- Get the part containing this antenna.
-}
getAntennaPart :: KRPCHS.RemoteTech.Antenna -> RPCContext (KRPCHS.SpaceCenter.Part)
getAntennaPart thisArg = do
let r = makeRequest "RemoteTech" "Antenna_get_Part" [makeArgument 0 thisArg]
res <- sendRequest r
processResponse res
getAntennaPartStreamReq :: KRPCHS.RemoteTech.Antenna -> KRPCStreamReq (KRPCHS.SpaceCenter.Part)
getAntennaPartStreamReq thisArg =
let req = makeRequest "RemoteTech" "Antenna_get_Part" [makeArgument 0 thisArg]
in makeStream req
getAntennaPartStream :: KRPCHS.RemoteTech.Antenna -> RPCContext (KRPCStream (KRPCHS.SpaceCenter.Part))
getAntennaPartStream thisArg = requestStream $ getAntennaPartStreamReq thisArg
{-
 - The object that the antenna is targeting.
- This property can be used to set the target to <see cref="M:RemoteTech.Target.None" /> or <see cref="M:RemoteTech.Target.ActiveVessel" />.
- To set the target to a celestial body, ground station or vessel see <see cref="M:RemoteTech.Antenna.TargetBody" />,
- <see cref="M:RemoteTech.Antenna.TargetGroundStation" /> and <see cref="M:RemoteTech.Antenna.TargetVessel" />.
-}
getAntennaTarget :: KRPCHS.RemoteTech.Antenna -> RPCContext (KRPCHS.RemoteTech.Target)
getAntennaTarget thisArg = do
let r = makeRequest "RemoteTech" "Antenna_get_Target" [makeArgument 0 thisArg]
res <- sendRequest r
processResponse res
getAntennaTargetStreamReq :: KRPCHS.RemoteTech.Antenna -> KRPCStreamReq (KRPCHS.RemoteTech.Target)
getAntennaTargetStreamReq thisArg =
let req = makeRequest "RemoteTech" "Antenna_get_Target" [makeArgument 0 thisArg]
in makeStream req
getAntennaTargetStream :: KRPCHS.RemoteTech.Antenna -> RPCContext (KRPCStream (KRPCHS.RemoteTech.Target))
getAntennaTargetStream thisArg = requestStream $ getAntennaTargetStreamReq thisArg
{-
 - The celestial body the antenna is targeting.
-}
getAntennaTargetBody :: KRPCHS.RemoteTech.Antenna -> RPCContext (KRPCHS.SpaceCenter.CelestialBody)
getAntennaTargetBody thisArg = do
let r = makeRequest "RemoteTech" "Antenna_get_TargetBody" [makeArgument 0 thisArg]
res <- sendRequest r
processResponse res
getAntennaTargetBodyStreamReq :: KRPCHS.RemoteTech.Antenna -> KRPCStreamReq (KRPCHS.SpaceCenter.CelestialBody)
getAntennaTargetBodyStreamReq thisArg =
let req = makeRequest "RemoteTech" "Antenna_get_TargetBody" [makeArgument 0 thisArg]
in makeStream req
getAntennaTargetBodyStream :: KRPCHS.RemoteTech.Antenna -> RPCContext (KRPCStream (KRPCHS.SpaceCenter.CelestialBody))
getAntennaTargetBodyStream thisArg = requestStream $ getAntennaTargetBodyStreamReq thisArg
{-
 - The ground station the antenna is targeting.
-}
getAntennaTargetGroundStation :: KRPCHS.RemoteTech.Antenna -> RPCContext (Data.Text.Text)
getAntennaTargetGroundStation thisArg = do
let r = makeRequest "RemoteTech" "Antenna_get_TargetGroundStation" [makeArgument 0 thisArg]
res <- sendRequest r
processResponse res
getAntennaTargetGroundStationStreamReq :: KRPCHS.RemoteTech.Antenna -> KRPCStreamReq (Data.Text.Text)
getAntennaTargetGroundStationStreamReq thisArg =
let req = makeRequest "RemoteTech" "Antenna_get_TargetGroundStation" [makeArgument 0 thisArg]
in makeStream req
getAntennaTargetGroundStationStream :: KRPCHS.RemoteTech.Antenna -> RPCContext (KRPCStream (Data.Text.Text))
getAntennaTargetGroundStationStream thisArg = requestStream $ getAntennaTargetGroundStationStreamReq thisArg
{-
 - The vessel the antenna is targeting.
-}
getAntennaTargetVessel :: KRPCHS.RemoteTech.Antenna -> RPCContext (KRPCHS.SpaceCenter.Vessel)
getAntennaTargetVessel thisArg = do
let r = makeRequest "RemoteTech" "Antenna_get_TargetVessel" [makeArgument 0 thisArg]
res <- sendRequest r
processResponse res
getAntennaTargetVesselStreamReq :: KRPCHS.RemoteTech.Antenna -> KRPCStreamReq (KRPCHS.SpaceCenter.Vessel)
getAntennaTargetVesselStreamReq thisArg =
let req = makeRequest "RemoteTech" "Antenna_get_TargetVessel" [makeArgument 0 thisArg]
in makeStream req
getAntennaTargetVesselStream :: KRPCHS.RemoteTech.Antenna -> RPCContext (KRPCStream (KRPCHS.SpaceCenter.Vessel))
getAntennaTargetVesselStream thisArg = requestStream $ getAntennaTargetVesselStreamReq thisArg
{-
 - The object that the antenna is targeting.
- This property can be used to set the target to <see cref="M:RemoteTech.Target.None" /> or <see cref="M:RemoteTech.Target.ActiveVessel" />.
- To set the target to a celestial body, ground station or vessel see <see cref="M:RemoteTech.Antenna.TargetBody" />,
- <see cref="M:RemoteTech.Antenna.TargetGroundStation" /> and <see cref="M:RemoteTech.Antenna.TargetVessel" />.
-}
setAntennaTarget :: KRPCHS.RemoteTech.Antenna -> KRPCHS.RemoteTech.Target -> RPCContext ()
setAntennaTarget thisArg valueArg = do
let r = makeRequest "RemoteTech" "Antenna_set_Target" [makeArgument 0 thisArg, makeArgument 1 valueArg]
res <- sendRequest r
processResponse res
{-
 - The celestial body the antenna is targeting.
-}
setAntennaTargetBody :: KRPCHS.RemoteTech.Antenna -> KRPCHS.SpaceCenter.CelestialBody -> RPCContext ()
setAntennaTargetBody thisArg valueArg = do
let r = makeRequest "RemoteTech" "Antenna_set_TargetBody" [makeArgument 0 thisArg, makeArgument 1 valueArg]
res <- sendRequest r
processResponse res
{-
 - The ground station the antenna is targeting.
-}
setAntennaTargetGroundStation :: KRPCHS.RemoteTech.Antenna -> Data.Text.Text -> RPCContext ()
setAntennaTargetGroundStation thisArg valueArg = do
let r = makeRequest "RemoteTech" "Antenna_set_TargetGroundStation" [makeArgument 0 thisArg, makeArgument 1 valueArg]
res <- sendRequest r
processResponse res
{-
 - The vessel the antenna is targeting.
-}
setAntennaTargetVessel :: KRPCHS.RemoteTech.Antenna -> KRPCHS.SpaceCenter.Vessel -> RPCContext ()
setAntennaTargetVessel thisArg valueArg = do
let r = makeRequest "RemoteTech" "Antenna_set_TargetVessel" [makeArgument 0 thisArg, makeArgument 1 valueArg]
res <- sendRequest r
processResponse res
{-
- Get a communications object, representing the communication capability of a particular vessel.
-}
comms :: KRPCHS.SpaceCenter.Vessel -> RPCContext (KRPCHS.RemoteTech.Comms)
comms vesselArg = do
let r = makeRequest "RemoteTech" "Comms" [makeArgument 0 vesselArg]
res <- sendRequest r
processResponse res
commsStreamReq :: KRPCHS.SpaceCenter.Vessel -> KRPCStreamReq (KRPCHS.RemoteTech.Comms)
commsStreamReq vesselArg =
let req = makeRequest "RemoteTech" "Comms" [makeArgument 0 vesselArg]
in makeStream req
commsStream :: KRPCHS.SpaceCenter.Vessel -> RPCContext (KRPCStream (KRPCHS.RemoteTech.Comms))
commsStream vesselArg = requestStream $ commsStreamReq vesselArg
{-
 - The signal delay between this vessel and another vessel, in seconds.
-}
commsSignalDelayToVessel :: KRPCHS.RemoteTech.Comms -> KRPCHS.SpaceCenter.Vessel -> RPCContext (Double)
commsSignalDelayToVessel thisArg otherArg = do
let r = makeRequest "RemoteTech" "Comms_SignalDelayToVessel" [makeArgument 0 thisArg, makeArgument 1 otherArg]
res <- sendRequest r
processResponse res
commsSignalDelayToVesselStreamReq :: KRPCHS.RemoteTech.Comms -> KRPCHS.SpaceCenter.Vessel -> KRPCStreamReq (Double)
commsSignalDelayToVesselStreamReq thisArg otherArg =
let req = makeRequest "RemoteTech" "Comms_SignalDelayToVessel" [makeArgument 0 thisArg, makeArgument 1 otherArg]
in makeStream req
commsSignalDelayToVesselStream :: KRPCHS.RemoteTech.Comms -> KRPCHS.SpaceCenter.Vessel -> RPCContext (KRPCStream (Double))
commsSignalDelayToVesselStream thisArg otherArg = requestStream $ commsSignalDelayToVesselStreamReq thisArg otherArg
{-
- The antennas for this vessel.
-}
getCommsAntennas :: KRPCHS.RemoteTech.Comms -> RPCContext ([KRPCHS.RemoteTech.Antenna])
getCommsAntennas thisArg = do
let r = makeRequest "RemoteTech" "Comms_get_Antennas" [makeArgument 0 thisArg]
res <- sendRequest r
processResponse res
getCommsAntennasStreamReq :: KRPCHS.RemoteTech.Comms -> KRPCStreamReq ([KRPCHS.RemoteTech.Antenna])
getCommsAntennasStreamReq thisArg =
let req = makeRequest "RemoteTech" "Comms_get_Antennas" [makeArgument 0 thisArg]
in makeStream req
getCommsAntennasStream :: KRPCHS.RemoteTech.Comms -> RPCContext (KRPCStream ([KRPCHS.RemoteTech.Antenna]))
getCommsAntennasStream thisArg = requestStream $ getCommsAntennasStreamReq thisArg
{-
- Whether the vessel has any connection.
-}
getCommsHasConnection :: KRPCHS.RemoteTech.Comms -> RPCContext (Bool)
getCommsHasConnection thisArg = do
let r = makeRequest "RemoteTech" "Comms_get_HasConnection" [makeArgument 0 thisArg]
res <- sendRequest r
processResponse res
getCommsHasConnectionStreamReq :: KRPCHS.RemoteTech.Comms -> KRPCStreamReq (Bool)
getCommsHasConnectionStreamReq thisArg =
let req = makeRequest "RemoteTech" "Comms_get_HasConnection" [makeArgument 0 thisArg]
in makeStream req
getCommsHasConnectionStream :: KRPCHS.RemoteTech.Comms -> RPCContext (KRPCStream (Bool))
getCommsHasConnectionStream thisArg = requestStream $ getCommsHasConnectionStreamReq thisArg
{-
- Whether the vessel has a connection to a ground station.
-}
getCommsHasConnectionToGroundStation :: KRPCHS.RemoteTech.Comms -> RPCContext (Bool)
getCommsHasConnectionToGroundStation thisArg = do
let r = makeRequest "RemoteTech" "Comms_get_HasConnectionToGroundStation" [makeArgument 0 thisArg]
res <- sendRequest r
processResponse res
getCommsHasConnectionToGroundStationStreamReq :: KRPCHS.RemoteTech.Comms -> KRPCStreamReq (Bool)
getCommsHasConnectionToGroundStationStreamReq thisArg =
let req = makeRequest "RemoteTech" "Comms_get_HasConnectionToGroundStation" [makeArgument 0 thisArg]
in makeStream req
getCommsHasConnectionToGroundStationStream :: KRPCHS.RemoteTech.Comms -> RPCContext (KRPCStream (Bool))
getCommsHasConnectionToGroundStationStream thisArg = requestStream $ getCommsHasConnectionToGroundStationStreamReq thisArg
{-
- Whether the vessel has a flight computer on board.
-}
getCommsHasFlightComputer :: KRPCHS.RemoteTech.Comms -> RPCContext (Bool)
getCommsHasFlightComputer thisArg = do
let r = makeRequest "RemoteTech" "Comms_get_HasFlightComputer" [makeArgument 0 thisArg]
res <- sendRequest r
processResponse res
getCommsHasFlightComputerStreamReq :: KRPCHS.RemoteTech.Comms -> KRPCStreamReq (Bool)
getCommsHasFlightComputerStreamReq thisArg =
let req = makeRequest "RemoteTech" "Comms_get_HasFlightComputer" [makeArgument 0 thisArg]
in makeStream req
getCommsHasFlightComputerStream :: KRPCHS.RemoteTech.Comms -> RPCContext (KRPCStream (Bool))
getCommsHasFlightComputerStream thisArg = requestStream $ getCommsHasFlightComputerStreamReq thisArg
{-
- Whether the vessel can be controlled locally.
-}
getCommsHasLocalControl :: KRPCHS.RemoteTech.Comms -> RPCContext (Bool)
getCommsHasLocalControl thisArg = do
let r = makeRequest "RemoteTech" "Comms_get_HasLocalControl" [makeArgument 0 thisArg]
res <- sendRequest r
processResponse res
getCommsHasLocalControlStreamReq :: KRPCHS.RemoteTech.Comms -> KRPCStreamReq (Bool)
getCommsHasLocalControlStreamReq thisArg =
let req = makeRequest "RemoteTech" "Comms_get_HasLocalControl" [makeArgument 0 thisArg]
in makeStream req
getCommsHasLocalControlStream :: KRPCHS.RemoteTech.Comms -> RPCContext (KRPCStream (Bool))
getCommsHasLocalControlStream thisArg = requestStream $ getCommsHasLocalControlStreamReq thisArg
{-
- The shortest signal delay to the vessel, in seconds.
-}
getCommsSignalDelay :: KRPCHS.RemoteTech.Comms -> RPCContext (Double)
getCommsSignalDelay thisArg = do
let r = makeRequest "RemoteTech" "Comms_get_SignalDelay" [makeArgument 0 thisArg]
res <- sendRequest r
processResponse res
getCommsSignalDelayStreamReq :: KRPCHS.RemoteTech.Comms -> KRPCStreamReq (Double)
getCommsSignalDelayStreamReq thisArg =
let req = makeRequest "RemoteTech" "Comms_get_SignalDelay" [makeArgument 0 thisArg]
in makeStream req
getCommsSignalDelayStream :: KRPCHS.RemoteTech.Comms -> RPCContext (KRPCStream (Double))
getCommsSignalDelayStream thisArg = requestStream $ getCommsSignalDelayStreamReq thisArg
{-
- The signal delay between the vessel and the closest ground station, in seconds.
-}
getCommsSignalDelayToGroundStation :: KRPCHS.RemoteTech.Comms -> RPCContext (Double)
getCommsSignalDelayToGroundStation thisArg = do
let r = makeRequest "RemoteTech" "Comms_get_SignalDelayToGroundStation" [makeArgument 0 thisArg]
res <- sendRequest r
processResponse res
getCommsSignalDelayToGroundStationStreamReq :: KRPCHS.RemoteTech.Comms -> KRPCStreamReq (Double)
getCommsSignalDelayToGroundStationStreamReq thisArg =
let req = makeRequest "RemoteTech" "Comms_get_SignalDelayToGroundStation" [makeArgument 0 thisArg]
in makeStream req
getCommsSignalDelayToGroundStationStream :: KRPCHS.RemoteTech.Comms -> RPCContext (KRPCStream (Double))
getCommsSignalDelayToGroundStationStream thisArg = requestStream $ getCommsSignalDelayToGroundStationStreamReq thisArg
{-
- Get the vessel.
-}
getCommsVessel :: KRPCHS.RemoteTech.Comms -> RPCContext (KRPCHS.SpaceCenter.Vessel)
getCommsVessel thisArg = do
let r = makeRequest "RemoteTech" "Comms_get_Vessel" [makeArgument 0 thisArg]
res <- sendRequest r
processResponse res
getCommsVesselStreamReq :: KRPCHS.RemoteTech.Comms -> KRPCStreamReq (KRPCHS.SpaceCenter.Vessel)
getCommsVesselStreamReq thisArg =
let req = makeRequest "RemoteTech" "Comms_get_Vessel" [makeArgument 0 thisArg]
in makeStream req
getCommsVesselStream :: KRPCHS.RemoteTech.Comms -> RPCContext (KRPCStream (KRPCHS.SpaceCenter.Vessel))
getCommsVesselStream thisArg = requestStream $ getCommsVesselStreamReq thisArg
{-
- Whether RemoteTech is installed.
-}
getAvailable :: RPCContext (Bool)
getAvailable = do
let r = makeRequest "RemoteTech" "get_Available" []
res <- sendRequest r
processResponse res
getAvailableStreamReq :: KRPCStreamReq (Bool)
getAvailableStreamReq =
let req = makeRequest "RemoteTech" "get_Available" []
in makeStream req
getAvailableStream :: RPCContext (KRPCStream (Bool))
getAvailableStream = requestStream $ getAvailableStreamReq
{-
- The names of the ground stations.
-}
getGroundStations :: RPCContext ([Data.Text.Text])
getGroundStations = do
let r = makeRequest "RemoteTech" "get_GroundStations" []
res <- sendRequest r
processResponse res
getGroundStationsStreamReq :: KRPCStreamReq ([Data.Text.Text])
getGroundStationsStreamReq =
let req = makeRequest "RemoteTech" "get_GroundStations" []
in makeStream req
getGroundStationsStream :: RPCContext (KRPCStream ([Data.Text.Text]))
getGroundStationsStream = requestStream $ getGroundStationsStreamReq
| Cahu/krpc-hs | src/KRPCHS/RemoteTech.hs | gpl-3.0 | 19,170 | 0 | 12 | 2,534 | 3,896 | 1,981 | 1,915 | 321 | 1 |
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE TypeOperators #-}
{-# OPTIONS_GHC -fno-warn-duplicate-exports #-}
{-# OPTIONS_GHC -fno-warn-unused-binds #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
-- |
-- Module : Network.Google.Resource.AndroidEnterprise.ManagedConfigurationssettings.List
-- Copyright : (c) 2015-2016 Brendan Hay
-- License : Mozilla Public License, v. 2.0.
-- Maintainer : Brendan Hay <brendan.g.hay@gmail.com>
-- Stability : auto-generated
-- Portability : non-portable (GHC extensions)
--
-- Lists all the managed configurations settings for the specified app.
--
-- /See:/ <https://developers.google.com/android/work/play/emm-api Google Play EMM API Reference> for @androidenterprise.managedconfigurationssettings.list@.
module Network.Google.Resource.AndroidEnterprise.ManagedConfigurationssettings.List
(
-- * REST Resource
ManagedConfigurationssettingsListResource
-- * Creating a Request
, managedConfigurationssettingsList
, ManagedConfigurationssettingsList
-- * Request Lenses
, mclXgafv
, mclUploadProtocol
, mclEnterpriseId
, mclAccessToken
, mclUploadType
, mclProductId
, mclCallback
) where
import Network.Google.AndroidEnterprise.Types
import Network.Google.Prelude
-- | A resource alias for @androidenterprise.managedconfigurationssettings.list@ method which the
-- 'ManagedConfigurationssettingsList' request conforms to.
type ManagedConfigurationssettingsListResource =
"androidenterprise" :>
"v1" :>
"enterprises" :>
Capture "enterpriseId" Text :>
"products" :>
Capture "productId" Text :>
"managedConfigurationsSettings" :>
QueryParam "$.xgafv" Xgafv :>
QueryParam "upload_protocol" Text :>
QueryParam "access_token" Text :>
QueryParam "uploadType" Text :>
QueryParam "callback" Text :>
QueryParam "alt" AltJSON :>
Get '[JSON]
ManagedConfigurationsSettingsListResponse
-- | Lists all the managed configurations settings for the specified app.
--
-- /See:/ 'managedConfigurationssettingsList' smart constructor.
data ManagedConfigurationssettingsList =
ManagedConfigurationssettingsList'
{ _mclXgafv :: !(Maybe Xgafv)
, _mclUploadProtocol :: !(Maybe Text)
, _mclEnterpriseId :: !Text
, _mclAccessToken :: !(Maybe Text)
, _mclUploadType :: !(Maybe Text)
, _mclProductId :: !Text
, _mclCallback :: !(Maybe Text)
}
deriving (Eq, Show, Data, Typeable, Generic)
-- | Creates a value of 'ManagedConfigurationssettingsList' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'mclXgafv'
--
-- * 'mclUploadProtocol'
--
-- * 'mclEnterpriseId'
--
-- * 'mclAccessToken'
--
-- * 'mclUploadType'
--
-- * 'mclProductId'
--
-- * 'mclCallback'
managedConfigurationssettingsList
:: Text -- ^ 'mclEnterpriseId'
-> Text -- ^ 'mclProductId'
-> ManagedConfigurationssettingsList
managedConfigurationssettingsList pMclEnterpriseId_ pMclProductId_ =
ManagedConfigurationssettingsList'
{ _mclXgafv = Nothing
, _mclUploadProtocol = Nothing
, _mclEnterpriseId = pMclEnterpriseId_
, _mclAccessToken = Nothing
, _mclUploadType = Nothing
, _mclProductId = pMclProductId_
, _mclCallback = Nothing
}
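-- For example (with hypothetical IDs):
--
-- > managedConfigurationssettingsList "C0123456789" "app:com.example.app"
--
-- builds a request with the required IDs set and every optional field left
-- as 'Nothing'; the lenses below can then be used to override individual
-- fields.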
-- | V1 error format.
mclXgafv :: Lens' ManagedConfigurationssettingsList (Maybe Xgafv)
mclXgafv = lens _mclXgafv (\ s a -> s{_mclXgafv = a})
-- | Upload protocol for media (e.g. \"raw\", \"multipart\").
mclUploadProtocol :: Lens' ManagedConfigurationssettingsList (Maybe Text)
mclUploadProtocol
= lens _mclUploadProtocol
(\ s a -> s{_mclUploadProtocol = a})
-- | The ID of the enterprise.
mclEnterpriseId :: Lens' ManagedConfigurationssettingsList Text
mclEnterpriseId
= lens _mclEnterpriseId
(\ s a -> s{_mclEnterpriseId = a})
-- | OAuth access token.
mclAccessToken :: Lens' ManagedConfigurationssettingsList (Maybe Text)
mclAccessToken
= lens _mclAccessToken
(\ s a -> s{_mclAccessToken = a})
-- | Legacy upload protocol for media (e.g. \"media\", \"multipart\").
mclUploadType :: Lens' ManagedConfigurationssettingsList (Maybe Text)
mclUploadType
= lens _mclUploadType
(\ s a -> s{_mclUploadType = a})
-- | The ID of the product to which the managed configurations settings
-- apply.
mclProductId :: Lens' ManagedConfigurationssettingsList Text
mclProductId
= lens _mclProductId (\ s a -> s{_mclProductId = a})
-- | JSONP
mclCallback :: Lens' ManagedConfigurationssettingsList (Maybe Text)
mclCallback
= lens _mclCallback (\ s a -> s{_mclCallback = a})
instance GoogleRequest
ManagedConfigurationssettingsList
where
type Rs ManagedConfigurationssettingsList =
ManagedConfigurationsSettingsListResponse
type Scopes ManagedConfigurationssettingsList =
'["https://www.googleapis.com/auth/androidenterprise"]
requestClient ManagedConfigurationssettingsList'{..}
= go _mclEnterpriseId _mclProductId _mclXgafv
_mclUploadProtocol
_mclAccessToken
_mclUploadType
_mclCallback
(Just AltJSON)
androidEnterpriseService
where go
= buildClient
(Proxy ::
Proxy ManagedConfigurationssettingsListResource)
mempty
| brendanhay/gogol | gogol-android-enterprise/gen/Network/Google/Resource/AndroidEnterprise/ManagedConfigurationssettings/List.hs | mpl-2.0 | 5,926 | 0 | 20 | 1,385 | 786 | 457 | 329 | 122 | 1 |
-- Copyright (C) 2016-2017 Red Hat, Inc.
--
-- This library is free software; you can redistribute it and/or
-- modify it under the terms of the GNU Lesser General Public
-- License as published by the Free Software Foundation; either
-- version 2.1 of the License, or (at your option) any later version.
--
-- This library is distributed in the hope that it will be useful,
-- but WITHOUT ANY WARRANTY; without even the implied warranty of
-- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
-- Lesser General Public License for more details.
--
-- You should have received a copy of the GNU Lesser General Public
-- License along with this library; if not, see <http://www.gnu.org/licenses/>.
{-# LANGUAGE OverloadedStrings #-}
module BDCS.Packages(filesInPackage,
findPackage,
insertPackageName)
where
import Control.Monad.IO.Class(MonadIO)
import qualified Data.Text as T
import Database.Esqueleto
import BDCS.DB
import BDCS.KeyValue(insertKeyValue)
filesInPackage :: MonadIO m => T.Text -> SqlPersistT m [T.Text]
filesInPackage name = do
results <- select $ from $ \(files `InnerJoin` key_val `InnerJoin` file_key_values) -> do
on $ key_val ^. KeyValId ==. file_key_values ^. FileKeyValuesKey_val_id &&.
file_key_values ^. FileKeyValuesFile_id ==. files ^. FilesId
where_ $ key_val ^. KeyValKey_value ==. val "packageName" &&.
key_val ^. KeyValVal_value ==. val name
return $ files ^. FilesPath
return $ map unValue results
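-- Usage sketch (hypothetical package name; runs inside an existing
-- SqlPersistT transaction):
--
-- > paths <- filesInPackage "bash"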
insertPackageName :: MonadIO m => T.Text -> SqlPersistT m (Key KeyVal)
insertPackageName packageName =
findPackage packageName `orDo` insertKeyValue "packageName" packageName Nothing
findPackage :: MonadIO m => T.Text -> SqlPersistT m (Maybe (Key KeyVal))
findPackage name = firstResult $
select $ from $ \pkg -> do
where_ $ pkg ^. KeyValKey_value ==. val "packageName" &&.
pkg ^. KeyValVal_value ==. val name
limit 1
return $ pkg ^. KeyValId
| dashea/bdcs | importer/BDCS/Packages.hs | lgpl-2.1 | 2,097 | 0 | 19 | 489 | 397 | 210 | 187 | 28 | 1 |
module Main where
import Points (convexHull, center)
import Point (Point)
import Support (shouldEql)
p00 = (0, 0)
p10 = (1, 0)
p10' = (-1, 0)
p01 = (0, 1)
p01' = (0, -1)
p11 = (1, 1)
p11' = (-1, -1)
p20 = (2, 0)
p02 = (0, 2)
main :: IO ()
main = print $ foldl1 (++) [
center [p00] `shouldEql` p00,
center [p10', p10] `shouldEql` p00,
center [p01', p01] `shouldEql` p00,
center [p01', p01, p10] `shouldEql` (0.5, 0),
center [
(-1, 1), p11,
p11', (1, -1)
] `shouldEql` p00,
center [
(-1, 1), p11,
p20,
p11', (1, -1)
] `shouldEql` (0.5, 0),
convexHull [] `shouldEql` [],
convexHull [p00] `shouldEql` [],
convexHull [p00, p01] `shouldEql` [],
convexHull [p00, p01, p01'] `shouldEql` [],
convexHull [p01, p01', p00] `shouldEql` [],
convexHull [p01', p00, p01] `shouldEql` [],
convexHull [p00, p10, p10', p20] `shouldEql` [],
convexHull [p00, p01, p01', p02] `shouldEql` [],
convexHull [p00, p01, p10] `shouldEql` [p01, p10, p00],
convexHull [p01, p10, p00] `shouldEql` [p01, p10, p00],
convexHull [p10, p00, p01] `shouldEql` [p01, p10, p00],
convexHull [
(-1, 1), ( 1, 1),
(-1, -1), ( 1, -1)
] `shouldEql` [(1, 1), (1, -1), (-1, -1), (-1, 1)],
convexHull [
(-1, 1), ( 1, 1),
( 2, 0),
(-1, -1), ( 1, -1)
] `shouldEql` [(1, 1), (2, 0), (1, -1), (-1, -1), (-1, 1)],
convexHull [
(-1, 1), ( 0, 1), ( 1, 1),
(-0.5, 0), ( 0, 0), (1.5, 0),
(-1, -1), ( 0, -1)
] `shouldEql` [(1, 1), (1.5, 0), (0, -1), (-1, -1), (-1, 1)],
convexHull [
(-2, 2), ( 2, 2),
(-1, 1), ( 0, 1), ( 1, 1),
(-1, 0), ( 0, 0), ( 1, 0),
(-1, -1), ( 0, -1), ( 1, -1),
(-2, -2), ( 2, -2)
] `shouldEql` [(2, 2), (2, -2), (-2, -2), (-2, 2)],
convexHull [
(-2, 2),
(-1, 1),
( 0, 0),
( 1, -1),
( 2, -2)
] `shouldEql` [],
convexHull [
( 0, 2),
(-1, 1), ( 1, 1),
(-1, -1), ( 1, -1),
( 0, -2)
] `shouldEql` [(0, 2), (1, 1), (1, -1), (0, -2), (-1, -1), (-1, 1)],
convexHull [
(-2, 0), ( 0, -0.5), ( 2, 0),
(-1, -1), ( 1, -1),
( 0, -2)
] `shouldEql` [(2, 0), (0, -2), (-2, 0)],
convexHull [
(-2, 0), ( 0, -0.5), ( 2, 0),
(-1, -1.5), ( 1, -1.5),
( 0, -2)
] `shouldEql` [(2, 0), (1, -1.5), (0, -2), (-1, -1.5), (-2, 0)],
convexHull [
( 0, 1),
(-2, 0), ( 0, -0.5), ( 2, 0),
(-1, -1), ( 1, -1),
( 0, -2),
( 0, -3)
] `shouldEql` [(0, 1), (2, 0), (0, -3), (-2, 0)],
"done"
]
| jhnns/haskell-experiments | test/PointsSpec.hs | unlicense | 3,532 | 0 | 12 | 1,762 | 1,743 | 1,096 | 647 | 91 | 1 |
findKey :: (Eq k) => k -> [(k, v)] -> Maybe v
findKey _ [] = Nothing
findKey key ((k, v):xs)
| key == k = Just v
| otherwise = findKey key xs
findKey' :: (Eq k) => k -> [(k, v)] -> Maybe v
findKey' _ [] = Nothing
findKey' key ((k, v):xs) = case key == k of
True -> Just v
False -> findKey' key xs
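-- Example use of either version (informal GHCi sketch):
--
-- >>> findKey "betty" [("betty", 1), ("bonnie", 2)]
-- Just 1
-- >>> findKey' "karen" [("betty", 1), ("bonnie", 2)]
-- Nothing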
| EricYT/Haskell | src/chapter-3-2.hs | apache-2.0 | 366 | 1 | 9 | 139 | 192 | 100 | 92 | 10 | 2 |
module FractalFlame.Variation.Types.VTransform where
import FractalFlame.Point.Types.CartesianPoint
import FractalFlame.Variation.Types.VarP
-- | Variation function (use FractalFlame.Variation.runVariation to build a VarP parameter)
type VTransform = VarP -> CartesianPoint
| anthezium/fractal_flame_renderer_haskell | FractalFlame/Variation/Types/VTransform.hs | bsd-2-clause | 277 | 0 | 5 | 27 | 33 | 23 | 10 | 4 | 0 |
{-# LANGUAGE MultiParamTypeClasses #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE DataKinds #-}
module Data.Interface.Module.Entity where
import Data.Interface.Change
import Data.Interface.Name
import Data.Interface.Type
import Data.Interface.Type.Diff
-- | A top-level exported entity in a module, without a name.
data Entity
= LocalValue ValueDecl -- ^ a value-namespace declaration
| LocalType TypeDecl -- ^ a type-namespace declaration
| ReExport ModuleName Namespace -- ^ module and namespace of declaration
deriving (Show, Eq, Ord)
data EntityDiff
= LocalValueDiff ValueDeclDiff -- ^ a value-namespace diff
| LocalTypeDiff TypeDeclDiff -- ^ a type-namespace diff
| EntityDiff (Change Entity) -- ^ none of the above
deriving (Show, Eq, Ord)
instance ToChange Entity EntityDiff where
toChange ediff = case ediff of
LocalValueDiff vd -> LocalValue <$> toChange vd
LocalTypeDiff td -> LocalType <$> toChange td
EntityDiff c -> c
instance Diff Entity EntityDiff where
noDiff e = case e of
LocalValue vd -> LocalValueDiff (noDiff vd)
LocalType td -> LocalTypeDiff (noDiff td)
ReExport{} -> EntityDiff (NoChange e)
diff a b = case (a,b) of
(LocalValue vd0, LocalValue vd1) -> LocalValueDiff (diff vd0 vd1)
(LocalType td0, LocalType td1) -> LocalTypeDiff (diff td0 td1)
_ -> EntityDiff (diff a b)
-- * ValueDecl
data ValueDecl = ValueDecl
{ vdType :: Type
, vdInfo :: ValueDeclInfo
} deriving (Show, Eq, Ord)
data ValueDeclInfo
= Identifier
| PatternSyn
| DataCon [DataField]
deriving (Show, Eq, Ord)
type DataField = Named ()
type instance Space ValueDecl = 'Values
instance HasNamespace ValueDecl where
namespace _ = Values
instance TraverseNames ValueDecl where
traverseNames f (ValueDecl t i) =
ValueDecl <$> traverseNames f t <*> traverseNames f i
instance TraverseNames ValueDeclInfo where
traverseNames f vdi = case vdi of
DataCon fields -> DataCon <$> traverse (traverseNames f) fields
_ -> pure vdi
data ValueDeclDiff = ValueDeclDiff
{ vdTypeDiff :: TypeDiff
, vdInfoDiff :: Change ValueDeclInfo
} deriving (Show, Eq, Ord)
instance ToChange ValueDecl ValueDeclDiff where
toChange (ValueDeclDiff t i) = ValueDecl <$> toChange t <*> toChange i
instance Diff ValueDecl ValueDeclDiff where
diff (ValueDecl ta ia) (ValueDecl tb ib) =
ValueDeclDiff (diff ta tb) (diff ia ib)
-- * TypeDecl
data TypeDecl = TypeDecl
{ tdKind :: Kind
, tdInfo :: TypeDeclInfo
} deriving (Show, Eq, Ord)
data TypeDeclInfo
= DataType DataConList -- ^ data/newtype
| TypeSyn String -- ^ type synonym (TODO)
| TypeClass -- ^ type class (TODO)
deriving (Show, Eq, Ord)
{- TypeDecl notes:
- TypeSyn contains its definition
(this is only a String for now, but will have to include
first-class type information)
TODO:
- type/data families
-}
type instance Space TypeDecl = 'Types
instance HasNamespace TypeDecl where
namespace _ = Types
instance TraverseNames TypeDecl where
traverseNames f (TypeDecl k i) =
TypeDecl <$> traverseNames f k <*> traverseNames f i
instance TraverseNames TypeDeclInfo where
traverseNames f tdi = case tdi of
DataType dcons -> DataType <$> traverseNames f dcons
_ -> pure tdi
-- | Data constructors for an algebraic type, or `Abstract` when the data
-- constructors are hidden.
data DataConList = Abstract | DataConList [RawName]
deriving (Show, Eq, Ord)
instance TraverseNames DataConList where
traverseNames f dcons = case dcons of
Abstract -> pure Abstract
DataConList ns -> DataConList <$> traverse (traverseNames f) ns
data TypeDeclDiff = TypeDeclDiff
{ tdKindDiff :: Change Kind
, tdInfoDiff :: Change TypeDeclInfo
} deriving (Show, Eq, Ord)
instance ToChange TypeDecl TypeDeclDiff where
toChange (TypeDeclDiff t i) = TypeDecl <$> toChange t <*> toChange i
instance Diff TypeDecl TypeDeclDiff where
diff (TypeDecl ka ia) (TypeDecl kb ib) =
TypeDeclDiff (diff ka kb) (diff ia ib)
| cdxr/haskell-interface | src/Data/Interface/Module/Entity.hs | bsd-3-clause | 4,273 | 0 | 12 | 1,064 | 1,118 | 583 | 535 | 95 | 0 |
{-# LANGUAGE MultiWayIf #-}
{-# LANGUAGE DeriveAnyClass #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE MultiParamTypeClasses #-}
{-# LANGUAGE TemplateHaskell #-}
{-|
The 'Message' is a single displayed event in a Channel. All Messages
have a date/time, and messages that represent posts to the channel
have a (hash) ID and displayable text, along with other attributes.
All Messages are sorted chronologically. There is no assumption that
the server date/time is synchronized with the local date/time, so all
of the Message ordering uses the server's date/time.
The mattermost-api retrieves a 'Post' from the server, briefly encodes
the useful portions of that as a 'ClientPost' object and then converts
it to a 'Message', inserting the result into the collection of
Messages associated with a Channel. The PostID of the message
uniquely identifies that message and can be used to interact with the
server for subsequent operations relative to that message's 'Post'.
The date/time associated with these messages is generated by the
server.
There are also "messages" generated directly by the Matterhorn client
which can be used to display additional, client-related information to
the user. Examples of these client messages are: date boundaries, the
"new messages" marker, errors from invoking the browser, etc. These
client-generated messages will have a date/time although it is locally
generated (usually by relation to an associated Post).
Most other Matterhorn operations are primarily concerned with
user-posted messages (@case mMessageId of Just _@ or @case mType of CP
_@), but others will include client-generated messages (@case mMessageId
of Nothing@ or @case mType of C _@).
--}
module Matterhorn.Types.Messages
( -- * Message and operations on a single Message
Message(..)
, isDeletable, isReplyable, isReactable, isEditable, isReplyTo, isGap, isFlaggable
, isPinnable, isEmote, isJoinLeave, isTransition, isNewMessagesTransition
, mText, mUser, mDate, mType, mPending, mDeleted, mPinned
, mAttachments, mInReplyToMsg, mMessageId, mReactions, mFlagged
, mOriginalPost, mChannelId, mMarkdownSource
, isBotMessage
, MessageType(..)
, MessageId(..)
, ThreadState(..)
, MentionedUser(..)
, isPostMessage
, messagePostId
, messageIdPostId
, UserRef(..)
, ReplyState(..)
, clientMessageToMessage
, clientPostToMessage
, clientPostReactionUserIds
, newMessageOfType
-- * Message Collections
, Messages
, ChronologicalMessages
, RetrogradeMessages
, MessageOps (..)
, noMessages
, messagesLength
, filterMessages
, reverseMessages
, unreverseMessages
, splitMessages
, splitDirSeqOn
, chronologicalMsgsWithThreadStates
, retrogradeMsgsWithThreadStates
, findMessage
, getRelMessageId
, messagesHead
, messagesDrop
, getNextMessage
, getPrevMessage
, getNextMessageId
, getPrevMessageId
, getNextPostId
, getPrevPostId
, getEarliestPostMsg
, getLatestPostMsg
, getEarliestSelectableMessage
, getLatestSelectableMessage
, findLatestUserMessage
-- * Operations on any Message type
, messagesAfter
, removeMatchesFromSubset
, withFirstMessage
, msgURLs
, LinkTarget(..)
, LinkChoice(LinkChoice, _linkTarget)
, linkUser
, linkTarget
, linkTime
, linkLabel
)
where
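-- A quick orientation for the distinction drawn in the header comment
-- (informal, using only names visible in this module):
--
--   * a post fetched from the server has
--     _mMessageId == Just (MessagePostId pid) and _mType == CP NormalPost,
--     so 'isPostMessage' is True for it;
--   * a client-generated marker such as a date boundary has
--     _mMessageId == Nothing and _mType == C DateTransition,
--     so post-only operations skip it.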
import Prelude ()
import Matterhorn.Prelude
import Control.Monad
import qualified Data.Foldable as F
import Data.Hashable ( Hashable )
import qualified Data.Map.Strict as Map
import Data.Sequence as Seq
import qualified Data.Set as S
import Data.Tuple
import Data.UUID ( UUID )
import GHC.Generics ( Generic )
import Lens.Micro.Platform ( makeLenses )
import Network.Mattermost.Types ( ChannelId, PostId, Post
, ServerTime, UserId, FileId
)
import Matterhorn.Types.DirectionalSeq
import Matterhorn.Types.Posts
import Matterhorn.Types.RichText
-- | The state of a message's thread context.
data ThreadState =
NoThread
-- ^ The message is not in a thread at all.
| InThreadShowParent
-- ^ The message is in a thread, and the thread's root message
-- (parent) should be displayed above this message.
| InThread
-- ^ The message is in a thread but the thread's root message should
-- not be displayed above this message.
deriving (Show, Eq)
-- ----------------------------------------------------------------------
-- * Messages
data MessageId = MessagePostId PostId
| MessageUUID UUID
deriving (Eq, Read, Ord, Show, Generic, Hashable)
messageIdPostId :: MessageId -> Maybe PostId
messageIdPostId (MessagePostId p) = Just p
messageIdPostId _ = Nothing
-- | A 'Message' is any message we might want to render, either from
-- Mattermost itself or from a client-internal source.
data Message = Message
{ _mText :: Blocks
, _mMarkdownSource :: Text
, _mUser :: UserRef
, _mDate :: ServerTime
, _mType :: MessageType
, _mPending :: Bool
, _mDeleted :: Bool
, _mAttachments :: Seq Attachment
, _mInReplyToMsg :: ReplyState
, _mMessageId :: Maybe MessageId
, _mReactions :: Map.Map Text (S.Set UserId)
, _mOriginalPost :: Maybe Post
, _mFlagged :: Bool
, _mPinned :: Bool
, _mChannelId :: Maybe ChannelId
} deriving (Show)
isPostMessage :: Message -> Bool
isPostMessage m =
isJust (_mMessageId m >>= messageIdPostId)
messagePostId :: Message -> Maybe PostId
messagePostId m = do
mId <- _mMessageId m
messageIdPostId mId
isDeletable :: Message -> Bool
isDeletable m =
isJust (messagePostId m) &&
case _mType m of
CP NormalPost -> True
CP Emote -> True
_ -> False
isFlaggable :: Message -> Bool
isFlaggable = isJust . messagePostId
isPinnable :: Message -> Bool
isPinnable = isJust . messagePostId
isReplyable :: Message -> Bool
isReplyable m =
isJust (messagePostId m) &&
case _mType m of
CP NormalPost -> True
CP Emote -> True
_ -> False
isReactable :: Message -> Bool
isReactable m =
isJust (messagePostId m) &&
case _mType m of
CP NormalPost -> True
CP Emote -> True
_ -> False
isEditable :: Message -> Bool
isEditable m =
isJust (messagePostId m) &&
case _mType m of
CP NormalPost -> True
CP Emote -> True
_ -> False
isReplyTo :: PostId -> Message -> Bool
isReplyTo expectedParentId m =
case _mInReplyToMsg m of
NotAReply -> False
InReplyTo actualParentId -> actualParentId == expectedParentId
isGap :: Message -> Bool
isGap m = case _mType m of
C UnknownGapBefore -> True
C UnknownGapAfter -> True
_ -> False
isTransition :: Message -> Bool
isTransition m = case _mType m of
C DateTransition -> True
C NewMessagesTransition -> True
_ -> False
isNewMessagesTransition :: Message -> Bool
isNewMessagesTransition m = case _mType m of
C NewMessagesTransition -> True
_ -> False
isEmote :: Message -> Bool
isEmote m = case _mType m of
CP Emote -> True
_ -> False
isJoinLeave :: Message -> Bool
isJoinLeave m = case _mType m of
CP Join -> True
CP Leave -> True
_ -> False
-- | A 'Message' is the representation we use for storage and
-- rendering, so it must be able to represent either a
-- post from Mattermost or an internal message. This represents
-- the union of both kinds of post types.
data MessageType = C ClientMessageType
| CP ClientPostType
deriving (Show)
-- | There may be no user (usually an internal message), a reference to
-- a user (by Id), or the server may have supplied a specific username
-- (often associated with bots). The boolean flag indicates whether the
-- user reference is for a message from a bot.
data UserRef = NoUser | UserI Bool UserId | UserOverride Bool Text
deriving (Eq, Show, Ord)
isBotMessage :: Message -> Bool
isBotMessage m =
case _mUser m of
UserI bot _ -> bot
UserOverride bot _ -> bot
NoUser -> False
-- | The 'ReplyState' of a message represents whether a message
-- is a reply, and if so, to what message
data ReplyState =
NotAReply
| InReplyTo PostId
deriving (Show, Eq)
data LinkTarget =
LinkURL URL
| LinkFileId FileId
| LinkPermalink TeamURLName PostId
deriving (Eq, Show, Ord)
-- | This type represents links to things in the 'open links' view.
data LinkChoice =
LinkChoice { _linkTime :: ServerTime
, _linkUser :: UserRef
, _linkLabel :: Maybe Inlines
, _linkTarget :: LinkTarget
} deriving (Eq, Show)
makeLenses ''LinkChoice
-- | Convert a 'ClientMessage' to a 'Message'. A 'ClientMessage' is
-- one that was generated by the Matterhorn client and which the
-- server knows nothing about. For example, an error message
-- associated with passing a link to the local browser.
clientMessageToMessage :: ClientMessage -> Message
clientMessageToMessage cm = Message
{ _mText = parseMarkdown Nothing (cm^.cmText)
, _mMarkdownSource = cm^.cmText
, _mUser = NoUser
, _mDate = cm^.cmDate
, _mType = C $ cm^.cmType
, _mPending = False
, _mDeleted = False
, _mAttachments = Seq.empty
, _mInReplyToMsg = NotAReply
, _mMessageId = Nothing
, _mReactions = Map.empty
, _mOriginalPost = Nothing
, _mFlagged = False
, _mPinned = False
, _mChannelId = Nothing
}
data MentionedUser =
UsernameMention Text
| UserIdMention UserId
deriving (Eq, Show, Ord)
clientPostReactionUserIds :: ClientPost -> S.Set UserId
clientPostReactionUserIds cp =
S.unions $ F.toList $ cp^.cpReactions
-- | Builds a message from a ClientPost and also returns the set of
-- usernames mentioned in the text of the message.
clientPostToMessage :: ClientPost -> (Message, S.Set MentionedUser)
clientPostToMessage cp = (m, mentions)
where
mentions =
S.fromList $
(UsernameMention <$> (F.toList $ findUsernames $ cp^.cpText)) <>
(UserIdMention <$> (F.toList $ clientPostReactionUserIds cp))
m = Message { _mText = cp^.cpText
, _mMarkdownSource = cp^.cpMarkdownSource
, _mUser =
case cp^.cpUserOverride of
Just n | cp^.cpType == NormalPost -> UserOverride (cp^.cpFromWebhook) n
_ -> maybe NoUser (UserI (cp^.cpFromWebhook)) $ cp^.cpUser
, _mDate = cp^.cpDate
, _mType = CP $ cp^.cpType
, _mPending = cp^.cpPending
, _mDeleted = cp^.cpDeleted
, _mAttachments = cp^.cpAttachments
, _mInReplyToMsg =
case cp^.cpInReplyToPost of
Nothing -> NotAReply
Just pId -> InReplyTo pId
, _mMessageId = Just $ MessagePostId $ cp^.cpPostId
, _mReactions = cp^.cpReactions
, _mOriginalPost = Just $ cp^.cpOriginalPost
, _mFlagged = False
, _mPinned = cp^.cpPinned
, _mChannelId = Just $ cp^.cpChannelId
}
newMessageOfType :: Text -> MessageType -> ServerTime -> Message
newMessageOfType text typ d = Message
{ _mText = parseMarkdown Nothing text
, _mMarkdownSource = text
, _mUser = NoUser
, _mDate = d
, _mType = typ
, _mPending = False
, _mDeleted = False
, _mAttachments = Seq.empty
, _mInReplyToMsg = NotAReply
, _mMessageId = Nothing
, _mReactions = Map.empty
, _mOriginalPost = Nothing
, _mFlagged = False
, _mPinned = False
, _mChannelId = Nothing
}
-- ** 'Message' Lenses
makeLenses ''Message
-- ----------------------------------------------------------------------
-- * Message Collections
-- | A wrapper for an ordered, unique list of 'Message' values.
--
-- This type has (and promises) the following instances: Show,
-- Functor, Monoid, Foldable, Traversable
type ChronologicalMessages = DirectionalSeq Chronological Message
type Messages = ChronologicalMessages
-- | There are also cases where the list of 'Message' values is kept
-- in reverse order (most recent -> oldest); these cases are
-- represented by the `RetrogradeMessages` type.
type RetrogradeMessages = DirectionalSeq Retrograde Message
-- ** Common operations on Messages
filterMessages :: SeqDirection seq
=> (a -> Bool)
-> DirectionalSeq seq a
-> DirectionalSeq seq a
filterMessages f = onDirectedSeq (Seq.filter f)
class MessageOps a where
    -- | addMessage inserts a message in proper chronological order, with
-- the following extra functionality:
-- * no duplication (by PostId)
-- * no duplication (adjacent UnknownGap entries)
addMessage :: Message -> a -> a
instance MessageOps ChronologicalMessages where
addMessage m ml =
case viewr (dseq ml) of
EmptyR -> DSeq $ singleton m
_ :> l ->
case compare (m^.mDate) (l^.mDate) of
GT -> DSeq $ dseq ml |> m
EQ -> if m^.mMessageId == l^.mMessageId && isJust (m^.mMessageId)
then ml
else dirDateInsert m ml
LT -> dirDateInsert m ml
dirDateInsert :: Message -> ChronologicalMessages -> ChronologicalMessages
dirDateInsert m = onDirectedSeq $ finalize . foldr insAfter initial
where initial = (Just m, mempty)
insAfter c (Nothing, l) = (Nothing, c <| l)
insAfter c (Just n, l) =
case compare (n^.mDate) (c^.mDate) of
GT -> (Nothing, c <| (n <| l))
EQ -> if n^.mMessageId == c^.mMessageId && isJust (c^.mMessageId)
then (Nothing, c <| l)
else (Just n, c <| l)
LT -> (Just n, c <| l)
finalize (Just n, l) = n <| l
finalize (_, l) = l
noMessages :: Messages
noMessages = DSeq mempty
messagesLength :: DirectionalSeq seq a -> Int
messagesLength (DSeq ms) = Seq.length ms
-- | Reverse the order of the messages
reverseMessages :: Messages -> RetrogradeMessages
reverseMessages = DSeq . Seq.reverse . dseq
-- | Unreverse the order of the messages
unreverseMessages :: RetrogradeMessages -> Messages
unreverseMessages = DSeq . Seq.reverse . dseq
splitDirSeqOn :: SeqDirection d
=> (a -> Bool)
-> DirectionalSeq d a
-> (Maybe a, (DirectionalSeq (ReverseDirection d) a,
DirectionalSeq d a))
splitDirSeqOn f msgs =
let (removed, remaining) = dirSeqBreakl f msgs
devomer = DSeq $ Seq.reverse $ dseq removed
in (withDirSeqHead id remaining, (devomer, onDirectedSeq (Seq.drop 1) remaining))
-- ----------------------------------------------------------------------
-- * Operations on Posted Messages
-- | Searches for the specified MessageId and returns a tuple where the
-- first element is the Message associated with the MessageId (if it
-- exists), and the second element is another tuple: the first element
-- of the second is all the messages from the beginning of the list to
-- the message just before the MessageId message (or all messages if not
-- found) *in reverse order*, and the second element of the second are
-- all the messages that follow the found message (none if the message
-- was never found) in *forward* order.
splitMessages :: Maybe MessageId
-> DirectionalSeq Chronological (Message, ThreadState)
-> (Maybe (Message, ThreadState),
( DirectionalSeq Retrograde (Message, ThreadState),
DirectionalSeq Chronological (Message, ThreadState)))
splitMessages mid msgs = splitDirSeqOn (\(m, _) -> isJust mid && m^.mMessageId == mid) msgs
-- | Given a message and its chronological predecessor, return
-- the thread state of the specified message with respect to its
-- predecessor.
threadStateFor :: Message
-- ^ The message whose state is to be obtained.
-> Message
-- ^ The message's predecessor.
-> ThreadState
threadStateFor msg prev = case msg^.mInReplyToMsg of
InReplyTo rootId ->
if | (prev^.mMessageId) == Just (MessagePostId rootId) ->
InThread
| prev^.mInReplyToMsg == msg^.mInReplyToMsg ->
InThread
| otherwise ->
InThreadShowParent
_ -> NoThread
retrogradeMsgsWithThreadStates :: RetrogradeMessages -> DirectionalSeq Retrograde (Message, ThreadState)
retrogradeMsgsWithThreadStates msgs = DSeq $ checkAdjacentMessages (dseq msgs)
where
getMessagePredecessor ms =
let visiblePredMsg m = not (isTransition m || m^.mDeleted) in
case Seq.viewl ms of
prev Seq.:< rest ->
if visiblePredMsg prev
then Just prev
else getMessagePredecessor rest
Seq.EmptyL -> Nothing
checkAdjacentMessages s = case Seq.viewl s of
Seq.EmptyL -> mempty
m Seq.:< t ->
let new_m = case getMessagePredecessor t of
Just prev -> (m, threadStateFor m prev)
Nothing -> case m^.mInReplyToMsg of
InReplyTo _ -> (m, InThreadShowParent)
_ -> (m, NoThread)
in new_m Seq.<| checkAdjacentMessages t
chronologicalMsgsWithThreadStates :: Messages -> DirectionalSeq Chronological (Message, ThreadState)
chronologicalMsgsWithThreadStates msgs = DSeq $ checkAdjacentMessages (dseq msgs)
where
getMessagePredecessor ms =
let visiblePredMsg m = not (isTransition m || m^.mDeleted) in
case Seq.viewr ms of
rest Seq.:> prev ->
if visiblePredMsg prev
then Just prev
else getMessagePredecessor rest
Seq.EmptyR -> Nothing
checkAdjacentMessages s = case Seq.viewr s of
Seq.EmptyR -> mempty
t Seq.:> m ->
let new_m = case getMessagePredecessor t of
Just prev -> (m, threadStateFor m prev)
Nothing -> case m^.mInReplyToMsg of
InReplyTo _ -> (m, InThreadShowParent)
_ -> (m, NoThread)
in checkAdjacentMessages t Seq.|> new_m
-- | findMessage searches for a specific message as identified by the
-- MessageId. The search starts from the most recent messages because
-- that is the most likely place the message will occur.
findMessage :: MessageId -> Messages -> Maybe Message
findMessage mid msgs =
findIndexR (\m -> m^.mMessageId == Just mid) (dseq msgs)
>>= Just . Seq.index (dseq msgs)
-- | Look forward for the first Message with an ID that follows the
-- specified Id and return it. If no input Id supplied, get the
-- latest (most recent chronologically) Message in the input set.
getNextMessage :: Maybe MessageId -> Messages -> Maybe Message
getNextMessage = getRelMessageId
-- | Look backward for the first Message with an ID that comes before
-- the specified MessageId and return it. If no input MessageId supplied,
-- get the latest (most recent chronologically) Message in the input
-- set.
getPrevMessage :: Maybe MessageId -> Messages -> Maybe Message
getPrevMessage mId = getRelMessageId mId . reverseMessages
messagesHead :: (SeqDirection seq) => DirectionalSeq seq a -> Maybe a
messagesHead = withDirSeqHead id
messagesDrop :: (SeqDirection seq) => Int -> DirectionalSeq seq a -> DirectionalSeq seq a
messagesDrop i = onDirectedSeq (Seq.drop i)
-- | Look forward for the first Message with an ID that follows the
-- specified MessageId and return that found Message's ID; if no input
-- MessageId is specified, return the latest (most recent
-- chronologically) MessageId (if any) in the input set.
getNextMessageId :: Maybe MessageId -> Messages -> Maybe MessageId
getNextMessageId mId = _mMessageId <=< getNextMessage mId
-- | Look backwards for the first Message with an ID that comes before
-- the specified MessageId and return that found Message's ID; if no
-- input MessageId is specified, return the latest (most recent
-- chronologically) MessageId (if any) in the input set.
getPrevMessageId :: Maybe MessageId -> Messages -> Maybe MessageId
getPrevMessageId mId = _mMessageId <=< getPrevMessage mId
-- | Look forward for the first Message with an ID that follows the
-- specified PostId and return that found Message's PostID; if no
-- input PostId is specified, return the latest (most recent
-- chronologically) PostId (if any) in the input set.
getNextPostId :: Maybe PostId -> Messages -> Maybe PostId
getNextPostId pid = messagePostId <=< getNextMessage (MessagePostId <$> pid)
-- | Look backwards for the first Post with an ID that comes before
-- the specified PostId.
getPrevPostId :: Maybe PostId -> Messages -> Maybe PostId
getPrevPostId pid = messagePostId <=< getPrevMessage (MessagePostId <$> pid)
getRelMessageId :: SeqDirection dir =>
Maybe MessageId
-> DirectionalSeq dir Message
-> Maybe Message
getRelMessageId mId =
let isMId = const ((==) mId . _mMessageId) <$> mId
in getRelMessage isMId
-- | Internal worker function to return a different user message in
-- relation to either the latest point or a specific message.
getRelMessage :: SeqDirection dir =>
Maybe (Message -> Bool)
-> DirectionalSeq dir Message
-> Maybe Message
getRelMessage matcher msgs =
let after = case matcher of
Just matchFun -> case splitDirSeqOn matchFun msgs of
(_, (_, ms)) -> ms
Nothing -> msgs
in withDirSeqHead id $ filterMessages validSelectableMessage after
-- | Find the most recent message that is a Post (as opposed to a
-- local message) (if any).
getLatestPostMsg :: Messages -> Maybe Message
getLatestPostMsg msgs =
case viewr $ dropWhileR (not . validUserMessage) (dseq msgs) of
EmptyR -> Nothing
_ :> m -> Just m
-- | Find the oldest message that is a message with an ID.
getEarliestSelectableMessage :: Messages -> Maybe Message
getEarliestSelectableMessage msgs =
case viewl $ dropWhileL (not . validSelectableMessage) (dseq msgs) of
EmptyL -> Nothing
m :< _ -> Just m
-- | Find the most recent message that is a message with an ID.
getLatestSelectableMessage :: Messages -> Maybe Message
getLatestSelectableMessage msgs =
case viewr $ dropWhileR (not . validSelectableMessage) (dseq msgs) of
EmptyR -> Nothing
_ :> m -> Just m
-- | Find the earliest message that is a Post (as opposed to a
-- local message) (if any).
getEarliestPostMsg :: Messages -> Maybe Message
getEarliestPostMsg msgs =
case viewl $ dropWhileL (not . validUserMessage) (dseq msgs) of
EmptyL -> Nothing
m :< _ -> Just m
-- | Find the most recent message that is a message posted by a user
-- that matches the test (if any), skipping local client messages and
-- any user event that is not a message (i.e. find a normal message or
-- an emote).
findLatestUserMessage :: (Message -> Bool) -> Messages -> Maybe Message
findLatestUserMessage f ml =
case viewr $ dropWhileR (\m -> not (validUserMessage m && f m)) $ dseq ml of
EmptyR -> Nothing
_ :> m -> Just m
validUserMessage :: Message -> Bool
validUserMessage m =
not (m^.mDeleted) && case m^.mMessageId of
Just (MessagePostId _) -> True
_ -> False
validSelectableMessage :: Message -> Bool
validSelectableMessage m = (not $ m^.mDeleted) && (isJust $ m^.mMessageId)
-- ----------------------------------------------------------------------
-- * Operations on any Message type
-- | Return all messages that were posted after the specified date/time.
messagesAfter :: ServerTime -> Messages -> Messages
messagesAfter viewTime = onDirectedSeq $ takeWhileR (\m -> m^.mDate > viewTime)
-- | Removes any Messages (all types) for which the predicate is true
-- from the specified subset of messages (identified by a starting and
-- ending MessageId, inclusive) and returns the resulting list (from
-- start to finish, irrespective of 'firstId' and 'lastId') and the
-- list of removed items.
--
-- start | end | operates-on | (test) case
-- --------------------------------------------------------|-------------
-- Nothing | Nothing | entire list | C1
-- Nothing | Just found | start --> found] | C2
-- Nothing | Just missing | nothing [suggest invalid] | C3
-- Just found | Nothing | [found --> end | C4
-- Just found | Just found | [found --> found] | C5
-- Just found | Just missing | [found --> end | C6
-- Just missing | Nothing | nothing [suggest invalid] | C7
-- Just missing | Just found | start --> found] | C8
-- Just missing | Just missing | nothing [suggest invalid] | C9
--
-- @removeMatchesFromSubset matchPred fromId toId msgs = (remaining, removed)@
--
removeMatchesFromSubset :: (Message -> Bool) -> Maybe MessageId -> Maybe MessageId
-> Messages -> (Messages, Messages)
removeMatchesFromSubset matching firstId lastId msgs =
let knownIds = fmap (^.mMessageId) msgs
in if isNothing firstId && isNothing lastId
then swap $ dirSeqPartition matching msgs
else if isJust firstId && firstId `elem` knownIds
then onDirSeqSubset
(\m -> m^.mMessageId == firstId)
(if isJust lastId then \m -> m^.mMessageId == lastId else const False)
(swap . dirSeqPartition matching) msgs
else if isJust lastId && lastId `elem` knownIds
then onDirSeqSubset
(const True)
(\m -> m^.mMessageId == lastId)
(swap . dirSeqPartition matching) msgs
else (msgs, noMessages)
-- | Performs an operation on the first Message, returning just the
-- result of that operation, or Nothing if there were no messages.
-- Note that the message is not necessarily a posted user message.
withFirstMessage :: SeqDirection dir
=> (Message -> r)
-> DirectionalSeq dir Message
-> Maybe r
withFirstMessage = withDirSeqHead
msgURLs :: Message -> Seq LinkChoice
msgURLs msg =
let uRef = msg^.mUser
mkTarget (Right url) = LinkURL url
mkTarget (Left (tName, pId)) = LinkPermalink tName pId
mkEntry (val, text) = LinkChoice (msg^.mDate) uRef text (mkTarget val)
msgUrls = mkEntry <$> (Seq.fromList $ mconcat $ blockGetURLs <$> (F.toList $ unBlocks $ msg^.mText))
attachmentURLs = (\ a ->
LinkChoice
(msg^.mDate)
uRef
(Just $ attachmentLabel a)
(LinkFileId $ a^.attachmentFileId))
<$> (msg^.mAttachments)
attachmentLabel a =
Inlines $ Seq.fromList [ EText "attachment"
, ESpace
, ECode $ Inlines $ Seq.singleton $ EText $ a^.attachmentName
]
in msgUrls <> attachmentURLs
| matterhorn-chat/matterhorn | src/Matterhorn/Types/Messages.hs | bsd-3-clause | 28,203 | 0 | 20 | 7,952 | 5,443 | 2,939 | 2,504 | 496 | 6 |
module Seventeen where
import Data.List
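-- | 'permute goal tried remaining' enumerates every subsequence of
-- 'remaining' that, together with the already-chosen 'tried', sums exactly
-- to 'goal' (assuming positive bucket sizes); 'seventeen' uses it to count
-- how many combinations of the buckets in input/17.txt sum to exactly 150.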
permute :: Int -> [Int] -> [Int] -> [[Int]]
permute goal tried []
| goal == sum tried = [tried]
| otherwise = []
permute goal tried remaining
| goal == sum tried = [tried]
| otherwise = concatMap (permute2 goal tried) (sublists remaining)
permute2 :: Int -> [Int] -> [Int] -> [[Int]]
permute2 _ _ [] = []
permute2 goal tried (x:xs)
| x + sum tried <= goal = permute goal (tried ++ [x]) xs
| otherwise = []
sublists :: [Int] -> [[Int]]
sublists = filter (not . null) . tails
seventeen :: IO Int
seventeen = do
input <- readFile "input/17.txt"
let buckets = map read $ lines input
in return . length $ permute 150 [] buckets
| purcell/adventofcodeteam | app/Seventeen.hs | bsd-3-clause | 705 | 0 | 12 | 169 | 345 | 174 | 171 | 21 | 1 |
{-# language CPP #-}
-- | = Name
--
-- VK_AMD_shader_core_properties2 - device extension
--
-- == VK_AMD_shader_core_properties2
--
-- [__Name String__]
-- @VK_AMD_shader_core_properties2@
--
-- [__Extension Type__]
-- Device extension
--
-- [__Registered Extension Number__]
-- 228
--
-- [__Revision__]
-- 1
--
-- [__Extension and Version Dependencies__]
--
-- - Requires Vulkan 1.0
--
-- - Requires @VK_AMD_shader_core_properties@
--
-- [__Contact__]
--
-- - Matthaeus G. Chajdas
-- <https://github.com/KhronosGroup/Vulkan-Docs/issues/new?body=[VK_AMD_shader_core_properties2] @anteru%0A<<Here describe the issue or question you have about the VK_AMD_shader_core_properties2 extension>> >
--
-- == Other Extension Metadata
--
-- [__Last Modified Date__]
-- 2019-07-26
--
-- [__IP Status__]
-- No known IP claims.
--
-- [__Contributors__]
--
-- - Matthaeus G. Chajdas, AMD
--
-- - Tobias Hector, AMD
--
-- == Description
--
-- This extension exposes additional shader core properties for a target
-- physical device through the @VK_KHR_get_physical_device_properties2@
-- extension.
--
-- == New Structures
--
-- - Extending
-- 'Vulkan.Core11.Promoted_From_VK_KHR_get_physical_device_properties2.PhysicalDeviceProperties2':
--
-- - 'PhysicalDeviceShaderCoreProperties2AMD'
--
-- == New Enums
--
-- - 'ShaderCorePropertiesFlagBitsAMD'
--
-- == New Bitmasks
--
-- - 'ShaderCorePropertiesFlagsAMD'
--
-- == New Enum Constants
--
-- - 'AMD_SHADER_CORE_PROPERTIES_2_EXTENSION_NAME'
--
-- - 'AMD_SHADER_CORE_PROPERTIES_2_SPEC_VERSION'
--
-- - Extending 'Vulkan.Core10.Enums.StructureType.StructureType':
--
-- - 'Vulkan.Core10.Enums.StructureType.STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_CORE_PROPERTIES_2_AMD'
--
-- == Examples
--
-- None.
--
-- == Version History
--
-- - Revision 1, 2019-07-26 (Matthaeus G. Chajdas)
--
-- - Initial draft.
--
-- == See Also
--
-- 'PhysicalDeviceShaderCoreProperties2AMD',
-- 'ShaderCorePropertiesFlagBitsAMD', 'ShaderCorePropertiesFlagsAMD'
--
-- == Document Notes
--
-- For more information, see the
-- <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/html/vkspec.html#VK_AMD_shader_core_properties2 Vulkan Specification>
--
-- This page is a generated document. Fixes and changes should be made to
-- the generator scripts, not directly.
module Vulkan.Extensions.VK_AMD_shader_core_properties2 ( PhysicalDeviceShaderCoreProperties2AMD(..)
, ShaderCorePropertiesFlagsAMD
, ShaderCorePropertiesFlagBitsAMD(..)
, AMD_SHADER_CORE_PROPERTIES_2_SPEC_VERSION
, pattern AMD_SHADER_CORE_PROPERTIES_2_SPEC_VERSION
, AMD_SHADER_CORE_PROPERTIES_2_EXTENSION_NAME
, pattern AMD_SHADER_CORE_PROPERTIES_2_EXTENSION_NAME
) where
import Vulkan.Internal.Utils (enumReadPrec)
import Vulkan.Internal.Utils (enumShowsPrec)
import Foreign.Marshal.Alloc (allocaBytes)
import Foreign.Ptr (nullPtr)
import Foreign.Ptr (plusPtr)
import GHC.Show (showString)
import Numeric (showHex)
import Vulkan.CStruct (FromCStruct)
import Vulkan.CStruct (FromCStruct(..))
import Vulkan.CStruct (ToCStruct)
import Vulkan.CStruct (ToCStruct(..))
import Vulkan.Zero (Zero)
import Vulkan.Zero (Zero(..))
import Data.Bits (Bits)
import Data.Bits (FiniteBits)
import Data.String (IsString)
import Data.Typeable (Typeable)
import Foreign.Storable (Storable)
import Foreign.Storable (Storable(peek))
import Foreign.Storable (Storable(poke))
import qualified Foreign.Storable (Storable(..))
import GHC.Generics (Generic)
import Foreign.Ptr (Ptr)
import GHC.Read (Read(readPrec))
import GHC.Show (Show(showsPrec))
import Data.Word (Word32)
import Data.Kind (Type)
import Vulkan.Core10.FundamentalTypes (Flags)
import Vulkan.Core10.Enums.StructureType (StructureType)
import Vulkan.Core10.Enums.StructureType (StructureType(STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_CORE_PROPERTIES_2_AMD))
-- | VkPhysicalDeviceShaderCoreProperties2AMD - Structure describing shader
-- core properties that can be supported by an implementation
--
-- = Description
--
-- If the 'PhysicalDeviceShaderCoreProperties2AMD' structure is included in
-- the @pNext@ chain of the
-- 'Vulkan.Core11.Promoted_From_VK_KHR_get_physical_device_properties2.PhysicalDeviceProperties2'
-- structure passed to
-- 'Vulkan.Core11.Promoted_From_VK_KHR_get_physical_device_properties2.getPhysicalDeviceProperties2',
-- it is filled in with each corresponding implementation-dependent
-- property.
--
-- == Valid Usage (Implicit)
--
-- = See Also
--
-- <https://www.khronos.org/registry/vulkan/specs/1.2-extensions/html/vkspec.html#VK_AMD_shader_core_properties2 VK_AMD_shader_core_properties2>,
-- 'ShaderCorePropertiesFlagsAMD',
-- 'Vulkan.Core10.Enums.StructureType.StructureType'
data PhysicalDeviceShaderCoreProperties2AMD = PhysicalDeviceShaderCoreProperties2AMD
{ -- | #features-shaderCoreFeatures# @shaderCoreFeatures@ is a bitmask of
-- 'ShaderCorePropertiesFlagBitsAMD' indicating the set of features
-- supported by the shader core.
shaderCoreFeatures :: ShaderCorePropertiesFlagsAMD
, -- | #limits-activeComputeUnitCount# @activeComputeUnitCount@ is an unsigned
-- integer value indicating the number of compute units that have been
-- enabled.
activeComputeUnitCount :: Word32
}
deriving (Typeable, Eq)
#if defined(GENERIC_INSTANCES)
deriving instance Generic (PhysicalDeviceShaderCoreProperties2AMD)
#endif
deriving instance Show PhysicalDeviceShaderCoreProperties2AMD
instance ToCStruct PhysicalDeviceShaderCoreProperties2AMD where
withCStruct x f = allocaBytes 24 $ \p -> pokeCStruct p x (f p)
pokeCStruct p PhysicalDeviceShaderCoreProperties2AMD{..} f = do
poke ((p `plusPtr` 0 :: Ptr StructureType)) (STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_CORE_PROPERTIES_2_AMD)
poke ((p `plusPtr` 8 :: Ptr (Ptr ()))) (nullPtr)
poke ((p `plusPtr` 16 :: Ptr ShaderCorePropertiesFlagsAMD)) (shaderCoreFeatures)
poke ((p `plusPtr` 20 :: Ptr Word32)) (activeComputeUnitCount)
f
cStructSize = 24
cStructAlignment = 8
pokeZeroCStruct p f = do
poke ((p `plusPtr` 0 :: Ptr StructureType)) (STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_CORE_PROPERTIES_2_AMD)
poke ((p `plusPtr` 8 :: Ptr (Ptr ()))) (nullPtr)
poke ((p `plusPtr` 16 :: Ptr ShaderCorePropertiesFlagsAMD)) (zero)
poke ((p `plusPtr` 20 :: Ptr Word32)) (zero)
f
instance FromCStruct PhysicalDeviceShaderCoreProperties2AMD where
peekCStruct p = do
shaderCoreFeatures <- peek @ShaderCorePropertiesFlagsAMD ((p `plusPtr` 16 :: Ptr ShaderCorePropertiesFlagsAMD))
activeComputeUnitCount <- peek @Word32 ((p `plusPtr` 20 :: Ptr Word32))
pure $ PhysicalDeviceShaderCoreProperties2AMD
shaderCoreFeatures activeComputeUnitCount
instance Storable PhysicalDeviceShaderCoreProperties2AMD where
sizeOf ~_ = 24
alignment ~_ = 8
peek = peekCStruct
poke ptr poked = pokeCStruct ptr poked (pure ())
instance Zero PhysicalDeviceShaderCoreProperties2AMD where
zero = PhysicalDeviceShaderCoreProperties2AMD
zero
zero
type ShaderCorePropertiesFlagsAMD = ShaderCorePropertiesFlagBitsAMD
-- | VkShaderCorePropertiesFlagBitsAMD - Bitmask specifying shader core
-- properties
--
-- = See Also
--
-- <https://www.khronos.org/registry/vulkan/specs/1.2-extensions/html/vkspec.html#VK_AMD_shader_core_properties2 VK_AMD_shader_core_properties2>,
-- 'PhysicalDeviceShaderCoreProperties2AMD', 'ShaderCorePropertiesFlagsAMD'
newtype ShaderCorePropertiesFlagBitsAMD = ShaderCorePropertiesFlagBitsAMD Flags
deriving newtype (Eq, Ord, Storable, Zero, Bits, FiniteBits)
conNameShaderCorePropertiesFlagBitsAMD :: String
conNameShaderCorePropertiesFlagBitsAMD = "ShaderCorePropertiesFlagBitsAMD"
enumPrefixShaderCorePropertiesFlagBitsAMD :: String
enumPrefixShaderCorePropertiesFlagBitsAMD = ""
showTableShaderCorePropertiesFlagBitsAMD :: [(ShaderCorePropertiesFlagBitsAMD, String)]
showTableShaderCorePropertiesFlagBitsAMD = []
instance Show ShaderCorePropertiesFlagBitsAMD where
showsPrec = enumShowsPrec enumPrefixShaderCorePropertiesFlagBitsAMD
showTableShaderCorePropertiesFlagBitsAMD
conNameShaderCorePropertiesFlagBitsAMD
(\(ShaderCorePropertiesFlagBitsAMD x) -> x)
(\x -> showString "0x" . showHex x)
instance Read ShaderCorePropertiesFlagBitsAMD where
readPrec = enumReadPrec enumPrefixShaderCorePropertiesFlagBitsAMD
showTableShaderCorePropertiesFlagBitsAMD
conNameShaderCorePropertiesFlagBitsAMD
ShaderCorePropertiesFlagBitsAMD
type AMD_SHADER_CORE_PROPERTIES_2_SPEC_VERSION = 1
-- No documentation found for TopLevel "VK_AMD_SHADER_CORE_PROPERTIES_2_SPEC_VERSION"
pattern AMD_SHADER_CORE_PROPERTIES_2_SPEC_VERSION :: forall a . Integral a => a
pattern AMD_SHADER_CORE_PROPERTIES_2_SPEC_VERSION = 1
type AMD_SHADER_CORE_PROPERTIES_2_EXTENSION_NAME = "VK_AMD_shader_core_properties2"
-- No documentation found for TopLevel "VK_AMD_SHADER_CORE_PROPERTIES_2_EXTENSION_NAME"
pattern AMD_SHADER_CORE_PROPERTIES_2_EXTENSION_NAME :: forall a . (Eq a, IsString a) => a
pattern AMD_SHADER_CORE_PROPERTIES_2_EXTENSION_NAME = "VK_AMD_shader_core_properties2"
| expipiplus1/vulkan | src/Vulkan/Extensions/VK_AMD_shader_core_properties2.hs | bsd-3-clause | 9,673 | 0 | 14 | 1,698 | 1,307 | 798 | 509 | -1 | -1 |
module Text.Highlighter.Lexer (runLexer) where
import Control.Monad.Except (ExceptT, runExceptT, throwError, catchError)
import Control.Monad.State (State, gets, modify, evalState)
import Text.Regex.PCRE.Light hiding (compile)
import Text.Regex.PCRE.Light.Char8 (compile)
import qualified Data.ByteString as BS
import Data.Sequence (Seq, empty, singleton, (><), viewl, null, ViewL(..))
import Data.Monoid ((<>))
import Control.Applicative ((<$>))
import Data.Foldable (toList, foldr1, mapM_)
import Prelude hiding (lex, foldr1, mapM_, concat, head, drop, tail, reverse, dropWhile, null)
import qualified Prelude as P
import Text.Highlighter.Types
data LexerState =
LexerState
{ lsLexer :: Lexer
, lsInput :: BS.ByteString
, lsState :: [TokenMatcher]
, lsLexed :: (Seq Token)
, lastNotNull :: Bool
}
deriving Show
type LexerM = ExceptT LexerError (State LexerState)
data LexerError
= NoMatchFor BS.ByteString
| OtherLexerError String
deriving Show
runLexer :: Lexer -> BS.ByteString -> Either LexerError [Token]
runLexer l s = toList <$> runLexer' l s
runLexer' :: Lexer -> BS.ByteString -> Either LexerError (Seq Token)
runLexer' l s = evalState (runExceptT lex) (LexerState l s [lStart l] empty True)
lex :: LexerM (Seq Token)
lex = do
done <- gets (BS.null . lsInput)
if done
then gets lsLexed
else do
ms <- getState
ts <- tryAll ms
if null ts || (BS.null . tText . head $ ts)
then modify $ \ls -> ls { lsLexed = lsLexed ls >< ts }
else modify $ \ls -> ls { lsLexed = lsLexed ls >< ts
, lastNotNull = (BS.last . tText . head $ ts) == 10
}
lex
where
getState = gets (P.head . lsState)
isBOL :: LexerM Bool
isBOL = gets lastNotNull
head :: Seq a -> a
head x = let (b :< _) = viewl x
in b
tryAll :: [Match] -> LexerM (Seq Token)
tryAll [] = do
i <- gets lsInput
throwError (NoMatchFor i)
tryAll (AnyOf ms:ms') =
tryAll (ms ++ ms')
tryAll (m:ms) = do
atbol <- isBOL
fs <- gets (lFlags . lsLexer)
let opts
| atbol = [exec_anchored]
| otherwise = [exec_anchored, exec_notbol]
i <- gets lsInput
case match (compile (mRegexp m) fs) i opts of
Just [] -> do
nextState (mNextState m) []
return empty
Just (s:ss) -> do
modify $ \ls -> ls { lsInput = BS.drop (BS.length s) i }
nextState (mNextState m) (s:ss)
toTokens (s:ss) (mType m)
Nothing ->
tryAll ms `catchError` trySkipping
where
trySkipping (NoMatchFor _) = tryAllFirst (m:ms)
trySkipping e = throwError e
tryAllFirst :: [Match] -> LexerM (Seq Token)
tryAllFirst [] = do
i <- gets lsInput
throwError (NoMatchFor i)
tryAllFirst (AnyOf ms:ms') =
tryAllFirst (ms ++ ms')
tryAllFirst (m:ms) = do
atbol <- isBOL
fs <- gets (lFlags . lsLexer)
let opts
| atbol = []
| otherwise = [exec_notbol]
i <- gets lsInput
case match (compile (mRegexp m) fs) i opts of
Just (s:ss) -> do
let (skipped, next) = skipFailed i s
modify $ \ls -> ls { lsInput = next }
ts <- toTokens (s:ss) (mType m)
return . singleton . Token Error $ (skipped <> (tText $ head ts))
_ -> tryAllFirst ms
toTokens :: [BS.ByteString] -> TokenType -> LexerM (Seq Token)
toTokens (s:_) (Using l) = either throwError return (runLexer' l s)
toTokens (_:ss) (ByGroups ts) = foldr1 (><) <$> mapM (\(s,t) -> toTokens [s] t) (P.zip ss ts)
toTokens (s:_) t = return $ singleton $ Token t s
toTokens [] _ = return empty
-- Given the starting point, return the text preceding and after
-- the failing regexp match
skipFailed :: BS.ByteString -> BS.ByteString -> (BS.ByteString, BS.ByteString)
skipFailed i r
| r `BS.isPrefixOf` i = (BS.empty, BS.drop (BS.length r) i)
| otherwise =
let (pre, next) = skipFailed (BS.tail i) r
in (BS.cons (BS.head i) pre, next)
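-- An illustrative example (added here; assumes OverloadedStrings so the
-- literals read as ByteStrings): everything before the first occurrence of
-- the match is returned as the skipped text, everything after it as the
-- remaining input.
--
-- >>> skipFailed "abcdef" "cd"
-- ("ab","ef")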
nextState :: NextState -> [BS.ByteString] -> LexerM ()
nextState Continue _ = return ()
nextState Pop _ =
modify $ \ls -> ls { lsState = P.tail (lsState ls) }
nextState (PopNum n) _ =
modify $ \ls -> ls { lsState = P.drop n (lsState ls) }
nextState Push _ =
modify $ \ls -> ls { lsState = P.head (lsState ls) : lsState ls }
nextState (GoTo n) _ =
modify $ \ls -> ls { lsState = n : lsState ls }
nextState (CapturesTo f) cs =
modify $ \ls -> ls { lsState = f (map fromBS cs) : lsState ls }
where
fromBS = map (toEnum . fromEnum) . BS.unpack
nextState (DoAll nss) cs = mapM_ (flip nextState cs) nss
nextState (Combined nss) _ =
modify $ \ls -> ls { lsState = P.concat nss : lsState ls }
| chemist/highlighter | src/Text/Highlighter/Lexer.hs | bsd-3-clause | 4,808 | 0 | 20 | 1,313 | 2,018 | 1,050 | 968 | -1 | -1 |
module Problem15 where
--
-- Problem 15: Lattice paths
--
-- Starting in the top left corner of a 2×2 grid, and only being able to move to
-- the right and down, there are exactly 6 routes to the bottom right corner.
--
-- https://projecteuler.net/project/images/p015.gif
--
-- Paths: (R=Right, D=Down)
--
-- R R D D
-- R D R D
-- R D D R
-- D R R D
-- D R D R
-- D D R R
--
-- |R| = |D|
--
-- How many such routes are there through a 20×20 grid?
--
-- 40! / 20!20!
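--
-- A minimal sketch of the closed-form answer (added for illustration; the
-- names 'problem15' and 'factorial' are not part of the original module).
-- Each route is an arrangement of 20 R moves and 20 D moves, so the count
-- is the central binomial coefficient 40! / (20! * 20!).
problem15 :: Integer
problem15 = factorial 40 `div` (factorial 20 * factorial 20)
  where
    factorial :: Integer -> Integer
    factorial n = product [1 .. n]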
| c0deaddict/project-euler | src/Part1/Problem15.hs | bsd-3-clause | 469 | 0 | 2 | 105 | 26 | 25 | 1 | 1 | 0 |
{-# LANGUAGE MultiParamTypeClasses #-}
-- |
-- Module : Simulation.Aivika.Experiment.Base.InfoView
-- Copyright : Copyright (c) 2012-2017, David Sorokin <david.sorokin@gmail.com>
-- License : BSD3
-- Maintainer : David Sorokin <david.sorokin@gmail.com>
-- Stability : experimental
-- Tested with: GHC 8.0.1
--
-- The module defines 'InfoView' that shows the description of series.
--
module Simulation.Aivika.Experiment.Base.InfoView
(InfoView(..),
defaultInfoView) where
import Control.Monad
import Control.Monad.Trans
import Control.Concurrent.MVar
import Data.IORef
import Data.Maybe
import Data.Monoid
import Simulation.Aivika
import Simulation.Aivika.Experiment.Types
import Simulation.Aivika.Experiment.Base.WebPageRenderer
import Simulation.Aivika.Experiment.Base.ExperimentWriter
import Simulation.Aivika.Experiment.Base.HtmlWriter
import Simulation.Aivika.Experiment.Concurrent.MVar
-- | Defines the 'View' that shows the description of series.
data InfoView =
InfoView { infoTitle :: String,
-- ^ This is a title for the view.
infoDescription :: String,
-- ^ This is a text description used in HTML.
infoTransform :: ResultTransform,
-- ^ The transform applied to the results before receiving series.
infoSeries :: ResultTransform
-- ^ It defines the series for which the description is shown.
}
-- | The default description view.
defaultInfoView :: InfoView
defaultInfoView =
InfoView { infoTitle = "Information",
infoDescription = "It shows the information about simulation entities:",
infoTransform = id,
infoSeries = id }
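-- An illustrative customisation (added; 'customInfoView' and the title and
-- description strings are made up): any field of 'defaultInfoView' can be
-- overridden with ordinary record-update syntax.
customInfoView :: InfoView
customInfoView =
  defaultInfoView { infoTitle = "Queue Information",
                    infoDescription = "It shows the information about the queue:" }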
instance ExperimentView InfoView (WebPageRenderer a) where
outputView v =
let reporter exp renderer dir =
do st <- newInfo v exp dir
let context =
WebPageContext $
WebPageWriter { reporterWriteTOCHtml = infoTOCHtml st,
reporterWriteHtml = infoHtml st }
return ExperimentReporter { reporterInitialise = return (),
reporterFinalise = return (),
reporterSimulate = simulateInfo st,
reporterContext = context }
in ExperimentGenerator { generateReporter = reporter }
-- | The state of the view.
data InfoViewState =
InfoViewState { infoView :: InfoView,
infoExperiment :: Experiment,
infoResults :: MVar (Maybe InfoResults) }
-- | The information table.
data InfoResults =
InfoResults { infoNames :: [String],
infoValues :: [String] }
-- | Create a new state of the view.
newInfo :: InfoView -> Experiment -> FilePath -> ExperimentWriter InfoViewState
newInfo view exp dir =
do r <- liftIO $ newMVar Nothing
return InfoViewState { infoView = view,
infoExperiment = exp,
infoResults = r }
-- | Create a new information table.
newInfoResults :: [ResultSource] -> ResultLocalisation -> Experiment -> IO InfoResults
newInfoResults sources loc exp =
do let xs =
flip map sources $ \source ->
case source of
ResultItemSource (ResultItem x) ->
[(resultNameToTitle $ resultItemName x,
localiseResultDescription loc $ resultItemId x)]
ResultObjectSource x ->
[(resultNameToTitle $ resultObjectName x,
localiseResultDescription loc $ resultObjectId x)]
ResultVectorSource x ->
[(resultNameToTitle $ resultVectorName x,
localiseResultDescription loc $ resultVectorId x)]
ResultSeparatorSource x ->
[]
(names, values) = unzip $ concat xs
return InfoResults { infoNames = names,
infoValues = values }
-- | Return the unique information table associated with the specified state, creating it if needed.
requireInfoResults :: InfoViewState -> [ResultSource] -> IO InfoResults
requireInfoResults st sources =
let view = infoView st
exp = infoExperiment st
loc = experimentLocalisation exp
in maybePutMVar (infoResults st)
(newInfoResults sources loc exp) $ \results ->
do let xs =
flip map sources $ \source ->
case source of
ResultItemSource (ResultItem x) ->
[resultNameToTitle $ resultItemName x]
ResultObjectSource x ->
[resultNameToTitle $ resultObjectName x]
ResultVectorSource x ->
[resultNameToTitle $ resultVectorName x]
ResultSeparatorSource x ->
[]
let names = concat xs
if (names /= infoNames results)
then error "Series with different names are returned for different runs: requireInfoResults"
else return results
-- | Simulate the specified series.
simulateInfo :: InfoViewState -> ExperimentData -> Composite ()
simulateInfo st expdata =
do let view = infoView st
rs = infoSeries view $
infoTransform view $
experimentResults expdata
sources = resultSourceList rs
liftIO $ requireInfoResults st sources
return ()
-- | Get the HTML code.
infoHtml :: InfoViewState -> Int -> HtmlWriter ()
infoHtml st index =
do header st index
results <- liftIO $ readMVar (infoResults st)
case results of
Nothing -> return ()
Just results ->
do let names = infoNames results
values = infoValues results
writeHtmlList $
forM_ (zip names values) $ \(name, value) ->
writeHtmlListItem $
do writeHtmlText name
writeHtmlText " - "
writeHtmlText value
header :: InfoViewState -> Int -> HtmlWriter ()
header st index =
do writeHtmlHeader3WithId ("id" ++ show index) $
writeHtmlText (infoTitle $ infoView st)
let description = infoDescription $ infoView st
unless (null description) $
writeHtmlParagraph $
writeHtmlText description
-- | Get the TOC item.
infoTOCHtml :: InfoViewState -> Int -> HtmlWriter ()
infoTOCHtml st index =
writeHtmlListItem $
writeHtmlLink ("#id" ++ show index) $
writeHtmlText (infoTitle $ infoView st)
| dsorokin/aivika-experiment | Simulation/Aivika/Experiment/Base/InfoView.hs | bsd-3-clause | 6,535 | 0 | 21 | 2,037 | 1,330 | 693 | 637 | 131 | 5 |
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE OverloadedStrings #-}
module Jade.TopLevel ( getNetsWithName
, connectWiresWithSameSigName
, dependencyOrder
, explodeConnect
, getAllIndexesWithName
, getAllSigNames
, getInputs
, getInternalSigNames
, getModule
, getOutputs
, getSchematic
, getSubModules
, getTerminalsAtPoint
, getWidthOfValName
, getWidthOfPartsAtTerminal
, isNetDriver
, netWithTerminal
, nets
, numNets
, replicationDepth
, terminals
) where
import Control.Monad
import Control.Monad.State
import qualified Data.List as DL
import qualified Data.Map as DM
import Data.Maybe
import qualified Data.Maybe as Maybe
import qualified Data.Set as DS
import qualified Data.Vector as DV
import qualified Jade.Decode.Decode as D
import qualified Jade.Jumper as Jumper
import qualified Jade.MemUnit as MemUnit
import qualified Jade.Module as Module
import qualified Jade.Net as Net
import qualified Jade.Part as Part
import qualified Jade.Term as Term
import qualified Jade.Schematic as Schem
import Jade.Common
import qualified Jade.UnionFindST as UF
import qualified Jade.Wire as Wire
import qualified Web.Hashids as WH
import qualified Jade.Decode.Bundle as Bundle
getSubModules :: String -> J [SubModule]
getSubModules modname = do --"TopLevel.getSubModules" <? do
(Module _ schem _ _) <- getModule modname
case schem of
Just schem -> return $ Schem.getSubModules schem
Nothing -> die "No schematics found"
-- a function to possibly create an edge given a wire and a part
makePartEdge :: Wire -> Part -> J (Maybe Edge)
makePartEdge wire part = "TopLevel.makePartEdge" <? do
let (loc1, loc2) = Wire.ends wire
ploc <- Part.loc part
if ploc == loc1
then return $ Just $ Edge (Node loc1 (WireC wire)) (Node ploc part)
else if ploc == loc2
then return $ Just $ Edge (Node loc2 (WireC wire)) (Node ploc part)
else return Nothing
makeWire2WireEdge :: Wire -> Wire -> J (Maybe Edge)
makeWire2WireEdge w1 w2 = "TopLevel.makeWire2WireEdge" <?
let (loc1, loc2) = Wire.ends w1
(loc3, loc4) = Wire.ends w2
econ p1 v p2 w = Just $ Edge (Node p1 (WireC v)) (Node p2 (WireC w))
in return $ case (loc1 == loc3, loc1 == loc4, loc2 == loc3, loc2 == loc4) of
(True, _, _, _) -> econ loc1 w1 loc3 w2
(_, True, _, _) -> econ loc1 w1 loc4 w2
(_, _, True, _) -> econ loc2 w1 loc3 w2
(_, _, _, True) -> econ loc2 w1 loc4 w2
_ -> Nothing
terminalsOverlapP (Terminal (Coord3 x1 y1 _) _) (Terminal (Coord3 x2 y2 _) _)
= (x1,y1) == (x2,y2)
getOverlappingTerminals :: String -> J [(Terminal, Terminal)]
getOverlappingTerminals modname = "TopLevel.collectOverlappingTerminals" <? do
allsubs <- getSubModules modname
ts <- concatMapM terminals allsubs
return $ DL.nub $ DL.sort [ (min t1 t2, max t1 t2) | t1 <- ts, t2 <- ts,
terminalsOverlapP t1 t2,
t1 /= t2 ]
connectOverlappingTerminals :: [(Terminal, Terminal)] -> J [Wire]
connectOverlappingTerminals termPairs = "TopLevel.connectOverlappingTerminals" <? do
return $ [Wire.mkDegenerate c | (Terminal c _, _) <- termPairs]
processEdges :: [Wire] -> [Part] -> J [Edge]
processEdges wires parts = "TopLevel.processEdges" <? do
nb "with all wires, make an edge from ones that share a point with a part"
partEdges <- sequence [makePartEdge w p | w <- wires, p <- parts]
let Just edges = sequence $ filter Maybe.isJust partEdges
wireNbrs <- sequence [makeWire2WireEdge v w | (v, w) <- triangleProd wires]
let Just nbrs = sequence $ filter Maybe.isJust wireNbrs
return $ edges ++ nbrs
makeJumperWire :: Jumper -> J Wire
makeJumperWire jumper = "TopLevel.makeJumperWire" <? do
nb "find the endpoints of the jumper"
let (p1, p2) = Jumper.getEnds jumper
nb "create a wire where the jumper is"
return $ Wire.new p1 p2
makePortWire :: Port -> J Wire
makePortWire (Port (Coord3 x y r) sig) = "TopLevel.makePortWire" <? do
nb "create a wire of length zero, that has the signal from the port"
return $ Wire (Coord5 x y r 0 0) sig
connectSubWires :: Wire -> Wire -> J (Maybe Wire)
connectSubWires w1 w2 = "TopLevel.connectSubWires" <? do
assert (Wire.width w1 == 1) "this function assumes wires are width 1"
assert (Wire.width w2 == 1) "this function assumes wires are width 1"
assert (Wire.hasSigName w1) "can't be literal"
assert (Wire.hasSigName w2) "can't be literal"
if w1 `Wire.hasSameSig` w2
then do nb "--------------------------------------------"
enb w1
enb w2
return $ Just $ Wire.new (fst $ Wire.ends w1) (fst $ Wire.ends w2)
else return Nothing
explodeConnect :: (Wire, Wire) -> J [Wire]
explodeConnect (w1, w2) = "TopLevel.explodeConnect" <? do
let subwires1 = Wire.explode w1
subwires2 = Wire.explode w2
enb (subwires1)
enb (subwires2)
catMaybes <$> sequence [connectSubWires sw1 sw2 | sw1 <- subwires1, sw2 <- subwires2 ]
connectWiresWithSameSigName :: [Part] -> J [Wire]
connectWiresWithSameSigName parts = "TopLevel.connectWiresWithSameSigName" <? do
let wires = triangleProd $ filter Wire.hasSigName $ catMaybes $ map Part.toWire parts
let pairs = [(w1, w2) | (w1, w2) <- wires, w1 `Wire.hasSameSig` w2]
return [Wire.new (fst $ Wire.ends w1) (fst $ Wire.ends w2) | (w1, w2) <- pairs]
-- concat <$> sequence [explodeConnect (w1, w2) | (w1, w2) <- wires] --, w1 `Wire.hasSameSig` w2]
-- | What's going on here? Now that the decoder explodes signal names
-- into val bundles immediately, wires can be associated by name up
-- front and handed to the union-find algorithm, instead of being
-- patched together after the fact. Parts have wires, and wires have
-- bundles; intersecting those bundles and feeding them to union-find
-- reduces the total number of nets, which is good, and wires that
-- share a signal name connect automatically.
nets :: String -> J [Net]
nets modname = "TopLevel.nets" <? do
-- memoize, TODO: abstract this away.
Memo table <- getMemo
case DM.lookup modname table of
-- Already computed this net, so return it.
Just nets -> return nets
-- Compute the net, insert it into the memo map, then return the net
Nothing -> do cs <- nets' modname
putMemo $ Memo (DM.insert modname cs table)
return cs
nets' :: String -> J [Net]
nets' modname = do --"TopLevel.nets_" <? do
edges <- getEdges modname
let nets_ = UF.components $ edges
nb "let nets = UF.components $ edges ++ wireEdges"
enb nets_
return nets_
getEdges :: String -> J [Edge]
getEdges modname = "TopLevel.getEdges" <? do
nb "---------------------------------"
nbf "get the module: {0}" [modname]
(Module _ (Just schem@(Schematic parts)) _ _) <- getModule modname
terms <- sequence [terminals submod | submod <- Schem.getSubModules schem]
let wires = [w | WireC w <- parts]
ports = [p | PortC p <- parts]
jumpers = Schem.getJumpers schem
termcs = map TermC $ concat terms
ssnw <- connectWiresWithSameSigName parts
jumperWires <- mapM makeJumperWire jumpers
portWires <- mapM makePortWire ports
ts <- getOverlappingTerminals modname
overlappingTermWires <- connectOverlappingTerminals ts
let allWires = concat [ wires
, jumperWires
, ssnw
, portWires
, overlappingTermWires]
wireEdges = map Wire.toEdge allWires
edges <- processEdges allWires termcs
return (wireEdges ++ edges)
-- | VHDL requires that modules be instantiated in dependency order,
dependencyOrder :: String -> J [String]
dependencyOrder modname = "TopLevel.dependencyOrder" <?
if not $ modname `startsWith` "/user/" then return []
else do m <- getModule modname
schem <- Module.getSchematic m
let subnames = DL.nub [subname | (SubModule subname _) <- Schem.getSubModules schem]
children <- concatMapM dependencyOrder subnames
return $ filter (`startsWith` "/user") $ DL.nub $ children ++ subnames
-- |Get the graph net which contains the given terminal.
netWithTerminal :: [Char] -> Terminal -> J Net
netWithTerminal modname term@(Terminal c3@(Coord3 x y _) _) = "TopLevel.netWithTerminal" <? do
nets <- nets modname
let result = filter (flip Net.hasTerm term) nets
case length result of
0 -> die $ concat [ " No net found in module: ", modname
, " that has a terminal: ", show term ]
1 -> return $ head result
x -> die $ concat [ show x, " nets found in module: ", modname
, " that has a terminal: ", show term, "."
, " This should not be possible, because all such nets should"
, " be connected if they contain the same node" ]
-- | Get a list of input and output terminals in a submodule offset by
-- the position of the submodule
terminals :: SubModule -> J [Terminal]
terminals (SubModule modname offset) = do --"TopLevel.terminals" <? do
nb $ show ("TopLevel.terminals checks submodule: " ++ modname)
mod <- getModule modname
Module.terminals mod offset
terminals (SubMemUnit memunit) = "TopLevel.terminals/memunit" <? do
MemUnit.terminals memunit
-- | Get the number of distinct nets in the schematic
numNets :: String -> J Int
numNets modname = "TopLevel.numNets" <? do
length <$> nets modname ? "Couldn't get number of nets"
-- | Get the input of a module. This requires tests to be defined in
-- the module referenced, because the .input directive of the test
-- script indicates the target signals in the schematic.
getInputs :: String -> J Inputs
getInputs modname = "TopLevel.getInputs" <? do
getModule modname >>= Module.getInputs
-- | Get the outputs of a module. This requires tests to be defined in
-- the module referenced, because the .output directive of the test
-- script indicates the source signals in the schematic.
getOutputs :: String -> J Outputs
getOutputs modname = "TopLevel.getOutputs" <? do
mod <- getModule modname
let msg = "TopLevel.getOutputs couldn't find outputs in module: " ++ modname
Module.getOutputs mod ? msg
-- | The assumption is always that the jade module works and is
-- tested. With that in mind, it's safe to assume that there is one
-- driving signal per net. This function finds the driving signal for
-- a given net. If a signal is in more than one net then it is a
-- driving signal.
-- | What signals are driving? .input signals from the test script
-- indicate a set of driving signals. OUTPUT terminals of sub modules
-- are also driving signals.
getNetWithTerminal :: String -> Terminal -> J Net
getNetWithTerminal modname term = "getNetWithTerminal" <? do
allNets <- nets modname
let matches = [c | c <- allNets, Net.hasTerm c term]
case matches of
[] -> die $ "No net found with terminal: " ++ show term
[c] -> do nb $ "found net with terminal: " ++ show term
return c
_ -> impossible $ "More than one net found with terminal: " ++ show term
getPartsConnectedToTerminal :: String -> Terminal -> J [Part]
getPartsConnectedToTerminal modname terminal = "TopLevel.getPartsConnectedToTerminal" <? do
let (Terminal (Coord3 x y _) _) = terminal
schem <- getSchematic modname
Schem.getAllPartsAtPoint schem (Point x y)
getWidthOfPartsAtTerminal :: String -> Terminal -> J Int
getWidthOfPartsAtTerminal modname terminal = "TopLevel.getWidthOfPartsAtTerminal" <? do
parts <- getPartsConnectedToTerminal modname terminal
maximum <$> mapM Part.width (Part.removeTerms parts)
getTerminalsAtPoint :: String -> Point -> J [Terminal]
getTerminalsAtPoint modname point@(Point x1 y1) = "TopLevel.getTerminalAtPoint" <? do
schem <- getSchematic modname
let subs = Schem.getSubModules schem
allTerms <- concatMapM terminals subs
return $ filter (flip Term.atPoint point) allTerms
getRatio :: Terminal -> Part -> J (Int, Int)
getRatio terminal part = "TopLevel.getTerminalRatio" <? do
let tw = Term.width terminal
pw <- Part.width part
return (tw, pw)
getRatios modname term = do
parts <- getPartsConnectedToTerminal modname term
mapM (getRatio term) (filter (not . Part.isSubModule) parts)
replicationDepth :: String -> SubModule -> J Int
replicationDepth modname submod = "TopLevel.replicationDepth" <? do
terms <- terminals submod
ratios <- concatMapM (getRatios modname) terms
let numReps = [if tw >= pw then 1 else pw `div` tw | (tw, pw) <- ratios]
return $ maximum numReps
getNetsWithName :: String -> String -> J [Net]
getNetsWithName modname signame = "TopLevel.getNetsWithName" <? do
ns <- nets modname
filterM (flip Net.containsIdent signame) ns
getAllSigNames :: String -> J [String]
getAllSigNames modname = do
nets <- nets modname
let parts = (concat $ map Net.parts nets)
results = concat $ map Part.getNames parts
return $ DL.nub results
-- | Given a val name, scour all the nets that contain the name for
-- the total width of all vals with that name.
getWidthOfValName :: String -> String -> J Int
getWidthOfValName modname valname = "TopLevel.getWidthOfValName" <? do
nets <- getNetsWithName modname valname
vals <- concatMapM (flip Net.getValsWithIdent valname) nets
return $ length (DL.nub vals)
getInternalSigNames modname = "TopLevel.getInternalSigNames" <? do
m <- getModule modname
allNames <- getAllSigNames modname
(Inputs inputBundles) <- getInputs modname
(Outputs outputBundles) <- getOutputs modname
let inputNames = concat $ map Bundle.getNames inputBundles
outputNames = concat $ map Bundle.getNames outputBundles
return $ allNames DL.\\ (inputNames ++ outputNames)
getAllIndexesWithName :: String -> String -> J [Val]
getAllIndexesWithName modname name = "TopLevel.getAllIndexesWithName" <? do
allNets <- nets modname
return $ DL.nub $ concat $ map (flip Net.getIndexesWithName name) allNets
isNetDriver :: String -> Net -> J Bool
isNetDriver modname net = "TopLevel.isNetDriven" <? do
-- does this net contain names that are included in the .input
-- groups? does this net contain nodes that are connected to the
-- output of contradiction. if an internal name is both on the
-- output and input of a submodule, then unfortunately this means
-- that the net actually belongs to both. but wait. net analysis is pass#1 analysis
-- so, no, they actually aren't.
-- how many nets are there here?
return False
| drhodes/jade2hdl | src/Jade/TopLevel.hs | bsd-3-clause | 15,284 | 0 | 17 | 3,628 | 3,972 | 1,996 | 1,976 | 268 | 5 |
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TemplateHaskell #-}
module Main where
------------------------------------------------------------------------------
import Control.Monad.Trans
import Data.ByteString (ByteString)
import Control.Lens
import qualified Data.Text.Encoding as T
import qualified Database.Persist as P
import Database.Persist.Sql
import Snap
import Snap.Snaplet.Auth
import Snap.Snaplet.Auth.Backends.Persistent
import Snap.Snaplet.Persistent
import Snap.Snaplet.Session
import Snap.Snaplet.Session.Backends.CookieSession
------------------------------------------------------------------------------
data App = App
{ _sess :: Snaplet SessionManager
, _db :: Snaplet PersistState
, _auth :: Snaplet (AuthManager App)
}
makeLenses ''App
instance HasPersistPool (Handler b App) where
getPersistPool = with db getPersistPool
------------------------------------------------------------------------------
-- | The application's routes.
routes :: [(ByteString, Handler App App ())]
routes = [ ("/", writeText "hello")
, ("foo", fooHandler)
, ("add/:uname", addHandler)
]
fooHandler :: Handler App App ()
fooHandler = do
results <- runPersist $ P.selectList [] []
liftIO $ print (map db2au results)
addHandler :: Handler App App ()
addHandler = do
mname <- getParam "uname"
let name = maybe "guest" T.decodeUtf8 mname
u <- with auth $ createUser name ""
liftIO $ print u
------------------------------------------------------------------------------
-- | The application initializer.
app :: SnapletInit App App
app = makeSnaplet "app" "A snaplet example application." Nothing $ do
s <- nestSnaplet "" sess $
initCookieSessionManager "site_key.txt" "_cookie" Nothing Nothing
d <- nestSnaplet "db" db $ initPersist (runMigrationUnsafe migrateAuth)
a <- nestSnaplet "auth" auth $
initPersistAuthManager sess (persistPool $ view snapletValue d)
addRoutes routes
return $ App s d a
main :: IO ()
main = serveSnaplet defaultConfig app
| Soostone/snaplet-persistent | example/Site.hs | bsd-3-clause | 2,261 | 0 | 13 | 481 | 513 | 273 | 240 | 49 | 1 |
{-# LANGUAGE PatternGuards #-}
-----------------------------------------------------------------------------
-- |
-- Module : Distribution.Lex
-- Copyright : Ben Gamari 2015-2019
--
-- Maintainer : cabal-devel@haskell.org
-- Portability : portable
--
-- This module contains a simple lexer supporting quoted strings
module Distribution.Lex (
tokenizeQuotedWords
) where
import Prelude ()
import Distribution.Compat.Prelude
newtype DList a = DList ([a] -> [a])
runDList :: DList a -> [a]
runDList (DList run) = run []
singleton :: a -> DList a
singleton a = DList (a:)
instance Monoid (DList a) where
mempty = DList id
mappend = (<>)
instance Semigroup (DList a) where
DList a <> DList b = DList (a . b)
tokenizeQuotedWords :: String -> [String]
tokenizeQuotedWords = filter (not . null) . go False mempty
where
go :: Bool -- ^ in quoted region
-> DList Char -- ^ accumulator
-> String -- ^ string to be parsed
-> [String] -- ^ parse result
go _ accum []
| [] <- accum' = []
| otherwise = [accum']
where accum' = runDList accum
go False accum (c:cs)
| isSpace c = runDList accum : go False mempty cs
| c == '"' = go True accum cs
go True accum (c:cs)
| c == '"' = go False accum cs
go quoted accum (c:cs)
= go quoted (accum `mappend` singleton c) cs
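-- An illustrative example (added; not part of the original module): words
-- are split on unquoted whitespace, while a quoted region is kept together
-- as a single token.
--
-- >>> tokenizeQuotedWords "foo \"bar baz\" qux"
-- ["foo","bar baz","qux"]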
| sopvop/cabal | Cabal/Distribution/Lex.hs | bsd-3-clause | 1,409 | 0 | 11 | 378 | 437 | 230 | 207 | 32 | 4 |
{-# LANGUAGE GADTs #-}
{-# LANGUAGE RankNTypes #-}
{-# LANGUAGE StandaloneDeriving #-}
data StackItem a where
Snum :: forall a. Fractional a => a -> StackItem a
Sop :: OpDesc -> StackItem a
deriving instance Show a => Show (StackItem a)
-- AZ added to test Trac #10399
data MaybeDefault v where
SetTo :: forall v . ( Eq v, Show v ) => !v -> MaybeDefault v
SetTo4 :: forall v a. (( Eq v, Show v ) => v -> MaybeDefault v -> a -> MaybeDefault [a])
| mpickering/ghc-exactprint | tests/examples/ghc710/GADTContext.hs | bsd-3-clause | 481 | 9 | 12 | 125 | 157 | 85 | 72 | 12 | 0 |
module Matterhorn.Events.ShowHelp where
import Prelude ()
import Matterhorn.Prelude
import Brick
import qualified Graphics.Vty as Vty
import Matterhorn.Constants
import Matterhorn.Events.Keybindings
import Matterhorn.Types
onEventShowHelp :: Vty.Event -> MH Bool
onEventShowHelp =
handleKeyboardEvent helpKeybindings $ \ e -> case e of
Vty.EvKey _ _ -> popMode
_ -> return ()
helpKeybindings :: KeyConfig -> KeyHandlerMap
helpKeybindings = mkKeybindings helpKeyHandlers
helpKeyHandlers :: [KeyEventHandler]
helpKeyHandlers =
[ mkKb ScrollUpEvent "Scroll up" $
mh $ vScrollBy (viewportScroll HelpViewport) (-1)
, mkKb ScrollDownEvent "Scroll down" $
mh $ vScrollBy (viewportScroll HelpViewport) 1
, mkKb PageUpEvent "Page up" $
mh $ vScrollBy (viewportScroll HelpViewport) (-1 * pageAmount)
, mkKb PageDownEvent "Page down" $
mh $ vScrollBy (viewportScroll HelpViewport) (1 * pageAmount)
, mkKb CancelEvent "Return to the previous interface" $
popMode
, mkKb ScrollBottomEvent "Scroll to the end of the help" $
mh $ vScrollToEnd (viewportScroll HelpViewport)
, mkKb ScrollTopEvent "Scroll to the beginning of the help" $
mh $ vScrollToBeginning (viewportScroll HelpViewport)
]
popMode :: MH ()
popMode = do
ShowHelp _ prevMode <- use (csCurrentTeam.tsMode)
setMode prevMode
| matterhorn-chat/matterhorn | src/Matterhorn/Events/ShowHelp.hs | bsd-3-clause | 1,447 | 0 | 11 | 345 | 366 | 189 | 177 | 35 | 2 |
-----------------------------------------------------------------------------
-- |
-- Module : Distribution.Simple.SrcDist
-- Copyright : Simon Marlow 2004
-- License : BSD3
--
-- Maintainer : cabal-devel@haskell.org
-- Portability : portable
--
-- This handles the @sdist@ command. The module exports an 'sdist' action but
-- also some of the phases that make it up so that other tools can use just the
-- bits they need. In particular the preparation of the tree of files to go
-- into the source tarball is separated from actually building the source
-- tarball.
--
-- The 'createArchive' action uses the external @tar@ program and assumes that
-- it accepts the @-z@ flag. Neither of these assumptions are valid on Windows.
-- The 'sdist' action now also does some distribution QA checks.
-- NOTE: FIX: we don't have a great way of testing this module, since
-- we can't easily look inside a tarball once it's created.
module Distribution.Simple.SrcDist (
-- * The top level action
sdist,
-- ** Parts of 'sdist'
printPackageProblems,
prepareTree,
createArchive,
-- ** Snapshots
prepareSnapshotTree,
snapshotPackage,
snapshotVersion,
dateToSnapshotNumber,
-- * Extracting the source files
listPackageSources
) where
import Prelude ()
import Distribution.Compat.Prelude
import Distribution.PackageDescription hiding (Flag)
import Distribution.PackageDescription.Check hiding (doesFileExist)
import Distribution.Package
import Distribution.ModuleName
import qualified Distribution.ModuleName as ModuleName
import Distribution.Version
import Distribution.Simple.Utils
import Distribution.Simple.Setup
import Distribution.Simple.PreProcess
import Distribution.Simple.LocalBuildInfo
import Distribution.Simple.BuildPaths
import Distribution.Simple.Program
import Distribution.Text
import Distribution.Verbosity
import Data.List (partition)
import qualified Data.Map as Map
import Data.Time (UTCTime, getCurrentTime, toGregorian, utctDay)
import System.Directory ( doesFileExist )
import System.IO (IOMode(WriteMode), hPutStrLn, withFile)
import System.FilePath ((</>), (<.>), dropExtension, isRelative)
-- |Create a source distribution.
sdist :: PackageDescription -- ^information from the tarball
-> Maybe LocalBuildInfo -- ^Information from configure
-> SDistFlags -- ^verbosity & snapshot
-> (FilePath -> FilePath) -- ^build prefix (temp dir)
-> [PPSuffixHandler] -- ^ extra preprocessors (includes suffixes)
-> IO ()
sdist pkg mb_lbi flags mkTmpDir pps =
-- When given --list-sources, just output the list of sources to a file.
case (sDistListSources flags) of
Flag path -> withFile path WriteMode $ \outHandle -> do
(ordinary, maybeExecutable) <- listPackageSources verbosity pkg pps
traverse_ (hPutStrLn outHandle) ordinary
traverse_ (hPutStrLn outHandle) maybeExecutable
notice verbosity $ "List of package sources written to file '"
++ path ++ "'"
NoFlag -> do
-- do some QA
printPackageProblems verbosity pkg
when (isNothing mb_lbi) $
warn verbosity "Cannot run preprocessors. Run 'configure' command first."
date <- getCurrentTime
let pkg' | snapshot = snapshotPackage date pkg
| otherwise = pkg
case flagToMaybe (sDistDirectory flags) of
Just targetDir -> do
generateSourceDir targetDir pkg'
info verbosity $ "Source directory created: " ++ targetDir
Nothing -> do
createDirectoryIfMissingVerbose verbosity True tmpTargetDir
withTempDirectory verbosity tmpTargetDir "sdist." $ \tmpDir -> do
let targetDir = tmpDir </> tarBallName pkg'
generateSourceDir targetDir pkg'
targzFile <- createArchive verbosity pkg' mb_lbi tmpDir targetPref
notice verbosity $ "Source tarball created: " ++ targzFile
where
generateSourceDir targetDir pkg' = do
setupMessage verbosity "Building source dist for" (packageId pkg')
prepareTree verbosity pkg' mb_lbi targetDir pps
when snapshot $
overwriteSnapshotPackageDesc verbosity pkg' targetDir
verbosity = fromFlag (sDistVerbosity flags)
snapshot = fromFlag (sDistSnapshot flags)
distPref = fromFlag $ sDistDistPref flags
targetPref = distPref
tmpTargetDir = mkTmpDir distPref
-- | List all source files of a package. Returns a tuple of lists: first
-- component is a list of ordinary files, second one is a list of those files
-- that may be executable.
listPackageSources :: Verbosity -- ^ verbosity
-> PackageDescription -- ^ info from the cabal file
-> [PPSuffixHandler] -- ^ extra preprocessors (include
-- suffixes)
-> IO ([FilePath], [FilePath])
listPackageSources verbosity pkg_descr0 pps = do
-- Call helpers that actually do all work.
ordinary <- listPackageSourcesOrdinary verbosity pkg_descr pps
maybeExecutable <- listPackageSourcesMaybeExecutable pkg_descr
return (ordinary, maybeExecutable)
where
pkg_descr = filterAutogenModules pkg_descr0
-- | List those source files that may be executable (e.g. the configure script).
listPackageSourcesMaybeExecutable :: PackageDescription -> IO [FilePath]
listPackageSourcesMaybeExecutable pkg_descr =
-- Extra source files.
fmap concat . for (extraSrcFiles pkg_descr) $ \fpath -> matchFileGlob fpath
-- | List those source files that should be copied with ordinary permissions.
listPackageSourcesOrdinary :: Verbosity
-> PackageDescription
-> [PPSuffixHandler]
-> IO [FilePath]
listPackageSourcesOrdinary verbosity pkg_descr pps =
fmap concat . sequenceA $
[
-- Library sources.
fmap concat
. withAllLib $ \Library { exposedModules = modules, libBuildInfo = libBi } ->
allSourcesBuildInfo libBi pps modules
    -- Executable sources.
, fmap concat
. withAllExe $ \Executable { modulePath = mainPath, buildInfo = exeBi } -> do
biSrcs <- allSourcesBuildInfo exeBi pps []
mainSrc <- findMainExeFile exeBi pps mainPath
return (mainSrc:biSrcs)
    -- Test suite sources.
, fmap concat
. withAllTest $ \t -> do
let bi = testBuildInfo t
case testInterface t of
TestSuiteExeV10 _ mainPath -> do
biSrcs <- allSourcesBuildInfo bi pps []
srcMainFile <- do
ppFile <- findFileWithExtension (ppSuffixes pps)
(hsSourceDirs bi) (dropExtension mainPath)
case ppFile of
Nothing -> findFile (hsSourceDirs bi) mainPath
Just pp -> return pp
return (srcMainFile:biSrcs)
TestSuiteLibV09 _ m ->
allSourcesBuildInfo bi pps [m]
TestSuiteUnsupported tp -> die $ "Unsupported test suite type: "
++ show tp
    -- Benchmark sources.
, fmap concat
. withAllBenchmark $ \bm -> do
let bi = benchmarkBuildInfo bm
case benchmarkInterface bm of
BenchmarkExeV10 _ mainPath -> do
biSrcs <- allSourcesBuildInfo bi pps []
srcMainFile <- do
ppFile <- findFileWithExtension (ppSuffixes pps)
(hsSourceDirs bi) (dropExtension mainPath)
case ppFile of
Nothing -> findFile (hsSourceDirs bi) mainPath
Just pp -> return pp
return (srcMainFile:biSrcs)
BenchmarkUnsupported tp -> die $ "Unsupported benchmark type: "
++ show tp
-- Data files.
, fmap concat
. for (dataFiles pkg_descr) $ \filename ->
matchFileGlob (dataDir pkg_descr </> filename)
-- Extra doc files.
, fmap concat
. for (extraDocFiles pkg_descr) $ \ filename ->
matchFileGlob filename
-- License file(s).
, return (licenseFiles pkg_descr)
-- Install-include files.
, fmap concat
. withAllLib $ \ l -> do
let lbi = libBuildInfo l
relincdirs = "." : filter isRelative (includeDirs lbi)
traverse (fmap snd . findIncludeFile relincdirs) (installIncludes lbi)
-- Setup script, if it exists.
, fmap (maybe [] (\f -> [f])) $ findSetupFile ""
-- The .cabal file itself.
, fmap (\d -> [d]) (defaultPackageDesc verbosity)
]
where
-- We have to deal with all libs and executables, so we have local
-- versions of these functions that ignore the 'buildable' attribute:
withAllLib action = traverse action (allLibraries pkg_descr)
withAllExe action = traverse action (executables pkg_descr)
withAllTest action = traverse action (testSuites pkg_descr)
withAllBenchmark action = traverse action (benchmarks pkg_descr)
-- |Prepare a directory tree of source files.
prepareTree :: Verbosity -- ^verbosity
-> PackageDescription -- ^info from the cabal file
-> Maybe LocalBuildInfo
-> FilePath -- ^source tree to populate
-> [PPSuffixHandler] -- ^extra preprocessors (includes suffixes)
-> IO ()
prepareTree verbosity pkg_descr0 mb_lbi targetDir pps = do
-- If the package was configured then we can run platform-independent
-- pre-processors and include those generated files.
case mb_lbi of
Just lbi | not (null pps) -> do
let lbi' = lbi{ buildDir = targetDir </> buildDir lbi }
withAllComponentsInBuildOrder pkg_descr lbi' $ \c clbi ->
preprocessComponent pkg_descr c lbi' clbi True verbosity pps
_ -> return ()
(ordinary, mExecutable) <- listPackageSources verbosity pkg_descr0 pps
installOrdinaryFiles verbosity targetDir (zip (repeat []) ordinary)
installMaybeExecutableFiles verbosity targetDir (zip (repeat []) mExecutable)
maybeCreateDefaultSetupScript targetDir
where
pkg_descr = filterAutogenModules pkg_descr0
-- | Find the setup script file, if it exists.
findSetupFile :: FilePath -> NoCallStackIO (Maybe FilePath)
findSetupFile targetDir = do
hsExists <- doesFileExist setupHs
lhsExists <- doesFileExist setupLhs
if hsExists
then return (Just setupHs)
else if lhsExists
then return (Just setupLhs)
else return Nothing
where
setupHs = targetDir </> "Setup.hs"
setupLhs = targetDir </> "Setup.lhs"
-- | Create a default setup script in the target directory, if it doesn't exist.
maybeCreateDefaultSetupScript :: FilePath -> NoCallStackIO ()
maybeCreateDefaultSetupScript targetDir = do
mSetupFile <- findSetupFile targetDir
case mSetupFile of
Just _setupFile -> return ()
Nothing -> do
writeUTF8File (targetDir </> "Setup.hs") $ unlines [
"import Distribution.Simple",
"main = defaultMain"]
-- | Find the main executable file.
findMainExeFile :: BuildInfo -> [PPSuffixHandler] -> FilePath -> IO FilePath
findMainExeFile exeBi pps mainPath = do
ppFile <- findFileWithExtension (ppSuffixes pps) (hsSourceDirs exeBi)
(dropExtension mainPath)
case ppFile of
Nothing -> findFile (hsSourceDirs exeBi) mainPath
Just pp -> return pp
-- | Given a list of include paths, try to find the include file named
-- @f@. Return the name of the file and the full path, or exit with error if
-- there's no such file.
findIncludeFile :: [FilePath] -> String -> IO (String, FilePath)
findIncludeFile [] f = die ("can't find include file " ++ f)
findIncludeFile (d:ds) f = do
let path = (d </> f)
b <- doesFileExist path
if b then return (f,path) else findIncludeFile ds f
-- | Remove the auto-generated modules (like 'Paths_*') from 'exposed-modules'
-- and 'other-modules'.
filterAutogenModules :: PackageDescription -> PackageDescription
filterAutogenModules pkg_descr0 = mapLib filterAutogenModuleLib $
mapAllBuildInfo filterAutogenModuleBI pkg_descr0
where
mapLib f pkg = pkg { library = fmap f (library pkg)
, subLibraries = map f (subLibraries pkg) }
filterAutogenModuleLib lib = lib {
exposedModules = filter (filterFunction (libBuildInfo lib)) (exposedModules lib)
}
filterAutogenModuleBI bi = bi {
otherModules = filter (filterFunction bi) (otherModules bi)
}
pathsModule = autogenPathsModuleName pkg_descr0
filterFunction bi = \mn ->
mn /= pathsModule
&& not (elem mn (autogenModules bi))
-- | Prepare a directory tree of source files for a snapshot version.
-- It is expected that the appropriate snapshot version has already been set
-- in the package description, e.g. using 'snapshotPackage' or 'snapshotVersion'.
--
prepareSnapshotTree :: Verbosity -- ^verbosity
-> PackageDescription -- ^info from the cabal file
-> Maybe LocalBuildInfo
-> FilePath -- ^source tree to populate
-> [PPSuffixHandler] -- ^extra preprocessors (includes
-- suffixes)
-> IO ()
prepareSnapshotTree verbosity pkg mb_lbi targetDir pps = do
prepareTree verbosity pkg mb_lbi targetDir pps
overwriteSnapshotPackageDesc verbosity pkg targetDir
overwriteSnapshotPackageDesc :: Verbosity -- ^verbosity
-> PackageDescription -- ^info from the cabal file
-> FilePath -- ^source tree
-> IO ()
overwriteSnapshotPackageDesc verbosity pkg targetDir = do
-- We could just writePackageDescription targetDescFile pkg_descr,
-- but that would lose comments and formatting.
descFile <- defaultPackageDesc verbosity
withUTF8FileContents descFile $
writeUTF8File (targetDir </> descFile)
. unlines . map (replaceVersion (packageVersion pkg)) . lines
where
replaceVersion :: Version -> String -> String
replaceVersion version line
| "version:" `isPrefixOf` map toLower line
= "version: " ++ display version
| otherwise = line
-- | Modifies a 'PackageDescription' by appending a snapshot number
-- corresponding to the given date.
--
snapshotPackage :: UTCTime -> PackageDescription -> PackageDescription
snapshotPackage date pkg =
pkg {
package = pkgid { pkgVersion = snapshotVersion date (pkgVersion pkgid) }
}
where pkgid = packageId pkg
-- | Modifies a 'Version' by appending a snapshot number corresponding
-- to the given date.
--
snapshotVersion :: UTCTime -> Version -> Version
snapshotVersion date version = version {
versionBranch = versionBranch version
++ [dateToSnapshotNumber date]
}
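-- For example, a snapshot of version @1.2@ taken on @18/03/2008@ becomes
-- version @1.2.20080318@.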
-- | Given a date, produce a corresponding integer representation.
-- For example, given the date @18/03/2008@, produce the number @20080318@.
--
dateToSnapshotNumber :: UTCTime -> Int
dateToSnapshotNumber date = case toGregorian (utctDay date) of
(year, month, day) ->
fromIntegral year * 10000
+ month * 100
+ day
-- | Callback type for use by sdistWith.
type CreateArchiveFun = Verbosity -- ^verbosity
-> PackageDescription -- ^info from cabal file
-> Maybe LocalBuildInfo -- ^info from configure
-> FilePath -- ^source tree to archive
-> FilePath -- ^name of archive to create
-> IO FilePath
-- | Create an archive from a tree of source files, and clean up the tree.
createArchive :: CreateArchiveFun
createArchive verbosity pkg_descr mb_lbi tmpDir targetPref = do
let tarBallFilePath = targetPref </> tarBallName pkg_descr <.> "tar.gz"
(tarProg, _) <- requireProgram verbosity tarProgram
(maybe defaultProgramDb withPrograms mb_lbi)
let formatOptSupported = maybe False (== "YES") $
Map.lookup "Supports --format"
(programProperties tarProg)
runProgram verbosity tarProg $
-- Hmm: I could well be skating on thinner ice here by using the -C option
-- (=> seems to be supported at least by GNU and *BSD tar) [The
-- prev. solution used pipes and sub-command sequences to set up the paths
-- correctly, which is problematic in a Windows setting.]
["-czf", tarBallFilePath, "-C", tmpDir]
++ (if formatOptSupported then ["--format", "ustar"] else [])
++ [tarBallName pkg_descr]
return tarBallFilePath
-- | Given a buildinfo, return the names of all source files.
allSourcesBuildInfo :: BuildInfo
-> [PPSuffixHandler] -- ^ Extra preprocessors
-> [ModuleName] -- ^ Exposed modules
-> IO [FilePath]
allSourcesBuildInfo bi pps modules = do
let searchDirs = hsSourceDirs bi
sources <- fmap concat $ sequenceA $
[ let file = ModuleName.toFilePath module_
in findAllFilesWithExtension suffixes searchDirs file
>>= nonEmpty (notFound module_) return
| module_ <- modules ++ otherModules bi ]
bootFiles <- sequenceA
[ let file = ModuleName.toFilePath module_
fileExts = ["hs-boot", "lhs-boot"]
in findFileWithExtension fileExts (hsSourceDirs bi) file
| module_ <- modules ++ otherModules bi ]
return $ sources ++ catMaybes bootFiles ++ cSources bi ++ jsSources bi
where
nonEmpty x _ [] = x
nonEmpty _ f xs = f xs
suffixes = ppSuffixes pps ++ ["hs", "lhs"]
notFound m = die $ "Error: Could not find module: " ++ display m
++ " with any suffix: " ++ show suffixes ++ ". If the module "
++ "is autogenerated it should be added to 'autogen-modules'."
printPackageProblems :: Verbosity -> PackageDescription -> IO ()
printPackageProblems verbosity pkg_descr = do
ioChecks <- checkPackageFiles pkg_descr "."
let pureChecks = checkConfiguredPackage pkg_descr
isDistError (PackageDistSuspicious _) = False
isDistError (PackageDistSuspiciousWarn _) = False
isDistError _ = True
(errors, warnings) = partition isDistError (pureChecks ++ ioChecks)
unless (null errors) $
notice verbosity $ "Distribution quality errors:\n"
++ unlines (map explanation errors)
unless (null warnings) $
notice verbosity $ "Distribution quality warnings:\n"
++ unlines (map explanation warnings)
unless (null errors) $
notice verbosity
"Note: the public hackage server would reject this package."
------------------------------------------------------------
-- | The name of the tarball without extension
--
tarBallName :: PackageDescription -> String
tarBallName = display . packageId
mapAllBuildInfo :: (BuildInfo -> BuildInfo)
-> (PackageDescription -> PackageDescription)
mapAllBuildInfo f pkg = pkg {
library = fmap mapLibBi (library pkg),
subLibraries = fmap mapLibBi (subLibraries pkg),
executables = fmap mapExeBi (executables pkg),
testSuites = fmap mapTestBi (testSuites pkg),
benchmarks = fmap mapBenchBi (benchmarks pkg)
}
where
mapLibBi lib = lib { libBuildInfo = f (libBuildInfo lib) }
mapExeBi exe = exe { buildInfo = f (buildInfo exe) }
mapTestBi t = t { testBuildInfo = f (testBuildInfo t) }
mapBenchBi bm = bm { benchmarkBuildInfo = f (benchmarkBuildInfo bm) }
| sopvop/cabal | Cabal/Distribution/Simple/SrcDist.hs | bsd-3-clause | 19,745 | 0 | 24 | 5,249 | 3,999 | 2,037 | 1,962 | 326 | 6 |
{-# LANGUAGE Rank2Types #-}
module Examples
( tests
) where
import Control.Applicative ((<$>))
import Control.Monad (forM)
import Data.Bits (shiftL)
import Data.List (isPrefixOf, sort)
import Test.Framework (Test, testGroup)
import Test.Framework.Providers.HUnit (testCase)
import Test.HUnit (Assertion, assert, (@=?))
import qualified Data.ByteString as B
import Assembler
import Emulator
import Emulator.Monad
import Emulator.Monad.ST
import Memory (Address (..), Register (..))
tests :: Test
tests = testGroup "Examples"
[ testExample "notch" $ do
x <- load $ Register X
cycles <- load Cycles
return $ (0x40, 106) @=? (x, cycles)
, testExample "sum-squares" $ do
x <- load $ Register X
return $ sum [n * n | n <- [0 .. 50]] @=? x
, testExample "bubble-sort" $ do
xs <- forM [0 .. 9] $ load . Ram . (0x1000 +)
return $ sort xs @=? xs
, testExample "32-bit-add" $ do
lo <- load $ Ram 0x1000
hi <- load $ Ram 0x1001
let sum' = (fromIntegral hi `shiftL` 16) + fromIntegral lo :: Int
return $ 0x12345678 + 0xaabbccdd @=? sum'
, testExample "fib" $ do
let fibs = 1 : 2 : zipWith (+) fibs (tail fibs)
addrs = [0xffff, 0xfffe .. 0x000c]
loop _ [] = return True
loop [] _ = return True
loop (f : fs) (a : as) = do
f' <- load $ Ram a
if f == f' then loop fs as else return False
return . assert =<< loop fibs addrs
, testExample "self-copy" $ do
let readRam i = do
x <- load $ Ram i
if x == 0x0000
then return []
else (x :) <$> readRam (i + 1)
programs <- readRam 1
let len = length programs `div` 10
equal xs = case splitAt len xs of
(_, []) -> True
(hs, ts) -> hs `isPrefixOf` ts && equal ts
return $ assert $ equal programs
]
testExample :: String
-> (forall s. STEmulator s Assertion)
-> Test
testExample name = testCase name .
example ("examples/" ++ name ++ ".dasm16")
example :: FilePath
-> (forall s. STEmulator s Assertion)
-> Assertion
example filePath check = do
assembleFile filePath "a.out"
program <- B.readFile "a.out"
runSTEmulator $ do
loadProgram program
emulate
check
| jaspervdj/dcpu16-hs | tests/Examples.hs | bsd-3-clause | 2,482 | 0 | 19 | 861 | 878 | 454 | 424 | 69 | 6 |
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE MultiParamTypeClasses #-}
{-# LANGUAGE RankNTypes #-}
{-# LANGUAGE GADTs #-}
module Oracle.DiffOracle where
import Data.Maybe
import Data.Tuple (swap)
import Debug.Trace
import Control.Monad.Reader
import qualified Data.IntMap as M
import qualified Data.Set as S
import Oracle.Internal
import Language.Common
import Language.Clojure.AST
import Language.Clojure.Lang
import Util.UnixDiff
type CopyMap = M.IntMap Int
type CopyMaps = (CopyMap, CopyMap)
type MbMoveConflict = ConflictResult (S.Set (Int, Int))
type DelInsMap = (M.IntMap Path, M.IntMap Path)
data DiffOracle = DiffOracle DelInsMap
deriving (Show)
buildDiffOracle :: String -> String -> Expr -> Expr -> DiffOracle
buildDiffOracle s d src dst = DiffOracle diffActions
where
cp = buildCopyMaps (preprocess s d)
delinsMap = buildDelInsMap (preprocessGrouped s d)
diffActions = solveConflicts delinsMap cp src dst
buildCopyMaps :: [DiffAction] -> (M.IntMap Int, M.IntMap Int)
buildCopyMaps as = (insMap, reverseMap insMap)
where
insMap = buildCopyMap as
reverseMap = M.fromList . map swap . M.toList
buildCopyMap [] = (M.empty)
buildCopyMap (first:rest) = (process first) `M.union` (buildCopyMap rest)
where
process (Copy (i1, i2)) = M.singleton i1 i2
process _ = M.empty
solveConflicts :: DelInsMap -> CopyMaps -> Expr -> Expr -> DelInsMap
solveConflicts diffActions copyMaps src dst = foldl invalidate diffActions conflicts
where
conflicts = checkCopyMaps copyMaps src dst
invalidate :: DelInsMap -> [MbMoveConflict] -> DelInsMap
invalidate diffActions [] = diffActions
invalidate diffActions (NoConflict:rest) = invalidate diffActions rest
invalidate diffActions ((ConflictAt pairs):rest) = invalidate (S.foldl invalidatePair diffActions pairs) rest
invalidatePair :: DelInsMap -> (Int, Int) -> DelInsMap
invalidatePair (srcMap, dstMap) (i,j) = (M.insert i M srcMap, M.insert j M dstMap)
unionDelInsMap :: DelInsMap -> DelInsMap -> DelInsMap
unionDelInsMap (s1, d1) (s2, d2) = (M.union s1 s2, M.union d1 d2)
buildDelInsMap :: [GroupDiffAction] -> DelInsMap
buildDelInsMap [] = (M.empty, M.empty)
buildDelInsMap (first:rest) = process first `unionDelInsMap` buildDelInsMap rest
where
process (OMod srcRange dstRange) = (insertRange srcRange M, insertRange dstRange M)
process (OIns dstRange _) = (M.empty, insertRange dstRange I)
process (ODel srcRange _) = (insertRange srcRange D, M.empty)
insertRange :: LineRange -> Path -> M.IntMap Path
insertRange (Range s e) o = go s M.empty
where
go i m | i <= e = go (i+1) (M.insert i o m)
| otherwise = m
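-- For instance, @insertRange (Range 2 4) M@ maps each of the lines 2, 3 and 4
-- to the path @M@ (traced from the definition above).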
giveAdvice' :: DelInsMap -> Usingl u -> Usingl v -> [Path]
giveAdvice' (srcMap, dstMap) src dst =
if (isMod srcRange srcMap && isMod dstRange dstMap)
then []
else if (isDel srcRange srcMap || isMod srcRange srcMap)
then [ D ]
else if (isIns dstRange dstMap || isMod dstRange dstMap)
then [ I ]
else [ M ]
where
srcRange = fromJust $ extractRange src
dstRange = fromJust $ extractRange dst
isContainedIn :: LineRange -> LineRange -> Bool
isContainedIn (Range s1 e1) (Range s2 e2) = s2 <= s1 && e1 <= e2
isMod :: LineRange -> M.IntMap Path -> Bool
isMod lr m = case M.lookup (takeStart lr) m of
Just M -> True
_ -> False
isIns :: LineRange -> M.IntMap Path -> Bool
isIns lr m = case M.lookup (takeStart lr) m of
Just I -> True
_ -> False
isDel :: LineRange -> M.IntMap Path -> Bool
isDel lr m = case M.lookup (takeStart lr) m of
Just D -> True
_ -> False
inSync :: Usingl u -> LineRange -> Bool
inSync u lr = uRange `inSync'` lr
where
uRange = fromJust $ extractRange u
inSync' :: LineRange -> LineRange -> Bool
inSync' (Range s1 e1) (Range s2 e2) = s2 <= s1 && s1 <= e2
copySetExpr :: CopyMap -> Expr -> S.Set Int
copySetExpr copyMap e = collectAll copyMap eRange
where
eRange = extractRangeExpr e
copySetSel :: CopyMap -> SepExprList -> S.Set Int
copySetSel copyMap e = collectAll copyMap eRange
where
eRange = extractRangeSepExprList e
collectAll :: CopyMap -> LineRange -> S.Set Int
collectAll cpM (Range s e) = go cpM s
where
go m i | i <= e = mbTakeLine m i `S.union` go m (i+1)
go m i | otherwise = S.empty
mbTakeLine m i = if isJust (M.lookup i m)
then S.singleton i
else S.empty
lookupSet :: S.Set Int -> CopyMap -> S.Set Int
lookupSet s cp = S.map (fromJust . flip M.lookup cp) s
lookupSetPairs :: S.Set Int -> CopyMap -> [(Int, Int)]
lookupSetPairs s cp = S.toList $ S.map (\i -> (i, fromJust (M.lookup i cp))) s
intersectsNonOverlapping :: Ord a => S.Set a -> S.Set a -> S.Set a -> S.Set a
intersectsNonOverlapping target a b =
if check
then target `S.difference` overlapping
else S.empty
where
check = target `intersects` (a `S.difference` overlapping) && target `intersects` (b `S.difference` overlapping)
overlapping = a `S.intersection` b
targetL = S.toList target
intersects :: Ord a => S.Set a -> S.Set a -> Bool
intersects a b = not (S.null (a `S.intersection` b))
pickBigger :: CopyMaps -> S.Set Int -> S.Set Int -> S.Set (Int, Int)
pickBigger (srcMap, dstMap) a b
| S.size a >= S.size b = S.fromList $ map swap (lookupSetPairs a dstMap)
| otherwise = S.fromList $ lookupSetPairs b srcMap
deOptimizeExpr :: CopyMaps -> Expr -> Expr -> MbMoveConflict
deOptimizeExpr cp@(srcMap, dstMap) (Seq a b _) (Seq c d _) =
if (S.null overlapA && S.null overlapC)
then NoConflict
else ConflictAt (pickBigger cp overlapA overlapC)
where
copySetA = copySetExpr srcMap a
copySetB = copySetExpr srcMap b
copySetC = copySetExpr dstMap c
copySetD = copySetExpr dstMap d
copyTargetA = lookupSet copySetA srcMap
copyTargetC = lookupSet copySetC dstMap
overlapA = intersectsNonOverlapping copyTargetA copySetC copySetD
overlapC = intersectsNonOverlapping copyTargetC copySetA copySetB
deOptimizeExpr cp@(srcMap, dstMap) (Collection _ (Cons a _ b _) _) (Collection _ (Cons c _ d _) _) =
if (S.null overlapA && S.null overlapC)
then NoConflict
else ConflictAt (pickBigger cp overlapA overlapC)
where
copySetA = copySetExpr srcMap a
copySetB = copySetSel srcMap b
copySetC = copySetExpr dstMap c
copySetD = copySetSel dstMap d
copyTargetA = lookupSet copySetA srcMap
copyTargetC = lookupSet copySetC dstMap
overlapA = intersectsNonOverlapping copyTargetA copySetC copySetD
overlapC = intersectsNonOverlapping copyTargetC copySetA copySetB
deOptimizeExpr _ _ _ = NoConflict
checkCopyMaps :: CopyMaps -> Expr -> Expr -> [[MbMoveConflict]]
checkCopyMaps cp@(srcMap, dstMap) src dst = fmap (collectSrcDstLines cp src dst) (M.toList srcMap)
collectSrcDstLines :: CopyMaps -> Expr -> Expr -> (Int, Int) -> [MbMoveConflict]
collectSrcDstLines cp src dst (s,d) = map (\(src', dst') -> deOptimizeExpr cp (wrap src') (wrap dst')) conflicts
  where
    conflicts = zipEqLen srcConflicts dstConflicts
srcConflicts = collectSubTrees src s
dstConflicts = collectSubTrees dst d
zipEqLen :: [a] -> [b] -> [(a,b)]
zipEqLen (a:as) (b:bs) = (a,b):(zipEqLen as bs)
zipEqLen [] [] = []
zipEqLen [] bs = error "dst is longer"
zipEqLen as [] = error "src is longer"
wrap :: SubTree -> Expr
wrap (Exp e) = e
wrap (Sel sel) = (Collection Parens sel (extractRangeSepExprList sel))
instance (Monad m) => OracleF DiffOracle m where
callF o@(DiffOracle diffActions) s d = return $ askOracle o s d
instance (Monad m) => OracleP DiffOracle m where
callP _ An An = do
return []
callP _ An (_ `Ac` _) = do
return [ I ]
callP _ (_ `Ac` _) An = do
return [ D ]
callP o@(DiffOracle diffActions) (s `Ac` _) (d `Ac` _) = return $ askOracle o s d
askOracle :: DiffOracle -> Usingl u -> Usingl v -> [Path]
askOracle (DiffOracle diffActions) src dst = case (extractRange src, extractRange dst) of
(Nothing, Nothing) -> [ M ]
(Just sRange, Nothing) -> [ D ]
(Nothing, Just dRange) -> [ I ]
(Just sRange, Just dRange) -> giveAdvice' diffActions src dst | nazrhom/vcs-clojure | src/Oracle/DiffOracle.hs | bsd-3-clause | 8,201 | 0 | 13 | 1,776 | 3,167 | 1,644 | 1,523 | 174 | 4 |
{-# LANGUAGE
DeriveFunctor
, FlexibleInstances
, ScopedTypeVariables
#-}
module Data.Trie.Pseudo where
import Prelude hiding (foldl, foldr, foldr1, lookup,
map)
import Data.Foldable hiding (all)
import Data.List (intercalate)
import Data.List.NonEmpty (NonEmpty (..), fromList, toList)
import qualified Data.List.NonEmpty as NE
import Data.Maybe (fromMaybe)
import Data.Monoid
import qualified Data.Semigroup as S
import Control.Applicative
import Control.Monad (replicateM)
import Control.Arrow (second)
-- TODO: difference
-- | Tagged rose tree with explicit emptiness
data PseudoTrie t a = More t (Maybe a) (NonEmpty (PseudoTrie t a))
| Rest (NonEmpty t) a
| Nil
deriving (Show, Eq, Functor)
-- | Overwriting instance
instance (Eq t) => Monoid (PseudoTrie t a) where
mempty = Nil
mappend = merge
-- | Depth first
instance Foldable (PseudoTrie t) where
foldr _ acc Nil = acc
foldr f acc (Rest _ x) = f x acc
foldr f acc (More t Nothing xs) = foldr go acc xs
where
go z bcc = foldr f bcc z
foldr f acc (More t (Just x) xs) = foldr go (f x acc) xs
where
go z bcc = foldr f bcc z
beginsWith :: (Eq t) => PseudoTrie t a -> t -> Bool
beginsWith Nil _ = False
beginsWith (Rest (t:|_) _) p = t == p
beginsWith (More t _ _) p = t == p
-- | Provides a form of deletion by setting a path to @Nothing@, but doesn't
-- clean up like @prune@ does
assign :: (Eq t) => NonEmpty t -> Maybe a -> PseudoTrie t a -> PseudoTrie t a
assign ts (Just x) Nil = Rest ts x
assign _ Nothing Nil = Nil
assign tss@(t:|ts) mx ys@(Rest pss@(p:|ps) y)
| tss == pss = case mx of
(Just x) -> Rest pss x
Nothing -> Nil
| t == p = case (ts,ps) of
([], p':_) -> More t mx $ Rest (NE.fromList ps) y :| []
(t':_, []) -> case mx of
Just x -> More p (Just y) $ Rest (NE.fromList ts) x :| []
Nothing -> ys
(t':_,p':_) -> if t' == p'
then More t Nothing $
assign (NE.fromList ts) mx (Rest (NE.fromList ps) y) :| []
else case mx of -- disjoint
Nothing -> ys
Just x -> More t Nothing $ NE.fromList $
[ Rest (NE.fromList ps) y
, Rest (NE.fromList ts) x
]
| otherwise = ys
assign (t:|ts) mx y@(More p my ys)
| t == p = case ts of
[] -> More p mx ys
_ -> More p my $ fmap (assign (NE.fromList ts) mx) ys
| otherwise = y
-- | Overwrite the LHS point-wise with the RHS's contents
merge :: (Eq t) => PseudoTrie t a -> PseudoTrie t a -> PseudoTrie t a
merge Nil y = y
merge x Nil = x
merge xx@(Rest tss@(t:|ts) x) (Rest pss@(p:|ps) y)
| tss == pss = Rest pss y
| t == p = case (ts,ps) of
([],p':ps') -> More t (Just x) $ Rest (NE.fromList ps) y :| []
(t':ts',[]) -> More t (Just y) $ Rest (NE.fromList ts) x :| []
(_,_) -> More t Nothing $
merge (Rest (NE.fromList ts) x)
(Rest (NE.fromList ps) y) :| []
| otherwise = xx
merge xx@(More t mx xs) (More p my ys)
| t == p = More p my $ NE.fromList $
foldr go [] $ NE.toList xs ++ NE.toList ys
| otherwise = xx
where
go q [] = [q]
go q (z:zs) | areDisjoint q z = q : z : zs
| otherwise = merge q z : zs
merge xx@(More t mx xs) (Rest pss@(p:|ps) y)
| t == p = case ps of
[] -> More t (Just y) xs
_ -> More t mx $
fmap (flip merge $ Rest (NE.fromList ps) y) xs
| otherwise = xx
merge xx@(Rest tss@(t:|ts) x) (More p my ys)
| t == p = case ts of
[] -> More p (Just x) ys
_ -> More p my $
fmap (merge $ Rest (NE.fromList ts) x) ys
| otherwise = xx
add :: (Eq t) => NonEmpty t -> PseudoTrie t a -> PseudoTrie t a -> PseudoTrie t a
add ts input container =
let ts' = NE.toList ts in
merge container $ mkMores ts' input
where
mkMores :: (Eq t) => [t] -> PseudoTrie t a -> PseudoTrie t a
mkMores [] trie = trie
mkMores (t:ts) trie = More t Nothing $
mkMores ts trie :| []
toAssocs :: PseudoTrie t a -> [(NonEmpty t, a)]
toAssocs = go [] []
where
go :: [t] -> [(NonEmpty t, a)] -> PseudoTrie t a -> [(NonEmpty t, a)]
go depth acc Nil = acc
go depth acc (Rest ts x) = (NE.fromList $ depth ++ NE.toList ts, x) : acc
go depth acc (More t Nothing xs) =
foldr (flip $ go $ depth ++ [t]) acc $ NE.toList xs
go depth acc (More t (Just x) xs) =
(NE.fromList $ depth ++ [t], x) :
(foldr $ flip $ go $ depth ++ [t]) acc (NE.toList xs)
fromAssocs :: (Eq t) => [(NonEmpty t, a)] -> PseudoTrie t a
fromAssocs = foldr (uncurry assign) Nil . fmap (second Just)
lookup :: (Eq t) => NonEmpty t -> PseudoTrie t a -> Maybe a
lookup _ Nil = Nothing
lookup tss (Rest pss a)
| tss == pss = Just a
| otherwise = Nothing
lookup tss@(t:|ts) (More p mx xs)
| t == p = case ts of
[] -> mx
(t':ts') -> find (hasNextTag t') xs >>= lookup (fromList ts)
| otherwise = Nothing
where
hasNextTag :: (Eq t) => t -> PseudoTrie t a -> Bool
hasNextTag t Nil = False
hasNextTag t (More p _ _) = t == p
hasNextTag t (Rest (p:|_) _) = t == p
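-- A small usage sketch, traced from 'fromAssocs', 'assign' and 'lookup'
-- above (tags and values are purely illustrative):
--
-- > let t = fromAssocs [ ("a" :| ["b"], 1), ("a" :| ["c"], 2) ]
-- > lookup ("a" :| ["c"]) t  ==  Just 2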
-- | Simple test on the heads of two tries
areDisjoint :: (Eq t) => PseudoTrie t a -> PseudoTrie t a -> Bool
areDisjoint (More t _ _) (More p _ _)
| t == p = False
| otherwise = True
areDisjoint (Rest (t:|_) _) (Rest (p:|_) _)
| t == p = False
| otherwise = True
areDisjoint _ _ = True
-- | The meet of two @PseudoTrie@s
intersectionWith :: (Eq t) =>
(a -> b -> c)
-> PseudoTrie t a
-> PseudoTrie t b
-> PseudoTrie t c
intersectionWith _ _ Nil = Nil
intersectionWith _ Nil _ = Nil
intersectionWith f (Rest tss@(t:|ts) x) (Rest pss@(p:|ps) y)
| tss == pss = Rest pss $ f x y
| otherwise = Nil
intersectionWith f (More t mx xs) (More p my ys)
| t == p = case [intersectionWith f x' y' | x' <- NE.toList xs, y' <- NE.toList ys] of
[] -> case f <$> mx <*> my of
Nothing -> Nil
Just c -> Rest (p :| []) c
zs -> More p (f <$> mx <*> my) $ NE.fromList zs
-- implicit root
| otherwise = Nil
intersectionWith f (More t mx xs) (Rest pss@(p:|ps) y)
| t == p = case ps of
[] -> case f <$> mx <*> Just y of
Nothing -> Nil
Just c -> Rest (p :| []) c
_ -> More p Nothing $ fmap (flip (intersectionWith f) $ Rest (fromList ps) y) xs
| otherwise = Nil
intersectionWith f (Rest tss@(t:|ts) x) (More p my ys)
| t == p = case ts of
[] -> case f <$> Just x <*> my of
Nothing -> Nil
Just c -> Rest (t :| []) c
_ -> More t Nothing $ fmap (intersectionWith f $ Rest (fromList ts) x) ys
| otherwise = Nil
-- difference :: Eq t =>
-- PseudoTrie t a
-- -> PseudoTrie t a
-- -> PseudoTrie t a
-- | Needless intermediary elements are turned into shortcuts; @Nil@s in
-- subtrees are also removed.
prune :: PseudoTrie t a -> PseudoTrie t a
prune = go
where
go Nil = Nil
go xx@(Rest ts x) = xx
go (More t Nothing xs) =
case cleaned xs of
[Nil] -> Nil
[Rest ts x] -> Rest (t:|NE.toList ts) x
xs' -> More t Nothing $ NE.fromList xs'
go (More t (Just x) xs) =
case cleaned xs of
[Nil] -> Rest (t:|[]) x
xs' -> More t (Just x) $ NE.fromList xs'
cleaned xs = removeNils (NE.toList $ fmap go xs)
removeNils xs = case removeNils' xs of
[] -> [Nil]
ys -> ys
where
removeNils' [] = []
removeNils' (Nil:xs) = removeNils' xs
removeNils' (x:xs) = x : removeNils' xs
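-- For instance, a tagless chain with a single leaf collapses to a shortcut
-- (traced from the definition above):
--
-- > prune (More "a" Nothing (Rest ("b" :| []) 1 :| []))  ==  Rest ("a" :| ["b"]) 1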
| athanclark/pseudo-trie | src/Data/Trie/Pseudo.hs | bsd-3-clause | 8,552 | 0 | 18 | 3,279 | 3,708 | 1,862 | 1,846 | 185 | 10 |
-- | Test utility functions
module Test.Util(utilsTests) where
import Test.Tasty
import Test.Tasty.HUnit
import Language.Haskell.Ghcid.Util
utilsTests :: TestTree
utilsTests = testGroup "Utility tests"
[dropPrefixTests
,chunksOfWordTests
]
dropPrefixTests :: TestTree
dropPrefixTests = testGroup "dropPrefix"
[testCase "Prefix not found" $ dropPrefixRepeatedly "prefix" "string" @?= "string"
,testCase "Empty prefix" $ dropPrefixRepeatedly "" "string" @?= "string"
,testCase "Prefix found once" $ dropPrefixRepeatedly "str" "string" @?= "ing"
,testCase "Prefix found twice" $ dropPrefixRepeatedly "str" "strstring" @?= "ing"
]
chunksOfWordTests :: TestTree
chunksOfWordTests = testGroup "chunksOfWord"
[testCase "Max 0" $ chunksOfWord 4 0 "ab cd efgh" @?= ["ab c","d ef","gh"]
,testCase "Max 2" $ chunksOfWord 4 2 "ab cd efgh" @?= ["ab ","cd ","efgh"]
]
| JPMoresmau/ghcid | src/Test/Util.hs | bsd-3-clause | 904 | 0 | 9 | 158 | 217 | 116 | 101 | 18 | 1 |
{-# LANGUAGE TemplateHaskell #-}
{-# LANGUAGE ScopedTypeVariables #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
module TauSigma.ADEV
( Statistic(..)
, Options
, options
, main
) where
import Control.Monad.Primitive (PrimMonad)
import Control.Monad.Trans
import Control.Monad.Trans.Except
import Control.Lens (view)
import Control.Lens.TH
import Control.Parallel.Strategies (withStrategy, parBuffer, rdeepseq)
import Data.Csv (HasHeader(..), fromOnly)
import qualified Data.Vector as V
import qualified Data.Vector.Unboxed as U
import Options.Applicative hiding (header)
import Pipes
import Pipes.ByteString (stdin, stdout)
import qualified Pipes.Prelude as P
import TauSigma.Types (TauSigma(..))
import TauSigma.Statistics.Types (Tau0, Tau, Sigma)
import TauSigma.Statistics.Allan (adevs, mdevs, tdevs)
import TauSigma.Statistics.Hadamard (hdevs)
import TauSigma.Statistics.Total (totdevs)
import TauSigma.Statistics.Theo1 (theo1devs, theoBRdevs, theoHdevs)
import TauSigma.Util.CSV
import TauSigma.Util.Vector (drainToVector)
data Statistic = ADEV | MDEV | TDEV | HDEV | TOTDEV | Theo1 | TheoBR | TheoH
data Options
= Options { _tau0 :: Tau0 Double
, _maxTau :: Maybe (Tau Double)
}
$(makeLenses ''Options)
options :: Parser Options
options = Options <$> tau0 <*> maxTau
where f `with` xs = f (mconcat xs)
tau0 = option auto
`with` [ long "tau0"
, metavar "N"
, help "Base sampling interval"
]
maxTau = option (fmap Just auto)
`with` [ long "max-tau"
, metavar "N"
, value Nothing
, help "Maximum multiple of tau0 to output."
]
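-- The executable and sub-command names below are assumptions about how this
-- module is wired into a driver; the flags themselves come from the parser
-- above, and the CSV data flows through stdin/stdout as in 'main':
--
-- > tau-sigma adev --tau0 1 --max-tau 100 < phase.csv > adev.csv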
main :: (PrimMonad m, MonadIO m) =>
Statistic
-> Options
-> ExceptT String m ()
main statistic opts = do
errors <- drainToVector (decode NoHeader stdin >-> P.map fromOnly)
runEffect $ each (compute statistic opts errors)
>-> P.map (uncurry TauSigma)
>-> encodeByName (V.fromList ["tau", "sigma"])
>-> stdout
compute
:: Statistic
-> Options
-> U.Vector Double
-> [(Tau Double, Sigma Double)]
compute statistic opts xs = parallelize retained
where parallelize = withStrategy (parBuffer 50 rdeepseq)
retained = limiter opts all
where all = dispatch statistic (view tau0 opts) xs
dispatch
:: Statistic
-> Tau0 Double
-> U.Vector Double
-> [(Tau Double, Sigma Double)]
dispatch ADEV = adevs
dispatch MDEV = mdevs
dispatch TDEV = tdevs
dispatch HDEV = hdevs
dispatch TOTDEV = totdevs
dispatch Theo1 = theo1devs
dispatch TheoBR = theoBRdevs
dispatch TheoH = theoHdevs
limiter
:: Options
-> [(Tau Double, Sigma Double)]
-> [(Tau Double, Sigma Double)]
limiter opts =
case view maxTau opts of
Nothing -> id
Just limit -> limiter' limit
limiter'
:: Tau Double
-> [(Tau Double, Sigma Double)]
-> [(Tau Double, Sigma Double)]
limiter' limit = filter go
where go (tau, _) = tau <= limit
| sacundim/tau-sigma | src/TauSigma/ADEV.hs | bsd-3-clause | 3,151 | 0 | 13 | 812 | 949 | 523 | 426 | 93 | 2 |
{-# LANGUAGE BangPatterns #-}
{-# LANGUAGE CPP #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE PackageImports #-}
{-# LANGUAGE PatternGuards #-}
{-# LANGUAGE ScopedTypeVariables #-}
{-# LANGUAGE TupleSections #-}
{-# OPTIONS_GHC -fno-warn-deprecations #-}
module Network.Wai.Handler.Warp.Run where
#if __GLASGOW_HASKELL__ < 709
import Control.Applicative ((<$>))
#endif
import Control.Arrow (first)
import Control.Concurrent (threadDelay)
import qualified Control.Concurrent as Conc (yield)
import Control.Exception as E
import Control.Monad (when, unless, void)
import Data.ByteString (ByteString)
import qualified Data.ByteString as S
import Data.Char (chr)
import "iproute" Data.IP (toHostAddress, toHostAddress6)
import Data.IORef (IORef, newIORef, readIORef, writeIORef)
import Data.Streaming.Network (bindPortTCP)
import Network (sClose, Socket)
import Network.Socket (accept, withSocketsDo, SockAddr(SockAddrInet, SockAddrInet6), setSocketOption, SocketOption(..))
import qualified Network.Socket.ByteString as Sock
import Network.Wai
import Network.Wai.Handler.Warp.Buffer
import Network.Wai.Handler.Warp.Counter
import qualified Network.Wai.Handler.Warp.Date as D
import qualified Network.Wai.Handler.Warp.FdCache as F
import qualified Network.Wai.Handler.Warp.FileInfoCache as I
import Network.Wai.Handler.Warp.HTTP2 (http2, isHTTP2)
import Network.Wai.Handler.Warp.Header
import Network.Wai.Handler.Warp.ReadInt
import Network.Wai.Handler.Warp.Recv
import Network.Wai.Handler.Warp.Request
import Network.Wai.Handler.Warp.Response
import Network.Wai.Handler.Warp.SendFile
import Network.Wai.Handler.Warp.Settings
import qualified Network.Wai.Handler.Warp.Timeout as T
import Network.Wai.Handler.Warp.Types
import Network.Wai.Internal (ResponseReceived (ResponseReceived))
import System.Environment (getEnvironment)
import System.IO.Error (isFullErrorType, ioeGetErrorType)
#if WINDOWS
import Network.Wai.Handler.Warp.Windows
#else
import Network.Socket (fdSocket)
#endif
-- | Creating 'Connection' for plain HTTP based on a given socket.
socketConnection :: Socket -> IO Connection
socketConnection s = do
bufferPool <- newBufferPool
writeBuf <- allocateBuffer bufferSize
let sendall = Sock.sendAll s
return Connection {
connSendMany = Sock.sendMany s
, connSendAll = sendall
, connSendFile = sendFile s writeBuf bufferSize sendall
, connClose = sClose s >> freeBuffer writeBuf
, connRecv = receive s bufferPool
, connRecvBuf = receiveBuf s
, connWriteBuffer = writeBuf
, connBufferSize = bufferSize
}
#if __GLASGOW_HASKELL__ < 702
allowInterrupt :: IO ()
allowInterrupt = unblock $ return ()
#endif
-- | Run an 'Application' on the given port.
-- This calls 'runSettings' with 'defaultSettings'.
run :: Port -> Application -> IO ()
run p = runSettings defaultSettings { settingsPort = p }
-- | Run an 'Application' on the port present in the @PORT@
-- environment variable. Uses the 'Port' given when the variable is unset.
-- This calls 'runSettings' with 'defaultSettings'.
--
-- Since 3.0.9
runEnv :: Port -> Application -> IO ()
runEnv p app = do
mp <- lookup "PORT" <$> getEnvironment
maybe (run p app) runReadPort mp
where
runReadPort :: String -> IO ()
runReadPort sp = case reads sp of
((p', _):_) -> run p' app
_ -> fail $ "Invalid value in $PORT: " ++ sp
-- | Run an 'Application' with the given 'Settings'.
-- This opens a listen socket on the port defined in 'Settings' and
-- calls 'runSettingsSocket'.
runSettings :: Settings -> Application -> IO ()
runSettings set app = withSocketsDo $
bracket
(bindPortTCP (settingsPort set) (settingsHost set))
sClose
(\socket -> do
setSocketCloseOnExec socket
runSettingsSocket set socket app)
-- | This installs a shutdown handler for the given socket and
-- calls 'runSettingsConnection' with the default connection setup action
-- which handles plain (non-cipher) HTTP.
-- When the listen socket in the second argument is closed, all live
-- connections are gracefully shut down.
--
-- The supplied socket can be a Unix named socket, which
-- can be used when reverse HTTP proxying into your application.
--
-- Note that the 'settingsPort' will still be passed to 'Application's via the
-- 'serverPort' record.
runSettingsSocket :: Settings -> Socket -> Application -> IO ()
runSettingsSocket set socket app = do
settingsInstallShutdownHandler set closeListenSocket
runSettingsConnection set getConn app
where
getConn = do
#if WINDOWS
(s, sa) <- windowsThreadBlockHack $ accept socket
#else
(s, sa) <- accept socket
#endif
setSocketCloseOnExec s
-- NoDelay causes an error for AF_UNIX.
setSocketOption s NoDelay 1 `E.catch` \(E.SomeException _) -> return ()
conn <- socketConnection s
return (conn, sa)
closeListenSocket = sClose socket
-- | The connection setup action can be expensive. A good example
-- is initialization of TLS.
-- So, this converts the connection setup action to the connection maker
-- which will be executed after forking a new worker thread.
-- Then this calls 'runSettingsConnectionMaker' with the connection maker.
-- This allows the expensive computations to be performed
-- in a separate worker thread instead of the main server loop.
--
-- Since 1.3.5
runSettingsConnection :: Settings -> IO (Connection, SockAddr) -> Application -> IO ()
runSettingsConnection set getConn app = runSettingsConnectionMaker set getConnMaker app
where
getConnMaker = do
(conn, sa) <- getConn
return (return conn, sa)
-- | This modifies the connection maker so that it returns 'TCP' for 'Transport'
-- (i.e. plain HTTP) then calls 'runSettingsConnectionMakerSecure'.
runSettingsConnectionMaker :: Settings -> IO (IO Connection, SockAddr) -> Application -> IO ()
runSettingsConnectionMaker x y =
runSettingsConnectionMakerSecure x (toTCP <$> y)
where
toTCP = first ((, TCP) <$>)
----------------------------------------------------------------
-- | The core run function which takes 'Settings',
-- a connection maker and 'Application'.
-- The connection maker can return a connection of either plain HTTP
-- or HTTP over TLS.
--
-- Since 2.1.4
runSettingsConnectionMakerSecure :: Settings -> IO (IO (Connection, Transport), SockAddr) -> Application -> IO ()
runSettingsConnectionMakerSecure set getConnMaker app = do
settingsBeforeMainLoop set
counter <- newCounter
withII0 $ acceptConnection set getConnMaker app counter
where
withII0 action =
withTimeoutManager $ \tm ->
D.withDateCache $ \dc ->
F.withFdCache fdCacheDurationInSeconds $ \fdc ->
I.withFileInfoCache fdFileInfoDurationInSeconds $ \fic -> do
let ii0 = InternalInfo0 tm dc fdc fic
action ii0
!fdCacheDurationInSeconds = settingsFdCacheDuration set * 1000000
!fdFileInfoDurationInSeconds = settingsFileInfoCacheDuration set * 1000000
!timeoutInSeconds = settingsTimeout set * 1000000
withTimeoutManager f = case settingsManager set of
Just tm -> f tm
Nothing -> bracket
(T.initialize timeoutInSeconds)
T.stopManager
f
-- Note that there is a thorough discussion of the exception safety of the
-- following code at: https://github.com/yesodweb/wai/issues/146
--
-- We need to make sure of two things:
--
-- 1. Asynchronous exceptions are not blocked entirely in the main loop.
-- Doing so would make it impossible to kill the Warp thread.
--
-- 2. Once a connection maker is received via acceptNewConnection, the
-- connection is guaranteed to be closed, even in the presence of
-- async exceptions.
--
-- Our approach is explained in the comments below.
acceptConnection :: Settings
-> IO (IO (Connection, Transport), SockAddr)
-> Application
-> Counter
-> InternalInfo0
-> IO ()
acceptConnection set getConnMaker app counter ii0 = do
-- First mask all exceptions in acceptLoop. This is necessary to
    -- ensure that no async exception is thrown between the call to
-- acceptNewConnection and the registering of connClose.
void $ mask_ acceptLoop
gracefulShutdown counter
where
acceptLoop = do
-- Allow async exceptions before receiving the next connection maker.
allowInterrupt
-- acceptNewConnection will try to receive the next incoming
-- request. It returns a /connection maker/, not a connection,
-- since in some circumstances creating a working connection
-- from a raw socket may be an expensive operation, and this
-- expensive work should not be performed in the main event
-- loop. An example of something expensive would be TLS
-- negotiation.
mx <- acceptNewConnection
case mx of
Nothing -> return ()
Just (mkConn, addr) -> do
fork set mkConn addr app counter ii0
acceptLoop
acceptNewConnection = do
ex <- try getConnMaker
case ex of
Right x -> return $ Just x
Left e -> do
settingsOnException set Nothing $ toException e
if isFullErrorType (ioeGetErrorType e) then do
-- "resource exhausted (Too many open files)" may
-- happen by accept(). Wait a second hoping that
-- resource will be available.
threadDelay 1000000
acceptNewConnection
else
-- Assuming the listen socket is closed.
return Nothing
-- Fork a new worker thread for this connection maker, and ask for a
-- function to unmask (i.e., allow async exceptions to be thrown).
fork :: Settings
-> IO (Connection, Transport)
-> SockAddr
-> Application
-> Counter
-> InternalInfo0
-> IO ()
fork set mkConn addr app counter ii0 = settingsFork set $ \ unmask ->
-- Run the connection maker to get a new connection, and ensure
-- that the connection is closed. If the mkConn call throws an
-- exception, we will leak the connection. If the mkConn call is
-- vulnerable to attacks (e.g., Slowloris), we do nothing to
-- protect the server. It is therefore vital that mkConn is well
-- vetted.
--
-- We grab the connection before registering timeouts since the
-- timeouts will be useless during connection creation, due to the
-- fact that async exceptions are still masked.
bracket mkConn closeConn $ \(conn, transport) ->
-- We need to register a timeout handler for this thread, and
-- cancel that handler as soon as we exit.
bracket (T.registerKillThread (timeoutManager0 ii0)) T.cancel $ \th ->
let ii1 = toInternalInfo1 ii0 th
-- We now have fully registered a connection close handler
    -- in the case of all exceptions, so it is safe to once
-- again allow async exceptions.
in unmask .
-- Call the user-supplied on exception code if any
-- exceptions are thrown.
handle (settingsOnException set Nothing) .
-- Call the user-supplied code for connection open and close events
bracket (onOpen addr) (onClose addr) $ \goingon ->
-- Actually serve this connection.
-- bracket with closeConn above ensures the connection is closed.
when goingon $ serveConnection conn ii1 addr transport set app
where
closeConn (conn, _transport) = connClose conn
onOpen adr = increase counter >> settingsOnOpen set adr
onClose adr _ = decrease counter >> settingsOnClose set adr
serveConnection :: Connection
-> InternalInfo1
-> SockAddr
-> Transport
-> Settings
-> Application
-> IO ()
serveConnection conn ii1 origAddr transport settings app = do
-- fixme: Upgrading to HTTP/2 should be supported.
(h2,bs) <- if isHTTP2 transport then
return (True, "")
else do
bs0 <- connRecv conn
if S.length bs0 >= 4 && "PRI " `S.isPrefixOf` bs0 then
return (True, bs0)
else
return (False, bs0)
if settingsHTTP2Enabled settings && h2 then do
recvN <- makeReceiveN bs (connRecv conn) (connRecvBuf conn)
-- fixme: origAddr
http2 conn ii1 origAddr transport settings recvN app
else do
istatus <- newIORef False
src <- mkSource (wrappedRecv conn th istatus (settingsSlowlorisSize settings))
writeIORef istatus True
leftoverSource src bs
addr <- getProxyProtocolAddr src
http1 addr istatus src `E.catch` \e -> do
sendErrorResponse addr istatus e
throwIO (e :: SomeException)
where
getProxyProtocolAddr src =
case settingsProxyProtocol settings of
ProxyProtocolNone ->
return origAddr
ProxyProtocolRequired -> do
seg <- readSource src
parseProxyProtocolHeader src seg
ProxyProtocolOptional -> do
seg <- readSource src
if S.isPrefixOf "PROXY " seg
then parseProxyProtocolHeader src seg
else do leftoverSource src seg
return origAddr
parseProxyProtocolHeader src seg = do
let (header,seg') = S.break (== 0x0d) seg -- 0x0d == CR
maybeAddr = case S.split 0x20 header of -- 0x20 == space
["PROXY","TCP4",clientAddr,_,clientPort,_] ->
case [x | (x, t) <- reads (decodeAscii clientAddr), null t] of
[a] -> Just (SockAddrInet (readInt clientPort)
(toHostAddress a))
_ -> Nothing
["PROXY","TCP6",clientAddr,_,clientPort,_] ->
case [x | (x, t) <- reads (decodeAscii clientAddr), null t] of
[a] -> Just (SockAddrInet6 (readInt clientPort)
0
(toHostAddress6 a)
0)
_ -> Nothing
("PROXY":"UNKNOWN":_) ->
Just origAddr
_ ->
Nothing
case maybeAddr of
Nothing -> throwIO (BadProxyHeader (decodeAscii header))
Just a -> do leftoverSource src (S.drop 2 seg') -- drop CRLF
return a
decodeAscii = map (chr . fromEnum) . S.unpack
th = threadHandle1 ii1
shouldSendErrorResponse se
| Just ConnectionClosedByPeer <- fromException se = False
| otherwise = True
sendErrorResponse addr istatus e = do
status <- readIORef istatus
when (shouldSendErrorResponse e && status) $ do
let ii = toInternalInfo ii1 0 -- dummy
dreq = dummyreq addr
void $ sendResponse settings conn ii dreq defaultIndexRequestHeader (return S.empty) (errorResponse e)
dummyreq addr = defaultRequest { remoteHost = addr }
errorResponse e = settingsOnExceptionResponse settings e
http1 addr istatus src = do
(req', mremainingRef, idxhdr, nextBodyFlush, ii) <- recvRequest settings conn ii1 addr src
let req = req' { isSecure = isTransportSecure transport }
keepAlive <- processRequest istatus src req mremainingRef idxhdr nextBodyFlush ii
`E.catch` \e -> do
-- Call the user-supplied exception handlers, passing the request.
sendErrorResponse addr istatus e
settingsOnException settings (Just req) e
-- Don't throw the error again to prevent calling settingsOnException twice.
return False
when keepAlive $ http1 addr istatus src
processRequest istatus src req mremainingRef idxhdr nextBodyFlush ii = do
-- Let the application run for as long as it wants
T.pause th
-- In the event that some scarce resource was acquired during
-- creating the request, we need to make sure that we don't get
-- an async exception before calling the ResponseSource.
keepAliveRef <- newIORef $ error "keepAliveRef not filled"
_ <- app req $ \res -> do
T.resume th
-- FIXME consider forcing evaluation of the res here to
-- send more meaningful error messages to the user.
-- However, it may affect performance.
writeIORef istatus False
keepAlive <- sendResponse settings conn ii req idxhdr (readSource src) res
writeIORef keepAliveRef keepAlive
return ResponseReceived
keepAlive <- readIORef keepAliveRef
      -- We have just sent a Response, and it takes time for the next
      -- Request to arrive. If we called recv immediately, it would
      -- likely fail, waking the IO manager, which is very costly.
      -- So we yield to another Haskell thread, hoping that the next
      -- Request will have arrived by the time this thread is
      -- re-scheduled. This improves performance, at least when the
      -- number of cores is small.
Conc.yield
if not keepAlive then
return False
else
-- If there is an unknown or large amount of data to still be read
        -- from the request body, simply drop this connection instead of
-- reading it all in to satisfy a keep-alive request.
case settingsMaximumBodyFlush settings of
Nothing -> do
flushEntireBody nextBodyFlush
T.resume th
return True
Just maxToRead -> do
let tryKeepAlive = do
-- flush the rest of the request body
isComplete <- flushBody nextBodyFlush maxToRead
if isComplete then do
T.resume th
return True
else
return False
case mremainingRef of
Just ref -> do
remaining <- readIORef ref
if remaining <= maxToRead then
tryKeepAlive
else
return False
Nothing -> tryKeepAlive
flushEntireBody :: IO ByteString -> IO ()
flushEntireBody src =
loop
where
loop = do
bs <- src
unless (S.null bs) loop
flushBody :: IO ByteString -- ^ get next chunk
-> Int -- ^ maximum to flush
-> IO Bool -- ^ True == flushed the entire body, False == we didn't
flushBody src =
loop
where
loop toRead = do
bs <- src
let toRead' = toRead - S.length bs
case () of
()
| S.null bs -> return True
| toRead' >= 0 -> loop toRead'
| otherwise -> return False
wrappedRecv :: Connection -> T.Handle -> IORef Bool -> Int -> IO ByteString
wrappedRecv Connection { connRecv = recv } th istatus slowlorisSize = do
bs <- recv
unless (S.null bs) $ do
writeIORef istatus True
when (S.length bs >= slowlorisSize) $ T.tickle th
return bs
-- Copied from: https://github.com/mzero/plush/blob/master/src/Plush/Server/Warp.hs
setSocketCloseOnExec :: Socket -> IO ()
#if WINDOWS
setSocketCloseOnExec _ = return ()
#else
setSocketCloseOnExec socket = F.setFileCloseOnExec $ fromIntegral $ fdSocket socket
#endif
gracefulShutdown :: Counter -> IO ()
gracefulShutdown counter = waitForZero counter
| erikd/wai | warp/Network/Wai/Handler/Warp/Run.hs | mit | 20,199 | 0 | 24 | 6,013 | 3,760 | 1,959 | 1,801 | 311 | 18 |
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE GeneralizedNewtypeDeriving #-}
{-# LANGUAGE LambdaCase #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TypeFamilies #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
-- Module : Network.AWS.Lambda.RemovePermission
-- Copyright : (c) 2013-2014 Brendan Hay <brendan.g.hay@gmail.com>
-- License : This Source Code Form is subject to the terms of
-- the Mozilla Public License, v. 2.0.
-- A copy of the MPL can be found in the LICENSE file or
-- you can obtain it at http://mozilla.org/MPL/2.0/.
-- Maintainer : Brendan Hay <brendan.g.hay@gmail.com>
-- Stability : experimental
-- Portability : non-portable (GHC extensions)
--
-- Derived from AWS service descriptions, licensed under Apache 2.0.
-- | You can remove individual permissions from an access policy associated with a
-- Lambda function by providing a Statement ID.
--
-- Note that removal of a permission will cause an active event source to lose
-- permission to the function.
--
-- You need permission for the 'lambda:RemovePermission' action.
--
-- <http://docs.aws.amazon.com/lambda/latest/dg/API_RemovePermission.html>
module Network.AWS.Lambda.RemovePermission
(
-- * Request
RemovePermission
-- ** Request constructor
, removePermission
-- ** Request lenses
, rpFunctionName
, rpStatementId
-- * Response
, RemovePermissionResponse
-- ** Response constructor
, removePermissionResponse
) where
import Network.AWS.Data (Object)
import Network.AWS.Prelude
import Network.AWS.Request.RestJSON
import Network.AWS.Lambda.Types
import qualified GHC.Exts
data RemovePermission = RemovePermission
{ _rpFunctionName :: Text
, _rpStatementId :: Text
} deriving (Eq, Ord, Read, Show)
-- | 'RemovePermission' constructor.
--
-- The fields accessible through corresponding lenses are:
--
-- * 'rpFunctionName' @::@ 'Text'
--
-- * 'rpStatementId' @::@ 'Text'
--
removePermission :: Text -- ^ 'rpFunctionName'
-> Text -- ^ 'rpStatementId'
-> RemovePermission
removePermission p1 p2 = RemovePermission
{ _rpFunctionName = p1
, _rpStatementId = p2
}
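-- A hedged usage sketch; the function name and statement id below are purely
-- illustrative:
--
-- > removePermission "Thumbnail" "sid-1"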
-- | Lambda function whose access policy you want to remove a permission from.
--
-- You can specify an unqualified function name (for example, "Thumbnail") or
-- you can specify Amazon Resource Name (ARN) of the function (for example,
-- "arn:aws:lambda:us-west-2:account-id:function:ThumbNail"). AWS Lambda also
-- allows you to specify only the account ID qualifier (for example,
-- "account-id:Thumbnail"). Note that the length constraint applies only to the
-- ARN. If you specify only the function name, it is limited to 64 characters in
-- length.
rpFunctionName :: Lens' RemovePermission Text
rpFunctionName = lens _rpFunctionName (\s a -> s { _rpFunctionName = a })
-- | Statement ID of the permission to remove.
rpStatementId :: Lens' RemovePermission Text
rpStatementId = lens _rpStatementId (\s a -> s { _rpStatementId = a })
data RemovePermissionResponse = RemovePermissionResponse
deriving (Eq, Ord, Read, Show, Generic)
-- | 'RemovePermissionResponse' constructor.
removePermissionResponse :: RemovePermissionResponse
removePermissionResponse = RemovePermissionResponse
instance ToPath RemovePermission where
toPath RemovePermission{..} = mconcat
[ "/2015-03-31/functions/"
, toText _rpFunctionName
, "/versions/HEAD/policy/"
, toText _rpStatementId
]
instance ToQuery RemovePermission where
toQuery = const mempty
instance ToHeaders RemovePermission
instance ToJSON RemovePermission where
toJSON = const (toJSON Empty)
instance AWSRequest RemovePermission where
type Sv RemovePermission = Lambda
type Rs RemovePermission = RemovePermissionResponse
request = delete
response = nullResponse RemovePermissionResponse
| romanb/amazonka | amazonka-lambda/gen/Network/AWS/Lambda/RemovePermission.hs | mpl-2.0 | 4,172 | 0 | 9 | 870 | 441 | 273 | 168 | 57 | 1 |
-- Copyright (c) 1998-1999 Chris Okasaki.
-- See COPYRIGHT file for terms and conditions.
module RandList
{-# DEPRECATED "This module is unmaintained, and will disappear soon" #-}
(
-- type
Seq, -- instance of Sequence, Functor, Monad, MonadPlus
-- sequence operations
empty,single,cons,snoc,append,lview,lhead,ltail,rview,rhead,rtail,
null,size,concat,reverse,reverseOnto,fromList,toList,
map,concatMap,foldr,foldl,foldr1,foldl1,reducer,reducel,reduce1,
copy,tabulate,inBounds,lookup,lookupM,lookupWithDefault,update,adjust,
mapWithIndex,foldrWithIndex,foldlWithIndex,
take,drop,splitAt,subseq,filter,partition,takeWhile,dropWhile,splitWhile,
zip,zip3,zipWith,zipWith3,unzip,unzip3,unzipWith,unzipWith3,
-- documentation
moduleName,
-- re-export view type from EdisonPrelude for convenience
Maybe2(Just2,Nothing2)
) where
import Prelude hiding (concat,reverse,map,concatMap,foldr,foldl,foldr1,foldl1,
filter,takeWhile,dropWhile,lookup,take,drop,splitAt,
zip,zip3,zipWith,zipWith3,unzip,unzip3,null)
import EdisonPrelude(Maybe2(Just2,Nothing2))
import qualified Sequence as S( Sequence(..) )
import SequenceDefaults
import Monad
import QuickCheck
-- signatures for exported functions
moduleName :: String
empty :: Seq a
single :: a -> Seq a
cons :: a -> Seq a -> Seq a
snoc :: Seq a -> a -> Seq a
append :: Seq a -> Seq a -> Seq a
lview :: Seq a -> Maybe2 a (Seq a)
lhead :: Seq a -> a
ltail :: Seq a -> Seq a
rview :: Seq a -> Maybe2 (Seq a) a
rhead :: Seq a -> a
rtail :: Seq a -> Seq a
null :: Seq a -> Bool
size :: Seq a -> Int
concat :: Seq (Seq a) -> Seq a
reverse :: Seq a -> Seq a
reverseOnto :: Seq a -> Seq a -> Seq a
fromList :: [a] -> Seq a
toList :: Seq a -> [a]
map :: (a -> b) -> Seq a -> Seq b
concatMap :: (a -> Seq b) -> Seq a -> Seq b
foldr :: (a -> b -> b) -> b -> Seq a -> b
foldl :: (b -> a -> b) -> b -> Seq a -> b
foldr1 :: (a -> a -> a) -> Seq a -> a
foldl1 :: (a -> a -> a) -> Seq a -> a
reducer :: (a -> a -> a) -> a -> Seq a -> a
reducel :: (a -> a -> a) -> a -> Seq a -> a
reduce1 :: (a -> a -> a) -> Seq a -> a
copy :: Int -> a -> Seq a
tabulate :: Int -> (Int -> a) -> Seq a
inBounds :: Seq a -> Int -> Bool
lookup :: Seq a -> Int -> a
lookupM :: Seq a -> Int -> Maybe a
lookupWithDefault :: a -> Seq a -> Int -> a
update :: Int -> a -> Seq a -> Seq a
adjust :: (a -> a) -> Int -> Seq a -> Seq a
mapWithIndex :: (Int -> a -> b) -> Seq a -> Seq b
foldrWithIndex :: (Int -> a -> b -> b) -> b -> Seq a -> b
foldlWithIndex :: (b -> Int -> a -> b) -> b -> Seq a -> b
take :: Int -> Seq a -> Seq a
drop :: Int -> Seq a -> Seq a
splitAt :: Int -> Seq a -> (Seq a, Seq a)
subseq :: Int -> Int -> Seq a -> Seq a
filter :: (a -> Bool) -> Seq a -> Seq a
partition :: (a -> Bool) -> Seq a -> (Seq a, Seq a)
takeWhile :: (a -> Bool) -> Seq a -> Seq a
dropWhile :: (a -> Bool) -> Seq a -> Seq a
splitWhile :: (a -> Bool) -> Seq a -> (Seq a, Seq a)
zip :: Seq a -> Seq b -> Seq (a,b)
zip3 :: Seq a -> Seq b -> Seq c -> Seq (a,b,c)
zipWith :: (a -> b -> c) -> Seq a -> Seq b -> Seq c
zipWith3 :: (a -> b -> c -> d) -> Seq a -> Seq b -> Seq c -> Seq d
unzip :: Seq (a,b) -> (Seq a, Seq b)
unzip3 :: Seq (a,b,c) -> (Seq a, Seq b, Seq c)
unzipWith :: (a -> b) -> (a -> c) -> Seq a -> (Seq b, Seq c)
unzipWith3 :: (a -> b) -> (a -> c) -> (a -> d) -> Seq a -> (Seq b, Seq c, Seq d)
moduleName = "RandList"
-- Adapted from
-- Chris Okasaki. Purely Functional Data Structures. 1998.
-- Section 9.3.1.
-- and
-- Chris Okasaki. "Purely Functional Random Access Lists". FPCA'95,
-- pages 86-95.
data Tree a = L a | T a (Tree a) (Tree a) deriving (Eq)
data Seq a = E | C !Int (Tree a) (Seq a) --deriving (Eq)
-- want to derive Eq but can't because of GHC bug
half :: Int -> Int
half n = n `quot` 2 -- use a shift?
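-- Answering the "use a shift?" question above, a shift-based variant would be
-- (sketch; needs Data.Bits, and agrees with `quot` only for the non-negative
-- sizes used in this module):
--
-- > half n = n `shiftR` 1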
empty = E
single x = C 1 (L x) E
cons x xs@(C i s (C j t xs'))
| i == j = C (1 + i + j) (T x s t) xs'
cons x xs = C 1 (L x) xs
copy n x = if n <= 0 then E else buildTrees (1::Int) (L x)
where buildTrees j t
| j > n = takeTrees n (half j) (child t) E
| otherwise = buildTrees (1 + j + j) (T x t t)
takeTrees i j t xs
| i >= j = takeTrees (i - j) j t (C j t xs)
| i > 0 = takeTrees i (half j) (child t) xs
| otherwise = xs
child (T x s t) = t
lview E = Nothing2
lview (C _ (L x) xs) = Just2 x xs
lview (C i (T x s t) xs) = Just2 x (C j s (C j t xs))
where j = half i
lhead E = error "RandList.lhead: empty sequence"
lhead (C _ (L x) xs) = x
lhead (C _ (T x s t) xs) = x
ltail E = E
ltail (C _ (L x) xs) = xs
ltail (C i (T x s t) xs) = C j s (C j t xs)
where j = half i
rhead E = error "RandList.rhead: empty sequence"
rhead (C _ t E) = treeLast t
where treeLast (L x) = x
treeLast (T x s t) = treeLast t
rhead (C _ t xs) = rhead xs
null E = True
null _ = False
size xs = sz xs
where sz E = (0::Int)
sz (C j t xs) = j + sz xs
reverseOnto E ys = ys
reverseOnto (C _ t xs) ys = reverseOnto xs (revTree t ys)
where revTree (L x) ys = cons x ys
revTree (T x s t) ys = revTree t (revTree s (cons x ys))
map f E = E
map f (C j t xs) = C j (mapTree f t) (map f xs)
where mapTree f (L x) = L (f x)
mapTree f (T x s t) = T (f x) (mapTree f s) (mapTree f t)
foldr f e E = e
foldr f e (C _ t xs) = foldTree t (foldr f e xs)
where foldTree (L x) e = f x e
foldTree (T x s t) e = f x (foldTree s (foldTree t e))
foldl f e E = e
foldl f e (C _ t xs) = foldl f (foldTree e t) xs
where foldTree e (L x) = f e x
foldTree e (T x s t) = foldTree (foldTree (f e x) s) t
reduce1 f xs = case lview xs of
Nothing2 -> error "RandList.reduce1: empty seq"
Just2 x xs -> red1 x xs
where red1 x E = x
red1 x (C j t xs) = red1 (redTree x t) xs
redTree x (L y) = f x y
redTree x (T y s t) = redTree (redTree (f x y) s) t
inBounds xs i = inb xs i
where inb E i = False
inb (C j t xs) i
| i < j = (i >= 0)
| otherwise = inb xs (i - j)
lookup xs i = look xs i
where look E i = error "RandList.lookup: bad subscript"
look (C j t xs) i
| i < j = lookTree j t i
| otherwise = look xs (i - j)
lookTree _ (L x) i
| i == 0 = x
| otherwise = error "RandList.lookup: bad subscript"
lookTree j (T x s t) i
| i > k = lookTree k t (i - 1 - k)
| i /= 0 = lookTree k s (i - 1)
| otherwise = x
where k = half j
lookupM xs i = look xs i
where look E i = Nothing
look (C j t xs) i
| i < j = lookTree j t i
| otherwise = look xs (i - j)
lookTree _ (L x) i
| i == 0 = Just x
| otherwise = Nothing
lookTree j (T x s t) i
| i > k = lookTree k t (i - 1 - k)
| i /= 0 = lookTree k s (i - 1)
| otherwise = Just x
where k = half j
lookupWithDefault d xs i = look xs i
where look E i = d
look (C j t xs) i
| i < j = lookTree j t i
| otherwise = look xs (i - j)
lookTree _ (L x) i
| i == 0 = x
| otherwise = d
lookTree j (T x s t) i
| i > k = lookTree k t (i - 1 - k)
| i /= 0 = lookTree k s (i - 1)
| otherwise = x
where k = half j
update i y xs = upd i xs
where upd i E = E
upd i (C j t xs)
| i < j = C j (updTree i j t) xs
| otherwise = C j t (upd (i - j) xs)
updTree i j t@(L x)
| i == 0 = L y
| otherwise = t
updTree i j (T x s t)
| i > k = T x s (updTree (i - 1 - k) k t)
| i /= 0 = T x (updTree (i - 1) k s) t
| otherwise = T y s t
where k = half j
adjust f i xs = adj i xs
where adj i E = E
adj i (C j t xs)
| i < j = C j (adjTree i j t) xs
| otherwise = C j t (adj (i - j) xs)
adjTree i j t@(L x)
| i == 0 = L (f x)
| otherwise = t
adjTree i j (T x s t)
| i > k = T x s (adjTree (i - 1 - k) k t)
| i /= 0 = T x (adjTree (i - 1) k s) t
| otherwise = T (f x) s t
where k = half j
drop n xs = if n < 0 then xs else drp n xs
where drp i E = E
drp i (C j t xs)
| i < j = drpTree i j t xs
| otherwise = drp (i - j) xs
drpTree 0 j t xs = C j t xs
drpTree i j (L x) xs = error "RandList.drop: bug. Impossible case!"
drpTree i j (T x s t) xs
| i > k = drpTree (i - 1 - k) k t xs
| otherwise = drpTree (i - 1) k s (C k t xs)
where k = half j
-- the remaining functions all use defaults
snoc = snocUsingFoldr
append = appendUsingFoldr
rview = rviewDefault
rtail = rtailUsingLview
concat = concatUsingFoldr
reverse = reverseUsingReverseOnto
fromList = fromListUsingCons
toList = toListUsingFoldr
concatMap = concatMapUsingFoldr
foldr1 = foldr1UsingLview
foldl1 = foldl1UsingFoldl
reducer = reducerUsingReduce1
reducel = reducelUsingReduce1
tabulate = tabulateUsingLists
mapWithIndex = mapWithIndexUsingLists
foldrWithIndex = foldrWithIndexUsingLists
foldlWithIndex = foldlWithIndexUsingLists
take = takeUsingLists
splitAt = splitAtDefault
filter = filterUsingFoldr
partition = partitionUsingFoldr
subseq = subseqDefault
takeWhile = takeWhileUsingLview
dropWhile = dropWhileUsingLview
splitWhile = splitWhileUsingLview
-- for zips, could optimize by calculating which one is shorter and
-- retaining its shape
zip = zipUsingLists
zip3 = zip3UsingLists
zipWith = zipWithUsingLists
zipWith3 = zipWith3UsingLists
unzip = unzipUsingLists
unzip3 = unzip3UsingLists
unzipWith = unzipWithUsingLists
unzipWith3 = unzipWith3UsingLists
-- instances
instance S.Sequence Seq where
{empty = empty; single = single; cons = cons; snoc = snoc;
append = append; lview = lview; lhead = lhead; ltail = ltail;
rview = rview; rhead = rhead; rtail = rtail; null = null;
size = size; concat = concat; reverse = reverse;
reverseOnto = reverseOnto; fromList = fromList; toList = toList;
map = map; concatMap = concatMap; foldr = foldr; foldl = foldl;
foldr1 = foldr1; foldl1 = foldl1; reducer = reducer;
reducel = reducel; reduce1 = reduce1; copy = copy;
tabulate = tabulate; inBounds = inBounds; lookup = lookup;
lookupM = lookupM; lookupWithDefault = lookupWithDefault;
update = update; adjust = adjust; mapWithIndex = mapWithIndex;
foldrWithIndex = foldrWithIndex; foldlWithIndex = foldlWithIndex;
take = take; drop = drop; splitAt = splitAt; subseq = subseq;
filter = filter; partition = partition; takeWhile = takeWhile;
dropWhile = dropWhile; splitWhile = splitWhile; zip = zip;
zip3 = zip3; zipWith = zipWith; zipWith3 = zipWith3; unzip = unzip;
unzip3 = unzip3; unzipWith = unzipWith; unzipWith3 = unzipWith3;
instanceName s = moduleName}
instance Functor Seq where
fmap = map
instance Monad Seq where
return = single
xs >>= k = concatMap k xs
instance MonadPlus Seq where
mplus = append
mzero = empty
-- want to derive the following instance but can't because of GHC bug
instance Eq a => Eq (Seq a) where
C i tx xs == C j ty ys = (i == j) && (tx == ty) && (xs == ys)
E == E = True
_ == _ = False
instance Show a => Show (Seq a) where
show xs = show (toList xs)
instance Arbitrary a => Arbitrary (Seq a) where
arbitrary = do xs <- arbitrary
return (fromList xs)
coarbitrary xs = coarbitrary (toList xs)
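-- A small usage sketch (values chosen purely for illustration): indexing is
-- zero-based, so
--
-- > lookup (fromList [10,20,30]) 1
--
-- yields 20, and consing a new element onto the front shifts every existing
-- index up by one.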
| FranklinChen/hugs98-plus-Sep2006 | fptools/hslibs/data/edison/Seq/RandList.hs | bsd-3-clause | 12,098 | 2 | 13 | 4,006 | 5,495 | 2,831 | 2,664 | 295 | 5 |
{-#LANGUAGE NoImplicitPrelude #-}
{-#LANGUAGE LambdaCase #-}
{-#LANGUAGE ScopedTypeVariables #-}
{-#LANGUAGE OverloadedStrings #-}
module Web.Sprinkles.Cache.Memcached
where
import Web.Sprinkles.Prelude
import Web.Sprinkles.Cache
import Data.Time.Clock.POSIX
import qualified Data.HashMap.Strict as HashMap
import Data.Default
import qualified Database.Memcache.Client as Memcache
memcachedCache :: IO (Cache ByteString ByteString)
memcachedCache = do
let options :: Memcache.Options = def
withConnection =
bracket
(Memcache.newClient [] options)
(Memcache.quit)
expiry = 60
return
Cache
{ cacheGet = \key -> do
withConnection $ \client -> Memcache.gat client key expiry >>= \case
Just (val, _, _) -> return (Just val)
Nothing -> return Nothing
, cachePut = \key val -> do
withConnection $ \client ->
Memcache.set client key val 0 expiry
return ()
, cacheDelete = \key -> do
withConnection $ \client ->
Memcache.delete client key 0
return ()
, cacheVacuum = return 0
}
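-- A minimal usage sketch (assumes the empty server list given to
-- 'Memcache.newClient' resolves to a reachable memcached instance; the key
-- and value are illustrative):
--
-- > demo :: IO ()
-- > demo = do
-- >   cache <- memcachedCache
-- >   cachePut cache "greeting" "hello"
-- >   print =<< cacheGet cache "greeting"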
| tdammers/templar | src/Web/Sprinkles/Cache/Memcached.hs | bsd-3-clause | 1,249 | 0 | 21 | 409 | 300 | 164 | 136 | 34 | 2 |
{-
(c) The AQUA Project, Glasgow University, 1993-1998
\section[Simplify]{The main module of the simplifier}
-}
{-# LANGUAGE CPP #-}
module Simplify ( simplTopBinds, simplExpr, simplRules ) where
#include "HsVersions.h"
import DynFlags
import SimplMonad
import Type hiding ( substTy, substTyVar, extendTvSubst, extendCvSubst )
import SimplEnv
import SimplUtils
import FamInstEnv ( FamInstEnv )
import Literal ( litIsLifted ) --, mkMachInt ) -- temporarily commented out. See #8326
import Id
import MkId ( seqId, voidPrimId )
import MkCore ( mkImpossibleExpr, castBottomExpr )
import IdInfo
import Name ( Name, mkSystemVarName, isExternalName, getOccFS )
import Coercion hiding ( substCo, substCoVar )
import OptCoercion ( optCoercion )
import FamInstEnv ( topNormaliseType_maybe )
import DataCon ( DataCon, dataConWorkId, dataConRepStrictness
, isMarkedStrict, dataConRepArgTys ) --, dataConTyCon, dataConTag, fIRST_TAG )
--import TyCon ( isEnumerationTyCon ) -- temporarily commented out. See #8326
import CoreMonad ( Tick(..), SimplifierMode(..) )
import CoreSyn
import Demand ( StrictSig(..), dmdTypeDepth, isStrictDmd )
import PprCore ( pprCoreExpr )
import CoreUnfold
import CoreUtils
import CoreArity
--import PrimOp ( tagToEnumKey ) -- temporarily commented out. See #8326
import Rules ( mkRuleInfo, lookupRule, getRules )
import TysPrim ( voidPrimTy ) --, intPrimTy ) -- temporarily commented out. See #8326
import BasicTypes ( TopLevelFlag(..), isTopLevel, RecFlag(..) )
import MonadUtils ( foldlM, mapAccumLM, liftIO )
import Maybes ( orElse )
--import Unique ( hasKey ) -- temporarily commented out. See #8326
import Control.Monad
import Outputable
import FastString
import Pair
import Util
import ErrUtils
{-
The guts of the simplifier is in this module, but the driver loop for
the simplifier is in SimplCore.hs.
-----------------------------------------
*** IMPORTANT NOTE ***
-----------------------------------------
The simplifier used to guarantee that the output had no shadowing, but
it does not do so any more. (Actually, it never did!) The reason is
documented with simplifyArgs.
-----------------------------------------
*** IMPORTANT NOTE ***
-----------------------------------------
Many parts of the simplifier return a bunch of "floats" as well as an
expression. This is wrapped as a datatype SimplUtils.FloatsWith.
All "floats" are let-binds, not case-binds, but some non-rec lets may
be unlifted (with RHS ok-for-speculation).
-----------------------------------------
ORGANISATION OF FUNCTIONS
-----------------------------------------
simplTopBinds
- simplify all top-level binders
- for NonRec, call simplRecOrTopPair
- for Rec, call simplRecBind
------------------------------
simplExpr (applied lambda) ==> simplNonRecBind
simplExpr (Let (NonRec ...) ..) ==> simplNonRecBind
simplExpr (Let (Rec ...) ..) ==> simplify binders; simplRecBind
------------------------------
simplRecBind [binders already simplified]
- use simplRecOrTopPair on each pair in turn
simplRecOrTopPair [binder already simplified]
Used for: recursive bindings (top level and nested)
top-level non-recursive bindings
Returns:
- check for PreInlineUnconditionally
- simplLazyBind
simplNonRecBind
Used for: non-top-level non-recursive bindings
beta reductions (which amount to the same thing)
Because it can deal with strict args, it takes a
"thing-inside" and returns an expression
- check for PreInlineUnconditionally
- simplify binder, including its IdInfo
- if strict binding
simplStrictArg
mkAtomicArgs
completeNonRecX
else
simplLazyBind
addFloats
simplNonRecX: [given a *simplified* RHS, but an *unsimplified* binder]
Used for: binding case-binder and constr args in a known-constructor case
- check for PreInlineUnconditionally
- simplify binder
- completeNonRecX
------------------------------
simplLazyBind: [binder already simplified, RHS not]
Used for: recursive bindings (top level and nested)
top-level non-recursive bindings
non-top-level, but *lazy* non-recursive bindings
[must not be strict or unboxed]
Returns floats + an augmented environment, not an expression
- substituteIdInfo and add result to in-scope
[so that rules are available in rec rhs]
- simplify rhs
- mkAtomicArgs
- float if exposes constructor or PAP
- completeBind
completeNonRecX: [binder and rhs both simplified]
- if the thing needs case binding (unlifted and not ok-for-spec)
build a Case
else
completeBind
addFloats
completeBind: [given a simplified RHS]
[used for both rec and non-rec bindings, top level and not]
- try PostInlineUnconditionally
- add unfolding [this is the only place we add an unfolding]
- add arity
Right hand sides and arguments
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
In many ways we want to treat
(a) the right hand side of a let(rec), and
(b) a function argument
in the same way. But not always! In particular, we would
like to leave these arguments exactly as they are, so they
will match a RULE more easily.
f (g x, h x)
g (+ x)
It's harder to make the rule match if we ANF-ise the constructor,
or eta-expand the PAP:
f (let { a = g x; b = h x } in (a,b))
g (\y. + x y)
On the other hand if we see the let-defns
p = (g x, h x)
q = + x
then we *do* want to ANF-ise and eta-expand, so that p and q
can be safely inlined.
Even floating lets out is a bit dubious. For let RHS's we float lets
out if that exposes a value, so that the value can be inlined more vigorously.
For example
r = let x = e in (x,x)
Here, if we float the let out we'll expose a nice constructor. We did experiments
that showed this to be a generally good thing. But it was a bad thing to float
lets out unconditionally, because that meant they got allocated more often.
For function arguments, there's less reason to expose a constructor (it won't
get inlined). Just possibly it might make a rule match, but I'm pretty skeptical.
So for the moment we don't float lets out of function arguments either.
Eta expansion
~~~~~~~~~~~~~~
For eta expansion, we want to catch things like
case e of (a,b) -> \x -> case a of (p,q) -> \y -> r
If the \x was on the RHS of a let, we'd eta expand to bring the two
lambdas together. And in general that's a good thing to do. Perhaps
we should eta expand wherever we find a (value) lambda? Then the eta
expansion at a let RHS can concentrate solely on the PAP case.
************************************************************************
* *
\subsection{Bindings}
* *
************************************************************************
-}
simplTopBinds :: SimplEnv -> [InBind] -> SimplM SimplEnv
simplTopBinds env0 binds0
= do { -- Put all the top-level binders into scope at the start
-- so that if a transformation rule has unexpectedly brought
-- anything into scope, then we don't get a complaint about that.
-- It's rather as if the top-level binders were imported.
-- See note [Glomming] in OccurAnal.
; env1 <- simplRecBndrs env0 (bindersOfBinds binds0)
; env2 <- simpl_binds env1 binds0
; freeTick SimplifierDone
; return env2 }
where
-- We need to track the zapped top-level binders, because
-- they should have their fragile IdInfo zapped (notably occurrence info)
-- That's why we run down binds and bndrs' simultaneously.
--
simpl_binds :: SimplEnv -> [InBind] -> SimplM SimplEnv
simpl_binds env [] = return env
simpl_binds env (bind:binds) = do { env' <- simpl_bind env bind
; simpl_binds env' binds }
simpl_bind env (Rec pairs) = simplRecBind env TopLevel pairs
simpl_bind env (NonRec b r) = do { (env', b') <- addBndrRules env b (lookupRecBndr env b)
; simplRecOrTopPair env' TopLevel NonRecursive b b' r }
{-
************************************************************************
* *
\subsection{Lazy bindings}
* *
************************************************************************
simplRecBind is used for
* recursive bindings only
-}
simplRecBind :: SimplEnv -> TopLevelFlag
-> [(InId, InExpr)]
-> SimplM SimplEnv
simplRecBind env0 top_lvl pairs0
= do { (env_with_info, triples) <- mapAccumLM add_rules env0 pairs0
; env1 <- go (zapFloats env_with_info) triples
; return (env0 `addRecFloats` env1) }
-- addFloats adds the floats from env1,
-- _and_ updates env0 with the in-scope set from env1
where
add_rules :: SimplEnv -> (InBndr,InExpr) -> SimplM (SimplEnv, (InBndr, OutBndr, InExpr))
-- Add the (substituted) rules to the binder
add_rules env (bndr, rhs)
= do { (env', bndr') <- addBndrRules env bndr (lookupRecBndr env bndr)
; return (env', (bndr, bndr', rhs)) }
go env [] = return env
go env ((old_bndr, new_bndr, rhs) : pairs)
= do { env' <- simplRecOrTopPair env top_lvl Recursive old_bndr new_bndr rhs
; go env' pairs }
{-
simplRecOrTopPair is used for
* recursive bindings (whether top level or not)
* top-level non-recursive bindings
It assumes the binder has already been simplified, but not its IdInfo.
-}
simplRecOrTopPair :: SimplEnv
-> TopLevelFlag -> RecFlag
-> InId -> OutBndr -> InExpr -- Binder and rhs
-> SimplM SimplEnv -- Returns an env that includes the binding
simplRecOrTopPair env top_lvl is_rec old_bndr new_bndr rhs
= do { dflags <- getDynFlags
; trace_bind dflags $
if preInlineUnconditionally dflags env top_lvl old_bndr rhs
-- Check for unconditional inline
then do tick (PreInlineUnconditionally old_bndr)
return (extendIdSubst env old_bndr (mkContEx env rhs))
else simplLazyBind env top_lvl is_rec old_bndr new_bndr rhs env }
where
trace_bind dflags thing_inside
| not (dopt Opt_D_verbose_core2core dflags)
= thing_inside
| otherwise
= pprTrace "SimplBind" (ppr old_bndr) thing_inside
-- trace_bind emits a trace for each top-level binding, which
-- helps to locate the tracing for inlining and rule firing
{-
simplLazyBind is used for
* [simplRecOrTopPair] recursive bindings (whether top level or not)
* [simplRecOrTopPair] top-level non-recursive bindings
* [simplNonRecE] non-top-level *lazy* non-recursive bindings
Nota bene:
1. It assumes that the binder is *already* simplified,
and is in scope, and its IdInfo too, except unfolding
2. It assumes that the binder type is lifted.
3. It does not check for pre-inline-unconditionally;
that should have been done already.
-}
simplLazyBind :: SimplEnv
-> TopLevelFlag -> RecFlag
-> InId -> OutId -- Binder, both pre-and post simpl
-- The OutId has IdInfo, except arity, unfolding
-> InExpr -> SimplEnv -- The RHS and its environment
-> SimplM SimplEnv
-- Precondition: rhs obeys the let/app invariant
simplLazyBind env top_lvl is_rec bndr bndr1 rhs rhs_se
= -- pprTrace "simplLazyBind" ((ppr bndr <+> ppr bndr1) $$ ppr rhs $$ ppr (seIdSubst rhs_se)) $
do { let rhs_env = rhs_se `setInScope` env
(tvs, body) = case collectTyAndValBinders rhs of
(tvs, [], body)
| surely_not_lam body -> (tvs, body)
_ -> ([], rhs)
surely_not_lam (Lam {}) = False
surely_not_lam (Tick t e)
| not (tickishFloatable t) = surely_not_lam e
-- eta-reduction could float
surely_not_lam _ = True
-- Do not do the "abstract tyvar" thing if there's
-- a lambda inside, because it defeats eta-reduction
-- f = /\a. \x. g a x
-- should eta-reduce.
; (body_env, tvs') <- simplBinders rhs_env tvs
-- See Note [Floating and type abstraction] in SimplUtils
-- Simplify the RHS
; let rhs_cont = mkRhsStop (substTy body_env (exprType body))
; (body_env1, body1) <- simplExprF body_env body rhs_cont
-- ANF-ise a constructor or PAP rhs
; (body_env2, body2) <- prepareRhs top_lvl body_env1 bndr1 body1
; (env', rhs')
<- if not (doFloatFromRhs top_lvl is_rec False body2 body_env2)
then -- No floating, revert to body1
do { rhs' <- mkLam tvs' (wrapFloats body_env1 body1) rhs_cont
; return (env, rhs') }
else if null tvs then -- Simple floating
do { tick LetFloatFromLet
; return (addFloats env body_env2, body2) }
else -- Do type-abstraction first
do { tick LetFloatFromLet
; (poly_binds, body3) <- abstractFloats tvs' body_env2 body2
; rhs' <- mkLam tvs' body3 rhs_cont
; env' <- foldlM (addPolyBind top_lvl) env poly_binds
; return (env', rhs') }
; completeBind env' top_lvl bndr bndr1 rhs' }
{-
A specialised variant of simplNonRec used when the RHS is already simplified,
notably in knownCon. It uses case-binding where necessary.
-}
simplNonRecX :: SimplEnv
-> InId -- Old binder
-> OutExpr -- Simplified RHS
-> SimplM SimplEnv
-- Precondition: rhs satisfies the let/app invariant
simplNonRecX env bndr new_rhs
| isDeadBinder bndr -- Not uncommon; e.g. case (a,b) of c { (p,q) -> p }
= return env -- Here c is dead, and we avoid creating
-- the binding c = (a,b)
| Coercion co <- new_rhs
= return (extendCvSubst env bndr co)
| otherwise
= do { (env', bndr') <- simplBinder env bndr
; completeNonRecX NotTopLevel env' (isStrictId bndr) bndr bndr' new_rhs }
-- simplNonRecX is only used for NotTopLevel things
completeNonRecX :: TopLevelFlag -> SimplEnv
-> Bool
-> InId -- Old binder
-> OutId -- New binder
-> OutExpr -- Simplified RHS
-> SimplM SimplEnv
-- Precondition: rhs satisfies the let/app invariant
-- See Note [CoreSyn let/app invariant] in CoreSyn
completeNonRecX top_lvl env is_strict old_bndr new_bndr new_rhs
= do { (env1, rhs1) <- prepareRhs top_lvl (zapFloats env) new_bndr new_rhs
; (env2, rhs2) <-
if doFloatFromRhs NotTopLevel NonRecursive is_strict rhs1 env1
then do { tick LetFloatFromLet
; return (addFloats env env1, rhs1) } -- Add the floats to the main env
else return (env, wrapFloats env1 rhs1) -- Wrap the floats around the RHS
; completeBind env2 NotTopLevel old_bndr new_bndr rhs2 }
{-
{- No, no, no! Do not try preInlineUnconditionally in completeNonRecX
Doing so risks exponential behaviour, because new_rhs has been simplified once already
In the cases described by the following comment, postInlineUnconditionally will
catch many of the relevant cases.
-- This happens; for example, the case_bndr during case of
-- known constructor: case (a,b) of x { (p,q) -> ... }
-- Here x isn't mentioned in the RHS, so we don't want to
-- create the (dead) let-binding let x = (a,b) in ...
--
-- Similarly, single occurrences can be inlined vigorously
-- e.g. case (f x, g y) of (a,b) -> ....
-- If a,b occur once we can avoid constructing the let binding for them.
Furthermore in the case-binding case preInlineUnconditionally risks extra thunks
-- Consider case I# (quotInt# x y) of
-- I# v -> let w = J# v in ...
-- If we gaily inline (quotInt# x y) for v, we end up building an
-- extra thunk:
-- let w = J# (quotInt# x y) in ...
-- because quotInt# can fail.
| preInlineUnconditionally env NotTopLevel bndr new_rhs
= thing_inside (extendIdSubst env bndr (DoneEx new_rhs))
-}
----------------------------------
prepareRhs takes a putative RHS, checks whether it's a PAP or
constructor application and, if so, converts it to ANF, so that the
resulting thing can be inlined more easily. Thus
x = (f a, g b)
becomes
t1 = f a
t2 = g b
x = (t1,t2)
We also want to deal well with cases like this
v = (f e1 `cast` co) e2
Here we want to make e1,e2 trivial and get
x1 = e1; x2 = e2; v = (f x1 `cast` co) x2
That's what the 'go' loop in prepareRhs does
-}
prepareRhs :: TopLevelFlag -> SimplEnv -> OutId -> OutExpr -> SimplM (SimplEnv, OutExpr)
-- Adds new floats to the env iff that allows us to return a good RHS
prepareRhs top_lvl env id (Cast rhs co) -- Note [Float coercions]
| Pair ty1 _ty2 <- coercionKind co -- Do *not* do this if rhs has an unlifted type
, not (isUnliftedType ty1) -- see Note [Float coercions (unlifted)]
= do { (env', rhs') <- makeTrivialWithInfo top_lvl env (getOccFS id) sanitised_info rhs
; return (env', Cast rhs' co) }
where
sanitised_info = vanillaIdInfo `setStrictnessInfo` strictnessInfo info
`setDemandInfo` demandInfo info
info = idInfo id
prepareRhs top_lvl env0 id rhs0
= do { (_is_exp, env1, rhs1) <- go 0 env0 rhs0
; return (env1, rhs1) }
where
go n_val_args env (Cast rhs co)
= do { (is_exp, env', rhs') <- go n_val_args env rhs
; return (is_exp, env', Cast rhs' co) }
go n_val_args env (App fun (Type ty))
= do { (is_exp, env', rhs') <- go n_val_args env fun
; return (is_exp, env', App rhs' (Type ty)) }
go n_val_args env (App fun arg)
= do { (is_exp, env', fun') <- go (n_val_args+1) env fun
; case is_exp of
True -> do { (env'', arg') <- makeTrivial top_lvl env' (getOccFS id) arg
; return (True, env'', App fun' arg') }
False -> return (False, env, App fun arg) }
go n_val_args env (Var fun)
= return (is_exp, env, Var fun)
where
is_exp = isExpandableApp fun n_val_args -- The fun a constructor or PAP
-- See Note [CONLIKE pragma] in BasicTypes
-- The definition of is_exp should match that in
-- OccurAnal.occAnalApp
go n_val_args env (Tick t rhs)
-- We want to be able to float bindings past this
-- tick. Non-scoping ticks don't care.
| tickishScoped t == NoScope
= do { (is_exp, env', rhs') <- go n_val_args env rhs
; return (is_exp, env', Tick t rhs') }
-- On the other hand, for scoping ticks we need to be able to
-- copy them on the floats, which in turn is only allowed if
-- we can obtain non-counting ticks.
| not (tickishCounts t) || tickishCanSplit t
= do { (is_exp, env', rhs') <- go n_val_args (zapFloats env) rhs
; let tickIt (id, expr) = (id, mkTick (mkNoCount t) expr)
floats' = seFloats $ env `addFloats` mapFloats env' tickIt
; return (is_exp, env' { seFloats = floats' }, Tick t rhs') }
go _ env other
= return (False, env, other)
{-
Note [Float coercions]
~~~~~~~~~~~~~~~~~~~~~~
When we find the binding
x = e `cast` co
we'd like to transform it to
x' = e
x = x' `cast` co -- A trivial binding
There's a chance that e will be a constructor application or function, or something
like that, so moving the coercion to the usage site may well cancel the coercions
and lead to further optimisation. Example:
data family T a :: *
data instance T Int = T Int
foo :: Int -> Int -> Int
foo m n = ...
where
x = T m
go 0 = 0
go n = case x of { T m -> go (n-m) }
-- This case should optimise
Note [Preserve strictness when floating coercions]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
In the Note [Float coercions] transformation, keep the strictness info.
Eg
f = e `cast` co -- f has strictness SSL
When we transform to
f' = e -- f' also has strictness SSL
f = f' `cast` co -- f still has strictness SSL
Its not wrong to drop it on the floor, but better to keep it.
Note [Float coercions (unlifted)]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
BUT don't do [Float coercions] if 'e' has an unlifted type.
This *can* happen:
foo :: Int = (error (# Int,Int #) "urk")
`cast` CoUnsafe (# Int,Int #) Int
If we do the makeTrivial thing to the error call, we'll get
foo = case error (# Int,Int #) "urk" of v -> v `cast` ...
But 'v' isn't in scope!
These strange casts can happen as a result of case-of-case
bar = case (case x of { T -> (# 2,3 #); F -> error "urk" }) of
(# p,q #) -> p+q
-}
makeTrivialArg :: SimplEnv -> ArgSpec -> SimplM (SimplEnv, ArgSpec)
makeTrivialArg env (ValArg e) = do
{ (env', e') <- makeTrivial NotTopLevel env (fsLit "arg") e
; return (env', ValArg e') }
makeTrivialArg env arg = return (env, arg) -- CastBy, TyArg
makeTrivial :: TopLevelFlag -> SimplEnv
-> FastString -- ^ a "friendly name" to build the new binder from
-> OutExpr -> SimplM (SimplEnv, OutExpr)
-- Binds the expression to a variable, if it's not trivial, returning the variable
makeTrivial top_lvl env context expr =
makeTrivialWithInfo top_lvl env context vanillaIdInfo expr
makeTrivialWithInfo :: TopLevelFlag -> SimplEnv
-> FastString
-- ^ a "friendly name" to build the new binder from
-> IdInfo -> OutExpr -> SimplM (SimplEnv, OutExpr)
-- Propagate strictness and demand info to the new binder
-- Note [Preserve strictness when floating coercions]
-- Returned SimplEnv has same substitution as incoming one
makeTrivialWithInfo top_lvl env context info expr
| exprIsTrivial expr -- Already trivial
|| not (bindingOk top_lvl expr expr_ty) -- Cannot trivialise
-- See Note [Cannot trivialise]
= return (env, expr)
| otherwise -- See Note [Take care] below
= do { uniq <- getUniqueM
; let name = mkSystemVarName uniq context
var = mkLocalIdOrCoVarWithInfo name expr_ty info
; env' <- completeNonRecX top_lvl env False var var expr
; expr' <- simplVar env' var
; return (env', expr') }
-- The simplVar is needed because we're constructing a new binding
-- a = rhs
-- And if rhs is of form (rhs1 |> co), then we might get
-- a1 = rhs1
-- a = a1 |> co
-- and now a's RHS is trivial and can be substituted out, and that
-- is what completeNonRecX will do
-- To put it another way, it's as if we'd simplified
-- let var = e in var
where
expr_ty = exprType expr
bindingOk :: TopLevelFlag -> CoreExpr -> Type -> Bool
-- True iff we can have a binding of this expression at this level
-- Precondition: the type is the type of the expression
bindingOk top_lvl _ expr_ty
| isTopLevel top_lvl = not (isUnliftedType expr_ty)
| otherwise = True
{-
Note [Cannot trivialise]
~~~~~~~~~~~~~~~~~~~~~~~~
Consider this
f :: Int -> Addr#
foo :: Bar
foo = Bar (f 3)
Then we can't ANF-ise foo, even though we'd like to, because
we can't make a top-level binding for the Addr# (f 3). And if
so we don't want to turn it into
foo = let x = f 3 in Bar x
because we'll just end up inlining x back, and that makes the
simplifier loop. Better not to ANF-ise it at all.
A case in point is literal strings (a MachStr is not regarded as
trivial):
foo = Ptr "blob"#
We don't want to ANF-ise this.
************************************************************************
* *
\subsection{Completing a lazy binding}
* *
************************************************************************
completeBind
* deals only with Ids, not TyVars
* takes an already-simplified binder and RHS
* is used for both recursive and non-recursive bindings
* is used for both top-level and non-top-level bindings
It does the following:
- tries discarding a dead binding
- tries PostInlineUnconditionally
- add unfolding [this is the only place we add an unfolding]
- add arity
It does *not* attempt to do let-to-case. Why? Because it is used for
- top-level bindings (when let-to-case is impossible)
- many situations where the "rhs" is known to be a WHNF
(so let-to-case is inappropriate).
Nor does it do the atomic-argument thing
-}
completeBind :: SimplEnv
-> TopLevelFlag -- Flag stuck into unfolding
-> InId -- Old binder
-> OutId -> OutExpr -- New binder and RHS
-> SimplM SimplEnv
-- completeBind may choose to do its work
-- * by extending the substitution (e.g. let x = y in ...)
-- * or by adding to the floats in the envt
--
-- Precondition: rhs obeys the let/app invariant
completeBind env top_lvl old_bndr new_bndr new_rhs
| isCoVar old_bndr
= case new_rhs of
Coercion co -> return (extendCvSubst env old_bndr co)
_ -> return (addNonRec env new_bndr new_rhs)
| otherwise
= ASSERT( isId new_bndr )
do { let old_info = idInfo old_bndr
old_unf = unfoldingInfo old_info
occ_info = occInfo old_info
-- Do eta-expansion on the RHS of the binding
-- See Note [Eta-expanding at let bindings] in SimplUtils
; (new_arity, final_rhs) <- tryEtaExpandRhs env new_bndr new_rhs
-- Simplify the unfolding
; new_unfolding <- simplLetUnfolding env top_lvl old_bndr final_rhs old_unf
; dflags <- getDynFlags
; if postInlineUnconditionally dflags env top_lvl new_bndr occ_info
final_rhs new_unfolding
-- Inline and discard the binding
then do { tick (PostInlineUnconditionally old_bndr)
; return (extendIdSubst env old_bndr (DoneEx final_rhs)) }
-- Use the substitution to make quite, quite sure that the
-- substitution will happen, since we are going to discard the binding
else
do { let info1 = idInfo new_bndr `setArityInfo` new_arity
-- Unfolding info: Note [Setting the new unfolding]
info2 = info1 `setUnfoldingInfo` new_unfolding
-- Demand info: Note [Setting the demand info]
--
-- We also have to nuke demand info if for some reason
-- eta-expansion *reduces* the arity of the binding to less
-- than that of the strictness sig. This can happen: see Note [Arity decrease].
info3 | isEvaldUnfolding new_unfolding
|| (case strictnessInfo info2 of
StrictSig dmd_ty -> new_arity < dmdTypeDepth dmd_ty)
= zapDemandInfo info2 `orElse` info2
| otherwise
= info2
final_id = new_bndr `setIdInfo` info3
; -- pprTrace "Binding" (ppr final_id <+> ppr new_unfolding) $
return (addNonRec env final_id final_rhs) } }
-- The addNonRec adds it to the in-scope set too
------------------------------
addPolyBind :: TopLevelFlag -> SimplEnv -> OutBind -> SimplM SimplEnv
-- Add a new binding to the environment, complete with its unfolding
-- but *do not* do postInlineUnconditionally, because we have already
-- processed some of the scope of the binding
-- We still want the unfolding though. Consider
-- let
-- x = /\a. let y = ... in Just y
-- in body
-- Then we float the y-binding out (via abstractFloats and addPolyBind)
-- but 'x' may well then be inlined in 'body' in which case we'd like the
-- opportunity to inline 'y' too.
--
-- INVARIANT: the arity is correct on the incoming binders
addPolyBind top_lvl env (NonRec poly_id rhs)
= do { unfolding <- simplLetUnfolding env top_lvl poly_id rhs noUnfolding
-- Assumes that poly_id did not have an INLINE prag
-- which is perhaps wrong. ToDo: think about this
; let final_id = setIdInfo poly_id $
idInfo poly_id `setUnfoldingInfo` unfolding
; return (addNonRec env final_id rhs) }
addPolyBind _ env bind@(Rec _)
= return (extendFloats env bind)
-- Hack: letrecs are more awkward, so we extend "by steam"
-- without adding unfoldings etc. At worst this leads to
-- more simplifier iterations
{- Note [Arity decrease]
~~~~~~~~~~~~~~~~~~~~~~~~
Generally speaking the arity of a binding should not decrease. But it *can*
legitimately happen because of RULES. Eg
f = g Int
where g has arity 2, will have arity 2. But if there's a rewrite rule
g Int --> h
where h has arity 1, then f's arity will decrease. Here's a real-life example,
which is in the output of Specialise:
Rec {
$dm {Arity 2} = \d.\x. op d
{-# RULES forall d. $dm Int d = $s$dm #-}
dInt = MkD .... opInt ...
opInt {Arity 1} = $dm dInt
$s$dm {Arity 0} = \x. op dInt }
Here opInt has arity 1; but when we apply the rule its arity drops to 0.
That's why Specialise goes to a little trouble to pin the right arity
on specialised functions too.
Note [Setting the demand info]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
If the unfolding is a value, the demand info may
go pear-shaped, so we nuke it. Example:
let x = (a,b) in
case x of (p,q) -> h p q x
Here x is certainly demanded. But after we've nuked
the case, we'll get just
let x = (a,b) in h a b x
and now x is not demanded (I'm assuming h is lazy)
This really happens. Similarly
let f = \x -> e in ...f..f...
After inlining f at some of its call sites the original binding may
(for example) be no longer strictly demanded.
The solution here is a bit ad hoc...
************************************************************************
* *
\subsection[Simplify-simplExpr]{The main function: simplExpr}
* *
************************************************************************
The reason for this OutExprStuff stuff is that we want to float *after*
simplifying a RHS, not before. If we do so naively we get quadratic
behaviour as things float out.
To see why it's important to do it after, consider this (real) example:
let t = f x
in fst t
==>
let t = let a = e1
b = e2
in (a,b)
in fst t
==>
let a = e1
b = e2
t = (a,b)
in
a -- Can't inline a this round, cos it appears twice
==>
e1
Each of the ==> steps is a round of simplification. We'd save a
whole round if we float first. This can cascade. Consider
let f = g d
in \x -> ...f...
==>
let f = let d1 = ..d.. in \y -> e
in \x -> ...f...
==>
let d1 = ..d..
in \x -> ...(\y ->e)...
Only in this second round can the \y be applied, and it
might do the same again.
-}
simplExpr :: SimplEnv -> CoreExpr -> SimplM CoreExpr
simplExpr env expr = simplExprC env expr (mkBoringStop expr_out_ty)
where
expr_out_ty :: OutType
expr_out_ty = substTy env (exprType expr)
simplExprC :: SimplEnv -> CoreExpr -> SimplCont -> SimplM CoreExpr
-- Simplify an expression, given a continuation
simplExprC env expr cont
= -- pprTrace "simplExprC" (ppr expr $$ ppr cont {- $$ ppr (seIdSubst env) -} $$ ppr (seFloats env) ) $
do { (env', expr') <- simplExprF (zapFloats env) expr cont
; -- pprTrace "simplExprC ret" (ppr expr $$ ppr expr') $
-- pprTrace "simplExprC ret3" (ppr (seInScope env')) $
-- pprTrace "simplExprC ret4" (ppr (seFloats env')) $
return (wrapFloats env' expr') }
--------------------------------------------------
simplExprF :: SimplEnv -> InExpr -> SimplCont
-> SimplM (SimplEnv, OutExpr)
simplExprF env e cont
= {- pprTrace "simplExprF" (vcat
[ ppr e
, text "cont =" <+> ppr cont
, text "inscope =" <+> ppr (seInScope env)
, text "tvsubst =" <+> ppr (seTvSubst env)
, text "idsubst =" <+> ppr (seIdSubst env)
, text "cvsubst =" <+> ppr (seCvSubst env)
{- , ppr (seFloats env) -}
]) $ -}
simplExprF1 env e cont
simplExprF1 :: SimplEnv -> InExpr -> SimplCont
-> SimplM (SimplEnv, OutExpr)
simplExprF1 env (Var v) cont = simplIdF env v cont
simplExprF1 env (Lit lit) cont = rebuild env (Lit lit) cont
simplExprF1 env (Tick t expr) cont = simplTick env t expr cont
simplExprF1 env (Cast body co) cont = simplCast env body co cont
simplExprF1 env (Coercion co) cont = simplCoercionF env co cont
simplExprF1 env (Type ty) cont = ASSERT( contIsRhsOrArg cont )
rebuild env (Type (substTy env ty)) cont
simplExprF1 env (App fun arg) cont
= simplExprF env fun $
case arg of
Type ty -> ApplyToTy { sc_arg_ty = substTy env ty
, sc_hole_ty = substTy env (exprType fun)
, sc_cont = cont }
_ -> ApplyToVal { sc_arg = arg, sc_env = env
, sc_dup = NoDup, sc_cont = cont }
simplExprF1 env expr@(Lam {}) cont
= simplLam env zapped_bndrs body cont
-- The main issue here is under-saturated lambdas
-- (\x1. \x2. e) arg1
-- Here x1 might have "occurs-once" occ-info, because occ-info
-- is computed assuming that a group of lambdas is applied
-- all at once. If there are too few args, we must zap the
-- occ-info, UNLESS the remaining binders are one-shot
where
(bndrs, body) = collectBinders expr
zapped_bndrs | need_to_zap = map zap bndrs
| otherwise = bndrs
need_to_zap = any zappable_bndr (drop n_args bndrs)
n_args = countArgs cont
-- NB: countArgs counts all the args (incl type args)
-- and likewise drop counts all binders (incl type lambdas)
zappable_bndr b = isId b && not (isOneShotBndr b)
zap b | isTyVar b = b
| otherwise = zapLamIdInfo b
simplExprF1 env (Case scrut bndr _ alts) cont
= simplExprF env scrut (Select { sc_dup = NoDup, sc_bndr = bndr
, sc_alts = alts
, sc_env = env, sc_cont = cont })
simplExprF1 env (Let (Rec pairs) body) cont
= do { env' <- simplRecBndrs env (map fst pairs)
-- NB: bndrs' don't have unfoldings or rules
-- We add them as we go down
; env'' <- simplRecBind env' NotTopLevel pairs
; simplExprF env'' body cont }
simplExprF1 env (Let (NonRec bndr rhs) body) cont
= simplNonRecE env bndr (rhs, env) ([], body) cont
---------------------------------
simplType :: SimplEnv -> InType -> SimplM OutType
-- Kept monadic just so we can do the seqType
simplType env ty
= -- pprTrace "simplType" (ppr ty $$ ppr (seTvSubst env)) $
seqType new_ty `seq` return new_ty
where
new_ty = substTy env ty
---------------------------------
simplCoercionF :: SimplEnv -> InCoercion -> SimplCont
-> SimplM (SimplEnv, OutExpr)
simplCoercionF env co cont
= do { co' <- simplCoercion env co
; rebuild env (Coercion co') cont }
simplCoercion :: SimplEnv -> InCoercion -> SimplM OutCoercion
simplCoercion env co
= let opt_co = optCoercion (getTCvSubst env) co
in seqCo opt_co `seq` return opt_co
-----------------------------------
-- | Push a TickIt context outwards past applications and cases, as
-- long as this is a non-scoping tick, to let case and application
-- optimisations apply.
simplTick :: SimplEnv -> Tickish Id -> InExpr -> SimplCont
-> SimplM (SimplEnv, OutExpr)
simplTick env tickish expr cont
-- A scoped tick turns into a continuation, so that we can spot
-- (scc t (\x . e)) in simplLam and eliminate the scc. If we didn't do
-- it this way, then it would take two passes of the simplifier to
-- reduce ((scc t (\x . e)) e').
-- NB, don't do this with counting ticks, because if the expr is
-- bottom, then rebuildCall will discard the continuation.
-- XXX: we cannot do this, because the simplifier assumes that
-- the context can be pushed into a case with a single branch. e.g.
-- scc<f> case expensive of p -> e
-- becomes
-- case expensive of p -> scc<f> e
--
-- So I'm disabling this for now. It just means we will do more
-- simplifier iterations than necessary in some cases.
-- | tickishScoped tickish && not (tickishCounts tickish)
-- = simplExprF env expr (TickIt tickish cont)
-- For unscoped or soft-scoped ticks, we are allowed to float in new
-- cost, so we simply push the continuation inside the tick. This
-- has the effect of moving the tick to the outside of a case or
-- application context, allowing the normal case and application
-- optimisations to fire.
| tickish `tickishScopesLike` SoftScope
= do { (env', expr') <- simplExprF env expr cont
; return (env', mkTick tickish expr')
}
-- Push tick inside if the context looks like this will allow us to
-- do a case-of-case - see Note [case-of-scc-of-case]
| Select {} <- cont, Just expr' <- push_tick_inside
= simplExprF env expr' cont
-- We don't want to move the tick, but we might still want to allow
-- floats to pass through with appropriate wrapping (or not, see
-- wrap_floats below)
--- | not (tickishCounts tickish) || tickishCanSplit tickish
-- = wrap_floats
| otherwise
= no_floating_past_tick
where
-- Try to push tick inside a case, see Note [case-of-scc-of-case].
push_tick_inside =
case expr0 of
Case scrut bndr ty alts
-> Just $ Case (tickScrut scrut) bndr ty (map tickAlt alts)
_other -> Nothing
where (ticks, expr0) = stripTicksTop movable (Tick tickish expr)
movable t = not (tickishCounts t) ||
t `tickishScopesLike` NoScope ||
tickishCanSplit t
tickScrut e = foldr mkTick e ticks
-- Alternatives get annotated with all ticks that scope in some way,
-- but we don't want to count entries.
tickAlt (c,bs,e) = (c,bs, foldr mkTick e ts_scope)
ts_scope = map mkNoCount $
filter (not . (`tickishScopesLike` NoScope)) ticks
no_floating_past_tick =
do { let (inc,outc) = splitCont cont
; (env', expr') <- simplExprF (zapFloats env) expr inc
; let tickish' = simplTickish env tickish
; (env'', expr'') <- rebuild (zapFloats env')
(wrapFloats env' expr')
(TickIt tickish' outc)
; return (addFloats env env'', expr'')
}
-- Alternative version that wraps outgoing floats with the tick. This
-- results in ticks being duplicated, as we don't make any attempt to
-- eliminate the tick if we re-inline the binding (because the tick
-- semantics allows unrestricted inlining of HNFs), so I'm not doing
-- this any more. FloatOut will catch any real opportunities for
-- floating.
--
-- wrap_floats =
-- do { let (inc,outc) = splitCont cont
-- ; (env', expr') <- simplExprF (zapFloats env) expr inc
-- ; let tickish' = simplTickish env tickish
-- ; let wrap_float (b,rhs) = (zapIdStrictness (setIdArity b 0),
-- mkTick (mkNoCount tickish') rhs)
-- -- when wrapping a float with mkTick, we better zap the Id's
-- -- strictness info and arity, because it might be wrong now.
-- ; let env'' = addFloats env (mapFloats env' wrap_float)
-- ; rebuild env'' expr' (TickIt tickish' outc)
-- }
simplTickish env tickish
| Breakpoint n ids <- tickish
= Breakpoint n (map (getDoneId . substId env) ids)
| otherwise = tickish
-- Push type application and coercion inside a tick
splitCont :: SimplCont -> (SimplCont, SimplCont)
splitCont cont@(ApplyToTy { sc_cont = tail }) = (cont { sc_cont = inc }, outc)
where (inc,outc) = splitCont tail
splitCont (CastIt co c) = (CastIt co inc, outc)
where (inc,outc) = splitCont c
splitCont other = (mkBoringStop (contHoleType other), other)
getDoneId (DoneId id) = id
getDoneId (DoneEx e) = getIdFromTrivialExpr e -- Note [substTickish] in CoreSubst
getDoneId other = pprPanic "getDoneId" (ppr other)
-- Note [case-of-scc-of-case]
-- It's pretty important to be able to transform case-of-case when
-- there's an SCC in the way. For example, the following comes up
-- in nofib/real/compress/Encode.hs:
--
-- case scctick<code_string.r1>
-- case $wcode_string_r13s wild_XC w1_s137 w2_s138 l_aje
-- of _ { (# ww1_s13f, ww2_s13g, ww3_s13h #) ->
-- (ww1_s13f, ww2_s13g, ww3_s13h)
-- }
-- of _ { (ww_s12Y, ww1_s12Z, ww2_s130) ->
-- tick<code_string.f1>
-- (ww_s12Y,
-- ww1_s12Z,
-- PTTrees.PT
-- @ GHC.Types.Char @ GHC.Types.Int wild2_Xj ww2_s130 r_ajf)
-- }
--
-- We really want this case-of-case to fire, because then the 3-tuple
-- will go away (indeed, the CPR optimisation is relying on this
-- happening). But the scctick is in the way - we need to push it
-- inside to expose the case-of-case. So we perform this
-- transformation on the inner case:
--
-- scctick c (case e of { p1 -> e1; ...; pn -> en })
-- ==>
-- case (scctick c e) of { p1 -> scc c e1; ...; pn -> scc c en }
--
-- So we've moved a constant amount of work out of the scc to expose
-- the case. We only do this when the continuation is interesting: for
-- now, it has to be another Case (maybe generalise this later).
{-
************************************************************************
* *
\subsection{The main rebuilder}
* *
************************************************************************
-}
rebuild :: SimplEnv -> OutExpr -> SimplCont -> SimplM (SimplEnv, OutExpr)
-- At this point the substitution in the SimplEnv should be irrelevant
-- only the in-scope set and floats should matter
rebuild env expr cont
= case cont of
Stop {} -> return (env, expr)
TickIt t cont -> rebuild env (mkTick t expr) cont
CastIt co cont -> rebuild env (mkCast expr co) cont
-- NB: mkCast implements the (Coercion co |> g) optimisation
Select { sc_bndr = bndr, sc_alts = alts, sc_env = se, sc_cont = cont }
-> rebuildCase (se `setFloats` env) expr bndr alts cont
StrictArg info _ cont -> rebuildCall env (info `addValArgTo` expr) cont
StrictBind b bs body se cont -> do { env' <- simplNonRecX (se `setFloats` env) b expr
-- expr satisfies let/app since it started life
-- in a call to simplNonRecE
; simplLam env' bs body cont }
ApplyToTy { sc_arg_ty = ty, sc_cont = cont}
-> rebuild env (App expr (Type ty)) cont
ApplyToVal { sc_arg = arg, sc_env = se, sc_dup = dup_flag, sc_cont = cont}
-- See Note [Avoid redundant simplification]
| isSimplified dup_flag -> rebuild env (App expr arg) cont
| otherwise -> do { arg' <- simplExpr (se `setInScope` env) arg
; rebuild env (App expr arg') cont }
{-
************************************************************************
* *
\subsection{Lambdas}
* *
************************************************************************
-}
simplCast :: SimplEnv -> InExpr -> Coercion -> SimplCont
-> SimplM (SimplEnv, OutExpr)
simplCast env body co0 cont0
= do { co1 <- simplCoercion env co0
; cont1 <- addCoerce co1 cont0
; simplExprF env body cont1 }
where
addCoerce co cont = add_coerce co (coercionKind co) cont
add_coerce _co (Pair s1 k1) cont -- co :: ty~ty
| s1 `eqType` k1 = return cont -- is a no-op
add_coerce co1 (Pair s1 _k2) (CastIt co2 cont)
| (Pair _l1 t1) <- coercionKind co2
-- e |> (g1 :: S1~L) |> (g2 :: L~T1)
-- ==>
-- e, if S1=T1
-- e |> (g1 . g2 :: S1~T1) otherwise
--
-- For example, in the initial form of a worker
-- we may find (coerce T (coerce S (\x.e))) y
-- and we'd like it to simplify to e[y/x] in one round
-- of simplification
, s1 `eqType` t1 = return cont -- The coerces cancel out
| otherwise = return (CastIt (mkTransCo co1 co2) cont)
add_coerce co (Pair s1s2 _t1t2) cont@(ApplyToTy { sc_arg_ty = arg_ty, sc_cont = tail })
-- (f |> g) ty ---> (f ty) |> (g @ ty)
-- This implements the PushT rule from the paper
| isForAllTy s1s2
= do { cont' <- addCoerce new_cast tail
; return (cont { sc_cont = cont' }) }
where
new_cast = mkInstCo co (mkNomReflCo arg_ty)
add_coerce co (Pair s1s2 t1t2) (ApplyToVal { sc_arg = arg, sc_env = arg_se
, sc_dup = dup, sc_cont = cont })
| isFunTy s1s2 -- This implements the Push rule from the paper
, isFunTy t1t2 -- Check t1t2 to ensure 'arg' is a value arg
-- (e |> (g :: s1s2 ~ t1->t2)) f
-- ===>
-- (e (f |> (arg g :: t1~s1))
-- |> (res g :: s2->t2)
--
-- t1t2 must be a function type, t1->t2, because it's applied
-- to something but s1s2 might conceivably not be
--
-- When we build the ApplyTo we can't mix the out-types
-- with the InExpr in the argument, so we simply substitute
-- to make it all consistent. It's a bit messy.
-- But it isn't a common case.
--
-- Example of use: Trac #995
= do { (dup', arg_se', arg') <- simplArg env dup arg_se arg
; cont' <- addCoerce co2 cont
; return (ApplyToVal { sc_arg = mkCast arg' (mkSymCo co1)
, sc_env = arg_se'
, sc_dup = dup'
, sc_cont = cont' }) }
where
-- we split coercion t1->t2 ~ s1->s2 into t1 ~ s1 and
-- t2 ~ s2 with left and right on the curried form:
-- (->) t1 t2 ~ (->) s1 s2
[co1, co2] = decomposeCo 2 co
add_coerce co _ cont = return (CastIt co cont)
simplArg :: SimplEnv -> DupFlag -> StaticEnv -> CoreExpr
-> SimplM (DupFlag, StaticEnv, OutExpr)
simplArg env dup_flag arg_env arg
| isSimplified dup_flag
= return (dup_flag, arg_env, arg)
| otherwise
= do { arg' <- simplExpr (arg_env `setInScope` env) arg
; return (Simplified, zapSubstEnv arg_env, arg') }
{-
************************************************************************
* *
\subsection{Lambdas}
* *
************************************************************************
Note [Zap unfolding when beta-reducing]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Lambda-bound variables can have stable unfoldings, such as
$j = \x. \b{Unf=Just x}. e
See Note [Case binders and join points] below; the unfolding lets
us optimise e better. However when we beta-reduce it we want to
revert to using the actual value, otherwise we can end up in the
stupid situation of
let x = blah in
let b{Unf=Just x} = y
in ...b...
Here it'd be far better to drop the unfolding and use the actual RHS.
-}
simplLam :: SimplEnv -> [InId] -> InExpr -> SimplCont
-> SimplM (SimplEnv, OutExpr)
simplLam env [] body cont = simplExprF env body cont
-- Beta reduction
simplLam env (bndr:bndrs) body (ApplyToTy { sc_arg_ty = arg_ty, sc_cont = cont })
= do { tick (BetaReduction bndr)
; simplLam (extendTvSubst env bndr arg_ty) bndrs body cont }
simplLam env (bndr:bndrs) body (ApplyToVal { sc_arg = arg, sc_env = arg_se
, sc_cont = cont })
= do { tick (BetaReduction bndr)
; simplNonRecE env' (zap_unfolding bndr) (arg, arg_se) (bndrs, body) cont }
where
env' | Coercion co <- arg
= extendCvSubst env bndr co
| otherwise
= env
zap_unfolding bndr -- See Note [Zap unfolding when beta-reducing]
| isId bndr, isStableUnfolding (realIdUnfolding bndr)
= setIdUnfolding bndr NoUnfolding
| otherwise = bndr
-- discard a non-counting tick on a lambda. This may change the
-- cost attribution slightly (moving the allocation of the
-- lambda elsewhere), but we don't care: optimisation changes
-- cost attribution all the time.
simplLam env bndrs body (TickIt tickish cont)
| not (tickishCounts tickish)
= simplLam env bndrs body cont
-- Not enough args, so there are real lambdas left to put in the result
simplLam env bndrs body cont
= do { (env', bndrs') <- simplLamBndrs env bndrs
; body' <- simplExpr env' body
; new_lam <- mkLam bndrs' body' cont
; rebuild env' new_lam cont }
simplLamBndrs :: SimplEnv -> [InBndr] -> SimplM (SimplEnv, [OutBndr])
simplLamBndrs env bndrs = mapAccumLM simplLamBndr env bndrs
-------------
simplLamBndr :: SimplEnv -> Var -> SimplM (SimplEnv, Var)
-- Used for lambda binders. These sometimes have unfoldings added by
-- the worker/wrapper pass that must be preserved, because they can't
-- be reconstructed from context. For example:
-- f x = case x of (a,b) -> fw a b x
-- fw a b x{=(a,b)} = ...
-- The "{=(a,b)}" is an unfolding we can't reconstruct otherwise.
simplLamBndr env bndr
| isId bndr && hasSomeUnfolding old_unf -- Special case
= do { (env1, bndr1) <- simplBinder env bndr
; unf' <- simplUnfolding env1 NotTopLevel bndr old_unf
; let bndr2 = bndr1 `setIdUnfolding` unf'
; return (modifyInScope env1 bndr2, bndr2) }
| otherwise
= simplBinder env bndr -- Normal case
where
old_unf = idUnfolding bndr
------------------
simplNonRecE :: SimplEnv
-> InBndr -- The binder
-> (InExpr, SimplEnv) -- Rhs of binding (or arg of lambda)
-> ([InBndr], InExpr) -- Body of the let/lambda
-- \xs.e
-> SimplCont
-> SimplM (SimplEnv, OutExpr)
-- simplNonRecE is used for
-- * non-top-level non-recursive lets in expressions
-- * beta reduction
--
-- It deals with strict bindings, via the StrictBind continuation,
-- which may abort the whole process
--
-- Precondition: rhs satisfies the let/app invariant
-- Note [CoreSyn let/app invariant] in CoreSyn
--
-- The "body" of the binding comes as a pair of ([InId],InExpr)
-- representing a lambda; so we recurse back to simplLam
-- Why? Because of the binder-occ-info-zapping done before
-- the call to simplLam in simplExprF (Lam ...)
-- First deal with type applications and type lets
-- (/\a. e) (Type ty) and (let a = Type ty in e)
simplNonRecE env bndr (Type ty_arg, rhs_se) (bndrs, body) cont
= ASSERT( isTyVar bndr )
do { ty_arg' <- simplType (rhs_se `setInScope` env) ty_arg
; simplLam (extendTvSubst env bndr ty_arg') bndrs body cont }
simplNonRecE env bndr (rhs, rhs_se) (bndrs, body) cont
= do dflags <- getDynFlags
case () of
_ | preInlineUnconditionally dflags env NotTopLevel bndr rhs
-> do { tick (PreInlineUnconditionally bndr)
; -- pprTrace "preInlineUncond" (ppr bndr <+> ppr rhs) $
simplLam (extendIdSubst env bndr (mkContEx rhs_se rhs)) bndrs body cont }
| isStrictId bndr -- Includes coercions
-> simplExprF (rhs_se `setFloats` env) rhs
(StrictBind bndr bndrs body env cont)
| otherwise
-> ASSERT( not (isTyVar bndr) )
do { (env1, bndr1) <- simplNonRecBndr env bndr
; (env2, bndr2) <- addBndrRules env1 bndr bndr1
; env3 <- simplLazyBind env2 NotTopLevel NonRecursive bndr bndr2 rhs rhs_se
; simplLam env3 bndrs body cont }
{-
************************************************************************
* *
Variables
* *
************************************************************************
-}
simplVar :: SimplEnv -> InVar -> SimplM OutExpr
-- Look up an InVar in the environment
simplVar env var
| isTyVar var = return (Type (substTyVar env var))
| isCoVar var = return (Coercion (substCoVar env var))
| otherwise
= case substId env var of
DoneId var1 -> return (Var var1)
DoneEx e -> return e
ContEx tvs cvs ids e -> simplExpr (setSubstEnv env tvs cvs ids) e
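-- simplIdF simplifies an occurrence of an in-scope Id in the light of
-- its continuation: if the substitution maps the Id to an expression we
-- simplify that expression; otherwise completeCall tries inlining and,
-- failing that, rebuilds the call (where rewrite rules are tried).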
simplIdF :: SimplEnv -> InId -> SimplCont -> SimplM (SimplEnv, OutExpr)
simplIdF env var cont
= case substId env var of
DoneEx e -> simplExprF (zapSubstEnv env) e cont
ContEx tvs cvs ids e -> simplExprF (setSubstEnv env tvs cvs ids) e cont
DoneId var1 -> completeCall env var1 cont
-- Note [zapSubstEnv]
-- The template is already simplified, so don't re-substitute.
-- This is VITAL. Consider
-- let x = e in
-- let y = \z -> ...x... in
-- \ x -> ...y...
-- We'll clone the inner \x, adding x->x' in the id_subst
-- Then when we inline y, we must *not* replace x by x' in
-- the inlined copy!!
---------------------------------------------------------
-- Dealing with a call site
completeCall :: SimplEnv -> OutId -> SimplCont -> SimplM (SimplEnv, OutExpr)
completeCall env var cont
= do { ------------- Try inlining ----------------
dflags <- getDynFlags
; let (lone_variable, arg_infos, call_cont) = contArgs cont
n_val_args = length arg_infos
interesting_cont = interestingCallContext call_cont
unfolding = activeUnfolding env var
maybe_inline = callSiteInline dflags var unfolding
lone_variable arg_infos interesting_cont
; case maybe_inline of {
Just expr -- There is an inlining!
-> do { checkedTick (UnfoldingDone var)
; dump_inline dflags expr cont
; simplExprF (zapSubstEnv env) expr cont }
; Nothing -> do -- No inlining!
{ rule_base <- getSimplRules
; let info = mkArgInfo var (getRules rule_base var) n_val_args call_cont
; rebuildCall env info cont
}}}
where
dump_inline dflags unfolding cont
| not (dopt Opt_D_dump_inlinings dflags) = return ()
| not (dopt Opt_D_verbose_core2core dflags)
= when (isExternalName (idName var)) $
liftIO $ printOutputForUser dflags alwaysQualify $
sep [text "Inlining done:", nest 4 (ppr var)]
| otherwise
= liftIO $ printOutputForUser dflags alwaysQualify $
sep [text "Inlining done: " <> ppr var,
nest 4 (vcat [text "Inlined fn: " <+> nest 2 (ppr unfolding),
text "Cont: " <+> ppr cont])]
rebuildCall :: SimplEnv
-> ArgInfo
-> SimplCont
-> SimplM (SimplEnv, OutExpr)
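-- rebuildCall works through the continuation, accumulating the
-- already-simplified arguments into the ArgInfo: type and value
-- arguments are added in turn (strict value arguments via a StrictArg
-- continuation), and when the arguments are exhausted we try rewrite
-- rules (tryRules) before rebuilding the call itself.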
rebuildCall env (ArgInfo { ai_fun = fun, ai_args = rev_args, ai_strs = [] }) cont
-- When we run out of strictness args, it means
-- that the call is definitely bottom; see SimplUtils.mkArgInfo
-- Then we want to discard the entire strict continuation. E.g.
-- * case (error "hello") of { ... }
-- * (error "Hello") arg
-- * f (error "Hello") where f is strict
-- etc
-- Then, especially in the first of these cases, we'd like to discard
-- the continuation, leaving just the bottoming expression. But the
-- type might not be right, so we may have to add a coerce.
| not (contIsTrivial cont) -- Only do this if there is a non-trivial
  = return (env, castBottomExpr res cont_ty)  -- continuation to discard, else we do it
where -- again and again!
res = argInfoExpr fun rev_args
cont_ty = contResultType cont
rebuildCall env info (CastIt co cont)
= rebuildCall env (addCastTo info co) cont
rebuildCall env info (ApplyToTy { sc_arg_ty = arg_ty, sc_cont = cont })
= rebuildCall env (info `addTyArgTo` arg_ty) cont
rebuildCall env info@(ArgInfo { ai_encl = encl_rules, ai_type = fun_ty
, ai_strs = str:strs, ai_discs = disc:discs })
(ApplyToVal { sc_arg = arg, sc_env = arg_se
, sc_dup = dup_flag, sc_cont = cont })
| isSimplified dup_flag -- See Note [Avoid redundant simplification]
= rebuildCall env (addValArgTo info' arg) cont
| str -- Strict argument
= -- pprTrace "Strict Arg" (ppr arg $$ ppr (seIdSubst env) $$ ppr (seInScope env)) $
simplExprF (arg_se `setFloats` env) arg
(StrictArg info' cci cont)
-- Note [Shadowing]
| otherwise -- Lazy argument
-- DO NOT float anything outside, hence simplExprC
-- There is no benefit (unlike in a let-binding), and we'd
-- have to be very careful about bogus strictness through
-- floating a demanded let.
= do { arg' <- simplExprC (arg_se `setInScope` env) arg
(mkLazyArgStop (funArgTy fun_ty) cci)
; rebuildCall env (addValArgTo info' arg') cont }
where
info' = info { ai_strs = strs, ai_discs = discs }
cci | encl_rules = RuleArgCtxt
| disc > 0 = DiscArgCtxt -- Be keener here
| otherwise = BoringCtxt -- Nothing interesting
rebuildCall env (ArgInfo { ai_fun = fun, ai_args = rev_args, ai_rules = rules }) cont
| null rules
= rebuild env (argInfoExpr fun rev_args) cont -- No rules, common case
| otherwise
= do { -- We've accumulated a simplified call in <fun,rev_args>
-- so try rewrite rules; see Note [RULEs apply to simplified arguments]
-- See also Note [Rules for recursive functions]
; let env' = zapSubstEnv env -- See Note [zapSubstEnv];
-- and NB that 'rev_args' are all fully simplified
; mb_rule <- tryRules env' rules fun (reverse rev_args) cont
; case mb_rule of {
Just (rule_rhs, cont') -> simplExprF env' rule_rhs cont'
-- Rules don't match
; Nothing -> rebuild env (argInfoExpr fun rev_args) cont -- No rules
} }
{-
Note [RULES apply to simplified arguments]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
It's very desirable to try RULES once the arguments have been simplified, because
doing so ensures that rule cascades work in one pass. Consider
{-# RULES g (h x) = k x
f (k x) = x #-}
...f (g (h x))...
Then we want to rewrite (g (h x)) to (k x) and only then try f's rules. If
we match f's rules against the un-simplified RHS, it won't match. This
makes a particularly big difference when superclass selectors are involved:
op ($p1 ($p2 (df d)))
We want all this to unravel in one sweep.
Note [Avoid redundant simplification]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Because RULES apply to simplified arguments, there's a danger of repeatedly
simplifying already-simplified arguments. An important example is that of
(>>=) d e1 e2
Here e1, e2 are simplified before the rule is applied, but don't really
participate in the rule firing. So we mark them as Simplified to avoid
re-simplifying them.
Note [Shadowing]
~~~~~~~~~~~~~~~~
This part of the simplifier may break the no-shadowing invariant
Consider
f (...(\a -> e)...) (case y of (a,b) -> e')
where f is strict in its second arg
If we simplify the innermost one first we get (...(\a -> e)...)
Simplifying the second arg makes us float the case out, so we end up with
case y of (a,b) -> f (...(\a -> e)...) e'
So the output does not have the no-shadowing invariant. However, there is
no danger of getting name-capture, because when the first arg was simplified
we used an in-scope set that at least mentioned all the variables free in its
static environment, and that is enough.
We can't just do innermost first, or we'd end up with a dual problem:
case x of (a,b) -> f e (...(\a -> e')...)
I spent hours trying to recover the no-shadowing invariant, but I just could
not think of an elegant way to do it. The simplifier is already knee-deep in
continuations. We have to keep the right in-scope set around; AND we have
to get the effect that finding (error "foo") in a strict arg position will
discard the entire application and replace it with (error "foo"). Getting
all this at once is TOO HARD!
************************************************************************
* *
Rewrite rules
* *
************************************************************************
-}
tryRules :: SimplEnv -> [CoreRule]
-> Id -> [ArgSpec] -> SimplCont
-> SimplM (Maybe (CoreExpr, SimplCont))
-- The SimplEnv already has zapSubstEnv applied to it
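-- Returns Just (rule_rhs, cont') if some rule fired; cont' carries any
-- arguments the rule did not consume, pushed back onto the original
-- continuation.  Returns Nothing if no rule matched.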
tryRules env rules fn args call_cont
| null rules
= return Nothing
{- Disabled until we fix #8326
| fn `hasKey` tagToEnumKey -- See Note [Optimising tagToEnum#]
, [_type_arg, val_arg] <- args
, Select dup bndr ((_,[],rhs1) : rest_alts) se cont <- call_cont
, isDeadBinder bndr
= do { dflags <- getDynFlags
; let enum_to_tag :: CoreAlt -> CoreAlt
-- Takes K -> e into tagK# -> e
-- where tagK# is the tag of constructor K
enum_to_tag (DataAlt con, [], rhs)
= ASSERT( isEnumerationTyCon (dataConTyCon con) )
(LitAlt tag, [], rhs)
where
tag = mkMachInt dflags (toInteger (dataConTag con - fIRST_TAG))
enum_to_tag alt = pprPanic "tryRules: tagToEnum" (ppr alt)
new_alts = (DEFAULT, [], rhs1) : map enum_to_tag rest_alts
new_bndr = setIdType bndr intPrimTy
-- The binder is dead, but should have the right type
; return (Just (val_arg, Select dup new_bndr new_alts se cont)) }
-}
| otherwise
= do { dflags <- getDynFlags
; case lookupRule dflags (getUnfoldingInRuleMatch env) (activeRule env)
fn (argInfoAppArgs args) rules of {
Nothing ->
do { nodump dflags -- This ensures that an empty file is written
; return Nothing } ; -- No rule matches
Just (rule, rule_rhs) ->
do { checkedTick (RuleFired (ru_name rule))
; let cont' = pushSimplifiedArgs env
(drop (ruleArity rule) args)
call_cont
-- (ruleArity rule) says how many args the rule consumed
; dump dflags rule rule_rhs
; return (Just (rule_rhs, cont')) }}}
where
dump dflags rule rule_rhs
| dopt Opt_D_dump_rule_rewrites dflags
= log_rule dflags Opt_D_dump_rule_rewrites "Rule fired" $ vcat
[ text "Rule:" <+> ftext (ru_name rule)
, text "Before:" <+> hang (ppr fn) 2 (sep (map ppr args))
, text "After: " <+> pprCoreExpr rule_rhs
, text "Cont: " <+> ppr call_cont ]
| dopt Opt_D_dump_rule_firings dflags
= log_rule dflags Opt_D_dump_rule_firings "Rule fired:" $
ftext (ru_name rule)
| otherwise
= return ()
nodump dflags
| dopt Opt_D_dump_rule_rewrites dflags
= liftIO $ dumpSDoc dflags alwaysQualify Opt_D_dump_rule_rewrites "" empty
| dopt Opt_D_dump_rule_firings dflags
= liftIO $ dumpSDoc dflags alwaysQualify Opt_D_dump_rule_firings "" empty
| otherwise
= return ()
log_rule dflags flag hdr details
= liftIO . dumpSDoc dflags alwaysQualify flag "" $
sep [text hdr, nest 4 details]
{-
Note [Optimising tagToEnum#]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~
If we have an enumeration data type:
data Foo = A | B | C
Then we want to transform
case tagToEnum# x of ==> case x of
A -> e1 DEFAULT -> e1
B -> e2 1# -> e2
C -> e3 2# -> e3
thereby getting rid of the tagToEnum# altogether. If there was a DEFAULT
alternative we retain it (remember it comes first). If not, the case must
be exhaustive, and we reflect that in the transformed version by adding
a DEFAULT. Otherwise Lint complains that the new case is not exhaustive.
See #8317.
Note [Rules for recursive functions]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
You might think that we shouldn't apply rules for a loop breaker:
doing so might give rise to an infinite loop, because a RULE is
rather like an extra equation for the function:
RULE: f (g x) y = x+y
Eqn: f a y = a-y
But it's too drastic to disable rules for loop breakers.
Even the foldr/build rule would be disabled, because foldr
is recursive, and hence a loop breaker:
foldr k z (build g) = g k z
So it's up to the programmer: rules can cause divergence
************************************************************************
* *
Rebuilding a case expression
* *
************************************************************************
Note [Case elimination]
~~~~~~~~~~~~~~~~~~~~~~~
The case-elimination transformation discards redundant case expressions.
Start with a simple situation:
case x# of ===> let y# = x# in e
y# -> e
(when x#, y# are of primitive type, of course). We can't (in general)
do this for algebraic cases, because we might turn bottom into
non-bottom!
The code in SimplUtils.prepareAlts has the effect of generalising this
idea to look for a case where we're scrutinising a variable, and we
know that only the default case can match. For example:
case x of
0# -> ...
DEFAULT -> ...(case x of
0# -> ...
DEFAULT -> ...) ...
Here the inner case is first trimmed to have only one alternative, the
DEFAULT, after which it's an instance of the previous case. This
really only shows up in eliminating error-checking code.
Note that SimplUtils.mkCase combines identical RHSs. So
case e of ===> case e of DEFAULT -> r
True -> r
False -> r
Now again the case may be eliminated by the CaseElim transformation.
This includes things like (==# a# b#)::Bool so that we simplify
case ==# a# b# of { True -> x; False -> x }
to just
x
This particular example shows up in default methods for
comparison operations (e.g. in (>=) for Int.Int32)
Note [Case elimination: lifted case]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
If a case over a lifted type has a single alternative, and is being used
as a strict 'let' (all isDeadBinder bndrs), we may want to do this
transformation:
case e of r ===> let r = e in ...r...
_ -> ...r...
 (a) 'e' is already evaluated (it may be so if e is a variable)
Specifically we check (exprIsHNF e). In this case
we can just allocate the WHNF directly with a let.
or
 (b) 'r' is not used at all and e is ok-for-speculation
The ok-for-spec bit checks that we don't lose any
exceptions or divergence.
NB: it'd be *sound* to switch from case to let if the
scrutinee was not yet WHNF but was guaranteed to
converge; but sticking with case means we won't build a
thunk
or
 (c) 'r' is used strictly in the body, and 'e' is a variable
      Then we can just substitute 'e' for 'r' in the body.
See Note [Eliminating redundant seqs]
For (b), the "not used at all" test is important. Consider
case (case a ># b of { True -> (p,q); False -> (q,p) }) of
r -> blah
The scrutinee is ok-for-speculation (it looks inside cases), but we do
not want to transform to
let r = case a ># b of { True -> (p,q); False -> (q,p) }
in blah
because that builds an unnecessary thunk.
Note [Eliminating redundant seqs]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
If we have this:
case x of r { _ -> ..r.. }
where 'r' is used strictly in (..r..), the case is effectively a 'seq'
on 'x', but since 'r' is used strictly anyway, we can safely transform to
(...x...)
Note that this can change the error behaviour. For example, we might
transform
case x of { _ -> error "bad" }
--> error "bad"
which might be puzzling if 'x' is currently lambda-bound, but later gets
let-bound to (error "good").
Nevertheless, the paper "A semantics for imprecise exceptions" allows
this transformation. If you want to fix the evaluation order, use
'pseq'. See Trac #8900 for an example where the loss of this
transformation bit us in practice.
See also Note [Empty case alternatives] in CoreSyn.
Just for reference, the original code (added Jan 13) looked like this:
|| case_bndr_evald_next rhs
case_bndr_evald_next :: CoreExpr -> Bool
-- See Note [Case binder next]
case_bndr_evald_next (Var v) = v == case_bndr
case_bndr_evald_next (Cast e _) = case_bndr_evald_next e
case_bndr_evald_next (App e _) = case_bndr_evald_next e
case_bndr_evald_next (Case e _ _ _) = case_bndr_evald_next e
case_bndr_evald_next _ = False
(This came up when fixing Trac #7542. See also Note [Eta reduction of
an eval'd function] in CoreUtils.)
Note [Case elimination: unlifted case]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Consider
case a +# b of r -> ...r...
Then we do case-elimination (to make a let) followed by inlining,
to get
.....(a +# b)....
If we have
case indexArray# a i of r -> ...r...
we might like to do the same, and inline the (indexArray# a i).
But indexArray# is not okForSpeculation, so we don't build a let
in rebuildCase (lest it get floated *out*), so the inlining doesn't
happen either.
This really isn't a big deal I think. The let can be
Further notes about case elimination
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Consider: test :: Integer -> IO ()
test = print
Turns out that this compiles to:
Print.test
= \ eta :: Integer
eta1 :: Void# ->
case PrelNum.< eta PrelNum.zeroInteger of wild { __DEFAULT ->
case hPutStr stdout
(PrelNum.jtos eta ($w[] @ Char))
eta1
of wild1 { (# new_s, a4 #) -> PrelIO.lvl23 new_s }}
Notice the strange '<' which has no effect at all. This is a funny one.
It started like this:
f x y = if x < 0 then jtos x
else if y==0 then "" else jtos x
At a particular call site we have (f v 1). So we inline to get
          if v < 0 then jtos v
          else if 1==0 then "" else jtos v
Now simplify the 1==0 conditional:
if v<0 then jtos v else jtos v
Now common-up the two branches of the case:
case (v<0) of DEFAULT -> jtos v
Why don't we drop the case? Because it's strict in v. It's technically
wrong to drop even unnecessary evaluations, and in practice they
may be a result of 'seq' so we *definitely* don't want to drop those.
I don't really know how to improve this situation.
-}
---------------------------------------------------------
-- Eliminate the case if possible
rebuildCase, reallyRebuildCase
:: SimplEnv
-> OutExpr -- Scrutinee
-> InId -- Case binder
            -> [InAlt]          -- Alternatives (increasing order)
-> SimplCont
-> SimplM (SimplEnv, OutExpr)
--------------------------------------------------
-- 1. Eliminate the case if there's a known constructor
--------------------------------------------------
rebuildCase env scrut case_bndr alts cont
| Lit lit <- scrut -- No need for same treatment as constructors
-- because literals are inlined more vigorously
, not (litIsLifted lit)
= do { tick (KnownBranch case_bndr)
; case findAlt (LitAlt lit) alts of
Nothing -> missingAlt env case_bndr alts cont
Just (_, bs, rhs) -> simple_rhs bs rhs }
| Just (con, ty_args, other_args) <- exprIsConApp_maybe (getUnfoldingInRuleMatch env) scrut
-- Works when the scrutinee is a variable with a known unfolding
-- as well as when it's an explicit constructor application
= do { tick (KnownBranch case_bndr)
; case findAlt (DataAlt con) alts of
Nothing -> missingAlt env case_bndr alts cont
Just (DEFAULT, bs, rhs) -> simple_rhs bs rhs
Just (_, bs, rhs) -> knownCon env scrut con ty_args other_args
case_bndr bs rhs cont
}
where
simple_rhs bs rhs = ASSERT( null bs )
do { env' <- simplNonRecX env case_bndr scrut
-- scrut is a constructor application,
-- hence satisfies let/app invariant
; simplExprF env' rhs cont }
--------------------------------------------------
-- 2. Eliminate the case if scrutinee is evaluated
--------------------------------------------------
rebuildCase env scrut case_bndr alts@[(_, bndrs, rhs)] cont
-- See if we can get rid of the case altogether
-- See Note [Case elimination]
-- mkCase made sure that if all the alternatives are equal,
-- then there is now only one (DEFAULT) rhs
-- 2a. Dropping the case altogether, if
-- a) it binds nothing (so it's really just a 'seq')
-- b) evaluating the scrutinee has no side effects
| is_plain_seq
, exprOkForSideEffects scrut
-- The entire case is dead, so we can drop it
-- if the scrutinee converges without having imperative
-- side effects or raising a Haskell exception
-- See Note [PrimOp can_fail and has_side_effects] in PrimOp
= simplExprF env rhs cont
-- 2b. Turn the case into a let, if
-- a) it binds only the case-binder
-- b) unlifted case: the scrutinee is ok-for-speculation
-- lifted case: the scrutinee is in HNF (or will later be demanded)
| all_dead_bndrs
, if is_unlifted
then exprOkForSpeculation scrut -- See Note [Case elimination: unlifted case]
else exprIsHNF scrut -- See Note [Case elimination: lifted case]
|| scrut_is_demanded_var scrut
= do { tick (CaseElim case_bndr)
; env' <- simplNonRecX env case_bndr scrut
; simplExprF env' rhs cont }
-- 2c. Try the seq rules if
-- a) it binds only the case binder
-- b) a rule for seq applies
-- See Note [User-defined RULES for seq] in MkId
| is_plain_seq
= do { let scrut_ty = exprType scrut
rhs_ty = substTy env (exprType rhs)
out_args = [ TyArg { as_arg_ty = scrut_ty
, as_hole_ty = seq_id_ty }
, TyArg { as_arg_ty = rhs_ty
, as_hole_ty = piResultTy seq_id_ty scrut_ty }
, ValArg scrut]
rule_cont = ApplyToVal { sc_dup = NoDup, sc_arg = rhs
, sc_env = env, sc_cont = cont }
env' = zapSubstEnv env
-- Lazily evaluated, so we don't do most of this
; rule_base <- getSimplRules
; mb_rule <- tryRules env' (getRules rule_base seqId) seqId out_args rule_cont
; case mb_rule of
Just (rule_rhs, cont') -> simplExprF env' rule_rhs cont'
Nothing -> reallyRebuildCase env scrut case_bndr alts cont }
where
is_unlifted = isUnliftedType (idType case_bndr)
all_dead_bndrs = all isDeadBinder bndrs -- bndrs are [InId]
is_plain_seq = all_dead_bndrs && isDeadBinder case_bndr -- Evaluation *only* for effect
seq_id_ty = idType seqId
scrut_is_demanded_var :: CoreExpr -> Bool
-- See Note [Eliminating redundant seqs]
scrut_is_demanded_var (Cast s _) = scrut_is_demanded_var s
scrut_is_demanded_var (Var _) = isStrictDmd (idDemandInfo case_bndr)
scrut_is_demanded_var _ = False
rebuildCase env scrut case_bndr alts cont
= reallyRebuildCase env scrut case_bndr alts cont
--------------------------------------------------
-- 3. Catch-all case
--------------------------------------------------
reallyRebuildCase env scrut case_bndr alts cont
= do { -- Prepare the continuation;
-- The new subst_env is in place
(env', dup_cont, nodup_cont) <- prepareCaseCont env alts cont
-- Simplify the alternatives
; (scrut', case_bndr', alts') <- simplAlts env' scrut case_bndr alts dup_cont
; dflags <- getDynFlags
; let alts_ty' = contResultType dup_cont
; case_expr <- mkCase dflags scrut' case_bndr' alts_ty' alts'
-- Notice that rebuild gets the in-scope set from env', not alt_env
       -- (which in any case is only built in simplAlts)
       -- The case binder does *not* scope over the whole returned case-expression
; rebuild env' case_expr nodup_cont }
{-
simplCaseBinder checks whether the scrutinee is a variable, v. If so,
try to eliminate uses of v in the RHSs in favour of case_bndr; that
way, there's a chance that v will now only be used once, and hence
inlined.
Historical note: we used to do the "case binder swap" in the Simplifier
so there were additional complications if the scrutinee was a variable.
Now the binder-swap stuff is done in the occurrence analyser; see
OccurAnal Note [Binder swap].
Note [knownCon occ info]
~~~~~~~~~~~~~~~~~~~~~~~~
If the case binder is not dead, then neither are the pattern bound
variables:
case <any> of x { (a,b) ->
case x of { (p,q) -> p } }
Here (a,b) both look dead, but come alive after the inner case is eliminated.
The point is that we bring into the envt a binding
let x = (a,b)
after the outer case, and that makes (a,b) alive. At least we do unless
the case binder is guaranteed dead.
Note [Case alternative occ info]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
When we are simply reconstructing a case (the common case), we always
zap the occurrence info on the binders in the alternatives. Even
if the case binder is dead, the scrutinee is usually a variable, and *that*
can bring the case-alternative binders back to life.
See Note [Add unfolding for scrutinee]
Note [Improving seq]
~~~~~~~~~~~~~~~~~~~
Consider
type family F :: * -> *
type instance F Int = Int
... case e of x { DEFAULT -> rhs } ...
where x::F Int. Then we'd like to rewrite (F Int) to Int, getting
case e `cast` co of x'::Int
I# x# -> let x = x' `cast` sym co
in rhs
so that 'rhs' can take advantage of the form of x'.
Notice that Note [Case of cast] (in OccurAnal) may then apply to the result.
Nota Bene: We only do the [Improving seq] transformation if the
case binder 'x' is actually used in the rhs; that is, if the case
is *not* a *pure* seq.
a) There is no point in adding the cast to a pure seq.
b) There is a good reason not to: doing so would interfere
with seq rules (Note [Built-in RULES for seq] in MkId).
In particular, this [Improving seq] thing *adds* a cast
while [Built-in RULES for seq] *removes* one, so they
just flip-flop.
You might worry about
case v of x { __DEFAULT ->
... case (v `cast` co) of y { I# -> ... }}
This is a pure seq (since x is unused), so [Improving seq] won't happen.
But it's ok: the simplifier will replace 'v' by 'x' in the rhs to get
case v of x { __DEFAULT ->
... case (x `cast` co) of y { I# -> ... }}
Now the outer case is not a pure seq, so [Improving seq] will happen,
and then the inner case will disappear.
The need for [Improving seq] showed up in Roman's experiments. Example:
foo :: F Int -> Int -> Int
foo t n = t `seq` bar n
where
bar 0 = 0
bar n = bar (n - case t of TI i -> i)
Here we'd like to avoid repeated evaluating t inside the loop, by
taking advantage of the `seq`.
At one point I did this transformation in LiberateCase, but it's more
robust here. (Otherwise, there's a danger that we'll simply drop the
'seq' altogether, before LiberateCase gets to see it.)
-}
simplAlts :: SimplEnv
-> OutExpr
-> InId -- Case binder
-> [InAlt] -- Non-empty
-> SimplCont
-> SimplM (OutExpr, OutId, [OutAlt]) -- Includes the continuation
-- Like simplExpr, this just returns the simplified alternatives;
-- it does not return an environment
-- The returned alternatives can be empty, if none are possible
simplAlts env scrut case_bndr alts cont'
= do { let env0 = zapFloats env
; (env1, case_bndr1) <- simplBinder env0 case_bndr
; fam_envs <- getFamEnvs
; (alt_env', scrut', case_bndr') <- improveSeq fam_envs env1 scrut
case_bndr case_bndr1 alts
; (imposs_deflt_cons, in_alts) <- prepareAlts scrut' case_bndr' alts
-- NB: it's possible that the returned in_alts is empty: this is handled
-- by the caller (rebuildCase) in the missingAlt function
; alts' <- mapM (simplAlt alt_env' (Just scrut') imposs_deflt_cons case_bndr' cont') in_alts
; -- pprTrace "simplAlts" (ppr case_bndr $$ ppr alts_ty $$ ppr alts_ty' $$ ppr alts $$ ppr cont') $
return (scrut', case_bndr', alts') }
------------------------------------
improveSeq :: (FamInstEnv, FamInstEnv) -> SimplEnv
-> OutExpr -> InId -> OutId -> [InAlt]
-> SimplM (SimplEnv, OutExpr, OutId)
-- Note [Improving seq]
improveSeq fam_envs env scrut case_bndr case_bndr1 [(DEFAULT,_,_)]
| not (isDeadBinder case_bndr) -- Not a pure seq! See Note [Improving seq]
, Just (co, ty2) <- topNormaliseType_maybe fam_envs (idType case_bndr1)
= do { case_bndr2 <- newId (fsLit "nt") ty2
; let rhs = DoneEx (Var case_bndr2 `Cast` mkSymCo co)
env2 = extendIdSubst env case_bndr rhs
; return (env2, scrut `Cast` co, case_bndr2) }
improveSeq _ env scrut _ case_bndr1 _
= return (env, scrut, case_bndr1)
------------------------------------
simplAlt :: SimplEnv
-> Maybe OutExpr -- The scrutinee
-> [AltCon] -- These constructors can't be present when
-- matching the DEFAULT alternative
-> OutId -- The case binder
-> SimplCont
-> InAlt
-> SimplM OutAlt
simplAlt env _ imposs_deflt_cons case_bndr' cont' (DEFAULT, bndrs, rhs)
= ASSERT( null bndrs )
do { let env' = addBinderUnfolding env case_bndr'
(mkOtherCon imposs_deflt_cons)
-- Record the constructors that the case-binder *can't* be.
; rhs' <- simplExprC env' rhs cont'
; return (DEFAULT, [], rhs') }
simplAlt env scrut' _ case_bndr' cont' (LitAlt lit, bndrs, rhs)
= ASSERT( null bndrs )
do { env' <- addAltUnfoldings env scrut' case_bndr' (Lit lit)
; rhs' <- simplExprC env' rhs cont'
; return (LitAlt lit, [], rhs') }
simplAlt env scrut' _ case_bndr' cont' (DataAlt con, vs, rhs)
= do { -- Deal with the pattern-bound variables
-- Mark the ones that are in ! positions in the
-- data constructor as certainly-evaluated.
-- NB: simplLamBinders preserves this eval info
; let vs_with_evals = add_evals (dataConRepStrictness con)
; (env', vs') <- simplLamBndrs env vs_with_evals
-- Bind the case-binder to (con args)
; let inst_tys' = tyConAppArgs (idType case_bndr')
con_app :: OutExpr
con_app = mkConApp2 con inst_tys' vs'
; env'' <- addAltUnfoldings env' scrut' case_bndr' con_app
; rhs' <- simplExprC env'' rhs cont'
; return (DataAlt con, vs', rhs') }
where
-- add_evals records the evaluated-ness of the bound variables of
-- a case pattern. This is *important*. Consider
-- data T = T !Int !Int
--
-- case x of { T a b -> T (a+1) b }
--
-- We really must record that b is already evaluated so that we don't
-- go and re-evaluate it when constructing the result.
-- See Note [Data-con worker strictness] in MkId.hs
add_evals the_strs
= go vs the_strs
where
go [] [] = []
go (v:vs') strs | isTyVar v = v : go vs' strs
go (v:vs') (str:strs)
| isMarkedStrict str = eval v : go vs' strs
| otherwise = zap v : go vs' strs
go _ _ = pprPanic "cat_evals"
(ppr con $$
ppr vs $$
ppr_with_length the_strs $$
ppr_with_length (dataConRepArgTys con) $$
ppr_with_length (dataConRepStrictness con))
where
ppr_with_length list
= ppr list <+> parens (text "length =" <+> ppr (length list))
-- NB: If this panic triggers, note that
-- NoStrictnessMark doesn't print!
zap v = zapIdOccInfo v -- See Note [Case alternative occ info]
eval v = zap v `setIdUnfolding` evaldUnfolding
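-- addAltUnfoldings records, in the environment, what this alternative
-- tells us: the case binder (and the scrutinee variable, if the
-- scrutinee is a variable or a cast of one) gets an unfolding for the
-- value matched here.  See Note [Add unfolding for scrutinee].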
addAltUnfoldings :: SimplEnv -> Maybe OutExpr -> OutId -> OutExpr -> SimplM SimplEnv
addAltUnfoldings env scrut case_bndr con_app
= do { dflags <- getDynFlags
; let con_app_unf = mkSimpleUnfolding dflags con_app
env1 = addBinderUnfolding env case_bndr con_app_unf
-- See Note [Add unfolding for scrutinee]
env2 = case scrut of
Just (Var v) -> addBinderUnfolding env1 v con_app_unf
Just (Cast (Var v) co) -> addBinderUnfolding env1 v $
mkSimpleUnfolding dflags (Cast con_app (mkSymCo co))
_ -> env1
; traceSmpl "addAltUnf" (vcat [ppr case_bndr <+> ppr scrut, ppr con_app])
; return env2 }
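-- Give a binder that is already in scope the supplied unfolding;
-- in debug builds, warn if the unfolding's type does not match the
-- binder's type.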
addBinderUnfolding :: SimplEnv -> Id -> Unfolding -> SimplEnv
addBinderUnfolding env bndr unf
| debugIsOn, Just tmpl <- maybeUnfoldingTemplate unf
= WARN( not (eqType (idType bndr) (exprType tmpl)),
ppr bndr $$ ppr (idType bndr) $$ ppr tmpl $$ ppr (exprType tmpl) )
modifyInScope env (bndr `setIdUnfolding` unf)
| otherwise
= modifyInScope env (bndr `setIdUnfolding` unf)
zapBndrOccInfo :: Bool -> Id -> Id
-- Consider case e of b { (a,b) -> ... }
-- Then if we bind b to (a,b) in "...", and b is not dead,
-- then we must zap the deadness info on a,b
zapBndrOccInfo keep_occ_info pat_id
| keep_occ_info = pat_id
| otherwise = zapIdOccInfo pat_id
{-
Note [Add unfolding for scrutinee]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
In general it's unlikely that a variable scrutinee will appear
in the case alternatives case x of { ...x unlikely to appear... }
because the binder-swap in OccAnal has got rid of all such occurrences
See Note [Binder swap] in OccAnal.
BUT it is still VERY IMPORTANT to add a suitable unfolding for a
variable scrutinee, in simplAlt. Here's why
case x of y
(a,b) -> case b of c
I# v -> ...(f y)...
There is no occurrence of 'b' in the (...(f y)...). But y gets
the unfolding (a,b), and *that* mentions b. If f has a RULE
RULE f (p, I# q) = ...
we want that rule to match, so we must extend the in-scope env with a
suitable unfolding for 'y'. It's *essential* for rule matching; but
it's also good for case-elimination -- suppose that 'f' was inlined
and did multi-level case analysis, then we'd solve it in one
simplifier sweep instead of two.
Exactly the same issue arises in SpecConstr;
see Note [Add scrutinee to ValueEnv too] in SpecConstr
HOWEVER, given
case x of y { Just a -> r1; Nothing -> r2 }
we do not want to add the unfolding x -> y to 'x', which might seem cool,
since 'y' itself has different unfoldings in r1 and r2. Reason: if we
did that, we'd have to zap y's deadness info and that is a very useful
piece of information.
So instead we add the unfolding x -> Just a, and x -> Nothing in the
respective RHSs.
************************************************************************
* *
\subsection{Known constructor}
* *
************************************************************************
We are a bit careful with occurrence info. Here's an example
(\x* -> case x of (a*, b) -> f a) (h v, e)
where the * means "occurs once". This effectively becomes
     case (h v, e) of (a*, b) -> f a
and then
let a* = h v; b = e in f a
and then
f (h v)
All this should happen in one sweep.
-}
knownCon :: SimplEnv
-> OutExpr -- The scrutinee
-> DataCon -> [OutType] -> [OutExpr] -- The scrutinee (in pieces)
-> InId -> [InBndr] -> InExpr -- The alternative
-> SimplCont
-> SimplM (SimplEnv, OutExpr)
knownCon env scrut dc dc_ty_args dc_args bndr bs rhs cont
= do { env' <- bind_args env bs dc_args
; env'' <- bind_case_bndr env'
; simplExprF env'' rhs cont }
where
zap_occ = zapBndrOccInfo (isDeadBinder bndr) -- bndr is an InId
-- Ugh!
bind_args env' [] _ = return env'
bind_args env' (b:bs') (Type ty : args)
= ASSERT( isTyVar b )
bind_args (extendTvSubst env' b ty) bs' args
bind_args env' (b:bs') (Coercion co : args)
= ASSERT( isCoVar b )
bind_args (extendCvSubst env' b co) bs' args
bind_args env' (b:bs') (arg : args)
= ASSERT( isId b )
do { let b' = zap_occ b
-- Note that the binder might be "dead", because it doesn't
-- occur in the RHS; and simplNonRecX may therefore discard
-- it via postInlineUnconditionally.
-- Nevertheless we must keep it if the case-binder is alive,
-- because it may be used in the con_app. See Note [knownCon occ info]
; env'' <- simplNonRecX env' b' arg -- arg satisfies let/app invariant
; bind_args env'' bs' args }
bind_args _ _ _ =
pprPanic "bind_args" $ ppr dc $$ ppr bs $$ ppr dc_args $$
text "scrut:" <+> ppr scrut
-- It's useful to bind bndr to scrut, rather than to a fresh
-- binding x = Con arg1 .. argn
-- because very often the scrut is a variable, so we avoid
-- creating, and then subsequently eliminating, a let-binding
    -- BUT, if scrut is not a variable, we must be careful
-- about duplicating the arg redexes; in that case, make
-- a new con-app from the args
bind_case_bndr env
| isDeadBinder bndr = return env
| exprIsTrivial scrut = return (extendIdSubst env bndr (DoneEx scrut))
| otherwise = do { dc_args <- mapM (simplVar env) bs
                          -- dc_ty_args are already OutTypes,
-- but bs are InBndrs
; let con_app = Var (dataConWorkId dc)
`mkTyApps` dc_ty_args
`mkApps` dc_args
; simplNonRecX env bndr con_app }
-------------------
missingAlt :: SimplEnv -> Id -> [InAlt] -> SimplCont -> SimplM (SimplEnv, OutExpr)
-- This isn't strictly an error, although it is unusual.
-- It's possible that the simplifier might "see" that
-- an inner case has no accessible alternatives before
-- it "sees" that the entire branch of an outer case is
-- inaccessible. So we simply put an error case here instead.
missingAlt env case_bndr _ cont
= WARN( True, text "missingAlt" <+> ppr case_bndr )
return (env, mkImpossibleExpr (contResultType cont))
{-
************************************************************************
* *
\subsection{Duplicating continuations}
* *
************************************************************************
-}
prepareCaseCont :: SimplEnv
-> [InAlt] -> SimplCont
-> SimplM (SimplEnv,
SimplCont, -- Dupable part
SimplCont) -- Non-dupable part
-- We are considering
-- K[case _ of { p1 -> r1; ...; pn -> rn }]
-- where K is some enclosing continuation for the case
-- Goal: split K into two pieces Kdup,Knodup so that
-- a) Kdup can be duplicated
-- b) Knodup[Kdup[e]] = K[e]
-- The idea is that we'll transform thus:
-- Knodup[ (case _ of { p1 -> Kdup[r1]; ...; pn -> Kdup[rn] }
--
-- We may also return some extra bindings in SimplEnv (that scope over
-- the entire continuation)
--
-- When case-of-case is off, just make the entire continuation non-dupable
prepareCaseCont env alts cont
| not (sm_case_case (getMode env)) = return (env, mkBoringStop (contHoleType cont), cont)
| not (many_alts alts) = return (env, cont, mkBoringStop (contResultType cont))
| otherwise = mkDupableCont env cont
where
many_alts :: [InAlt] -> Bool -- True iff strictly > 1 non-bottom alternative
many_alts [] = False -- See Note [Bottom alternatives]
many_alts [_] = False
many_alts (alt:alts)
| is_bot_alt alt = many_alts alts
| otherwise = not (all is_bot_alt alts)
is_bot_alt (_,_,rhs) = exprIsBottom rhs
{-
Note [Bottom alternatives]
~~~~~~~~~~~~~~~~~~~~~~~~~~
When we have
  case (case x of { A -> error .. ; B -> e; C -> error .. })
of alts
then we can just duplicate those alts because the A and C cases
will disappear immediately. This is more direct than creating
join points and inlining them away; and in some cases we would
not even create the join points (see Note [Single-alternative case])
and we would keep the case-of-case which is silly. See Trac #4930.
-}
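-- mkDupableCont splits a continuation K into a duplicable part Kdup
-- (which can be pushed into each case alternative) and a non-duplicable
-- residue Knodup, such that Knodup[Kdup[e]] = K[e]; the returned
-- SimplEnv carries any bindings (e.g. join points) needed for the split.
-- See the specification on prepareCaseCont above.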
mkDupableCont :: SimplEnv -> SimplCont
-> SimplM (SimplEnv, SimplCont, SimplCont)
mkDupableCont env cont
| contIsDupable cont
= return (env, cont, mkBoringStop (contResultType cont))
mkDupableCont _ (Stop {}) = panic "mkDupableCont" -- Handled by previous eqn
mkDupableCont env (CastIt ty cont)
= do { (env', dup, nodup) <- mkDupableCont env cont
; return (env', CastIt ty dup, nodup) }
-- Duplicating ticks for now, not sure if this is good or not
mkDupableCont env cont@(TickIt{})
= return (env, mkBoringStop (contHoleType cont), cont)
mkDupableCont env cont@(StrictBind {})
= return (env, mkBoringStop (contHoleType cont), cont)
-- See Note [Duplicating StrictBind]
mkDupableCont env (StrictArg info cci cont)
-- See Note [Duplicating StrictArg]
= do { (env', dup, nodup) <- mkDupableCont env cont
; (env'', args') <- mapAccumLM makeTrivialArg env' (ai_args info)
; return (env'', StrictArg (info { ai_args = args' }) cci dup, nodup) }
mkDupableCont env cont@(ApplyToTy { sc_cont = tail })
= do { (env', dup_cont, nodup_cont) <- mkDupableCont env tail
; return (env', cont { sc_cont = dup_cont }, nodup_cont ) }
mkDupableCont env (ApplyToVal { sc_arg = arg, sc_dup = dup, sc_env = se, sc_cont = cont })
= -- e.g. [...hole...] (...arg...)
-- ==>
-- let a = ...arg...
-- in [...hole...] a
do { (env', dup_cont, nodup_cont) <- mkDupableCont env cont
; (_, se', arg') <- simplArg env' dup se arg
; (env'', arg'') <- makeTrivial NotTopLevel env' (fsLit "karg") arg'
; let app_cont = ApplyToVal { sc_arg = arg'', sc_env = se'
, sc_dup = OkToDup, sc_cont = dup_cont }
; return (env'', app_cont, nodup_cont) }
mkDupableCont env cont@(Select { sc_bndr = case_bndr, sc_alts = [(_, bs, _rhs)] })
-- See Note [Single-alternative case]
-- | not (exprIsDupable rhs && contIsDupable case_cont)
-- | not (isDeadBinder case_bndr)
| all isDeadBinder bs -- InIds
&& not (isUnliftedType (idType case_bndr))
-- Note [Single-alternative-unlifted]
= return (env, mkBoringStop (contHoleType cont), cont)
mkDupableCont env (Select { sc_bndr = case_bndr, sc_alts = alts
, sc_env = se, sc_cont = cont })
= -- e.g. (case [...hole...] of { pi -> ei })
-- ===>
-- let ji = \xij -> ei
-- in case [...hole...] of { pi -> ji xij }
do { tick (CaseOfCase case_bndr)
; (env', dup_cont, nodup_cont) <- prepareCaseCont env alts cont
-- NB: We call prepareCaseCont here. If there is only one
-- alternative, then dup_cont may be big, but that's ok
-- because we push it into the single alternative, and then
-- use mkDupableAlt to turn that simplified alternative into
-- a join point if it's too big to duplicate.
-- And this is important: see Note [Fusing case continuations]
; let alt_env = se `setInScope` env'
; (alt_env', case_bndr') <- simplBinder alt_env case_bndr
; alts' <- mapM (simplAlt alt_env' Nothing [] case_bndr' dup_cont) alts
-- Safe to say that there are no handled-cons for the DEFAULT case
-- NB: simplBinder does not zap deadness occ-info, so
-- a dead case_bndr' will still advertise its deadness
-- This is really important because in
-- case e of b { (# p,q #) -> ... }
-- b is always dead, and indeed we are not allowed to bind b to (# p,q #),
-- which might happen if e was an explicit unboxed pair and b wasn't marked dead.
-- In the new alts we build, we have the new case binder, so it must retain
-- its deadness.
-- NB: we don't use alt_env further; it has the substEnv for
-- the alternatives, and we don't want that
; (env'', alts'') <- mkDupableAlts env' case_bndr' alts'
; return (env'', -- Note [Duplicated env]
Select { sc_dup = OkToDup
, sc_bndr = case_bndr', sc_alts = alts''
, sc_env = zapSubstEnv env''
, sc_cont = mkBoringStop (contHoleType nodup_cont) },
nodup_cont) }
mkDupableAlts :: SimplEnv -> OutId -> [InAlt]
-> SimplM (SimplEnv, [InAlt])
-- Absorbs the continuation into the new alternatives
mkDupableAlts env case_bndr' the_alts
= go env the_alts
where
go env0 [] = return (env0, [])
go env0 (alt:alts)
= do { (env1, alt') <- mkDupableAlt env0 case_bndr' alt
; (env2, alts') <- go env1 alts
; return (env2, alt' : alts' ) }
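-- Make a single alternative duplicable: a small RHS is kept as it is
-- (see Note [Small alternative rhs]); otherwise the RHS is abstracted
-- into a join point and the alternative becomes a call to that join
-- point (see Note [Join point abstraction]).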
mkDupableAlt :: SimplEnv -> OutId -> (AltCon, [CoreBndr], CoreExpr)
-> SimplM (SimplEnv, (AltCon, [CoreBndr], CoreExpr))
mkDupableAlt env case_bndr (con, bndrs', rhs') = do
dflags <- getDynFlags
if exprIsDupable dflags rhs' -- Note [Small alternative rhs]
then return (env, (con, bndrs', rhs'))
else
do { let rhs_ty' = exprType rhs'
scrut_ty = idType case_bndr
case_bndr_w_unf
= case con of
DEFAULT -> case_bndr
DataAlt dc -> setIdUnfolding case_bndr unf
where
-- See Note [Case binders and join points]
unf = mkInlineUnfolding Nothing rhs
rhs = mkConApp2 dc (tyConAppArgs scrut_ty) bndrs'
LitAlt {} -> WARN( True, text "mkDupableAlt"
<+> ppr case_bndr <+> ppr con )
case_bndr
-- The case binder is alive but trivial, so why has
-- it not been substituted away?
used_bndrs' | isDeadBinder case_bndr = filter abstract_over bndrs'
| otherwise = bndrs' ++ [case_bndr_w_unf]
abstract_over bndr
| isTyVar bndr = True -- Abstract over all type variables just in case
| otherwise = not (isDeadBinder bndr)
-- The deadness info on the new Ids is preserved by simplBinders
; (final_bndrs', final_args) -- Note [Join point abstraction]
<- if (any isId used_bndrs')
then return (used_bndrs', varsToCoreExprs used_bndrs')
else do { rw_id <- newId (fsLit "w") voidPrimTy
; return ([setOneShotLambda rw_id], [Var voidPrimId]) }
; join_bndr <- newId (fsLit "$j") (mkPiTypes final_bndrs' rhs_ty')
-- Note [Funky mkPiTypes]
; let -- We make the lambdas into one-shot-lambdas. The
-- join point is sure to be applied at most once, and doing so
-- prevents the body of the join point being floated out by
-- the full laziness pass
really_final_bndrs = map one_shot final_bndrs'
one_shot v | isId v = setOneShotLambda v
| otherwise = v
join_rhs = mkLams really_final_bndrs rhs'
join_arity = exprArity join_rhs
join_call = mkApps (Var join_bndr) final_args
; env' <- addPolyBind NotTopLevel env (NonRec (join_bndr `setIdArity` join_arity) join_rhs)
; return (env', (con, bndrs', join_call)) }
-- See Note [Duplicated env]
{-
Note [Fusing case continuations]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
It's important to fuse two successive case continuations when the
first has one alternative. That's why we call prepareCaseCont here.
Consider this, which arises from thunk splitting (see Note [Thunk
splitting] in WorkWrap):
let
x* = case (case v of {pn -> rn}) of
I# a -> I# a
in body
The simplifier will find
(Var v) with continuation
Select (pn -> rn) (
Select [I# a -> I# a] (
StrictBind body Stop
So we'll call mkDupableCont on
Select [I# a -> I# a] (StrictBind body Stop)
There is just one alternative in the first Select, so we want to
simplify the rhs (I# a) with continuation (StrictBind body Stop)
Supposing that body is big, we end up with
let $j a = <let x = I# a in body>
in case v of { pn -> case rn of
I# a -> $j a }
This is just what we want because the rn produces a box that
the case rn cancels with.
See Trac #4957 for a fuller example.
Note [Case binders and join points]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Consider this
case (case .. ) of c {
I# c# -> ....c....
If we make a join point with c but not c# we get
$j = \c -> ....c....
But if later inlining scrutinises the c, thus
$j = \c -> ... case c of { I# y -> ... } ...
we won't see that 'c' has already been scrutinised. This actually
happens in the 'tabulate' function in wave4main, and makes a significant
difference to allocation.
An alternative plan is this:
$j = \c# -> let c = I# c# in ...c....
but that is bad if 'c' is *not* later scrutinised.
So instead we do both: we pass 'c' and 'c#' , and record in c's inlining
(a stable unfolding) that it's really I# c#, thus
$j = \c# -> \c[=I# c#] -> ...c....
Absence analysis may later discard 'c'.
NB: take great care when doing strictness analysis;
see Note [Lamba-bound unfoldings] in DmdAnal.
Also note that we can still end up passing stuff that isn't used. Before
strictness analysis we have
let $j x y c{=(x,y)} = (h c, ...)
in ...
After strictness analysis we see that h is strict, we end up with
let $j x y c{=(x,y)} = ($wh x y, ...)
and c is unused.
Note [Duplicated env]
~~~~~~~~~~~~~~~~~~~~~
Some of the alternatives are simplified, but have not been turned into a join point
So they *must* have a zapped subst-env. So we can't use completeNonRecX to
bind the join point, because it might do PostInlineUnconditionally, and
we'd lose that when zapping the subst-env. We could have a per-alt subst-env,
but zapping it (as we do in mkDupableCont, the Select case) is safe, and
at worst delays the join-point inlining.
Note [Small alternative rhs]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~
It is worth checking for a small RHS because otherwise we
get extra let bindings that may cause an extra iteration of the simplifier to
inline back in place. Quite often the rhs is just a variable or constructor.
The Ord instance of Maybe in PrelMaybe.hs, for example, took several extra
iterations because the version with the let bindings looked big, and so wasn't
inlined, but after the join points had been inlined it looked smaller, and so
was inlined.
NB: we have to check the size of rhs', not rhs.
Duplicating a small InAlt might invalidate occurrence information.
However, if it *is* dupable, we return the *un*simplified alternative,
because otherwise we'd need to pair it up with an empty subst-env....
but we only have one env shared between all the alts.
(Remember we must zap the subst-env before re-simplifying something).
Rather than do this we simply agree to re-simplify the original (small) thing later.
Note [Funky mkPiTypes]
~~~~~~~~~~~~~~~~~~~~~~
Notice the funky mkPiTypes. If the constructor has existentials
it's possible that the join point will be abstracted over
type variables as well as term variables.
Example: Suppose we have
data T = forall t. C [t]
Then faced with
case (case e of ...) of
C t xs::[t] -> rhs
We get the join point
let j :: forall t. [t] -> ...
j = /\t \xs::[t] -> rhs
in
case (case e of ...) of
C t xs::[t] -> j t xs
Note [Join point abstraction]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Join points always have at least one value argument,
for several reasons
* If we try to lift a primitive-typed something out
for let-binding-purposes, we will *caseify* it (!),
with potentially-disastrous strictness results. So
instead we turn it into a function: \v -> e
where v::Void#. The value passed to this function is void,
which generates (almost) no code.
* CPR. We used to say "&& isUnliftedType rhs_ty'" here, but now
we make the join point into a function whenever used_bndrs'
is empty. This makes the join-point more CPR friendly.
Consider: let j = if .. then I# 3 else I# 4
in case .. of { A -> j; B -> j; C -> ... }
Now CPR doesn't w/w j because it's a thunk, so
that means that the enclosing function can't w/w either,
which is a lose. Here's the example that happened in practice:
kgmod :: Int -> Int -> Int
kgmod x y = if x > 0 && y < 0 || x < 0 && y > 0
then 78
else 5
* Let-no-escape. We want a join point to turn into a let-no-escape
so that it is implemented as a jump, and one of the conditions
for LNE is that it's not updatable. In CoreToStg, see
Note [What is a non-escaping let]
* Floating. Since a join point will be entered once, no sharing is
gained by floating out, but something might be lost by doing
so because it might be allocated.
I have seen a case alternative like this:
True -> \v -> ...
It's a bit silly to add the realWorld dummy arg in this case, making
$j = \s v -> ...
True -> $j s
(the \v alone is enough to make CPR happy) but I think it's rare
There's a slight infelicity here: we pass the overall
case_bndr to all the join points if it's used in *any* RHS,
because we don't know its usage in each RHS separately
Note [Duplicating StrictArg]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~
The original plan had (where E is a big argument)
e.g. f E [..hole..]
==> let $j = \a -> f E a
in $j [..hole..]
But this is terrible! Here's an example:
&& E (case x of { T -> F; F -> T })
Now, && is strict so we end up simplifying the case with
an ArgOf continuation. If we let-bind it, we get
let $j = \v -> && E v
in simplExpr (case x of { T -> F; F -> T })
(ArgOf (\r -> $j r)
And after simplifying more we get
let $j = \v -> && E v
in case x of { T -> $j F; F -> $j T }
Which is a Very Bad Thing
What we do now is this
f E [..hole..]
==> let a = E
in f a [..hole..]
Now if the thing in the hole is a case expression (which is when
we'll call mkDupableCont), we'll push the function call into the
branches, which is what we want. Now RULES for f may fire, and
call-pattern specialisation. Here's an example from Trac #3116
go (n+1) (case l of
1 -> bs'
_ -> Chunk p fpc (o+1) (l-1) bs')
If we can push the call for 'go' inside the case, we get
call-pattern specialisation for 'go', which is *crucial* for
this program.
Here is the (&&) example:
&& E (case x of { T -> F; F -> T })
==> let a = E in
case x of { T -> && a F; F -> && a T }
Much better!
Notice that
* Arguments to f *after* the strict one are handled by
the ApplyToVal case of mkDupableCont. Eg
f [..hole..] E
* We can only do the let-binding of E because the function
part of a StrictArg continuation is an explicit syntax
tree. In earlier versions we represented it as a function
    (CoreExpr -> CoreExpr) which we couldn't take apart.
Do *not* duplicate StrictBind and StrictArg continuations. We gain
nothing by propagating them into the expressions, and we do lose a
lot.
The desire not to duplicate is the entire reason that
mkDupableCont returns a pair of continuations.
Note [Duplicating StrictBind]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Unlike StrictArg, there doesn't seem to be anything to gain from
duplicating a StrictBind continuation, so we don't.
Note [Single-alternative cases]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
This case is just like the ArgOf case. Here's an example:
data T a = MkT !a
...(MkT (abs x))...
Then we get
case (case x of I# x' ->
case x' <# 0# of
True -> I# (negate# x')
False -> I# x') of y {
DEFAULT -> MkT y
Because the (case x) has only one alternative, we'll transform to
case x of I# x' ->
case (case x' <# 0# of
True -> I# (negate# x')
False -> I# x') of y {
DEFAULT -> MkT y
But now we do *NOT* want to make a join point etc, giving
case x of I# x' ->
let $j = \y -> MkT y
in case x' <# 0# of
True -> $j (I# (negate# x'))
False -> $j (I# x')
In this case the $j will inline again, but suppose there was a big
strict computation enclosing the original call to MkT. Then, it won't
"see" the MkT any more, because it's big and won't get duplicated.
And, what is worse, nothing was gained by the case-of-case transform.
So, in circumstances like these, we don't want to build join points
and push the outer case into the branches of the inner one. Instead,
don't duplicate the continuation.
When should we use this strategy? We should not use it on *every*
single-alternative case:
e.g. case (case ....) of (a,b) -> (# a,b #)
Here we must push the outer case into the inner one!
Other choices:
* Match [(DEFAULT,_,_)], but in the common case of Int,
the alternative-filling-in code turned the outer case into
case (...) of y { I# _ -> MkT y }
* Match on single alternative plus (not (isDeadBinder case_bndr))
Rationale: pushing the case inwards won't eliminate the construction.
But there's a risk of
case (...) of y { (a,b) -> let z=(a,b) in ... }
Now y looks dead, but it'll come alive again. Still, this
seems like the best option at the moment.
* Match on single alternative plus (all (isDeadBinder bndrs))
Rationale: this is essentially seq.
* Match when the rhs is *not* duplicable, and hence would lead to a
join point. This catches the disaster-case above. We can test
the *un-simplified* rhs, which is fine. It might get bigger or
smaller after simplification; if it gets smaller, this case might
fire next time round. NB also that we must test contIsDupable
  case_cont *too*, because case_cont might be big!
HOWEVER: I found that this version doesn't work well, because
we can get let x = case (...) of { small } in ...case x...
When x is inlined into its full context, we find that it was a bad
idea to have pushed the outer case inside the (...) case.
There is a cost to not doing case-of-case; see Trac #10626.
Note [Single-alternative-unlifted]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Here's another single-alternative where we really want to do case-of-case:
data Mk1 = Mk1 Int# | Mk2 Int#
M1.f =
\r [x_s74 y_s6X]
case
case y_s6X of tpl_s7m {
M1.Mk1 ipv_s70 -> ipv_s70;
M1.Mk2 ipv_s72 -> ipv_s72;
}
of
wild_s7c
{ __DEFAULT ->
case
case x_s74 of tpl_s7n {
M1.Mk1 ipv_s77 -> ipv_s77;
M1.Mk2 ipv_s79 -> ipv_s79;
}
of
wild1_s7b
{ __DEFAULT -> ==# [wild1_s7b wild_s7c];
};
};
So the outer case is doing *nothing at all*, other than serving as a
join-point. In this case we really want to do case-of-case and decide
whether to use a real join point or just duplicate the continuation:
let $j s7c = case x of
Mk1 ipv77 -> (==) s7c ipv77
                   Mk2 ipv79 -> (==) s7c ipv79
in
case y of
Mk1 ipv70 -> $j ipv70
Mk2 ipv72 -> $j ipv72
Hence: check whether the case binder's type is unlifted, because then
the outer case is *not* a seq.
************************************************************************
* *
Unfoldings
* *
************************************************************************
-}
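-- Work out the unfolding to attach to a let-bound Id: a stable
-- unfolding is simplified in its own right (simplUnfolding); otherwise
-- we build a fresh unfolding from the simplified RHS.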
simplLetUnfolding :: SimplEnv-> TopLevelFlag
-> InId
-> OutExpr
-> Unfolding -> SimplM Unfolding
simplLetUnfolding env top_lvl id new_rhs unf
| isStableUnfolding unf
= simplUnfolding env top_lvl id unf
| otherwise
= bottoming `seq` -- See Note [Force bottoming field]
do { dflags <- getDynFlags
; return (mkUnfolding dflags InlineRhs (isTopLevel top_lvl) bottoming new_rhs) }
-- We make an unfolding *even for loop-breakers*.
-- Reason: (a) It might be useful to know that they are WHNF
-- (b) In TidyPgm we currently assume that, if we want to
-- expose the unfolding then indeed we *have* an unfolding
-- to expose. (We could instead use the RHS, but currently
-- we don't.) The simple thing is always to have one.
where
bottoming = isBottomingId id
simplUnfolding :: SimplEnv-> TopLevelFlag -> InId -> Unfolding -> SimplM Unfolding
-- Note [Setting the new unfolding]
simplUnfolding env top_lvl id unf
= case unf of
NoUnfolding -> return unf
OtherCon {} -> return unf
DFunUnfolding { df_bndrs = bndrs, df_con = con, df_args = args }
-> do { (env', bndrs') <- simplBinders rule_env bndrs
; args' <- mapM (simplExpr env') args
; return (mkDFunUnfolding bndrs' con args') }
CoreUnfolding { uf_tmpl = expr, uf_src = src, uf_guidance = guide }
| isStableSource src
-> do { expr' <- simplExpr rule_env expr
; case guide of
UnfWhen { ug_arity = arity, ug_unsat_ok = sat_ok } -- Happens for INLINE things
-> let guide' = UnfWhen { ug_arity = arity, ug_unsat_ok = sat_ok
, ug_boring_ok = inlineBoringOk expr' }
-- Refresh the boring-ok flag, in case expr'
-- has got small. This happens, notably in the inlinings
-- for dfuns for single-method classes; see
-- Note [Single-method classes] in TcInstDcls.
-- A test case is Trac #4138
in return (mkCoreUnfolding src is_top_lvl expr' guide')
-- See Note [Top-level flag on inline rules] in CoreUnfold
_other -- Happens for INLINABLE things
-> bottoming `seq` -- See Note [Force bottoming field]
do { dflags <- getDynFlags
; return (mkUnfolding dflags src is_top_lvl bottoming expr') } }
-- If the guidance is UnfIfGoodArgs, this is an INLINABLE
-- unfolding, and we need to make sure the guidance is kept up
-- to date with respect to any changes in the unfolding.
| otherwise -> return noUnfolding -- Discard unstable unfoldings
where
bottoming = isBottomingId id
is_top_lvl = isTopLevel top_lvl
act = idInlineActivation id
rule_env = updMode (updModeForStableUnfoldings act) env
-- See Note [Simplifying inside stable unfoldings] in SimplUtils
{-
Note [Force bottoming field]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~
We need to force bottoming, or the new unfolding holds
on to the old unfolding (which is part of the id).
Note [Setting the new unfolding]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
* If there's an INLINE pragma, we simplify the RHS gently. Maybe we
should do nothing at all, but simplifying gently might get rid of
more crap.
* If not, we make an unfolding from the new RHS. But *only* for
non-loop-breakers. Making loop breakers not have an unfolding at all
means that we can avoid tests in exprIsConApp, for example. This is
important: if exprIsConApp says 'yes' for a recursive thing, then we
can get into an infinite loop
If there's a stable unfolding on a loop breaker (which happens for
INLINEABLE), we hang on to the inlining. It's pretty dodgy, but the
user did say 'INLINE'. May need to revisit this choice.
************************************************************************
* *
Rules
* *
************************************************************************
Note [Rules in a letrec]
~~~~~~~~~~~~~~~~~~~~~~~~
After creating fresh binders for the binders of a letrec, we
substitute the RULES and add them back onto the binders; this is done
*before* processing any of the RHSs. This is important. Manuel found
cases where he really, really wanted a RULE for a recursive function
to apply in that function's own right-hand side.
See Note [Loop breaking and RULES] in OccAnal.
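An illustrative, source-level sketch of the situation (not part of the
original Note; the function and rule names are invented):

    {-# RULES "g/plus-self" forall x. g (x + x) = g (2 * x) #-}
    g :: Int -> Int
    g 0 = 0
    g n = g (n + n)    -- we want "g/plus-self" to be able to fire here,
                       -- i.e. in g's own right-hand side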
-}
addBndrRules :: SimplEnv -> InBndr -> OutBndr -> SimplM (SimplEnv, OutBndr)
-- Rules are added back onto the binder
addBndrRules env in_id out_id
| null old_rules
= return (env, out_id)
| otherwise
= do { new_rules <- simplRules env (Just (idName out_id)) old_rules
; let final_id = out_id `setIdSpecialisation` mkRuleInfo new_rules
; return (modifyInScope env final_id, final_id) }
where
old_rules = ruleInfoRules (idSpecialisation in_id)
simplRules :: SimplEnv -> Maybe Name -> [CoreRule] -> SimplM [CoreRule]
simplRules env mb_new_nm rules
= mapM simpl_rule rules
where
simpl_rule rule@(BuiltinRule {})
= return rule
simpl_rule rule@(Rule { ru_bndrs = bndrs, ru_args = args
, ru_fn = fn_name, ru_rhs = rhs })
= do { (env', bndrs') <- simplBinders env bndrs
; let rule_env = updMode updModeForRules env'
; args' <- mapM (simplExpr rule_env) args
; rhs' <- simplExpr rule_env rhs
; return (rule { ru_bndrs = bndrs'
, ru_fn = mb_new_nm `orElse` fn_name
, ru_args = args'
, ru_rhs = rhs' }) }
| tjakway/ghcjvm | compiler/simplCore/Simplify.hs | bsd-3-clause | 124,411 | 20 | 25 | 38,197 | 15,479 | 8,190 | 7,289 | -1 | -1 |
{-# LANGUAGE OverloadedStrings #-}
module Distribution.Parsec.ConfVar (parseConditionConfVar) where
import Prelude ()
import Distribution.Compat.Prelude
import Distribution.Compat.Parsec (integral)
import Distribution.Parsec.Class (Parsec (..))
import Distribution.Parsec.Types.Common
import Distribution.Parsec.Types.Field (SectionArg (..))
import Distribution.Parsec.Types.ParseResult
import Distribution.Simple.Utils (fromUTF8BS)
import Distribution.Types.Condition
import Distribution.Types.GenericPackageDescription
(ConfVar (..))
import Distribution.Version
(anyVersion, earlierVersion, intersectVersionRanges,
laterVersion, majorBoundVersion, mkVersion, noVersion,
orEarlierVersion, orLaterVersion, thisVersion,
unionVersionRanges, withinVersion)
import qualified Text.Parsec as P
import qualified Text.Parsec.Error as P
-- | Parse @'Condition' 'ConfVar'@ from section arguments provided by parsec
-- based outline parser.
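--
-- As an illustration (not part of the original documentation; the exact
-- token shape is an assumption): for a section head such as
-- @if os(windows)@ the outline parser hands us roughly
--
-- > [SecArgName pos "os", SecArgOther pos "(", SecArgName pos "windows", SecArgOther pos ")"]
--
-- for some position @pos@, and this function parses those arguments into
-- @Var (OS Windows)@.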
parseConditionConfVar :: [SectionArg Position] -> ParseResult (Condition ConfVar)
parseConditionConfVar args = do
-- preprocess glued operators
args' <- preprocess args
-- The name of the input file is irrelevant, as we reformat the error message.
case P.runParser (parser <* P.eof) () "<condition>" args' of
Right x -> pure x
Left err -> do
-- Mangle the position to the actual one
let ppos = P.errorPos err
let epos = Position (P.sourceLine ppos) (P.sourceColumn ppos)
let msg = P.showErrorMessages
"or" "unknown parse error" "expecting" "unexpected" "end of input"
(P.errorMessages err)
parseFailure epos msg
pure $ Lit True
-- This is a hack, as we have "broken" .cabal files on Hackage
--
-- There are glued operators "&&!" (no whitespace) in some cabal files.
-- E.g. http://hackage.haskell.org/package/hblas-0.2.0.0/hblas.cabal
preprocess :: [SectionArg Position] -> ParseResult [SectionArg Position]
preprocess (SecArgOther pos "&&!" : rest) = do
parseWarning pos PWTGluedOperators "Glued operators: &&!"
(\rest' -> SecArgOther pos "&&" : SecArgOther pos "!" : rest') <$> preprocess rest
preprocess (x : rest) =
(x: ) <$> preprocess rest
preprocess [] = pure []
type Parser = P.Parsec [SectionArg Position] ()
parser :: Parser (Condition ConfVar)
parser = condOr
where
condOr = P.sepBy1 condAnd (oper "||") >>= return . foldl1 COr
condAnd = P.sepBy1 cond (oper "&&") >>= return . foldl1 CAnd
cond = P.choice
[ boolLiteral, parens condOr, notCond, osCond, archCond, flagCond, implCond ]
notCond = CNot <$ oper "!" <*> cond
boolLiteral = Lit <$> boolLiteral'
osCond = Var . OS <$ string "os" <*> parens fromParsec
flagCond = Var . Flag <$ string "flag" <*> parens fromParsec
archCond = Var . Arch <$ string "arch" <*> parens fromParsec
implCond = Var <$ string "impl" <*> parens implCond'
implCond' = Impl
<$> fromParsec
<*> P.option anyVersion versionRange
version = fromParsec
versionStar = mkVersion <$> fromParsec' versionStar' <* oper "*"
versionStar' = some (integral <* P.char '.')
versionRange = expr
where
expr = foldl1 unionVersionRanges <$> P.sepBy1 term (oper "||")
term = foldl1 intersectVersionRanges <$> P.sepBy1 factor (oper "&&")
factor = P.choice
$ parens expr
: parseAnyVersion
: parseNoVersion
: parseWildcardRange
: map parseRangeOp rangeOps
parseAnyVersion = anyVersion <$ string "-any"
parseNoVersion = noVersion <$ string "-none"
parseWildcardRange = P.try $ withinVersion <$ oper "==" <*> versionStar
parseRangeOp (s,f) = P.try (f <$ oper s <*> version)
rangeOps = [ ("<", earlierVersion),
("<=", orEarlierVersion),
(">", laterVersion),
(">=", orLaterVersion),
("^>=", majorBoundVersion),
("==", thisVersion) ]
-- Number token can have many dots in it: SecArgNum (Position 65 15) "7.6.1"
ident = tokenPrim $ \t -> case t of
SecArgName _ s -> Just $ fromUTF8BS s
SecArgNum _ s -> Just $ fromUTF8BS s
_ -> Nothing
boolLiteral' = tokenPrim $ \t -> case t of
SecArgName _ s
| s == "True" -> Just True
| s == "true" -> Just True
| s == "False" -> Just False
| s == "false" -> Just False
_ -> Nothing
string s = tokenPrim $ \t -> case t of
SecArgName _ s' | s == s' -> Just ()
_ -> Nothing
oper o = tokenPrim $ \t -> case t of
SecArgOther _ o' | o == o' -> Just ()
_ -> Nothing
parens = P.between (oper "(") (oper ")")
tokenPrim = P.tokenPrim prettySectionArg updatePosition
-- TODO: check where the errors are reported
updatePosition x _ _ = x
prettySectionArg = show
fromParsec :: Parsec a => Parser a
fromParsec = fromParsec' parsec
fromParsec' p = do
i <- ident
case P.runParser (p <* P.eof) [] "<ident>" i of
Right x -> pure x
-- TODO: better lifting or errors / warnings
Left err -> fail $ show err
| mydaum/cabal | Cabal/Distribution/Parsec/ConfVar.hs | bsd-3-clause | 5,766 | 0 | 18 | 1,885 | 1,453 | 753 | 700 | 106 | 7 |
module One where
import qualified Two
resource = "This is the sub-plugin of (" ++ Two.resource ++ ")"
| abuiles/turbinado-blog | tmp/dependencies/hs-plugins-1.3.1/testsuite/hier/hier3/One.hs | bsd-3-clause | 106 | 0 | 7 | 22 | 23 | 14 | 9 | 3 | 1 |
{-# LANGUAGE PolyKinds #-}
module T16456 where
data T p = MkT
foo :: T Int
foo = _
| sdiehl/ghc | testsuite/tests/typecheck/should_fail/T16456.hs | bsd-3-clause | 85 | 0 | 5 | 21 | 26 | 16 | 10 | 5 | 1 |
{-
(c) The University of Glasgow 2006
(c) The GRASP/AQUA Project, Glasgow University, 1992-1998
\section[NameEnv]{@NameEnv@: name environments}
-}
{-# LANGUAGE CPP #-}
module NameEnv (
-- * Var, Id and TyVar environments (maps)
NameEnv,
-- ** Manipulating these environments
mkNameEnv,
emptyNameEnv, isEmptyNameEnv,
unitNameEnv, nameEnvElts, nameEnvUniqueElts,
extendNameEnv_C, extendNameEnv_Acc, extendNameEnv,
extendNameEnvList, extendNameEnvList_C,
foldNameEnv, filterNameEnv, anyNameEnv,
plusNameEnv, plusNameEnv_C, alterNameEnv,
lookupNameEnv, lookupNameEnv_NF, delFromNameEnv, delListFromNameEnv,
elemNameEnv, mapNameEnv, disjointNameEnv,
-- ** Dependency analysis
depAnal
) where
#include "HsVersions.h"
import Digraph
import Name
import Unique
import UniqFM
import Maybes
{-
************************************************************************
* *
\subsection{Name environment}
* *
************************************************************************
-}
depAnal :: (node -> [Name]) -- Defs
-> (node -> [Name]) -- Uses
-> [node]
-> [SCC node]
-- Perform dependency analysis on a group of definitions,
-- where each definition may define more than one Name
--
-- The get_defs and get_uses functions are called only once per node
depAnal get_defs get_uses nodes
= stronglyConnCompFromEdgedVertices (map mk_node keyed_nodes)
where
keyed_nodes = nodes `zip` [(1::Int)..]
mk_node (node, key) = (node, key, mapMaybe (lookupNameEnv key_map) (get_uses node))
key_map :: NameEnv Int -- Maps a Name to the key of the decl that defines it
key_map = mkNameEnv [(name,key) | (node, key) <- keyed_nodes, name <- get_defs node]
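-- As an illustration (not part of the original comment), given declarations
--
-- > f = ... g ...   -- defines f, uses g
-- > g = ... f ...   -- defines g, uses f
-- > h = ... f ...   -- defines h, uses f
--
-- 'depAnal' returns something like @[CyclicSCC [f_decl, g_decl], AcyclicSCC h_decl]@:
-- the mutually recursive group first, then the declaration that depends on it.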
{-
************************************************************************
* *
\subsection{Name environment}
* *
************************************************************************
-}
type NameEnv a = UniqFM a -- Domain is Name
emptyNameEnv :: NameEnv a
isEmptyNameEnv :: NameEnv a -> Bool
mkNameEnv :: [(Name,a)] -> NameEnv a
nameEnvElts :: NameEnv a -> [a]
nameEnvUniqueElts :: NameEnv a -> [(Unique, a)]
alterNameEnv :: (Maybe a-> Maybe a) -> NameEnv a -> Name -> NameEnv a
extendNameEnv_C :: (a->a->a) -> NameEnv a -> Name -> a -> NameEnv a
extendNameEnv_Acc :: (a->b->b) -> (a->b) -> NameEnv b -> Name -> a -> NameEnv b
extendNameEnv :: NameEnv a -> Name -> a -> NameEnv a
plusNameEnv :: NameEnv a -> NameEnv a -> NameEnv a
plusNameEnv_C :: (a->a->a) -> NameEnv a -> NameEnv a -> NameEnv a
extendNameEnvList :: NameEnv a -> [(Name,a)] -> NameEnv a
extendNameEnvList_C :: (a->a->a) -> NameEnv a -> [(Name,a)] -> NameEnv a
delFromNameEnv :: NameEnv a -> Name -> NameEnv a
delListFromNameEnv :: NameEnv a -> [Name] -> NameEnv a
elemNameEnv :: Name -> NameEnv a -> Bool
unitNameEnv :: Name -> a -> NameEnv a
lookupNameEnv :: NameEnv a -> Name -> Maybe a
lookupNameEnv_NF :: NameEnv a -> Name -> a
foldNameEnv :: (a -> b -> b) -> b -> NameEnv a -> b
filterNameEnv :: (elt -> Bool) -> NameEnv elt -> NameEnv elt
anyNameEnv :: (elt -> Bool) -> NameEnv elt -> Bool
mapNameEnv :: (elt1 -> elt2) -> NameEnv elt1 -> NameEnv elt2
disjointNameEnv :: NameEnv a -> NameEnv a -> Bool
nameEnvElts x = eltsUFM x
emptyNameEnv = emptyUFM
isEmptyNameEnv = isNullUFM
unitNameEnv x y = unitUFM x y
extendNameEnv x y z = addToUFM x y z
extendNameEnvList x l = addListToUFM x l
lookupNameEnv x y = lookupUFM x y
alterNameEnv = alterUFM
mkNameEnv l = listToUFM l
elemNameEnv x y = elemUFM x y
foldNameEnv a b c = foldUFM a b c
plusNameEnv x y = plusUFM x y
plusNameEnv_C f x y = plusUFM_C f x y
extendNameEnv_C f x y z = addToUFM_C f x y z
mapNameEnv f x = mapUFM f x
nameEnvUniqueElts x = ufmToList x
extendNameEnv_Acc x y z a b = addToUFM_Acc x y z a b
extendNameEnvList_C x y z = addListToUFM_C x y z
delFromNameEnv x y = delFromUFM x y
delListFromNameEnv x y = delListFromUFM x y
filterNameEnv x y = filterUFM x y
anyNameEnv f x = foldUFM ((||) . f) False x
disjointNameEnv x y = isNullUFM (intersectUFM x y)
lookupNameEnv_NF env n = expectJust "lookupNameEnv_NF" (lookupNameEnv env n)
| tjakway/ghcjvm | compiler/basicTypes/NameEnv.hs | bsd-3-clause | 4,708 | 0 | 11 | 1,328 | 1,271 | 666 | 605 | 77 | 1 |
<?xml version="1.0" encoding="UTF-8"?><!DOCTYPE helpset PUBLIC "-//Sun Microsystems Inc.//DTD JavaHelp HelpSet Version 2.0//EN" "http://java.sun.com/products/javahelp/helpset_2_0.dtd">
<helpset version="2.0" xml:lang="zh-CN">
<title>Groovy Support</title>
<maps>
<homeID>top</homeID>
<mapref location="map.jhm"/>
</maps>
<view>
<name>TOC</name>
<label>Contents</label>
<type>org.zaproxy.zap.extension.help.ZapTocView</type>
<data>toc.xml</data>
</view>
<view>
<name>Index</name>
<label>Index</label>
<type>javax.help.IndexView</type>
<data>index.xml</data>
</view>
<view>
<name>Search</name>
<label>Search</label>
<type>javax.help.SearchView</type>
<data engine="com.sun.java.help.search.DefaultSearchEngine">
JavaHelpSearch
</data>
</view>
<view>
<name>Favorites</name>
<label>Favorites</label>
<type>javax.help.FavoritesView</type>
</view>
</helpset> | thc202/zap-extensions | addOns/groovy/src/main/javahelp/org/zaproxy/zap/extension/groovy/resources/help_zh_CN/helpset_zh_CN.hs | apache-2.0 | 959 | 82 | 52 | 156 | 390 | 206 | 184 | -1 | -1 |
module MultiParamIn3 where
fromMaybe :: (Maybe a) -> a
fromMaybe (Just x) = x
fromMaybe Nothing = error "fromMaybe: Nothing"
f :: (Maybe Int) -> [Int] -> (Either Int b) -> Int
f Nothing y@[] (Left a) = (hd y) + a
f Nothing y@(b_1 : b_2) (Left a) = (hd y) + a
f (Just x) y@[] (Right b) = (hd y) + (fromMaybe x)
f (Just x) y@(b_1 : b_2) (Right b)
= (hd y) + (fromMaybe x)
f Nothing y (Left a) = (hd y) + a
f (Just x) y (Right b) = (hd y) + (fromMaybe x)
hd x = head x
tl x = tail x
| kmate/HaRe | old/testing/introPattern/MultiParamIn3AST.hs | bsd-3-clause | 495 | 0 | 9 | 127 | 334 | 171 | 163 | 14 | 1 |
{-# LANGUAGE Trustworthy #-}
{-# LANGUAGE NoImplicitPrelude, ExistentialQuantification #-}
-----------------------------------------------------------------------------
-- |
-- Module : Control.Exception
-- Copyright : (c) The University of Glasgow 2001
-- License : BSD-style (see the file libraries/base/LICENSE)
--
-- Maintainer : libraries@haskell.org
-- Stability : experimental
-- Portability : non-portable (extended exceptions)
--
-- This module provides support for raising and catching both built-in
-- and user-defined exceptions.
--
-- In addition to exceptions thrown by 'IO' operations, exceptions may
-- be thrown by pure code (imprecise exceptions) or by external events
-- (asynchronous exceptions), but may only be caught in the 'IO' monad.
-- For more details, see:
--
-- * /A semantics for imprecise exceptions/, by Simon Peyton Jones,
-- Alastair Reid, Tony Hoare, Simon Marlow, Fergus Henderson,
-- in /PLDI'99/.
--
-- * /Asynchronous exceptions in Haskell/, by Simon Marlow, Simon Peyton
-- Jones, Andy Moran and John Reppy, in /PLDI'01/.
--
-- * /An Extensible Dynamically-Typed Hierarchy of Exceptions/,
-- by Simon Marlow, in /Haskell '06/.
--
-----------------------------------------------------------------------------
module Control.Exception (
-- * The Exception type
SomeException(..),
Exception(..), -- class
IOException, -- instance Eq, Ord, Show, Typeable, Exception
ArithException(..), -- instance Eq, Ord, Show, Typeable, Exception
ArrayException(..), -- instance Eq, Ord, Show, Typeable, Exception
AssertionFailed(..),
SomeAsyncException(..),
AsyncException(..), -- instance Eq, Ord, Show, Typeable, Exception
asyncExceptionToException, asyncExceptionFromException,
NonTermination(..),
NestedAtomically(..),
BlockedIndefinitelyOnMVar(..),
BlockedIndefinitelyOnSTM(..),
AllocationLimitExceeded(..),
CompactionFailed(..),
Deadlock(..),
NoMethodError(..),
PatternMatchFail(..),
RecConError(..),
RecSelError(..),
RecUpdError(..),
ErrorCall(..),
TypeError(..),
-- * Throwing exceptions
throw,
throwIO,
ioError,
throwTo,
-- * Catching Exceptions
-- $catching
-- ** Catching all exceptions
-- $catchall
-- ** The @catch@ functions
catch,
catches, Handler(..),
catchJust,
-- ** The @handle@ functions
handle,
handleJust,
-- ** The @try@ functions
try,
tryJust,
-- ** The @evaluate@ function
evaluate,
-- ** The @mapException@ function
mapException,
-- * Asynchronous Exceptions
-- $async
-- ** Asynchronous exception control
-- |The following functions allow a thread to control delivery of
-- asynchronous exceptions during a critical region.
mask,
mask_,
uninterruptibleMask,
uninterruptibleMask_,
MaskingState(..),
getMaskingState,
interruptible,
allowInterrupt,
-- *** Applying @mask@ to an exception handler
-- $block_handler
-- *** Interruptible operations
-- $interruptible
-- * Assertions
assert,
-- * Utilities
bracket,
bracket_,
bracketOnError,
finally,
onException,
) where
import Control.Exception.Base
import GHC.Base
import GHC.IO (interruptible)
-- | You need this when using 'catches'.
data Handler a = forall e . Exception e => Handler (e -> IO a)
-- | @since 4.6.0.0
instance Functor Handler where
fmap f (Handler h) = Handler (fmap f . h)
{- |
Sometimes you want to catch two different sorts of exception. You could
do something like
> f = expr `catch` \ (ex :: ArithException) -> handleArith ex
> `catch` \ (ex :: IOException) -> handleIO ex
However, there are a couple of problems with this approach. The first is
that having two exception handlers is inefficient. However, the more
serious issue is that the second exception handler will catch exceptions
in the first, e.g. in the example above, if @handleArith@ throws an
@IOException@ then the second exception handler will catch it.
Instead, we provide a function 'catches', which would be used thus:
> f = expr `catches` [Handler (\ (ex :: ArithException) -> handleArith ex),
> Handler (\ (ex :: IOException) -> handleIO ex)]
-}
catches :: IO a -> [Handler a] -> IO a
catches io handlers = io `catch` catchesHandler handlers
catchesHandler :: [Handler a] -> SomeException -> IO a
catchesHandler handlers e = foldr tryHandler (throw e) handlers
where tryHandler (Handler handler) res
= case fromException e of
Just e' -> handler e'
Nothing -> res
-- -----------------------------------------------------------------------------
-- Catching exceptions
{- $catching
There are several functions for catching and examining
exceptions; all of them may only be used from within the
'IO' monad.
Here's a rule of thumb for deciding which catch-style function to
use:
* If you want to do some cleanup in the event that an exception
is raised, use 'finally', 'bracket' or 'onException'.
* To recover after an exception and do something else, the best
choice is to use one of the 'try' family.
* ... unless you are recovering from an asynchronous exception, in which
case use 'catch' or 'catchJust'.
The difference between using 'try' and 'catch' for recovery is that in
'catch' the handler is inside an implicit 'mask' (see \"Asynchronous
Exceptions\") which is important when catching asynchronous
exceptions, but when catching other kinds of exception it is
unnecessary. Furthermore it is possible to accidentally stay inside
the implicit 'mask' by tail-calling rather than returning from the
handler, which is why we recommend using 'try' rather than 'catch' for
ordinary exception recovery.
A typical use of 'tryJust' for recovery looks like this:
> do r <- tryJust (guard . isDoesNotExistError) $ getEnv "HOME"
> case r of
> Left e -> ...
> Right home -> ...
-}
-- -----------------------------------------------------------------------------
-- Asynchronous exceptions
-- | When invoked inside 'mask', this function allows a masked
-- asynchronous exception to be raised, if one exists. It is
-- equivalent to performing an interruptible operation (see
-- #interruptible), but does not involve any actual blocking.
--
-- When called outside 'mask', or inside 'uninterruptibleMask', this
-- function has no effect.
--
-- @since 4.4.0.0
allowInterrupt :: IO ()
allowInterrupt = interruptible $ return ()
{- $async
#AsynchronousExceptions# Asynchronous exceptions are so-called because they arise due to
external influences, and can be raised at any point during execution.
'StackOverflow' and 'HeapOverflow' are two examples of
system-generated asynchronous exceptions.
The primary source of asynchronous exceptions, however, is
'throwTo':
> throwTo :: ThreadId -> Exception -> IO ()
'throwTo' (also 'Control.Concurrent.killThread') allows one
running thread to raise an arbitrary exception in another thread. The
exception is therefore asynchronous with respect to the target thread,
which could be doing anything at the time it receives the exception.
Great care should be taken with asynchronous exceptions; it is all too
easy to introduce race conditions by the overzealous use of
'throwTo'.
-}
{- $block_handler
There\'s an implied 'mask' around every exception handler in a call
to one of the 'catch' family of functions. This is because that is
what you want most of the time - it eliminates a common race condition
in starting an exception handler, because there may be no exception
handler on the stack to handle another exception if one arrives
immediately. If asynchronous exceptions are masked on entering the
handler, though, we have time to install a new exception handler
before being interrupted. If this weren\'t the default, one would have
to write something like
> mask $ \restore ->
> catch (restore (...))
> (\e -> handler)
If you need to unmask asynchronous exceptions again in the exception
handler, 'restore' can be used there too.
Note that 'try' and friends /do not/ have a similar default, because
there is no exception handler in this case. Don't use 'try' for
recovering from an asynchronous exception.
-}
{- $interruptible
#interruptible#
Some operations are /interruptible/, which means that they can receive
asynchronous exceptions even in the scope of a 'mask'. Any function
which may itself block is defined as interruptible; this includes
'Control.Concurrent.MVar.takeMVar'
(but not 'Control.Concurrent.MVar.tryTakeMVar'),
and most operations which perform
some I\/O with the outside world. The reason for having
interruptible operations is so that we can write things like
> mask $ \restore -> do
> a <- takeMVar m
> catch (restore (...))
> (\e -> ...)
if the 'Control.Concurrent.MVar.takeMVar' was not interruptible,
then this particular
combination could lead to deadlock, because the thread itself would be
blocked in a state where it can\'t receive any asynchronous exceptions.
With 'Control.Concurrent.MVar.takeMVar' interruptible, however, we can be
safe in the knowledge that the thread can receive exceptions right up
until the point when the 'Control.Concurrent.MVar.takeMVar' succeeds.
Similar arguments apply for other interruptible operations like
'System.IO.openFile'.
It is useful to think of 'mask' not as a way to completely prevent
asynchronous exceptions, but as a way to switch from asynchronous mode
to polling mode. The main difficulty with asynchronous
exceptions is that they normally can occur anywhere, but within a
'mask' an asynchronous exception is only raised by operations that are
interruptible (or call other interruptible operations). In many cases
these operations may themselves raise exceptions, such as I\/O errors,
so the caller will usually be prepared to handle exceptions arising from the
operation anyway. To perform an explicit poll for asynchronous exceptions
inside 'mask', use 'allowInterrupt'.
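As a small illustrative sketch (not part of the original documentation;
@chunks@ and @processChunk@ are hypothetical, and 'Control.Monad.forM_' is
assumed to be in scope):

> mask_ $ forM_ chunks $ \c -> do
>             processChunk c
>             allowInterrupt   -- poll for a pending asynchronous exception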
Sometimes it is too onerous to handle exceptions in the middle of a
critical piece of stateful code. There are three ways to handle this
kind of situation:
* Use STM. Since a transaction is always either completely executed
or not at all, transactions are a good way to maintain invariants
over state in the presence of asynchronous (and indeed synchronous)
exceptions.
* Use 'mask', and avoid interruptible operations. In order to do
this, we have to know which operations are interruptible. It is
impossible to know for any given library function whether it might
invoke an interruptible operation internally; so instead we give a
list of guaranteed-not-to-be-interruptible operations below.
* Use 'uninterruptibleMask'. This is generally not recommended,
unless you can guarantee that any interruptible operations invoked
during the scope of 'uninterruptibleMask' can only ever block for
a short time. Otherwise, 'uninterruptibleMask' is a good way to
make your program deadlock and be unresponsive to user interrupts.
The following operations are guaranteed not to be interruptible:
* operations on 'IORef' from "Data.IORef"
* STM transactions that do not use 'retry'
* everything from the @Foreign@ modules
* everything from @Control.Exception@ except for 'throwTo'
* @tryTakeMVar@, @tryPutMVar@, @isEmptyMVar@
* @takeMVar@ if the @MVar@ is definitely full, and conversely @putMVar@ if the @MVar@ is definitely empty
* @newEmptyMVar@, @newMVar@
* @forkIO@, @forkIOUnmasked@, @myThreadId@
-}
{- $catchall
It is possible to catch all exceptions, by using the type 'SomeException':
> catch f (\e -> ... (e :: SomeException) ...)
HOWEVER, this is normally not what you want to do!
For example, suppose you want to read a file, but if it doesn't exist
then continue as if it contained \"\". You might be tempted to just
catch all exceptions and return \"\" in the handler. However, this has
all sorts of undesirable consequences. For example, if the user
presses control-C at just the right moment then the 'UserInterrupt'
exception will be caught, and the program will continue running under
the belief that the file contains \"\". Similarly, if another thread
tries to kill the thread reading the file then the 'ThreadKilled'
exception will be ignored.
Instead, you should only catch exactly the exceptions that you really
want. In this case, this would likely be more specific than even
\"any IO exception\"; a permissions error would likely also want to be
handled differently. Instead, you would probably want something like:
> e <- tryJust (guard . isDoesNotExistError) (readFile f)
> let str = either (const "") id e
There are occasions when you really do need to catch any sort of
exception. However, in most cases this is just so you can do some
cleaning up; you aren't actually interested in the exception itself.
For example, if you open a file then you want to close it again,
whether processing the file executes normally or throws an exception.
However, in these cases you can use functions like 'bracket', 'finally'
and 'onException', which never actually pass you the exception, but
just call the cleanup functions at the appropriate points.
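As an illustrative sketch (not part of the original documentation; it assumes
'openFile', 'hClose', 'ReadMode' and 'Handle' from "System.IO" are in scope):

> withInputFile :: FilePath -> (Handle -> IO r) -> IO r
> withInputFile path use = bracket (openFile path ReadMode) hClose use

Any exception raised by @use@ still propagates to the caller, but the handle
is closed either way.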
But sometimes you really do need to catch any exception, and actually
see what the exception is. One example is at the very top-level of a
program, you may wish to catch any exception, print it to a logfile or
the screen, and then exit gracefully. For these cases, you can use
'catch' (or one of the other exception-catching functions) with the
'SomeException' type.
-}
| olsner/ghc | libraries/base/Control/Exception.hs | bsd-3-clause | 14,104 | 0 | 10 | 2,951 | 582 | 382 | 200 | 69 | 2 |
{-# LANGUAGE CPP #-}
module Examples.Commands where
import Data.List
import Options.Applicative
#if __GLASGOW_HASKELL__ <= 702
import Data.Monoid
(<>) :: Monoid a => a -> a -> a
(<>) = mappend
#endif
data Sample
= Hello [String]
| Goodbye
deriving Show
hello :: Parser Sample
hello = Hello <$> many (argument str (metavar "TARGET..."))
sample :: Parser Sample
sample = subparser
( command "hello"
(info hello
(progDesc "Print greeting"))
<> command "goodbye"
(info (pure Goodbye)
(progDesc "Say goodbye"))
)
run :: Sample -> IO ()
run (Hello targets) = putStrLn $ "Hello, " ++ intercalate ", " targets ++ "!"
run Goodbye = putStrLn "Goodbye."
opts :: ParserInfo Sample
opts = info (sample <**> helper) idm
main :: IO ()
main = execParser opts >>= run
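-- Illustrative invocations (assuming the example is built as an executable
-- called @commands@; this block is not part of the original example):
--
-- > $ commands hello world
-- > Hello, world!
-- > $ commands goodbye
-- > Goodbye.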
| begriffs/optparse-applicative | tests/Examples/Commands.hs | bsd-3-clause | 833 | 0 | 12 | 207 | 275 | 145 | 130 | 28 | 1 |
module ShouldCompile where
-- I bet this test is a mistake! From the layout it
-- looks as if 'test' takes three args, the latter two
-- of higher rank. But the parens around these args are
-- missing, so it parses as
-- test :: [a]
-- -> forall a. Ord a
-- => [b]
-- -> forall c. Num c
-- => [c]
-- -> [a]
--
-- But maybe that's what was intended; I'm not sure
-- Anyway it should typecheck!
test :: [a] -- ^ doc1
-> forall b. (Ord b) => [b] {-^ doc2 -}
-> forall c. (Num c) => [c] -- ^ doc3
-> [a]
test xs ys zs = xs
| spacekitteh/smcghc | testsuite/tests/haddock/should_compile_noflag_haddock/haddockC027.hs | bsd-3-clause | 599 | 0 | 12 | 200 | 89 | 58 | 31 | -1 | -1 |
{-# OPTIONS_GHC -fwarn-incomplete-patterns -fwarn-overlapping-patterns #-}
{-# LANGUAGE GADTs #-}
module T2006 where
data Expr a vs where
EPrim :: String -> a -> Expr a vs
EVar :: Expr a (a,vs)
interpret :: Expr a () -> a
interpret (EPrim _ a) = a
-- interpret EVar = error "unreachable"
| olsner/ghc | testsuite/tests/pmcheck/should_compile/T2006.hs | bsd-3-clause | 305 | 0 | 8 | 67 | 79 | 45 | 34 | 8 | 1 |
module ShouldCompile where
x@_ = x
| ryantm/ghc | testsuite/tests/deSugar/should_compile/ds-wildcard.hs | bsd-3-clause | 36 | 0 | 5 | 7 | 13 | 8 | 5 | 2 | 1 |
{-# LANGUAGE Trustworthy #-}
{-# LANGUAGE CPP, MagicHash, UnboxedTuples, NoImplicitPrelude #-}
{-# OPTIONS_HADDOCK hide #-}
-----------------------------------------------------------------------------
-- |
-- Module : GHC.Float.RealFracMethods
-- Copyright : (c) Daniel Fischer 2010
-- License : see libraries/base/LICENSE
--
-- Maintainer : cvs-ghc@haskell.org
-- Stability : internal
-- Portability : non-portable (GHC Extensions)
--
-- Methods for the RealFrac instances for 'Float' and 'Double',
-- with specialised versions for 'Int'.
--
-- Moved to their own module to not bloat GHC.Float further.
--
-----------------------------------------------------------------------------
#include "MachDeps.h"
module GHC.Float.RealFracMethods
( -- * Double methods
-- ** Integer results
properFractionDoubleInteger
, truncateDoubleInteger
, floorDoubleInteger
, ceilingDoubleInteger
, roundDoubleInteger
-- ** Int results
, properFractionDoubleInt
, floorDoubleInt
, ceilingDoubleInt
, roundDoubleInt
-- * Double/Int conversions, wrapped primops
, double2Int
, int2Double
-- * Float methods
-- ** Integer results
, properFractionFloatInteger
, truncateFloatInteger
, floorFloatInteger
, ceilingFloatInteger
, roundFloatInteger
-- ** Int results
, properFractionFloatInt
, floorFloatInt
, ceilingFloatInt
, roundFloatInt
-- * Float/Int conversions, wrapped primops
, float2Int
, int2Float
) where
import GHC.Integer
import GHC.Base
import GHC.Num ()
#if WORD_SIZE_IN_BITS < 64
import GHC.IntWord64
#define TO64 integerToInt64
#define FROM64 int64ToInteger
#define MINUS64 minusInt64#
#define NEGATE64 negateInt64#
#else
#define TO64 integerToInt
#define FROM64 smallInteger
#define MINUS64 ( -# )
#define NEGATE64 negateInt#
uncheckedIShiftRA64# :: Int# -> Int# -> Int#
uncheckedIShiftRA64# = uncheckedIShiftRA#
uncheckedIShiftL64# :: Int# -> Int# -> Int#
uncheckedIShiftL64# = uncheckedIShiftL#
#endif
default ()
------------------------------------------------------------------------------
-- Float Methods --
------------------------------------------------------------------------------
-- Special Functions for Int, nice, easy and fast.
-- They should be small enough to be inlined automatically.
-- We have to test for ±0.0 to avoid returning -0.0 in the second
-- component of the pair. Unfortunately the branching costs a lot
-- of performance.
properFractionFloatInt :: Float -> (Int, Float)
properFractionFloatInt (F# x) =
if isTrue# (x `eqFloat#` 0.0#)
then (I# 0#, F# 0.0#)
else case float2Int# x of
n -> (I# n, F# (x `minusFloat#` int2Float# n))
-- truncateFloatInt = float2Int
floorFloatInt :: Float -> Int
floorFloatInt (F# x) =
case float2Int# x of
n | isTrue# (x `ltFloat#` int2Float# n) -> I# (n -# 1#)
| otherwise -> I# n
ceilingFloatInt :: Float -> Int
ceilingFloatInt (F# x) =
case float2Int# x of
n | isTrue# (int2Float# n `ltFloat#` x) -> I# (n +# 1#)
| otherwise -> I# n
roundFloatInt :: Float -> Int
roundFloatInt x = float2Int (c_rintFloat x)
-- Functions with Integer results
-- With the new code generator in GHC 7, the explicit bit-fiddling is
-- slower than the old code for values of small modulus, but when the
-- 'Int' range is left, the bit-fiddling quickly wins big, so we use that.
-- If the methods are called on smallish values, hopefully people go
-- through Int and not larger types.
-- Note: For negative exponents, we must check the validity of the shift
-- distance for the right shifts of the mantissa.
{-# INLINE properFractionFloatInteger #-}
properFractionFloatInteger :: Float -> (Integer, Float)
properFractionFloatInteger v@(F# x) =
case decodeFloat_Int# x of
(# m, e #)
| isTrue# (e <# 0#) ->
case negateInt# e of
s | isTrue# (s ># 23#) -> (0, v)
| isTrue# (m <# 0#) ->
case negateInt# (negateInt# m `uncheckedIShiftRA#` s) of
k -> (smallInteger k,
case m -# (k `uncheckedIShiftL#` s) of
r -> F# (encodeFloatInteger (smallInteger r) e))
| otherwise ->
case m `uncheckedIShiftRL#` s of
k -> (smallInteger k,
case m -# (k `uncheckedIShiftL#` s) of
r -> F# (encodeFloatInteger (smallInteger r) e))
| otherwise -> (shiftLInteger (smallInteger m) e, F# 0.0#)
{-# INLINE truncateFloatInteger #-}
truncateFloatInteger :: Float -> Integer
truncateFloatInteger x =
case properFractionFloatInteger x of
(n, _) -> n
-- floor is easier for negative numbers than truncate, so this gets its
-- own implementation, it's a little faster.
{-# INLINE floorFloatInteger #-}
floorFloatInteger :: Float -> Integer
floorFloatInteger (F# x) =
case decodeFloat_Int# x of
(# m, e #)
| isTrue# (e <# 0#) ->
case negateInt# e of
s | isTrue# (s ># 23#) -> if isTrue# (m <# 0#) then (-1) else 0
| otherwise -> smallInteger (m `uncheckedIShiftRA#` s)
| otherwise -> shiftLInteger (smallInteger m) e
-- ceiling x = -floor (-x)
-- If giving this its own implementation is faster at all,
-- it's only marginally so, hence we keep it short.
{-# INLINE ceilingFloatInteger #-}
ceilingFloatInteger :: Float -> Integer
ceilingFloatInteger (F# x) =
negateInteger (floorFloatInteger (F# (negateFloat# x)))
{-# INLINE roundFloatInteger #-}
roundFloatInteger :: Float -> Integer
roundFloatInteger x = float2Integer (c_rintFloat x)
------------------------------------------------------------------------------
-- Double Methods --
------------------------------------------------------------------------------
-- Special Functions for Int, nice, easy and fast.
-- They should be small enough to be inlined automatically.
-- We have to test for ±0.0 to avoid returning -0.0 in the second
-- component of the pair. Unfortunately the branching costs a lot
-- of performance.
properFractionDoubleInt :: Double -> (Int, Double)
properFractionDoubleInt (D# x) =
if isTrue# (x ==## 0.0##)
then (I# 0#, D# 0.0##)
else case double2Int# x of
n -> (I# n, D# (x -## int2Double# n))
-- truncateDoubleInt = double2Int
floorDoubleInt :: Double -> Int
floorDoubleInt (D# x) =
case double2Int# x of
n | isTrue# (x <## int2Double# n) -> I# (n -# 1#)
| otherwise -> I# n
ceilingDoubleInt :: Double -> Int
ceilingDoubleInt (D# x) =
case double2Int# x of
n | isTrue# (int2Double# n <## x) -> I# (n +# 1#)
| otherwise -> I# n
roundDoubleInt :: Double -> Int
roundDoubleInt x = double2Int (c_rintDouble x)
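-- A few illustrative evaluations (not part of the original module; the last
-- one assumes the default round-to-nearest-even FP rounding mode for rint):
--
-- > properFractionDoubleInt 3.75 == (3, 0.75)
-- > floorDoubleInt (-3.25)       == -4
-- > ceilingDoubleInt (-3.25)     == -3
-- > roundDoubleInt 2.5           == 2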
-- Functions with Integer results
-- The new Code generator isn't quite as good for the old 'Double' code
-- as for the 'Float' code, so for 'Double' the bit-fiddling also wins
-- when the values have small modulus.
-- When the exponent is negative, all mantissae have less than 64 bits
-- and the right shifting of sized types is much faster than that of
-- 'Integer's, especially when we can stay within machine-sized integer types.
-- Note: For negative exponents, we must check the validity of the shift
-- distance for the right shifts of the mantissa.
{-# INLINE properFractionDoubleInteger #-}
properFractionDoubleInteger :: Double -> (Integer, Double)
properFractionDoubleInteger v@(D# x) =
case decodeDoubleInteger x of
(# m, e #)
| isTrue# (e <# 0#) ->
case negateInt# e of
s | isTrue# (s ># 52#) -> (0, v)
| m < 0 ->
case TO64 (negateInteger m) of
n ->
case n `uncheckedIShiftRA64#` s of
k ->
(FROM64 (NEGATE64 k),
case MINUS64 n (k `uncheckedIShiftL64#` s) of
r ->
D# (encodeDoubleInteger (FROM64 (NEGATE64 r)) e))
| otherwise ->
case TO64 m of
n ->
case n `uncheckedIShiftRA64#` s of
k -> (FROM64 k,
case MINUS64 n (k `uncheckedIShiftL64#` s) of
r -> D# (encodeDoubleInteger (FROM64 r) e))
| otherwise -> (shiftLInteger m e, D# 0.0##)
{-# INLINE truncateDoubleInteger #-}
truncateDoubleInteger :: Double -> Integer
truncateDoubleInteger x =
case properFractionDoubleInteger x of
(n, _) -> n
-- floor is easier for negative numbers than truncate, so this gets its
-- own implementation, it's a little faster.
{-# INLINE floorDoubleInteger #-}
floorDoubleInteger :: Double -> Integer
floorDoubleInteger (D# x) =
case decodeDoubleInteger x of
(# m, e #)
| isTrue# (e <# 0#) ->
case negateInt# e of
s | isTrue# (s ># 52#) -> if m < 0 then (-1) else 0
| otherwise ->
case TO64 m of
n -> FROM64 (n `uncheckedIShiftRA64#` s)
| otherwise -> shiftLInteger m e
{-# INLINE ceilingDoubleInteger #-}
ceilingDoubleInteger :: Double -> Integer
ceilingDoubleInteger (D# x) =
negateInteger (floorDoubleInteger (D# (negateDouble# x)))
{-# INLINE roundDoubleInteger #-}
roundDoubleInteger :: Double -> Integer
roundDoubleInteger x = double2Integer (c_rintDouble x)
-- Wrappers around double2Int#, int2Double#, float2Int# and int2Float#,
-- we need them here, so we move them from GHC.Float and re-export them
-- explicitly from there.
double2Int :: Double -> Int
double2Int (D# x) = I# (double2Int# x)
int2Double :: Int -> Double
int2Double (I# i) = D# (int2Double# i)
float2Int :: Float -> Int
float2Int (F# x) = I# (float2Int# x)
int2Float :: Int -> Float
int2Float (I# i) = F# (int2Float# i)
-- Quicker conversions from 'Double' and 'Float' to 'Integer',
-- assuming the floating point value is integral.
--
-- Note: Since the value is integral, the exponent can't be less than
-- (-TYP_MANT_DIG), so we need not check the validity of the shift
-- distance for the right shifts here.
{-# INLINE double2Integer #-}
double2Integer :: Double -> Integer
double2Integer (D# x) =
case decodeDoubleInteger x of
(# m, e #)
| isTrue# (e <# 0#) ->
case TO64 m of
n -> FROM64 (n `uncheckedIShiftRA64#` negateInt# e)
| otherwise -> shiftLInteger m e
{-# INLINE float2Integer #-}
float2Integer :: Float -> Integer
float2Integer (F# x) =
case decodeFloat_Int# x of
(# m, e #)
| isTrue# (e <# 0#) -> smallInteger (m `uncheckedIShiftRA#` negateInt# e)
| otherwise -> shiftLInteger (smallInteger m) e
-- Foreign imports, the rounding is done faster in C when the value
-- isn't integral, so we call out for rounding. For values of large
-- modulus, calling out to C is slower than staying in Haskell, but
-- presumably 'round' is mostly called for values with smaller modulus,
-- when calling out to C is a major win.
-- For all other functions, calling out to C gives at most a marginal
-- speedup for values of small modulus and is much slower than staying
-- in Haskell for values of large modulus, so those are done in Haskell.
foreign import ccall unsafe "rintDouble"
c_rintDouble :: Double -> Double
foreign import ccall unsafe "rintFloat"
c_rintFloat :: Float -> Float
| tolysz/prepare-ghcjs | spec-lts8/base/GHC/Float/RealFracMethods.hs | bsd-3-clause | 11,762 | 0 | 29 | 3,141 | 2,251 | 1,197 | 1,054 | 186 | 2 |
-- | Geometric functions concerning angles. If not otherwise specified, all angles are in radians.
module Graphics.Gloss.Geometry.Angle
( degToRad
, radToDeg
, normaliseAngle )
where
-- | Convert degrees to radians
{-# INLINE degToRad #-}
degToRad :: Float -> Float
degToRad d = d * pi / 180
-- | Convert radians to degrees
{-# INLINE radToDeg #-}
radToDeg :: Float -> Float
radToDeg r = r * 180 / pi
-- | Normalise an angle to be between 0 and 2*pi radians
{-# INLINE normaliseAngle #-}
normaliseAngle :: Float -> Float
normaliseAngle f = f - 2 * pi * floor' (f / (2 * pi))
where floor' :: Float -> Float
floor' x = fromIntegral (floor x :: Int)
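-- A quick illustration (not part of the original module; results are
-- approximate 'Float' values):
--
-- > degToRad 180            ~ pi
-- > radToDeg (pi / 2)       ~ 90
-- > normaliseAngle (3 * pi) ~ pi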
| gscalzo/HaskellTheHardWay | gloss-try/gloss-master/gloss/Graphics/Gloss/Geometry/Angle.hs | mit | 665 | 6 | 10 | 139 | 159 | 89 | 70 | 15 | 1 |
module Y2018.M02.D21.Solution where
{--
More P99 fun with lists from:
http://www.ic.unicamp.br/~meidanis/courses/mc336/2009s2/prolog/problemas/
P28 (**) Sorting a list of lists according to length of sublists
a) We suppose that a list (InList) contains elements that are lists themselves.
The objective is to sort the elements of InList according to their length. E.g.
short lists first, longer lists later, or vice versa.
Example:
?- lsort([[a,b,c],[d,e],[f,g,h],[d,e],[i,j,k,l],[m,n],[o]],L).
L = [[o], [d, e], [d, e], [m, n], [a, b, c], [f, g, h], [i, j, k, l]]
--}
import Control.Arrow ((&&&))
import Data.List (sortOn, groupBy)
import Data.Function (on)
lsort :: [[a]] -> [[a]]
lsort = sortOn length -- from Denis Stoyanov @xgrommx
-- create a list of lists by matching the lengths against the ints
-- until the ints or lengths is exhausted:
lengths, ints :: [Int]
lengths = [1,8,7,4,9,5,4,1,2,7,
7,6,4,9,4,5,6,5,5,5,
6,4,6,4,3,6,1,7,3,8,
1,8,2,6,1,1,3,5,8,7,
9,2,6,6,8,7,8,2,5,5]
ints = [33,61,14,26,50,22,87,61,98,24,
8,71,24,89,44,3,42,21,16,62,
8,94,82,57,59,18,50,30,54,47,
64,35,88,36,81,40,48,62,61,81,
31,37,13,99,35,29,33,94,81,29,
39,62,11,18,48,45,33,43,99,49,
17,49,21,44,33,16,33,48,73,35,
81,47,96,54,23,62,6,94,16,44,
95,4,76,14,71,2,4,70,22,13,
9,96,9,55,12,91,63,41,33,37]
-- random numbers provided by random.org
mklistolists :: [Int] -> [a] -> [[a]]
mklistolists [] _ = []
mklistolists _ [] = []
mklistolists (len:gths) list@(_:_) = let (subl,ist) = splitAt len list in
subl:mklistolists gths ist
-- what is your listolists? What is your lfsort listolists?
{--
>>> listies = mklistolists lengths ints
>>> length listies
20
>>> take 4 listies
[[33],[61,14,26,50,22,87,61,98],[24,8,71,24,89,44,3],[42,21,16,62]]
>>> lsort listies
[[33],[61],[37],[81,31],[42,21,16,62],[81,40,48,62],[49,21,44,33],[23,62,6,94],
[47,64,35,88,36],[16,44,95,4,76],[13,9,96,9,55],[12,91,63,41,33],
[45,33,43,99,49,17],[14,71,2,4,70,22],[24,8,71,24,89,44,3],
[37,13,99,35,29,33,94],[81,29,39,62,11,18,48],[61,14,26,50,22,87,61,98],
[8,94,82,57,59,18,50,30,54],[16,33,48,73,35,81,47,96,54]]
--}
{-- BONUS -----------------------------------------------------------------
b) Again, we suppose that a list (InList) contains elements that are lists
themselves. But this time the objective is to sort the elements of InList
according to their length frequency; i.e. in the default, where sorting is done
ascendingly, lists with rare lengths are placed first, others with a more
frequent length come later.
Example:
?- lfsort([[a,b,c],[d,e],[f,g,h],[d,e],[i,j,k,l],[m,n],[o]],L).
L = [[i, j, k, l], [o], [a, b, c], [f, g, h], [d, e], [d, e], [m, n]]
Note that in the above example, the first two lists in the result L have length
4 and 1, both lengths appear just once. The third and forth list have length 3
which appears, there are two list of this length. And finally, the last three
lists have length 2. This is the most frequent length.
--}
lfsort :: [[a]] -> [[a]]
lfsort = concat . lsort . groupBy ((==) `on` length) . lsort
{--
or from Bazzargh @bazzargh
let {g=flip on length;f=sortBy (g compare)} in (concat . f . groupBy (g (==)) . f)
>>> lfsort (mklistolists lengths ints)
[[81,31],[61,14,26,50,22,87,61,98],[45,33,43,99,49,17],[14,71,2,4,70,22],
[8,94,82,57,59,18,50,30,54],[16,33,48,73,35,81,47,96,54],[33],[61],[37],
[24,8,71,24,89,44,3],[37,13,99,35,29,33,94],[81,29,39,62,11,18,48],
[42,21,16,62],[81,40,48,62],[49,21,44,33],[23,62,6,94],[47,64,35,88,36],
[16,44,95,4,76],[13,9,96,9,55],[12,91,63,41,33]]
And there we go!
--}
| geophf/1HaskellADay | exercises/HAD/Y2018/M02/D21/Solution.hs | mit | 3,700 | 0 | 9 | 576 | 715 | 457 | 258 | 29 | 1 |
{-# OPTIONS_GHC -fno-warn-type-defaults -fno-warn-orphans -fno-warn-missing-fields #-}
{-# LANGUAGE StandaloneDeriving #-}
{-# LANGUAGE DeriveDataTypeable #-}
module Yage.Prelude
( module ClassyPrelude
, io, pass
, traceShowS, traceShowS', ioTime, printIOTime, traceWith, traceStack
, globFp
-- list functions
, zipWithTF
, offset0
, eqType, descending
, (<?), (?>)
, isLeft, isRight
, Identity()
, qStr
, module Text.Show
, module FilePath
, module DeepSeq
, module Default
, module Prelude
, module Proxy
, module Printf
) where
import qualified Prelude as Prelude
import ClassyPrelude
import Text.Printf as Printf (printf)
import Data.Typeable
import Data.Data
import Data.Proxy as Proxy
import Data.Traversable as Trav
import Data.Foldable as Fold
import Data.Functor.Identity ()
import Data.Default as Default
import Control.DeepSeq as DeepSeq
import Control.DeepSeq.Generics as DeepSeq
import Filesystem.Path.CurrentOS as FilePath (decodeString,
encodeString)
import Foreign.Ptr
import System.CPUTime
import System.FilePath.Glob
import Text.Printf
import Text.Show
import Language.Haskell.TH
import Language.Haskell.TH.Quote
import Debug.Trace (traceStack)
io :: (MonadIO m) => IO a -> m a
io = liftIO
pass :: IO ()
pass = return ()
traceShowS :: Show a => ShowS -> a -> a
traceShowS sf a = traceShow (sf $ show a) a
traceShowS' :: Show a => String -> a -> a
traceShowS' msg = traceShowS (msg Prelude.++)
traceWith :: Show b => (a -> b) -> a -> a
traceWith f a = traceShow (f a) a
-- | time a monadic action in seconds; the monadic value is strictly evaluated
ioTime :: MonadIO m => m a -> m (a, Double)
ioTime action = do
start <- io $! getCPUTime
v <- action
end <- v `seq` io $! getCPUTime
let diff = (fromIntegral (end - start)) / (10^(12::Int))
return $! (v, diff)
printIOTime :: MonadIO m => m a -> m a
printIOTime f = do
(res, t) <- ioTime f
_ <- io $! printf "Computation time: %0.5f sec\n" t
return res
-- stolen from: Graphics-GLUtil-BufferObjects
-- |A zero-offset 'Ptr'.
offset0 :: Ptr a
offset0 = offsetPtr 0
-- |Produce a 'Ptr' value to be used as an offset of the given number
-- of bytes.
offsetPtr :: Int -> Ptr a
offsetPtr = wordPtrToPtr . fromIntegral
eqType :: (Typeable r, Typeable t) => Proxy r -> Proxy t -> Bool
eqType r t = (typeOf r) == (typeOf t)
(?>) :: a -> Maybe a -> a
l ?> mr = maybe l id mr
(<?) :: Maybe a -> a -> a
(<?) = flip (?>)
descending :: (a -> a -> Ordering) -> (a -> a -> Ordering)
descending cmp = flip cmp
isLeft :: Either a b -> Bool
isLeft (Left _) = True
isLeft _ = False
isRight :: Either a b -> Bool
isRight (Right _)= True
isRight _ = False
zipWithTF :: (Traversable t, Foldable f) => (a -> b -> c) -> t a -> f b -> t c
zipWithTF g t f = snd (Trav.mapAccumL map_one (Fold.toList f) t)
where map_one (x:xs) y = (xs, g y x)
map_one _ _ = error "Yage.Prelude.zipWithTF"
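-- For instance (illustrative, not part of the original module):
--
-- > zipWithTF (,) (Just 'a') [1, 2, 3]   == Just ('a', 1)
-- > zipWithTF (+) [1, 2, 3] [10, 20, 30] == [11, 22, 33]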
qStr :: QuasiQuoter
qStr = QuasiQuoter { quoteExp = stringE }
-- | utility function to glob with a 'Filesystem.Path.FilePath'
--
-- > globFp ( "foo" </> "bar" </> "*<->.jpg" )
-- > ["foo/bar/image01.jpg", "foo/bar/image02.jpg"]
globFp :: MonadIO m => FilePath -> m [FilePath]
globFp = io . fmap (map fpFromString) . glob . fpToString
{-# INLINE globFp #-}
deriving instance Data Zero
deriving instance Typeable Zero
deriving instance Data a => Data (Succ a)
deriving instance Typeable Succ
| MaxDaten/yage-contrib | src/Yage/Prelude.hs | mit | 3,837 | 0 | 14 | 1,100 | 1,156 | 637 | 519 | 100 | 2 |
{-# LANGUAGE TemplateHaskell #-}
module PeaCoq where
import Control.Lens (makeLenses)
import Data.IORef (IORef)
import Data.IntMap (IntMap)
import Snap (Snaplet)
import Snap.Snaplet.Session (SessionManager)
import System.IO
import System.Process (ProcessHandle)
type Handles = (Handle, Handle, Handle, ProcessHandle)
data SessionState
= SessionState
Bool -- True while the session is alive
Handles -- I/O handles
-- Global state must be used in thread-safe way
data GlobalState
= GlobalState
{ gNextSession :: Int -- number to assign to the next session
, gActiveSessions :: IntMap SessionState
, gCoqtop :: String -- the command to use to run coqtop
}
type PeaCoqGlobRef = (IORef GlobalState)
type PeaCoqHash = String
type PeaCoqSession = SessionManager
-- Each thread gets a separate copy of this, fields must be read-only
data PeaCoq
= PeaCoq
{ _lGlobRef :: Snaplet PeaCoqGlobRef
, _lHash :: Snaplet PeaCoqHash
, _lSession :: Snaplet PeaCoqSession
}
-- Fields are lenses to separate concerns, use "Handler PeaCoq <Lens> a"
makeLenses ''PeaCoq
| Ptival/peacoq-server | lib/PeaCoq.hs | mit | 1,192 | 0 | 9 | 299 | 203 | 124 | 79 | 28 | 0 |
{-# LANGUAGE CPP #-}
#if __GLASGOW_HASKELL__ >= 702
{-# LANGUAGE Trustworthy #-}
#endif
-- | Reexports "Test.Hspec" from a @Trustworthy@ module.
module TestHspecTrustworthy (module Test.Hspec) where
import Test.Hspec
| haskell-compat/base-compat | base-compat-batteries/test/TestHspecTrustworthy.hs | mit | 218 | 0 | 5 | 29 | 22 | 16 | 6 | 3 | 0 |
{-#LANGUAGE ScopedTypeVariables #-}
{-#LANGUAGE DataKinds #-}
{-#LANGUAGE DeriveGeneric #-}
{-#LANGUAGE DeriveAnyClass #-}
{-#LANGUAGE FlexibleContexts #-}
module Foreign.Storable.Generic.Internal.GStorableSpec where
-- Test tools
import Test.Hspec
import Test.QuickCheck
import GenericType
-- Tested modules
import Foreign.Storable.Generic.Internal
-- Additional data
import Foreign.Storable.Generic -- overlapping Storable
import Foreign.Storable.Generic.Instances
import Data.Int
import Data.Word
import GHC.Generics
import Foreign.Ptr (Ptr, plusPtr)
import Foreign.Marshal.Alloc (malloc, mallocBytes, free)
import Foreign.Marshal.Array (peekArray,pokeArray)
data TestData = TestData Int Int64 Int8 Int8
deriving (Show, Generic, GStorable, Eq)
instance Arbitrary TestData where
arbitrary = TestData <$> arbitrary <*> arbitrary <*> arbitrary <*> arbitrary
data TestData2 = TestData2 Int8 TestData Int32 Int64
deriving (Show, Generic, GStorable, Eq)
instance Arbitrary TestData2 where
arbitrary = TestData2 <$> arbitrary <*> arbitrary <*> arbitrary <*> arbitrary
data TestData3 = TestData3 Int64 TestData2 Int16 TestData Int8
deriving (Show, Generic, GStorable, Eq)
instance Arbitrary TestData3 where
arbitrary = TestData3 <$> arbitrary <*> arbitrary <*> arbitrary <*> arbitrary <*> arbitrary
sizeEquality a = do
gsizeOf a `shouldBe` internalSizeOf (from a)
alignmentEquality a = do
galignment a `shouldBe` internalAlignment (from a)
pokeEquality a = do
let size = gsizeOf a
off <- generate $ suchThat arbitrary (>=0)
ptr <- mallocBytes (off + size)
-- First poke
gpokeByteOff ptr off a
bytes1 <- peekArray (off+size) ptr :: IO [Word8]
internalPokeByteOff ptr off (from a)
bytes2 <- peekArray (off+size) ptr :: IO [Word8]
free ptr
bytes1 `shouldBe` bytes2
peekEquality (a :: t) = do
let size = gsizeOf a
off <- generate $ suchThat arbitrary (>=0)
ptr <- mallocBytes (off + size)
bytes <- generate $ ok_vector (off+size) :: IO [Word8]
-- Save random stuff to memory
pokeArray ptr bytes
-- Take a peek
v1 <- gpeekByteOff ptr off :: IO t
v2 <- internalPeekByteOff ptr off :: IO (Rep t p)
free ptr
v1 `shouldBe` to v2
peekAndPoke (a :: t) = do
ptr <- malloc :: IO (Ptr t)
gpokeByteOff ptr 0 a
(gpeekByteOff ptr 0) `shouldReturn` a
spec :: Spec
spec = do
describe "gsizeOf" $ do
it "is equal to: internalSizeOf (from a)" $ property $ do
test1 <- generate $ arbitrary :: IO TestData
test2 <- generate $ arbitrary :: IO TestData2
test3 <- generate $ arbitrary :: IO TestData3
sizeEquality test1
sizeEquality test2
sizeEquality test3
describe "galignment" $ do
it "is equal to: internalAlignment (from a)" $ property $ do
test1 <- generate $ arbitrary :: IO TestData
test2 <- generate $ arbitrary :: IO TestData2
test3 <- generate $ arbitrary :: IO TestData3
alignmentEquality test1
alignmentEquality test2
alignmentEquality test3
describe "gpokeByteOff" $ do
it "is equal to: internalPokeByteOff ptr off (from a)" $ property $ do
test1 <- generate $ arbitrary :: IO TestData
test2 <- generate $ arbitrary :: IO TestData2
test3 <- generate $ arbitrary :: IO TestData3
pokeEquality test1
pokeEquality test2
pokeEquality test3
describe "gpeekByteOff" $ do
it "is equal to: to <$> internalPeekByteOff ptr off" $ property $ do
test1 <- generate $ arbitrary :: IO TestData
test2 <- generate $ arbitrary :: IO TestData2
test3 <- generate $ arbitrary :: IO TestData3
peekEquality test1
peekEquality test2
peekEquality test3
describe "Other tests:" $ do
it "gpokeByteOff ptr 0 val >> gpeekByteOff ptr 0 == val" $ property $ do
test1 <- generate $ arbitrary :: IO TestData
test2 <- generate $ arbitrary :: IO TestData2
test3 <- generate $ arbitrary :: IO TestData3
peekAndPoke test1
peekAndPoke test2
peekAndPoke test3
| mkloczko/derive-storable | test/Spec/Foreign/Storable/Generic/Internal/GStorableSpec.hs | mit | 4,354 | 0 | 14 | 1,211 | 1,220 | 594 | 626 | 100 | 1 |
module Drifter
(
-- * Managing Migrations
resolveDependencyOrder
, changeSequence
, migrate
-- * Types
, Drifter(..)
, ChangeName(..)
, Change(..)
, Description
, Method
, DBConnection
) where
-------------------------------------------------------------------------------
import Data.List
-------------------------------------------------------------------------------
import Drifter.Graph
import Drifter.Types
-------------------------------------------------------------------------------
-- | This is a helper for the common case where you just want
-- dependencies to run in list order. This will take the input list
-- and set their dependencies to run in the given sequence.
changeSequence :: [Change a] -> [Change a]
changeSequence [] = []
changeSequence (x:xs) = reverse $ snd $ foldl' go (x, [x]) xs
where
go :: (Change a, [Change a]) -> Change a -> (Change a, [Change a])
go (lastChange, xs') c =
let c' = c { changeDependencies = [changeName lastChange] }
in (c', c':xs')
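-- A small illustration (hypothetical values, not part of this module;
-- @mkChange@ stands in for some way of building a 'Change' with no
-- dependencies, and the printed form of 'ChangeName' is assumed):
--
-- > let [a, b, c] = changeSequence [mkChange "one", mkChange "two", mkChange "three"]
-- > in map changeDependencies [a, b, c]
-- > -- [[], [ChangeName "one"], [ChangeName "two"]]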
| AndrewRademacher/drifter | src/Drifter.hs | mit | 1,097 | 0 | 14 | 243 | 239 | 139 | 100 | 21 | 1 |
{-# LANGUAGE PatternSynonyms, ForeignFunctionInterface, JavaScriptFFI #-}
module GHCJS.DOM.JSFFI.Generated.SVGZoomAndPan
(pattern SVG_ZOOMANDPAN_UNKNOWN, pattern SVG_ZOOMANDPAN_DISABLE,
pattern SVG_ZOOMANDPAN_MAGNIFY, js_setZoomAndPan, setZoomAndPan,
js_getZoomAndPan, getZoomAndPan, SVGZoomAndPan,
castToSVGZoomAndPan, gTypeSVGZoomAndPan)
where
import Prelude ((.), (==), (>>=), return, IO, Int, Float, Double, Bool(..), Maybe, maybe, fromIntegral, round, fmap, Show, Read, Eq, Ord)
import Data.Typeable (Typeable)
import GHCJS.Types (JSVal(..), JSString)
import GHCJS.Foreign (jsNull)
import GHCJS.Foreign.Callback (syncCallback, asyncCallback, syncCallback1, asyncCallback1, syncCallback2, asyncCallback2, OnBlocked(..))
import GHCJS.Marshal (ToJSVal(..), FromJSVal(..))
import GHCJS.Marshal.Pure (PToJSVal(..), PFromJSVal(..))
import Control.Monad.IO.Class (MonadIO(..))
import Data.Int (Int64)
import Data.Word (Word, Word64)
import GHCJS.DOM.Types
import Control.Applicative ((<$>))
import GHCJS.DOM.EventTargetClosures (EventName, unsafeEventName)
import GHCJS.DOM.JSFFI.Generated.Enums
pattern SVG_ZOOMANDPAN_UNKNOWN = 0
pattern SVG_ZOOMANDPAN_DISABLE = 1
pattern SVG_ZOOMANDPAN_MAGNIFY = 2
foreign import javascript unsafe "$1[\"zoomAndPan\"] = $2;"
js_setZoomAndPan :: SVGZoomAndPan -> Word -> IO ()
-- | <https://developer.mozilla.org/en-US/docs/Web/API/SVGZoomAndPan.zoomAndPan Mozilla SVGZoomAndPan.zoomAndPan documentation>
setZoomAndPan :: (MonadIO m) => SVGZoomAndPan -> Word -> m ()
setZoomAndPan self val = liftIO (js_setZoomAndPan (self) val)
foreign import javascript unsafe "$1[\"zoomAndPan\"]"
js_getZoomAndPan :: SVGZoomAndPan -> IO Word
-- | <https://developer.mozilla.org/en-US/docs/Web/API/SVGZoomAndPan.zoomAndPan Mozilla SVGZoomAndPan.zoomAndPan documentation>
getZoomAndPan :: (MonadIO m) => SVGZoomAndPan -> m Word
getZoomAndPan self = liftIO (js_getZoomAndPan (self)) | manyoo/ghcjs-dom | ghcjs-dom-jsffi/src/GHCJS/DOM/JSFFI/Generated/SVGZoomAndPan.hs | mit | 1,960 | 14 | 8 | 235 | 477 | 292 | 185 | 31 | 1 |
{-# LANGUAGE TypeFamilies #-}
module Agent.PingPong.Role.Ask where
import AgentSystem.Generic
import Agent.PingPong
import qualified Agent.PingPong.Simple.Ask as Ask
import Data.IORef
--------------------------------------------------------------------------------
data PingRole = PingRole
data PongRole = PongRole
--------------------------------------------------------------------------------
instance RoleName PingRole where roleName _ = "Ping"
instance AgentRole PingRole where
type RoleState PingRole = (IORef Integer, IORef SomeAgentRef)
type RoleResult PingRole = ()
type RoleSysArgs PingRole = ()
type RoleArgs PingRole = (Integer, IORef SomeAgentRef)
instance RoleName PongRole where roleName _ = "Pong"
instance AgentRole PongRole where
type RoleState PongRole = ()
type RoleResult PongRole = ()
type RoleSysArgs PongRole = ()
type RoleArgs PongRole = ()
--------------------------------------------------------------------------------
pingRoleDescriptor = genericRoleDescriptor PingRole
(const $ return . uncurry Ask.pingDescriptor)
pongRoleDescriptor = genericRoleDescriptor PongRole
(const . const $ return Ask.pongDescriptor)
--------------------------------------------------------------------------------
runPingPong nPings = do pongRef <- newIORef undefined
putStrLn "<< CreateAgentOfRole >> "
let pingC = CreateAgentOfRole pingRoleDescriptor
(return ()) (return (nPings, pongRef))
pongC = CreateAgentOfRole pongRoleDescriptor
(return ()) (return ())
ping <- createAgentRef pingC
pong <- createAgentRef pongC
pongRef `writeIORef` someAgentRef pong
putStrLn "Starting PING"
agentStart ping
putStrLn "Starting PONG"
agentStart pong
putStrLn "Waiting PING termination"
agentWaitTermination ping
| fehu/h-agents | test/Agent/PingPong/Role/Ask.hs | mit | 2,150 | 0 | 13 | 618 | 413 | 211 | 202 | 39 | 1 |
module JoScript.Util.Text (foldlM, readFloat, readInt) where
import Prelude (read)
import Protolude hiding (foldlM)
import qualified Data.Text as T
foldlM :: Monad m => (b -> Char -> m b) -> b -> Text -> m b
foldlM f init bsInit = impl (pure init) bsInit where
impl acc bs
| T.null bs = acc
| otherwise = impl (acc >>= \acc' -> f acc' (T.head bs)) (T.tail bs)
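-- For example (illustrative, not part of the original module):
--
-- > foldlM (\n c -> print c >> pure (n + 1)) 0 (T.pack "hi")
-- > -- prints 'h' then 'i', and returns 2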
readInt :: Text -> Integer
readInt = read . T.unpack
readFloat :: Text -> Double
readFloat = read . T.unpack
| AKST/jo | source/lib/JoScript/Util/Text.hs | mit | 486 | 0 | 15 | 105 | 217 | 115 | 102 | 13 | 1 |