-- Hoogle documentation, generated by Haddock -- See Hoogle, http://www.haskell.org/hoogle/ -- | Type-safe, multi-backend data serialization. -- -- Hackage documentation generation is not reliable. For up to date -- documentation, please see: -- http://www.stackage.org/package/persistent. @package persistent @version 2.14.0.0 module Database.Persist.Class.PersistConfig -- | Represents a value containing all the configuration options for a -- specific backend. This abstraction makes it easier to write code that -- can easily swap backends. class PersistConfig c where { type family PersistConfigBackend c :: (Type -> Type) -> Type -> Type; type family PersistConfigPool c; } -- | Load the config settings from a Value, most likely taken from a -- YAML config file. loadConfig :: PersistConfig c => Value -> Parser c -- | Modify the config settings based on environment variables. applyEnv :: PersistConfig c => c -> IO c -- | Create a new connection pool based on the given config settings. createPoolConfig :: PersistConfig c => c -> IO (PersistConfigPool c) -- | Run a database action by taking a connection from the pool. runPool :: (PersistConfig c, MonadUnliftIO m) => c -> PersistConfigBackend c m a -> PersistConfigPool c -> m a instance (Database.Persist.Class.PersistConfig.PersistConfig c1, Database.Persist.Class.PersistConfig.PersistConfig c2, Database.Persist.Class.PersistConfig.PersistConfigPool c1 GHC.Types.~ Database.Persist.Class.PersistConfig.PersistConfigPool c2, Database.Persist.Class.PersistConfig.PersistConfigBackend c1 GHC.Types.~ Database.Persist.Class.PersistConfig.PersistConfigBackend c2) => Database.Persist.Class.PersistConfig.PersistConfig (Data.Either.Either c1 c2) -- | This module contains types and functions for working with and -- disambiguating database and Haskell names. module Database.Persist.Names -- | Convenience operations for working with '-NameDB' types. class DatabaseName a escapeWith :: DatabaseName a => (Text -> str) -> a -> str -- | An EntityNameDB represents the datastore-side name that -- persistent will use for an entity. newtype FieldNameDB FieldNameDB :: Text -> FieldNameDB [unFieldNameDB] :: FieldNameDB -> Text -- | A FieldNameHS represents the Haskell-side name that -- persistent will use for a field. newtype FieldNameHS FieldNameHS :: Text -> FieldNameHS [unFieldNameHS] :: FieldNameHS -> Text -- | An EntityNameHS represents the Haskell-side name that -- persistent will use for an entity. newtype EntityNameHS EntityNameHS :: Text -> EntityNameHS [unEntityNameHS] :: EntityNameHS -> Text -- | An EntityNameDB represents the datastore-side name that -- persistent will use for an entity. newtype EntityNameDB EntityNameDB :: Text -> EntityNameDB [unEntityNameDB] :: EntityNameDB -> Text -- | A ConstraintNameDB represents the datastore-side name that -- persistent will use for a constraint. newtype ConstraintNameDB ConstraintNameDB :: Text -> ConstraintNameDB [unConstraintNameDB] :: ConstraintNameDB -> Text -- | An ConstraintNameHS represents the Haskell-side name that -- persistent will use for a constraint. 
newtype ConstraintNameHS ConstraintNameHS :: Text -> ConstraintNameHS [unConstraintNameHS] :: ConstraintNameHS -> Text instance Language.Haskell.TH.Syntax.Lift Database.Persist.Names.FieldNameDB instance GHC.Classes.Ord Database.Persist.Names.FieldNameDB instance GHC.Read.Read Database.Persist.Names.FieldNameDB instance GHC.Classes.Eq Database.Persist.Names.FieldNameDB instance GHC.Show.Show Database.Persist.Names.FieldNameDB instance Language.Haskell.TH.Syntax.Lift Database.Persist.Names.FieldNameHS instance GHC.Classes.Ord Database.Persist.Names.FieldNameHS instance GHC.Read.Read Database.Persist.Names.FieldNameHS instance GHC.Classes.Eq Database.Persist.Names.FieldNameHS instance GHC.Show.Show Database.Persist.Names.FieldNameHS instance Language.Haskell.TH.Syntax.Lift Database.Persist.Names.EntityNameHS instance GHC.Classes.Ord Database.Persist.Names.EntityNameHS instance GHC.Read.Read Database.Persist.Names.EntityNameHS instance GHC.Classes.Eq Database.Persist.Names.EntityNameHS instance GHC.Show.Show Database.Persist.Names.EntityNameHS instance Language.Haskell.TH.Syntax.Lift Database.Persist.Names.EntityNameDB instance GHC.Classes.Ord Database.Persist.Names.EntityNameDB instance GHC.Read.Read Database.Persist.Names.EntityNameDB instance GHC.Classes.Eq Database.Persist.Names.EntityNameDB instance GHC.Show.Show Database.Persist.Names.EntityNameDB instance Language.Haskell.TH.Syntax.Lift Database.Persist.Names.ConstraintNameDB instance GHC.Classes.Ord Database.Persist.Names.ConstraintNameDB instance GHC.Read.Read Database.Persist.Names.ConstraintNameDB instance GHC.Classes.Eq Database.Persist.Names.ConstraintNameDB instance GHC.Show.Show Database.Persist.Names.ConstraintNameDB instance Language.Haskell.TH.Syntax.Lift Database.Persist.Names.ConstraintNameHS instance GHC.Classes.Ord Database.Persist.Names.ConstraintNameHS instance GHC.Read.Read Database.Persist.Names.ConstraintNameHS instance GHC.Classes.Eq Database.Persist.Names.ConstraintNameHS instance GHC.Show.Show Database.Persist.Names.ConstraintNameHS instance Database.Persist.Names.DatabaseName Database.Persist.Names.ConstraintNameDB instance Database.Persist.Names.DatabaseName Database.Persist.Names.EntityNameDB instance Database.Persist.Names.DatabaseName Database.Persist.Names.FieldNameDB -- | This module contains an intermediate representation of values before -- the backends serialize them into explicit database types. module Database.Persist.PersistValue -- | A raw value which can be stored in any backend and can be marshalled -- to and from a PersistField. data PersistValue PersistText :: Text -> PersistValue PersistByteString :: ByteString -> PersistValue PersistInt64 :: Int64 -> PersistValue PersistDouble :: Double -> PersistValue PersistRational :: Rational -> PersistValue PersistBool :: Bool -> PersistValue PersistDay :: Day -> PersistValue PersistTimeOfDay :: TimeOfDay -> PersistValue PersistUTCTime :: UTCTime -> PersistValue PersistNull :: PersistValue PersistList :: [PersistValue] -> PersistValue PersistMap :: [(Text, PersistValue)] -> PersistValue -- | Intended especially for MongoDB backend PersistObjectId :: ByteString -> PersistValue -- | Intended especially for PostgreSQL backend for text arrays PersistArray :: [PersistValue] -> PersistValue -- | This constructor is used to specify some raw literal value for the -- backend. The LiteralType value specifies how the value should -- be escaped. This can be used to make special, custom types avaialable -- in the back end. 
PersistLiteral_ :: LiteralType -> ByteString -> PersistValue -- | This pattern synonym used to be a data constructor on -- PersistValue, but was changed into a catch-all pattern synonym -- to allow backwards compatiblity with database types. See the -- documentation on PersistDbSpecific for more details. pattern PersistLiteral :: ByteString -> PersistValue -- | This pattern synonym used to be a data constructor on -- PersistValue, but was changed into a catch-all pattern synonym -- to allow backwards compatiblity with database types. See the -- documentation on PersistDbSpecific for more details. pattern PersistLiteralEscaped :: ByteString -> PersistValue -- | This pattern synonym used to be a data constructor for the -- PersistValue type. It was changed to be a pattern so that -- JSON-encoded database values could be parsed into their corresponding -- values. You should not use this, and instead prefer to pattern match -- on PersistLiteral_ directly. -- -- If you use this, it will overlap a patern match on the -- 'PersistLiteral_, PersistLiteral, and -- PersistLiteralEscaped patterns. If you need to disambiguate -- between these constructors, pattern match on PersistLiteral_ -- directly. -- | Deprecated: Deprecated since 2.11 because of inconsistent escaping -- behavior across backends. The Postgres backend escapes these values, -- while the MySQL backend does not. If you are using this, please switch -- to PersistLiteral_ and provide a relevant LiteralType -- for your conversion. pattern PersistDbSpecific :: ByteString -> PersistValue fromPersistValueText :: PersistValue -> Either Text Text -- | A type that determines how a backend should handle the literal. data LiteralType -- | The accompanying value will be escaped before inserting into the -- database. This is the correct default choice to use. Escaped :: LiteralType -- | The accompanying value will not be escaped when inserting into the -- database. This is potentially dangerous - use this with care. Unescaped :: LiteralType -- | The DbSpecific constructor corresponds to the legacy -- PersistDbSpecific constructor. We need to keep this around -- because old databases may have serialized JSON representations that -- reference this. We don't want to break the ability of a database to -- load rows. 
DbSpecific :: LiteralType instance GHC.Classes.Ord Database.Persist.PersistValue.LiteralType instance GHC.Classes.Eq Database.Persist.PersistValue.LiteralType instance GHC.Read.Read Database.Persist.PersistValue.LiteralType instance GHC.Show.Show Database.Persist.PersistValue.LiteralType instance GHC.Classes.Ord Database.Persist.PersistValue.PersistValue instance GHC.Classes.Eq Database.Persist.PersistValue.PersistValue instance GHC.Read.Read Database.Persist.PersistValue.PersistValue instance GHC.Show.Show Database.Persist.PersistValue.PersistValue instance Web.Internal.HttpApiData.ToHttpApiData Database.Persist.PersistValue.PersistValue instance Web.Internal.HttpApiData.FromHttpApiData Database.Persist.PersistValue.PersistValue instance Web.PathPieces.PathPiece Database.Persist.PersistValue.PersistValue instance Data.Aeson.Types.ToJSON.ToJSON Database.Persist.PersistValue.PersistValue instance Data.Aeson.Types.FromJSON.FromJSON Database.Persist.PersistValue.PersistValue module Database.Persist.SqlBackend.Internal.IsolationLevel -- | Please refer to the documentation for the database in question for a -- full overview of the semantics of the varying isloation levels data IsolationLevel ReadUncommitted :: IsolationLevel ReadCommitted :: IsolationLevel RepeatableRead :: IsolationLevel Serializable :: IsolationLevel makeIsolationLevelStatement :: (Monoid s, IsString s) => IsolationLevel -> s instance GHC.Enum.Bounded Database.Persist.SqlBackend.Internal.IsolationLevel.IsolationLevel instance GHC.Classes.Ord Database.Persist.SqlBackend.Internal.IsolationLevel.IsolationLevel instance GHC.Enum.Enum Database.Persist.SqlBackend.Internal.IsolationLevel.IsolationLevel instance GHC.Classes.Eq Database.Persist.SqlBackend.Internal.IsolationLevel.IsolationLevel instance GHC.Show.Show Database.Persist.SqlBackend.Internal.IsolationLevel.IsolationLevel module Database.Persist.SqlBackend.Internal.SqlPoolHooks -- | A set of hooks that may be used to alter the behaviour of -- runSqlPoolWithExtensibleHooks in a backwards-compatible -- fashion. data SqlPoolHooks m backend SqlPoolHooks :: (backend -> m backend) -> (backend -> Maybe IsolationLevel -> m ()) -> (backend -> Maybe IsolationLevel -> m ()) -> (backend -> Maybe IsolationLevel -> SomeException -> m ()) -> SqlPoolHooks m backend -- | Alter the backend prior to executing any actions with it. [alterBackend] :: SqlPoolHooks m backend -> backend -> m backend -- | Run this action immediately before the action is performed. [runBefore] :: SqlPoolHooks m backend -> backend -> Maybe IsolationLevel -> m () -- | Run this action immediately after the action is completed. [runAfter] :: SqlPoolHooks m backend -> backend -> Maybe IsolationLevel -> m () -- | This action is performed when an exception is received. The exception -- is provided as a convenience - it is rethrown once this cleanup -- function is complete. [runOnException] :: SqlPoolHooks m backend -> backend -> Maybe IsolationLevel -> SomeException -> m () module Database.Persist.SqlBackend.Internal.Statement -- | A Statement is a representation of a database query that has -- been prepared and stored on the server side. data Statement Statement :: IO () -> IO () -> ([PersistValue] -> IO Int64) -> (forall m. MonadIO m => [PersistValue] -> Acquire (ConduitM () [PersistValue] m ())) -> Statement [stmtFinalize] :: Statement -> IO () [stmtReset] :: Statement -> IO () [stmtExecute] :: Statement -> [PersistValue] -> IO Int64 [stmtQuery] :: Statement -> forall m. 
MonadIO m => [PersistValue] -> Acquire (ConduitM () [PersistValue] m ()) module Database.Persist.SqlBackend.Internal.StatementCache -- | A statement cache used to lookup statements that have already been -- prepared for a given query. data StatementCache StatementCache :: (StatementCacheKey -> IO (Maybe Statement)) -> (StatementCacheKey -> Statement -> IO ()) -> IO () -> IO Int -> StatementCache [statementCacheLookup] :: StatementCache -> StatementCacheKey -> IO (Maybe Statement) [statementCacheInsert] :: StatementCache -> StatementCacheKey -> Statement -> IO () [statementCacheClear] :: StatementCache -> IO () [statementCacheSize] :: StatementCache -> IO Int newtype StatementCacheKey StatementCacheKey :: Text -> StatementCacheKey [cacheKey] :: StatementCacheKey -> Text -- | Construct a StatementCacheKey from a raw SQL query. mkCacheKeyFromQuery :: Text -> StatementCacheKey module Database.Persist.SqlBackend.StatementCache -- | A statement cache used to lookup statements that have already been -- prepared for a given query. data StatementCache data StatementCacheKey -- | Construct a StatementCacheKey from a raw SQL query. mkCacheKeyFromQuery :: Text -> StatementCacheKey -- | Configuration parameters for creating a custom statement cache data MkStatementCache MkStatementCache :: (StatementCacheKey -> IO (Maybe Statement)) -> (StatementCacheKey -> Statement -> IO ()) -> IO () -> IO Int -> MkStatementCache -- | Retrieve a statement from the cache, or return nothing if it is not -- found. [statementCacheLookup] :: MkStatementCache -> StatementCacheKey -> IO (Maybe Statement) -- | Put a new statement into the cache. An immediate lookup of the -- statement MUST return the inserted statement for the given cache key. -- Depending on the implementation, the statement cache MAY choose to -- evict other statements from the cache within this function. [statementCacheInsert] :: MkStatementCache -> StatementCacheKey -> Statement -> IO () -- | Remove all statements from the cache. Implementations of this should -- be sure to call stmtFinalize on all statements removed from the -- cache. [statementCacheClear] :: MkStatementCache -> IO () -- | Get the current size of the cache. [statementCacheSize] :: MkStatementCache -> IO Int -- | Make a simple statement cache that will cache statements if they are -- not currently cached. mkSimpleStatementCache :: IORef (Map Text Statement) -> MkStatementCache -- | Create a statement cache. mkStatementCache :: MkStatementCache -> StatementCache module Database.Persist.SqlBackend.Internal.InsertSqlResult data InsertSqlResult ISRSingle :: Text -> InsertSqlResult ISRInsertGet :: Text -> Text -> InsertSqlResult ISRManyKeys :: Text -> [PersistValue] -> InsertSqlResult module Database.Persist.SqlBackend.Internal.MkSqlBackend -- | This type shares many of the same field names as the -- SqlBackend type. It's useful for library authors to use this -- when migrating from using the SqlBackend constructor directly -- to the mkSqlBackend function. -- -- This type will only contain required fields for constructing a -- SqlBackend. 
For fields that aren't present on this record, -- you'll want to use the various set functions or data MkSqlBackendArgs MkSqlBackendArgs :: (Text -> IO Statement) -> (EntityDef -> [PersistValue] -> InsertSqlResult) -> IORef (Map Text Statement) -> IO () -> ([EntityDef] -> (Text -> IO Statement) -> EntityDef -> IO (Either [Text] [(Bool, Text)])) -> ((Text -> IO Statement) -> Maybe IsolationLevel -> IO ()) -> ((Text -> IO Statement) -> IO ()) -> ((Text -> IO Statement) -> IO ()) -> (FieldNameDB -> Text) -> (EntityDef -> Text) -> (Text -> Text) -> Text -> Text -> ((Int, Int) -> Text -> Text) -> LogFunc -> MkSqlBackendArgs -- | This function should prepare a Statement in the target -- database, which should allow for efficient query reuse. [connPrepare] :: MkSqlBackendArgs -> Text -> IO Statement -- | This function generates the SQL and values necessary for performing an -- insert against the database. [connInsertSql] :: MkSqlBackendArgs -> EntityDef -> [PersistValue] -> InsertSqlResult -- | A reference to the cache of statements. Statements are keyed by -- the Text queries that generated them. [connStmtMap] :: MkSqlBackendArgs -> IORef (Map Text Statement) -- | Close the underlying connection. [connClose] :: MkSqlBackendArgs -> IO () -- | This function returns the migrations required to include the -- EntityDef parameter in the [EntityDef] -- database. This might include creating a new table if the entity is not -- present, or altering an existing table if it is. [connMigrateSql] :: MkSqlBackendArgs -> [EntityDef] -> (Text -> IO Statement) -> EntityDef -> IO (Either [Text] [(Bool, Text)]) -- | A function to begin a transaction for the underlying database. [connBegin] :: MkSqlBackendArgs -> (Text -> IO Statement) -> Maybe IsolationLevel -> IO () -- | A function to commit a transaction to the underlying database. [connCommit] :: MkSqlBackendArgs -> (Text -> IO Statement) -> IO () -- | A function to roll back a transaction on the underlying database. [connRollback] :: MkSqlBackendArgs -> (Text -> IO Statement) -> IO () -- | A function to extract and escape the name of the column corresponding -- to the provided field. [connEscapeFieldName] :: MkSqlBackendArgs -> FieldNameDB -> Text -- | A function to extract and escape the name of the table corresponding -- to the provided entity. PostgreSQL uses this to support schemas. [connEscapeTableName] :: MkSqlBackendArgs -> EntityDef -> Text -- | A function to escape raw DB identifiers. MySQL uses backticks, while -- PostgreSQL uses quotes, and so on. [connEscapeRawName] :: MkSqlBackendArgs -> Text -> Text [connNoLimit] :: MkSqlBackendArgs -> Text -- | A tag displaying what database the SqlBackend is for. Can be -- used to differentiate features in downstream libraries for different -- database backends. [connRDBMS] :: MkSqlBackendArgs -> Text -- | Attach a 'LIMIT/OFFSET' clause to a SQL query. Note that LIMIT/OFFSET -- is problematic for performance, and indexed range queries are the -- superior way to offer pagination. [connLimitOffset] :: MkSqlBackendArgs -> (Int, Int) -> Text -> Text -- | A log function for the SqlBackend to use. [connLogFunc] :: MkSqlBackendArgs -> LogFunc type LogFunc = Loc -> LogSource -> LogLevel -> LogStr -> IO () module Database.Persist.FieldDef.Internal -- | A FieldDef represents the inormation that persistent -- knows about a field of a datatype. This includes information used to -- parse the field out of the database and what the field corresponds to. 
data FieldDef FieldDef :: !FieldNameHS -> !FieldNameDB -> !FieldType -> !SqlType -> ![FieldAttr] -> !Bool -> !ReferenceDef -> !FieldCascade -> !Maybe Text -> !Maybe Text -> !Bool -> FieldDef -- | The name of the field. Note that this does not corresponds to the -- record labels generated for the particular entity - record labels are -- generated with the type name prefixed to the field, so a -- FieldDef that contains a FieldNameHS "name" for -- a type User will have a record field userName. [fieldHaskell] :: FieldDef -> !FieldNameHS -- | The name of the field in the database. For SQL databases, this -- corresponds to the column name. [fieldDB] :: FieldDef -> !FieldNameDB -- | The type of the field in Haskell. [fieldType] :: FieldDef -> !FieldType -- | The type of the field in a SQL database. [fieldSqlType] :: FieldDef -> !SqlType -- | User annotations for a field. These are provided with the ! -- operator. [fieldAttrs] :: FieldDef -> ![FieldAttr] -- | If this is True, then the Haskell datatype will have a strict -- record field. The default value for this is True. [fieldStrict] :: FieldDef -> !Bool [fieldReference] :: FieldDef -> !ReferenceDef -- | Defines how operations on the field cascade on to the referenced -- tables. This doesn't have any meaning if the fieldReference is -- set to NoReference or SelfReference. The cascade option -- here should be the same as the one obtained in the -- fieldReference. [fieldCascade] :: FieldDef -> !FieldCascade -- | Optional comments for a Field. There is not currently a way -- to attach comments to a field in the quasiquoter. [fieldComments] :: FieldDef -> !Maybe Text -- | Whether or not the field is a GENERATED column, and -- additionally the expression to use for generation. [fieldGenerated] :: FieldDef -> !Maybe Text -- | True if the field is an implicit ID column. False -- otherwise. [fieldIsImplicitIdColumn] :: FieldDef -> !Bool isFieldNotGenerated :: FieldDef -> Bool -- | This datatype describes how a foreign reference field cascades deletes -- or updates. -- -- This type is used in both parsing the model definitions and performing -- migrations. A Nothing in either of the field values means that -- the user has not specified a CascadeAction. An unspecified -- CascadeAction is defaulted to Restrict when doing -- migrations. data FieldCascade FieldCascade :: !Maybe CascadeAction -> !Maybe CascadeAction -> FieldCascade [fcOnUpdate] :: FieldCascade -> !Maybe CascadeAction [fcOnDelete] :: FieldCascade -> !Maybe CascadeAction -- | Renders a FieldCascade value such that it can be used in SQL -- migrations. renderFieldCascade :: FieldCascade -> Text -- | Render a CascadeAction to Text such that it can be used -- in a SQL command. renderCascadeAction :: CascadeAction -> Text -- | A FieldCascade that does nothing. noCascade :: FieldCascade -- | An action that might happen on a deletion or update on a foreign key -- change. data CascadeAction Cascade :: CascadeAction Restrict :: CascadeAction SetNull :: CascadeAction SetDefault :: CascadeAction module Database.Persist.FieldDef -- | A FieldDef represents the inormation that persistent -- knows about a field of a datatype. This includes information used to -- parse the field out of the database and what the field corresponds to. data FieldDef -- | Replace the FieldDef FieldAttr with the new list. setFieldAttrs :: [FieldAttr] -> FieldDef -> FieldDef -- | Modify the list of field attributes. 
overFieldAttrs :: ([FieldAttr] -> [FieldAttr]) -> FieldDef -> FieldDef -- | Add an attribute to the list of field attributes. addFieldAttr :: FieldAttr -> FieldDef -> FieldDef -- | Check if the field definition is nullable isFieldNullable :: FieldDef -> IsNullable -- | Check if the field is `Maybe a` isFieldMaybe :: FieldDef -> Bool isFieldNotGenerated :: FieldDef -> Bool -- | Returns True if the FieldDef does not have a -- MigrationOnly or SafeToRemove flag from the -- QuasiQuoter. isHaskellField :: FieldDef -> Bool -- | This datatype describes how a foreign reference field cascades deletes -- or updates. -- -- This type is used in both parsing the model definitions and performing -- migrations. A Nothing in either of the field values means that -- the user has not specified a CascadeAction. An unspecified -- CascadeAction is defaulted to Restrict when doing -- migrations. data FieldCascade FieldCascade :: !Maybe CascadeAction -> !Maybe CascadeAction -> FieldCascade [fcOnUpdate] :: FieldCascade -> !Maybe CascadeAction [fcOnDelete] :: FieldCascade -> !Maybe CascadeAction -- | Renders a FieldCascade value such that it can be used in SQL -- migrations. renderFieldCascade :: FieldCascade -> Text -- | Render a CascadeAction to Text such that it can be used -- in a SQL command. renderCascadeAction :: CascadeAction -> Text -- | A FieldCascade that does nothing. noCascade :: FieldCascade -- | An action that might happen on a deletion or update on a foreign key -- change. data CascadeAction Cascade :: CascadeAction Restrict :: CascadeAction SetNull :: CascadeAction SetDefault :: CascadeAction -- | The EntityDef type, fields, and constructor are exported from -- this module. Breaking changes to the EntityDef type are not -- reflected in the major version of the API. Please import from -- Database.Persist.EntityDef instead. -- -- If you need this module, please file a GitHub issue why. module Database.Persist.EntityDef.Internal -- | An EntityDef represents the information that -- persistent knows about an Entity. It uses this information to -- generate the Haskell datatype, the SQL migrations, and other relevant -- conversions. data EntityDef EntityDef :: !EntityNameHS -> !EntityNameDB -> !EntityIdDef -> ![Attr] -> ![FieldDef] -> ![UniqueDef] -> ![ForeignDef] -> ![Text] -> !Map Text [ExtraLine] -> !Bool -> !Maybe Text -> EntityDef -- | The name of the entity as Haskell understands it. [entityHaskell] :: EntityDef -> !EntityNameHS -- | The name of the database table corresponding to the entity. [entityDB] :: EntityDef -> !EntityNameDB -- | The entity's primary key or identifier. [entityId] :: EntityDef -> !EntityIdDef -- | The persistent entity syntax allows you to add arbitrary -- Attrs to an entity using the ! operator. Those -- attributes are stored in this list. [entityAttrs] :: EntityDef -> ![Attr] -- | The fields for this entity. Note that the ID field will not be present -- in this list. To get all of the fields for an entity, use -- keyAndEntityFields. [entityFields] :: EntityDef -> ![FieldDef] -- | The Uniqueness constraints for this entity. [entityUniques] :: EntityDef -> ![UniqueDef] -- | The foreign key relationships that this entity has to other entities. [entityForeigns] :: EntityDef -> ![ForeignDef] -- | A list of type classes that have been derived for this entity. [entityDerives] :: EntityDef -> ![Text] [entityExtra] :: EntityDef -> !Map Text [ExtraLine] -- | Whether or not this entity represents a sum type in the database. 
[entitySum] :: EntityDef -> !Bool -- | Optional comments on the entity. [entityComments] :: EntityDef -> !Maybe Text entityPrimary :: EntityDef -> Maybe CompositeDef -- | Return the [FieldDef] for the entity keys. entitiesPrimary :: EntityDef -> NonEmpty FieldDef -- | Returns a NonEmpty list of FieldDef that correspond with -- the key columns for an EntityDef. keyAndEntityFields :: EntityDef -> NonEmpty FieldDef toEmbedEntityDef :: EntityDef -> EmbedEntityDef -- | The definition for the entity's primary key ID. data EntityIdDef -- | The entity has a single key column, and it is a surrogate key - that -- is, you can't go from rec -> Key rec. EntityIdField :: !FieldDef -> EntityIdDef -- | The entity has a natural key. This means you can write rec -> -- Key rec because all the key fields are present on the datatype. -- -- A natural key can have one or more columns. EntityIdNaturalKey :: !CompositeDef -> EntityIdDef -- | An EntityDef represents metadata about a type that -- persistent uses to store the type in the database, as well as -- generate Haskell code from it. module Database.Persist.EntityDef -- | An EntityDef represents the information that -- persistent knows about an Entity. It uses this information to -- generate the Haskell datatype, the SQL migrations, and other relevant -- conversions. data EntityDef -- | Retrieve the Haskell name of the given entity. getEntityHaskellName :: EntityDef -> EntityNameHS -- | Return the database name for the given entity. getEntityDBName :: EntityDef -> EntityNameDB -- | Retrieve the list of FieldDef that makes up the fields of the -- entity. -- -- This does not return the fields for an Id column or an -- implicit id. It will return the key columns if you used the -- Primary syntax for defining the primary key. -- -- This does not return fields that are marked SafeToRemove or -- MigrationOnly - so it only returns fields that are -- represented in the Haskell type. If you need those fields, use -- getEntityFieldsDatabase. getEntityFields :: EntityDef -> [FieldDef] -- | This returns all of the FieldDef defined for the -- EntityDef, including those fields that are marked as -- MigrationOnly (and therefore only present in the database) or -- SafeToRemove (and a migration will drop the column if it -- exists in the database). -- -- For all the fields that are present on the Haskell-type, see -- getEntityFields. getEntityFieldsDatabase :: EntityDef -> [FieldDef] getEntityForeignDefs :: EntityDef -> [ForeignDef] -- | Retrieve the list of UniqueDef from an EntityDef. As of -- version 2.14, this will also include the primary key on the entity, if -- one is defined. If you do not want the primary key, see -- getEntityUniquesNoPrimaryKey. getEntityUniques :: EntityDef -> [UniqueDef] -- | Retrieve the list of UniqueDef from an EntityDef. This -- does not include a Primary key, if one is defined. A future -- version of persistent will include a Primary key -- among the Unique constructors for the Entity. getEntityUniquesNoPrimaryKey :: EntityDef -> [UniqueDef] getEntityId :: EntityDef -> EntityIdDef getEntityIdField :: EntityDef -> Maybe FieldDef getEntityKeyFields :: EntityDef -> NonEmpty FieldDef getEntityComments :: EntityDef -> Maybe Text getEntityExtra :: EntityDef -> Map Text [[Text]] isEntitySum :: EntityDef -> Bool entityPrimary :: EntityDef -> Maybe CompositeDef -- | Return the [FieldDef] for the entity keys. 
entitiesPrimary :: EntityDef -> NonEmpty FieldDef -- | Returns a NonEmpty list of FieldDef that correspond with -- the key columns for an EntityDef. keyAndEntityFields :: EntityDef -> NonEmpty FieldDef -- | Set an entityId to be the given FieldDef. setEntityId :: FieldDef -> EntityDef -> EntityDef setEntityIdDef :: EntityIdDef -> EntityDef -> EntityDef setEntityDBName :: EntityNameDB -> EntityDef -> EntityDef -- | Perform a mapping function over all of the entity fields, as -- determined by getEntityFieldsDatabase. overEntityFields :: ([FieldDef] -> [FieldDef]) -> EntityDef -> EntityDef -- | The definition for the entity's primary key ID. data EntityIdDef -- | The entity has a single key column, and it is a surrogate key - that -- is, you can't go from rec -> Key rec. EntityIdField :: !FieldDef -> EntityIdDef -- | The entity has a natural key. This means you can write rec -> -- Key rec because all the key fields are present on the datatype. -- -- A natural key can have one or more columns. EntityIdNaturalKey :: !CompositeDef -> EntityIdDef module Database.Persist.Class.PersistField -- | This class teaches Persistent how to take a custom type and marshal it -- to and from a PersistValue, allowing it to be stored in a -- database. -- --
-- {-# LANGUAGE GeneralizedNewtypeDeriving #-}
--
-- newtype HashedPassword = HashedPassword ByteString
--   deriving (Eq, Show, PersistField, PersistFieldSql)
--
--
--
-- {-# LANGUAGE GeneralizedNewtypeDeriving #-}
-- import qualified Data.Text as T
-- import qualified Data.Char as C
--
-- -- | An American Social Security Number
-- newtype SSN = SSN Text
--   deriving (Eq, Show, PersistFieldSql)
--
-- mkSSN :: Text -> Either Text SSN
-- mkSSN t = if (T.length t == 9) && (T.all C.isDigit t)
--   then Right $ SSN t
--   else Left $ "Invalid SSN: " <> t
--
-- instance PersistField SSN where
--   toPersistValue (SSN t) = PersistText t
--   fromPersistValue (PersistText t) = mkSSN t
--   -- Handle cases where the database does not give us PersistText
--   fromPersistValue x = Left $ "File.hs: When trying to deserialize an SSN: expected PersistText, received: " <> T.pack (show x)
--
--
-- Tips:
--
-- -- parseFromEnvironmentVariables :: IO (Entity User) -- parseFromEnvironmentVariables = -- tabulateEntityA $ \userField -> -- case userField of -- UserName -> -- getEnv USER_NAME -- UserAge -> do -- ageVar <- getEnv USER_AGE -- case readMaybe ageVar of -- Just age -> -- pure age -- Nothing -> -- error $ "Failed to parse Age from: " <> ageVar -- UserAddressId -> do -- addressVar <- getEnv USER_ADDRESS_ID -- pure $ AddressKey addressVar --tabulateEntityA :: (PersistEntity record, Applicative f) => (forall a. EntityField record a -> f a) -> f (Entity record) -- | A meta operation to retrieve all the Unique keys. persistUniqueKeys :: PersistEntity record => record -> [Unique record] -- | A lower level operation. persistUniqueToFieldNames :: PersistEntity record => Unique record -> NonEmpty (FieldNameHS, FieldNameDB) -- | A lower level operation. persistUniqueToValues :: PersistEntity record => Unique record -> [PersistValue] -- | Use a PersistField as a lens. fieldLens :: PersistEntity record => EntityField record field -> forall f. Functor f => (field -> f field) -> Entity record -> f (Entity record) -- | Extract a Key record from a record value. -- Currently, this is only defined for entities using the -- Primary syntax for natural/composite keys. In a future -- version of persistent which incorporates the ID directly into -- the entity, this will always be Just. keyFromRecordM :: PersistEntity record => Maybe (record -> Key record) -- | Construct an Entity record by providing a value for -- each of the record's fields. -- -- These constructions are equivalent: -- --
-- entityMattConstructor, entityMattTabulate :: Entity User
-- entityMattConstructor =
--     Entity
--         { entityKey = toSqlKey 123
--         , entityVal =
--             User
--                 { userName = "Matt"
--                 , userAge = 33
--                 }
--         }
--
-- entityMattTabulate =
--     tabulateEntity $ \case
--         UserId ->
--             toSqlKey 123
--         UserName ->
--             "Matt"
--         UserAge ->
--             33
--
--
-- This is a specialization of tabulateEntityA, which allows you
-- to construct an Entity by providing an Applicative
-- action for each field instead of a regular function.
tabulateEntity :: PersistEntity record => (forall a. EntityField record a -> a) -> Entity record
-- | Updating a database entity.
--
-- Persistent users use combinators to create these.
data Update record
Update :: EntityField record typ -> typ -> PersistUpdate -> Update record
[updateField] :: Update record -> EntityField record typ
[updateValue] :: Update record -> typ
[updateUpdate] :: Update record -> PersistUpdate
BackendUpdate :: BackendSpecificUpdate (PersistEntityBackend record) record -> Update record
type family BackendSpecificUpdate backend record
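-- A minimal usage sketch (assuming a User entity with a userAge field
-- generated by mkPersist; the combinators come from Database.Persist):
-- Update values are built with combinators such as =. and +=. rather than
-- with the constructors above.
--
-- birthday :: MonadIO m => Key User -> ReaderT SqlBackend m ()
-- birthday userId = update userId [UserAge +=. 1]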
-- | Query options.
--
-- Persistent users use these directly.
data SelectOpt record
Asc :: EntityField record typ -> SelectOpt record
Desc :: EntityField record typ -> SelectOpt record
OffsetBy :: Int -> SelectOpt record
LimitTo :: Int -> SelectOpt record
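-- A sketch of combining SelectOpt values with a query (assuming a User
-- entity with a userAge field): the generated query sorts by age descending
-- and returns one page of results.
--
-- pageOfAdults :: MonadIO m => Int -> ReaderT SqlBackend m [Entity User]
-- pageOfAdults page =
--     selectList [UserAge >=. 18] [Desc UserAge, LimitTo 25, OffsetBy (25 * page)]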
-- | Filters which are available for select, updateWhere
-- and deleteWhere. Each filter constructor specifies the field
-- being filtered on, the type of comparison applied (equals, not equals,
-- etc) and the argument for the comparison.
--
-- Persistent users use combinators to create these.
--
-- Note that it's important to be careful about the PersistFilter
-- that you are using, if you use this directly. For example, using the
-- In PersistFilter requires that you have an array- or
-- list-shaped EntityField. It is possible to construct values
-- using this that will create malformed runtime values.
data Filter record
Filter :: EntityField record typ -> FilterValue typ -> PersistFilter -> Filter record
[filterField] :: Filter record -> EntityField record typ
[filterValue] :: Filter record -> FilterValue typ
[filterFilter] :: Filter record -> PersistFilter
-- | Convenient for internal use, not needed for the API.
FilterAnd :: [Filter record] -> Filter record
FilterOr :: [Filter record] -> Filter record
BackendFilter :: BackendSpecificFilter (PersistEntityBackend record) record -> Filter record
-- | Value to filter with. Highly dependent on the type of filter used.
data FilterValue typ
[FilterValue] :: typ -> FilterValue typ
[FilterValues] :: [typ] -> FilterValue typ
[UnsafeValue] :: forall a typ. PersistField a => a -> FilterValue typ
type family BackendSpecificFilter backend record
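-- A sketch of how Filter values are normally produced (assuming a User
-- entity with userName and userAge fields): the combinators from
-- Database.Persist pick the FilterValue and PersistFilter for you, including
-- the list-shaped FilterValues required by In.
--
-- namedOrAged :: [Filter User]
-- namedOrAged = [UserName <-. ["Alice", "Bob"]] ||. [UserAge >. 30]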
-- | Datatype that represents an entity, with both its Key and its
-- Haskell record representation.
--
-- When using a SQL-based backend (such as SQLite or PostgreSQL), an
-- Entity may take any number of columns depending on how many
-- fields it has. In order to reconstruct your entity on the Haskell
-- side, persistent needs all of your entity columns, in the
-- right order. Note that you don't need to worry about this when using
-- persistent's API since everything is handled correctly behind
-- the scenes.
--
-- However, if you want to issue a raw SQL command that returns an
-- Entity, then you have to be careful with the column order.
-- While you could use SELECT Entity.* WHERE ... and that would
-- work most of the time, there are times when the order of the columns
-- on your database is different from the order that persistent
-- expects (for example, if you add a new field in the middle of your
-- entity definition and then use the migration code --
-- persistent will expect the column to be in the middle, but
-- your DBMS will put it as the last column). So, instead of using a
-- query like the one above, you may use rawSql (from the
-- Database.Persist.Sql module) with its /entity selection
-- placeholder/ (a double question mark ??). Using
-- rawSql the query above must be written as SELECT ?? WHERE
-- ... Then rawSql will replace ?? with the list
-- of all columns that we need from your entity in the right order. If
-- your query returns two entities (i.e. (Entity backend a, Entity
-- backend b)), then you must use SELECT ??, ?? WHERE
-- ..., and so on.
data Entity record
Entity' :: Key record -> record -> Entity record
pattern Entity :: Key rec -> rec -> Entity rec
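-- A sketch of the rawSql entity-selection placeholder described above
-- (assuming a User entity whose table has an age column; rawSql lives in
-- Database.Persist.Sql):
--
-- getAdults :: MonadIO m => ReaderT SqlBackend m [Entity User]
-- getAdults = rawSql "SELECT ?? FROM \"user\" WHERE age >= ?" [PersistInt64 18]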
-- | Textual representation of the record
recordName :: PersistEntity record => record -> Text
-- | Get list of values corresponding to given entity.
entityValues :: PersistEntity record => Entity record -> [PersistValue]
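-- A sketch of what entityValues returns (assuming a User entity with name
-- and age fields, in that order, and a default surrogate key): the key
-- values come first, followed by the fields in declaration order.
--
-- entityValues (Entity (toSqlKey 1) (User "Matt" 33))
--     -- ~ [PersistInt64 1, PersistText "Matt", PersistInt64 33]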
-- | Predefined toJSON. The resulting JSON looks like {"key":
-- 1, "value": {"name": ...}}.
--
-- The typical usage is:
--
-- instance ToJSON (Entity User) where
--     toJSON = keyValueEntityToJSON
keyValueEntityToJSON :: (PersistEntity record, ToJSON record) => Entity record -> Value

-- | Predefined parseJSON. The input JSON looks like {"key":
-- 1, "value": {"name": ...}}.
--
-- The typical usage is:
--
-- instance FromJSON (Entity User) where
--     parseJSON = keyValueEntityFromJSON
keyValueEntityFromJSON :: (PersistEntity record, FromJSON record) => Value -> Parser (Entity record)

-- | Predefined toJSON. The resulting JSON looks like {"id":
-- 1, "name": ...}.
--
-- The typical usage is:
--
-- instance ToJSON (Entity User) where
--     toJSON = entityIdToJSON
entityIdToJSON :: (PersistEntity record, ToJSON record) => Entity record -> Value

-- | Predefined parseJSON. The input JSON looks like {"id": 1,
-- "name": ...}.
--
-- The typical usage is:
--
-- instance FromJSON (Entity User) where
--     parseJSON = entityIdFromJSON
entityIdFromJSON :: (PersistEntity record, FromJSON record) => Value -> Parser (Entity record)

-- | Convenience function for getting a free PersistField instance
-- from a type with JSON instances.
--
-- Example usage in combination with fromPersistValueJSON:
--
-- instance PersistField MyData where
--     fromPersistValue = fromPersistValueJSON
--     toPersistValue = toPersistValueJSON
toPersistValueJSON :: ToJSON a => a -> PersistValue

-- | Convenience function for getting a free PersistField instance
-- from a type with JSON instances. The JSON parser used will accept JSON
-- values other than objects and arrays. So, if your instance serializes
-- the data to a JSON string, this will still work.
--
-- Example usage in combination with toPersistValueJSON:
--
-- instance PersistField MyData where
--     fromPersistValue = fromPersistValueJSON
--     toPersistValue = toPersistValueJSON
fromPersistValueJSON :: FromJSON a => PersistValue -> Either Text a

-- | Convenience function for getting a free PersistField instance
-- from a type with an Enum instance. The function
-- derivePersistField from the persistent-template package
-- should generally be preferred. However, if you want to ensure that an
-- ORDER BY clause that uses your field will order rows by the
-- data constructor order, this is a better choice.
--
-- Example usage in combination with fromPersistValueEnum:
--
-- data SeverityLevel = Low | Medium | Critical | High
--     deriving (Enum, Bounded)
-- instance PersistField SeverityLevel where
--     fromPersistValue = fromPersistValueEnum
--     toPersistValue = toPersistValueEnum
toPersistValueEnum :: Enum a => a -> PersistValue

-- | Convenience function for getting a free PersistField instance
-- from a type with an Enum instance. This function also requires
-- a Bounded instance to improve the reporting of errors.
--
-- Example usage in combination with toPersistValueEnum:
--
-- data SeverityLevel = Low | Medium | Critical | High -- deriving (Enum, Bounded) -- instance PersistField SeverityLevel where -- fromPersistValue = fromPersistValueEnum -- toPersistValue = toPersistValueEnum --fromPersistValueEnum :: (Enum a, Bounded a) => PersistValue -> Either Text a -- | This type class is used with the OverloadedLabels extension -- to provide a more convenient means of using the EntityField -- type. EntityField definitions are prefixed with the type name -- to avoid ambiguity, but this ambiguity can result in verbose code. -- -- If you have a table User with a name Text field, -- then the corresponding EntityField is UserName. With -- this, we can write #name :: EntityField User Text. -- -- What's more fun is that the type is more general: it's actually -- #name :: (SymbolToField "name" rec typ) => EntityField rec -- typ -- -- Which means it is *polymorphic* over the actual record. This allows -- you to write code that can be generic over the tables, provided they -- have the right fields. class SymbolToField (sym :: Symbol) rec typ | sym rec -> typ symbolToField :: SymbolToField sym rec typ => EntityField rec typ -- | A type class which is used to witness that a type is safe to insert -- into the database without providing a primary key. -- -- The TemplateHaskell function mkPersist will generate -- instances of this class for any entity that it works on. If the entity -- has a default primary key, then it provides a regular instance. If the -- entity has a Primary natural key, then this works fine. But -- if the entity has an Id column with no default=, -- then this does a TypeError and forces the user to use -- insertKey. class SafeToInsert a type SafeToInsertErrorMessage a = 'Text "The PersistEntity " :<>: ShowType a :<>: 'Text " does not have a default primary key." :$$: 'Text "This means that 'insert' will fail with a database error." :$$: 'Text "Please provide a default= clause inthe entity definition," :$$: 'Text "or use 'insertKey' instead to provide one." instance (GHC.Generics.Generic (Database.Persist.Class.PersistEntity.Key record), GHC.Generics.Generic record) => GHC.Generics.Generic (Database.Persist.Class.PersistEntity.Entity record) instance (GHC.Classes.Eq (Database.Persist.Class.PersistEntity.Key record), GHC.Classes.Eq record) => GHC.Classes.Eq (Database.Persist.Class.PersistEntity.Entity record) instance (GHC.Classes.Ord (Database.Persist.Class.PersistEntity.Key record), GHC.Classes.Ord record) => GHC.Classes.Ord (Database.Persist.Class.PersistEntity.Entity record) instance (GHC.Show.Show (Database.Persist.Class.PersistEntity.Key record), GHC.Show.Show record) => GHC.Show.Show (Database.Persist.Class.PersistEntity.Entity record) instance (GHC.Read.Read (Database.Persist.Class.PersistEntity.Key record), GHC.Read.Read record) => GHC.Read.Read (Database.Persist.Class.PersistEntity.Entity record) instance (TypeError ...) => Database.Persist.Class.PersistEntity.SafeToInsert (Database.Persist.Class.PersistEntity.Entity a) instance (TypeError ...) 
=> Database.Persist.Class.PersistEntity.SafeToInsert (a -> b) instance (Database.Persist.Class.PersistEntity.SymbolToField sym ent typ, Database.Persist.Class.PersistEntity.PersistEntity ent) => GHC.Records.HasField sym (Database.Persist.Class.PersistEntity.Entity ent) typ instance Database.Persist.Class.PersistEntity.SymbolToField sym rec typ => GHC.OverloadedLabels.IsLabel sym (Database.Persist.Class.PersistEntity.EntityField rec typ) instance (Database.Persist.Class.PersistEntity.PersistEntity record, Database.Persist.Class.PersistField.PersistField record, Database.Persist.Class.PersistField.PersistField (Database.Persist.Class.PersistEntity.Key record)) => Database.Persist.Class.PersistField.PersistField (Database.Persist.Class.PersistEntity.Entity record) -- | This module exports many types and functions for operating on -- persistent's database representation. It's a bit of a kitchen -- sink. In the future, this module will be reorganized, and many of the -- dependent modules will be viewable on their own for easier -- documentation and organization. module Database.Persist.Types -- | Updating a database entity. -- -- Persistent users use combinators to create these. data Update record Update :: EntityField record typ -> typ -> PersistUpdate -> Update record [updateField] :: Update record -> EntityField record typ [updateValue] :: Update record -> typ [updateUpdate] :: Update record -> PersistUpdate BackendUpdate :: BackendSpecificUpdate (PersistEntityBackend record) record -> Update record type family BackendSpecificUpdate backend record -- | Query options. -- -- Persistent users use these directly. data SelectOpt record Asc :: EntityField record typ -> SelectOpt record Desc :: EntityField record typ -> SelectOpt record OffsetBy :: Int -> SelectOpt record LimitTo :: Int -> SelectOpt record -- | Filters which are available for select, updateWhere -- and deleteWhere. Each filter constructor specifies the field -- being filtered on, the type of comparison applied (equals, not equals, -- etc) and the argument for the comparison. -- -- Persistent users use combinators to create these. -- -- Note that it's important to be careful about the PersistFilter -- that you are using, if you use this directly. For example, using the -- In PersistFilter requires that you have an array- or -- list-shaped EntityField. It is possible to construct values -- using this that will create malformed runtime values. data Filter record Filter :: EntityField record typ -> FilterValue typ -> PersistFilter -> Filter record [filterField] :: Filter record -> EntityField record typ [filterValue] :: Filter record -> FilterValue typ [filterFilter] :: Filter record -> PersistFilter -- | convenient for internal use, not needed for the API FilterAnd :: [Filter record] -> Filter record FilterOr :: [Filter record] -> Filter record BackendFilter :: BackendSpecificFilter (PersistEntityBackend record) record -> Filter record -- | Value to filter with. Highly dependant on the type of filter used. data FilterValue typ [FilterValue] :: typ -> FilterValue typ [FilterValues] :: [typ] -> FilterValue typ [UnsafeValue] :: forall a typ. PersistField a => a -> FilterValue typ type family BackendSpecificFilter backend record -- | By default, a backend will automatically generate the key Instead you -- can specify a Primary key made up of unique values. data family Key record -- | Datatype that represents an entity, with both its Key and its -- Haskell record representation. 
-- -- When using a SQL-based backend (such as SQLite or PostgreSQL), an -- Entity may take any number of columns depending on how many -- fields it has. In order to reconstruct your entity on the Haskell -- side, persistent needs all of your entity columns and in the -- right order. Note that you don't need to worry about this when using -- persistent's API since everything is handled correctly behind -- the scenes. -- -- However, if you want to issue a raw SQL command that returns an -- Entity, then you have to be careful with the column order. -- While you could use SELECT Entity.* WHERE ... and that would -- work most of the time, there are times when the order of the columns -- on your database is different from the order that persistent -- expects (for example, if you add a new field in the middle of you -- entity definition and then use the migration code -- -- persistent will expect the column to be in the middle, but -- your DBMS will put it as the last column). So, instead of using a -- query like the one above, you may use rawSql (from the -- Database.Persist.GenericSql module) with its /entity selection -- placeholder/ (a double question mark ??). Using -- rawSql the query above must be written as SELECT ?? WHERE -- ... Then rawSql will replace ?? with the list -- of all columns that we need from your entity in the right order. If -- your query returns two entities (i.e. (Entity backend a, Entity -- backend b)), then you must you use SELECT ??, ?? WHERE -- ..., and so on. data Entity record Entity' :: Key record -> record -> Entity record pattern Entity :: Key rec -> rec -> Entity rec -- | Prior to persistent-2.11.0, we provided an instance of -- PersistField for the Natural type. This was in error, -- because Natural represents an infinite value, and databases -- don't have reasonable types for this. -- -- The instance for Natural used the Int64 underlying type, -- which will cause underflow and overflow errors. This type has the -- exact same code in the instances, and will work seamlessly. -- -- A more appropriate type for this is the Word series of types -- from Data.Word. These have a bounded size, are guaranteed to be -- non-negative, and are quite efficient for the database to store. newtype OverflowNatural OverflowNatural :: Natural -> OverflowNatural [unOverflowNatural] :: OverflowNatural -> Natural -- | A type that determines how a backend should handle the literal. data LiteralType -- | The accompanying value will be escaped before inserting into the -- database. This is the correct default choice to use. Escaped :: LiteralType -- | The accompanying value will not be escaped when inserting into the -- database. This is potentially dangerous - use this with care. Unescaped :: LiteralType -- | The DbSpecific constructor corresponds to the legacy -- PersistDbSpecific constructor. We need to keep this around -- because old databases may have serialized JSON representations that -- reference this. We don't want to break the ability of a database to -- load rows. DbSpecific :: LiteralType -- | A raw value which can be stored in any backend and can be marshalled -- to and from a PersistField. 
data PersistValue PersistText :: Text -> PersistValue PersistByteString :: ByteString -> PersistValue PersistInt64 :: Int64 -> PersistValue PersistDouble :: Double -> PersistValue PersistRational :: Rational -> PersistValue PersistBool :: Bool -> PersistValue PersistDay :: Day -> PersistValue PersistTimeOfDay :: TimeOfDay -> PersistValue PersistUTCTime :: UTCTime -> PersistValue PersistNull :: PersistValue PersistList :: [PersistValue] -> PersistValue PersistMap :: [(Text, PersistValue)] -> PersistValue -- | Intended especially for MongoDB backend PersistObjectId :: ByteString -> PersistValue -- | Intended especially for PostgreSQL backend for text arrays PersistArray :: [PersistValue] -> PersistValue -- | This constructor is used to specify some raw literal value for the -- backend. The LiteralType value specifies how the value should -- be escaped. This can be used to make special, custom types avaialable -- in the back end. PersistLiteral_ :: LiteralType -> ByteString -> PersistValue -- | This pattern synonym used to be a data constructor for the -- PersistValue type. It was changed to be a pattern so that -- JSON-encoded database values could be parsed into their corresponding -- values. You should not use this, and instead prefer to pattern match -- on PersistLiteral_ directly. -- -- If you use this, it will overlap a patern match on the -- 'PersistLiteral_, PersistLiteral, and -- PersistLiteralEscaped patterns. If you need to disambiguate -- between these constructors, pattern match on PersistLiteral_ -- directly. -- | Deprecated: Deprecated since 2.11 because of inconsistent escaping -- behavior across backends. The Postgres backend escapes these values, -- while the MySQL backend does not. If you are using this, please switch -- to PersistLiteral_ and provide a relevant LiteralType -- for your conversion. pattern PersistDbSpecific :: ByteString -> PersistValue -- | This pattern synonym used to be a data constructor on -- PersistValue, but was changed into a catch-all pattern synonym -- to allow backwards compatiblity with database types. See the -- documentation on PersistDbSpecific for more details. pattern PersistLiteral :: ByteString -> PersistValue -- | This pattern synonym used to be a data constructor on -- PersistValue, but was changed into a catch-all pattern synonym -- to allow backwards compatiblity with database types. See the -- documentation on PersistDbSpecific for more details. pattern PersistLiteralEscaped :: ByteString -> PersistValue -- | A FieldDef represents the inormation that persistent -- knows about a field of a datatype. This includes information used to -- parse the field out of the database and what the field corresponds to. data FieldDef FieldDef :: !FieldNameHS -> !FieldNameDB -> !FieldType -> !SqlType -> ![FieldAttr] -> !Bool -> !ReferenceDef -> !FieldCascade -> !Maybe Text -> !Maybe Text -> !Bool -> FieldDef -- | The name of the field. Note that this does not corresponds to the -- record labels generated for the particular entity - record labels are -- generated with the type name prefixed to the field, so a -- FieldDef that contains a FieldNameHS "name" for -- a type User will have a record field userName. [fieldHaskell] :: FieldDef -> !FieldNameHS -- | The name of the field in the database. For SQL databases, this -- corresponds to the column name. [fieldDB] :: FieldDef -> !FieldNameDB -- | The type of the field in Haskell. [fieldType] :: FieldDef -> !FieldType -- | The type of the field in a SQL database. 
[fieldSqlType] :: FieldDef -> !SqlType -- | User annotations for a field. These are provided with the ! -- operator. [fieldAttrs] :: FieldDef -> ![FieldAttr] -- | If this is True, then the Haskell datatype will have a strict -- record field. The default value for this is True. [fieldStrict] :: FieldDef -> !Bool [fieldReference] :: FieldDef -> !ReferenceDef -- | Defines how operations on the field cascade on to the referenced -- tables. This doesn't have any meaning if the fieldReference is -- set to NoReference or SelfReference. The cascade option -- here should be the same as the one obtained in the -- fieldReference. [fieldCascade] :: FieldDef -> !FieldCascade -- | Optional comments for a Field. There is not currently a way -- to attach comments to a field in the quasiquoter. [fieldComments] :: FieldDef -> !Maybe Text -- | Whether or not the field is a GENERATED column, and -- additionally the expression to use for generation. [fieldGenerated] :: FieldDef -> !Maybe Text -- | True if the field is an implicit ID column. False -- otherwise. [fieldIsImplicitIdColumn] :: FieldDef -> !Bool data PersistUpdate Assign :: PersistUpdate Add :: PersistUpdate Subtract :: PersistUpdate Multiply :: PersistUpdate Divide :: PersistUpdate BackendSpecificUpdate :: Text -> PersistUpdate data UpdateException KeyNotFound :: String -> UpdateException UpsertError :: String -> UpdateException data PersistFilter Eq :: PersistFilter Ne :: PersistFilter Gt :: PersistFilter Lt :: PersistFilter Ge :: PersistFilter Le :: PersistFilter In :: PersistFilter NotIn :: PersistFilter BackendSpecificFilter :: Text -> PersistFilter -- | A SQL data type. Naming attempts to reflect the underlying Haskell -- datatypes, eg SqlString instead of SqlVarchar. Different SQL databases -- may have different translations for these types. data SqlType SqlString :: SqlType SqlInt32 :: SqlType SqlInt64 :: SqlType SqlReal :: SqlType SqlNumeric :: Word32 -> Word32 -> SqlType SqlBool :: SqlType SqlDay :: SqlType SqlTime :: SqlType -- | Always uses UTC timezone SqlDayTime :: SqlType SqlBlob :: SqlType -- | a backend-specific name SqlOther :: Text -> SqlType data PersistException -- | Generic Exception PersistError :: Text -> PersistException PersistMarshalError :: Text -> PersistException PersistInvalidField :: Text -> PersistException PersistForeignConstraintUnmet :: Text -> PersistException PersistMongoDBError :: Text -> PersistException PersistMongoDBUnsupported :: Text -> PersistException -- | An action that might happen on a deletion or update on a foreign key -- change. data CascadeAction Cascade :: CascadeAction Restrict :: CascadeAction SetNull :: CascadeAction SetDefault :: CascadeAction -- | This datatype describes how a foreign reference field cascades deletes -- or updates. -- -- This type is used in both parsing the model definitions and performing -- migrations. A Nothing in either of the field values means that -- the user has not specified a CascadeAction. An unspecified -- CascadeAction is defaulted to Restrict when doing -- migrations. 
data FieldCascade FieldCascade :: !Maybe CascadeAction -> !Maybe CascadeAction -> FieldCascade [fcOnUpdate] :: FieldCascade -> !Maybe CascadeAction [fcOnDelete] :: FieldCascade -> !Maybe CascadeAction data ForeignDef ForeignDef :: !EntityNameHS -> !EntityNameDB -> !ConstraintNameHS -> !ConstraintNameDB -> !FieldCascade -> ![(ForeignFieldDef, ForeignFieldDef)] -> ![Attr] -> Bool -> Bool -> ForeignDef [foreignRefTableHaskell] :: ForeignDef -> !EntityNameHS [foreignRefTableDBName] :: ForeignDef -> !EntityNameDB [foreignConstraintNameHaskell] :: ForeignDef -> !ConstraintNameHS [foreignConstraintNameDBName] :: ForeignDef -> !ConstraintNameDB -- | Determine how the field will cascade on updates and deletions. [foreignFieldCascade] :: ForeignDef -> !FieldCascade [foreignFields] :: ForeignDef -> ![(ForeignFieldDef, ForeignFieldDef)] [foreignAttrs] :: ForeignDef -> ![Attr] [foreignNullable] :: ForeignDef -> Bool -- | Determines if the reference is towards a Primary Key or not. [foreignToPrimary] :: ForeignDef -> Bool -- | Used instead of FieldDef to generate a smaller amount of code type ForeignFieldDef = (FieldNameHS, FieldNameDB) data CompositeDef CompositeDef :: !NonEmpty FieldDef -> ![Attr] -> CompositeDef [compositeFields] :: CompositeDef -> !NonEmpty FieldDef [compositeAttrs] :: CompositeDef -> ![Attr] -- | Type for storing the Uniqueness constraint in the Schema. Assume you -- have the following schema with a uniqueness constraint: -- --
-- Person -- name String -- age Int -- UniqueAge age ---- -- This will be represented as: -- --
-- UniqueDef
-- { uniqueHaskell = ConstraintNameHS (packPTH UniqueAge)
-- , uniqueDBName = ConstraintNameDB (packPTH "unique_age")
-- , uniqueFields = [(FieldNameHS (packPTH "age"), FieldNameDB (packPTH "age"))]
-- , uniqueAttrs = []
-- }
--
data UniqueDef
UniqueDef :: !ConstraintNameHS -> !ConstraintNameDB -> !NonEmpty (FieldNameHS, FieldNameDB) -> ![Attr] -> UniqueDef
[uniqueHaskell] :: UniqueDef -> !ConstraintNameHS
[uniqueDBName] :: UniqueDef -> !ConstraintNameDB
[uniqueFields] :: UniqueDef -> !NonEmpty (FieldNameHS, FieldNameDB)
[uniqueAttrs] :: UniqueDef -> ![Attr]
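-- A minimal sketch (not part of persistent's API; the helper name is
-- hypothetical) of how a UniqueDef is typically consumed: pulling the
-- database-side column names back out, e.g. to render an index definition
-- by hand. Assumes NonEmpty from Data.List.NonEmpty and the accessors
-- above are in scope.
uniqueColumnNames :: UniqueDef -> NonEmpty Text
uniqueColumnNames ud = fmap (unFieldNameDB . snd) (uniqueFields ud)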
-- | An EmbedFieldDef is the same as a FieldDef, but it is only used for
-- embeddedFields, so it only has the data needed for embedding.
data EmbedFieldDef
EmbedFieldDef :: FieldNameDB -> Maybe (Either SelfEmbed EntityNameHS) -> EmbedFieldDef
[emFieldDB] :: EmbedFieldDef -> FieldNameDB
[emFieldEmbed] :: EmbedFieldDef -> Maybe (Either SelfEmbed EntityNameHS)
-- | An EmbedEntityDef is the same as an EntityDef, but it is only used for
-- fieldReference, so it only has the data needed for embedding.
data EmbedEntityDef
EmbedEntityDef :: EntityNameHS -> [EmbedFieldDef] -> EmbedEntityDef
[embeddedHaskell] :: EmbedEntityDef -> EntityNameHS
[embeddedFields] :: EmbedEntityDef -> [EmbedFieldDef]
-- | There are 3 kinds of references: 1) composite (to fields that exist in
-- the record), 2) single field, and 3) embedded.
data ReferenceDef
NoReference :: ReferenceDef
-- | A ForeignRef has a late binding to the EntityDef it references via
-- name and has the Haskell type of the foreign key in the form of
-- FieldType
ForeignRef :: !EntityNameHS -> ReferenceDef
EmbedRef :: EntityNameHS -> ReferenceDef
-- | A SelfReference stops an immediate cycle which causes non-termination
-- at compile-time (issue #311).
SelfReference :: ReferenceDef
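-- A minimal sketch (not part of persistent's API; the helper name is
-- hypothetical) of how a ReferenceDef is typically consumed: recover the
-- referenced entity name, if there is one, from a parsed field.
referencedEntityName :: ReferenceDef -> Maybe EntityNameHS
referencedEntityName ref = case ref of
    ForeignRef name -> Just name    -- late-bound reference to another entity
    EmbedRef name   -> Just name    -- embedded entity reference
    NoReference     -> Nothing
    SelfReference   -> Nothing      -- self references carry no separate name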
-- | A FieldType describes a field parsed from the QuasiQuoter and
-- is used to determine the Haskell type in the generated code.
--
-- name Text parses into FTTypeCon Nothing Text
--
-- name T.Text parses into FTTypeCon (Just T) Text
--
-- name (Jsonb User) parses into:
--
-- -- FTApp (FTTypeCon Nothing Jsonb) (FTTypeCon Nothing User) --data FieldType -- | Optional module and name. FTTypeCon :: Maybe Text -> Text -> FieldType FTLit :: FieldTypeLit -> FieldType FTTypePromoted :: Text -> FieldType FTApp :: FieldType -> FieldType -> FieldType FTList :: FieldType -> FieldType -- | Attributes that may be attached to fields that can affect migrations -- and serialization in backend-specific ways. -- -- While we endeavor to, we can't foresee all use cases for all backends, -- and so FieldAttr is extensible through its constructor -- FieldAttrOther. data FieldAttr -- | The Maybe keyword goes after the type. This indicates that the -- column is nullable, and the generated Haskell code will have a -- Maybe type for it. -- -- Example: -- --
-- User -- name Text Maybe --FieldAttrMaybe :: FieldAttr -- | This indicates that the column is nullable, but should not have a -- Maybe type. For this to work out, you need to ensure that the -- PersistField instance for the type in question can support a -- PersistNull value. -- --
-- data What = NoWhat | Hello Text -- -- instance PersistField What where -- fromPersistValue PersistNull = -- pure NoWhat -- fromPersistValue pv = -- Hello $ fromPersistValue pv -- -- instance PersistFieldSql What where -- sqlType _ = SqlString -- -- User -- what What nullable --FieldAttrNullable :: FieldAttr -- | This tag means that the column will not be present on the Haskell -- code, but will not be removed from the database. Useful to deprecate -- fields in phases. -- -- You should set the column to be nullable in the database. Otherwise, -- inserts won't have values. -- --
-- User -- oldName Text MigrationOnly -- newName Text --FieldAttrMigrationOnly :: FieldAttr -- | A SafeToRemove attribute is not present on the Haskell -- datatype, and the backend migrations should attempt to drop the column -- without triggering any unsafe migration warnings. -- -- Useful after you've used MigrationOnly to remove a column -- from the database in phases. -- --
-- User -- oldName Text SafeToRemove -- newName Text --FieldAttrSafeToRemove :: FieldAttr -- | This attribute indicates that we should create a foreign key reference -- from a column. By default, persistent will try and create a -- foreign key reference for a column if it can determine that the type -- of the column is a Key entity or an -- EntityId and the Entity's name was present in -- mkPersist. -- -- This is useful if you want to use the explicit foreign key syntax. -- --
-- Post -- title Text -- -- Comment -- postId PostId noreference -- Foreign Post fk_comment_post postId --FieldAttrNoreference :: FieldAttr -- | This is set to specify precisely the database table the column refers -- to. -- --
-- Post -- title Text -- -- Comment -- postId PostId references="post" ---- -- You should not need this - persistent should be capable of -- correctly determining the target table's name. If you do need this, -- please file an issue describing why. FieldAttrReference :: Text -> FieldAttr -- | Specify a name for the constraint on the foreign key reference for -- this table. -- --
-- Post -- title Text -- -- Comment -- postId PostId constraint="my_cool_constraint_name" --FieldAttrConstraint :: Text -> FieldAttr -- | Specify the default value for a column. -- --
-- User -- createdAt UTCTime default="NOW()" ---- -- Note that a default= attribute does not mean you can omit the -- value while inserting. FieldAttrDefault :: Text -> FieldAttr -- | Specify a custom SQL type for the column. Generally, you should define -- a custom datatype with a custom PersistFieldSql instance -- instead of using this. -- --
-- User -- uuid Text sqltype=UUID --FieldAttrSqltype :: Text -> FieldAttr -- | Set a maximum length for a column. Useful for VARCHAR and indexes. -- --
-- User -- name Text maxlen=200 -- -- UniqueName name --FieldAttrMaxlen :: Integer -> FieldAttr -- | Specify the database name of the column. -- --
-- User -- blarghle Int sql="b_l_a_r_g_h_l_e" ---- -- Useful for performing phased migrations, where one column is renamed -- to another column over time. FieldAttrSql :: Text -> FieldAttr -- | A grab bag of random attributes that were unrecognized by the parser. FieldAttrOther :: Text -> FieldAttr type Attr = Text type ExtraLine = [Text] -- | The reason why a field is nullable is very important. A field -- that is nullable because of a Maybe tag will have its type -- changed from A to Maybe A. OTOH, a field that is -- nullable because of a nullable tag will remain with the same -- type. data WhyNullable ByMaybeAttr :: WhyNullable ByNullableAttr :: WhyNullable data IsNullable Nullable :: !WhyNullable -> IsNullable NotNullable :: IsNullable -- | A Checkmark should be used as a field type whenever a -- uniqueness constraint should guarantee that a certain kind of record -- may appear at most once, but other kinds of records may appear any -- number of times. -- -- NOTE: You need to mark any Checkmark fields as -- nullable (see the following example). -- -- For example, suppose there's a Location entity that -- represents where a user has lived: -- --
-- Location -- user UserId -- name Text -- current Checkmark nullable -- -- UniqueLocation user current ---- -- The UniqueLocation constraint allows any number of -- Inactive Locations to be current. However, -- there may be at most one current Location per user -- (i.e., either zero or one per user). -- -- This data type works because of the way that SQL treats -- NULLable fields within uniqueness constraints. The SQL -- standard says that NULL values should be considered -- different, so we represent Inactive as SQL NULL, thus -- allowing any number of Inactive records. On the other hand, we -- represent Active as TRUE, so the uniqueness constraint -- will disallow more than one Active record. -- -- Note: There may be DBMSs that do not respect the SQL standard's -- treatment of NULL values on uniqueness constraints, please -- check if this data type works before relying on it. -- -- The SQL BOOLEAN type is used because it's the smallest data -- type available. Note that we never use FALSE, just -- TRUE and NULL. Provides the same behavior Maybe -- () would if () was a valid PersistField. data Checkmark -- | When used on a uniqueness constraint, there may be at most one -- Active record. Active :: Checkmark -- | When used on a uniqueness constraint, there may be any number of -- Inactive records. Inactive :: Checkmark fieldAttrsContainsNullable :: [FieldAttr] -> IsNullable -- | Returns a NonEmpty list of FieldDef that correspond with -- the key columns for an EntityDef. keyAndEntityFields :: EntityDef -> NonEmpty FieldDef -- | Parse raw field attributes into structured form. Any unrecognized -- attributes will be preserved, identically as they are encountered, as -- FieldAttrOther values. parseFieldAttrs :: [Text] -> [FieldAttr] -- | A FieldCascade that does nothing. noCascade :: FieldCascade -- | This Internal module may have breaking changes that will not -- be reflected in major version bumps. Please use -- Database.Persist.Quasi instead. If you need something in this -- module, please file an issue on GitHub. module Database.Persist.Quasi.Internal -- | Parses a quasi-quoted syntax into a list of entity definitions. parse :: PersistSettings -> Text -> [UnboundEntityDef] data PersistSettings PersistSettings :: !Text -> Text -> !EntityNameHS -> ConstraintNameHS -> Text -> !Bool -> !Text -> PersistSettings -- | Modify the Haskell-style name into a database-style name. [psToDBName] :: PersistSettings -> !Text -> Text -- | A function for generating the constraint name, with access to the -- entity and constraint names. Default value: mappend [psToFKName] :: PersistSettings -> !EntityNameHS -> ConstraintNameHS -> Text -- | Whether fields are by default strict. Default value: True. [psStrictFields] :: PersistSettings -> !Bool -- | The name of the id column. Default value: id The name of the -- id column can also be changed on a per-model basis -- https://github.com/yesodweb/persistent/wiki/Persistent-entity-syntax [psIdName] :: PersistSettings -> !Text upperCaseSettings :: PersistSettings lowerCaseSettings :: PersistSettings toFKNameInfixed :: Text -> EntityNameHS -> ConstraintNameHS -> Text -- | A token used by the parser. data Token -- | Token tok is token tok already unquoted. Token :: Text -> Token -- | DocComment is a documentation comment, unmodified. 
DocComment :: Text -> Token -- | A line of parsed tokens data Line Line :: Int -> NonEmpty Token -> Line [lineIndent] :: Line -> Int [tokens] :: Line -> NonEmpty Token preparse :: Text -> Maybe (NonEmpty Line) parseLine :: Text -> Maybe Line parseFieldType :: Text -> Either String FieldType associateLines :: NonEmpty Line -> [LinesWithComments] data LinesWithComments LinesWithComments :: NonEmpty Line -> [Text] -> LinesWithComments [lwcLines] :: LinesWithComments -> NonEmpty Line [lwcComments] :: LinesWithComments -> [Text] parseEntityFields :: [Line] -> ([[Token]], Map Text [ExtraLine]) takeColsEx :: PersistSettings -> [Text] -> Maybe UnboundFieldDef -- | An EntityDef produced by the QuasiQuoter. It contains -- information that the QuasiQuoter is capable of knowing about the -- entities. It is inherently unfinished, though - there are many other -- Unbound datatypes that also contain partial information. -- -- The unboundEntityDef is not complete or reliable - to know -- which fields are safe to use, consult the parsing code. -- -- This type was completely internal until 2.13.0.0, when it was exposed -- as part of the Database.Persist.Quasi.Internal module. -- -- TODO: refactor this so we can expose it for consumers. data UnboundEntityDef UnboundEntityDef :: [UnboundForeignDef] -> PrimarySpec -> EntityDef -> [UnboundFieldDef] -> UnboundEntityDef -- | A list of foreign definitions on the parsed entity. [unboundForeignDefs] :: UnboundEntityDef -> [UnboundForeignDef] -- | The specification for the primary key of the unbound entity. [unboundPrimarySpec] :: UnboundEntityDef -> PrimarySpec -- | The incomplete and partial EntityDef that we're defining. We -- re-use the type here to prevent duplication, but several of the fields -- are unset and left to defaults. [unboundEntityDef] :: UnboundEntityDef -> EntityDef -- | The list of fields for the entity. We're not capable of knowing -- information like "is this a reference?" or "what's the underlying type -- of the field?" yet, so we defer those to the Template Haskell -- execution. [unboundEntityFields] :: UnboundEntityDef -> [UnboundFieldDef] -- | Return the EntityNameHS for an UnboundEntityDef. getUnboundEntityNameHS :: UnboundEntityDef -> EntityNameHS -- | Convert an EntityDef into an UnboundEntityDef. This -- "forgets" information about the EntityDef, but it is all kept -- present on the unboundEntityDef field if necessary. unbindEntityDef :: EntityDef -> UnboundEntityDef -- | Returns the [UnboundFieldDef] for an -- UnboundEntityDef. This returns all fields defined on the -- entity. getUnboundFieldDefs :: UnboundEntityDef -> [UnboundFieldDef] -- | Define an explicit foreign key reference. -- --
-- User -- name Text -- email Text -- -- Primary name email -- -- Dog -- ownerName Text -- ownerEmail Text -- -- Foreign User fk_dog_user ownerName ownerEmail --data UnboundForeignDef UnboundForeignDef :: UnboundForeignFieldList -> ForeignDef -> UnboundForeignDef -- | Fields in the source entity. [unboundForeignFields] :: UnboundForeignDef -> UnboundForeignFieldList -- | The ForeignDef which needs information filled in. -- -- This value is unreliable. See the parsing code to see what data is -- filled in here. [unboundForeignDef] :: UnboundForeignDef -> ForeignDef getSqlNameOr :: FieldNameDB -> [FieldAttr] -> FieldNameDB -- | A representation of a database column, with everything that can be -- known at parse time. data UnboundFieldDef UnboundFieldDef :: FieldNameHS -> FieldNameDB -> [FieldAttr] -> Bool -> FieldType -> FieldCascade -> Maybe Text -> Maybe Text -> UnboundFieldDef -- | The Haskell name of the field. This is parsed directly from the -- definition, and is used to generate the Haskell record field and the -- EntityField definition. [unboundFieldNameHS] :: UnboundFieldDef -> FieldNameHS -- | The database name of the field. By default, this is determined by the -- PersistSettings record at parse time. You can customize this -- with a sql= attribute: -- --
-- name Text sql=foo_name --[unboundFieldNameDB] :: UnboundFieldDef -> FieldNameDB -- | The attributes present on the field. For rules on parsing and utility, -- see the comments on the datatype. [unboundFieldAttrs] :: UnboundFieldDef -> [FieldAttr] -- | Whether or not the field should be strict in the generated Haskell -- code. [unboundFieldStrict] :: UnboundFieldDef -> Bool -- | The type of the field, as far as is known at parse time. -- -- The TemplateHaskell code will reconstruct a Type out of this, -- but the names will be imported as-is. [unboundFieldType] :: UnboundFieldDef -> FieldType -- | We parse if there's a FieldCascade on the field. If the field -- is not a reference, this information is ignored. -- --
-- Post -- user UserId OnDeleteCascade --[unboundFieldCascade] :: UnboundFieldDef -> FieldCascade -- | Contains an expression to generate the column. If this is present, -- then the column will not be written to the database, but generated by -- the expression every time. -- --
-- Item -- subtotal Int -- taxRate Rational -- total Int generated="subtotal * tax_rate" --[unboundFieldGenerated] :: UnboundFieldDef -> Maybe Text -- | Any comments present on the field. Documentation comments use a -- Haskell-like syntax, and must be present before the field in question. -- --
-- Post -- -- | This is the blog post title. -- title Text -- -- | You can have multi-line comments. -- -- | But each line must have the pipe character. -- author UserId --[unboundFieldComments] :: UnboundFieldDef -> Maybe Text -- | A definition for a composite primary key. -- -- @since 2.13.0.0 data UnboundCompositeDef UnboundCompositeDef :: NonEmpty FieldNameHS -> [Attr] -> UnboundCompositeDef -- | The field names for the primary key. [unboundCompositeCols] :: UnboundCompositeDef -> NonEmpty FieldNameHS -- | A list of attributes defined on the primary key. This is anything that -- occurs after a ! character. [unboundCompositeAttrs] :: UnboundCompositeDef -> [Attr] -- | This type represents an Id declaration in the QuasiQuoted -- syntax. -- --
-- Id ---- -- This uses the implied settings, and is equivalent to omitting the -- Id statement entirely. -- --
-- Id Text ---- -- This will set the field type of the ID to be Text. -- --
-- Id Text sql=foo_id ---- -- This will set the field type of the Id to be Text and the SQL -- DB name to be foo_id. -- --
-- Id FooId ---- -- This results in a shared primary key - the FooId refers to a -- Foo table. -- --
-- Id FooId OnDelete Cascade ---- -- You can set a cascade behavior on an ID column. data UnboundIdDef UnboundIdDef :: EntityNameHS -> !FieldNameDB -> [FieldAttr] -> FieldCascade -> Maybe FieldType -> UnboundIdDef [unboundIdEntityName] :: UnboundIdDef -> EntityNameHS [unboundIdDBName] :: UnboundIdDef -> !FieldNameDB [unboundIdAttrs] :: UnboundIdDef -> [FieldAttr] [unboundIdCascade] :: UnboundIdDef -> FieldCascade [unboundIdType] :: UnboundIdDef -> Maybe FieldType -- | Forget information about a FieldDef so it can be used as an -- UnboundFieldDef. unbindFieldDef :: FieldDef -> UnboundFieldDef isUnboundFieldNullable :: UnboundFieldDef -> IsNullable -- | Convert an UnboundIdDef into a FieldDef suitable for use -- in the EntityIdField constructor. unboundIdDefToFieldDef :: FieldNameDB -> EntityNameHS -> UnboundIdDef -> FieldDef -- | The specification for how an entity's primary key should be formed. -- -- Persistent requires that every table have a primary key. By default, -- an implied ID is assigned, based on the mpsImplicitIdDef -- field on MkPersistSettings. Because we can't access that type -- at parse-time, we defer that decision until later. data PrimarySpec -- | A NaturalKey contains columns that are defined on the datatype -- itself. This is defined using the Primary keyword and given a -- non-empty list of columns. -- --
-- User -- name Text -- email Text -- -- Primary name email ---- -- A natural key may also contain only a single column. A natural key -- with multiple columns is called a 'composite key'. NaturalKey :: UnboundCompositeDef -> PrimarySpec -- | A surrogate key is not part of the domain model for a database table. -- You can specify a custom surrogate key using the Id syntax. -- --
-- User -- Id Text -- name Text ---- -- Note that you must provide a default= expression when using -- this in order to use insert or related functions. The -- insertKey function can be used instead, as it allows you to -- specify a key directly. Fixing this issue is tracked in #1247 on -- GitHub. SurrogateKey :: UnboundIdDef -> PrimarySpec -- | The default key for the entity using the settings in -- MkPersistSettings. -- -- This is implicit - a table without an Id or Primary -- declaration will have a DefaultKey. DefaultKey :: FieldNameDB -> PrimarySpec -- | Creates a default ID field. mkAutoIdField' :: FieldNameDB -> EntityNameHS -> SqlType -> FieldDef -- | A list of fields present on the foreign reference. data UnboundForeignFieldList -- | If no References keyword is supplied, then it is assumed that -- you are referring to the Primary key or Id of the -- target entity. FieldListImpliedId :: NonEmpty FieldNameHS -> UnboundForeignFieldList -- | You can specify the exact columns you're referring to here, if they -- aren't part of a primary key. Most databases expect a unique index on -- the columns you refer to, but Persistent doesn't check that. -- --
-- User -- Id UUID default="uuid_generate_v1mc()" -- name Text -- -- UniqueName name -- -- Dog -- ownerName Text -- -- Foreign User fk_dog_user ownerName References name --FieldListHasReferences :: NonEmpty ForeignFieldReference -> UnboundForeignFieldList -- | A pairing of the FieldNameHS for the source table to the -- FieldNameHS for the target table. data ForeignFieldReference ForeignFieldReference :: FieldNameHS -> FieldNameHS -> ForeignFieldReference -- | The column on the source table. [ffrSourceField] :: ForeignFieldReference -> FieldNameHS -- | The column on the target table. [ffrTargetField] :: ForeignFieldReference -> FieldNameHS -- | Convert an EntityNameHS into FieldType that will get -- parsed into the ID type for the entity. -- --
-- >>> mkKeyConType (EntityNameHS "Hello) -- FTTypeCon Nothing HelloId --mkKeyConType :: EntityNameHS -> FieldType -- | Returns True if the UnboundFieldDef does not have a -- MigrationOnly or SafeToRemove flag from the -- QuasiQuoter. isHaskellUnboundField :: UnboundFieldDef -> Bool data FieldTypeLit IntTypeLit :: Integer -> FieldTypeLit TextTypeLit :: Text -> FieldTypeLit instance GHC.Show.Show a => GHC.Show.Show (Database.Persist.Quasi.Internal.ParseState a) instance GHC.Classes.Eq Database.Persist.Quasi.Internal.Token instance GHC.Show.Show Database.Persist.Quasi.Internal.Token instance GHC.Show.Show Database.Persist.Quasi.Internal.Line instance GHC.Classes.Eq Database.Persist.Quasi.Internal.Line instance GHC.Show.Show Database.Persist.Quasi.Internal.LinesWithComments instance GHC.Classes.Eq Database.Persist.Quasi.Internal.LinesWithComments instance Language.Haskell.TH.Syntax.Lift Database.Persist.Quasi.Internal.UnboundFieldDef instance GHC.Show.Show Database.Persist.Quasi.Internal.UnboundFieldDef instance GHC.Classes.Ord Database.Persist.Quasi.Internal.UnboundFieldDef instance GHC.Classes.Eq Database.Persist.Quasi.Internal.UnboundFieldDef instance Language.Haskell.TH.Syntax.Lift Database.Persist.Quasi.Internal.UnboundIdDef instance GHC.Show.Show Database.Persist.Quasi.Internal.UnboundIdDef instance GHC.Classes.Ord Database.Persist.Quasi.Internal.UnboundIdDef instance GHC.Classes.Eq Database.Persist.Quasi.Internal.UnboundIdDef instance Language.Haskell.TH.Syntax.Lift Database.Persist.Quasi.Internal.UnboundCompositeDef instance GHC.Show.Show Database.Persist.Quasi.Internal.UnboundCompositeDef instance GHC.Classes.Ord Database.Persist.Quasi.Internal.UnboundCompositeDef instance GHC.Classes.Eq Database.Persist.Quasi.Internal.UnboundCompositeDef instance Language.Haskell.TH.Syntax.Lift Database.Persist.Quasi.Internal.PrimarySpec instance GHC.Show.Show Database.Persist.Quasi.Internal.PrimarySpec instance GHC.Classes.Ord Database.Persist.Quasi.Internal.PrimarySpec instance GHC.Classes.Eq Database.Persist.Quasi.Internal.PrimarySpec instance Language.Haskell.TH.Syntax.Lift Database.Persist.Quasi.Internal.ForeignFieldReference instance GHC.Show.Show Database.Persist.Quasi.Internal.ForeignFieldReference instance GHC.Classes.Ord Database.Persist.Quasi.Internal.ForeignFieldReference instance GHC.Classes.Eq Database.Persist.Quasi.Internal.ForeignFieldReference instance Language.Haskell.TH.Syntax.Lift Database.Persist.Quasi.Internal.UnboundForeignFieldList instance GHC.Show.Show Database.Persist.Quasi.Internal.UnboundForeignFieldList instance GHC.Classes.Ord Database.Persist.Quasi.Internal.UnboundForeignFieldList instance GHC.Classes.Eq Database.Persist.Quasi.Internal.UnboundForeignFieldList instance Language.Haskell.TH.Syntax.Lift Database.Persist.Quasi.Internal.UnboundForeignDef instance GHC.Show.Show Database.Persist.Quasi.Internal.UnboundForeignDef instance GHC.Classes.Ord Database.Persist.Quasi.Internal.UnboundForeignDef instance GHC.Classes.Eq Database.Persist.Quasi.Internal.UnboundForeignDef instance Language.Haskell.TH.Syntax.Lift Database.Persist.Quasi.Internal.UnboundEntityDef instance GHC.Show.Show Database.Persist.Quasi.Internal.UnboundEntityDef instance GHC.Classes.Ord Database.Persist.Quasi.Internal.UnboundEntityDef instance GHC.Classes.Eq Database.Persist.Quasi.Internal.UnboundEntityDef instance GHC.Base.Semigroup Database.Persist.Quasi.Internal.EntityConstraintDefs instance GHC.Base.Monoid Database.Persist.Quasi.Internal.EntityConstraintDefs instance GHC.Base.Semigroup 
(Database.Persist.Quasi.Internal.SetOnceAtMost a) instance GHC.Base.Monoid (Database.Persist.Quasi.Internal.SetOnceAtMost a) instance GHC.Base.Semigroup Database.Persist.Quasi.Internal.LinesWithComments -- | This module defines the Persistent entity syntax used in the -- quasiquoter to generate persistent entities. -- -- The basic structure of the syntax looks like this: -- --
-- TableName -- fieldName FieldType -- otherField String -- nullableField Int Maybe ---- -- You start an entity definition with the table name, in this case, -- TableName. It's followed by a list of fields on the entity, -- which have the basic form fieldName FieldType. You can -- indicate that a field is nullable with Maybe at the end of the -- type. -- -- persistent automatically generates an ID column for you, if -- you don't specify one, so the above table definition corresponds to -- the following SQL: -- --
-- CREATE TABLE table_name ( -- id SERIAL PRIMARY KEY, -- field_name field_type NOT NULL, -- other_field varchar NOT NULL, -- nullable_field int NULL -- ); ---- -- Note that the exact SQL that is generated can be customized using the -- PersistSettings that are passed to the parse function. -- -- It generates a Haskell datatype with the following form: -- --
-- data TableName = TableName
-- { tableNameFieldName :: FieldType
-- , tableNameOtherField :: String
-- , tableNameNullableField :: Maybe Int
-- }
--
--
-- As with the SQL generated, the specifics of this are customizable. See
-- the Database.Persist.TH module for details.
--
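-- As a rough sketch (assuming the Template Haskell helpers share,
-- mkPersist, sqlSettings, mkMigrate and persistLowerCase from
-- Database.Persist.TH, and concrete field types in place of the
-- placeholders above), such a block is usually spliced like this:
--
-- share [mkPersist sqlSettings, mkMigrate "migrateAll"] [persistLowerCase|
-- TableName
--     fieldName Int
--     otherField String
--     nullableField Int Maybe
-- |]
--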
-- -- User -- name String -- age Int -- deriving Eq Show -- deriving Ord ---- --
-- User -- name String -- age Int -- -- UniqueUserName name ---- -- This will put a unique index on the user table and the -- name field. -- --
-- User -- name Text -- admin Bool default=false ---- -- This creates a SQL definition like this: -- --
-- CREATE TABLE user ( -- id SERIAL PRIMARY KEY, -- name VARCHAR NOT NULL, -- admin BOOL DEFAULT=false -- ); ---- -- A restriction here is that you still need to provide a value when -- performing an insert, because the generated Haskell type has -- the form: -- --
-- data User = User
-- { userName :: Text
-- , userAdmin :: Bool
-- }
--
--
-- You can work around this by using a 'Maybe Bool' and supplying
-- Nothing by default.
--
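-- A sketch of that work-around (schema and names assumed; insert_ is
-- documented later in this file):
--
-- User
--     name Text
--     admin Bool Maybe default=false
--
-- The generated record field is then userAdmin :: Maybe Bool, so call
-- sites can pass Nothing rather than choosing a value:
--
-- insert_ (User "alice" Nothing)
--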
-- Note: Persistent determines whether or not to migrate a
-- column's default value by comparing the exact string found in your
-- models file with the one returned by the database. If a
-- database canonicalizes the SQL FALSE from your
-- models file to false in the database, Persistent
-- will think the default value needs to be migrated and attempt a
-- migration each time you start your app.
--
-- To work around this, find the exact SQL your DBMS uses for the default
-- value. For example, using postgres:
--
-- -- psql database_name # Open postgres -- -- \d+ table_name -- describe the table schema ---- --
-- ... -- created | timestamp without time zone | not null default now() ---- -- Then use the listed default value SQL inside your models -- file. -- --
-- User -- Id Text -- name Text -- age Int ---- -- If you do this, it's a good idea to set a default for the ID. -- Otherwise, you will need to use insertKey instead of -- insert when performing inserts. -- --
-- insertKey (UserKey "Hello world!") (User "Bob" 32) ---- -- If you attempt to do insert (User "Bob" 32), -- then you will receive a runtime error because the SQL database doesn't -- know how to make an ID for you anymore. So instead just use a default -- expression, like this: -- --
-- User -- Id Text default=generate_user_id() -- name Text -- age Int ---- --
-- Email -- firstPart Text -- secondPart Text -- -- Primary firstPart secondPart ---- -- This creates a table with the following form: -- --
-- CREATE TABLE email ( -- first_part varchar, -- second_part varchar, -- -- PRIMARY KEY (first_part, second_part) ---- -- Since the primary key for this table is part of the record, it's -- called a "natural key" in the SQL lingo. As a key with multiple -- fields, it is also a "composite key." -- -- You can specify a Primary key with a single field, too. -- --
-- User sql=big_user_table -- fullName String sql=name -- age Int ---- -- This will alter the generated SQL to be: -- --
-- CREATE TABLE big_user_table ( -- id SERIAL PRIMARY KEY, -- name VARCHAR, -- age INT -- ); ---- --
-- Person json -- name Text ---- -- Requires {-# LANGUAGE FlexibleInstances #-} -- -- Customizable by using mpsEntityJSON * -- http://hackage.haskell.org/package/persistent-template/docs/Database-Persist-TH.html#v:EntityJSON -- * -- http://hackage.haskell.org/package/persistent/docs/Database-Persist-Class.html#v:keyValueEntityToJSON -- --
-- Person sql=peoples -- name Text ---- --
-- Person -- Id sql=my_id_name -- phone Text ---- -- With a Haskell type, the corresponding type is used. Note that you'll -- need to use default= to tell it what to do on insertion. -- --
-- Person -- Id Day default=CURRENT_DATE -- phone Text ---- -- default= works for SQL databases, and is backend specific. -- For MongoDB currently one always needs to create the key on the -- application side and use insertKey. insert will not -- work correctly. Sql backends can also do this if default does not -- work. -- -- sqltype can also be used to specify a different database type -- --
-- Currency -- Id String sqltype=varchar(3) sql=code ---- -- Composite key (using multiple columns) can also be defined using -- Primary. -- -- sql= also works for setting the names of unique indexes. -- --
-- Person -- name Text -- phone Text -- UniquePersonPhone phone sql=UniqPerPhone ---- -- This makes a unique index requiring phone to be unique across -- Person rows. Ordinarily Persistent will generate a snake-case -- index name from the capitalized name provided such that -- UniquePersonPhone becomes unique_person_phone. -- However, we provided a sql= so the index name in the database -- will instead be UniqPerPhone. Keep in mind sql= and -- ! attrs must come after the list of fields in front of the -- index name in the quasi-quoter. -- --
-- TableName -- fieldName FieldType -- otherField String -- nullableField Int Maybe ---- -- Alternatively we can specify the keyword nullable: -- --
-- TableName -- fieldName FieldType -- otherField String -- nullableField Int nullable ---- -- However the difference here is in the first instance the Haskell type -- will be 'Maybe Int', but in the second it will be Int. Be aware -- that this will cause runtime errors if the database returns -- NULL and the PersistField instance does not handle -- PersistNull. -- -- If you wish to define your Maybe types in a way that is similar to the -- actual Haskell definition, you can define 'Maybe Int' like so: -- --
-- TableName -- fieldName FieldType -- otherField String -- nullableField (Maybe Int) ---- -- However, note that the field _must_ be enclosed in parentheses. -- --
-- User -- username Text sqltype=varchar(255) ---- --
-- User !funny -- field String !sad -- good Dog !sogood ---- -- We can see the attributes using the entityAttrs field and the -- fieldAttrs field. -- --
-- userAttrs = do -- let userDefinition = entityDef (Proxy :: Proxy User) -- let userAttributes = entityAttrs userDefinition -- let fieldAttributes = map fieldAttrs (entityFields userDefinition) -- print userAttributes -- -- ["funny"] -- print fieldAttributes -- -- [["sad"],["sogood"]] ---- --
-- Person -- name Text -- age Int -- unusedField ByteString Maybe MigrationOnly ---- -- Note that you almost certainly want to either mark the field as -- Maybe or provide a default value, otherwise insertions will -- fail. -- --
-- Person -- name Text -- -- Dog -- name Text -- owner PersonId ---- -- This automatically creates a foreign key reference from Dog -- to Person. The foreign key constraint means that, if you have -- a PersonId on the Dog, the database guarantees that -- the corresponding Person exists in the database. If you try -- to delete a Person out of the database that has a -- Dog, you'll receive an exception that a foreign key violation -- has occurred. -- --
-- VeryLongTableName -- name Text -- -- AnotherVeryLongTableName -- veryLongTableNameId VeryLongTableNameId constraint=short_foreign_key ---- --
-- Record -- -- If the referred Foo is deleted or updated, then this record will -- -- also be deleted or updated. -- fooId FooId OnDeleteCascade OnUpdateCascade -- -- -- If the referred Bar is deleted, then we'll set the reference to -- -- Nothing. If the referred Bar is updated, then we'll cascade the -- -- update. -- barId BarId Maybe OnDeleteSetNull OnUpdateCascade -- -- -- If the referred Baz is deleted, then we set to the default ID. -- bazId BazId OnDeleteSetDefault default=1 ---- -- Let's demonstrate this with a shopping cart example. -- --
-- User -- name Text -- -- Cart -- user UserId Maybe -- -- CartItem -- cartId CartId -- itemId ItemId -- -- Item -- name Text -- price Int ---- -- Let's consider how we want to handle deletions and updates. If a -- User is deleted or update, then we want to cascade the action -- to the associated Cart. -- --
-- Cart -- user UserId Maybe OnDeleteCascade OnUpdateCascade ---- -- If an Item is deleted, then we want to set the -- CartItem to refer to a special "deleted item" in the -- database. If a Cart is deleted, though, then we just want to -- delete the CartItem. -- --
-- CartItem -- cartId CartId OnDeleteCascade -- itemId ItemId OnDeleteSetDefault default=1 ---- --
-- Foreign $(TargetEntity) [$(cascade-actions)] $(constraint-name) $(columns) [ $(references) ] -- -- columns := column0 [column1 column2 .. columnX] -- references := References $(target-columns) -- target-columns := target-column0 [target-column1 target-columns2 .. target-columnX] ---- -- Columns are the columns as defined on this entity. -- target-columns are the columns as defined on the target -- entity. -- -- Let's look at some examples. -- --
-- Email -- firstPart Text -- secondPart Text -- Primary firstPart secondPart -- -- User -- name Text -- emailFirstPart Text -- emailSecondPart Text -- -- Foreign Email fk_user_email emailFirstPart emailSecondPart ---- -- If you omit the References keyword, then it assumes that the -- foreign key reference is for the target table's primary key. If we -- wanted to be fully redundant, we could specify the References -- keyword. -- --
-- Foreign Email fk_user_email emailFirstPart emailSecondPart References firstPart secondPart ---- -- We can specify delete/cascade behavior directly after the target -- table. -- --
-- Foreign Email OnDeleteCascade OnUpdateCascade fk_user_email emailFirstPart emailSecondPart ---- -- Now, if the email is deleted or updated, the user will be deleted or -- updated to match. -- --
-- User -- name Text -- email Text -- -- UniqueEmail email -- -- Notification -- content Text -- sentTo Text -- -- Foreign User fk_noti_user sentTo References email ---- -- If the target uniqueness constraint has multiple columns, then you -- must specify them independently. -- --
-- User -- name Text -- emailFirst Text -- emailSecond Text -- -- UniqueEmail emailFirst emailSecond -- -- Notification -- content Text -- sentToFirst Text -- sentToSecond Text -- -- Foreign User fk_noti_user sentToFirst sentToSecond References emailFirst emailSecond ---- --
-- -- | I am a doc comment for a User. Users are important -- -- | to the application, and should be treasured. -- User -- -- | Users have names. Call them by names. -- name String -- -- | A user can be old, or young, and we care about -- -- | this for some reason. -- age Int ---- -- The documentation is present on the entityComments field on -- the EntityDef for the entity: -- --
-- >>> let userDefinition = entityDef (Proxy :: Proxy User) -- >>> entityComments userDefinition -- "I am a doc comment for a User. Users are important\nto the application, and should be treasured.\n" ---- -- Likewise, the field documentation is present in the -- fieldComments field on the FieldDef present in the -- EntityDef: -- --
-- >>> let userFields = entityFields userDefinition -- >>> let comments = map fieldComments userFields -- >>> mapM_ putStrLn comments -- "Users have names. Call them by names." -- "A user can be old, or young, and we care about\nthis for some reason." ---- -- Unfortunately, we can't use this to create Haddocks for you, because -- Template Haskell does not support Haddock yet. -- persistent backends *can* use this to generate SQL -- COMMENTs, which are useful from a database perspective, and -- you can use the @persistent-documentation@ library to render a -- Markdown document of the entity definitions. -- --
-- -- @Employment.hs -- -- module Employment where -- -- import Database.Persist.TH -- import Prelude -- -- data Employment = Employed | Unemployed | Retired -- deriving (Show, Read, Eq) -- derivePersistField "Employment" ---- -- derivePersistField stores sum type values as strings in the -- database. While not as efficient as using integers, this approach -- simplifies adding and removing values from your enumeration. -- -- Due to the GHC Stage Restriction, the call to the Template Haskell -- function derivePersistField must be in a separate module from where -- the generated code is used. -- -- Note: If you created a new module, make sure to add it to the -- exposed-modules section of your Cabal file. -- -- Use the module by importing it into your Model.hs file: -- --
-- -- @Model.hs -- import Employment ---- -- and use it in the models DSL: -- --
-- Person -- employment Employment ---- -- You can export the Employment module from Import to use it across your -- app: -- --
-- -- @Import.hs -- import Employment as Import ---- --
-- share [mkPersist persistSettings, mkMigrate "sumTypeMigrate"] [persistLowerCase| -- Bicycle -- brand T.Text -- Car -- make T.Text -- model T.Text -- +Vehicle -- bicycle BicycleId -- car CarId -- |] ---- -- Let's check out the definition of the Haskell type Vehicle. -- Using ghci, we can query for :info Vehicle: -- --
-- >>> :i Vehicle -- type Vehicle = VehicleGeneric SqlBackend -- -- Defined at .../Projects/persistent/persistent-test/src/SumTypeTest.hs:26:1 ---- --
-- >>> :i VehicleGeneric -- type role VehicleGeneric nominal -- data VehicleGeneric backend -- = VehicleBicycleSum (Key (BicycleGeneric backend)) -- | VehicleCarSum (Key (CarGeneric backend)) -- -- Defined at .../persistent/persistent-test/src/SumTypeTest.hs:26:1 -- -- lots of instances follow... ---- -- A VehicleGeneric has two constructors: -- --
-- CREATE TABLE "bicycle" ( -- "id" INTEGER PRIMARY KEY, -- "brand" VARCHAR NOT NULL -- ); -- -- CREATE TABLE "car"( -- "id" INTEGER PRIMARY KEY, -- "make" VARCHAR NOT NULL, -- "model" VARCHAR NOT NULL -- ); -- -- CREATE TABLE "vehicle"( -- "id" INTEGER PRIMARY KEY, -- "bicycle" INTEGER NULL REFERENCES "bicycle", -- "car" INTEGER NULL REFERENCES "car" -- ); ---- -- The vehicle table contains a nullable foreign key reference -- to both the bicycle and the car tables. -- -- A SQL query that grabs all the vehicles from the database looks like -- this (note the ?? is for the persistent raw SQL -- query functions): -- --
-- SELECT ??, ??, ?? -- FROM vehicle -- LEFT JOIN car -- ON vehicle.car = car.id -- LEFT JOIN bicycle -- ON vehicle.bicycle = bicycle.id ---- -- If we use the above query with rawSql, we'd get the following -- result: -- --
-- getVehicles -- :: SqlPersistM -- [ ( Entity Vehicle -- , Maybe (Entity Bicycle) -- , Maybe (Entity Car) -- ) -- ] ---- -- This result has some post-conditions that are not guaranteed by the -- types *or* the schema. The constructor for Entity Vehicle is -- going to determine which of the other members of the tuple is -- Nothing. We can convert this to a friendlier domain model -- like this: -- --
-- data Vehicle' -- = Car' Text Text -- | Bike Text -- -- check = do -- result <- getVehicles -- pure (map convert result) -- -- convert -- :: (Entity Vehicle, Maybe (Entity Bicycle), Maybe (Entity Car)) -- -> Vehicle' -- convert (Entity _ (VehicleBicycleSum _), Just (Entity _ (Bicycle brand)), _) = -- Bike brand -- convert (Entity _ (VehicleCarSum _), _, Just (Entity _ (Car make model))) = -- Car' make model -- convert _ = -- error "The database preconditions have been violated!" ---- --
-- getSpj :: MonadIO m => ReaderT SqlBackend m (Maybe User) -- getSpj = get spjId ---- --
-- mspj <- getSpj ---- -- The above query when applied on dataset-1, will get this: -- --
-- +------+-----+ -- | name | age | -- +------+-----+ -- | SPJ | 40 | -- +------+-----+ --get :: forall record m. (PersistStoreRead backend, MonadIO m, PersistRecordBackend record backend) => Key record -> ReaderT backend m (Maybe record) -- | Get many records by their respective identifiers, if available. -- --
-- getUsers :: MonadIO m => ReaderT SqlBackend m (Map (Key User) User) -- getUsers = getMany allkeys ---- --
-- musers <- getUsers ---- -- The above query when applied on dataset-1, will get these -- records: -- --
-- +----+-------+-----+ -- | id | name | age | -- +----+-------+-----+ -- | 1 | SPJ | 40 | -- +----+-------+-----+ -- | 2 | Simon | 41 | -- +----+-------+-----+ --getMany :: forall record m. (PersistStoreRead backend, MonadIO m, PersistRecordBackend record backend) => [Key record] -> ReaderT backend m (Map (Key record) record) class (Show (BackendKey backend), Read (BackendKey backend), Eq (BackendKey backend), Ord (BackendKey backend), PersistStoreRead backend, PersistField (BackendKey backend), ToJSON (BackendKey backend), FromJSON (BackendKey backend)) => PersistStoreWrite backend -- | Create a new record in the database, returning an automatically -- created key (in SQL an auto-increment id). -- --
-- insertJohn :: MonadIO m => ReaderT SqlBackend m (Key User) -- insertJohn = insert $ User "John" 30 ---- --
-- johnId <- insertJohn ---- -- The above query when applied on dataset-1, will produce this: -- --
-- +-----+------+-----+ -- |id |name |age | -- +-----+------+-----+ -- |1 |SPJ |40 | -- +-----+------+-----+ -- |2 |Simon |41 | -- +-----+------+-----+ -- |3 |John |30 | -- +-----+------+-----+ --insert :: forall record m. (PersistStoreWrite backend, MonadIO m, PersistRecordBackend record backend, SafeToInsert record) => record -> ReaderT backend m (Key record) -- | Same as insert, but doesn't return a Key. -- --
-- insertJohn :: MonadIO m => ReaderT SqlBackend m () -- insertJohn = insert_ $ User "John" 30 ---- -- The above query when applied on dataset-1, will produce this: -- --
-- +-----+------+-----+ -- |id |name |age | -- +-----+------+-----+ -- |1 |SPJ |40 | -- +-----+------+-----+ -- |2 |Simon |41 | -- +-----+------+-----+ -- |3 |John |30 | -- +-----+------+-----+ --insert_ :: forall record m. (PersistStoreWrite backend, MonadIO m, PersistRecordBackend record backend, SafeToInsert record) => record -> ReaderT backend m () -- | Create multiple records in the database and return their Keys. -- -- If you don't need the inserted Keys, use insertMany_. -- -- The MongoDB and PostgreSQL backends insert all records and retrieve -- their keys in one database query. -- -- The SQLite and MySQL backends use the slow, default implementation of -- mapM insert. -- --
-- insertUsers :: MonadIO m => ReaderT SqlBackend m [Key User] -- insertUsers = insertMany [User "John" 30, User "Nick" 32, User "Jane" 20] ---- --
-- userIds <- insertUsers ---- -- The above query when applied on dataset-1, will produce this: -- --
-- +-----+------+-----+ -- |id |name |age | -- +-----+------+-----+ -- |1 |SPJ |40 | -- +-----+------+-----+ -- |2 |Simon |41 | -- +-----+------+-----+ -- |3 |John |30 | -- +-----+------+-----+ -- |4 |Nick |32 | -- +-----+------+-----+ -- |5 |Jane |20 | -- +-----+------+-----+ --insertMany :: forall record m. (PersistStoreWrite backend, MonadIO m, PersistRecordBackend record backend, SafeToInsert record) => [record] -> ReaderT backend m [Key record] -- | Same as insertMany, but doesn't return any Keys. -- -- The MongoDB, PostgreSQL, SQLite and MySQL backends insert all records -- in one database query. -- --
-- insertUsers_ :: MonadIO m => ReaderT SqlBackend m () -- insertUsers_ = insertMany_ [User "John" 30, User "Nick" 32, User "Jane" 20] ---- -- The above query when applied on dataset-1, will produce this: -- --
-- +-----+------+-----+ -- |id |name |age | -- +-----+------+-----+ -- |1 |SPJ |40 | -- +-----+------+-----+ -- |2 |Simon |41 | -- +-----+------+-----+ -- |3 |John |30 | -- +-----+------+-----+ -- |4 |Nick |32 | -- +-----+------+-----+ -- |5 |Jane |20 | -- +-----+------+-----+ --insertMany_ :: forall record m. (PersistStoreWrite backend, MonadIO m, PersistRecordBackend record backend, SafeToInsert record) => [record] -> ReaderT backend m () -- | Same as insertMany_, but takes an Entity instead of just -- a record. -- -- Useful when migrating data from one entity to another and want to -- preserve ids. -- -- The MongoDB, PostgreSQL, SQLite and MySQL backends insert all records -- in one database query. -- --
-- insertUserEntityMany :: MonadIO m => ReaderT SqlBackend m () -- insertUserEntityMany = insertEntityMany [SnakeEntity, EvaEntity] ---- -- The above query when applied on dataset-1, will produce this: -- --
-- +-----+------+-----+ -- |id |name |age | -- +-----+------+-----+ -- |1 |SPJ |40 | -- +-----+------+-----+ -- |2 |Simon |41 | -- +-----+------+-----+ -- |3 |Snake |38 | -- +-----+------+-----+ -- |4 |Eva |38 | -- +-----+------+-----+ --insertEntityMany :: forall record m. (PersistStoreWrite backend, MonadIO m, PersistRecordBackend record backend) => [Entity record] -> ReaderT backend m () -- | Create a new record in the database using the given key. -- --
-- insertAliceKey :: MonadIO m => Key User -> ReaderT SqlBackend m () -- insertAliceKey key = insertKey key $ User "Alice" 20 ---- --
-- insertAliceKey $ UserKey {unUserKey = SqlBackendKey {unSqlBackendKey = 3}}
--
--
-- The above query when applied on dataset-1, will produce this:
--
-- -- +-----+------+-----+ -- |id |name |age | -- +-----+------+-----+ -- |1 |SPJ |40 | -- +-----+------+-----+ -- |2 |Simon |41 | -- +-----+------+-----+ -- |3 |Alice |20 | -- +-----+------+-----+ --insertKey :: forall record m. (PersistStoreWrite backend, MonadIO m, PersistRecordBackend record backend) => Key record -> record -> ReaderT backend m () -- | Put the record in the database with the given key. Unlike -- replace, if a record with the given key does not exist then a -- new record will be inserted. -- --
-- insertPhilip :: MonadIO m => ReaderT SqlBackend m (Key User) -- insertPhilip = insert $ User "Philip" 42 ---- --
-- philipId <- insertPhilip ---- -- This query will produce: -- --
-- +-----+------+-----+ -- |id |name |age | -- +-----+------+-----+ -- |1 |SPJ |40 | -- +-----+------+-----+ -- |2 |Simon |41 | -- +-----+------+-----+ -- |3 |Philip|42 | -- +-----+------+-----+ ---- --
-- repsertHaskell :: MonadIO m => Key User -> ReaderT SqlBackend m () -- repsertHaskell id = repsert id $ User "Haskell" 81 ---- --
-- repsertHaskell philipId ---- -- This query will replace Philip's record with Haskell's one: -- --
-- +-----+-----------------+--------+ -- |id |name |age | -- +-----+-----------------+--------+ -- |1 |SPJ |40 | -- +-----+-----------------+--------+ -- |2 |Simon |41 | -- +-----+-----------------+--------+ -- |3 |Philip -> Haskell|42 -> 81| -- +-----+-----------------+--------+ ---- -- repsert inserts the given record if the key doesn't exist. -- --
-- repsertXToUnknown :: MonadIO m => ReaderT SqlBackend m () -- repsertXToUnknown = repsert unknownId $ User "X" 999 ---- -- For example, applying the above query to dataset-1 will produce -- this: -- --
-- +-----+------+-----+ -- |id |name |age | -- +-----+------+-----+ -- |1 |SPJ |40 | -- +-----+------+-----+ -- |2 |Simon |41 | -- +-----+------+-----+ -- |3 |X |999 | -- +-----+------+-----+ --repsert :: forall record m. (PersistStoreWrite backend, MonadIO m, PersistRecordBackend record backend) => Key record -> record -> ReaderT backend m () -- | Put many entities into the database. -- -- Batch version of repsert for SQL backends. -- -- Useful when migrating data from one entity to another and want to -- preserve ids. -- --
-- repsertManyUsers :: MonadIO m => ReaderT SqlBackend m () -- repsertManyUsers = repsertMany [(simonId, User "Philip" 20), (unknownId999, User "Mr. X" 999)] ---- --
-- +-----+----------------+---------+ -- |id |name |age | -- +-----+----------------+---------+ -- |1 |SPJ |40 | -- +-----+----------------+---------+ -- |2 |Simon -> Philip |41 -> 20 | -- +-----+----------------+---------+ -- |999 |Mr. X |999 | -- +-----+----------------+---------+ --repsertMany :: forall record m. (PersistStoreWrite backend, MonadIO m, PersistRecordBackend record backend) => [(Key record, record)] -> ReaderT backend m () -- | Replace the record in the database with the given key. Note that the -- result is undefined if such record does not exist, so you must use -- insertKey or repsert in these cases. -- --
-- replaceSpj :: MonadIO m => User -> ReaderT SqlBackend m () -- replaceSpj record = replace spjId record ---- -- The above query when applied on dataset-1, will produce this: -- --
-- +-----+------+-----+ -- |id |name |age | -- +-----+------+-----+ -- |1 |Mike |45 | -- +-----+------+-----+ -- |2 |Simon |41 | -- +-----+------+-----+ --replace :: forall record m. (PersistStoreWrite backend, MonadIO m, PersistRecordBackend record backend) => Key record -> record -> ReaderT backend m () -- | Delete a specific record by identifier. Does nothing if record does -- not exist. -- --
-- deleteSpj :: MonadIO m => ReaderT SqlBackend m () -- deleteSpj = delete spjId ---- -- The above query when applied on dataset-1, will produce this: -- --
-- +-----+------+-----+ -- |id |name |age | -- +-----+------+-----+ -- |2 |Simon |41 | -- +-----+------+-----+ --delete :: forall record m. (PersistStoreWrite backend, MonadIO m, PersistRecordBackend record backend) => Key record -> ReaderT backend m () -- | Update individual fields on a specific record. -- --
-- updateSpj :: MonadIO m => [Update User] -> ReaderT SqlBackend m () -- updateSpj updates = update spjId updates ---- --
-- updateSpj [UserAge +=. 100] ---- -- The above query when applied on dataset-1, will produce this: -- --
-- +-----+------+-----+ -- |id |name |age | -- +-----+------+-----+ -- |1 |SPJ |140 | -- +-----+------+-----+ -- |2 |Simon |41 | -- +-----+------+-----+ --update :: forall record m. (PersistStoreWrite backend, MonadIO m, PersistRecordBackend record backend) => Key record -> [Update record] -> ReaderT backend m () -- | Update individual fields on a specific record, and retrieve the -- updated value from the database. -- -- Note that this function will throw an exception if the given key is -- not found in the database. -- --
-- updateGetSpj :: MonadIO m => [Update User] -> ReaderT SqlBackend m User -- updateGetSpj updates = updateGet spjId updates ---- --
-- spj <- updateGetSpj [UserAge +=. 100] ---- -- The above query when applied on dataset-1, will produce this: -- --
-- +-----+------+-----+ -- |id |name |age | -- +-----+------+-----+ -- |1 |SPJ |140 | -- +-----+------+-----+ -- |2 |Simon |41 | -- +-----+------+-----+ --updateGet :: forall record m. (PersistStoreWrite backend, MonadIO m, PersistRecordBackend record backend) => Key record -> [Update record] -> ReaderT backend m record -- | Like get, but returns the complete Entity. -- --
-- getSpjEntity :: MonadIO m => ReaderT SqlBackend m (Maybe (Entity User)) -- getSpjEntity = getEntity spjId ---- --
-- mSpjEnt <- getSpjEntity ---- -- The above query when applied on dataset-1, will get this -- entity: -- --
-- +----+------+-----+ -- | id | name | age | -- +----+------+-----+ -- | 1 | SPJ | 40 | -- +----+------+-----+ --getEntity :: forall e backend m. (PersistStoreRead backend, PersistRecordBackend e backend, MonadIO m) => Key e -> ReaderT backend m (Maybe (Entity e)) -- | Same as get, but for a non-null (not Maybe) foreign key. Unsafe -- unless your database is enforcing that the foreign key is valid. -- --
-- getJustSpj :: MonadIO m => ReaderT SqlBackend m User -- getJustSpj = getJust spjId ---- --
-- spj <- getJust spjId ---- -- The above query when applied on dataset-1, will get this -- record: -- --
-- +----+------+-----+ -- | id | name | age | -- +----+------+-----+ -- | 1 | SPJ | 40 | -- +----+------+-----+ ---- --
-- getJustUnknown :: MonadIO m => ReaderT SqlBackend m User -- getJustUnknown = getJust unknownId ---- -- mrx <- getJustUnknown -- -- This just throws an error. getJust :: forall record backend m. (PersistStoreRead backend, PersistRecordBackend record backend, MonadIO m) => Key record -> ReaderT backend m record -- | Same as getJust, but returns an Entity instead of just -- the record. -- --
-- getJustEntitySpj :: MonadIO m => ReaderT SqlBackend m (Entity User) -- getJustEntitySpj = getJustEntity spjId ---- --
-- spjEnt <- getJustEntitySpj ---- -- The above query when applied on dataset-1, will get this -- entity: -- --
-- +----+------+-----+ -- | id | name | age | -- +----+------+-----+ -- | 1 | SPJ | 40 | -- +----+------+-----+ --getJustEntity :: forall record backend m. (PersistEntityBackend record ~ BaseBackend backend, MonadIO m, PersistEntity record, PersistStoreRead backend) => Key record -> ReaderT backend m (Entity record) -- | Curry this to make a convenience function that loads an associated -- model. -- --
-- foreign = belongsTo foreignId --belongsTo :: forall ent1 ent2 backend m. (PersistStoreRead backend, PersistEntity ent1, PersistRecordBackend ent2 backend, MonadIO m) => (ent1 -> Maybe (Key ent2)) -> ent1 -> ReaderT backend m (Maybe ent2) -- | Same as belongsTo, but uses getJust and therefore is -- similarly unsafe. belongsToJust :: forall ent1 ent2 backend m. (PersistStoreRead backend, PersistEntity ent1, PersistRecordBackend ent2 backend, MonadIO m) => (ent1 -> Key ent2) -> ent1 -> ReaderT backend m ent2 -- | Like insert, but returns the complete Entity. -- --
-- insertHaskellEntity :: MonadIO m => ReaderT SqlBackend m (Entity User) -- insertHaskellEntity = insertEntity $ User "Haskell" 81 ---- --
-- haskellEnt <- insertHaskellEntity ---- -- The above query when applied on dataset-1, will produce this: -- --
-- +----+---------+-----+ -- | id | name | age | -- +----+---------+-----+ -- | 1 | SPJ | 40 | -- +----+---------+-----+ -- | 2 | Simon | 41 | -- +----+---------+-----+ -- | 3 | Haskell | 81 | -- +----+---------+-----+ --insertEntity :: forall e backend m. (PersistStoreWrite backend, PersistRecordBackend e backend, SafeToInsert e, MonadIO m, HasCallStack) => e -> ReaderT backend m (Entity e) -- | Like insertEntity but just returns the record instead of -- Entity. -- --
-- insertDaveRecord :: MonadIO m => ReaderT SqlBackend m User -- insertDaveRecord = insertRecord $ User "Dave" 50 ---- --
-- dave <- insertDaveRecord ---- -- The above query when applied on dataset-1, will produce this: -- --
-- +-----+------+-----+ -- |id   |name  |age  | -- +-----+------+-----+ -- |1    |SPJ   |40   | -- +-----+------+-----+ -- |2    |Simon |41   | -- +-----+------+-----+ -- |3    |Dave  |50   | -- +-----+------+-----+ --insertRecord :: forall record backend m. (PersistEntityBackend record ~ BaseBackend backend, PersistEntity record, MonadIO m, PersistStoreWrite backend, SafeToInsert record, HasCallStack) => record -> ReaderT backend m record -- | ToBackendKey converts a PersistEntity Key into a -- BackendKey. This can be used by each backend to convert between -- a Key and a plain Haskell type. For Sql, that is done with -- toSqlKey and fromSqlKey. -- -- By default, a PersistEntity uses the default BackendKey -- for its Key and is an instance of ToBackendKey. -- -- A Key that instead uses a custom type will not be an instance -- of ToBackendKey. class (PersistEntity record, PersistEntityBackend record ~ backend, PersistCore backend) => ToBackendKey backend record toBackendKey :: ToBackendKey backend record => Key record -> BackendKey backend fromBackendKey :: ToBackendKey backend record => BackendKey backend -> Key record -- | This class witnesses that two backends are compatible, and that you can -- convert from the sub backend into the sup backend. -- This is similar to the HasPersistBackend and -- IsPersistBackend classes, but where you don't want to fix the -- type associated with the PersistEntityBackend of a record. -- -- Generally speaking, where you might have: -- --
-- foo :: -- ( PersistEntity record -- , PersistEntityBackend record ~ BaseBackend backend -- , IsSqlBackend backend -- ) ---- -- this can be replaced with: -- --
-- foo :: -- ( PersistEntity record -- , PersistEntityBackend record ~ backend -- , BackendCompatible SqlBackend backend -- ) ---- -- This works for SqlReadBackend because of the instance -- BackendCompatible SqlBackend -- SqlReadBackend, without needing to go through the -- BaseBackend type family. -- -- Likewise, functions that are currently hardcoded to use -- SqlBackend can be generalized: -- --
-- -- before: -- asdf :: ReaderT SqlBackend m () -- asdf = pure () -- -- -- after: -- asdf' :: BackendCompatible SqlBackend backend => ReaderT backend m () -- asdf' = withCompatibleBackend asdf --class BackendCompatible sup sub projectBackend :: BackendCompatible sup sub => sub -> sup -- | Run a query against a compatible backend, by projecting the backend -- -- This is a helper for using queries which run against a specific -- backend type that your backend is compatible with. withCompatibleBackend :: BackendCompatible sup sub => ReaderT sup m a -> ReaderT sub m a module Database.Persist.SqlBackend.Internal -- | A SqlBackend represents a handle or connection to a database. -- It contains functions and values that allow databases to have more -- optimized implementations, as well as references that benefit -- performance and sharing. -- -- Instead of using the SqlBackend constructor directly, use the -- mkSqlBackend function. -- -- A SqlBackend is *not* thread-safe. You should not assume that a -- SqlBackend can be shared among threads and run concurrent -- queries. This *will* result in problems. Instead, you should create a -- Pool SqlBackend, known as a -- ConnectionPool, and pass that around in multi-threaded -- applications. -- -- To run actions in the persistent library, you should use the -- runSqlConn function. If you're using a multithreaded -- application, use the runSqlPool function. data SqlBackend SqlBackend :: (Text -> IO Statement) -> (EntityDef -> [PersistValue] -> InsertSqlResult) -> Maybe (EntityDef -> [[PersistValue]] -> InsertSqlResult) -> Maybe (EntityDef -> NonEmpty (FieldNameHS, FieldNameDB) -> Text -> Text) -> Maybe (EntityDef -> Int -> Text) -> StatementCache -> IO () -> ([EntityDef] -> (Text -> IO Statement) -> EntityDef -> IO (Either [Text] [(Bool, Text)])) -> ((Text -> IO Statement) -> Maybe IsolationLevel -> IO ()) -> ((Text -> IO Statement) -> IO ()) -> ((Text -> IO Statement) -> IO ()) -> (FieldNameDB -> Text) -> (EntityDef -> Text) -> (Text -> Text) -> Text -> Text -> ((Int, Int) -> Text -> Text) -> LogFunc -> Maybe Int -> Maybe (EntityDef -> Int -> Text) -> Vault -> SqlBackendHooks -> SqlBackend -- | This function should prepare a Statement in the target -- database, which should allow for efficient query reuse. [connPrepare] :: SqlBackend -> Text -> IO Statement -- | This function generates the SQL and values necessary for performing an -- insert against the database. [connInsertSql] :: SqlBackend -> EntityDef -> [PersistValue] -> InsertSqlResult -- | SQL for inserting many rows and returning their primary keys, for -- backends that support this functionality. If Nothing, rows will -- be inserted one-at-a-time using connInsertSql. [connInsertManySql] :: SqlBackend -> Maybe (EntityDef -> [[PersistValue]] -> InsertSqlResult) -- | Some databases support performing UPSERT _and_ RETURN entity in a -- single call. -- -- This field when set will be used to generate the UPSERT+RETURN sql -- given * an entity definition * updates to be run on unique key(s) -- collision -- -- When left as Nothing, we find the unique key from entity def -- before * trying to fetch an entity by said key * perform an update -- when result found, else issue an insert * return new entity from db [connUpsertSql] :: SqlBackend -> Maybe (EntityDef -> NonEmpty (FieldNameHS, FieldNameDB) -> Text -> Text) -- | Some databases support performing bulk UPSERT, specifically "insert or -- replace many records" in a single call. 
-- -- This field when set, given * an entity definition * number of records -- to be inserted should produce a PUT MANY sql with placeholders for -- records -- -- When left as Nothing, we default to using -- defaultPutMany. [connPutManySql] :: SqlBackend -> Maybe (EntityDef -> Int -> Text) -- | A reference to the cache of statements. Statements are keyed by -- the Text queries that generated them. [connStmtMap] :: SqlBackend -> StatementCache -- | Close the underlying connection. [connClose] :: SqlBackend -> IO () -- | This function returns the migrations required to include the -- EntityDef parameter in the [EntityDef] -- database. This might include creating a new table if the entity is not -- present, or altering an existing table if it is. [connMigrateSql] :: SqlBackend -> [EntityDef] -> (Text -> IO Statement) -> EntityDef -> IO (Either [Text] [(Bool, Text)]) -- | A function to begin a transaction for the underlying database. [connBegin] :: SqlBackend -> (Text -> IO Statement) -> Maybe IsolationLevel -> IO () -- | A function to commit a transaction to the underlying database. [connCommit] :: SqlBackend -> (Text -> IO Statement) -> IO () -- | A function to roll back a transaction on the underlying database. [connRollback] :: SqlBackend -> (Text -> IO Statement) -> IO () -- | A function to extract and escape the name of the column corresponding -- to the provided field. [connEscapeFieldName] :: SqlBackend -> FieldNameDB -> Text -- | A function to extract and escape the name of the table corresponding -- to the provided entity. PostgreSQL uses this to support schemas. [connEscapeTableName] :: SqlBackend -> EntityDef -> Text -- | A function to escape raw DB identifiers. MySQL uses backticks, while -- PostgreSQL uses quotes, and so on. [connEscapeRawName] :: SqlBackend -> Text -> Text [connNoLimit] :: SqlBackend -> Text -- | A tag displaying what database the SqlBackend is for. Can be -- used to differentiate features in downstream libraries for different -- database backends. [connRDBMS] :: SqlBackend -> Text -- | Attach a 'LIMIT/OFFSET' clause to a SQL query. Note that LIMIT/OFFSET -- is problematic for performance, and indexed range queries are the -- superior way to offer pagination. [connLimitOffset] :: SqlBackend -> (Int, Int) -> Text -> Text -- | A log function for the SqlBackend to use. [connLogFunc] :: SqlBackend -> LogFunc -- | Some databases (probably only Sqlite) have a limit on how many -- question-mark parameters may be used in a statement [connMaxParams] :: SqlBackend -> Maybe Int -- | Some databases support performing bulk an atomic+bulk INSERT where -- constraint conflicting entities can replace existing entities. -- -- This field when set, given * an entity definition * number of records -- to be inserted should produce a INSERT sql with placeholders for -- primary+record fields -- -- When left as Nothing, we default to using -- defaultRepsertMany. [connRepsertManySql] :: SqlBackend -> Maybe (EntityDef -> Int -> Text) -- | Carry arbitrary payloads for the connection that may be used to -- propagate information into hooks. [connVault] :: SqlBackend -> Vault -- | Instrumentation hooks that may be used to track the behaviour of a -- backend. 
[connHooks] :: SqlBackend -> SqlBackendHooks newtype SqlBackendHooks SqlBackendHooks :: (SqlBackend -> Text -> Statement -> IO Statement) -> SqlBackendHooks [hookGetStatement] :: SqlBackendHooks -> SqlBackend -> Text -> Statement -> IO Statement emptySqlBackendHooks :: SqlBackendHooks -- | A function for creating a value of the SqlBackend type. You -- should prefer to use this instead of the constructor for -- SqlBackend, because default values for this will be provided -- for new fields on the record when new functionality is added. mkSqlBackend :: MkSqlBackendArgs -> SqlBackend instance Database.Persist.Class.PersistStore.HasPersistBackend Database.Persist.SqlBackend.Internal.SqlBackend instance Database.Persist.Class.PersistStore.IsPersistBackend Database.Persist.SqlBackend.Internal.SqlBackend -- | This module contains types and information necessary for a SQL -- database. Database support libraries, like -- persistent-postgresql, will be responsible for constructing -- these values. module Database.Persist.SqlBackend -- | A SqlBackend represents a handle or connection to a database. -- It contains functions and values that allow databases to have more -- optimized implementations, as well as references that benefit -- performance and sharing. -- -- Instead of using the SqlBackend constructor directly, use the -- mkSqlBackend function. -- -- A SqlBackend is *not* thread-safe. You should not assume that a -- SqlBackend can be shared among threads and run concurrent -- queries. This *will* result in problems. Instead, you should create a -- Pool SqlBackend, known as a -- ConnectionPool, and pass that around in multi-threaded -- applications. -- -- To run actions in the persistent library, you should use the -- runSqlConn function. If you're using a multithreaded -- application, use the runSqlPool function. data SqlBackend -- | A function for creating a value of the SqlBackend type. You -- should prefer to use this instead of the constructor for -- SqlBackend, because default values for this will be provided -- for new fields on the record when new functionality is added. mkSqlBackend :: MkSqlBackendArgs -> SqlBackend -- | This type shares many of the same field names as the -- SqlBackend type. It's useful for library authors to use this -- when migrating from using the SqlBackend constructor directly -- to the mkSqlBackend function. -- -- This type will only contain required fields for constructing a -- SqlBackend. For fields that aren't present on this record, -- you'll want to use the various set functions or data MkSqlBackendArgs MkSqlBackendArgs :: (Text -> IO Statement) -> (EntityDef -> [PersistValue] -> InsertSqlResult) -> IORef (Map Text Statement) -> IO () -> ([EntityDef] -> (Text -> IO Statement) -> EntityDef -> IO (Either [Text] [(Bool, Text)])) -> ((Text -> IO Statement) -> Maybe IsolationLevel -> IO ()) -> ((Text -> IO Statement) -> IO ()) -> ((Text -> IO Statement) -> IO ()) -> (FieldNameDB -> Text) -> (EntityDef -> Text) -> (Text -> Text) -> Text -> Text -> ((Int, Int) -> Text -> Text) -> LogFunc -> MkSqlBackendArgs -- | This function should prepare a Statement in the target -- database, which should allow for efficient query reuse. [connPrepare] :: MkSqlBackendArgs -> Text -> IO Statement -- | This function generates the SQL and values necessary for performing an -- insert against the database. [connInsertSql] :: MkSqlBackendArgs -> EntityDef -> [PersistValue] -> InsertSqlResult -- | A reference to the cache of statements. 
Statements are keyed by -- the Text queries that generated them. [connStmtMap] :: MkSqlBackendArgs -> IORef (Map Text Statement) -- | Close the underlying connection. [connClose] :: MkSqlBackendArgs -> IO () -- | This function returns the migrations required to include the -- EntityDef parameter in the [EntityDef] -- database. This might include creating a new table if the entity is not -- present, or altering an existing table if it is. [connMigrateSql] :: MkSqlBackendArgs -> [EntityDef] -> (Text -> IO Statement) -> EntityDef -> IO (Either [Text] [(Bool, Text)]) -- | A function to begin a transaction for the underlying database. [connBegin] :: MkSqlBackendArgs -> (Text -> IO Statement) -> Maybe IsolationLevel -> IO () -- | A function to commit a transaction to the underlying database. [connCommit] :: MkSqlBackendArgs -> (Text -> IO Statement) -> IO () -- | A function to roll back a transaction on the underlying database. [connRollback] :: MkSqlBackendArgs -> (Text -> IO Statement) -> IO () -- | A function to extract and escape the name of the column corresponding -- to the provided field. [connEscapeFieldName] :: MkSqlBackendArgs -> FieldNameDB -> Text -- | A function to extract and escape the name of the table corresponding -- to the provided entity. PostgreSQL uses this to support schemas. [connEscapeTableName] :: MkSqlBackendArgs -> EntityDef -> Text -- | A function to escape raw DB identifiers. MySQL uses backticks, while -- PostgreSQL uses quotes, and so on. [connEscapeRawName] :: MkSqlBackendArgs -> Text -> Text [connNoLimit] :: MkSqlBackendArgs -> Text -- | A tag displaying what database the SqlBackend is for. Can be -- used to differentiate features in downstream libraries for different -- database backends. [connRDBMS] :: MkSqlBackendArgs -> Text -- | Attach a 'LIMIT/OFFSET' clause to a SQL query. Note that LIMIT/OFFSET -- is problematic for performance, and indexed range queries are the -- superior way to offer pagination. [connLimitOffset] :: MkSqlBackendArgs -> (Int, Int) -> Text -> Text -- | A log function for the SqlBackend to use. [connLogFunc] :: MkSqlBackendArgs -> LogFunc data SqlBackendHooks emptySqlBackendHooks :: SqlBackendHooks -- | Get a tag displaying what database the SqlBackend is for. Can -- be used to differentiate features in downstream libraries for -- different database backends. @since 2.13.3.0 getRDBMS :: (BackendCompatible SqlBackend backend, MonadReader backend m) => m Text -- | This function can be used directly with a SqlBackend to escape -- a FieldNameDB. -- --
-- let conn :: SqlBackend -- getEscapedFieldName (FieldNameDB "asdf") conn ---- -- Alternatively, you can use it in a ReaderT -- SqlBackend context, like SqlPersistT: -- --
-- query :: SqlPersistM Text -- query = do -- field <- getEscapedFieldName (FieldNameDB "asdf") -- pure field --getEscapedFieldName :: (BackendCompatible SqlBackend backend, MonadReader backend m) => FieldNameDB -> m Text -- | This function can be used directly with a SqlBackend to escape -- a raw Text. -- --
-- let conn :: SqlBackend -- getEscapedRawName "asdf" conn ---- -- Alternatively, you can use it in a ReaderT -- SqlBackend context, like SqlPersistT: -- --
-- query :: SqlPersistM Text -- query = do -- field <- getEscapedRawName "asdf" -- pure field --getEscapedRawName :: (BackendCompatible SqlBackend backend, MonadReader backend m) => Text -> m Text -- | Return the function for escaping a raw name. getEscapeRawNameFunction :: (BackendCompatible SqlBackend backend, MonadReader backend m) => m (Text -> Text) -- | Decorate the given SQL query with the (LIMIT, OFFSET) -- specified. getConnLimitOffset :: (BackendCompatible SqlBackend backend, MonadReader backend m) => (Int, Int) -> Text -> m Text -- | Retrieve the function for generating an upsert statement, if the -- backend supports it. getConnUpsertSql :: (BackendCompatible SqlBackend backend, MonadReader backend m) => m (Maybe (EntityDef -> NonEmpty (FieldNameHS, FieldNameDB) -> Text -> Text)) -- | Retrieve the vault from the provided database backend. getConnVault :: (BackendCompatible SqlBackend backend, MonadReader backend m) => m Vault -- | Retrieve instrumentation hooks from the provided database backend. getConnHooks :: (BackendCompatible SqlBackend backend, MonadReader backend m) => m SqlBackendHooks -- | Set the maximum parameters that may be issued in a given SQL query. -- This should be used only if the database backend has this limitation. setConnMaxParams :: Int -> SqlBackend -> SqlBackend -- | Set the connRepsertManySql field on the SqlBackend. This -- should only be set by the database backend library. If this is not -- set, a slow default will be used. setConnRepsertManySql :: (EntityDef -> Int -> Text) -> SqlBackend -> SqlBackend -- | Set the connInsertManySql field on the SqlBackend. This -- should only be used by the database backend library to provide an -- efficient implementation of a bulk insert function. If this is not -- set, a slow default will be used. setConnInsertManySql :: (EntityDef -> [[PersistValue]] -> InsertSqlResult) -> SqlBackend -> SqlBackend -- | Set the connUpsertSql field on the SqlBackend. This -- should only be used by the database backend library to provide an -- efficient implementation of an upsert function. If this is not -- set, a slow default will be used. setConnUpsertSql :: (EntityDef -> NonEmpty (FieldNameHS, FieldNameDB) -> Text -> Text) -> SqlBackend -> SqlBackend -- | Set the connPutManySql field on the SqlBackend. This should -- only be used by the database backend library to provide an efficient -- implementation of a bulk put function. If this is not set, a slow -- default will be used. setConnPutManySql :: (EntityDef -> Int -> Text) -> SqlBackend -> SqlBackend -- | Set the vault on the provided database backend. setConnVault :: Vault -> SqlBackend -> SqlBackend -- | Modify the vault on the provided database backend. modifyConnVault :: (Vault -> Vault) -> SqlBackend -> SqlBackend -- | Set hooks on the provided database backend. setConnHooks :: SqlBackendHooks -> SqlBackend -> SqlBackend module Database.Persist.Class.PersistUnique -- | Queries against Unique keys (other than the id Key). -- -- Please read the general Persistent documentation to learn how to -- create Unique keys. -- -- Using this with an Entity without a Unique key leads to undefined -- behavior. A few of these functions require a single -- Unique, so using an Entity with multiple Uniques is also -- undefined. In these cases persistent's goal is to throw an exception -- as soon as possible, but persistent is still transitioning to that.
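-- -- As a minimal sketch (the User schema below is an assumption, chosen to
-- mirror the schema-1 used by the examples in this module), a Unique key is
-- declared by adding a constructor line to the entity definition
-- quasi-quoter:
--
-- share [mkPersist sqlSettings] [persistLowerCase|
-- User
--     name Text
--     age  Int
--     UniqueUserName name
--     deriving Show
-- |]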
-- -- SQL backends automatically create uniqueness constraints, but for -- MongoDB you must manually place a unique index on a field to have a -- uniqueness constraint. class PersistStoreRead backend => PersistUniqueRead backend -- | Get a record by unique key, if available. Returns also the identifier. -- --
-- getBySpjName :: MonadIO m => ReaderT SqlBackend m (Maybe (Entity User)) -- getBySpjName = getBy $ UniqueUserName "SPJ" ---- --
-- mSpjEnt <- getBySpjName ---- -- The above query when applied on dataset-1, will get this -- entity: -- --
-- +----+------+-----+ -- | id | name | age | -- +----+------+-----+ -- | 1 | SPJ | 40 | -- +----+------+-----+ --getBy :: forall record m. (PersistUniqueRead backend, MonadIO m, PersistRecordBackend record backend) => Unique record -> ReaderT backend m (Maybe (Entity record)) -- | Some functions in this module (insertUnique, insertBy, -- and replaceUnique) first query the unique indexes to check for -- conflicts. You could instead optimistically attempt to perform the -- operation (e.g. replace instead of replaceUnique). -- However, -- --
-- deleteBySpjName :: MonadIO m => ReaderT SqlBackend m () -- deleteBySpjName = deleteBy $ UniqueUserName "SPJ" ---- -- The above query when applied on dataset-1, will produce this: -- --
-- +-----+------+-----+ -- |id |name |age | -- +-----+------+-----+ -- |2 |Simon |41 | -- +-----+------+-----+ --deleteBy :: forall record m. (PersistUniqueWrite backend, MonadIO m, PersistRecordBackend record backend) => Unique record -> ReaderT backend m () -- | Like insert, but returns Nothing when the record -- couldn't be inserted because of a uniqueness constraint. -- --
-- linusId <- insertUnique $ User "Linus" 48 -- spjId <- insertUnique $ User "SPJ" 90 ---- --
-- +-----+------+-----+ -- |id |name |age | -- +-----+------+-----+ -- |1 |SPJ |40 | -- +-----+------+-----+ -- |2 |Simon |41 | -- +-----+------+-----+ -- |3 |Linus |48 | -- +-----+------+-----+ ---- -- Linus's record was inserted to dataset-1, while SPJ wasn't -- because SPJ already exists in dataset-1. insertUnique :: forall record m. (PersistUniqueWrite backend, MonadIO m, PersistRecordBackend record backend, SafeToInsert record) => record -> ReaderT backend m (Maybe (Key record)) -- | Update based on a uniqueness constraint or insert: -- --
-- upsertSpj :: MonadIO m => [Update User] -> ReaderT SqlBackend m (Entity User) -- upsertSpj updates = upsert (User "SPJ" 999) updates ---- --
-- mSpjEnt <- upsertSpj [UserAge +=. 15] ---- -- The above query when applied on dataset-1, will produce this: -- --
-- +-----+-----+--------+ -- |id |name |age | -- +-----+-----+--------+ -- |1 |SPJ |40 -> 55| -- +-----+-----+--------+ -- |2 |Simon|41 | -- +-----+-----+--------+ ---- --
-- upsertX :: MonadIO m => [Update User] -> ReaderT SqlBackend m (Entity User) -- upsertX updates = upsert (User "X" 999) updates ---- --
-- mXEnt <- upsertX [UserAge +=. 15] ---- -- The above query when applied on dataset-1, will produce this: -- --
-- +-----+-----+--------+ -- |id |name |age | -- +-----+-----+--------+ -- |1 |SPJ |40 | -- +-----+-----+--------+ -- |2 |Simon|41 | -- +-----+-----+--------+ -- |3 |X |999 | -- +-----+-----+--------+ ---- -- Next, what if the schema has two uniqueness constraints? Let's check -- it out using schema-2: -- --
-- mSpjEnt <- upsertSpj [UserAge +=. 15] ---- -- This fails with a compile-time type error alerting us to the fact that -- this record has multiple unique keys, and suggests that we look for -- upsertBy to select the unique key we want. upsert :: forall record m. (PersistUniqueWrite backend, MonadIO m, PersistRecordBackend record backend, OnlyOneUniqueKey record, SafeToInsert record) => record -> [Update record] -> ReaderT backend m (Entity record) -- | Update based on a given uniqueness constraint or insert: -- --
-- upsertBySpjName :: MonadIO m => User -> [Update User] -> ReaderT SqlBackend m (Entity User) -- upsertBySpjName record updates = upsertBy (UniqueUserName "SPJ") record updates ---- --
-- mSpjEnt <- upsertBySpjName (User "X" 999) [UserAge +=. 15] ---- -- The above query will alter dataset-1 to: -- --
-- +-----+-----+--------+ -- |id |name |age | -- +-----+-----+--------+ -- |1 |SPJ |40 -> 55| -- +-----+-----+--------+ -- |2 |Simon|41 | -- +-----+-----+--------+ ---- --
-- upsertBySimonAge :: MonadIO m => User -> [Update User] -> ReaderT SqlBackend m (Entity User) -- upsertBySimonAge record updates = upsertBy (UniqueUserAge 41) record updates ---- --
-- mPhilipEnt <- upsertBySimonAge (User "X" 999) [UserName =. "Philip"] ---- -- The above query will alter dataset-1 to: -- --
-- +----+-----------------+-----+ -- | id | name | age | -- +----+-----------------+-----+ -- | 1 | SPJ | 40 | -- +----+-----------------+-----+ -- | 2 | Simon -> Philip | 41 | -- +----+-----------------+-----+ ---- --
-- upsertByUnknownName :: MonadIO m => User -> [Update User] -> ReaderT SqlBackend m (Entity User) -- upsertByUnknownName record updates = upsertBy (UniqueUserName "Unknown") record updates ---- --
-- mXEnt <- upsertByUnknownName (User "X" 999) [UserAge +=. 15] ---- -- This query will alter dataset-1 to: -- --
-- +-----+-----+-----+ -- |id |name |age | -- +-----+-----+-----+ -- |1 |SPJ |40 | -- +-----+-----+-----+ -- |2 |Simon|41 | -- +-----+-----+-----+ -- |3 |X |999 | -- +-----+-----+-----+ --upsertBy :: forall record m. (PersistUniqueWrite backend, MonadIO m, PersistRecordBackend record backend, SafeToInsert record) => Unique record -> record -> [Update record] -> ReaderT backend m (Entity record) -- | A variant of getBy that takes the record itself instead of a -- Unique value and returns a record matching one of its unique keys, -- if available. -- --
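-- A plausible definition of the getBySpjValue helper used below (the original docs do not define it, so this is an assumption): -- -- getBySpjValue :: MonadIO m => ReaderT SqlBackend m (Maybe (Entity User)) -- getBySpjValue = getByValue $ User "SPJ" 999 ---- --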
-- mSpjEnt <- getBySpjValue ---- -- The above query when applied on dataset-1, will get this -- record: -- --
-- +----+------+-----+ -- | id | name | age | -- +----+------+-----+ -- | 1 | SPJ | 40 | -- +----+------+-----+ --getByValue :: forall record m backend. (MonadIO m, PersistUniqueRead backend, PersistRecordBackend record backend, AtLeastOneUniqueKey record) => record -> ReaderT backend m (Maybe (Entity record)) -- | Retrieve a record from the database using the given unique keys. It -- will attempt to find a matching record for each Unique in the -- list, and returns the first one that has a match. -- -- Returns Nothing if you provide an empty list ('[]') or if no -- value matches in the database. getByValueUniques :: forall record backend m. (MonadIO m, PersistUniqueRead backend, PersistRecordBackend record backend) => [Unique record] -> ReaderT backend m (Maybe (Entity record)) -- | Insert a value, checking for conflicts with any unique constraints. If -- a duplicate exists in the database, it is returned as Left. -- Otherwise, the new 'Key is returned as Right. -- --
-- l1 <- insertBy $ User "SPJ" 20 -- l2 <- insertBy $ User "XXX" 41 -- l3 <- insertBy $ User "SPJ" 40 -- r1 <- insertBy $ User "XXX" 100 ---- -- The first three lines return Left because each record conflicts with -- an existing record on one of its uniqueness constraints, while the last -- line returns a new key as Right. insertBy :: forall record backend m. (MonadIO m, PersistUniqueWrite backend, PersistRecordBackend record backend, AtLeastOneUniqueKey record, SafeToInsert record) => record -> ReaderT backend m (Either (Entity record) (Key record)) -- | Like insertEntity, but returns Nothing when the record -- couldn't be inserted because of a uniqueness constraint. -- --
-- insertUniqueSpjEntity :: MonadIO m => ReaderT SqlBackend m (Maybe (Entity User)) -- insertUniqueSpjEntity = insertUniqueEntity $ User "SPJ" 50 ---- --
-- mSpjEnt <- insertUniqueSpjEntity ---- -- The above query results in Nothing as SPJ already exists. -- --
-- insertUniqueAlexaEntity :: MonadIO m => ReaderT SqlBackend m (Maybe (Entity User)) -- insertUniqueAlexaEntity = insertUniqueEntity $ User "Alexa" 3 ---- --
-- mAlexaEnt <- insertUniqueAlexaEntity ---- -- Because the given record does not conflict with any existing unique -- keys, the above query when applied on dataset-1, will produce this: -- --
-- +----+-------+-----+ -- | id | name | age | -- +----+-------+-----+ -- | 1 | SPJ | 40 | -- +----+-------+-----+ -- | 2 | Simon | 41 | -- +----+-------+-----+ -- | 3 | Alexa | 3 | -- +----+-------+-----+ --insertUniqueEntity :: forall record backend m. (MonadIO m, PersistRecordBackend record backend, PersistUniqueWrite backend, SafeToInsert record) => record -> ReaderT backend m (Maybe (Entity record)) -- | Attempt to replace the record of the given key with the given new -- record. First query the unique fields to make sure the replacement -- maintains uniqueness constraints. -- -- Return Nothing if the replacement was made. If uniqueness is -- violated, return a Just with the Unique violation replaceUnique :: forall record backend m. (MonadIO m, Eq (Unique record), PersistRecordBackend record backend, PersistUniqueWrite backend) => Key record -> record -> ReaderT backend m (Maybe (Unique record)) -- | Check whether there are any conflicts for unique keys with this entity -- and existing entities in the database. -- -- Returns Nothing if the entity would be unique, and could thus -- safely be inserted. on a conflict returns the conflicting key -- --
-- mAlanConst <- checkUnique $ User "Alan" 70 ---- -- While this would be Just because SPJ already exists: -- --
-- mSpjConst <- checkUnique $ User "SPJ" 60 --checkUnique :: forall record backend m. (MonadIO m, PersistRecordBackend record backend, PersistUniqueRead backend) => record -> ReaderT backend m (Maybe (Unique record)) -- | Check whether there are any conflicts for unique keys with this entity -- and existing entities in the database. -- -- Returns Nothing if the entity would stay unique, and could thus -- safely be updated. on a conflict returns the conflicting key -- -- This is similar to checkUnique, except it's useful for updating -- - when the particular entity already exists, it would normally -- conflict with itself. This variant ignores those conflicts -- --
-- mAlanConst <- checkUnique $ User "Alan" 70 ---- -- While this would be Just because SPJ already exists: -- --
-- mSpjConst <- checkUnique $ User "SPJ" 60 --checkUniqueUpdateable :: forall record backend m. (MonadIO m, PersistRecordBackend record backend, PersistUniqueRead backend) => Entity record -> ReaderT backend m (Maybe (Unique record)) -- | Return the single unique key for a record. -- --
-- onlySimonConst :: MonadIO m => ReaderT SqlBackend m (Unique User) -- onlySimonConst = onlyUnique $ User "Simon" 999 ---- --
-- mSimonConst <- onlySimonConst ---- -- mSimonConst would be Simon's uniqueness constraint. Note that -- onlyUnique only works for records with exactly one uniqueness -- constraint; otherwise it fails with a type error. onlyUnique :: forall record backend m. (MonadIO m, PersistUniqueWrite backend, PersistRecordBackend record backend, OnlyOneUniqueKey record) => record -> ReaderT backend m (Unique record) -- | The slow but generic upsertBy implementation for any -- PersistUniqueRead. * Lookup corresponding entities (if any) using -- getBy. * If the record exists, update using updateGet. * -- If it does not exist, insert using insertEntity. @since 2.11 defaultUpsertBy :: (PersistEntityBackend record ~ BaseBackend backend, PersistEntity record, MonadIO m, PersistStoreWrite backend, PersistUniqueRead backend, SafeToInsert record) => Unique record -> record -> [Update record] -> ReaderT backend m (Entity record) -- | The slow but generic putMany implementation for any -- PersistUniqueRead. * Lookup corresponding entities (if any) for -- each record using getByValue * For pre-existing records, issue -- a replace for each old key and new record * For new records, -- issue a bulk insertMany_ defaultPutMany :: forall record backend m. (PersistEntityBackend record ~ BaseBackend backend, PersistEntity record, MonadIO m, PersistStoreWrite backend, PersistUniqueRead backend, SafeToInsert record) => [record] -> ReaderT backend m () -- | This function returns a list of PersistValue that correspond to -- the Unique keys on that record. This is useful for comparing -- two records for equality only on the basis of their -- Unique keys. persistUniqueKeyValues :: PersistEntity record => record -> [PersistValue] module Database.Persist.Class.PersistQuery -- | Returns a [Entity record] corresponding to the filters -- and options provided. -- -- Filters are constructed using the operators defined in -- Database.Persist (and re-exported from -- Database.Persist.Sql). Let's look at some examples: -- --
-- usersWithAgeOver40 :: SqlPersistT IO [Entity User] -- usersWithAgeOver40 = -- selectList [UserAge >=. 40] [] ---- -- If you provide multiple values in the list, the conditions are -- ANDed together. -- --
-- usersWithAgeBetween30And50 :: SqlPersistT IO [Entity User] -- usersWithAgeBetween30And50 = -- selectList -- [ UserAge >=. 30 -- , UserAge <=. 50 -- ] -- [] ---- -- The second list contains the SelectOpt for a record. We can -- select the first ten records with LimitTo -- --
-- firstTenUsers = -- selectList [] [LimitTo 10] ---- -- And we can select the second ten users with OffsetBy. -- --
-- secondTenUsers = -- selectList [] [LimitTo 10, OffsetBy 10] ---- -- Warning that LIMIT/OFFSET is bad for pagination! -- -- With Asc and Desc, we can provide the field we want to -- sort on. We can provide multiple sort orders - later ones are used to -- sort records that are equal on the first field. -- --
-- newestUsers = -- selectList [] [Desc UserCreatedAt, LimitTo 10] -- -- oldestUsers = -- selectList [] [Asc UserCreatedAt, LimitTo 10] --selectList :: forall record backend m. (MonadIO m, PersistQueryRead backend, PersistRecordBackend record backend) => [Filter record] -> [SelectOpt record] -> ReaderT backend m [Entity record] -- | Backends supporting conditional read operations. class (PersistCore backend, PersistStoreRead backend) => PersistQueryRead backend -- | Get all records matching the given criterion in the specified order. -- Returns also the identifiers. -- -- NOTE: This function returns an Acquire and a ConduitM, -- which implies that it streams from the database. It does not. Please -- use selectList to simplify the code. If you want streaming -- behavior, consider persistent-pagination which efficiently -- chunks a query into ranges, or investigate a backend-specific -- streaming solution. selectSourceRes :: (PersistQueryRead backend, PersistRecordBackend record backend, MonadIO m1, MonadIO m2) => [Filter record] -> [SelectOpt record] -> ReaderT backend m1 (Acquire (ConduitM () (Entity record) m2 ())) -- | Get just the first record for the criterion. selectFirst :: (PersistQueryRead backend, MonadIO m, PersistRecordBackend record backend) => [Filter record] -> [SelectOpt record] -> ReaderT backend m (Maybe (Entity record)) -- | Get the Keys of all records matching the given criterion. selectKeysRes :: (PersistQueryRead backend, MonadIO m1, MonadIO m2, PersistRecordBackend record backend) => [Filter record] -> [SelectOpt record] -> ReaderT backend m1 (Acquire (ConduitM () (Key record) m2 ())) -- | The total number of records fulfilling the given criterion. count :: (PersistQueryRead backend, MonadIO m, PersistRecordBackend record backend) => [Filter record] -> ReaderT backend m Int -- | Check if there is at least one record fulfilling the given criterion. exists :: (PersistQueryRead backend, MonadIO m, PersistRecordBackend record backend) => [Filter record] -> ReaderT backend m Bool -- | Backends supporting conditional write operations class (PersistQueryRead backend, PersistStoreWrite backend) => PersistQueryWrite backend -- | Update individual fields on any record matching the given criterion. updateWhere :: (PersistQueryWrite backend, MonadIO m, PersistRecordBackend record backend) => [Filter record] -> [Update record] -> ReaderT backend m () -- | Delete all records matching the given criterion. deleteWhere :: (PersistQueryWrite backend, MonadIO m, PersistRecordBackend record backend) => [Filter record] -> ReaderT backend m () -- | Get all records matching the given criterion in the specified order. -- Returns also the identifiers. -- -- WARNING: This function returns a ConduitM, which implies that -- it streams the results. It does not stream results on most backends. -- If you need streaming, see persistent-pagination for a means -- of chunking results based on indexed ranges. selectSource :: forall record backend m. (PersistQueryRead backend, MonadResource m, PersistRecordBackend record backend, MonadReader backend m) => [Filter record] -> [SelectOpt record] -> ConduitM () (Entity record) m () -- | Get the Keys of all records matching the given criterion. -- -- For an example, see selectList. selectKeys :: forall record backend m. (PersistQueryRead backend, MonadResource m, PersistRecordBackend record backend, MonadReader backend m) => [Filter record] -> [SelectOpt record] -> ConduitM () (Key record) m () -- | Call selectKeys but return the result as a list. 
selectKeysList :: forall record backend m. (MonadIO m, PersistQueryRead backend, PersistRecordBackend record backend) => [Filter record] -> [SelectOpt record] -> ReaderT backend m [Key record] -- | This module exports all of the type classes in persistent for -- operating on the database backends. -- -- persistent offers methods that are abstract in the specific -- backend type. For SQL databases, this will be -- SqlBackend. Other database backends will define their own -- types. -- -- Methods and functions in this module have examples documented under an -- "Example Usage" section that you need to click to expand. module Database.Persist.Class -- | A backwards-compatible alias for those that don't care about -- distinguishing between read and write queries. It signifies the -- assumption that, by default, a backend can write as well as read. type PersistStore a = PersistStoreWrite a class (Show (BackendKey backend), Read (BackendKey backend), Eq (BackendKey backend), Ord (BackendKey backend), PersistCore backend, PersistField (BackendKey backend), ToJSON (BackendKey backend), FromJSON (BackendKey backend)) => PersistStoreRead backend -- | Get a record by identifier, if available. -- --
-- getSpj :: MonadIO m => ReaderT SqlBackend m (Maybe User) -- getSpj = get spjId ---- --
-- mspj <- getSpj ---- -- The above query when applied on dataset-1, will get this: -- --
-- +------+-----+ -- | name | age | -- +------+-----+ -- | SPJ | 40 | -- +------+-----+ --get :: forall record m. (PersistStoreRead backend, MonadIO m, PersistRecordBackend record backend) => Key record -> ReaderT backend m (Maybe record) -- | Get many records by their respective identifiers, if available. -- --
-- getUsers :: MonadIO m => ReaderT SqlBackend m (Map (Key User) User) -- getUsers = getMany allkeys ---- --
-- musers <- getUsers ---- -- The above query when applied on dataset-1, will get these -- records: -- --
-- +----+-------+-----+ -- | id | name | age | -- +----+-------+-----+ -- | 1 | SPJ | 40 | -- +----+-------+-----+ -- | 2 | Simon | 41 | -- +----+-------+-----+ --getMany :: forall record m. (PersistStoreRead backend, MonadIO m, PersistRecordBackend record backend) => [Key record] -> ReaderT backend m (Map (Key record) record) class (Show (BackendKey backend), Read (BackendKey backend), Eq (BackendKey backend), Ord (BackendKey backend), PersistStoreRead backend, PersistField (BackendKey backend), ToJSON (BackendKey backend), FromJSON (BackendKey backend)) => PersistStoreWrite backend -- | Create a new record in the database, returning an automatically -- created key (in SQL an auto-increment id). -- --
-- insertJohn :: MonadIO m => ReaderT SqlBackend m (Key User) -- insertJohn = insert $ User "John" 30 ---- --
-- johnId <- insertJohn ---- -- The above query when applied on dataset-1, will produce this: -- --
-- +-----+------+-----+ -- |id |name |age | -- +-----+------+-----+ -- |1 |SPJ |40 | -- +-----+------+-----+ -- |2 |Simon |41 | -- +-----+------+-----+ -- |3 |John |30 | -- +-----+------+-----+ --insert :: forall record m. (PersistStoreWrite backend, MonadIO m, PersistRecordBackend record backend, SafeToInsert record) => record -> ReaderT backend m (Key record) -- | Same as insert, but doesn't return a Key. -- --
-- insertJohn :: MonadIO m => ReaderT SqlBackend m () -- insertJohn = insert_ $ User "John" 30 ---- -- The above query when applied on dataset-1, will produce this: -- --
-- +-----+------+-----+ -- |id |name |age | -- +-----+------+-----+ -- |1 |SPJ |40 | -- +-----+------+-----+ -- |2 |Simon |41 | -- +-----+------+-----+ -- |3 |John |30 | -- +-----+------+-----+ --insert_ :: forall record m. (PersistStoreWrite backend, MonadIO m, PersistRecordBackend record backend, SafeToInsert record) => record -> ReaderT backend m () -- | Create multiple records in the database and return their Keys. -- -- If you don't need the inserted Keys, use insertMany_. -- -- The MongoDB and PostgreSQL backends insert all records and retrieve -- their keys in one database query. -- -- The SQLite and MySQL backends use the slow, default implementation of -- mapM insert. -- --
-- insertUsers :: MonadIO m => ReaderT SqlBackend m [Key User] -- insertUsers = insertMany [User "John" 30, User "Nick" 32, User "Jane" 20] ---- --
-- userIds <- insertUsers ---- -- The above query when applied on dataset-1, will produce this: -- --
-- +-----+------+-----+ -- |id |name |age | -- +-----+------+-----+ -- |1 |SPJ |40 | -- +-----+------+-----+ -- |2 |Simon |41 | -- +-----+------+-----+ -- |3 |John |30 | -- +-----+------+-----+ -- |4 |Nick |32 | -- +-----+------+-----+ -- |5 |Jane |20 | -- +-----+------+-----+ --insertMany :: forall record m. (PersistStoreWrite backend, MonadIO m, PersistRecordBackend record backend, SafeToInsert record) => [record] -> ReaderT backend m [Key record] -- | Same as insertMany, but doesn't return any Keys. -- -- The MongoDB, PostgreSQL, SQLite and MySQL backends insert all records -- in one database query. -- --
-- insertUsers_ :: MonadIO m => ReaderT SqlBackend m () -- insertUsers_ = insertMany_ [User "John" 30, User "Nick" 32, User "Jane" 20] ---- -- The above query when applied on dataset-1, will produce this: -- --
-- +-----+------+-----+ -- |id |name |age | -- +-----+------+-----+ -- |1 |SPJ |40 | -- +-----+------+-----+ -- |2 |Simon |41 | -- +-----+------+-----+ -- |3 |John |30 | -- +-----+------+-----+ -- |4 |Nick |32 | -- +-----+------+-----+ -- |5 |Jane |20 | -- +-----+------+-----+ --insertMany_ :: forall record m. (PersistStoreWrite backend, MonadIO m, PersistRecordBackend record backend, SafeToInsert record) => [record] -> ReaderT backend m () -- | Same as insertMany_, but takes an Entity instead of just -- a record. -- -- Useful when migrating data from one entity to another and want to -- preserve ids. -- -- The MongoDB, PostgreSQL, SQLite and MySQL backends insert all records -- in one database query. -- --
-- insertUserEntityMany :: MonadIO m => ReaderT SqlBackend m () -- insertUserEntityMany = insertEntityMany [SnakeEntity, EvaEntity] ---- -- The above query when applied on dataset-1, will produce this: -- --
-- +-----+------+-----+ -- |id |name |age | -- +-----+------+-----+ -- |1 |SPJ |40 | -- +-----+------+-----+ -- |2 |Simon |41 | -- +-----+------+-----+ -- |3 |Snake |38 | -- +-----+------+-----+ -- |4 |Eva |38 | -- +-----+------+-----+ --insertEntityMany :: forall record m. (PersistStoreWrite backend, MonadIO m, PersistRecordBackend record backend) => [Entity record] -> ReaderT backend m () -- | Create a new record in the database using the given key. -- --
-- insertAliceKey :: MonadIO m => Key User -> ReaderT SqlBackend m () -- insertAliceKey key = insertKey key $ User "Alice" 20 ---- --
-- insertAliceKey $ UserKey {unUserKey = SqlBackendKey {unSqlBackendKey = 3}}
--
--
-- The above query when applied on dataset-1, will produce this:
--
-- -- +-----+------+-----+ -- |id |name |age | -- +-----+------+-----+ -- |1 |SPJ |40 | -- +-----+------+-----+ -- |2 |Simon |41 | -- +-----+------+-----+ -- |3 |Alice |20 | -- +-----+------+-----+ --insertKey :: forall record m. (PersistStoreWrite backend, MonadIO m, PersistRecordBackend record backend) => Key record -> record -> ReaderT backend m () -- | Put the record in the database with the given key. Unlike -- replace, if a record with the given key does not exist then a -- new record will be inserted. -- --
-- insertPhilip :: MonadIO m => ReaderT SqlBackend m (Key User) -- insertPhilip = insert $ User "Philip" 42 ---- --
-- philipId <- insertPhilip ---- -- This query will produce: -- --
-- +-----+------+-----+ -- |id |name |age | -- +-----+------+-----+ -- |1 |SPJ |40 | -- +-----+------+-----+ -- |2 |Simon |41 | -- +-----+------+-----+ -- |3 |Philip|42 | -- +-----+------+-----+ ---- --
-- repsertHaskell :: MonadIO m => Key User -> ReaderT SqlBackend m () -- repsertHaskell id = repsert id $ User "Haskell" 81 ---- --
-- repsertHaskell philipId ---- -- This query will replace Philip's record with Haskell's one: -- --
-- +-----+-----------------+--------+ -- |id |name |age | -- +-----+-----------------+--------+ -- |1 |SPJ |40 | -- +-----+-----------------+--------+ -- |2 |Simon |41 | -- +-----+-----------------+--------+ -- |3 |Philip -> Haskell|42 -> 81| -- +-----+-----------------+--------+ ---- -- repsert inserts the given record if the key doesn't exist. -- --
-- repsertXToUnknown :: MonadIO m => ReaderT SqlBackend m () -- repsertXToUnknown = repsert unknownId $ User "X" 999 ---- -- For example, applying the above query to dataset-1 will produce -- this: -- --
-- +-----+------+-----+ -- |id |name |age | -- +-----+------+-----+ -- |1 |SPJ |40 | -- +-----+------+-----+ -- |2 |Simon |41 | -- +-----+------+-----+ -- |3 |X |999 | -- +-----+------+-----+ --repsert :: forall record m. (PersistStoreWrite backend, MonadIO m, PersistRecordBackend record backend) => Key record -> record -> ReaderT backend m () -- | Put many entities into the database. -- -- Batch version of repsert for SQL backends. -- -- Useful when migrating data from one entity to another and want to -- preserve ids. -- --
-- repsertManyUsers :: MonadIO m => ReaderT SqlBackend m () -- repsertManyUsers = repsertMany [(simonId, User "Philip" 20), (unknownId999, User "Mr. X" 999)] ---- -- The above query when applied on dataset-1, will produce this: -- --
-- +-----+----------------+---------+ -- |id |name |age | -- +-----+----------------+---------+ -- |1 |SPJ |40 | -- +-----+----------------+---------+ -- |2 |Simon -> Philip |41 -> 20 | -- +-----+----------------+---------+ -- |999 |Mr. X |999 | -- +-----+----------------+---------+ --repsertMany :: forall record m. (PersistStoreWrite backend, MonadIO m, PersistRecordBackend record backend) => [(Key record, record)] -> ReaderT backend m () -- | Replace the record in the database with the given key. Note that the -- result is undefined if such record does not exist, so you must use -- insertKey or repsert in these cases. -- --
-- replaceSpj :: MonadIO m => User -> ReaderT SqlBackend m () -- replaceSpj record = replace spjId record ---- -- The above query when applied on dataset-1, will produce this: -- --
-- +-----+------+-----+ -- |id |name |age | -- +-----+------+-----+ -- |1 |Mike |45 | -- +-----+------+-----+ -- |2 |Simon |41 | -- +-----+------+-----+ --replace :: forall record m. (PersistStoreWrite backend, MonadIO m, PersistRecordBackend record backend) => Key record -> record -> ReaderT backend m () -- | Delete a specific record by identifier. Does nothing if record does -- not exist. -- --
-- deleteSpj :: MonadIO m => ReaderT SqlBackend m () -- deleteSpj = delete spjId ---- -- The above query when applied on dataset-1, will produce this: -- --
-- +-----+------+-----+ -- |id |name |age | -- +-----+------+-----+ -- |2 |Simon |41 | -- +-----+------+-----+ --delete :: forall record m. (PersistStoreWrite backend, MonadIO m, PersistRecordBackend record backend) => Key record -> ReaderT backend m () -- | Update individual fields on a specific record. -- --
-- updateSpj :: MonadIO m => [Update User] -> ReaderT SqlBackend m () -- updateSpj updates = update spjId updates ---- --
-- updateSpj [UserAge +=. 100] ---- -- The above query when applied on dataset-1, will produce this: -- --
-- +-----+------+-----+ -- |id |name |age | -- +-----+------+-----+ -- |1 |SPJ |140 | -- +-----+------+-----+ -- |2 |Simon |41 | -- +-----+------+-----+ --update :: forall record m. (PersistStoreWrite backend, MonadIO m, PersistRecordBackend record backend) => Key record -> [Update record] -> ReaderT backend m () -- | Update individual fields on a specific record, and retrieve the -- updated value from the database. -- -- Note that this function will throw an exception if the given key is -- not found in the database. -- --
-- updateGetSpj :: MonadIO m => [Update User] -> ReaderT SqlBackend m User -- updateGetSpj updates = updateGet spjId updates ---- --
-- spj <- updateGetSpj [UserAge +=. 100] ---- -- The above query when applied on dataset-1, will produce this: -- --
-- +-----+------+-----+ -- |id |name |age | -- +-----+------+-----+ -- |1 |SPJ |140 | -- +-----+------+-----+ -- |2 |Simon |41 | -- +-----+------+-----+ --updateGet :: forall record m. (PersistStoreWrite backend, MonadIO m, PersistRecordBackend record backend) => Key record -> [Update record] -> ReaderT backend m record -- | A convenient alias for common type signatures type PersistRecordBackend record backend = (PersistEntity record, PersistEntityBackend record ~ BaseBackend backend) -- | Same as get, but for a non-null (not Maybe) foreign key. Unsafe -- unless your database is enforcing that the foreign key is valid. -- --
-- getJustSpj :: MonadIO m => ReaderT SqlBackend m User -- getJustSpj = getJust spjId ---- --
-- spj <- getJust spjId ---- -- The above query when applied on dataset-1, will get this -- record: -- --
-- +----+------+-----+ -- | id | name | age | -- +----+------+-----+ -- | 1 | SPJ | 40 | -- +----+------+-----+ ---- --
-- getJustUnknown :: MonadIO m => ReaderT SqlBackend m User -- getJustUnknown = getJust unknownId ---- -- mrx <- getJustUnknown -- -- This just throws an error. getJust :: forall record backend m. (PersistStoreRead backend, PersistRecordBackend record backend, MonadIO m) => Key record -> ReaderT backend m record -- | Same as getJust, but returns an Entity instead of just -- the record. -- --
-- getJustEntitySpj :: MonadIO m => ReaderT SqlBackend m (Entity User) -- getJustEntitySpj = getJustEntity spjId ---- --
-- spjEnt <- getJustEntitySpj ---- -- The above query when applied on dataset-1, will get this -- entity: -- --
-- +----+------+-----+ -- | id | name | age | -- +----+------+-----+ -- | 1 | SPJ | 40 | -- +----+------+-----+ --getJustEntity :: forall record backend m. (PersistEntityBackend record ~ BaseBackend backend, MonadIO m, PersistEntity record, PersistStoreRead backend) => Key record -> ReaderT backend m (Entity record) -- | Like get, but returns the complete Entity. -- --
-- getSpjEntity :: MonadIO m => ReaderT SqlBackend m (Maybe (Entity User)) -- getSpjEntity = getEntity spjId ---- --
-- mSpjEnt <- getSpjEntity ---- -- The above query when applied on dataset-1, will get this -- entity: -- --
-- +----+------+-----+ -- | id | name | age | -- +----+------+-----+ -- | 1 | SPJ | 40 | -- +----+------+-----+ --getEntity :: forall e backend m. (PersistStoreRead backend, PersistRecordBackend e backend, MonadIO m) => Key e -> ReaderT backend m (Maybe (Entity e)) -- | Curry this to make a convenience function that loads an associated -- model. -- --
-- foreign = belongsTo foreignId --belongsTo :: forall ent1 ent2 backend m. (PersistStoreRead backend, PersistEntity ent1, PersistRecordBackend ent2 backend, MonadIO m) => (ent1 -> Maybe (Key ent2)) -> ent1 -> ReaderT backend m (Maybe ent2) -- | Same as belongsTo, but uses getJust and therefore is -- similarly unsafe. belongsToJust :: forall ent1 ent2 backend m. (PersistStoreRead backend, PersistEntity ent1, PersistRecordBackend ent2 backend, MonadIO m) => (ent1 -> Key ent2) -> ent1 -> ReaderT backend m ent2 -- | Like insert, but returns the complete Entity. -- --
-- insertHaskellEntity :: MonadIO m => ReaderT SqlBackend m (Entity User) -- insertHaskellEntity = insertEntity $ User "Haskell" 81 ---- --
-- haskellEnt <- insertHaskellEntity ---- -- The above query when applied on dataset-1, will produce this: -- --
-- +----+---------+-----+ -- | id | name | age | -- +----+---------+-----+ -- | 1 | SPJ | 40 | -- +----+---------+-----+ -- | 2 | Simon | 41 | -- +----+---------+-----+ -- | 3 | Haskell | 81 | -- +----+---------+-----+ --insertEntity :: forall e backend m. (PersistStoreWrite backend, PersistRecordBackend e backend, SafeToInsert e, MonadIO m, HasCallStack) => e -> ReaderT backend m (Entity e) -- | Like insertEntity but just returns the record instead of -- Entity. -- --
-- insertDaveRecord :: MonadIO m => ReaderT SqlBackend m User -- insertDaveRecord = insertRecord $ User "Dave" 50 ---- --
-- dave <- insertDaveRecord ---- -- The above query when applied on dataset-1, will produce this: -- --
-- +-----+------+-----+ -- |id |name |age | -- +-----+------+-----+ -- |1 |SPJ |40 | -- +-----+------+-----+ -- |2 |Simon |41 | -- +-----+------+-----+ -- |3 |Dave |50 | -- +-----+------+-----+ --insertRecord :: forall record backend m. (PersistEntityBackend record ~ BaseBackend backend, PersistEntity record, MonadIO m, PersistStoreWrite backend, SafeToInsert record, HasCallStack) => record -> ReaderT backend m record -- | A backwards-compatible alias for those that don't care about -- distinguishing between read and write queries. It signifies the -- assumption that, by default, a backend can write as well as read. type PersistUnique a = PersistUniqueWrite a -- | Queries against Unique keys (other than the id Key). -- -- Please read the general Persistent documentation to learn how to -- create Unique keys. -- -- Using this with an Entity without a Unique key leads to undefined -- behavior. A few of these functions require a single -- Unique, so using an Entity with multiple Uniques is also -- undefined. In these cases persistent's goal is to throw an exception -- as soon as possible, but persistent is still transitioning to that. -- -- SQL backends automatically create uniqueness constraints, but for -- MongoDB you must manually place a unique index on a field to have a -- uniqueness constraint. class PersistStoreRead backend => PersistUniqueRead backend -- | Get a record by unique key, if available. Returns also the identifier. -- --
-- getBySpjName :: MonadIO m => ReaderT SqlBackend m (Maybe (Entity User)) -- getBySpjName = getBy $ UniqueUserName "SPJ" ---- --
-- mSpjEnt <- getBySpjName ---- -- The above query when applied on dataset-1, will get this -- entity: -- --
-- +----+------+-----+ -- | id | name | age | -- +----+------+-----+ -- | 1 | SPJ | 40 | -- +----+------+-----+ --getBy :: forall record m. (PersistUniqueRead backend, MonadIO m, PersistRecordBackend record backend) => Unique record -> ReaderT backend m (Maybe (Entity record)) -- | Some functions in this module (insertUnique, insertBy, -- and replaceUnique) first query the unique indexes to check for -- conflicts. You could instead optimistically attempt to perform the -- operation (e.g. replace instead of replaceUnique). -- However, -- --
-- deleteBySpjName :: MonadIO m => ReaderT SqlBackend m () -- deleteBySpjName = deleteBy $ UniqueUserName "SPJ" ---- -- The above query when applied on dataset-1, will produce this: -- --
-- +-----+------+-----+ -- |id |name |age | -- +-----+------+-----+ -- |2 |Simon |41 | -- +-----+------+-----+ --deleteBy :: forall record m. (PersistUniqueWrite backend, MonadIO m, PersistRecordBackend record backend) => Unique record -> ReaderT backend m () -- | Like insert, but returns Nothing when the record -- couldn't be inserted because of a uniqueness constraint. -- --
-- linusId <- insertUnique $ User "Linus" 48 -- spjId <- insertUnique $ User "SPJ" 90 ---- --
-- +-----+------+-----+ -- |id |name |age | -- +-----+------+-----+ -- |1 |SPJ |40 | -- +-----+------+-----+ -- |2 |Simon |41 | -- +-----+------+-----+ -- |3 |Linus |48 | -- +-----+------+-----+ ---- -- Linus's record was inserted to dataset-1, while SPJ wasn't -- because SPJ already exists in dataset-1. insertUnique :: forall record m. (PersistUniqueWrite backend, MonadIO m, PersistRecordBackend record backend, SafeToInsert record) => record -> ReaderT backend m (Maybe (Key record)) -- | Update based on a uniqueness constraint or insert: -- --
-- upsertSpj :: MonadIO m => [Update User] -> ReaderT SqlBackend m (Entity User) -- upsertSpj updates = upsert (User "SPJ" 999) updates ---- --
-- mSpjEnt <- upsertSpj [UserAge +=. 15] ---- -- The above query when applied on dataset-1, will produce this: -- --
-- +-----+-----+--------+ -- |id |name |age | -- +-----+-----+--------+ -- |1 |SPJ |40 -> 55| -- +-----+-----+--------+ -- |2 |Simon|41 | -- +-----+-----+--------+ ---- --
-- upsertX :: MonadIO m => [Update User] -> ReaderT SqlBackend m (Entity User) -- upsertX updates = upsert (User "X" 999) updates ---- --
-- mXEnt <- upsertX [UserAge +=. 15] ---- -- The above query when applied on dataset-1, will produce this: -- --
-- +-----+-----+--------+ -- |id |name |age | -- +-----+-----+--------+ -- |1 |SPJ |40 | -- +-----+-----+--------+ -- |2 |Simon|41 | -- +-----+-----+--------+ -- |3 |X |999 | -- +-----+-----+--------+ ---- -- Next, what if the schema has two uniqueness constraints? Let's check -- it out using schema-2: -- --
-- mSpjEnt <- upsertSpj [UserAge +=. 15] ---- -- This fails with a compile-time type error alerting us to the fact that -- this record has multiple unique keys, and suggests that we look for -- upsertBy to select the unique key we want. upsert :: forall record m. (PersistUniqueWrite backend, MonadIO m, PersistRecordBackend record backend, OnlyOneUniqueKey record, SafeToInsert record) => record -> [Update record] -> ReaderT backend m (Entity record) -- | Update based on a given uniqueness constraint or insert: -- --
-- upsertBySpjName :: MonadIO m => User -> [Update User] -> ReaderT SqlBackend m (Entity User) -- upsertBySpjName record updates = upsertBy (UniqueUserName "SPJ") record updates ---- --
-- mSpjEnt <- upsertBySpjName (User "X" 999) [UserAge +=. 15] ---- -- The above query will alter dataset-1 to: -- --
-- +-----+-----+--------+ -- |id |name |age | -- +-----+-----+--------+ -- |1 |SPJ |40 -> 55| -- +-----+-----+--------+ -- |2 |Simon|41 | -- +-----+-----+--------+ ---- --
-- upsertBySimonAge :: MonadIO m => User -> [Update User] -> ReaderT SqlBackend m (Entity User) -- upsertBySimonAge record updates = upsertBy (UniqueUserAge 41) record updates ---- --
-- mPhilipEnt <- upsertBySimonAge (User "X" 999) [UserName =. "Philip"] ---- -- The above query will alter dataset-1 to: -- --
-- +----+-----------------+-----+ -- | id | name | age | -- +----+-----------------+-----+ -- | 1 | SPJ | 40 | -- +----+-----------------+-----+ -- | 2 | Simon -> Philip | 41 | -- +----+-----------------+-----+ ---- --
-- upsertByUnknownName :: MonadIO m => User -> [Update User] -> ReaderT SqlBackend m (Entity User) -- upsertByUnknownName record updates = upsertBy (UniqueUserName "Unknown") record updates ---- --
-- mXEnt <- upsertByUnknownName (User "X" 999) [UserAge +=. 15] ---- -- This query will alter dataset-1 to: -- --
-- +-----+-----+-----+ -- |id |name |age | -- +-----+-----+-----+ -- |1 |SPJ |40 | -- +-----+-----+-----+ -- |2 |Simon|41 | -- +-----+-----+-----+ -- |3 |X |999 | -- +-----+-----+-----+ --upsertBy :: forall record m. (PersistUniqueWrite backend, MonadIO m, PersistRecordBackend record backend, SafeToInsert record) => Unique record -> record -> [Update record] -> ReaderT backend m (Entity record) -- | A variant of getBy that takes the record itself instead of a -- Unique value and returns a record matching one of its unique keys, -- if available. -- --
-- mSpjEnt <- getBySpjValue ---- -- The above query when applied on dataset-1, will get this -- record: -- --
-- +----+------+-----+ -- | id | name | age | -- +----+------+-----+ -- | 1 | SPJ | 40 | -- +----+------+-----+ --getByValue :: forall record m backend. (MonadIO m, PersistUniqueRead backend, PersistRecordBackend record backend, AtLeastOneUniqueKey record) => record -> ReaderT backend m (Maybe (Entity record)) -- | Insert a value, checking for conflicts with any unique constraints. If -- a duplicate exists in the database, it is returned as Left. -- Otherwise, the new 'Key is returned as Right. -- --
-- l1 <- insertBy $ User "SPJ" 20 -- l2 <- insertBy $ User "XXX" 41 -- l3 <- insertBy $ User "SPJ" 40 -- r1 <- insertBy $ User "XXX" 100 ---- -- First three lines return Left because there're duplicates in -- given record's uniqueness constraints. While the last line returns a -- new key as Right. insertBy :: forall record backend m. (MonadIO m, PersistUniqueWrite backend, PersistRecordBackend record backend, AtLeastOneUniqueKey record, SafeToInsert record) => record -> ReaderT backend m (Either (Entity record) (Key record)) -- | Like insertEntity, but returns Nothing when the record -- couldn't be inserted because of a uniqueness constraint. -- --
-- insertUniqueSpjEntity :: MonadIO m => ReaderT SqlBackend m (Maybe (Entity User))
-- insertUniqueSpjEntity = insertUniqueEntity $ User "SPJ" 50
--
-- mSpjEnt <- insertUniqueSpjEntity
--
-- The above query results in Nothing, as SPJ already exists.
--
-- insertUniqueAlexaEntity :: MonadIO m => ReaderT SqlBackend m (Maybe (Entity User))
-- insertUniqueAlexaEntity = insertUniqueEntity $ User "Alexa" 3
--
-- mAlexaEnt <- insertUniqueAlexaEntity
--
-- Because none of the given record's uniqueness constraints clash with an
-- existing row, the above query, when applied on dataset-1, will produce this:
--
-- +----+-------+-----+
-- | id | name  | age |
-- +----+-------+-----+
-- | 1  | SPJ   | 40  |
-- +----+-------+-----+
-- | 2  | Simon | 41  |
-- +----+-------+-----+
-- | 3  | Alexa | 3   |
-- +----+-------+-----+
insertUniqueEntity :: forall record backend m. (MonadIO m, PersistRecordBackend record backend, PersistUniqueWrite backend, SafeToInsert record) => record -> ReaderT backend m (Maybe (Entity record))
-- | Attempt to replace the record of the given key with the given new
-- record. First query the unique fields to make sure the replacement
-- maintains uniqueness constraints.
--
-- Return Nothing if the replacement was made. If uniqueness is
-- violated, return a Just with the Unique violation.
replaceUnique :: forall record backend m. (MonadIO m, Eq (Unique record), PersistRecordBackend record backend, PersistUniqueWrite backend) => Key record -> record -> ReaderT backend m (Maybe (Unique record))
-- | Check whether there are any conflicts for unique keys with this entity
-- and existing entities in the database.
--
-- Returns Nothing if the entity would be unique, and could thus
-- safely be inserted. On a conflict it returns the conflicting key.
--
-- mAlanConst <- checkUnique $ User "Alan" 70
--
-- The above would be Nothing, since nothing in dataset-1 conflicts with it,
-- while this would be Just because SPJ already exists:
--
-- mSpjConst <- checkUnique $ User "SPJ" 60
checkUnique :: forall record backend m. (MonadIO m, PersistRecordBackend record backend, PersistUniqueRead backend) => record -> ReaderT backend m (Maybe (Unique record))
-- | Check whether there are any conflicts for unique keys with this entity
-- and existing entities in the database.
--
-- Returns Nothing if the entity would stay unique, and could thus
-- safely be updated. On a conflict it returns the conflicting key.
--
-- This is similar to checkUnique, except it's useful for updating -
-- when the particular entity already exists, it would normally
-- conflict with itself. This variant ignores those conflicts.
--
-- mAlanConst <- checkUnique $ User "Alan" 70
--
-- The above would be Nothing, since nothing in dataset-1 conflicts with it,
-- while this would be Just because SPJ already exists:
--
-- mSpjConst <- checkUnique $ User "SPJ" 60
checkUniqueUpdateable :: forall record backend m. (MonadIO m, PersistRecordBackend record backend, PersistUniqueRead backend) => Entity record -> ReaderT backend m (Maybe (Unique record))
-- | Return the single unique key for a record.
--
-- onlySimonConst :: MonadIO m => ReaderT SqlBackend m (Unique User)
-- onlySimonConst = onlyUnique $ User "Simon" 999
--
-- mSimonConst <- onlySimonConst
--
-- mSimonConst would be Simon's uniqueness constraint. Note that
-- onlyUnique only works for records with exactly one uniqueness
-- constraint; otherwise it fails with a type error.
onlyUnique :: forall record backend m. (MonadIO m, PersistUniqueWrite backend, PersistRecordBackend record backend, OnlyOneUniqueKey record) => record -> ReaderT backend m (Unique record)
-- | Returns a [Entity record] corresponding to the filters
-- and options provided.
--
-- Filters are constructed using the operators defined in
-- Database.Persist (and re-exported from
-- Database.Persist.Sql). Let's look at some examples:
--
-- usersWithAgeOver40 :: SqlPersistT IO [Entity User]
-- usersWithAgeOver40 =
--     selectList [UserAge >=. 40] []
--
-- If you provide multiple values in the list, the conditions are
-- ANDed together.
--
-- usersWithAgeBetween30And50 :: SqlPersistT IO [Entity User]
-- usersWithAgeBetween30And50 =
--     selectList
--         [ UserAge >=. 30
--         , UserAge <=. 50
--         ]
--         []
--
-- The second list contains the SelectOpt for a record. We can
-- select the first ten records with LimitTo:
--
-- firstTenUsers =
--     selectList [] [LimitTo 10]
--
-- And we can select the second ten users with OffsetBy.
--
-- secondTenUsers =
--     selectList [] [LimitTo 10, OffsetBy 10]
--
-- Be warned that LIMIT/OFFSET is a poor fit for pagination!
--
-- With Asc and Desc, we can provide the field we want to
-- sort on. We can provide multiple sort orders - later ones are used to
-- sort records that are equal on the first field.
--
-- newestUsers = -- selectList [] [Desc UserCreatedAt, LimitTo 10] -- -- oldestUsers = -- selectList [] [Asc UserCreatedAt, LimitTo 10] --selectList :: forall record backend m. (MonadIO m, PersistQueryRead backend, PersistRecordBackend record backend) => [Filter record] -> [SelectOpt record] -> ReaderT backend m [Entity record] -- | Get the Keys of all records matching the given criterion. -- -- For an example, see selectList. selectKeys :: forall record backend m. (PersistQueryRead backend, MonadResource m, PersistRecordBackend record backend, MonadReader backend m) => [Filter record] -> [SelectOpt record] -> ConduitM () (Key record) m () -- | A backwards-compatible alias for those that don't care about -- distinguishing between read and write queries. It signifies the -- assumption that, by default, a backend can write as well as read. type PersistQuery a = PersistQueryWrite a -- | Backends supporting conditional read operations. class (PersistCore backend, PersistStoreRead backend) => PersistQueryRead backend -- | Get all records matching the given criterion in the specified order. -- Returns also the identifiers. -- -- NOTE: This function returns an Acquire and a ConduitM, -- which implies that it streams from the database. It does not. Please -- use selectList to simplify the code. If you want streaming -- behavior, consider persistent-pagination which efficiently -- chunks a query into ranges, or investigate a backend-specific -- streaming solution. selectSourceRes :: (PersistQueryRead backend, PersistRecordBackend record backend, MonadIO m1, MonadIO m2) => [Filter record] -> [SelectOpt record] -> ReaderT backend m1 (Acquire (ConduitM () (Entity record) m2 ())) -- | Get just the first record for the criterion. selectFirst :: (PersistQueryRead backend, MonadIO m, PersistRecordBackend record backend) => [Filter record] -> [SelectOpt record] -> ReaderT backend m (Maybe (Entity record)) -- | Get the Keys of all records matching the given criterion. selectKeysRes :: (PersistQueryRead backend, MonadIO m1, MonadIO m2, PersistRecordBackend record backend) => [Filter record] -> [SelectOpt record] -> ReaderT backend m1 (Acquire (ConduitM () (Key record) m2 ())) -- | The total number of records fulfilling the given criterion. count :: (PersistQueryRead backend, MonadIO m, PersistRecordBackend record backend) => [Filter record] -> ReaderT backend m Int -- | Check if there is at least one record fulfilling the given criterion. exists :: (PersistQueryRead backend, MonadIO m, PersistRecordBackend record backend) => [Filter record] -> ReaderT backend m Bool -- | Backends supporting conditional write operations class (PersistQueryRead backend, PersistStoreWrite backend) => PersistQueryWrite backend -- | Update individual fields on any record matching the given criterion. updateWhere :: (PersistQueryWrite backend, MonadIO m, PersistRecordBackend record backend) => [Filter record] -> [Update record] -> ReaderT backend m () -- | Delete all records matching the given criterion. deleteWhere :: (PersistQueryWrite backend, MonadIO m, PersistRecordBackend record backend) => [Filter record] -> ReaderT backend m () -- | Get all records matching the given criterion in the specified order. -- Returns also the identifiers. -- -- WARNING: This function returns a ConduitM, which implies that -- it streams the results. It does not stream results on most backends. -- If you need streaming, see persistent-pagination for a means -- of chunking results based on indexed ranges. selectSource :: forall record backend m. 
(PersistQueryRead backend, MonadResource m, PersistRecordBackend record backend, MonadReader backend m) => [Filter record] -> [SelectOpt record] -> ConduitM () (Entity record) m ()
-- | Call selectKeys but return the result as a list.
selectKeysList :: forall record backend m. (MonadIO m, PersistQueryRead backend, PersistRecordBackend record backend) => [Filter record] -> [SelectOpt record] -> ReaderT backend m [Key record]
-- | Persistent serializes Haskell records to the database. A Database
-- Entity (A row in SQL, a document in MongoDB, etc) corresponds
-- to a Key plus a Haskell record.
--
-- For every Haskell record type stored in the database there is a
-- corresponding PersistEntity instance. An instance of
-- PersistEntity contains meta-data for the record. PersistEntity also
-- helps abstract over different record types. That way the same query
-- interface can return a PersistEntity, with each query returning
-- different types of Haskell records.
--
-- Some advanced type system capabilities are used to make this process
-- type-safe. Persistent users usually don't need to understand the class
-- associated data and functions.
class (PersistField (Key record), ToJSON (Key record), FromJSON (Key record), Show (Key record), Read (Key record), Eq (Key record), Ord (Key record)) => PersistEntity record where {
    -- | Persistent allows multiple different backends (databases).
    type family PersistEntityBackend record;
    -- | By default, a backend will automatically generate the key. Instead, you
    -- can specify a Primary key made up of unique values.
    data family Key record;
    -- | An EntityField is parameterised by the Haskell record it
    -- belongs to and the additional type of that field.
    --
    -- As of persistent-2.11.0.0, it's possible to use the
    -- OverloadedLabels language extension to refer to
    -- EntityField values polymorphically. See the documentation on
    -- SymbolToField for more information.
    data family EntityField record :: Type -> Type;
    -- | Unique keys besides the Key.
    data family Unique record;
}
-- | A lower-level key operation.
keyToValues :: PersistEntity record => Key record -> [PersistValue]
-- | A lower-level key operation.
keyFromValues :: PersistEntity record => [PersistValue] -> Either Text (Key record)
-- | A meta-operation to retrieve the Key EntityField.
persistIdField :: PersistEntity record => EntityField record (Key record)
-- | Retrieve the EntityDef meta-data for the record.
entityDef :: PersistEntity record => proxy record -> EntityDef
-- | Return meta-data for a given EntityField.
persistFieldDef :: PersistEntity record => EntityField record typ -> FieldDef
-- | A meta-operation to get the database fields of a record.
toPersistFields :: PersistEntity record => record -> [PersistValue]
-- | A lower-level operation to convert from database values to a Haskell
-- record.
fromPersistValues :: PersistEntity record => [PersistValue] -> Either Text record
-- | This function allows you to build an Entity a by
-- specifying an action that returns a value for the field in the
-- callback function. Let's look at an example.
--
-- parseFromEnvironmentVariables :: IO (Entity User) -- parseFromEnvironmentVariables = -- tabulateEntityA $ \userField -> -- case userField of -- UserName -> -- getEnv USER_NAME -- UserAge -> do -- ageVar <- getEnv USER_AGE -- case readMaybe ageVar of -- Just age -> -- pure age -- Nothing -> -- error $ "Failed to parse Age from: " <> ageVar -- UserAddressId -> do -- addressVar <- getEnv USER_ADDRESS_ID -- pure $ AddressKey addressVar --tabulateEntityA :: (PersistEntity record, Applicative f) => (forall a. EntityField record a -> f a) -> f (Entity record) -- | A meta operation to retrieve all the Unique keys. persistUniqueKeys :: PersistEntity record => record -> [Unique record] -- | A lower level operation. persistUniqueToFieldNames :: PersistEntity record => Unique record -> NonEmpty (FieldNameHS, FieldNameDB) -- | A lower level operation. persistUniqueToValues :: PersistEntity record => Unique record -> [PersistValue] -- | Use a PersistField as a lens. fieldLens :: PersistEntity record => EntityField record field -> forall f. Functor f => (field -> f field) -> Entity record -> f (Entity record) -- | Extract a Key record from a record value. -- Currently, this is only defined for entities using the -- Primary syntax for natural/composite keys. In a future -- version of persistent which incorporates the ID directly into -- the entity, this will always be Just. keyFromRecordM :: PersistEntity record => Maybe (record -> Key record) -- | Construct an Entity record by providing a value for -- each of the record's fields. -- -- These constructions are equivalent: -- --
-- entityMattConstructor, entityMattTabulate :: Entity User
-- entityMattConstructor =
-- Entity
-- { entityKey = toSqlKey 123
-- , entityVal =
-- User
-- { userName = Matt
-- , userAge = 33
-- }
-- }
--
-- entityMattTabulate =
-- tabulateEntity $ \case
-- UserId ->
-- toSqlKey 123
-- UserName ->
-- Matt
-- UserAge ->
-- 33
--
--
-- This is a specialization of tabulateEntityA, which allows you
-- to construct an Entity by providing an Applicative
-- action for each field instead of a regular function.
tabulateEntity :: PersistEntity record => (forall a. EntityField record a -> a) -> Entity record
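-- (Editorial sketch, not part of the generated documentation: the following
-- shows one way the fieldLens function documented above can be used as a
-- plain van Laarhoven lens. It assumes the User entity from the examples in
-- this module and the usual Const/Identity functor imports.)
--
-- import Data.Functor.Const (Const (..))
-- import Data.Functor.Identity (Identity (..))
--
-- -- View the name stored inside an Entity User.
-- getUserName :: Entity User -> Text
-- getUserName = getConst . fieldLens UserName Const
--
-- -- Replace the name inside an Entity User, leaving the key untouched.
-- setUserName :: Text -> Entity User -> Entity User
-- setUserName name = runIdentity . fieldLens UserName (const (Identity name))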
-- | This type class is used with the OverloadedLabels extension
-- to provide a more convenient means of using the EntityField
-- type. EntityField definitions are prefixed with the type name
-- to avoid ambiguity, but this ambiguity can result in verbose code.
--
-- If you have a table User with a name Text field,
-- then the corresponding EntityField is UserName. With
-- this, we can write #name :: EntityField User Text.
--
-- What's more fun is that the type is more general: it's actually
-- #name :: (SymbolToField "name" rec typ) => EntityField rec
-- typ
--
-- Which means it is *polymorphic* over the actual record. This allows
-- you to write code that can be generic over the tables, provided they
-- have the right fields.
class SymbolToField (sym :: Symbol) rec typ | sym rec -> typ
symbolToField :: SymbolToField sym rec typ => EntityField rec typ
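-- (Editorial sketch, not part of the generated documentation: a minimal
-- example of the OverloadedLabels usage described above, assuming the User
-- schema from the earlier examples with a name field of type Text, and the
-- OverloadedLabels and OverloadedStrings extensions enabled.)
--
-- {-# LANGUAGE OverloadedLabels, OverloadedStrings #-}
--
-- selectSpjByLabel :: MonadIO m => ReaderT SqlBackend m [Entity User]
-- selectSpjByLabel = selectList [#name ==. "SPJ"] []
--
-- Because #name only requires a SymbolToField "name" rec Text instance, the
-- same filter expression can be reused for any record with such a field.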
-- | This class teaches Persistent how to take a custom type and marshal it
-- to and from a PersistValue, allowing it to be stored in a
-- database.
--
--
-- {-# LANGUAGE GeneralizedNewtypeDeriving #-}
--
-- newtype HashedPassword = HashedPassword ByteString
-- deriving (Eq, Show, PersistField, PersistFieldSql)
--
--
--
-- {-# LANGUAGE GeneralizedNewtypeDeriving #-}
-- import qualified Data.Text as T
-- import qualified Data.Char as C
--
-- -- | An American Social Security Number
-- newtype SSN = SSN Text
-- deriving (Eq, Show, PersistFieldSql)
--
-- mkSSN :: Text -> Either Text SSN
-- mkSSN t = if (T.length t == 9) && (T.all C.isDigit t)
-- then Right $ SSN t
-- else Left $ "Invalid SSN: " <> t
--
-- instance PersistField SSN where
-- toPersistValue (SSN t) = PersistText t
-- fromPersistValue (PersistText t) = mkSSN t
-- -- Handle cases where the database does not give us PersistText
-- fromPersistValue x = Left $ "File.hs: When trying to deserialize an SSN: expected PersistText, received: " <> T.pack (show x)
--
--
-- Tips:
--
--
-- foo ::
--     ( PersistEntity record
--     , PersistEntityBackend record ~ BaseBackend backend
--     , IsSqlBackend backend
--     )
--
-- This can be replaced with:
--
-- foo ::
--     ( PersistEntity record
--     , PersistEntityBackend record ~ backend
--     , BackendCompatible SqlBackend backend
--     )
--
-- This works for SqlReadBackend because of the instance
-- BackendCompatible SqlBackend SqlReadBackend, without needing to go
-- through the BaseBackend type family.
--
-- Likewise, functions that are currently hardcoded to use
-- SqlBackend can be generalized:
--
-- -- before: -- asdf :: ReaderT SqlBackend m () -- asdf = pure () -- -- -- after: -- asdf' :: BackendCompatible SqlBackend backend => ReaderT backend m () -- asdf' = withCompatibleBackend asdf --class BackendCompatible sup sub projectBackend :: BackendCompatible sup sub => sub -> sup -- | Run a query against a compatible backend, by projecting the backend -- -- This is a helper for using queries which run against a specific -- backend type that your backend is compatible with. withCompatibleBackend :: BackendCompatible sup sub => ReaderT sup m a -> ReaderT sub m a class PersistCore backend where { data family BackendKey backend; } -- | ToBackendKey converts a PersistEntity Key into a -- BackendKey This can be used by each backend to convert between -- a Key and a plain Haskell type. For Sql, that is done with -- toSqlKey and fromSqlKey. -- -- By default, a PersistEntity uses the default BackendKey -- for its Key and is an instance of ToBackendKey -- -- A Key that instead uses a custom type will not be an instance -- of ToBackendKey. class (PersistEntity record, PersistEntityBackend record ~ backend, PersistCore backend) => ToBackendKey backend record toBackendKey :: ToBackendKey backend record => Key record -> BackendKey backend fromBackendKey :: ToBackendKey backend record => BackendKey backend -> Key record -- | Predefined toJSON. The resulting JSON looks like {"key": -- 1, "value": {"name": ...}}. -- -- The typical usage is: -- --
-- instance ToJSON (Entity User) where -- toJSON = keyValueEntityToJSON --keyValueEntityToJSON :: (PersistEntity record, ToJSON record) => Entity record -> Value -- | Predefined parseJSON. The input JSON looks like {"key": -- 1, "value": {"name": ...}}. -- -- The typical usage is: -- --
-- instance FromJSON (Entity User) where -- parseJSON = keyValueEntityFromJSON --keyValueEntityFromJSON :: (PersistEntity record, FromJSON record) => Value -> Parser (Entity record) -- | Predefined toJSON. The resulting JSON looks like {"id": -- 1, "name": ...}. -- -- The typical usage is: -- --
-- instance ToJSON (Entity User) where -- toJSON = entityIdToJSON --entityIdToJSON :: (PersistEntity record, ToJSON record) => Entity record -> Value -- | Predefined parseJSON. The input JSON looks like {"id": 1, -- "name": ...}. -- -- The typical usage is: -- --
-- instance FromJSON (Entity User) where -- parseJSON = entityIdFromJSON --entityIdFromJSON :: (PersistEntity record, FromJSON record) => Value -> Parser (Entity record) -- | Convenience function for getting a free PersistField instance -- from a type with JSON instances. -- -- Example usage in combination with fromPersistValueJSON: -- --
-- instance PersistField MyData where
--   fromPersistValue = fromPersistValueJSON
--   toPersistValue = toPersistValueJSON
toPersistValueJSON :: ToJSON a => a -> PersistValue
-- | Convenience function for getting a free PersistField instance
-- from a type with JSON instances. The JSON parser used will accept JSON
-- values other than objects and arrays. So, if your instance serializes
-- the data to a JSON string, this will still work.
--
-- Example usage in combination with toPersistValueJSON:
--
-- instance PersistField MyData where -- fromPersistValue = fromPersistValueJSON -- toPersistValue = toPersistValueJSON --fromPersistValueJSON :: FromJSON a => PersistValue -> Either Text a -- | Breaking changes to this module are not reflected in the major version -- number. Prefer to import from Database.Persist.Sql instead. If -- you neeed something from this module, please file an issue on GitHub. module Database.Persist.Sql.Types.Internal -- | Class which allows the plucking of a BaseBackend backend from -- some larger type. For example, instance HasPersistBackend -- (SqlReadBackend, Int) where type BaseBackend (SqlReadBackend, Int) = -- SqlBackend persistBackend = unSqlReadBackend . fst class HasPersistBackend backend where { type family BaseBackend backend; } persistBackend :: HasPersistBackend backend => backend -> BaseBackend backend -- | Class which witnesses that backend is essentially the same as -- BaseBackend backend. That is, they're isomorphic and -- backend is just some wrapper over BaseBackend -- backend. class (HasPersistBackend backend) => IsPersistBackend backend -- | This function is how we actually construct and tag a backend as having -- read or write capabilities. It should be used carefully and only when -- actually constructing a backend. Careless use allows us to -- accidentally run a write query against a read-only database. mkPersistBackend :: IsPersistBackend backend => BaseBackend backend -> backend -- | An SQL backend which can only handle read queries -- -- The constructor was exposed in 2.10.0. newtype SqlReadBackend SqlReadBackend :: SqlBackend -> SqlReadBackend [$sel:unSqlReadBackend:SqlReadBackend] :: SqlReadBackend -> SqlBackend -- | An SQL backend which can handle read or write queries -- -- The constructor was exposed in 2.10.0 newtype SqlWriteBackend SqlWriteBackend :: SqlBackend -> SqlWriteBackend [$sel:unSqlWriteBackend:SqlWriteBackend] :: SqlWriteBackend -> SqlBackend -- | Useful for running a read query against a backend with unknown -- capabilities. readToUnknown :: Monad m => ReaderT SqlReadBackend m a -> ReaderT SqlBackend m a -- | Useful for running a read query against a backend with read and write -- capabilities. readToWrite :: Monad m => ReaderT SqlReadBackend m a -> ReaderT SqlWriteBackend m a -- | Useful for running a write query against an untagged backend with -- unknown capabilities. writeToUnknown :: Monad m => ReaderT SqlWriteBackend m a -> ReaderT SqlBackend m a type LogFunc = Loc -> LogSource -> LogLevel -> LogStr -> IO () data InsertSqlResult ISRSingle :: Text -> InsertSqlResult ISRInsertGet :: Text -> Text -> InsertSqlResult ISRManyKeys :: Text -> [PersistValue] -> InsertSqlResult -- | A Statement is a representation of a database query that has -- been prepared and stored on the server side. data Statement Statement :: IO () -> IO () -> ([PersistValue] -> IO Int64) -> (forall m. MonadIO m => [PersistValue] -> Acquire (ConduitM () [PersistValue] m ())) -> Statement [stmtFinalize] :: Statement -> IO () [stmtReset] :: Statement -> IO () [stmtExecute] :: Statement -> [PersistValue] -> IO Int64 [stmtQuery] :: Statement -> forall m. 
MonadIO m => [PersistValue] -> Acquire (ConduitM () [PersistValue] m ()) -- | Please refer to the documentation for the database in question for a -- full overview of the semantics of the varying isloation levels data IsolationLevel ReadUncommitted :: IsolationLevel ReadCommitted :: IsolationLevel RepeatableRead :: IsolationLevel Serializable :: IsolationLevel makeIsolationLevelStatement :: (Monoid s, IsString s) => IsolationLevel -> s -- | A SqlBackend represents a handle or connection to a database. -- It contains functions and values that allow databases to have more -- optimized implementations, as well as references that benefit -- performance and sharing. -- -- Instead of using the SqlBackend constructor directly, use the -- mkSqlBackend function. -- -- A SqlBackend is *not* thread-safe. You should not assume that a -- SqlBackend can be shared among threads and run concurrent -- queries. This *will* result in problems. Instead, you should create a -- Pool SqlBackend, known as a -- ConnectionPool, and pass that around in multi-threaded -- applications. -- -- To run actions in the persistent library, you should use the -- runSqlConn function. If you're using a multithreaded -- application, use the runSqlPool function. data SqlBackend SqlBackend :: (Text -> IO Statement) -> (EntityDef -> [PersistValue] -> InsertSqlResult) -> Maybe (EntityDef -> [[PersistValue]] -> InsertSqlResult) -> Maybe (EntityDef -> NonEmpty (FieldNameHS, FieldNameDB) -> Text -> Text) -> Maybe (EntityDef -> Int -> Text) -> StatementCache -> IO () -> ([EntityDef] -> (Text -> IO Statement) -> EntityDef -> IO (Either [Text] [(Bool, Text)])) -> ((Text -> IO Statement) -> Maybe IsolationLevel -> IO ()) -> ((Text -> IO Statement) -> IO ()) -> ((Text -> IO Statement) -> IO ()) -> (FieldNameDB -> Text) -> (EntityDef -> Text) -> (Text -> Text) -> Text -> Text -> ((Int, Int) -> Text -> Text) -> LogFunc -> Maybe Int -> Maybe (EntityDef -> Int -> Text) -> Vault -> SqlBackendHooks -> SqlBackend -- | This function should prepare a Statement in the target -- database, which should allow for efficient query reuse. [connPrepare] :: SqlBackend -> Text -> IO Statement -- | This function generates the SQL and values necessary for performing an -- insert against the database. [connInsertSql] :: SqlBackend -> EntityDef -> [PersistValue] -> InsertSqlResult -- | SQL for inserting many rows and returning their primary keys, for -- backends that support this functionality. If Nothing, rows will -- be inserted one-at-a-time using connInsertSql. [connInsertManySql] :: SqlBackend -> Maybe (EntityDef -> [[PersistValue]] -> InsertSqlResult) -- | Some databases support performing UPSERT _and_ RETURN entity in a -- single call. -- -- This field when set will be used to generate the UPSERT+RETURN sql -- given * an entity definition * updates to be run on unique key(s) -- collision -- -- When left as Nothing, we find the unique key from entity def -- before * trying to fetch an entity by said key * perform an update -- when result found, else issue an insert * return new entity from db [connUpsertSql] :: SqlBackend -> Maybe (EntityDef -> NonEmpty (FieldNameHS, FieldNameDB) -> Text -> Text) -- | Some databases support performing bulk UPSERT, specifically "insert or -- replace many records" in a single call. -- -- This field when set, given * an entity definition * number of records -- to be inserted should produce a PUT MANY sql with placeholders for -- records -- -- When left as Nothing, we default to using -- defaultPutMany. 
[connPutManySql] :: SqlBackend -> Maybe (EntityDef -> Int -> Text) -- | A reference to the cache of statements. Statements are keyed by -- the Text queries that generated them. [connStmtMap] :: SqlBackend -> StatementCache -- | Close the underlying connection. [connClose] :: SqlBackend -> IO () -- | This function returns the migrations required to include the -- EntityDef parameter in the [EntityDef] -- database. This might include creating a new table if the entity is not -- present, or altering an existing table if it is. [connMigrateSql] :: SqlBackend -> [EntityDef] -> (Text -> IO Statement) -> EntityDef -> IO (Either [Text] [(Bool, Text)]) -- | A function to begin a transaction for the underlying database. [connBegin] :: SqlBackend -> (Text -> IO Statement) -> Maybe IsolationLevel -> IO () -- | A function to commit a transaction to the underlying database. [connCommit] :: SqlBackend -> (Text -> IO Statement) -> IO () -- | A function to roll back a transaction on the underlying database. [connRollback] :: SqlBackend -> (Text -> IO Statement) -> IO () -- | A function to extract and escape the name of the column corresponding -- to the provided field. [connEscapeFieldName] :: SqlBackend -> FieldNameDB -> Text -- | A function to extract and escape the name of the table corresponding -- to the provided entity. PostgreSQL uses this to support schemas. [connEscapeTableName] :: SqlBackend -> EntityDef -> Text -- | A function to escape raw DB identifiers. MySQL uses backticks, while -- PostgreSQL uses quotes, and so on. [connEscapeRawName] :: SqlBackend -> Text -> Text [connNoLimit] :: SqlBackend -> Text -- | A tag displaying what database the SqlBackend is for. Can be -- used to differentiate features in downstream libraries for different -- database backends. [connRDBMS] :: SqlBackend -> Text -- | Attach a 'LIMIT/OFFSET' clause to a SQL query. Note that LIMIT/OFFSET -- is problematic for performance, and indexed range queries are the -- superior way to offer pagination. [connLimitOffset] :: SqlBackend -> (Int, Int) -> Text -> Text -- | A log function for the SqlBackend to use. [connLogFunc] :: SqlBackend -> LogFunc -- | Some databases (probably only Sqlite) have a limit on how many -- question-mark parameters may be used in a statement [connMaxParams] :: SqlBackend -> Maybe Int -- | Some databases support performing bulk an atomic+bulk INSERT where -- constraint conflicting entities can replace existing entities. -- -- This field when set, given * an entity definition * number of records -- to be inserted should produce a INSERT sql with placeholders for -- primary+record fields -- -- When left as Nothing, we default to using -- defaultRepsertMany. [connRepsertManySql] :: SqlBackend -> Maybe (EntityDef -> Int -> Text) -- | Carry arbitrary payloads for the connection that may be used to -- propagate information into hooks. [connVault] :: SqlBackend -> Vault -- | Instrumentation hooks that may be used to track the behaviour of a -- backend. [connHooks] :: SqlBackend -> SqlBackendHooks -- | A constraint synonym which witnesses that a backend is SQL and can run -- read queries. type SqlBackendCanRead backend = (BackendCompatible SqlBackend backend, PersistQueryRead backend, PersistStoreRead backend, PersistUniqueRead backend) -- | A constraint synonym which witnesses that a backend is SQL and can run -- read and write queries. 
type SqlBackendCanWrite backend = (SqlBackendCanRead backend, PersistQueryWrite backend, PersistStoreWrite backend, PersistUniqueWrite backend) -- | Like SqlPersistT but compatible with any SQL backend which -- can handle read queries. type SqlReadT m a = forall backend. (SqlBackendCanRead backend) => ReaderT backend m a -- | Like SqlPersistT but compatible with any SQL backend which -- can handle read and write queries. type SqlWriteT m a = forall backend. (SqlBackendCanWrite backend) => ReaderT backend m a -- | A backend which is a wrapper around SqlBackend. type IsSqlBackend backend = (IsPersistBackend backend, BaseBackend backend ~ SqlBackend) newtype SqlBackendHooks SqlBackendHooks :: (SqlBackend -> Text -> Statement -> IO Statement) -> SqlBackendHooks [hookGetStatement] :: SqlBackendHooks -> SqlBackend -> Text -> Statement -> IO Statement instance Database.Persist.Class.PersistStore.HasPersistBackend Database.Persist.Sql.Types.Internal.SqlWriteBackend instance Database.Persist.Class.PersistStore.IsPersistBackend Database.Persist.Sql.Types.Internal.SqlWriteBackend instance Database.Persist.Class.PersistStore.HasPersistBackend Database.Persist.Sql.Types.Internal.SqlReadBackend instance Database.Persist.Class.PersistStore.IsPersistBackend Database.Persist.Sql.Types.Internal.SqlReadBackend -- | Welcome to persistent! -- -- This library intends to provide an easy, flexible, and convenient -- interface to various data storage backends. Backends include SQL -- databases, like mysql, postgresql, and -- sqlite, as well as NoSQL databases, like mongodb and -- redis. -- -- If you intend on using a SQL database, then check out -- Database.Persist.Sql. module Database.Persist -- | Assign a field a value. -- --
-- updateAge :: MonadIO m => ReaderT SqlBackend m () -- updateAge = updateWhere [UserName ==. "SPJ" ] [UserAge =. 45] ---- -- Similar to updateWhere which is shown in the above example you -- can use other functions present in the module -- Database.Persist.Class. Note that the first parameter of -- updateWhere is [Filter val] and second parameter is -- [Update val]. By comparing this with the type of ==. and -- =., you can see that they match up in the above usage. -- -- The above query when applied on dataset-1, will produce this: -- --
-- +-----+-----+--------+ -- |id |name |age | -- +-----+-----+--------+ -- |1 |SPJ |40 -> 45| -- +-----+-----+--------+ -- |2 |Simon|41 | -- +-----+-----+--------+ --(=.) :: forall v typ. PersistField typ => EntityField v typ -> typ -> Update v infixr 3 =. -- | Assign a field by addition (+=). -- --
-- addAge :: MonadIO m => ReaderT SqlBackend m () -- addAge = updateWhere [UserName ==. "SPJ" ] [UserAge +=. 1] ---- -- The above query when applied on dataset-1, will produce this: -- --
-- +-----+-----+---------+ -- |id |name |age | -- +-----+-----+---------+ -- |1 |SPJ |40 -> 41 | -- +-----+-----+---------+ -- |2 |Simon|41 | -- +-----+-----+---------+ --(+=.) :: forall v typ. PersistField typ => EntityField v typ -> typ -> Update v infixr 3 +=. -- | Assign a field by subtraction (-=). -- --
-- subtractAge :: MonadIO m => ReaderT SqlBackend m () -- subtractAge = updateWhere [UserName ==. "SPJ" ] [UserAge -=. 1] ---- -- The above query when applied on dataset-1, will produce this: -- --
-- +-----+-----+---------+ -- |id |name |age | -- +-----+-----+---------+ -- |1 |SPJ |40 -> 39 | -- +-----+-----+---------+ -- |2 |Simon|41 | -- +-----+-----+---------+ --(-=.) :: forall v typ. PersistField typ => EntityField v typ -> typ -> Update v infixr 3 -=. -- | Assign a field by multiplication (*=). -- --
-- multiplyAge :: MonadIO m => ReaderT SqlBackend m () -- multiplyAge = updateWhere [UserName ==. "SPJ" ] [UserAge *=. 2] ---- -- The above query when applied on dataset-1, will produce this: -- --
-- +-----+-----+--------+ -- |id |name |age | -- +-----+-----+--------+ -- |1 |SPJ |40 -> 80| -- +-----+-----+--------+ -- |2 |Simon|41 | -- +-----+-----+--------+ --(*=.) :: forall v typ. PersistField typ => EntityField v typ -> typ -> Update v infixr 3 *=. -- | Assign a field by division (/=). -- --
-- divideAge :: MonadIO m => ReaderT SqlBackend m () -- divideAge = updateWhere [UserName ==. "SPJ" ] [UserAge /=. 2] ---- -- The above query when applied on dataset-1, will produce this: -- --
-- +-----+-----+---------+ -- |id |name |age | -- +-----+-----+---------+ -- |1 |SPJ |40 -> 20 | -- +-----+-----+---------+ -- |2 |Simon|41 | -- +-----+-----+---------+ --(/=.) :: forall v typ. PersistField typ => EntityField v typ -> typ -> Update v infixr 3 /=. -- | Check for equality. -- --
-- selectSPJ :: MonadIO m => ReaderT SqlBackend m [Entity User] -- selectSPJ = selectList [UserName ==. "SPJ" ] [] ---- -- The above query when applied on dataset-1, will produce this: -- --
-- +-----+-----+-----+ -- |id |name |age | -- +-----+-----+-----+ -- |1 |SPJ |40 | -- +-----+-----+-----+ --(==.) :: forall v typ. PersistField typ => EntityField v typ -> typ -> Filter v infix 4 ==. -- | Non-equality check. -- --
-- selectSimon :: MonadIO m => ReaderT SqlBackend m [Entity User] -- selectSimon = selectList [UserName !=. "SPJ" ] [] ---- -- The above query when applied on dataset-1, will produce this: -- --
-- +-----+-----+-----+ -- |id |name |age | -- +-----+-----+-----+ -- |2 |Simon|41 | -- +-----+-----+-----+ --(!=.) :: forall v typ. PersistField typ => EntityField v typ -> typ -> Filter v infix 4 !=. -- | Less-than check. -- --
-- selectLessAge :: MonadIO m => ReaderT SqlBackend m [Entity User] -- selectLessAge = selectList [UserAge <. 41 ] [] ---- -- The above query when applied on dataset-1, will produce this: -- --
-- +-----+-----+-----+ -- |id |name |age | -- +-----+-----+-----+ -- |1 |SPJ |40 | -- +-----+-----+-----+ --(<.) :: forall v typ. PersistField typ => EntityField v typ -> typ -> Filter v infix 4 <. -- | Greater-than check. -- --
-- selectGreaterAge :: MonadIO m => ReaderT SqlBackend m [Entity User] -- selectGreaterAge = selectList [UserAge >. 40 ] [] ---- -- The above query when applied on dataset-1, will produce this: -- --
-- +-----+-----+-----+ -- |id |name |age | -- +-----+-----+-----+ -- |2 |Simon|41 | -- +-----+-----+-----+ --(>.) :: forall v typ. PersistField typ => EntityField v typ -> typ -> Filter v infix 4 >. -- | Less-than or equal check. -- --
-- selectLessEqualAge :: MonadIO m => ReaderT SqlBackend m [Entity User] -- selectLessEqualAge = selectList [UserAge <=. 40 ] [] ---- -- The above query when applied on dataset-1, will produce this: -- --
-- +-----+-----+-----+ -- |id |name |age | -- +-----+-----+-----+ -- |1 |SPJ |40 | -- +-----+-----+-----+ --(<=.) :: forall v typ. PersistField typ => EntityField v typ -> typ -> Filter v infix 4 <=. -- | Greater-than or equal check. -- --
-- selectGreaterEqualAge :: MonadIO m => ReaderT SqlBackend m [Entity User] -- selectGreaterEqualAge = selectList [UserAge >=. 41 ] [] ---- -- The above query when applied on dataset-1, will produce this: -- --
-- +-----+-----+-----+ -- |id |name |age | -- +-----+-----+-----+ -- |2 |Simon|41 | -- +-----+-----+-----+ --(>=.) :: forall v typ. PersistField typ => EntityField v typ -> typ -> Filter v infix 4 >=. -- | Check if value is in given list. -- --
-- selectUsers :: MonadIO m => ReaderT SqlBackend m [Entity User] -- selectUsers = selectList [UserAge <-. [40, 41]] [] ---- -- The above query when applied on dataset-1, will produce this: -- --
-- +-----+-----+-----+ -- |id |name |age | -- +-----+-----+-----+ -- |1 |SPJ |40 | -- +-----+-----+-----+ -- |2 |Simon|41 | -- +-----+-----+-----+ ---- --
-- selectSPJ :: MonadIO m => ReaderT SqlBackend m [Entity User] -- selectSPJ = selectList [UserAge <-. [40]] [] ---- -- The above query when applied on dataset-1, will produce this: -- --
-- +-----+-----+-----+ -- |id |name |age | -- +-----+-----+-----+ -- |1 |SPJ |40 | -- +-----+-----+-----+ --(<-.) :: forall v typ. PersistField typ => EntityField v typ -> [typ] -> Filter v infix 4 <-. -- | Check if value is not in given list. -- --
-- selectSimon :: MonadIO m => ReaderT SqlBackend m [Entity User] -- selectSimon = selectList [UserAge /<-. [40]] [] ---- -- The above query when applied on dataset-1, will produce this: -- --
-- +-----+-----+-----+ -- |id |name |age | -- +-----+-----+-----+ -- |2 |Simon|41 | -- +-----+-----+-----+ --(/<-.) :: forall v typ. PersistField typ => EntityField v typ -> [typ] -> Filter v infix 4 /<-. -- | The OR of two lists of filters. For example: -- --
-- selectList -- ([ PersonAge >. 25 -- , PersonAge <. 30 ] ||. -- [ PersonIncome >. 15000 -- , PersonIncome <. 25000 ]) -- [] ---- -- will filter records where a person's age is between 25 and 30 -- or a person's income is between (15000 and 25000). -- -- If you are looking for an (&&.) operator to do (A -- AND B AND (C OR D)) you can use the (++) operator -- instead as there is no (&&.). For example: -- --
-- selectList -- ([ PersonAge >. 25 -- , PersonAge <. 30 ] ++ -- ([PersonCategory ==. 1] ||. -- [PersonCategory ==. 5])) -- [] ---- -- will filter records where a person's age is between 25 and 30 -- and (person's category is either 1 or 5). (||.) :: forall v. [Filter v] -> [Filter v] -> [Filter v] infixl 3 ||. -- | Convert list of PersistValues into textual representation of -- JSON object. This is a type-constrained synonym for toJsonText. listToJSON :: [PersistValue] -> Text -- | Convert map (list of tuples) into textual representation of JSON -- object. This is a type-constrained synonym for toJsonText. mapToJSON :: [(Text, PersistValue)] -> Text -- | A more general way to convert instances of ToJSON type class to -- strict text Text. toJsonText :: ToJSON j => j -> Text -- | FIXME Add documentation to that. getPersistMap :: PersistValue -> Either Text [(Text, PersistValue)] -- | FIXME What's this exactly? limitOffsetOrder :: PersistEntity val => [SelectOpt val] -> (Int, Int, [SelectOpt val]) module Database.Persist.Sql.Util parseEntityValues :: PersistEntity record => EntityDef -> [PersistValue] -> Either Text (Entity record) keyAndEntityColumnNames :: EntityDef -> SqlBackend -> NonEmpty Text entityColumnCount :: EntityDef -> Int isIdField :: forall record typ. PersistEntity record => EntityField record typ -> Bool -- | Returns True if the entity has a natural key defined with the -- Primary keyword. -- -- A natural key is a key that is inherent to the record, and is part of -- the actual Haskell record. The opposite of a natural key is a -- "surrogate key", which is not part of the normal domain object. -- Automatically generated ID columns are the most common surrogate ID, -- while an email address is a common natural key. -- --
-- User -- email String -- name String -- Primary email -- -- Person -- Id UUID -- name String -- -- Follower -- name String ---- -- Given these entity definitions, User would return -- True, because the Primary keyword sets the -- email column to be the primary key. The generated Haskell -- type would look like this: -- --
-- data User = User
-- { userEmail :: String
-- , userName :: String
-- }
--
--
-- Person would be false. While the Id syntax allows
-- you to define a custom ID type for an entity, the Id column
-- is a surrogate key.
--
-- The same is true for Follower. The automatically generated
-- autoincremented integer primary key is a surrogate key.
--
-- There's nothing preventing you from defining a Primary
-- definition that refers to a surrogate key. This is totally fine.
hasNaturalKey :: EntityDef -> Bool
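-- (Editorial sketch, not part of the generated documentation: assuming the
-- User and Follower entities above were defined with mkPersist, so that
-- their PersistEntity instances are in scope, hasNaturalKey can be queried
-- via entityDef.)
--
-- import Data.Proxy (Proxy (..))
--
-- userHasNaturalKey :: Bool
-- userHasNaturalKey = hasNaturalKey (entityDef (Proxy :: Proxy User)) -- True
--
-- followerHasNaturalKey :: Bool
-- followerHasNaturalKey = hasNaturalKey (entityDef (Proxy :: Proxy Follower)) -- False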
-- | Returns True if the provided entity has a custom composite
-- primary key. Composite keys have multiple fields in them.
--
-- -- User -- email String -- name String -- Primary userId -- -- Profile -- personId PersonId -- email String -- Primary personId email -- -- Person -- Id UUID -- name String -- -- Follower -- name String ---- -- Given these entity definitions, only Profile would return -- True, because it is the only entity with multiple columns in -- the primary key. User has a single column natural key. -- Person has a custom single column surrogate key defined with -- Id. And Follower has a default single column -- surrogate key. hasCompositePrimaryKey :: EntityDef -> Bool dbIdColumns :: SqlBackend -> EntityDef -> NonEmpty Text dbIdColumnsEsc :: (FieldNameDB -> Text) -> EntityDef -> NonEmpty Text dbColumns :: SqlBackend -> EntityDef -> NonEmpty Text -- | Gets the FieldDef for an Update. updateFieldDef :: PersistEntity v => Update v -> FieldDef updatePersistValue :: Update v -> PersistValue mkUpdateText :: PersistEntity record => SqlBackend -> Update record -> Text mkUpdateText' :: PersistEntity record => (FieldNameDB -> Text) -> (Text -> Text) -> Update record -> Text commaSeparated :: [Text] -> Text parenWrapped :: Text -> Text -- | Make a list PersistValue suitable for database inserts. Pairs -- nicely with the function mkInsertPlaceholders. -- -- Does not include generated columns. mkInsertValues :: PersistEntity rec => rec -> [PersistValue] -- | Returns a list of escaped field names and "?" placeholder -- values for performing inserts. This does not include generated -- columns. -- -- Does not include generated columns. mkInsertPlaceholders :: EntityDef -> (FieldNameDB -> Text) -> [(Text, Text)] module Database.Persist.SqlBackend.SqlPoolHooks -- | A set of hooks that may be used to alter the behaviour of -- runSqlPoolWithExtensibleHooks in a backwards-compatible -- fashion. data SqlPoolHooks m backend -- | Lifecycle hooks that may be altered to extend SQL pool behavior in a -- backwards compatible fashion. -- -- By default, the hooks have the following semantics: -- --
-- runSqlCommand $ rawExecute "CREATE EXTENSION IF NOT EXISTS \"uuid-ossp\";" []
runSqlCommand :: SqlPersistT IO () -> Migration
-- | An exception indicating that Persistent refused to run some unsafe
-- migrations. Contains a list of pairs where the Bool tracks whether the
-- migration was unsafe (True means unsafe), and the Sql is the sql
-- statement for the migration.
newtype PersistUnsafeMigrationException
PersistUnsafeMigrationException :: [(Bool, Sql)] -> PersistUnsafeMigrationException
instance GHC.Show.Show Database.Persist.Sql.Migration.PersistUnsafeMigrationException
instance GHC.Exception.Type.Exception Database.Persist.Sql.Migration.PersistUnsafeMigrationException
-- | of this module will not have a corresponding major version bump.
--
-- Please depend on Database.Persist.ImplicitIdDef instead. If you
-- can't use that module, please file an issue on GitHub with your
-- desired use case.
module Database.Persist.ImplicitIdDef.Internal
-- | A specification for how the implied ID columns are created.
--
-- By default, persistent will give each table a default column
-- named id (customizable by PersistSettings), and the
-- column type will be whatever you'd expect from BackendKey
-- yourBackendType. For the SqlBackend type, this is an
-- auto incrementing integer primary key.
--
-- You might want to give a different example. A common use case in
-- postgresql is to use the UUID type, and automatically generate them
-- using a SQL function.
--
-- Previously, you'd need to add a custom Id annotation for each
-- model.
--
-- User -- Id UUID default="uuid_generate_v1mc()" -- name Text -- -- Dog -- Id UUID default="uuid_generate_v1mc()" -- name Text -- user UserId ---- -- Now, you can simply create an ImplicitIdDef that corresponds to -- this declaration. -- --
-- newtype UUID = UUID ByteString
--
-- instance PersistField UUID where
--     toPersistValue (UUID bs) =
--         PersistLiteral_ Escaped bs
--     fromPersistValue pv =
--         case pv of
--             PersistLiteral_ Escaped bs ->
--                 Right (UUID bs)
--             _ ->
--                 Left "nope"
--
-- instance PersistFieldSql UUID where
--     sqlType _ = SqlOther "UUID"
--
-- With this instance at the ready, we can now create our implicit
-- definition:
--
-- uuidDef :: ImplicitIdDef -- uuidDef = mkImplicitIdDef @UUID "uuid_generate_v1mc()" ---- -- And we can use setImplicitIdDef to use this with the -- MkPersistSettings for our block. -- --
-- mkPersist (setImplicitIdDef uuidDef sqlSettings) [persistLowerCase| ... |] ---- -- TODO: either explain interaction with mkMigrate or fix it. see issue -- #1249 for more details. data ImplicitIdDef ImplicitIdDef :: (EntityNameHS -> FieldType) -> SqlType -> (Bool -> Type -> Type) -> Maybe Text -> Maybe Integer -> ImplicitIdDef -- | The field type. Accepts the EntityNameHS if you want to refer -- to it. By default, Id is appended to the end of the Haskell -- name. [iidFieldType] :: ImplicitIdDef -> EntityNameHS -> FieldType -- | The SqlType for the default column. By default, this is -- SqlInt64 to correspond with an autoincrementing integer primary -- key. [iidFieldSqlType] :: ImplicitIdDef -> SqlType -- | The Bool argument is whether or not the MkPersistBackend type -- has the mpsGeneric field set. -- -- The Type is the mpsBackend value. -- -- The default uses BackendKey SqlBackend (or -- a generic equivalent). [iidType] :: ImplicitIdDef -> Bool -> Type -> Type -- | The default expression for the field. Note that setting this to -- Nothing is unsafe. see -- https://github.com/yesodweb/persistent/issues/1247 for more -- information. -- -- With some cases - like the Postgresql SERIAL type - this is -- safe, since there's an implied default. [iidDefault] :: ImplicitIdDef -> Maybe Text -- | Specify the maximum length for a key column. This is necessary for -- VARCHAR columns, like UUID in MySQL. MySQL will -- throw a runtime error if a text or binary column is used in an index -- without a length specification. [iidMaxLen] :: ImplicitIdDef -> Maybe Integer -- | Create an ImplicitIdDef based on the Typeable and -- PersistFieldSql constraints in scope. -- -- This function uses the TypeApplications syntax. Let's look at -- an example that works with Postgres UUIDs. -- --
-- newtype UUID = UUID Text -- deriving newtype PersistField -- -- instance PersistFieldSql UUID where -- sqlType _ = SqlOther "UUID" -- -- idDef :: ImplicitIdDef -- idDef = mkImplicitIdDefTypeable @UUID "uuid_generate_v1mc()" ---- -- This ImplicitIdDef will generate default UUID columns, and the -- database will call the uuid_generate_v1mc() function to -- generate the value for new rows being inserted. -- -- If the type t is Text or String then a -- max_len attribute of 200 is set. To customize this, use -- setImplicitIdDefMaxLen. mkImplicitIdDef :: forall t. (Typeable t, PersistFieldSql t) => Text -> ImplicitIdDef -- | Set the maximum length of the implied ID column. This is required for -- any type where the associated SqlType is a TEXT or -- VARCHAR sort of thing. setImplicitIdDefMaxLen :: Integer -> ImplicitIdDef -> ImplicitIdDef -- | This function converts a Typeable type into a -- persistent representation of the type of a field - -- FieldTyp. fieldTypeFromTypeable :: forall t. (PersistField t, Typeable t) => FieldType -- | Remove the default attribute of the ImplicitIdDef column. This -- will require you to provide an ID for the model with every insert, -- using insertKey instead of insert, unless the type -- has some means of getting around that in the migrations. -- -- As an example, the Postgresql SERIAL type expands to an -- autoincrementing integer. Postgres will implicitly create the relevant -- series and set the default to be -- NEXTVAL(series_name). A default is therefore -- unnecessary to use for this type. -- -- However, for a UUID, postgres *does not* have an implicit -- default. You must either specify a default UUID generation function, -- or insert them yourself (again, using insertKey). -- -- This function will be deprecated in the future when omiting the -- default implicit ID column is more fully supported. unsafeClearDefaultImplicitId :: ImplicitIdDef -> ImplicitIdDef -- | This module contains types and functions for creating an -- ImplicitIdDef, which allows you to customize the implied ID -- column that persistent generates. -- -- If this module doesn't suit your needs, you may want to import -- Database.Persist.ImplicitIdDef.Internal instead. If you do so, -- please file an issue on GitHub so we can support your needs. Breaking -- changes to that module will *not* be accompanied with a major version -- bump. module Database.Persist.ImplicitIdDef -- | A specification for how the implied ID columns are created. -- -- By default, persistent will give each table a default column -- named id (customizable by PersistSettings), and the -- column type will be whatever you'd expect from BackendKey -- yourBackendType. For The SqlBackend type, this is an -- auto incrementing integer primary key. -- -- You might want to give a different example. A common use case in -- postgresql is to use the UUID type, and automatically generate them -- using a SQL function. -- -- Previously, you'd need to add a custom Id annotation for each -- model. -- --
-- User -- Id UUID default="uuid_generate_v1mc()" -- name Text -- -- Dog -- Id UUID default="uuid_generate_v1mc()" -- name Text -- user UserId ---- -- Now, you can simply create an ImplicitIdDef that corresponds to -- this declaration. -- --
-- newtype UUID = UUID ByteString
--
-- instance PersistField UUID where
--     toPersistValue (UUID bs) =
--         PersistLiteral_ Escaped bs
--     fromPersistValue pv =
--         case pv of
--             PersistLiteral_ Escaped bs ->
--                 Right (UUID bs)
--             _ ->
--                 Left "nope"
--
-- instance PersistFieldSql UUID where
--     sqlType _ = SqlOther "UUID"
--
-- With this instance at the ready, we can now create our implicit
-- definition:
--
-- uuidDef :: ImplicitIdDef -- uuidDef = mkImplicitIdDef @UUID "uuid_generate_v1mc()" ---- -- And we can use setImplicitIdDef to use this with the -- MkPersistSettings for our block. -- --
-- mkPersist (setImplicitIdDef uuidDef sqlSettings) [persistLowerCase| ... |] ---- -- TODO: either explain interaction with mkMigrate or fix it. see issue -- #1249 for more details. data ImplicitIdDef -- | Create an ImplicitIdDef based on the Typeable and -- PersistFieldSql constraints in scope. -- -- This function uses the TypeApplications syntax. Let's look at -- an example that works with Postgres UUIDs. -- --
-- newtype UUID = UUID Text -- deriving newtype PersistField -- -- instance PersistFieldSql UUID where -- sqlType _ = SqlOther "UUID" -- -- idDef :: ImplicitIdDef -- idDef = mkImplicitIdDefTypeable @UUID "uuid_generate_v1mc()" ---- -- This ImplicitIdDef will generate default UUID columns, and the -- database will call the uuid_generate_v1mc() function to -- generate the value for new rows being inserted. -- -- If the type t is Text or String then a -- max_len attribute of 200 is set. To customize this, use -- setImplicitIdDefMaxLen. mkImplicitIdDef :: forall t. (Typeable t, PersistFieldSql t) => Text -> ImplicitIdDef -- | This is the default variant. Setting the implicit ID definition to -- this value should not have any change at all on how entities are -- defined by default. autoIncrementingInteger :: ImplicitIdDef -- | Set the maximum length of the implied ID column. This is required for -- any type where the associated SqlType is a TEXT or -- VARCHAR sort of thing. setImplicitIdDefMaxLen :: Integer -> ImplicitIdDef -> ImplicitIdDef -- | Remove the default attribute of the ImplicitIdDef column. This -- will require you to provide an ID for the model with every insert, -- using insertKey instead of insert, unless the type -- has some means of getting around that in the migrations. -- -- As an example, the Postgresql SERIAL type expands to an -- autoincrementing integer. Postgres will implicitly create the relevant -- series and set the default to be -- NEXTVAL(series_name). A default is therefore -- unnecessary to use for this type. -- -- However, for a UUID, postgres *does not* have an implicit -- default. You must either specify a default UUID generation function, -- or insert them yourself (again, using insertKey). -- -- This function will be deprecated in the future when omiting the -- default implicit ID column is more fully supported. unsafeClearDefaultImplicitId :: ImplicitIdDef -> ImplicitIdDef module Database.Persist.Compatible -- | A newtype wrapper for compatible backends, mainly useful for -- DerivingVia. -- -- When writing a new backend that is BackendCompatible with an -- existing backend, instances for the new backend can be naturally -- defined in terms of the instances for the existing backend. -- -- For example, if you decide to augment the SqlBackend with -- some additional features: -- --
-- data BetterSqlBackend = BetterSqlBackend { sqlBackend :: SqlBackend, ... }
--
-- instance BackendCompatible SqlBackend BetterSqlBackend where
-- projectBackend = sqlBackend
--
--
-- Then you can use DerivingVia to automatically get instances
-- like:
--
-- -- deriving via (Compatible SqlBackend BetterSqlBackend) instance PersistStoreRead BetterSqlBackend -- deriving via (Compatible SqlBackend BetterSqlBackend) instance PersistStoreWrite BetterSqlBackend -- ... ---- -- These instances will go through the compatible backend (in this case, -- SqlBackend) for all their queries. -- -- These instances require that both backends have the same -- BaseBackend, but deriving HasPersistBackend will enforce -- that for you. -- --
-- deriving via (Compatible SqlBackend BetterSqlBackend) instance HasPersistBackend BetterSqlBackend --newtype Compatible b s Compatible :: s -> Compatible b s [unCompatible] :: Compatible b s -> s -- | Gives a bunch of useful instance declarations for a backend based on -- its compatibility with another backend, using Compatible. -- -- The argument should be a type of the form forall v1 ... vn. -- Compatible b s (Quantification is optional, but supported -- because TH won't let you have unbound type variables in a type -- splice). The instance is produced for s based on the instance -- defined for b, which is constrained in the instance head to -- exist. -- -- v1 ... vn are implicitly quantified in the instance, which is -- derived via Compatible b s. makeCompatibleInstances :: Q Type -> Q [Dec] -- | Gives a bunch of useful instance declarations for a backend key based -- on its compatibility with another backend & key, using -- Compatible. -- -- The argument should be a type of the form forall v1 ... vn. -- Compatible b s (Quantification is optional, but supported -- because TH won't let you have unbound type variables in a type -- splice). The instance is produced for BackendKey s -- based on the instance defined for BackendKey b, which -- is constrained in the instance head to exist. -- -- v1 ... vn are implicitly quantified in the instance, which is -- derived via BackendKey (Compatible b s). makeCompatibleKeyInstances :: Q Type -> Q [Dec] -- | This module is the primary entry point if you're working with -- persistent on a SQL database. -- --
-- data Switch = On | Off
--   deriving (Show, Eq)
--
-- instance PersistField Switch where
--   toPersistValue s = case s of
--     On -> PersistBool True
--     Off -> PersistBool False
--   fromPersistValue (PersistBool b) = if b then Right On else Right Off
--   fromPersistValue x = Left $ "File.hs: When trying to deserialize a Switch: expected PersistBool, received: " <> T.pack (show x)
--
-- instance PersistFieldSql Switch where
--   sqlType _ = SqlBool
--
--
-- import qualified Data.UUID as UUID
--
-- instance PersistField UUID where
--   toPersistValue = PersistLiteralEncoded . toASCIIBytes
--   fromPersistValue (PersistLiteralEncoded uuid) =
--     case fromASCIIBytes uuid of
--       Nothing -> Left $ "Model/CustomTypes.hs: Failed to deserialize a UUID; received: " <> T.pack (show uuid)
--       Just uuid' -> Right uuid'
--   fromPersistValue x = Left $ "File.hs: When trying to deserialize a UUID: expected PersistLiteralEncoded, received: " <> T.pack (show x)
--
-- instance PersistFieldSql UUID where
--   sqlType _ = SqlOther "uuid"
--
--
-- CREATE DOMAIN ssn AS text
-- CHECK ( value ~ '^[0-9]{9}$');
--
--
--
-- instance PersistFieldSql SSN where
--   sqlType _ = SqlOther "ssn"
--
--
-- CREATE TYPE rainbow_color AS ENUM ('red', 'orange', 'yellow', 'green', 'blue', 'indigo', 'violet');
--
--
--
-- instance PersistFieldSql RainbowColor where
--   sqlType _ = SqlOther "rainbow_color"
--
class PersistField a => PersistFieldSql a
sqlType :: PersistFieldSql a => Proxy a -> SqlType
-- | This newtype wrapper is useful when selecting an entity out of the
-- database and you want to provide a prefix to the table being selected.
--
-- Consider this raw SQL query:
--
-- SELECT ??
-- FROM my_long_table_name AS mltn
-- INNER JOIN other_table AS ot
--   ON mltn.some_col = ot.other_col
-- WHERE ...
--
--
-- We don't want to refer to my_long_table_name every time, so
-- we create an alias. If we want to select it, we have to tell the raw
-- SQL quasi-quoter that we expect the entity to be prefixed with some
-- other name.
--
-- We can give the above query a type with this, like:
--
-- getStuff :: SqlPersistM [EntityWithPrefix "mltn" MyLongTableName]
-- getStuff = rawSql queryText []
--
--
-- The EntityWithPrefix bit is a boilerplate newtype wrapper, so
-- you can remove it with unPrefix, like this:
--
-- getStuff :: SqlPersistM [Entity MyLongTableName]
-- getStuff = unPrefix @"mltn" <$> rawSql queryText []
--
--
-- The symbol is a "type application" and requires the
-- TypeApplications language extension.
newtype EntityWithPrefix (prefix :: Symbol) record
EntityWithPrefix :: Entity record -> EntityWithPrefix (prefix :: Symbol) record
[unEntityWithPrefix] :: EntityWithPrefix (prefix :: Symbol) record -> Entity record
-- | A helper function to tell GHC what the EntityWithPrefix prefix
-- should be. This allows you to use a type application to specify the
-- prefix, instead of specifying the type on the result.
--
-- As an example, here's code that uses this:
--
-- myQuery :: SqlPersistM [Entity Person]
-- myQuery = fmap (unPrefix @"p") $ rawSql query []
--   where
--     query = "SELECT ?? FROM person AS p"
--
unPrefix :: forall prefix record. EntityWithPrefix prefix record -> Entity record
-- | Get a connection from the pool, run the given action, and then return
-- the connection to the pool.
--
-- This function performs the given action in a transaction. If an
-- exception occurs during the action, then the transaction is rolled
-- back.
--
-- Note: This function previously timed out after 2 seconds, but this
-- behavior was buggy and caused more problems than it solved. Since
-- version 2.1.2, it performs no timeout checks.
runSqlPool :: forall backend m a. (MonadUnliftIO m, BackendCompatible SqlBackend backend) => ReaderT backend m a -> Pool backend -> m a
-- | Like runSqlPool, but supports specifying an isolation level.
runSqlPoolWithIsolation :: forall backend m a. (MonadUnliftIO m, BackendCompatible SqlBackend backend) => ReaderT backend m a -> Pool backend -> IsolationLevel -> m a
-- | Like runSqlPool, but does not surround the action in a
-- transaction. This action might leave your database in a weird state.
runSqlPoolNoTransaction :: forall backend m a. (MonadUnliftIO m, BackendCompatible SqlBackend backend) => ReaderT backend m a -> Pool backend -> Maybe IsolationLevel -> m a
-- | This function is how runSqlPool and
-- runSqlPoolNoTransaction are defined. In addition to the action
-- to be performed and the Pool of connections to use, we give you
-- the opportunity to provide three actions - initialize, afterwards, and
-- onException.
runSqlPoolWithHooks :: forall backend m a before after onException. (MonadUnliftIO m, BackendCompatible SqlBackend backend) => ReaderT backend m a -> Pool backend -> Maybe IsolationLevel -> (backend -> m before) -> (backend -> m after) -> (backend -> SomeException -> m onException) -> m a
-- | This function is how runSqlPoolWithHooks is defined.
--
-- It's currently the most general function for using a SQL pool.
runSqlPoolWithExtensibleHooks :: forall backend m a. (MonadUnliftIO m, BackendCompatible SqlBackend backend) => ReaderT backend m a -> Pool backend -> Maybe IsolationLevel -> SqlPoolHooks m backend -> m a
-- | Starts a new transaction on the connection. When the acquired
-- connection is released the transaction is committed and the connection
-- returned to the pool.
--
-- Upon an exception the transaction is rolled back and the connection
-- destroyed.
--
-- This is equivalent to runSqlConn but does not incur the
-- MonadUnliftIO constraint, meaning it can be used within, for
-- example, a Conduit pipeline.
acquireSqlConn :: (MonadReader backend m, BackendCompatible SqlBackend backend) => m (Acquire backend)
-- | Like acquireSqlConn, but lets you specify an explicit isolation
-- level.
acquireSqlConnWithIsolation :: (MonadReader backend m, BackendCompatible SqlBackend backend) => IsolationLevel -> m (Acquire backend)
runSqlConn :: forall backend m a. (MonadUnliftIO m, BackendCompatible SqlBackend backend) => ReaderT backend m a -> backend -> m a
-- | Like runSqlConn, but supports specifying an isolation level.
runSqlConnWithIsolation :: forall backend m a. (MonadUnliftIO m, BackendCompatible SqlBackend backend) => ReaderT backend m a -> backend -> IsolationLevel -> m a
runSqlPersistM :: BackendCompatible SqlBackend backend => ReaderT backend (NoLoggingT (ResourceT IO)) a -> backend -> IO a
runSqlPersistMPool :: BackendCompatible SqlBackend backend => ReaderT backend (NoLoggingT (ResourceT IO)) a -> Pool backend -> IO a
liftSqlPersistMPool :: forall backend m a. (MonadIO m, BackendCompatible SqlBackend backend) => ReaderT backend (NoLoggingT (ResourceT IO)) a -> Pool backend -> m a
withSqlPool :: forall backend m a. (MonadLoggerIO m, MonadUnliftIO m, BackendCompatible SqlBackend backend) => (LogFunc -> IO backend) -> Int -> (Pool backend -> m a) -> m a
-- | Creates a pool of connections to a SQL database which can be used by
-- the Pool backend -> m a function. After the function
-- completes, the connections are destroyed.
withSqlPoolWithConfig :: forall backend m a. (MonadLoggerIO m, MonadUnliftIO m, BackendCompatible SqlBackend backend) => (LogFunc -> IO backend) -> ConnectionPoolConfig -> (Pool backend -> m a) -> m a
createSqlPool :: forall backend m. (MonadLoggerIO m, MonadUnliftIO m, BackendCompatible SqlBackend backend) => (LogFunc -> IO backend) -> Int -> m (Pool backend)
-- | Creates a pool of connections to a SQL database.
createSqlPoolWithConfig :: forall m backend. (MonadLoggerIO m, MonadUnliftIO m, BackendCompatible SqlBackend backend) => (LogFunc -> IO backend) -> ConnectionPoolConfig -> m (Pool backend)
-- | Create a connection and run SQL queries within it. This function
-- automatically closes the connection on its completion.
--
-- {-# LANGUAGE GADTs #-}
-- {-# LANGUAGE ScopedTypeVariables #-}
-- {-# LANGUAGE OverloadedStrings #-}
-- {-# LANGUAGE MultiParamTypeClasses #-}
-- {-# LANGUAGE TypeFamilies #-}
-- {-# LANGUAGE TemplateHaskell #-}
-- {-# LANGUAGE QuasiQuotes #-}
-- {-# LANGUAGE GeneralizedNewtypeDeriving #-}
--
-- import Control.Monad.IO.Class (liftIO)
-- import Control.Monad.Logger
-- import Conduit
-- import Database.Persist
-- import Database.Sqlite
-- import Database.Persist.Sqlite
-- import Database.Persist.TH
--
-- share [mkPersist sqlSettings, mkMigrate "migrateAll"] [persistLowerCase|
-- Person
-- name String
-- age Int Maybe
-- deriving Show
-- |]
--
-- openConnection :: LogFunc -> IO SqlBackend
-- openConnection logfn = do
-- conn <- open "/home/sibi/test.db"
-- wrapConnection conn logfn
--
-- main :: IO ()
-- main = do
-- runNoLoggingT $ runResourceT $ withSqlConn openConnection (\backend ->
-- flip runSqlConn backend $ do
-- runMigration migrateAll
-- insert_ $ Person "John doe" $ Just 35
-- insert_ $ Person "Divya" $ Just 36
-- (pers :: [Entity Person]) <- selectList [] []
-- liftIO $ print pers
-- return ()
-- )
--
--
-- On executing it, you get this output:
--
--
-- Migrating: CREATE TABLE "person"("id" INTEGER PRIMARY KEY,"name" VARCHAR NOT NULL,"age" INTEGER NULL)
-- [Entity {entityKey = PersonKey {unPersonKey = SqlBackendKey {unSqlBackendKey = 1}}, entityVal = Person {personName = "John doe", personAge = Just 35}},Entity {entityKey = PersonKey {unPersonKey = SqlBackendKey {unSqlBackendKey = 2}}, entityVal = Person {personName = "Hema", personAge = Just 36}}]
--
withSqlConn :: forall backend m a. (MonadUnliftIO m, MonadLoggerIO m, BackendCompatible SqlBackend backend) => (LogFunc -> IO backend) -> (backend -> m a) -> m a
close' :: BackendCompatible SqlBackend backend => backend -> IO ()
withRawQuery :: MonadIO m => Text -> [PersistValue] -> ConduitM [PersistValue] Void IO a -> ReaderT SqlBackend m a
data family BackendKey backend
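-- | Convert between typed keys and their raw Int64 representation.
-- A minimal usage sketch (the Person entity here is an assumed
-- example whose backend is SqlBackend):
--
-- getPersonByRawId :: MonadIO m => Int64 -> ReaderT SqlBackend m (Maybe Person)
-- getPersonByRawId rawId = get (toSqlKey rawId :: Key Person)
--
-- rawIdOf :: Entity Person -> Int64
-- rawIdOf = fromSqlKey . entityKey
--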
toSqlKey :: ToBackendKey SqlBackend record => Int64 -> Key record
fromSqlKey :: ToBackendKey SqlBackend record => Key record -> Int64
-- | Get the SQL string for the field that an EntityField represents.
-- Useful for raw SQL queries.
--
-- Your backend may provide a more convenient fieldName function which
-- does not operate in a Monad.
getFieldName :: forall record typ m backend. (PersistEntity record, PersistEntityBackend record ~ SqlBackend, BackendCompatible SqlBackend backend, Monad m) => EntityField record typ -> ReaderT backend m Text
-- | Get the SQL string for the table that a PersistEntity represents.
-- Useful for raw SQL queries.
--
-- Your backend may provide a more convenient tableName function which
-- does not operate in a Monad.
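--
-- For instance, a small sketch (assuming a Person entity with a
-- name field) that splices the escaped table and column names into a
-- raw query:
--
-- selectNames :: MonadIO m => ReaderT SqlBackend m [Single Text]
-- selectNames = do
--     tbl <- getTableName (undefined :: Person)
--     col <- getFieldName PersonName
--     rawSql ("SELECT " <> col <> " FROM " <> tbl) []
--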
getTableName :: forall record m backend. (PersistEntity record, BackendCompatible SqlBackend backend, Monad m) => record -> ReaderT backend m Text
-- | Useful for a backend to implement tableName by adding escaping.
tableDBName :: PersistEntity record => record -> EntityNameDB
-- | Useful for a backend to implement fieldName by adding escaping.
fieldDBName :: forall record typ. PersistEntity record => EntityField record typ -> FieldNameDB
rawQuery :: (MonadResource m, MonadReader env m, BackendCompatible SqlBackend env) => Text -> [PersistValue] -> ConduitM () [PersistValue] m ()
rawQueryRes :: (MonadIO m1, MonadIO m2, BackendCompatible SqlBackend env) => Text -> [PersistValue] -> ReaderT env m1 (Acquire (ConduitM () [PersistValue] m2 ()))
-- | Execute a raw SQL statement.
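--
-- For example, a small sketch (the person table and its age
-- column are assumed to exist):
--
-- resetAges :: MonadIO m => Int -> ReaderT SqlBackend m ()
-- resetAges newAge = rawExecute "UPDATE person SET age = ?" [toPersistValue newAge]
--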
rawExecute :: (MonadIO m, BackendCompatible SqlBackend backend) => Text -> [PersistValue] -> ReaderT backend m ()
-- | Execute a raw SQL statement and return the number of rows it has
-- modified.
rawExecuteCount :: (MonadIO m, BackendCompatible SqlBackend backend) => Text -> [PersistValue] -> ReaderT backend m Int64
-- | Execute a raw SQL statement and return its results as a list. If you
-- do not expect a return value, use of rawExecute is recommended.
--
-- If you're using Entitys (which is quite likely), then
-- you must use entity selection placeholders (double question
-- mark, ??). These ?? placeholders are then replaced
-- by the names of the columns that we need for your entities. You'll
-- receive an error if you don't use the placeholders. Please see the
-- Entitys documentation for more details.
--
-- You may put value placeholders (question marks, ?) in your
-- SQL query. These placeholders are then replaced by the values you pass
-- on the second parameter, already correctly escaped. You may want to
-- use toPersistValue to help you construct the placeholder
-- values.
--
-- Since you're giving a raw SQL statement, you don't get any guarantees
-- regarding safety. If rawSql is not able to parse the results of
-- your query back, then an exception is raised. However, most common
-- problems are mitigated by using the entity selection placeholder
-- ??, and you shouldn't see any error at all if you're not
-- using Single.
--
-- Some examples of rawSql based on this schema:
--
--
-- share [mkPersist sqlSettings, mkMigrate "migrateAll"] [persistLowerCase|
-- Person
--     name String
--     age Int Maybe
--     deriving Show
-- BlogPost
--     title String
--     authorId PersonId
--     deriving Show
-- |]
--
--
-- Examples based on the above schema:
--
-- getPerson :: MonadIO m => ReaderT SqlBackend m [Entity Person]
-- getPerson = rawSql "select ?? from person where name=?" [PersistText "john"]
--
-- getAge :: MonadIO m => ReaderT SqlBackend m [Single Int]
-- getAge = rawSql "select person.age from person where name=?" [PersistText "john"]
--
-- getAgeName :: MonadIO m => ReaderT SqlBackend m [(Single Int, Single Text)]
-- getAgeName = rawSql "select person.age, person.name from person where name=?" [PersistText "john"]
--
-- getPersonBlog :: MonadIO m => ReaderT SqlBackend m [(Entity Person, Entity BlogPost)]
-- getPersonBlog = rawSql "select ??,?? from person,blog_post where person.id = blog_post.author_id" []
--
--
-- Minimal working program for PostgreSQL backend based on the above
-- concepts:
--
-- {-# LANGUAGE EmptyDataDecls #-}
-- {-# LANGUAGE FlexibleContexts #-}
-- {-# LANGUAGE GADTs #-}
-- {-# LANGUAGE GeneralizedNewtypeDeriving #-}
-- {-# LANGUAGE MultiParamTypeClasses #-}
-- {-# LANGUAGE OverloadedStrings #-}
-- {-# LANGUAGE QuasiQuotes #-}
-- {-# LANGUAGE TemplateHaskell #-}
-- {-# LANGUAGE TypeFamilies #-}
--
-- import Control.Monad.IO.Class (liftIO)
-- import Control.Monad.Logger (runStderrLoggingT)
-- import Database.Persist
-- import Control.Monad.Reader
-- import Data.Text
-- import Database.Persist.Sql
-- import Database.Persist.Postgresql
-- import Database.Persist.TH
--
-- share [mkPersist sqlSettings, mkMigrate "migrateAll"] [persistLowerCase|
-- Person
-- name String
-- age Int Maybe
-- deriving Show
-- |]
--
-- conn = "host=localhost dbname=new_db user=postgres password=postgres port=5432"
--
-- getPerson :: MonadIO m => ReaderT SqlBackend m [Entity Person]
-- getPerson = rawSql "select ?? from person where name=?" [PersistText "sibi"]
--
-- liftSqlPersistMPool y x = liftIO (runSqlPersistMPool y x)
--
-- main :: IO ()
-- main = runStderrLoggingT $ withPostgresqlPool conn 10 $ liftSqlPersistMPool $ do
-- runMigration migrateAll
-- xs <- getPerson
-- liftIO (print xs)
--
rawSql :: (RawSql a, MonadIO m, BackendCompatible SqlBackend backend) => Text -> [PersistValue] -> ReaderT backend m [a]
-- | Same as deleteWhere, but returns the number of rows affected.
deleteWhereCount :: (PersistEntity val, MonadIO m, PersistEntityBackend val ~ SqlBackend, BackendCompatible SqlBackend backend) => [Filter val] -> ReaderT backend m Int64
-- | Same as updateWhere, but returns the number of rows affected.
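--
-- A small sketch of both counting variants, assuming a Person entity
-- with name String and a non-nullable age Int field:
--
-- anonymiseMinors :: MonadIO m => ReaderT SqlBackend m Int64
-- anonymiseMinors = updateWhereCount [PersonAge <. 18] [PersonName =. "anonymous"]
--
-- deleteMinors :: MonadIO m => ReaderT SqlBackend m Int64
-- deleteMinors = deleteWhereCount [PersonAge <. 18]
--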
updateWhereCount :: (PersistEntity val, MonadIO m, SqlBackend ~ PersistEntityBackend val, BackendCompatible SqlBackend backend) => [Filter val] -> [Update val] -> ReaderT backend m Int64
-- | Render a [Filter record] into a Text value
-- suitable for inclusion into a SQL query.
filterClause :: PersistEntity val => Maybe FilterTablePrefix -> SqlBackend -> [Filter val] -> Text
-- | Render a [Filter record] into a Text value
-- suitable for inclusion into a SQL query, as well as the
-- [PersistValue] to properly fill in the ?
-- placeholders.
filterClauseWithVals :: PersistEntity val => Maybe FilterTablePrefix -> SqlBackend -> [Filter val] -> (Text, [PersistValue])
-- | Render a [SelectOpt record] made up *only* of
-- Asc and Desc constructors into a Text value
-- suitable for inclusion into a SQL query.
orderClause :: PersistEntity val => Maybe FilterTablePrefix -> SqlBackend -> [SelectOpt val] -> Text
-- | Used when determining how to prefix a column name in a WHERE
-- clause.
data FilterTablePrefix
-- | Prefix the column with the table name. This is useful if the column
-- name might be ambiguous.
PrefixTableName :: FilterTablePrefix
-- | Prefix the column name with the EXCLUDED keyword. This is
-- used with the Postgresql backend when doing ON CONFLICT DO
-- UPDATE clauses - see the documentation on upsertWhere
-- and upsertManyWhere.
PrefixExcluded :: FilterTablePrefix
-- | Commit the current transaction and begin a new one. This is used when
-- a transaction commit is required within the context of
-- runSqlConn (which brackets its provided action with a
-- transaction begin/commit pair).
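--
-- For example, a small sketch that commits after every batch of
-- inserts within a single runSqlConn call (the Person entity and
-- forM_ from Control.Monad are assumed):
--
-- insertInBatches :: MonadIO m => [[Person]] -> ReaderT SqlBackend m ()
-- insertInBatches batches =
--     forM_ batches $ \batch -> do
--         mapM_ insert_ batch
--         transactionSave
--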
transactionSave :: MonadIO m => ReaderT SqlBackend m ()
-- | Commit the current transaction and begin a new one with the specified
-- isolation level.
transactionSaveWithIsolation :: MonadIO m => IsolationLevel -> ReaderT SqlBackend m ()
-- | Roll back the current transaction and begin a new one. This rolls back
-- to the state of the last call to transactionSave or the
-- enclosing runSqlConn call.
transactionUndo :: MonadIO m => ReaderT SqlBackend m ()
-- | Roll back the current transaction and begin a new one with the
-- specified isolation level.
transactionUndoWithIsolation :: MonadIO m => IsolationLevel -> ReaderT SqlBackend m ()
getStmtConn :: SqlBackend -> Text -> IO Statement
-- | Create the list of columns for the given entity.
mkColumns :: [EntityDef] -> EntityDef -> BackendSpecificOverrides -> ([Column], [UniqueDef], [ForeignDef])
-- | Record of functions to override the default behavior in
-- mkColumns. It is recommended you initialize this with
-- emptyBackendSpecificOverrides and override the default values,
-- so that as new fields are added, your code still compiles.
--
-- For added safety, use the getBackendSpecific* and
-- setBackendSpecific* functions, as a breaking change to the
-- record field labels won't be reflected in a major version bump of the
-- library.
data BackendSpecificOverrides
-- | Creates an empty BackendSpecificOverrides (i.e. use the default
-- behavior; no overrides)
emptyBackendSpecificOverrides :: BackendSpecificOverrides
-- | If the override is defined, then this returns a function that accepts
-- an entity name and field name and provides the ConstraintNameDB
-- for the foreign key constraint.
--
-- An abstract accessor for the BackendSpecificOverrides
getBackendSpecificForeignKeyName :: BackendSpecificOverrides -> Maybe (EntityNameDB -> FieldNameDB -> ConstraintNameDB)
-- | Set the backend's foreign key generation function to this value.
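--
-- For example, a small sketch (OverloadedStrings assumed) that derives
-- constraint names from the table and column names while keeping every
-- other default:
--
-- myOverrides :: BackendSpecificOverrides
-- myOverrides =
--     setBackendSpecificForeignKeyName
--         (\(EntityNameDB tbl) (FieldNameDB col) ->
--             ConstraintNameDB ("fk_" <> tbl <> "_" <> col))
--         emptyBackendSpecificOverrides
--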
setBackendSpecificForeignKeyName :: (EntityNameDB -> FieldNameDB -> ConstraintNameDB) -> BackendSpecificOverrides -> BackendSpecificOverrides
defaultAttribute :: [FieldAttr] -> Maybe Text
-- | Please refer to the documentation for the database in question for a
-- full overview of the semantics of the varying isolation levels.
data IsolationLevel
ReadUncommitted :: IsolationLevel
ReadCommitted :: IsolationLevel
RepeatableRead :: IsolationLevel
Serializable :: IsolationLevel
-- | Generates SQL for limit and offset for PostgreSQL, SQLite and MySQL.
decorateSQLWithLimitOffset :: Text -> (Int, Int) -> Text -> Text
-- | A single column (see rawSql). Any PersistField may
-- be used here, including PersistValue (which does not do any
-- processing).
newtype Single a
Single :: a -> Single a
[unSingle] :: Single a -> a
-- | Values to configure a pool of database connections. See
-- Data.Pool for details.
data ConnectionPoolConfig
ConnectionPoolConfig :: Int -> NominalDiffTime -> Int -> ConnectionPoolConfig
-- | How many stripes to divide the pool into. See Data.Pool for
-- details. Default: 1.
[connectionPoolConfigStripes] :: ConnectionPoolConfig -> Int
-- | How long connections can remain idle before being disposed of, in
-- seconds. Default: 600
[connectionPoolConfigIdleTimeout] :: ConnectionPoolConfig -> NominalDiffTime
-- | How many connections should be held in the connection pool. Default:
-- 10
[connectionPoolConfigSize] :: ConnectionPoolConfig -> Int
type ConnectionPool = Pool SqlBackend
type SqlPersistM = SqlPersistT (NoLoggingT (ResourceT IO))
type SqlPersistT = ReaderT SqlBackend
data PersistentSqlException
StatementAlreadyFinalized :: Text -> PersistentSqlException
Couldn'tGetSQLConnection :: PersistentSqlException
-- | This value specifies how a field references another table.
data ColumnReference
ColumnReference :: !EntityNameDB -> !ConstraintNameDB -> !FieldCascade -> ColumnReference
-- | The name of the table that this column references.
[crTableName] :: ColumnReference -> !EntityNameDB
-- | The name of the foreign key constraint.
[crConstraintName] :: ColumnReference -> !ConstraintNameDB
-- | Whether or not updates/deletions to the referenced table cascade to
-- this table.
[crFieldCascade] :: ColumnReference -> !FieldCascade
data Column
Column :: !FieldNameDB -> !Bool -> !SqlType -> !Maybe Text -> !Maybe Text -> !Maybe ConstraintNameDB -> !Maybe Integer -> !Maybe ColumnReference -> Column
[cName] :: Column -> !FieldNameDB
[cNull] :: Column -> !Bool
[cSqlType] :: Column -> !SqlType
[cDefault] :: Column -> !Maybe Text
[cGenerated] :: Column -> !Maybe Text
[cDefaultConstraintName] :: Column -> !Maybe ConstraintNameDB
[cMaxLen] :: Column -> !Maybe Integer
[cReference] :: Column -> !Maybe ColumnReference
-- | Initializes a ConnectionPoolConfig with default values. See the
-- documentation of ConnectionPoolConfig for each field's default
-- value.
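--
-- For instance, a small sketch that keeps the defaults but holds 20
-- connections:
--
-- myPoolConfig :: ConnectionPoolConfig
-- myPoolConfig = defaultConnectionPoolConfig { connectionPoolConfigSize = 20 }
--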
defaultConnectionPoolConfig :: ConnectionPoolConfig
-- | A SqlBackend represents a handle or connection to a database.
-- It contains functions and values that allow databases to have more
-- optimized implementations, as well as references that benefit
-- performance and sharing.
--
-- Instead of using the SqlBackend constructor directly, use the
-- mkSqlBackend function.
--
-- A SqlBackend is *not* thread-safe. You should not assume that a
-- SqlBackend can be shared among threads and run concurrent
-- queries. This *will* result in problems. Instead, you should create a
-- Pool SqlBackend, known as a
-- ConnectionPool, and pass that around in multi-threaded
-- applications.
--
-- To run actions in the persistent library, you should use the
-- runSqlConn function. If you're using a multithreaded
-- application, use the runSqlPool function.
data SqlBackend
-- | An SQL backend which can only handle read queries
--
-- The constructor was exposed in 2.10.0.
newtype SqlReadBackend
SqlReadBackend :: SqlBackend -> SqlReadBackend
[unSqlReadBackend] :: SqlReadBackend -> SqlBackend
-- | An SQL backend which can handle read or write queries
--
-- The constructor was exposed in 2.10.0
newtype SqlWriteBackend
SqlWriteBackend :: SqlBackend -> SqlWriteBackend
[unSqlWriteBackend] :: SqlWriteBackend -> SqlBackend
-- | A Statement is a representation of a database query that has
-- been prepared and stored on the server side.
data Statement
Statement :: IO () -> IO () -> ([PersistValue] -> IO Int64) -> (forall m. MonadIO m => [PersistValue] -> Acquire (ConduitM () [PersistValue] m ())) -> Statement
[stmtFinalize] :: Statement -> IO ()
[stmtReset] :: Statement -> IO ()
[stmtExecute] :: Statement -> [PersistValue] -> IO Int64
[stmtQuery] :: Statement -> forall m. MonadIO m => [PersistValue] -> Acquire (ConduitM () [PersistValue] m ())
type LogFunc = Loc -> LogSource -> LogLevel -> LogStr -> IO ()
data InsertSqlResult
ISRSingle :: Text -> InsertSqlResult
ISRInsertGet :: Text -> Text -> InsertSqlResult
ISRManyKeys :: Text -> [PersistValue] -> InsertSqlResult
-- | Useful for running a read query against a backend with unknown
-- capabilities.
readToUnknown :: Monad m => ReaderT SqlReadBackend m a -> ReaderT SqlBackend m a
-- | Useful for running a read query against a backend with read and write
-- capabilities.
readToWrite :: Monad m => ReaderT SqlReadBackend m a -> ReaderT SqlWriteBackend m a
-- | Useful for running a write query against an untagged backend with
-- unknown capabilities.
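--
-- For example, a small sketch that reuses a write-tagged action from
-- untagged SqlPersistT code (Person is an assumed entity):
--
-- insertThenFetch :: MonadIO m => Person -> ReaderT SqlBackend m (Maybe Person)
-- insertThenFetch p = do
--     key <- writeToUnknown (insert p)
--     get key
--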
writeToUnknown :: Monad m => ReaderT SqlWriteBackend m a -> ReaderT SqlBackend m a
-- | A constraint synonym which witnesses that a backend is SQL and can run
-- read queries.
type SqlBackendCanRead backend = (BackendCompatible SqlBackend backend, PersistQueryRead backend, PersistStoreRead backend, PersistUniqueRead backend)
-- | A constraint synonym which witnesses that a backend is SQL and can run
-- read and write queries.
type SqlBackendCanWrite backend = (SqlBackendCanRead backend, PersistQueryWrite backend, PersistStoreWrite backend, PersistUniqueWrite backend)
-- | Like SqlPersistT but compatible with any SQL backend which
-- can handle read queries.
type SqlReadT m a = forall backend. (SqlBackendCanRead backend) => ReaderT backend m a
-- | Like SqlPersistT but compatible with any SQL backend which
-- can handle read and write queries.
type SqlWriteT m a = forall backend. (SqlBackendCanWrite backend) => ReaderT backend m a
-- | A backend which is a wrapper around SqlBackend.
type IsSqlBackend backend = (IsPersistBackend backend, BaseBackend backend ~ SqlBackend)
-- | Prior to persistent-2.11.0, we provided an instance of
-- PersistField for the Natural type. This was in error,
-- because Natural is unbounded, and databases
-- don't have reasonable types for this.
--
-- The instance for Natural used the Int64 underlying type,
-- which will cause underflow and overflow errors. This type has the
-- exact same code in the instances, and will work seamlessly.
--
-- A more appropriate type for this is the Word series of types
-- from Data.Word. These have a bounded size, are guaranteed to be
-- non-negative, and are quite efficient for the database to store.
newtype OverflowNatural
OverflowNatural :: Natural -> OverflowNatural
[unOverflowNatural] :: OverflowNatural -> Natural
-- | This module provides the tools for defining your database schema and
-- using it to generate Haskell data types and migrations.
module Database.Persist.TH
-- | Converts a quasi-quoted syntax into a list of entity definitions, to
-- be used as input to the Template Haskell generation code (mkPersist).
persistWith :: PersistSettings -> QuasiQuoter
-- | Apply persistWith to upperCaseSettings.
persistUpperCase :: QuasiQuoter
-- | Apply persistWith to lowerCaseSettings.
persistLowerCase :: QuasiQuoter
-- | Same as persistWith, but uses an external file instead of a
-- quasiquotation. The recommended file extension is
-- .persistentmodels.
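--
-- For example, a sketch that loads definitions from an assumed
-- config/models.persistentmodels file:
--
-- share [mkPersist sqlSettings, mkMigrate "migrateAll"]
--     $(persistFileWith lowerCaseSettings "config/models.persistentmodels")
--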
persistFileWith :: PersistSettings -> FilePath -> Q Exp
-- | Same as persistFileWith, but uses several external files
-- instead of one. Splitting your Persistent definitions into multiple
-- modules can potentially dramatically speed up compile times.
--
-- The recommended file extension is .persistentmodels.
--
--
-- -- Model1.hs
-- share
--     [mkPersist sqlSettings]
--     $(persistFileWith lowerCaseSettings "models1")
--
--
-- -- Model2.hs
-- share
--     [mkPersist sqlSettings]
--     $(persistFileWith lowerCaseSettings "models2")
--
--
-- Use persistManyFileWith to create your migrations:
--
-- -- Migrate.hs
-- share
--     [mkMigrate "migrateAll"]
--     $(persistManyFileWith lowerCaseSettings ["models1.persistentmodels","models2.persistentmodels"])
--
--
-- Tip: To get the same import behavior as if you were declaring all your
-- models in one file, import your new files as Name into
-- another file, then export module Name.
--
-- This approach may be used in the future to reduce memory usage during
-- compilation, but so far we've only seen mild reductions.
--
-- See persistent#778 and persistent#791 for more details.
persistManyFileWith :: PersistSettings -> [FilePath] -> Q Exp
-- | Create data types and appropriate PersistEntity instances for
-- the given EntityDefs. Works well with the persist quasi-quoter.
mkPersist :: MkPersistSettings -> [UnboundEntityDef] -> Q [Dec]
-- | Like mkPersist, but takes an additional list of pre-existing
-- EntityDefs that the newly defined entities may reference.
mkPersistWith :: MkPersistSettings -> [EntityDef] -> [UnboundEntityDef] -> Q [Dec]
-- | Settings to be passed to the mkPersist function.
data MkPersistSettings
-- | Which database backend we're using. This type is used for the
-- PersistEntityBackend associated type in the entities that are
-- generated.
--
-- If the mpsGeneric value is set to True, then this type
-- is used for the non-Generic type alias. The data and type will be
-- named:
--
-- data ModelGeneric backend = Model { ... }
--
--
-- And, for convenience's sake, we provide a type alias:
--
--
-- type Model = ModelGeneric $(the type you give here)
--
mpsBackend :: MkPersistSettings -> Type
-- | Create generic types that can be used with multiple backends. Good for
-- reusable code, but makes error messages harder to understand. Default:
-- False.
-- | Deprecated: The mpsGeneric function adds a considerable amount of
-- overhead and complexity to the library without bringing significant
-- benefit. We would like to remove it. If you require this feature,
-- please comment on the linked GitHub issue, and we'll either keep it
-- around, or we can figure out a nicer way to solve your problem.
-- Github: https://github.com/yesodweb/persistent/issues/1204
mpsGeneric :: MkPersistSettings -> Bool
-- | Prefix field names with the model name. Default: True.
--
-- Note: this field is deprecated. Use the mpsFieldLabelModifier and
-- mpsConstraintLabelModifier instead.
mpsPrefixFields :: MkPersistSettings -> Bool
-- | Customise the field accessors and lens names using the entity and
-- field name. Both arguments are upper cased.
--
-- Default: appends entity and field.
--
-- Note: this setting is ignored if mpsPrefixFields is set to False.
mpsFieldLabelModifier :: MkPersistSettings -> Text -> Text -> Text
-- | Customise the constraint names using the entity and field name. The
-- result should be a valid Haskell type name (it should start with an
-- upper-case letter).
--
-- Default: appends entity and field.
--
-- Note: this setting is ignored if mpsPrefixFields is set to False.
mpsConstraintLabelModifier :: MkPersistSettings -> Text -> Text -> Text
-- | Generate ToJSON/FromJSON instances for each model
-- type. If it's Nothing, no instances will be generated.
-- Default:
--
-- Just EntityJSON
-- { entityToJSON = 'entityIdToJSON
-- , entityFromJSON = 'entityIdFromJSON
-- }
--
mpsEntityJSON :: MkPersistSettings -> Maybe EntityJSON
-- | Instead of generating normal field accessors, generate lens-style
-- accessors.
--
-- Default: False
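--
-- A small sketch of enabling this via record update on sqlSettings:
--
-- lensSettings :: MkPersistSettings
-- lensSettings = sqlSettings { mpsGenerateLenses = True }
--
-- share [mkPersist lensSettings] [persistLowerCase| ... |]
--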
mpsGenerateLenses :: MkPersistSettings -> Bool
-- | Automatically derive these typeclass instances for all record and key
-- types.
--
-- Default: []
mpsDeriveInstances :: MkPersistSettings -> [Name]
data EntityJSON
EntityJSON :: Name -> Name -> EntityJSON
-- | Name of the toJSON implementation for Entity a.
[entityToJSON] :: EntityJSON -> Name
-- | Name of the fromJSON implementation for Entity a.
[entityFromJSON] :: EntityJSON -> Name
-- | Create an MkPersistSettings with default values.
mkPersistSettings :: Type -> MkPersistSettings
-- | Use the SqlPersist backend.
sqlSettings :: MkPersistSettings
-- | A specification for how the implied ID columns are created.
--
-- By default, persistent will give each table a default column
-- named id (customizable by PersistSettings), and the
-- column type will be whatever you'd expect from BackendKey
-- yourBackendType. For the SqlBackend type, this is an
-- auto-incrementing integer primary key.
--
-- You might want to use a different type for the ID column. A common
-- use case in PostgreSQL is to use the UUID type, and to automatically
-- generate the values using a SQL function.
--
-- Previously, you'd need to add a custom Id annotation for each
-- model.
--
--
-- User
--     Id   UUID default="uuid_generate_v1mc()"
--     name Text
--
-- Dog
--     Id   UUID default="uuid_generate_v1mc()"
--     name Text
--     user UserId
--
--
-- Now, you can simply create an ImplicitIdDef that corresponds to
-- this declaration.
--
-- newtype UUID = UUID ByteString
--
-- instance PersistField UUID where
--     toPersistValue (UUID bs) =
--         PersistLiteral_ Escaped bs
--     fromPersistValue pv =
--         case pv of
--             PersistLiteral_ Escaped bs ->
--                 Right (UUID bs)
--             _ ->
--                 Left "nope"
--
-- instance PersistFieldSql UUID where
--     sqlType _ = SqlOther "UUID"
--
--
-- With this instance at the ready, we can now create our implicit
-- definition:
--
-- uuidDef :: ImplicitIdDef
-- uuidDef = mkImplicitIdDef @UUID "uuid_generate_v1mc()"
--
--
-- And we can use setImplicitIdDef to use this with the
-- MkPersistSettings for our block.
--
-- mkPersist (setImplicitIdDef uuidDef sqlSettings) [persistLowerCase| ... |]
--
--
-- TODO: either explain interaction with mkMigrate or fix it. see issue
-- #1249 for more details.
data ImplicitIdDef
-- | Set the ImplicitIdDef in the given MkPersistSettings.
-- The default value is autoIncrementingInteger.
setImplicitIdDef :: ImplicitIdDef -> MkPersistSettings -> MkPersistSettings
-- | Creates a single function to perform all migrations for the entities
-- defined here. One thing to be aware of is dependencies: if you have
-- entities with foreign references, make sure to place those definitions
-- after the entities they reference.
--
-- In persistent-2.13.0.0, this was changed to *ignore* the
-- input entity def list, and instead defer to mkEntityDefList to
-- get the correct entities. This avoids problems where the QuasiQuoter
-- is unable to know what the right reference types are. This sets
-- mkPersist to be the "single source of truth" for entity
-- definitions.
mkMigrate :: String -> [UnboundEntityDef] -> Q [Dec]
-- | The basic function for migrating models, no Template Haskell required.
--
-- It's probably best to use this in concert with mkEntityDefList,
-- and then call migrateModels with the result from that function.
--
-- share [mkPersist sqlSettings, mkEntityDefList "entities"] [persistLowerCase| ... |]
--
-- migrateAll = migrateModels entities
--
--
-- The function mkMigrate currently implements exactly this
-- behavior. If you're splitting up the entity definitions into
-- separate files, then it is better to use the entity definition list
-- and then concatenate all the models together into a big list to call
-- with migrateModels.
--
-- module Foo where
--
-- share [mkPersist s, mkEntityDefList "fooModels"] ...
--
--
-- module Bar where
--
-- share [mkPersist s, mkEntityDefList "barModels"] ...
--
-- module Migration where
--
-- import Foo
-- import Bar
--
-- migrateAll = migrateModels (fooModels <> barModels)
--
migrateModels :: [EntityDef] -> Migration
discoverEntities :: Q Exp
-- | Creates a declaration for the [EntityDef] from the
-- persistent schema. This is necessary because the Persistent
-- QuasiQuoter is unable to know the correct type of ID fields, and
-- assumes that they are all Int64.
--
-- Provide this in the list you give to share, much like
-- mkMigrate.
--
-- share [mkMigrate "migrateAll", mkEntityDefList "entityDefs"] [...] --mkEntityDefList :: String -> [UnboundEntityDef] -> Q [Dec] -- | Apply the given list of functions to the same EntityDefs. -- -- This function is useful for cases such as: -- --
-- >>> share [mkEntityDefList "myDefs", mkPersist sqlSettings] [persistLowerCase|...|] --share :: [[a] -> Q [Dec]] -> [a] -> Q [Dec] -- | Automatically creates a valid PersistField instance for any -- datatype that has valid Show and Read instances. Can be -- very convenient for Enum types. derivePersistField :: String -> Q [Dec] -- | Automatically creates a valid PersistField instance for any -- datatype that has valid ToJSON and FromJSON instances. -- For a datatype T it generates instances similar to these: -- --
-- instance PersistField T where
--     toPersistValue = PersistByteString . L.toStrict . encode
--     fromPersistValue = (left T.pack) . eitherDecodeStrict' <=< fromPersistValue
--
-- instance PersistFieldSql T where
--     sqlType _ = SqlString
--
derivePersistFieldJSON :: String -> Q [Dec]
-- | Produce code similar to the following:
--
-- instance PersistEntity e => PersistField e where -- toPersistValue = entityToPersistValueHelper -- fromPersistValue = entityFromPersistValueHelper ["col1", "col2"] -- sqlType _ = SqlString --persistFieldFromEntity :: MkPersistSettings -> UnboundEntityDef -> Q [Dec] lensPTH :: (s -> a) -> (s -> b -> t) -> Lens s t a b -- | Calls parse to Quasi.parse individual entities in isolation -- afterwards, sets references to other entities -- -- In 2.13.0.0, this was changed to splice in -- [UnboundEntityDef] instead of -- [EntityDef]. parseReferences :: PersistSettings -> Text -> Q Exp -- | Takes a list of (potentially) independently defined entities and -- properly links all foreign keys to reference the right -- EntityDef, tying the knot between entities. -- -- Allows users to define entities indepedently or in separate modules -- and then fix the cross-references between them at runtime to create a -- Migration. embedEntityDefs :: [EntityDef] -> [UnboundEntityDef] -> [UnboundEntityDef] -- | Render an error message based on the tableName and -- fieldName with the provided message. fieldError :: Text -> Text -> Text -> Text -- | This class is used to ensure that functions requring at least one -- unique key are not called with records that have 0 unique keys. The -- quasiquoter automatically writes working instances for appropriate -- entities, and generates TypeError instances for records that -- have 0 unique keys. class PersistEntity record => AtLeastOneUniqueKey record requireUniquesP :: AtLeastOneUniqueKey record => record -> NonEmpty (Unique record) -- | This class is used to ensure that upsert is only called on -- records that have a single Unique key. The quasiquoter -- automatically generates working instances for appropriate records, and -- generates TypeError instances for records that have 0 or -- multiple unique keys. class PersistEntity record => OnlyOneUniqueKey record onlyUniqueP :: OnlyOneUniqueKey record => record -> Unique record -- | Returns True if the key definition has less than 2 fields. pkNewtype :: MkPersistSettings -> UnboundEntityDef -> Bool instance GHC.Show.Show Database.Persist.TH.SqlTypeExp instance GHC.Show.Show Database.Persist.TH.FTTypeConDescr