Compare commits


No commits in common. "develop" and "issue-210" have entirely different histories.

76 changed files with 1719 additions and 3726 deletions

View file

@@ -1,8 +0,0 @@
{:hooks
{:analyze-call
{next.jdbc/with-transaction
hooks.com.github.seancorfield.next-jdbc/with-transaction
next.jdbc/with-transaction+options
hooks.com.github.seancorfield.next-jdbc/with-transaction+options}}
:lint-as {next.jdbc/on-connection clojure.core/with-open
next.jdbc/on-connection+options clojure.core/with-open}}

View file

@@ -1,34 +0,0 @@
(ns hooks.com.github.seancorfield.next-jdbc
(:require [clj-kondo.hooks-api :as api]))
(defn with-transaction
"Expands (with-transaction [tx expr opts] body)
to (let [tx expr] opts body) per clj-kondo examples."
[{:keys [:node]}]
(let [[binding-vec & body] (rest (:children node))
[sym val opts] (:children binding-vec)]
(when-not (and sym val)
(throw (ex-info "No sym and val provided" {})))
(let [new-node (api/list-node
(list*
(api/token-node 'let)
(api/vector-node [sym val])
opts
body))]
{:node new-node})))
(defn with-transaction+options
"Expands (with-transaction+options [tx expr opts] body)
to (let [tx expr] opts body) per clj-kondo examples."
[{:keys [:node]}]
(let [[binding-vec & body] (rest (:children node))
[sym val opts] (:children binding-vec)]
(when-not (and sym val)
(throw (ex-info "No sym and val provided" {})))
(let [new-node (api/list-node
(list*
(api/token-node 'let)
(api/vector-node [sym val])
opts
body))]
{:node new-node})))
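For reference, a minimal sketch of the rewrite these hooks describe, using a hypothetical call (the datasource `ds` and the body are placeholders; the expansion shape follows the docstrings above):

;; as written in user code:
(next.jdbc/with-transaction [tx ds {:isolation :serializable}]
  (do-something tx))

;; the shape the hook hands to clj-kondo for analysis:
(let [tx ds]
  {:isolation :serializable}
  (do-something tx))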

View file

@@ -1,8 +0,0 @@
{:linters {:xtql/redundant-pipeline {:level :warning}
:xtql/redundant-unify {:level :warning}
:xtql/unrecognized-operation {:level :error}
:xtql/unrecognized-parameter {:level :warning}
:xtql/missing-parameter {:level :error}
:xtql/type-mismatch {:level :error}
:xtql/invalid-arity {:level :error}}
:hooks {:analyze-call {xtdb.api/q hooks.xtql/q}}}

View file

@@ -1,567 +0,0 @@
(ns ^:no-doc hooks.xtql
(:require [clj-kondo.hooks-api :as api]))
(def source-op?
#{'from 'rel 'unify})
(def tail-op?
#{'aggregate
'limit 'offset
'where
'order-by
'with 'without 'return
'unnest})
(def unify-clause?
#{'from 'rel
'join 'left-join
'unnest
'where
'with})
(defn node-map? [node]
(contains? #{:map :namespaced-map}
(:tag node)))
(defn node-namespaced-map? [node]
(= :namespaced-map (:tag node)))
(defn map-children [node]
(->> (if (node-namespaced-map? node)
(-> node :children first)
node)
:children
(partition-all 2)))
(defn node-vector? [node]
(= :vector (:tag node)))
(defn node-list? [node]
(= :list (:tag node)))
(defn node-symbol? [node]
(symbol? (:value node)))
(defn node-symbol [node]
(:value node))
(defn node-keyword? [node]
(keyword? (:k node)))
(defn node-keyword [node]
(:k node))
(defn node-quote? [node]
(= :quote (:tag node)))
(defn node-op [node]
(-> node :children first))
(declare lint-query)
(defmulti lint-unify-clause #(-> % node-op node-symbol))
(defmulti lint-source-op #(-> % node-op node-symbol))
(defmulti lint-tail-op #(-> % node-op node-symbol))
(defn lint-not-arg-symbol [node]
(when (= \$ (-> node node-symbol str first))
(api/reg-finding!
(assoc (meta node)
:message "unexpected parameter in binding"
:type :xtql/unrecognized-parameter))))
(defn lint-bind [node]
(cond
(node-symbol? node)
;; TODO: Make own type, should really be a warning
(lint-not-arg-symbol node)
(node-map? node)
(doseq [[k _v] (map-children node)]
(when-not (node-keyword? k)
(api/reg-finding!
(assoc (meta k)
:message "all keys in binding maps must be keywords"
:type :xtql/type-mismatch))))
:else
(api/reg-finding!
(assoc (meta node)
:message "expected a symbol or map"
:type :xtql/type-mismatch))))
;; TODO: Lint more unify clauses
(defmethod lint-unify-clause :default [node]
(when-not (unify-clause? (-> node node-op node-symbol))
(api/reg-finding!
(assoc (some-> node :children first meta)
:message "unrecognized unify clause"
:type :xtql/unrecognized-operation))))
(defmethod lint-unify-clause 'from [node]
(lint-source-op node))
(defmethod lint-unify-clause 'rel [node]
(lint-source-op node))
(defmethod lint-unify-clause 'with [node]
(let [opts (-> node :children rest)]
(when-not (>= (count opts) 1)
(api/reg-finding!
(assoc (meta node)
:message "expected at least one argument"
:type :xtql/invalid-arity)))
(doseq [opt opts]
(if (node-map? opt)
(let [ks (->> opt
map-children
(map first)
(remove node-symbol?))]
(doseq [k ks]
(api/reg-finding!
(assoc (meta k)
:message "expected all keys to be symbols in a unify"
:type :xtql/type-mismatch))))
(api/reg-finding!
(assoc (meta opt)
:message "opts must be a map"
:type :xtql/type-mismatch))))))
(defn lint-join-clause [node]
(let [args (-> node :children rest)]
(if-not (= (count args) 2)
(api/reg-finding!
(assoc (meta node)
:message "expected at exactly two arguments"
:type :xtql/invalid-arity))
(let [[query opts] args]
(lint-query query)
(cond
(node-vector? opts)
(->> opts :children (run! lint-bind))
(node-map? opts)
(let [kvs (map-children opts)
ks (->> kvs
(map first)
(map node-keyword)
(remove nil?)
(into #{}))]
(when-not (contains? ks :bind)
(api/reg-finding!
(assoc (meta opts)
:message "Missing :bind parameter"
:type :xtql/missing-parameter)))
(doseq [[k v] kvs]
(when-not (node-keyword? k)
(api/reg-finding!
(assoc (meta k)
:message "All keys in 'opts' must be keywords"
:type :xtql/type-mismatch)))
(case (node-keyword k)
:bind (if (node-vector? v)
(->> v :children (run! lint-bind))
(api/reg-finding!
(assoc (meta v)
:message "expected :bind value to be a vector"
:type :xtql/type-mismatch)))
:args (if (node-vector? v)
;; TODO: Make args specific
(->> v :children (run! lint-bind))
(api/reg-finding!
(assoc (meta v)
:message "expected :args value to be a vector"
:type :xtql/type-mismatch)))
; else
(api/reg-finding!
(assoc (meta k)
:message "unrecognized parameter"
:type :xtql/unrecognized-parameter)))))
:else
(api/reg-finding!
(assoc (meta node)
:message "opts must be a map or vector"
:type :xtql/type-mismatch)))))))
(defmethod lint-unify-clause 'join [node]
(lint-join-clause node))
(defmethod lint-unify-clause 'inner-join [node]
(lint-join-clause node))
(defmethod lint-unify-clause 'unnest [node]
(let [opts (-> node :children rest)]
(when-not (= 1 (count opts))
(api/reg-finding!
(assoc (meta node)
:message "expected at exactly one argument"
:type :xtql/invalid-arity)))
(let [opt (first opts)]
(if (node-map? opt)
(doseq [[k _v] (map-children opt)]
(when-not (node-symbol? k)
(api/reg-finding!
(assoc (meta k)
:message "expected all columns to be symbols"
:type :xtql/type-mismatch))))
(api/reg-finding!
(assoc (meta opt)
:message "expected opt to be a map"
:type :xtql/type-mismatch))))))
(defmethod lint-source-op :default [node]
(let [op (-> node node-op node-symbol)]
(if (tail-op? op)
(api/reg-finding!
(assoc (some-> node :children first meta)
:message "tail op in source position"
:type :xtql/unrecognized-operation))
(when-not (source-op? op)
(api/reg-finding!
(assoc (some-> node :children first meta)
:message "unrecognized source operation"
:type :xtql/unrecognized-operation))))))
(defmethod lint-source-op 'from [node]
(let [[_ table opts] (some-> node :children)]
(when-not (node-keyword? table)
(api/reg-finding!
(assoc (meta table)
:message "expected 'table' to be a keyword"
:type :xtql/type-mismatch)))
(case (:tag opts)
:vector (->> opts :children (run! lint-bind))
:map
(let [kvs (map-children opts)
ks (->> kvs
(map first)
(map node-keyword)
(remove nil?)
(into #{}))]
(when-not (contains? ks :bind)
(api/reg-finding!
(assoc (meta opts)
:message "Missing :bind parameter"
:type :xtql/missing-parameter)))
(doseq [[k v] kvs]
(when-not (node-keyword? k)
(api/reg-finding!
(assoc (meta k)
:message "All keys in 'opts' must be keywords"
:type :xtql/type-mismatch)))
(case (node-keyword k)
:bind (if (node-vector? v)
(->> v :children (run! lint-bind))
(api/reg-finding!
(assoc (meta opts)
:message "expected :bind value to be a vector"
:type :xtql/type-mismatch)))
;; TODO
:for-valid-time nil
;; TODO
:for-system-time nil
; else
(api/reg-finding!
(assoc (meta k)
:message "unrecognized parameter"
:type :xtql/unrecognized-parameter)))))
(api/reg-finding!
(assoc (meta opts)
:message "expected 'opts' to be either a map or vector"
:type :xtql/type-mismatch)))))
(defmethod lint-source-op 'unify [node]
(let [[_ & clauses] (some-> node :children)]
(doseq [bad-op (remove node-list? clauses)]
(api/reg-finding!
(assoc (meta bad-op)
:message "all operations in a unify must be lists"
:type :xtql/type-mismatch)))
(when (= (count clauses) 1)
(let [clause (first clauses)
clause-op (-> clause node-op node-symbol)
unify-node (some-> node :children first)]
(case clause-op
from (api/reg-finding!
(assoc (meta unify-node)
:message "redundant unify"
:type :xtql/redundant-unify))
rel (api/reg-finding!
(assoc (meta unify-node)
:message "redundant unify"
:type :xtql/redundant-unify))
;; TODO: Cover other operators
nil)))
(->> clauses
(filter node-list?)
(run! lint-unify-clause))))
(defmethod lint-source-op 'rel [node]
(let [[_ _expr binds] (some-> node :children)]
(if (node-vector? binds)
(->> binds :children (run! lint-bind))
(api/reg-finding!
(assoc (meta binds)
:message "expected rel binding to be a vector"
:type :xtql/type-mismatch)))))
;; TODO: Lint more tail ops
(defmethod lint-tail-op :default [node]
(let [op (-> node node-op node-symbol)]
(if (source-op? op)
(api/reg-finding!
(assoc (some-> node :children first meta)
:message "source op in tail position"
:type :xtql/unrecognized-operation))
(when-not (tail-op? op)
(api/reg-finding!
(assoc (some-> node :children first meta)
:message "unrecognized tail operation"
:type :xtql/unrecognized-operation))))))
(defn lint-keyword [node name]
(when-not (node-keyword? node)
(api/reg-finding!
(assoc (meta node)
:message (str "expected '" name "' to be a keyword")
:type :xtql/type-mismatch))))
(defn lint-enum [node name values]
;; TODO: Expand to more than just keywords?
;; Maybe a `node-value` function?
(when-not (contains? values (node-keyword node))
(api/reg-finding!
(assoc (meta node)
:message (str "expected '" name "' to be one of " values)
;; TODO: change to different type?
:type :xtql/type-mismatch))))
(defmethod lint-tail-op 'limit [node]
(let [opts (-> node :children rest)]
(when-not (= 1 (count opts))
(api/reg-finding!
(assoc (meta node)
:message "expected exactly one argument"
:type :xtql/invalid-arity)))
(when-let [opt (first opts)]
(when-not (some-> opt :value int?)
(api/reg-finding!
(assoc (meta opt)
:message "expected limit to be an integer"
:type :xtql/type-mismatch))))))
(defmethod lint-tail-op 'offset [node]
(let [opts (-> node :children rest)]
(when-not (= 1 (count opts))
(api/reg-finding!
(assoc (meta node)
:message "expected exactly one argument"
:type :xtql/invalid-arity)))
(when-let [opt (first opts)]
(when-not (some-> opt :value int?)
(api/reg-finding!
(assoc (meta opt)
:message "expected offset to be an integer"
:type :xtql/type-mismatch))))))
(defmethod lint-tail-op 'with [node]
(let [opts (-> node :children rest)]
(when-not (>= (count opts) 1)
(api/reg-finding!
(assoc (meta node)
:message "expected at least one argument"
:type :xtql/invalid-arity)))
(doseq [opt opts]
(cond
(node-symbol? opt)
(lint-not-arg-symbol opt)
(node-map? opt)
(let [ks (->> opt
map-children
(map first)
(remove node-keyword?))]
(doseq [k ks]
(api/reg-finding!
(assoc (meta k)
:message "expected all keys to be keywords"
:type :xtql/type-mismatch))))
:else
(api/reg-finding!
(assoc (meta opt)
:message "opts must be a symbol or map"
:type :xtql/type-mismatch))))))
(defmethod lint-tail-op 'return [node]
(let [opts (-> node :children rest)]
(when-not (>= (count opts) 1)
(api/reg-finding!
(assoc (meta node)
:message "expected at least one argument"
:type :xtql/invalid-arity)))
(doseq [opt opts]
(cond
(node-symbol? opt)
(lint-not-arg-symbol opt)
(node-map? opt)
(let [ks (->> opt
map-children
(map first)
(remove node-keyword?))]
(doseq [k ks]
(api/reg-finding!
(assoc (meta k)
:message "expected all keys to be keywords"
:type :xtql/type-mismatch))))
:else
(api/reg-finding!
(assoc (meta opt)
:message "opts must be a symbol or map"
:type :xtql/type-mismatch))))))
(defmethod lint-tail-op 'order-by [node]
(doseq [opt (-> node :children rest)]
(cond
(node-symbol? opt)
(lint-not-arg-symbol opt)
(node-map? opt)
(let [kvs (map-children opt)
ks (->> kvs
(map first)
(map node-keyword)
(remove nil?)
(into #{}))]
(when-not (contains? ks :val)
(api/reg-finding!
(assoc (meta opt)
:message "Missing :val parameter"
:type :xtql/missing-parameter)))
(doseq [[k v] kvs]
(when-not (node-keyword? k)
(api/reg-finding!
(assoc (meta k)
:message "All keys in 'opts' must be keywords"
:type :xtql/type-mismatch)))
(case (node-keyword k)
:val
(cond
(node-symbol? v)
(lint-not-arg-symbol v)
(node-keyword? v)
(api/reg-finding!
(assoc (meta v)
:message "expected :val value to be a symbol or an expression"
:type :xtql/type-mismatch)))
; else do nothing
:dir
(if (node-keyword? v)
(lint-enum v :dir #{:asc :desc})
(lint-keyword v ":dir value"))
:nulls
(if (node-keyword? v)
(lint-enum v :nulls #{:first :last})
(lint-keyword v ":nulls value"))
; else
(api/reg-finding!
(assoc (meta k)
:message "unrecognized parameter"
:type :xtql/unrecognized-parameter)))))
:else
(api/reg-finding!
(assoc (meta opt)
:message "opts must be a symbol or map"
:type :xtql/type-mismatch)))))
(defmethod lint-tail-op 'without [node]
(let [columns (-> node :children rest)]
(when-not (>= (count columns) 1)
;; TODO: Should be a warning really
(api/reg-finding!
(assoc (meta node)
:message "expected at least one column"
:type :xtql/invalid-arity)))
(doseq [column columns]
(when-not (node-keyword? column)
(api/reg-finding!
(assoc (meta column)
:message "expected column to be a keyword"
:type :xtql/type-mismatch))))))
(defmethod lint-tail-op 'aggregate [node]
(let [opts (-> node :children rest)]
(when-not (>= (count opts) 1)
(api/reg-finding!
(assoc (meta node)
:message "expected at least one argument"
:type :xtql/invalid-arity)))
(doseq [opt opts]
(cond
(node-symbol? opt)
(lint-not-arg-symbol opt)
(node-map? opt)
(doseq [[k _v] (map-children opt)]
(when-not (node-keyword? k)
(api/reg-finding!
(assoc (meta k)
:message "expected all keys to be keywords"
:type :xtql/type-mismatch))))
:else
(api/reg-finding!
(assoc (meta opt)
:message "expected opts to be a symbol or map"
:type :xtql/type-mismatch))))))
(defmethod lint-tail-op 'unnest [node]
(let [opts (-> node :children rest)]
(when-not (= 1 (count opts))
(api/reg-finding!
(assoc (meta node)
:message "expected at exactly one argument"
:type :xtql/invalid-arity)))
(let [opt (first opts)]
(if (node-map? opt)
(doseq [[k _v] (map-children opt)]
(when-not (node-keyword? k)
(api/reg-finding!
(assoc (meta k)
:message "expected all columns to be keywords"
:type :xtql/type-mismatch))))
(api/reg-finding!
(assoc (meta opt)
:message "expected opt to be a map"
:type :xtql/type-mismatch))))))
(defn lint-pipeline [node]
(let [[_ & ops] (some-> node :children)]
(doseq [bad-op (remove node-list? ops)]
(api/reg-finding!
(assoc (meta bad-op)
:message "all operations in a pipeline must be lists"
:type :xtql/type-mismatch)))
(when (= 1 (count ops))
(api/reg-finding!
(assoc (-> node :children first meta)
:message "redundant pipeline"
:type :xtql/redundant-pipeline)))
(let [first-op (first ops)]
(when (node-list? first-op)
(lint-source-op (first ops))))
(->> ops
(drop 1)
(filter node-list?)
(run! lint-tail-op))))
(defn lint-query [node]
(if (= '-> (node-symbol (-> node :children first)))
(lint-pipeline node)
(lint-source-op node)))
;; TODO: Lint other functions that take queries
(defn q [{:keys [node]}]
(let [[_ _node quoted-query] (some-> node :children)]
(when (node-quote? quoted-query)
(let [query (-> quoted-query :children first)]
(lint-query query)))))
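A few hypothetical call shapes illustrating what this hook inspects (assuming `xtdb.api` is aliased as `xt`; only quoted queries are analyzed, per `q` above):

(xt/q node '(from :users [name email]))   ; linted as a source op, no findings
(xt/q node '(-> (from :users [name])))    ; flags :xtql/redundant-pipeline
(xt/q node '(unify (from :users [name]))) ; flags :xtql/redundant-unify
(xt/q node '(-> (from :users [name])
                (where (= name "Alice"))
                (limit 10)))              ; pipeline with tail ops, no findings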

View file

@@ -1,6 +0,0 @@
{:linters {:cond-plus/empty-else {:level :error}
:cond-plus/missing-fn {:level :error}
:cond-plus/non-final-else {:level :error}
:cond-plus/sequence {:level :error}
:unresolved-symbol {:exclude [(cond-plus.core/cond+ [=> else])]}}
:hooks {:analyze-call {cond-plus.core/cond+ hooks.cond-plus-hook/cond+}}}

View file

@@ -1,65 +0,0 @@
(ns hooks.cond-plus-hook
(:require [clj-kondo.hooks-api :as api]))
(defn analyze-clauses [clauses]
(reduce
(fn [found-else? clause]
;; non-sequence clause
(if (not (or (api/list-node? clause)
(api/vector-node? clause)))
(let [{:keys [row col]} (meta clause)]
(api/reg-finding!
{:message "must be sequence"
:type :cond-plus/sequence
:row row
:col col})
found-else?)
(let [[sym arrow fn-expr] (api/sexpr clause)]
(cond
;; non-final else
found-else?
(do (api/reg-finding!
(merge
{:message ":else must be in final position"
:type :cond-plus/non-final-else}
found-else?))
(reduced nil))
;; check fn-exprs
(and (or (= :> arrow)
(= '=> arrow))
(nil? fn-expr))
(let [{:keys [row col]} (meta clause)]
(api/reg-finding!
{:message "fn-expr must have third position symbol"
:type :cond-plus/missing-fn
:row row
:col col})
found-else?)
;; else handling
(or (= :else sym)
(= 'else sym))
(if found-else?
(let [{:keys [row col]} (meta clause)]
(api/reg-finding!
{:message "only one :else clause allowed"
:type :cond-plus/empty-else
:row row
:col col})
;; early exit cuz not worth analyzing the rest
(reduced nil))
(do (when-not arrow
(let [{:keys [row col]} (meta clause)]
(api/reg-finding!
{:message ":else must have a body"
:type :cond-plus/empty-else
:row row
:col col})))
;; Store row and col from existing else as we don't throw until
;; we've seen a following clause
(select-keys (meta clause) [:row :col])))))))
nil
clauses))
(defn cond+ [{:keys [node]}]
(analyze-clauses (rest (:children node)))
node)
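A few hypothetical `cond+` forms and the findings the hook above would register (assuming `cond+` is referred in from `cond-plus.core`; `x` is a placeholder):

(cond+
  [(pos? x) :pos]
  [:else :neg]
  [(zero? x) :zero]) ; ":else must be in final position"

(cond+
  [(some? x) =>])    ; "fn-expr must have third position symbol"

(cond+
  :not-a-clause)     ; "must be sequence"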

View file

@@ -1,23 +0,0 @@
{:lint-as {lazytest.core/given clojure.core/let
lazytest.core/around clojure.core/fn
lazytest.core/defdescribe clojure.core/def
;; clojure.test interface
lazytest.experimental.interfaces.clojure-test/deftest clojure.test/deftest
lazytest.experimental.interfaces.clojure-test/testing clojure.test/testing
lazytest.experimental.interfaces.clojure-test/is clojure.test/is
lazytest.experimental.interfaces.clojure-test/are clojure.test/are
;; xunit interface
lazytest.experimental.interfaces.xunit/defsuite clojure.core/def
;; Expectations v2
lazytest.extensions.expectations/defexpect clojure.core/def
lazytest.extensions.expectations/from-each clojure.core/for
lazytest.extensions.expectations/=? clojure.core/=
}
:hooks {:analyze-call {;; Expectations v2
lazytest.extensions.expectations/more-> hooks.lazytest.expectations/more->
lazytest.extensions.expectations/more-of hooks.lazytest.expectations/more-of
}}
:linters {:clojure-lsp/unused-public-var
{:exclude-when-defined-by #{lazytest.core/defdescribe
lazytest.experimental.interfaces.xunit/defsuite
lazytest.experimental.interfaces.clojure-test/deftest}}}}

View file

@@ -1,31 +0,0 @@
;; Copied from https://github.com/clojure-expectations/clojure-test/blob/b90ed5b24924238b3b16b0bbaaee4c3b05a1268a
(ns hooks.lazytest.expectations
(:require [clj-kondo.hooks-api :as api]))
(defn more-> [{:keys [node]}]
(let [tail (rest (:children node))
rewritten
(api/list-node
(list*
(api/token-node 'cond->)
(api/token-node 'nil)
tail))]
{:node rewritten}))
(defn more-of [{:keys [node]}]
(let [bindings (fnext (:children node))
pairs (partition 2 (nnext (:children node)))
rewritten
(api/list-node
(list*
(api/token-node 'fn)
(api/vector-node (vector bindings))
(map (fn [[e a]]
(api/list-node
(list
(api/token-node 'lazytest.core/expect)
e
a)))
pairs)))]
{:node rewritten}))
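For reference, a sketch of the rewrites these two hooks produce, with hypothetical arguments (shapes inferred from the code above):

;; (more-> 1 :a 2 :b)              is analyzed as  (cond-> nil 1 :a 2 :b)
;;
;; (more-of {:keys [a b]} 1 a 2 b) is analyzed as
;; (fn [{:keys [a b]}]
;;   (lazytest.core/expect 1 a)
;;   (lazytest.core/expect 2 b))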

View file

@@ -1,4 +0,0 @@
{:linters
{:unresolved-symbol
{:exclude [(cljs.test/is [match? thrown-match?])
(clojure.test/is [match? thrown-match?])]}}}

View file

@@ -1,5 +0,0 @@
{:lint-as
{rewrite-clj.zip/subedit-> clojure.core/->
rewrite-clj.zip/subedit->> clojure.core/->>
rewrite-clj.zip/edit-> clojure.core/->
rewrite-clj.zip/edit->> clojure.core/->>}}

View file

@@ -1 +0,0 @@
{:config-in-call {xtdb.api/template {:ignore [:unresolved-symbol :unresolved-namespace]}}}

.clover/config.cljs (new file, 239 lines)
View file

@@ -0,0 +1,239 @@
;; ~/.config/clover/config.cljs
;; for running in GitPod, we don't tap> values:
(defn- wrap-in-tap [code] code)
(defn tap-top-block []
(p/let [block (editor/get-top-block)]
(when (seq (:text block))
(-> block
(update :text wrap-in-tap)
(editor/eval-and-render)))))
(defn tap-block []
(p/let [block (editor/get-block)]
(when (seq (:text block))
(-> block
(update :text wrap-in-tap)
(editor/eval-and-render)))))
(defn tap-selection []
(p/let [block (editor/get-selection)]
(when (seq (:text block))
(-> block
(update :text wrap-in-tap)
(editor/eval-and-render)))))
(defn tap-def-var []
(p/let [block (editor/get-selection)]
(when (seq (:text block))
(-> block
(update :text
#(str "(def " % ")"))
(update :text wrap-in-tap)
(editor/eval-and-render)))))
(defn tap-var []
(p/let [block (editor/get-var)]
(when (seq (:text block))
(-> block
(update :text #(str "(or (find-ns '" % ") (resolve '" % "))"))
(update :text wrap-in-tap)
(editor/eval-and-render)))))
(defn tap-ns []
(p/let [block (editor/get-namespace)
here (editor/get-selection)]
(when (seq (:text block))
(-> block
(update :text #(str "(find-ns '" % ")"))
(update :text wrap-in-tap)
(assoc :range (:range here))
(editor/eval-and-render)))))
(defn- wrap-in-clean-ns
"Given a string, find the namespace, and clean it up:
remove its aliases, its refers, and any interns."
[s]
(str "(when-let [ns (find-ns '" s ")]"
" (run! #(try (ns-unalias ns %) (catch Throwable _)) (keys (ns-aliases ns)))"
" (run! #(try (ns-unmap ns %) (catch Throwable _)) (keys (ns-interns ns)))"
" (->> (ns-refers ns)"
" (remove (fn [[_ v]] (.startsWith (str v) \"#'clojure.core/\")))"
" (map key)"
" (run! #(try (ns-unmap ns %) (catch Throwable _)))))"))
(defn tap-remove-ns []
(p/let [block (editor/get-namespace)
here (editor/get-selection)]
(when (seq (:text block))
(editor/run-callback
:notify
{:type :info :title "Removing..." :message (:text block)})
(-> block
(update :text wrap-in-clean-ns)
(update :text wrap-in-tap)
(assoc :range (:range here))
(editor/eval-and-render)))))
(defn tap-reload-all-ns []
(p/let [block (editor/get-namespace)
here (editor/get-selection)]
(when (seq (:text block))
(editor/run-callback
:notify
{:type :info :title "Reloading all..." :message (:text block)})
(p/let [res (editor/eval-and-render
(-> block
(update :text #(str "(require '" % " :reload-all)"))
(update :text wrap-in-tap)
(assoc :range (:range here))))]
(editor/run-callback
:notify
{:type (if (:error res) :warning :info)
:title (if (:error res)
"Reload failed for..."
"Reload succeeded!")
:message (:text block)})))))
(defn- format-test-result [{:keys [test pass fail error]}]
(str "Ran " test " test"
(when-not (= 1 test) "s")
(when-not (zero? pass)
(str ", " pass " assertion"
(when-not (= 1 pass) "s")
" passed"))
(when-not (zero? fail)
(str ", " fail " failed"))
(when-not (zero? error)
(str ", " error " errored"))
"."))
(defn tap-run-current-test []
(p/let [block (editor/get-top-block)
test-name (when (seq (:text block))
(clojure.string/replace (:text block)
#"\(def[a-z]* ([^\s]*)[^]*"
"$1"))
here (editor/get-selection)]
(when (seq test-name)
(p/let [res (editor/eval-and-render
(-> block
(update :text
(fn [_]
(str "
(with-out-str
(binding [clojure.test/*test-out* *out*]
(clojure.test/test-vars [#'" test-name "])))")))
(update :text wrap-in-tap)
(assoc :range (:range here))))]
(editor/run-callback
:notify
(if (:error res)
{:type :info
:title "Failed to run tests for"
:message test-name}
(try
(let [s (str (:result res))]
(if (re-find #"FAIL in" s)
{:type :warning
:title test-name
:message s}
{:type :info
:title (str test-name " passed")
:message (when (seq s) s)}))
(catch js/Error e
{:type :warning
:title "EXCEPTION!"
:message (ex-message e)}))))))))
(defn tap-run-tests []
(p/let [block (editor/get-namespace)
here (editor/get-selection)]
(when (seq (:text block))
(p/let [res (editor/eval-and-render
(-> block
(update :text (fn [s] (str "
(try
(let [nt (symbol \"" s "\")]
(clojure.test/run-tests nt))
(catch Throwable _))")))
(update :text wrap-in-tap)
(assoc :range (:range here))))]
(editor/run-callback
:notify
{:type (if (:error res) :warning :info)
:title (if (:error res)
"Failed to run tests for..."
"Tests completed!")
:message (if (:error res) (:text block) (format-test-result (:result res)))})))))
(defn tap-run-side-tests []
(p/let [block (editor/get-namespace)
here (editor/get-selection)]
(when (seq (:text block))
(p/let [res (editor/eval-and-render
(-> block
(update :text (fn [s] (str "
(some #(try
(let [nt (symbol (str \"" s "\" \"-\" %))]
(require nt)
(clojure.test/run-tests nt))
(catch Throwable _))
[\"test\" \"expectations\"])")))
(update :text wrap-in-tap)
(assoc :range (:range here))))]
(editor/run-callback
:notify
{:type (if (:error res) :warning :info)
:title (if (:error res)
"Failed to run tests for..."
"Tests completed!")
:message (if (:error res) (:text block) (format-test-result (:result res)))})))))
(defn tap-doc-var []
(p/let [block (editor/get-var)]
(when (seq (:text block))
(-> block
(update :text
#(str
"(java.net.URL."
" (str \"http://clojuredocs.org/\""
" (-> (str (symbol #'" % "))"
;; clean up ? ! &
" (clojure.string/replace \"?\" \"%3f\")"
" (clojure.string/replace \"!\" \"%21\")"
" (clojure.string/replace \"&\" \"%26\")"
")))"))
(update :text wrap-in-tap)
(editor/eval-and-render)))))
(defn tap-javadoc []
(p/let [block (editor/get-selection)
block (if (< 1 (count (:text block))) block (editor/get-var))]
(when (seq (:text block))
(-> block
(update :text
#(str
"(let [c-o-o " %
" ^Class c (if (instance? Class c-o-o) c-o-o (class c-o-o))] "
" (java.net.URL. "
" (clojure.string/replace"
" ((requiring-resolve 'clojure.java.javadoc/javadoc-url)"
" (.getName c))"
;; strip inner class
" #\"\\$[a-zA-Z0-9_]+\" \"\""
")))"))
(update :text wrap-in-tap)
(editor/eval-and-render)))))
(defn- add-libs [deps]
(str "((requiring-resolve 'clojure.tools.deps.alpha.repl/add-libs) '" deps ")"))
(defn tap-add-libs []
(p/let [block (editor/get-block)]
(when (seq (:text block))
(-> block
(update :text add-libs)
(update :text wrap-in-tap)
(editor/eval-and-render)))))

View file

@@ -9,19 +9,19 @@ jobs:
build-and-release:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v4
- uses: actions/checkout@v2
with:
fetch-depth: 0
- uses: actions/setup-java@v4
- uses: actions/setup-java@v2
with:
distribution: 'temurin'
java-version: '21'
distribution: 'adopt'
java-version: '11'
- name: Setup Clojure
uses: DeLaGuardo/setup-clojure@master
with:
cli: '1.12.0.1530'
tools-deps: '1.10.3.1053'
- name: Cache All The Things
uses: actions/cache@v4
uses: actions/cache@v2
with:
path: |
~/.m2/repository
@@ -30,23 +30,19 @@ jobs:
~/.cpcache
key: ${{ runner.os }}-${{ hashFiles('**/deps.edn') }}
- name: Setup Databases
run: docker compose up -d
run: docker-compose up -d
env:
MYSQL_ROOT_PASSWORD: testing
- name: Run MariaDB Tests
run: clojure -M:test:runner
- name: Create ClojureTest
run: ./run-tests.sh create
env:
MYSQL_ROOT_PASSWORD: testing
NEXT_JDBC_TEST_MYSQL: yes
NEXT_JDBC_TEST_MARIADB: yes
- name: Run All Tests and Release
- name: Run All Tests
run: ./run-tests.sh
env:
MYSQL_ROOT_PASSWORD: testing
- name: Run Tests
run: clojure -T:build ci :snapshot false
env:
MYSQL_ROOT_PASSWORD: testing
NEXT_JDBC_TEST_MYSQL: yes
NEXT_JDBC_TEST_XTDB: yes
NEXT_JDBC_TEST_MSSQL: yes
MSSQL_SA_PASSWORD: Str0ngP4ssw0rd
- name: Deploy Release
run: clojure -T:build deploy :snapshot false
env:

View file

@@ -9,17 +9,17 @@ jobs:
build-and-snapshot:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v4
- uses: actions/setup-java@v4
- uses: actions/checkout@v2
- uses: actions/setup-java@v2
with:
distribution: 'temurin'
java-version: '21'
distribution: 'adopt'
java-version: '11'
- name: Setup Clojure
uses: DeLaGuardo/setup-clojure@master
with:
cli: '1.12.0.1530'
tools-deps: '1.10.3.1053'
- name: Cache All The Things
uses: actions/cache@v4
uses: actions/cache@v2
with:
path: |
~/.m2/repository
@@ -28,23 +28,19 @@ jobs:
~/.cpcache
key: ${{ runner.os }}-${{ hashFiles('**/deps.edn') }}
- name: Setup Databases
run: docker compose up -d
run: docker-compose up -d
env:
MYSQL_ROOT_PASSWORD: testing
- name: Run MariaDB Tests
run: clojure -M:test:runner
- name: Create ClojureTest
run: ./run-tests.sh create
env:
MYSQL_ROOT_PASSWORD: testing
NEXT_JDBC_TEST_MYSQL: yes
NEXT_JDBC_TEST_MARIADB: yes
- name: Run All Tests and Snapshot
- name: Run All Tests
run: ./run-tests.sh
env:
MYSQL_ROOT_PASSWORD: testing
- name: Run Tests
run: clojure -T:build ci :snapshot true
env:
MYSQL_ROOT_PASSWORD: testing
NEXT_JDBC_TEST_MYSQL: yes
NEXT_JDBC_TEST_XTDB: yes
NEXT_JDBC_TEST_MSSQL: yes
MSSQL_SA_PASSWORD: Str0ngP4ssw0rd
- name: Deploy Snapshot
run: clojure -T:build deploy :snapshot true
env:
@@ -55,24 +51,54 @@ jobs:
runs-on: ubuntu-latest
strategy:
matrix:
java: [ '11', '17', '21' ]
java: [ '8', '14', '17' ]
steps:
- uses: actions/checkout@v4
- uses: actions/setup-java@v4
- uses: actions/checkout@v2
- uses: actions/setup-java@v2
with:
distribution: 'temurin'
distribution: 'adopt'
java-version: ${{ matrix.java }}
- name: Setup Clojure
uses: DeLaGuardo/setup-clojure@master
with:
cli: '1.12.0.1530'
tools-deps: '1.10.3.1053'
- name: Cache All The Things
uses: actions/cache@v4
uses: actions/cache@v2
with:
path: |
~/.m2/repository
~/.gitlibs
~/.clojure
~/.cpcache
key: ${{ runner.os }}-${{ hashFiles('**/deps.edn') }}
- name: Run Tests
run: clojure -T:build:jdk${{ matrix.java }} test
run: clojure -T:build test
build-graalvm-new:
runs-on: ubuntu-latest
strategy:
matrix:
graalvm: [ '21.1.0' ]
base: [ 'java11', 'java16' ]
steps:
- uses: actions/checkout@v2
- name: Setup GraalVM
uses: DeLaGuardo/setup-graalvm@5.0
with:
graalvm: ${{ matrix.graalvm }}
java: ${{ matrix.base }}
- name: Setup Clojure
uses: DeLaGuardo/setup-clojure@master
with:
tools-deps: '1.10.3.1053'
- name: Cache All The Things
uses: actions/cache@v2
with:
path: |
~/.m2/repository
~/.gitlibs
~/.clojure
~/.cpcache
key: ${{ runner.os }}-${{ hashFiles('**/deps.edn') }}
- name: Run Tests
run: clojure -T:build test

View file

@@ -7,19 +7,19 @@ jobs:
runs-on: ubuntu-latest
strategy:
matrix:
java: [ '11', '17', '21' ]
java: [ '8', '11', '14', '17', '18' ]
steps:
- uses: actions/checkout@v4
- uses: actions/setup-java@v4
- uses: actions/checkout@v2
- uses: actions/setup-java@v2
with:
distribution: 'temurin'
distribution: 'adopt'
java-version: ${{ matrix.java }}
- name: Setup Clojure
uses: DeLaGuardo/setup-clojure@master
with:
cli: '1.12.0.1530'
tools-deps: '1.10.3.1053'
- name: Cache All The Things
uses: actions/cache@v4
uses: actions/cache@v2
with:
path: |
~/.m2/repository
@@ -28,20 +28,43 @@ jobs:
~/.cpcache
key: ${{ runner.os }}-${{ hashFiles('**/deps.edn') }}
- name: Setup Databases
run: docker compose up -d
run: docker-compose up -d
env:
MYSQL_ROOT_PASSWORD: testing
- name: Run MariaDB Tests
run: clojure -M:test:runner
- name: Create ClojureTest
run: ./run-tests.sh create
env:
MYSQL_ROOT_PASSWORD: testing
NEXT_JDBC_TEST_MYSQL: yes
NEXT_JDBC_TEST_MARIADB: yes
- name: Run All Tests
run: clojure -M:test:runner:jdk${{ matrix.java }}
- name: Run Tests
run: ./run-tests.sh
env:
MYSQL_ROOT_PASSWORD: testing
NEXT_JDBC_TEST_MYSQL: yes
NEXT_JDBC_TEST_XTDB: yes
NEXT_JDBC_TEST_MSSQL: yes
MSSQL_SA_PASSWORD: Str0ngP4ssw0rd
build-graalvm-new:
runs-on: ubuntu-latest
strategy:
matrix:
graalvm: [ '21.1.0' ]
base: [ 'java11', 'java16' ]
steps:
- uses: actions/checkout@v2
- name: Setup GraalVM
uses: DeLaGuardo/setup-graalvm@5.0
with:
graalvm: ${{ matrix.graalvm }}
java: ${{ matrix.base }}
- name: Setup Clojure
uses: DeLaGuardo/setup-clojure@master
with:
tools-deps: '1.10.3.1053'
- name: Cache All The Things
uses: actions/cache@v2
with:
path: |
~/.m2/repository
~/.gitlibs
~/.clojure
~/.cpcache
key: ${{ runner.os }}-${{ hashFiles('**/deps.edn') }}
- name: Run Tests
run: clojure -T:build test

.gitignore
View file

@@ -1,8 +1,7 @@
.calva/output-window/
.calva/repl.calva-repl
.classpath
.clj-kondo/.cache
.clj-kondo/.lock
.clj-kondo/com.github.seancorfield/next.jdbc
.cpcache
.eastwood
.factorypath
@@ -20,6 +19,7 @@
.settings
.socket-repl-port
.sw*
.vscode
*.class
*.jar
*.swp

.gitpod.dockerfile (new file, 3 lines)
View file

@@ -0,0 +1,3 @@
FROM gitpod/workspace-full
RUN brew install clojure/tools/clojure@1.10.3.933

.gitpod.yml (new file, 23 lines)
View file

@@ -0,0 +1,23 @@
image:
file: .gitpod.dockerfile
vscode:
extensions:
- betterthantomorrow.calva
- mauricioszabo.clover
tasks:
- name: Prepare deps/clover
init: |
clojure -A:test -P
echo 50505 > .socket-repl-port
mkdir ~/.config/clover
cp .clover/config.cljs ~/.config/clover/
- name: Start REPL
command: clojure -J-Dclojure.server.repl="{:address \"0.0.0.0\" :port 50505 :accept clojure.core.server/repl}" -A:test
- name: See Changes
command: code CHANGELOG.md
github:
prebuilds:
develop: true

.joker (new file, 6 lines)
View file

@@ -0,0 +1,6 @@
{:known-macros [next.jdbc/with-transaction]
:ignored-unused-namespaces [next.jdbc.connection
next.jdbc.date-time
next.jdbc.prepare
next.jdbc.result-set
next.jdbc.transaction]}

View file

@@ -2,116 +2,8 @@
Only accretive/fixative changes will be made from now on.
* 1.3.next in progress
* Fix handling of `false` in `clob-column-reader` [#299](https://github.com/seancorfield/next-jdbc/issues/299) via PR [#300](https://github.com/seancorfield/next-jdbc/pull/300) from [@GAumala](https://github.com/GAumala)
* Switch tests to LazyTest via PR [#297](https://github.com/seancorfield/next-jdbc/pull/297).
* Update dev/test/build deps.
* 1.3.1002 -- 2025-03-06
* Address [#296](https://github.com/seancorfield/next-jdbc/issues/296) by adding an explicit check (and `throw`) for `sql-params` in `next.jdbc` functions.
* Address [#295](https://github.com/seancorfield/next-jdbc/issues/295) by providing a way to tell `next.jdbc` that certain options should be passed "as-is" in the `Properties` object when creating a `Connection` -- `:next.jdbc/as-is-properties` accepts a sequence (or set) of keywords, identifying properties that should not be converted to strings.
* Fix [#181](https://github.com/seancorfield/next-jdbc/issues/181) (again!) by adding `Wrapped` protocol as a way for `DefaultOptions` and `SQLLogging` to consistently expose the underlying connectable, even when nested.
* 1.3.994 -- 2025-01-28
* Fix [#293](https://github.com/seancorfield/next-jdbc/issues/293) by no longer `locking` on the `Connection` retrieved from a `DataSource`.
* Fix documentation examples of `execute-batch!` via PR [#292](https://github.com/seancorfield/next-jdbc/pull/292) from [@devurandom](https://github.com/devurandom).
* Update `java.data` to 1.3.113.
* Beef up bit/boolean tests and enable them for XTDB.
* 1.3.981 -- 2024-12-13
* Address [#291](https://github.com/seancorfield/next-jdbc/issues/291) by adding an XTDB section to **Tips & Tricks**.
* Added XTDB as a supported database for testing via PR [#290](https://github.com/seancorfield/next-jdbc/pull/290). _Note: not all features are tested against XTDB due to several fundamental differences in architecture, mostly around primary key/generated keys and lack of DDL operations (since XTDB is schemaless)._
* Update dev/test dependencies.
* 1.3.967 -- 2024-12-02
* Address [#288](https://github.com/seancorfield/next-jdbc/issues/288) by adding speculative support for `:dbtype "xtdb"`.
* Fix [#287](https://github.com/seancorfield/next-jdbc/issues/287) by merging user-supplied options over `:return-keys true`.
* Fix [#282](https://github.com/seancorfield/next-jdbc/issues/282) by tracking raw `Connection` objects for active TXs, which relaxes several of the conditions around nested transactions.
* Replace `assert` calls with proper validation, throwing `IllegalArgumentException` on failure.
* Removed (experimental) `:name-fn` option since the driver for it no longer exists (qualified columns names in XTDB).
* 1.3.955 -- 2024-10-06
* Address [#285](https://github.com/seancorfield/next-jdbc/issues/285) by setting the default Clojure version to the earliest supported (1.10.3) to give a better hint to users.
* Update PostgreSQL **Tips & Tricks** example code to fix possible NPE. PR [#284](https://github.com/seancorfield/next-jdbc/pull/284) from [@ExNexu](https://github.com/ExNexu).
* Address [#283](https://github.com/seancorfield/next-jdbc/issues/283) by adding a note in the documentation, linking to the PostgreSQL bug report about `ANY(array)`.
* ~Address [#269](https://github.com/seancorfield/next-jdbc/issues/269) by adding `:name-fn` as an option (primarily for the SQL builder functions, but also for result set processing); the default is `clojure.core/name` but you can now use `next.jdbc.sql.builder/qualified-name` to preserve the qualifier.~ _[This was removed in 1.3.967 since XTDB no longer supports qualified column names]_
* Update testing deps; `docker-compose` => `docker compose`.
* 1.3.939 -- 2024-05-17
* Fix [#280](https://github.com/seancorfield/next-jdbc/issues/280) by allowing `-` as well as `_` in `nav` foreign key names.
* Address [#279](https://github.com/seancorfield/next-jdbc/issues/279) by adding the missing documentation.
* Address [#278](https://github.com/seancorfield/next-jdbc/issues/278) by fixing link in options page.
* Update dev dependencies, including testing against Clojure 1.12 Alpha 11.
* 1.3.925 -- 2024-03-15
* Address [#275](https://github.com/seancorfield/next-jdbc/issues/275) by noting that PostgreSQL may perform additional SQL queries to produce table names used in qualified result set builders.
* Address [#274](https://github.com/seancorfield/next-jdbc/issues/274) by adding `next.jdbc.sql/aggregate-by-keys` as a convenient wrapper around `find-by-keys` when you want just a single aggregate value back (such as `count`, `max`, etc).
* Address [#273](https://github.com/seancorfield/next-jdbc/issues/273) by linking to [PG2](https://github.com/igrishaev/pg2) in the PostgreSQL **Tips & Tricks** section.
* Address [#268](https://github.com/seancorfield/next-jdbc/issues/268) by expanding the documentation around `insert-multi!` and `insert!`.
* Update dependency versions (including Clojure).
* Code cleanup per `clj-kondo`.
* 1.3.909 -- 2023-12-16
* Address [#267](https://github.com/seancorfield/next-jdbc/issues/267) by adding the `:schema-opts` option to override the default conventions for identifying foreign keys in columns.
* Address [#264](https://github.com/seancorfield/next-jdbc/issues/264) by letting `insert-multi!` accept empty rows (and producing an empty result vector). This improves compatibility with `clojure.java.jdbc`.
* Address [#258](https://github.com/seancorfield/next-jdbc/issues/258) by updating all the library (driver) versions in Getting Started to match the latest versions being tested (from `deps.edn`).
* Update `java.data` to 1.1.103 so that `next.jdbc` no longer has a transitive dependency on `org.clojure/tools.logging`!
* Attempt to clarify that when calling `reduce` on the result of `plan`, you must provide an initial value.
* Expand examples for calling `next.jdbc.sql/find-by-keys` to show `LIKE` and `IN` clauses.
* Update `tools.build` to 0.9.6 (and get rid of `template/pom.xml` in favor of new `:pom-data` option to `b/write-pom`).
* 1.3.894 -- 2023-09-24
* Fix [#257](https://github.com/seancorfield/next-jdbc/issues/257) by making the `fdef` spec for `with-transaction` more permissive. Also add specs for `on-connection` and the `+options` variants of both macros.
* Address [#256](https://github.com/seancorfield/next-jdbc/issues/256) by adding `with-transaction+options` and `on-connection+options`.
* Updates most of the JDBC drivers used for testing, including SQLite 3.43.0.0 which now throws an exception when `.getGeneratedKeys()` is called so you cannot use `:return-generated-keys true` with it but you can add `RETURNING *` to your SQL statements instead (the tests have been updated to reflect this).
* Update `tools.build` to 0.9.5 (and remove `:java-opts` from `build/test`)
* 1.3.883 -- 2023-06-25
* Address [#254](https://github.com/seancorfield/next-jdbc/issues/254) by adding `next.jdbc/active-tx?` and adding more explanation to [**Transactions**](https://cljdoc.org/d/com.github.seancorfield/next.jdbc/CURRENT/doc/getting-started/transactions) about the conventions behind transactions and the limitations of thread-local tracking of active transactions in `next.jdbc`.
* Address [#251](https://github.com/seancorfield/next-jdbc/issues/251) by updating `next.jdbc/with-logging` docstring.
* Update dev/test dependencies.
* 1.3.874 -- 2023-04-15
* Fix [#248](https://github.com/seancorfield/next-jdbc/issues/248) by allowing `:port` to be `:none`.
* Address [#247](https://github.com/seancorfield/next-jdbc/issues/247) by adding examples of using `next.jdbc.connection/jdbc-url` to build a connection string with additional parameters when creating connection pools.
* 1.3.865 -- 2023-03-31
* Fix [#246](https://github.com/seancorfield/next-jdbc/issues/246) by adopting the `strop` function from HoneySQL.
* Address [#245](https://github.com/seancorfield/next-jdbc/issues/245) by not `locking` the `Connection` when `*nested-tx*` is bound to `:ignore` -- improving `clojure.java.jdbc` compatibility.
* Address [#237](https://github.com/seancorfield/next-jdbc/issues/237) by adding an `:init-fn` option to the `db-spec` argument for `next.jdbc.connection/component`.
* 1.3.862 -- 2023-03-13
* Fix [#243](https://github.com/seancorfield/next-jdbc/issues/243) by ensuring URI properties become keywords.
* Fix [#242](https://github.com/seancorfield/next-jdbc/issues/242) by making the logging wrapper aware of the default options wrapper.
* 1.3.858 -- 2023-03-05
* Address [#241](https://github.com/seancorfield/next-jdbc/issues/241) by correcting link to PostgreSQL docs.
* Address [clj-kondo#1685](https://github.com/clj-kondo/clj-kondo/issues/1685) by using `.clj_kondo` extension for hook files.
* Improve docs for SQLite users per [#239](https://github.com/seancorfield/next-jdbc/pull/239) -- [peristeri](https://github.com/peristeri).
* Address [#236](https://github.com/seancorfield/next-jdbc/issues/236) by showing examples of `run!` over `plan`.
* 1.3.847 -- 2022-11-04
* Fix [#232](https://github.com/seancorfield/next-jdbc/issues/232) by using `as-cols` in `insert-multi!` SQL builder. Thanks to @changsu-farmmorning for spotting that bug!
* Fix [#229](https://github.com/seancorfield/next-jdbc/issues/229) by adding `next.jdbc.connect/uri->db-spec` which converts a URI string to a db-spec hash map; in addition, if `DriverManager/getConnection` fails, it assumes it was passed a URI instead of a JDBC URL, and retries after calling that function and then recreating the JDBC URL (which should have the effect of moving the embedded user/password credentials into the properties structure instead of the URL).
* Address [#228](https://github.com/seancorfield/next-jdbc/issues/228) by adding `PreparedStatement` caveat to the Oracle **Tips & Tricks** section.
* Address [#226](https://github.com/seancorfield/next-jdbc/issues/226) by adding a section on exception handling to **Tips & Tricks** (TL;DR: it's all horribly vendor-specific!).
* Add `on-connection` to exported `clj-kondo` configuration.
* Switch `run-test` from `sh` to `bb`.
* 1.3.834 -- 2022-09-23
* Fix [#227](https://github.com/seancorfield/next-jdbc/issues/227) by correcting how [#221](https://github.com/seancorfield/next-jdbc/issues/221) was implemented.
* Address [#224](https://github.com/seancorfield/next-jdbc/issues/224) by attempting to clarify how to use the snake/kebab options and builders.
* 1.3.828 -- 2022-09-11
* Fix [#222](https://github.com/seancorfield/next-jdbc/issues/222) by correcting implementation of `.cons` on a row.
* Address [#221](https://github.com/seancorfield/next-jdbc/issues/221) by supporting `:column-fn` a top-level option in `plan`-related functions to transform keys used in reducing function contexts. Also corrects handling of column names in schema `nav`igation (which previously only supported `:table-fn` and incorrectly applied it to columns as well).
* Address [#218](https://github.com/seancorfield/next-jdbc/issues/218) by moving `:extend-via-metadata true` after the protocols' docstrings.
* Document `:useBulkCopyForBatchInsert` for Microsoft SQL Server via PR [#216](https://github.com/seancorfield/next-jdbc/issues/216) -- [danskarda](https://github.com/danskarda).
* Address [#215](https://github.com/seancorfield/next-jdbc/issues/215) by dropping official support for JDK 8 and updating various JDBC drivers in the testing matrix.
* Address [#214](https://github.com/seancorfield/next-jdbc/issues/214) by updating test/CI versions.
* 1.2.next in progress
* Address [#212](https://github.com/seancorfield/next-jdbc/issues/212) by documenting the problem with SQLite's JDBC driver.
* Fix [#211](https://github.com/seancorfield/next-jdbc/issues/211) by auto-creating `clojure_test` DB in MySQL if needed; also streamline the CI processes.
* Fix [#210](https://github.com/seancorfield/next-jdbc/issues/210) by updating CI to test against MySQL and SQL Server.
* Switch SQL Server testing setup to `docker-compose`.
* 1.2.796 -- 2022-08-01
@@ -219,7 +111,7 @@ Only accretive/fixative changes will be made from now on.
* Fix #130 by implementing `clojure.lang.ILookup` on the three builder adapters.
* Fix #129 by adding `with-column-value` to `RowBuilder` and a more generic `builder-adapter`.
* Fix #128 by adding a test for the "not found" arity of lookup on mapified result sets.
* Fix #121 by conditionally adding `next.jdbc/snake-kebab-opts`, `next.jdbc/unqualified-snake-kebab-opts`, `next.jdbc.result-set/as-kebab-maps`, and `next.jdbc.result-set/as-unqualified-kebab-maps` (which are present only if `camel-snake-kebab` is on your classpath). _As of 1.2.659, these are included unconditionally and `next.jdbc` depends directly on `camel-snake-kebab`._
* Fix #121 by conditionally adding `next.jdbc/snake-kebab-opts`, `next.jdbc/unqualified-snake-kebab-opts`, `next.jdbc.result-set/as-kebab-maps`, and `next.jdbc.result-set/as-unqualified-kebab-maps` (which are present only if `camel-snake-kebab` is on your classpath).
* Correct MySQL batch statement rewrite tip: it's `:rewriteBatchedStatements true` (plural). Also surface the batch statement tips in the **Tips & Tricks** page.
* Clarify how combining is interleaving with reducing in **Reducing and Folding with `plan`**.
* Use "JDBC URL" consistently everywhere (instead of "JDBC URI" in several places).

View file

@@ -1,4 +1,4 @@
# next.jdbc [![Clojure CI Release](https://github.com/seancorfield/next-jdbc/actions/workflows/test-and-release.yml/badge.svg)](https://github.com/seancorfield/next-jdbc/actions/workflows/test-and-release.yml) [![Clojure CI Develop](https://github.com/seancorfield/next-jdbc/actions/workflows/test-and-snapshot.yml/badge.svg)](https://github.com/seancorfield/next-jdbc/actions/workflows/test-and-snapshot.yml) [![Clojure CI Pull Request](https://github.com/seancorfield/next-jdbc/actions/workflows/test.yml/badge.svg)](https://github.com/seancorfield/next-jdbc/actions/workflows/test.yml)
# next.jdbc [![Clojure CI](https://github.com/seancorfield/next-jdbc/actions/workflows/test.yml/badge.svg)](https://github.com/seancorfield/next-jdbc/actions/workflows/test.yml) [![Open in Gitpod](https://gitpod.io/button/open-in-gitpod.svg)](https://gitpod.io/#https://github.com/seancorfield/next-jdbc)
The next generation of `clojure.java.jdbc`: a new low-level Clojure wrapper for JDBC-based access to databases.
@@ -8,11 +8,7 @@ The next generation of `clojure.java.jdbc`: a new low-level Clojure wrapper for
The latest versions on Clojars and on cljdoc:
[![Clojars](https://img.shields.io/badge/clojars-com.github.seancorfield/next.jdbc_1.3.1002-blue.svg?logo=data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAACAAAAAgCAMAAABEpIrGAAAABGdBTUEAALGPC/xhBQAAACBjSFJNAAB6JgAAgIQAAPoAAACA6AAAdTAAAOpgAAA6mAAAF3CculE8AAABjFBMVEUAAAAdCh0qDikdChwAAAAnDSY0EjM2FjUnDiYnDSYnDSYpDigyEDEEAQRGNUb///////8mDSYAAAAAAAAAAAAFAgUqEyoAAAAAAAAAAAAFAgUAAABXU1c2FjVMx+dQx+f///////9Nx+b////4/f6y4vRPt+RQtOT///9Qt+P///8oDSey4vRQr9/////3/P5hzelNx+dNx+dNx+f///8AAAAuDy0zETIAAAAoDScAAAAAAAARBREAAAAvDy40ETMwEC9gSF+Ne42ilKKuoK6Rg5B5ZXlaP1o4Gzf///9nTWZ4YncyEDF/bn/8/Pz9/P339/c1FTUlDCRRM1AbCRtlS2QyEDEuDy1gRWAxEDAzETIwEC/g4OAvDy40EjOaiZorDiq9sbzNyM3UzdQyEDE0ETMzETKflZ/UzdQ5Fzmu4fNYyuhNx+dPt+RLu9xQyOhBbo81GTuW2vCo4PJNx+c4MFE5N1lHiLFEhKQyEDGDboMzETI5Fjh5bXje2d57aHrIw8jc2NyWhJUrDioxe9o4AAAAPnRSTlMAkf+IAQj9+e7n6e31RtqAD/QAAAED+A0ZEQ8DwvkLBsmcR4aG8+cdAD6C8/MC94eP+qoTrgH+/wj1HA8eEvpXOCUAAAABYktHRA8YugDZAAAACXBIWXMAAAsTAAALEwEAmpwYAAAAB3RJTUUH3wcHFjou4Z/shwAAAUpJREFUOMul0/VTwzAUB/AAwyW4y3B3h8EDNuTh7u6UDHcd8I+TbHSjWdrjju/1h77kc+3Lu5aQvyakF/r6B5wu1+DQMEBomLRtG0EpozYDCEccA4iIjIqOiY0bB5iYxHgZ4FQCpYneKmmal0aQPMOXZnUAvJhLkbpInf8NFtKCTrGImK6DJcTlDGl/BXGV6oCsrSNIYAM3aQDwl2xJYBtBB5lZAuyYgWzY3YMcNcjN2wc4EGMEFTg8+hlyfgEenygAj71Q9FBExH0wKC4p1bRTJlJWXqEAVNM05ovbXfkPAHBmAUQPAGaAsXMBLiwA8z3h0gRcsWsObuAWLJu8Awb3ZoB5T8EvS/CgBo9Y5Z8TPwXBJwlUI9Ia/yRrEZ8lID71Olrf0MiamkkL4kurDEjba+C/e2sninR0wrsH8eMTvrqIWbodjh7jyjdtCY3Aniz4jwAAACV0RVh0ZGF0ZTpjcmVhdGUAMjAxNS0wNy0wN1QyMjo1ODo0NiswMjowMCgWtSoAAAAldEVYdGRhdGU6bW9kaWZ5ADIwMTUtMDctMDdUMjI6NTg6NDYrMDI6MDBZSw2WAAAAAElFTkSuQmCC)](https://clojars.org/com.github.seancorfield/next.jdbc)
[![cljdoc](https://cljdoc.org/badge/com.github.seancorfield/next.jdbc?1.3.1002)](https://cljdoc.org/d/com.github.seancorfield/next.jdbc/CURRENT)
[![Slack](https://img.shields.io/badge/slack-next.jdbc-orange.svg?logo=slack)](https://clojurians.slack.com/app_redirect?channel=sql)
[![Join Slack](https://img.shields.io/badge/slack-join_clojurians-orange.svg?logo=slack)](http://clojurians.net)
[![Zulip](https://img.shields.io/badge/zulip-next.jdbc-orange.svg?logo=zulip)](https://clojurians.zulipchat.com/#narrow/channel/152063-sql)
[![Clojars Project](https://clojars.org/com.github.seancorfield/next.jdbc/latest-version.svg)](https://clojars.org/com.github.seancorfield/next.jdbc) [![cljdoc badge](https://cljdoc.org/badge/com.github.seancorfield/next.jdbc?1.2.796)](https://cljdoc.org/d/com.github.seancorfield/next.jdbc/CURRENT)
The documentation on [cljdoc.org](https://cljdoc.org/d/com.github.seancorfield/next.jdbc/CURRENT) is for the current version of `next.jdbc`:
@@ -21,11 +17,11 @@ The documentation on [cljdoc.org](https://cljdoc.org/d/com.github.seancorfield/n
* [Migrating from `clojure.java.jdbc`](https://cljdoc.org/d/com.github.seancorfield/next.jdbc/CURRENT/doc/migration-from-clojure-java-jdbc)
* Feedback via [issues](https://github.com/seancorfield/next-jdbc/issues) or in the [`#sql` channel on the Clojurians Slack](https://clojurians.slack.com/messages/C1Q164V29/) or the [`#sql` stream on the Clojurians Zulip](https://clojurians.zulipchat.com/#narrow/stream/152063-sql).
The documentation on GitHub is for **develop** since the 1.3.1002 release -- [see the CHANGELOG](https://github.com/seancorfield/next-jdbc/blob/develop/CHANGELOG.md) and then read the [corresponding updated documentation](https://github.com/seancorfield/next-jdbc/tree/develop/doc) on GitHub if you want. Older versions of `next.jdbc` were published under the `seancorfield` group ID and you can find [older seancorfield/next.jdbc documentation on cljdoc.org](https://cljdoc.org/versions/seancorfield/next.jdbc).
The documentation on GitHub is for **develop** since the 1.2.796 release -- [see the CHANGELOG](https://github.com/seancorfield/next-jdbc/blob/develop/CHANGELOG.md) and then read the [corresponding updated documentation](https://github.com/seancorfield/next-jdbc/tree/develop/doc) on GitHub if you want. Older versions of `next.jdbc` were published under the `seancorfield` group ID and you can find [older seancorfield/next.jdbc documentation on cljdoc.org](https://cljdoc.org/versions/seancorfield/next.jdbc).
This project follows the version scheme MAJOR.MINOR.COMMITS where MAJOR and MINOR provide some relative indication of the size of the change, but do not follow semantic versioning. In general, all changes endeavor to be non-breaking (by moving to new names rather than by breaking existing names). COMMITS is an ever-increasing counter of commits since the beginning of this repository.
> Note: every commit to the **develop** branch runs CI (GitHub Actions) and successful runs push a MAJOR.MINOR.9999-SNAPSHOT build to Clojars so the very latest version of `next.jdbc` is always available either via that [snapshot on Clojars](https://clojars.org/com.github.seancorfield/next.jdbc) or via a git dependency on the latest SHA.
> Note: every commit to the **develop** branch runs CI (GitHub Actions) and successful runs push a MAJOR.MINOR.999-SNAPSHOT build to Clojars so the very latest version of `next.jdbc` is always available either via that [snapshot on Clojars](https://clojars.org/com.github.seancorfield/next.jdbc) or via a git dependency on the latest SHA.
## Motivation
@@ -50,8 +46,8 @@ The primary concepts behind `next.jdbc` are that you start by producing a `javax
From a `DataSource`, either you or `next.jdbc` can create a `java.sql.Connection` via the `get-connection` function. You can specify an options hash map to `get-connection` to modify the connection that is created: `:read-only`, `:auto-commit`.
The primary SQL execution API in `next.jdbc` is:
* `plan` -- yields an `IReduceInit` that, when reduced with an initial value, executes the SQL statement and then reduces over the `ResultSet` with as little overhead as possible.
* `execute!` -- executes the SQL statement and produces a vector of realized hash maps, that use qualified keywords for the column names, of the form `:<table>/<column>`. If you join across multiple tables, the qualified keywords will reflect the originating tables for each of the columns. If the SQL produces named values that do not come from an associated table, a simple, unqualified keyword will be used. The realized hash maps returned by `execute!` are `Datafiable` and thus `Navigable` (see Clojure 1.10's `datafy` and `nav` functions, and tools like [Portal](https://github.com/djblue/portal), [Reveal](https://github.com/vlaaad/reveal), and Nubank's Morse -- formerly Cognitect's REBL). Alternatively, you can specify `{:builder-fn rs/as-arrays}` and produce a vector with column names followed by vectors of row values. `rs/as-maps` is the default for `:builder-fn` but there are also `rs/as-unqualified-maps` and `rs/as-unqualified-arrays` if you want unqualified `:<column>` column names (and there are also lower-case variants of all of these).
* `plan` -- yields an `IReduceInit` that, when reduced, executes the SQL statement and then reduces over the `ResultSet` with as little overhead as possible.
* `execute!` -- executes the SQL statement and produces a vector of realized hash maps, that use qualified keywords for the column names, of the form `:<table>/<column>`. If you join across multiple tables, the qualified keywords will reflect the originating tables for each of the columns. If the SQL produces named values that do not come from an associated table, a simple, unqualified keyword will be used. The realized hash maps returned by `execute!` are `Datafiable` and thus `Navigable` (see Clojure 1.10's `datafy` and `nav` functions, and tools like [Portal](https://github.com/djblue/portal), [Reveal](https://github.com/vlaaad/reveal), and Cognitect's REBL). Alternatively, you can specify `{:builder-fn rs/as-arrays}` and produce a vector with column names followed by vectors of row values. `rs/as-maps` is the default for `:builder-fn` but there are also `rs/as-unqualified-maps` and `rs/as-unqualified-arrays` if you want unqualified `:<column>` column names (and there are also lower-case variants of all of these).
* `execute-one!` -- executes the SQL or DDL statement and produces a single realized hash map. The realized hash map returned by `execute-one!` is `Datafiable` and thus `Navigable`.
In addition, there are API functions to create `PreparedStatement`s (`prepare`) from `Connection`s, which can be passed to `plan`, `execute!`, or `execute-one!`, and to run code inside a transaction (the `transact` function and the `with-transaction` macro).
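A minimal usage sketch of that API, assuming a datasource `ds` and a `users` table (both hypothetical here):

(require '[next.jdbc :as jdbc])

;; plan: reduce with an explicit initial value
(reduce (fn [emails row] (conj emails (:email row)))
        []
        (jdbc/plan ds ["SELECT * FROM users"]))

;; execute!: a vector of realized hash maps with qualified keys such as :users/email
(jdbc/execute! ds ["SELECT * FROM users WHERE id = ?" 42])

;; execute-one!: a single realized hash map
(jdbc/execute-one! ds ["SELECT COUNT(*) AS total FROM users"])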
@@ -89,6 +85,6 @@ In addition, convenience functions -- "syntactic sugar" -- are provided to inser
## License
Copyright © 2018-2024 Sean Corfield
Copyright © 2018-2021 Sean Corfield
Distributed under the Eclipse Public License version 1.0.

View file

@@ -5,78 +5,36 @@
clojure -T:build deploy
Run tests via:
clojure -M:test:runner
clojure -X:test
For more information, run:
clojure -A:deps -T:build help/doc"
(:refer-clojure :exclude [test])
(:require [clojure.tools.build.api :as b]
[deps-deploy.deps-deploy :as dd]
[clojure.string :as str]))
[org.corfield.build :as bb]))
(def lib 'com.github.seancorfield/next.jdbc)
(defn- the-version [patch] (format "1.3.%s" patch))
(defn- the-version [patch] (format "1.2.%s" patch))
(def version (the-version (b/git-count-revs nil)))
(def snapshot (the-version "9999-SNAPSHOT"))
(def class-dir "target/classes")
(def snapshot (the-version "999-SNAPSHOT"))
(defn test "Run all the tests." [opts]
(doseq [alias [:1.10 :1.11 :1.12]]
(println "\nRunning tests for Clojure" (name alias))
(let [basis (b/create-basis
{:aliases (cond-> [:test alias]
(str/starts-with? (System/getProperty "java.version") "21")
(conj :jdk21))})
cmds (b/java-command
{:basis basis
:main 'clojure.main
:main-args ["-m" "lazytest.main"]})
{:keys [exit]} (b/process cmds)]
(when-not (zero? exit) (throw (ex-info "Tests failed" {})))))
(reduce (fn [opts alias]
(bb/run-tests (assoc opts :aliases [alias])))
opts
[:1.10 :1.11 :master])
opts)
(defn- pom-template [version]
[[:description "The next generation of clojure.java.jdbc: a new low-level Clojure wrapper for JDBC-based access to databases."]
[:url "https://github.com/seancorfield/next-jdbc"]
[:licenses
[:license
[:name "Eclipse Public License"]
[:url "http://www.eclipse.org/legal/epl-v10.html"]]]
[:developers
[:developer
[:name "Sean Corfield"]]]
[:scm
[:url "https://github.com/seancorfield/next-jdbc"]
[:connection "scm:git:https://github.com/seancorfield/next-jdbc.git"]
[:developerConnection "scm:git:ssh:git@github.com:seancorfield/next-jdbc.git"]
[:tag (str "v" version)]]])
(defn- jar-opts [opts]
(let [version (if (:snapshot opts) snapshot version)]
(assoc opts
:lib lib :version version
:jar-file (format "target/%s-%s.jar" lib version)
:basis (b/create-basis {})
:class-dir class-dir
:target "target"
:src-dirs ["src"]
:pom-data (pom-template version))))
(defn ci "Run the CI pipeline of tests (and build the JAR)." [opts]
(test opts)
(b/delete {:path "target"})
(let [opts (jar-opts opts)]
(println "\nWriting pom.xml...")
(b/write-pom opts)
(println "\nCopying source...")
(b/copy-dir {:src-dirs ["resources" "src"] :target-dir class-dir})
(println "\nBuilding" (:jar-file opts) "...")
(b/jar opts))
opts)
(-> opts
(assoc :lib lib :version (if (:snapshot opts) snapshot version))
(test)
(bb/clean)
(assoc :src-pom "template/pom.xml")
(bb/jar)))
(defn deploy "Deploy the JAR to Clojars." [opts]
(let [{:keys [jar-file] :as opts} (jar-opts opts)]
(dd/deploy {:installer :remote :artifact (b/resolve-path jar-file)
:pom-file (b/pom-path (select-keys opts [:lib :class-dir]))}))
opts)
(-> opts
(assoc :lib lib :version (if (:snapshot opts) snapshot version))
(bb/deploy)))

View file

@ -1,58 +1,56 @@
{:mvn/repos {"sonatype" {:url "https://oss.sonatype.org/content/repositories/snapshots/"}
"ossrh-snapshots" {:url "https://s01.oss.sonatype.org/content/repositories/snapshots"}}
{:mvn/repos {"sonatype" {:url "https://oss.sonatype.org/content/repositories/snapshots/"}}
:paths ["src" "resources"]
:deps {org.clojure/clojure {:mvn/version "1.10.3"}
org.clojure/java.data {:mvn/version "1.3.113"}
org.clojure/java.data {:mvn/version "1.0.95"}
camel-snake-kebab/camel-snake-kebab {:mvn/version "0.4.3"}}
camel-snake-kebab/camel-snake-kebab {:mvn/version "0.4.2"}}
:aliases
{;; for help: clojure -A:deps -T:build help/doc
:build {:deps {io.github.clojure/tools.build {:mvn/version "0.10.7"}
slipset/deps-deploy {:mvn/version "0.2.2"}}
:build {:deps {io.github.seancorfield/build-clj
{:git/tag "v0.8.0" :git/sha "9bd8b8a"}}
:ns-default build}
;; versions to test against:
:1.10 {:override-deps {org.clojure/clojure {:mvn/version "1.10.3"}}}
:1.11 {:override-deps {org.clojure/clojure {:mvn/version "1.11.4"}}}
:1.12 {:override-deps {org.clojure/clojure {:mvn/version "1.12.0"}}}
:1.11 {:override-deps {org.clojure/clojure {:mvn/version "1.11.0"}}}
:master {:override-deps {org.clojure/clojure {:mvn/version "1.12.0-master-SNAPSHOT"}}}
;; running tests/checks of various kinds:
:test {:extra-paths ["test"]
:test {:extra-paths ["test"] ; can also run clojure -X:test
:extra-deps {org.clojure/test.check {:mvn/version "1.1.1"}
io.github.noahtheduke/lazytest {:mvn/version "1.6.1"}
io.github.cognitect-labs/test-runner
{:git/tag "v0.5.0" :git/sha "48c3c67"}
;; connection pooling
com.zaxxer/HikariCP {:mvn/version "6.3.0"}
com.mchange/c3p0 {:mvn/version "0.10.1"}
;; 5.0.0 is not compatible with JDK 8:
com.zaxxer/HikariCP {:mvn/version "3.4.2"}
com.mchange/c3p0 {:mvn/version "0.9.5.5"}
;; JDBC drivers
;; 10.16.x is JDK17+
org.apache.derby/derby {:mvn/version "10.15.2.0"}
org.apache.derby/derbyshared {:mvn/version "10.15.2.0"}
org.hsqldb/hsqldb {:mvn/version "2.7.4"}
com.h2database/h2 {:mvn/version "2.3.232"}
;; compatible with JDK8+:
org.apache.derby/derby {:mvn/version "10.14.2.0"}
;; compatible only with JDK9+:
;; org.apache.derby/derby {:mvn/version "10.15.2.0"}
;; org.apache.derby/derbyshared {:mvn/version "10.15.2.0"}
;; 2.6.1 is not compatible with JDK 8:
org.hsqldb/hsqldb {:mvn/version "2.5.0"}
com.h2database/h2 {:mvn/version "1.4.200"}
net.sourceforge.jtds/jtds {:mvn/version "1.3.1"}
org.mariadb.jdbc/mariadb-java-client {:mvn/version "3.5.2"}
com.mysql/mysql-connector-j {:mvn/version "9.2.0"}
;; 42.7.4 changes update count (to -1) for stored procs:
org.postgresql/postgresql {:mvn/version "42.7.5"}
io.zonky.test/embedded-postgres {:mvn/version "2.1.0"}
io.zonky.test.postgres/embedded-postgres-binaries-darwin-amd64 {:mvn/version "17.4.0"}
io.zonky.test.postgres/embedded-postgres-binaries-linux-amd64 {:mvn/version "17.4.0"}
io.zonky.test.postgres/embedded-postgres-binaries-windows-amd64 {:mvn/version "17.4.0"}
org.xerial/sqlite-jdbc {:mvn/version "3.49.1.0"}
com.microsoft.sqlserver/mssql-jdbc {:mvn/version "12.10.0.jre11"}
;; use log4j2 to reduce log noise during testing:
org.apache.logging.log4j/log4j-api {:mvn/version "2.24.3"}
;; bridge everything into log4j:
org.apache.logging.log4j/log4j-1.2-api {:mvn/version "2.24.3"}
org.apache.logging.log4j/log4j-jcl {:mvn/version "2.24.3"}
org.apache.logging.log4j/log4j-jul {:mvn/version "2.24.3"}
org.apache.logging.log4j/log4j-slf4j-impl {:mvn/version "2.24.3"}
org.apache.logging.log4j/log4j-slf4j2-impl {:mvn/version "2.24.3"}}
:jvm-opts ["-Dlog4j2.configurationFile=log4j2-info.properties"]}
:runner {:main-opts ["-m" "lazytest.main"]}
:jdk11 {}
:jdk17 {}
:jdk21 {:extra-deps {;; only need the XTDB JDBC module:
com.xtdb/xtdb-jdbc {:mvn/version "2.0.0-beta7"}}}
:jdk24 {:jvm-opts [;; for SQLite on JDK 24 locally
"--enable-native-access=ALL-UNNAMED"]}}}
org.mariadb.jdbc/mariadb-java-client {:mvn/version "2.7.4"}
mysql/mysql-connector-java {:mvn/version "8.0.27"}
org.postgresql/postgresql {:mvn/version "42.3.1"}
io.zonky.test/embedded-postgres {:mvn/version "1.3.1"}
;; 14.1.0 doesn't spin up fast enough: tests timeout at startup after 10s
io.zonky.test.postgres/embedded-postgres-binaries-darwin-amd64 {:mvn/version "13.4.0"}
io.zonky.test.postgres/embedded-postgres-binaries-linux-amd64 {:mvn/version "13.4.0"}
io.zonky.test.postgres/embedded-postgres-binaries-windows-amd64 {:mvn/version "13.4.0"}
org.xerial/sqlite-jdbc {:mvn/version "3.36.0.3"}
com.microsoft.sqlserver/mssql-jdbc {:mvn/version "9.4.1.jre8"}
;; supplementary test stuff
;; use log4j 2.x:
org.apache.logging.log4j/log4j-api {:mvn/version "2.17.1"}
;; bridge into log4j:
org.apache.logging.log4j/log4j-1.2-api {:mvn/version "2.17.1"}
org.apache.logging.log4j/log4j-jcl {:mvn/version "2.17.1"}
org.apache.logging.log4j/log4j-jul {:mvn/version "2.17.1"}
org.apache.logging.log4j/log4j-slf4j-impl {:mvn/version "2.17.1"}}
:jvm-opts ["-Dlog4j2.configurationFile=log4j2-info.properties"]
:exec-fn cognitect.test-runner.api/test}}}

View file

@ -13,10 +13,10 @@ Although `get-datasource` does not accept options, the "db spec" hash map passed
* `:dbname-separator` -- an optional string that can be used to override the `/` or `:` that is normally placed in front of the database name in the JDBC URL,
* `:host` -- an optional string that identifies the IP address or hostname of the server on which the database is running; the default is `"127.0.0.1"`; if `:none` is specified, `next.jdbc` will assume this is for a local database and will omit the host/port segment of the JDBC URL,
* `:host-prefix` -- an optional string that can be used to override the `//` that is normally placed in front of the IP address or hostname in the JDBC URL,
* `:port` -- an optional integer that identifies the port on which the database is running; for common database types, `next.jdbc` knows the default so this should only be needed for non-standard setups or "exotic" database types; if `:none` is specified, `next.jdbc` will omit the port segment of the JDBC URL,
* `:port` -- an optional integer that identifies the port on which the database is running; for common database types, `next.jdbc` knows the default so this should only be needed for non-standard setups or "exotic" database types,
* `:property-separator` -- an optional string that can be used to override the separators used in `next.jdbc.connection/jdbc-url` for the properties (after the initial JDBC URL portion); by default `?` and `&` are used to build JDBC URLs with properties; for SQL Server drivers (both MS and jTDS) `:property-separator ";"` is used, so this option should only be necessary when you are specifying "unusual" databases that `next.jdbc` does not already know about,
* `:classname` -- an optional string that identifies the name of the JDBC driver class to be used for the connection; for common database types, `next.jdbc` knows the default so this should only be needed for "exotic" database types,
* `:user` -- an optional string that identifies the database username to be used when authenticating (NOTE: HikariCP needs `:username` instead; see below),
* `:user` -- an optional string that identifies the database username to be used when authenticating,
* `:password` -- an optional string that identifies the database password to be used when authenticating.
If you already have a JDBC URL, you can either specify that string _instead_ of a "db spec" hash map or, if you need additional properties passed to the JDBC driver, you can use a hash map containing `:jdbcUrl`, specifying the JDBC URL, and any properties you need as additional keys in the hash map.
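For example, a minimal sketch of the three forms, assuming the usual `jdbc` alias and placeholder credentials:

```clojure
;; a "db spec" hash map:
(jdbc/get-datasource {:dbtype "postgresql" :dbname "thedb"
                      :user "dbuser" :password "secret"})

;; a plain JDBC URL string:
(jdbc/get-datasource "jdbc:postgresql://127.0.0.1:5432/thedb?user=dbuser&password=secret")

;; a JDBC URL plus extra properties via :jdbcUrl:
(jdbc/get-datasource {:jdbcUrl "jdbc:postgresql://127.0.0.1:5432/thedb"
                      :user "dbuser" :password "secret"})
```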
@ -33,23 +33,16 @@ Any path that calls `get-connection` will accept the following options:
If you need additional options set on a connection, you can either use Java interop to set them directly, or provide them as part of the "db spec" hash map passed to `get-datasource` (although then they will apply to _all_ connections obtained from that datasource).
Additional options passed are set as `java.util.Properties` and, by default, are coerced to strings.
If you are working with a driver that requires a non-string value for a property (such as the Snowflake driver), you can provide a `:next.jdbc/as-is-properties` option containing a sequence of options that should be added as-is, rather than coerced to strings.
> Note: If `plan`, `execute!`, or `execute-one!` are passed a `DataSource`, a "db spec" hash map, or a JDBC URL string, they will call `get-connection`, so they will accept the above options in those cases.
## Generating SQL
Except for `query` (which is simply an alias for `execute!`), all the "friendly" SQL functions accept the following options (in addition to all the options that `plan`, `execute!`, and `execute-one!` can accept):
* `:table-fn` -- the quoting function to be used on the string that identifies the table name, if provided; this also applies to assumed table names when `nav`igating schemas,
* `:column-fn` -- the quoting function to be used on any string that identifies a column name, if provided; this also applies to the reducing function context over `plan` and to assumed foreign key column names when `nav`igating schemas.
* `:table-fn` -- the quoting function to be used on the string that identifies the table name, if provided,
* `:column-fn` -- the quoting function to be used on any string that identifies a column name, if provided.
They also support a `:suffix` argument which can be used to specify a SQL string that should be appended to the generated SQL string before executing it, e.g., `:suffix "FOR UPDATE"` or, for an `insert!` call `:suffix "RETURNING *"`.
The latter is particularly useful for databases, such as SQLite these days,
which do not support calling `.getGeneratedKeys()` on `PreparedStatement` objects,
so you cannot use `:return-generated-keys` to get back the keys -- you must
use `RETURNING *`.
They also support a `:suffix` argument which can be used to specify a SQL string that should be appended to the generated SQL string before executing it, e.g., `:suffix "FOR UPDATE"`.
In addition, `find-by-keys` accepts the following options (see its docstring for more details):
@ -61,10 +54,6 @@ In the simple case, the `:columns` option expects a vector of keywords and each
> Note: `get-by-id` accepts the same options as `find-by-keys` but it will only ever produce one row, as a hash map, so sort order and pagination are less applicable, although `:columns` may be useful.
As of 1.3.925, `aggregate-by-keys` exists as a wrapper around `find-by-keys`
that accepts the same options as `find-by-keys` except that `:columns` may not
be specified (since it is used to add the aggregate to the query).
## Generating Rows and Result Sets
Any function that might realize a row or a result set will accept:
@ -72,23 +61,11 @@ Any function that might realize a row or a result set will accept:
* `:builder-fn` -- a function that implements the `RowBuilder` and `ResultSetBuilder` protocols; strictly speaking, `plan` and `execute-one!` only need `RowBuilder` to be implemented (and `plan` only needs that if it actually has to realize a row) but most generation functions will implement both for ease of use.
* `:label-fn` -- if `:builder-fn` is specified as one of `next.jdbc.result-set`'s `as-modified-*` builders, this option must be present and should specify a string-to-string transformation that will be applied to the column label for each returned column name.
* `:qualifier-fn` -- if `:builder-fn` is specified as one of `next.jdbc.result-set`'s `as-modified-*` builders, this option should specify a string-to-string transformation that will be applied to the table name for each returned column name. It will be called with an empty string if the table name is not available. It can be omitted for the `as-unqualified-modified-*` variants.
* `:column-fn` -- if present, applied to each column name before looking up the column in the `ResultSet` to get that column's value.
In addition, `execute!` accepts the `:multi-rs true` option to return multiple result sets -- as a vector of result sets.
> Note: Subject to the caveats above about `:builder-fn`, that means that `plan`, `execute!`, `execute-one!`, and the "friendly" SQL functions will all accept these options for generating rows and result sets.
## Datafying & Navigating Rows and Result Sets
Any function that produces a result set will accept the following options
that modify the behavior of `datafy` and `nav` applied to the rows in that
result set:
* `:schema` -- override the conventions for identifying foreign keys and the related (primary) keys in the tables to which they refer, on a per table/column basis; can also be used to indicate a fk relationship is one-to-many or many-to-many rather than one-to-one or one-to-many,
* `:schema-opts` -- override the default conventions for identifying foreign keys and the related (primary) keys in the tables to which they refer, as a whole.
See [`datafy`, `nav`, and `:schema`](/doc/datafy-nav-and-schema.md) for more details.
## Statements & Prepared Statements
Any function that creates a `Statement` or a `PreparedStatement` will accept the following options (see below for additional options for `PreparedStatement`):
@ -110,7 +87,6 @@ Any function that creates a `PreparedStatement` will additionally accept the fol
* `:return-keys` -- a truthy value asks the JDBC driver to return any generated keys created by the operation; it can be `true` or it can be a vector of keywords identifying column names that should be returned.
Not all databases or drivers support all of these options, or all values for any given option. If `:return-keys` is a vector of column names and that is not supported, `next.jdbc` will attempt a generic "return generated keys" option instead. If that is not supported, `next.jdbc` will fall back to a regular SQL operation. If other options are not supported, you may get a `SQLException`.
You may need to use `RETURNING *` on `INSERT` statements instead of using `:return-keys` with some database drivers.
> Note: If `plan`, `execute!`, or `execute-one!` are passed a `DataSource`, a "db spec" hash map, or a JDBC URL string, they will call `prepare` to create a `PreparedStatement`, so they will accept the above options in those cases.
@ -118,13 +94,13 @@ In addition to the above, `next.jdbc/execute-batch!` (which may create a `Prepar
* `:batch-size` -- an integer that determines how to partition the parameter groups for submitting to the database in batches,
* `:large` -- a Boolean flag that indicates whether the batch will produce large update counts (`long` rather than `int` values),
* `:return-generated-keys` -- a Boolean flag that indicates whether `.getGeneratedKeys` should be called on the `PreparedStatement` after each batch is executed (if `true`, `execute-batch!` will return a vector of hash maps containing generated keys). Some databases do not support this and you need to use `RETURNING *` on `INSERT` statements instead.
* `:return-generated-keys` -- a Boolean flag that indicates whether `.getGeneratedKeys` should be called on the `PreparedStatement` after each batch is executed (if `true`, `execute-batch!` will return a vector of hash maps containing generated keys).
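A minimal sketch of combining these options, assuming an existing datasource `ds` and the `address` table used elsewhere in these docs:

```clojure
(jdbc/execute-batch! ds
                     "INSERT INTO address (name,email) VALUES (?,?)"
                     [["Stella" "stella@artois.beer"]
                      ["Waldo" "waldo@lagunitas.beer"]]
                     {:batch-size 50 :return-generated-keys true})
```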
## Transactions
The `transact` function and `with-transaction` (`+options`) macro accept the following options:
The `transact` function and `with-transaction` macro accept the following options:
* `:isolation` -- a keyword that identifies the isolation to be used for this transaction: `:none`, `:read-committed`, `:read-uncommitted`, `:repeatable-read`, or `:serializable`; these represent increasingly strict levels of transaction isolation and may not all be available depending on the database and/or JDBC driver being used,
* `:isolation` -- a keyword that identifies the isolation to be used for this transaction: `:none`, `:read-committed`, `:read-uncommitted`, `:repeatedable-read`, or `:serializable`; these represent increasingly strict levels of transaction isolation and may not all be available depending on the database and/or JDBC driver being used,
* `:read-only` -- a `Boolean` that indicates whether the transaction should be read-only or not (the default),
* `:rollback-only` -- a `Boolean` that indicates whether the transaction should commit on success (the default) or rollback.
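A minimal sketch of passing these options to `with-transaction`, assuming an existing datasource `ds`:

```clojure
(jdbc/with-transaction [tx ds {:isolation :serializable :rollback-only true}]
  ;; work done on tx is rolled back when the body completes, because of :rollback-only
  (jdbc/execute! tx ["delete from address where id = ?" 42]))
```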

View file

@ -1,6 +1,6 @@
# `datafy`, `nav`, and the `:schema` option
Clojure 1.10 introduced a new namespace, [`clojure.datafy`](http://clojure.github.io/clojure/clojure.datafy-api.html), and two new protocols (`Datafiable` and `Navigable`) that allow for generalized, lazy navigation around data structures. Cognitect also released REBL (now Nubank's [Morse](https://github.com/nubank/morse)) -- a graphical, interactive tool for browsing Clojure data structures, based on the new `datafy` and `nav` functions.
Clojure 1.10 introduced a new namespace, [`clojure.datafy`](http://clojure.github.io/clojure/clojure.datafy-api.html), and two new protocols (`Datafiable` and `Navigable`) that allow for generalized, lazy navigation around data structures. Cognitect also released [REBL](http://rebl.cognitect.com/) -- a graphical, interactive tool for browsing Clojure data structures, based on the new `datafy` and `nav` functions.
Shortly after REBL's release, I added experimental support to `clojure.java.jdbc` for `datafy` and `nav` that supported lazy navigation through result sets into foreign key relationships and connected rows and tables. `next.jdbc` bakes that support into result sets produced by `execute!` and `execute-one!`.
@ -13,8 +13,8 @@ Additional tools that understand `datafy` and `nav` include [Portal](https://git
Here's how the process works, for result sets produced by `next.jdbc`:
* `execute!` and `execute-one!` produce result sets containing rows that are `Datafiable`,
* Tools like Portal, Reveal, and Morse can call `datafy` on result sets to render them as "pure data" (which they already are, but this makes them also `Navigable`),
* Tools like Portal, Reveal, and Morse allow users to "drill down" into elements of rows in the "pure data" result set, using `nav`,
* Tools like Portal, Reveal, and REBL can call `datafy` on result sets to render them as "pure data" (which they already are, but this makes them also `Navigable`),
* Tools like Portal, Reveal, and REBL allow users to "drill down" into elements of rows in the "pure data" result set, using `nav`,
* If a column in a row represents a foreign key into another table, calling `nav` will fetch the related row(s),
* Those can in turn be `datafy`'d and `nav`'d to continue drilling down through connected data in the database.
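Outside of those tools, the same drill-down can be driven from code. A minimal sketch, assuming a `contact` table whose `addressid` column is a foreign key into `address` (as in the `:schema` example below):

```clojure
(require '[clojure.datafy :refer [datafy nav]])

(let [row (-> (jdbc/execute-one! ds ["select * from contact where id = ?" 1])
              (datafy))]
  ;; nav on the foreign key column fetches the related address row
  (nav row :contact/addressid (:contact/addressid row)))
```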
@ -26,29 +26,6 @@ By default, `next.jdbc` assumes that a column named `<something>id` or `<somethi
You can override this default behavior for any column in any table by providing a `:schema` option that is a hash map whose keys are column names (usually the table-qualified keywords that `next.jdbc` produces by default) and whose values are table-qualified keywords, optionally wrapped in vectors, that identity the name of the table to which that column is a foreign key and the name of the key column within that table.
As of 1.3.909, you can also override this behavior via the `:schema-opts`
option. This is a hash map whose keys can be:
* `:fk-suffix` -- a string used instead of `"id"` to identify foreign keys,
* `:pk` -- a string used instead of `"id"` for the primary key column in the target table,
* `:pk-fn` -- a function that takes the table name and the value of `:pk` and returns the name of the primary key column in the target table, instead of just using the value of `:pk` (the default is effectively `(constantly <pk>)`).
For `:fk-suffix`, the `_` is still permitted and optional in the column name,
so if you specified `:schema-opts {:fk-suffix "fk"}` then `addressfk` and
`address_fk` would both be treated as foreign keys into the `address` table.
_Note: as of 1.3.939, `-` is permitted in key names (in addition to `_`) so that kebab result set builders work as expected._
The `:pk-fn` can use the table name to determine the primary key column name
for exceptions to the `:pk` value. For example, if you have a table `address`
with a primary key column `address_id` instead of `id`, you could use:
```clojure
:pk-fn (fn [table pk]
(if (= "address" table)
"address_id"
pk))
```
The default behavior in the example above is equivalent to this `:schema` value:
```clojure
@ -58,16 +35,6 @@ The default behavior in the example above is equivalent to this `:schema` value:
{:schema {:contact/addressid :address/id}})
```
or these `:schema-opts` values:
```clojure
(jdbc/execute! ds
["select * from contact where city = ?" "San Francisco"]
;; a one-to-one or many-to-one relationship
{:schema-opts {:fk-suffix "id" :pk "id"
:pk-fn (constantly "id")}})
```
If you had a table to track the valid/bouncing status of email addresses over time, `:deliverability`, where `email` is the non-unique key, you could provide automatic navigation into that using:
```clojure
@ -78,14 +45,9 @@ If you had a table to track the valid/bouncing status of email addresses over ti
:address/email [:deliverability/email]}})
```
Since this relies on a foreign key that does not follow a standard suffix
pattern, there is no comparable `:schema-opts` version. In addition, the
`:schema-opts` approach cannot designate a one-to-many or many-to-many
relationship.
When you indicate a `*-to-many` relationship, by wrapping the foreign table/key in a vector, `next.jdbc`'s implementation of `nav` will fetch a multi-row result set from the target table.
If you use foreign key constraints in your database, you could probably generate this `:schema` data structure automatically from the metadata in your database. Similarly, if you use a library that depends on an entity relationship map (such as [seql](https://github.com/exoscale/seql) or [walkable](https://walkable.gitlab.io/)), then you could probably generate this `:schema` data structure from that entity map.
If you use foreign key constraints in your database, you could probably generate this `:schema` data structure automatically from the metadata in your database. Similarly, if you use a library that depends on an entity relationship map (such as [seql](https://exoscale.github.io/seql/) or [walkable](https://walkable.gitlab.io/)), then you could probably generate this `:schema` data structure from that entity map.
### Behind The Scenes
@ -93,11 +55,7 @@ Making rows datafiable is implemented by adding metadata to each row with a key
When called (`datafy` on a row), it adds metadata to the row with a key of `clojure.core.protocols/nav` and another function as the value. That function also closes over the connectable and options passed in.
When that is called (`nav` on a row, column name, and column value), if a
`:schema` entry exists for that column or it matches the convention described
above (either by default or via `:schema-opts`), then it will fetch row(s)
using `next.jdbc`'s `Executable` functions `-execute-one` or `-execute-all`,
passing in the connectable and options closed over.
When that is called (`nav` on a row, column name, and column value), if a `:schema` entry exists for that column or it matches the default convention described above, then it will fetch row(s) using `next.jdbc`'s `Executable` functions `-execute-one` or `-execute-all`, passing in the connectable and options closed over.
The protocol `next.jdbc.result-set/DatafiableRow` has a default implementation of `datafiable-row` for `clojure.lang.IObj` that just adds the metadata to support `datafy`. There is also an implementation baked into the result set handling behind `plan` so that you can call `datafiable-row` directly during reduction and get a fully-realized row that can be `datafy`'d (and then `nav`igated).
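A minimal sketch of calling `datafiable-row` during reduction over `plan`, assuming an existing datasource `ds` and an illustrative `contact` table:

```clojure
(require '[next.jdbc.result-set :as rs])

;; each element is a fully-realized row that supports datafy/nav
(into []
      (map #(rs/datafiable-row % ds {}))
      (jdbc/plan ds ["select * from contact"]))
```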

View file

@ -25,12 +25,6 @@ These functions are described in more detail below. They are deliberately simple
If you prefer to write your SQL separately from your code, take a look at [HugSQL](https://github.com/layerware/hugsql) -- [HugSQL documentation](https://www.hugsql.org/) -- which has a `next.jdbc` adapter, as of version 0.5.1. See below for a "[quick start](#hugsql-quick-start)" for using HugSQL with `next.jdbc`.
As of 1.3.925, `aggregate-by-keys` exists as a wrapper around `find-by-keys`
that takes an aggregate SQL expression and returns a single value (the aggregate).
It accepts the same options as `find-by-keys` except that `:columns` may not be
specified (since it is used to add the aggregate to the query).
## `insert!`
Given a table name (as a keyword) and a hash map of column names and values, this performs a single row insertion into the database:
@ -40,22 +34,11 @@ Given a table name (as a keyword) and a hash map of column names and values, thi
;; equivalent to
(jdbc/execute-one! ds ["INSERT INTO address (name,email) VALUES (?,?)"
"A.Person" "albert@person.org"] {:return-keys true})
;; some databases may require this instead
(jdbc/execute-one! ds ["INSERT INTO address (name,email) VALUES (?,?) RETURNING *"
"A.Person" "albert@person.org"])
;; which you can achieve with the :suffix option
(sql/insert! ds :address {:name "A. Person" :email "albert@person.org"}
{:suffix "RETURNING *"})
```
If you have multiple rows (hash maps) to insert and they all have the same
set of keys, you can use `insert-multi!` instead (see below), which performs
a single multi-row insertion and will generally be faster.
## `insert-multi!`
Given a table name (as a keyword), a vector of column names, and a vector of
row value vectors, this performs a single multi-row insertion into the database:
Given a table name (as a keyword), a vector of column names, and a vector of row value vectors, this performs a multi-row insertion into the database:
```clojure
(sql/insert-multi! ds :address
@ -70,11 +53,7 @@ row value vectors, this performs a single multi-row insertion into the database:
"Aunt Sally" "sour@lagunitas.beer"] {:return-keys true})
```
All the row vectors must be the same length, and must match the number of
columns specified.
Given a table name (as a keyword) and a vector of hash maps, this performs a
single multi-row insertion into the database:
Given a table name (as a keyword) and a vector of hash maps, this performs a multi-row insertion into the database:
```clojure
(sql/insert-multi! ds :address
@ -88,15 +67,7 @@ single multi-row insertion into the database:
"Aunt Sally" "sour@lagunitas.beer"] {:return-keys true})
```
All the hash maps must have the same set of keys, so that the vector of hash
maps can be converted to a vector of column names and a vector of row value
vectors, as above, so a single multi-row insertion can be performed.
If you wish to insert multiple hash maps that do not have identical keys, you
need to iterate over `insert!` and insert one row at a time, which will
generally be much slower.
> Note: both of these expand to a single SQL statement with placeholders for every
> Note: this expands to a single SQL statement with placeholders for every
value being inserted -- for large sets of rows, this may exceed the limits
on SQL string size and/or number of parameters for your JDBC driver or your
database. Several databases have a limit of 1,000 parameter placeholders.
@ -117,8 +88,8 @@ will use `execute-batch!` under the hood, instead of `execute!`, as follows:
{:batch true})
;; equivalent to
(jdbc/execute-batch! ds
"INSERT INTO address (name,email) VALUES (?,?)"
[["Stella" "stella@artois.beer"]
["INSERT INTO address (name,email) VALUES (?,?)"
["Stella" "stella@artois.beer"]
["Waldo" "waldo@lagunitas.beer"]
["Aunt Sally" "sour@lagunitas.beer"]]
{:return-keys true :return-generated-keys true})
@ -131,14 +102,14 @@ will use `execute-batch!` under the hood, instead of `execute!`, as follows:
{:batch true})
;; equivalent to
(jdbc/execute-batch! ds
"INSERT INTO address (name,email) VALUES (?,?)"
[["Stella" "stella@artois.beer"]
["INSERT INTO address (name,email) VALUES (?,?)"
["Stella" "stella@artois.beer"]
["Waldo" "waldo@lagunitas.beer"]
["Aunt Sally" "sour@lagunitas.beer"]]
{:return-keys true :return-generated-keys true})
```
> Note: not all databases or drivers support returning generated keys like this -- see [**Batched Parameters**](https://cljdoc.org/d/com.github.seancorfield/next.jdbc/CURRENT/doc/getting-started/prepared-statements#caveats) for caveats and possible database-specific behaviors. You may need `RETURNING *` in your SQL instead.
See [**Batched Parameters**](https://cljdoc.org/d/com.github.seancorfield/next.jdbc/CURRENT/doc/getting-started/prepared-statements#caveats) for caveats and possible database-specific behaviors.
## `query`
@ -191,25 +162,6 @@ Given a table name (as a keyword) and either a hash map of column names and valu
"Stella" "stella@artois.beer"])
```
While the hash map approach -- "query by example" -- is great for equality
comparisons, sometimes you need other types of comparisons. For example, you
might want to find all the rows where the email address ends in `.beer`:
```clojure
(sql/find-by-keys ds :address ["email LIKE ?" "%.beer"])
;; equivalent to
(jdbc/execute! ds ["SELECT * FROM address WHERE email LIKE ?" "%.beer"])
```
Or you may want to find all the rows where the name is one of a specific
set of values:
```clojure
(sql/find-by-keys ds :address ["name IN (?,?)" "Stella" "Waldo"])
;; equivalent to
(jdbc/execute! ds ["SELECT * FROM address WHERE name IN (?,?)" "Stella" "Waldo"])
```
The default behavior is to return all the columns in each row. You can specify a subset of columns to return using the `:columns` option. It takes a vector and each element of the vector can be:
* a simple keyword representing the column name (`:column-fn` will be applied, if provided),
@ -253,26 +205,6 @@ If you want to match all rows in a table -- perhaps with the pagination options
If no rows match, `find-by-keys` returns `[]`, just like `execute!`.
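A minimal sketch combining the column, ordering, and pagination options (`:limit` / `:offset` assume a MySQL- or PostgreSQL-style database; the column names are illustrative):

```clojure
(sql/find-by-keys ds :address
                  {:name "Stella"}
                  {:columns  [:id :email]
                   :order-by [[:id :desc]]
                   :limit    10
                   :offset   0})
```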
## `aggregate-by-keys`
Added in 1.3.925, this is a wrapper around `find-by-keys` that makes it easier
to perform aggregate queries:
```clojure
(sql/aggregate-by-keys ds :address "count(*)" {:name "Stella"
:email "stella@artois.beer"})
;; is roughly equivalent to
(-> (sql/find-by-keys ds :address {:name "Stella" :email "stella@artois.beer"}
{:columns [["count(*)" :next_jdbc_aggregate_123]]})
(first)
(get :next_jdbc_aggregate_123))
```
(where `:next_jdbc_aggregate_123` is a unique alias generated by `next.jdbc`,
derived from the aggregate expression string).
> Note: the SQL string provided for the aggregate is copied exactly as-is into the generated SQL -- you are responsible for ensuring it is legal SQL!
## `get-by-id`
Given a table name (as a keyword) and a primary key value, with an optional primary key column name, execute a query on the database:
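For example, a minimal sketch using the default `:id` primary key column:

```clojure
(sql/get-by-id ds :address 42)
;; roughly equivalent to
(jdbc/execute-one! ds ["SELECT * FROM address WHERE id = ?" 42])
```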
@ -313,14 +245,7 @@ These quoting functions can be provided to any of the friendly SQL functions abo
(sql/insert! ds :my-table {:some "data"} {:table-fn snake-case})
```
`next.jdbc` provides `snake-kebab-opts` and `unqualified-snake-kebab-opts` which are hash maps containing `:column-fn` and `:table-fn` that use the `->snake_case` function from the [camel-snake-kebab library](https://github.com/clj-commons/camel-snake-kebab/) which performs a more sophisticated transformation:
```clojure
;; transforms :my-table to my_table as above but will also transform
;; column names; in addition, it will perform the reverse transformation
;; on any results, e.g., turning MySQL's :GENERATED_KEY into :generated-key
(sql/insert! ds :my-table {:some "data"} jdbc/snake-kebab-opts)
```
`next.jdbc` provides `snake-kebab-opts` and `unqualified-snake-kebab-opts` which are hash maps containing `:column-fn` and `:table-fn` that use the `->snake_case` function from the [camel-snake-kebab library](https://github.com/clj-commons/camel-snake-kebab/) which performs a more sophisticated transformation.
> Note: The entity naming function is passed a string, the result of calling `name` on the keyword passed in. Also note that the default quoting functions do not handle schema-qualified names, such as `dbo.table_name` -- `sql-server` would produce `[dbo.table_name]` from that. Use the `schema` function to wrap the quoting function if you need that behavior, e.g., `{:table-fn (schema sql-server)}` which would produce `[dbo].[table_name]`.
@ -329,8 +254,8 @@ These quoting functions can be provided to any of the friendly SQL functions abo
Here's how to get up and running quickly with `next.jdbc` and HugSQL. For more detail, consult the [HugSQL documentation](https://www.hugsql.org/). Add the following dependencies to your project (in addition to `com.github.seancorfield/next.jdbc` and whichever JDBC drivers you need):
```clojure
com.layerware/hugsql-core {:mvn/version "0.5.3"}
com.layerware/hugsql-adapter-next-jdbc {:mvn/version "0.5.3"}
com.layerware/hugsql-core {:mvn/version "0.5.1"}
com.layerware/hugsql-adapter-next-jdbc {:mvn/version "0.5.1"}
```
_Check the HugSQL documentation for the latest versions to use!_

View file

@ -6,26 +6,25 @@ It is designed to work with Clojure 1.10 or later, supports `datafy`/`nav`, and
## Installation
**You must be using Clojure 1.10 or later.** 1.12.0 is the most recent stable version of Clojure (as of March 15th, 2024).
**You must be using Clojure 1.10 or later.** 1.10.3 is the most recent stable version of Clojure (as of March 4th, 2021).
You can add `next.jdbc` to your project with either:
```clojure
com.github.seancorfield/next.jdbc {:mvn/version "1.3.1002"}
com.github.seancorfield/next.jdbc {:mvn/version "1.2.796"}
```
for `deps.edn` or:
```clojure
[com.github.seancorfield/next.jdbc "1.3.1002"]
[com.github.seancorfield/next.jdbc "1.2.796"]
```
for `project.clj` or `build.boot`.
**In addition, you will need to add dependencies for the JDBC drivers you wish to use for whatever databases you are using. For example:**
* MySQL: `com.mysql/mysql-connector-j {:mvn/version "9.1.0"}` ([search for latest version](https://search.maven.org/artifact/com.mysql/mysql-connector-j))
* PostgreSQL: `org.postgresql/postgresql {:mvn/version "42.7.4"}` ([search for latest version](https://search.maven.org/artifact/org.postgresql/postgresql))
* Microsoft SQL Server: `com.microsoft.sqlserver/mssql-jdbc {:mvn/version "12.8.1.jre11"}` ([search for latest version](https://search.maven.org/artifact/com.microsoft.sqlserver/mssql-jdbc))
* Sqlite: `org.xerial/sqlite-jdbc {:mvn/version "3.47.1.0"}` ([search for latest version](https://search.maven.org/artifact/org.xerial/sqlite-jdbc))
* MySQL: `mysql/mysql-connector-java {:mvn/version "8.0.19"}` ([search for latest version](https://search.maven.org/artifact/mysql/mysql-connector-java))
* PostgreSQL: `org.postgresql/postgresql {:mvn/version "42.2.10"}` ([search for latest version](https://search.maven.org/artifact/org.postgresql/postgresql))
* Microsoft SQL Server: `com.microsoft.sqlserver/mssql-jdbc {:mvn/version "8.2.1.jre8"}` ([search for latest version](https://search.maven.org/artifact/com.microsoft.sqlserver/mssql-jdbc))
> Note: these are the versions that `next.jdbc` is tested against but there may be more recent versions and those should generally work too -- click the "search for latest version" link to see all available versions of those drivers on Maven Central. You can see the full list of drivers and versions that `next.jdbc` is tested against in [the project's `deps.edn` file](https://github.com/seancorfield/next-jdbc/blob/develop/deps.edn#L10-L27), but many other JDBC drivers for other databases should also work (e.g., Oracle, Red Shift).
@ -37,9 +36,9 @@ For the examples in this documentation, we will use a local H2 database on disk,
```clojure
;; deps.edn
{:deps {org.clojure/clojure {:mvn/version "1.12.0"}
com.github.seancorfield/next.jdbc {:mvn/version "1.3.1002"}
com.h2database/h2 {:mvn/version "2.3.232"}}}
{:deps {org.clojure/clojure {:mvn/version "1.10.3"}
com.github.seancorfield/next.jdbc {:mvn/version "1.2.796"}
com.h2database/h2 {:mvn/version "1.4.199"}}}
```
### Create & Populate a Database
@ -48,7 +47,7 @@ In this REPL session, we'll define an H2 datasource, create a database with a si
```clojure
> clj
Clojure 1.12.0
Clojure 1.10.3
user=> (require '[next.jdbc :as jdbc])
nil
user=> (def db {:dbtype "h2" :dbname "example"})
@ -77,11 +76,6 @@ We described the database with just `:dbtype` and `:dbname` because it is create
> Note: You can see the full list of `:dbtype` values supported in [next.jdbc/get-datasource](https://cljdoc.org/d/com.github.seancorfield/next.jdbc/CURRENT/api/next.jdbc#get-datasource)'s docstring. If you need this programmatically, you can get it from the [next.jdbc.connection/dbtypes](https://cljdoc.org/d/com.github.seancorfield/next.jdbc/CURRENT/api/next.jdbc.connection#dbtypes) hash map. If those lists differ, the hash map is the definitive list (and I'll need to fix the docstring!). The docstring of that Var explains how to tell `next.jdbc` about additional databases.
The hash map can contain arbitrary keys and values: any keys not specifically
recognized by `next.jdbc` will be passed through to the JDBC driver as part
of the connection string. For example, if you specify `:useSSL false`, then
the connection string will have `&useSSL=false` appended to it.
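A minimal sketch, using the `:useSSL` example above with placeholder credentials:

```clojure
;; :useSSL is not a key next.jdbc recognizes, so it flows through
;; to the JDBC connection string as &useSSL=false
(jdbc/get-datasource {:dbtype "mysql" :dbname "thedb"
                      :user "dbuser" :password "secret"
                      :useSSL false})
```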
If you already have a JDBC URL (string), you can use that as-is instead of the db-spec hash map. If you have a JDBC URL and still need additional options passed into the JDBC driver, you can use a hash map with the `:jdbcUrl` key specifying the string and whatever additional options you need.
### `execute!` & `execute-one!`
@ -134,7 +128,6 @@ user=>
Relying on the default result set builder -- and table-qualified column names -- is the recommended approach to take, if possible, with a few caveats:
* MS SQL Server produces unqualified column names by default (see [**Tips & Tricks**](/doc/tips-and-tricks.md) for how to get table names back from MS SQL Server),
* Oracle's JDBC driver doesn't support `.getTableName()` so it will only produce unqualified column names (also mentioned in **Tips & Tricks**),
* PostgreSQL's JDBC driver performs an extra SQL query to get the necessary metadata, so there is some overhead to using qualified column names (also mentioned in **Tips & Tricks**),
* If your SQL query joins tables in a way that produces duplicate column names, and you use unqualified column names, then those duplicated column names will conflict and you will get only one of them in your result -- use aliases in SQL (`as`) to make the column names distinct,
* If your SQL query joins a table to itself under different aliases, the _qualified_ column names will conflict because they are based on the underlying table name provided by the JDBC driver rather the alias you used in your query -- again, use aliases in SQL to make those column names distinct.
@ -161,9 +154,6 @@ In addition, two pre-built option hash maps are available in `next.jdbc`, that l
* `snake-kebab-opts` -- provides `:column-fn`, `:table-fn`, `:label-fn`, `:qualifier-fn`, and `:builder-fn` that will convert Clojure identifiers in `:kebab-case` to SQL entities in `snake_case` and will produce result sets with qualified `:kebab-case` names from SQL entities that use `snake_case`,
* `unqualified-snake-kebab-opts` -- provides `:column-fn`, `:table-fn`, `:label-fn`, `:qualifier-fn`, and `:builder-fn` that will convert Clojure identifiers in `:kebab-case` to SQL entities in `snake_case` and will produce result sets with _unqualified_ `:kebab-case` names from SQL entities that use `snake_case`.
You can `assoc` any additional options you need into these pre-built option hash maps
and pass the combined options into any of this library's functions.
> Note: Using `camel-snake-kebab` might also be helpful if your database has `camelCase` table and column names, although you'll have to provide `:column-fn` and `:table-fn` yourself as `->camelCase` from that library. Either way, consider relying on the _default_ result set builder first and avoid converting column and table names (see [Advantages of 'snake case': portability and ubiquity](https://vvvvalvalval.github.io/posts/clojure-key-namespacing-convention-considered-harmful.html#advantages_of_'snake_case':_portability_and_ubiquity) for an interesting discussion on kebab-case vs snake_case -- I do not agree with all of the author's points in that article, particularly his position against qualified keywords, but his argument for retaining snake_case around system boundaries is compelling).
@ -199,10 +189,7 @@ user=> (reduce
14.67M
```
The call to `jdbc/plan` returns an `IReduceInit` object (a "reducible collection" that requires an initial value) but does not actually run the SQL.
Only when the returned object is reduced is the connection obtained from the data source, the SQL executed, and the computation performed. The connection is closed automatically when the reduction is complete. The `row` in the reduction is an abstraction over the underlying (mutable) `ResultSet` object -- it is not a Clojure data structure. Because of that, you can simply access the columns via their SQL labels as shown -- you do not need to use the column-qualified name, and you do not need to worry about the database returning uppercase column names (SQL labels are not case sensitive).
> Note: if you want a column name transformation to be applied here, specify `:column-fn` as an option to the `plan` call.
The call to `jdbc/plan` returns an `IReduceInit` object but does not actually run the SQL. Only when the returned object is reduced is the connection obtained from the data source, the SQL executed, and the computation performed. The connection is closed automatically when the reduction is complete. The `row` in the reduction is an abstraction over the underlying (mutable) `ResultSet` object -- it is not a Clojure data structure. Because of that, you can simply access the columns via their SQL labels as shown -- you do not need to use the column-qualified name, and you do not need to worry about the database returning uppercase column names (SQL labels are not case sensitive).
Here's the same computation rewritten using `transduce`:
@ -247,18 +234,6 @@ user=> (into #{}
#{"apple" "banana" "cucumber"}
```
If you want to process the rows purely for side-effects, without a result, you
can use `run!`:
```clojure
user=> (run! #(println (:product %))
(jdbc/plan ds ["select * from invoice where customer_id = ?" 100]))
apple
banana
cucumber
nil
```
Any operation that can perform key-based lookup can be used here without creating hash maps from the rows: `get`, `contains?`, `find` (returns a `MapEntry` of whatever key you requested and the corresponding column value), or direct keyword access as shown above. Any operation that would require a Clojure hash map, such as `assoc` or anything that invokes `seq` (`keys`, `vals`), will cause the full row to be expanded into a hash map, such as produced by `execute!` or `execute-one!`, which implements `Datafiable` and `Navigable` and supports lazy navigation via foreign keys, explained in [`datafy`, `nav`, and the `:schema` option](/doc/datafy-nav-and-schema.md).
This means that `select-keys` can be used to create a regular Clojure hash map from (a subset of) columns in the row, without realizing the row; the resulting hash map will not implement `Datafiable` or `Navigable`.
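For example, a minimal sketch over an illustrative `address` table:

```clojure
;; plain hash maps containing just :name and :email, without realizing full rows
(into []
      (map #(select-keys % [:name :email]))
      (jdbc/plan ds ["select * from address"]))
```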
@ -313,8 +288,6 @@ As of 1.1.588, two helper functions are available to make some `plan` operations
* `next.jdbc.plan/select-one!` -- reduces over `plan` and returns part of just the first row,
* `next.jdbc.plan/select!` -- reduces over `plan` and returns a sequence of parts of each row.
> Note: in both those cases, an appropriate initial value is supplied to the `reduce` (since `plan` returns an `IReduceInit` object).
`select!` accepts a vector of column names to extract or a function to apply to each row. It is equivalent to the following:
```clojure
@ -454,7 +427,7 @@ You can read more about [working with transactions](/doc/transactions.md) furthe
(jdbc/execute! con-opts ...) ; auto-committed
(jdbc/with-transaction [tx con-opts] ; will commit or rollback this group:
(let [tx-opts (jdbc/with-options tx (:options con-opts))]
(let [tx-opts (jdbc/with-options tx (:options con-opts)]
(jdbc/execute! tx-opts ...)
(jdbc/execute! tx-opts ...)
(into [] (map :column) (jdbc/plan tx-opts ...))))
@ -462,11 +435,6 @@ You can read more about [working with transactions](/doc/transactions.md) furthe
(jdbc/execute! con-opts ...))) ; auto-committed
```
As of 1.3.894, you can use `next.jdbc/with-transaction+options` instead,
which will automatically rewrap the `Connection` with the options from the
initial transactable. Be aware that means you cannot use Java interop on the
new connectable because it is no longer a plain Java `java.sql.Connection` object.
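A minimal sketch of the `+options` variant, continuing the `con-opts` example above:

```clojure
(jdbc/with-transaction+options [tx con-opts]
  ;; tx is rewrapped with the options from con-opts, so no Java interop on tx here
  (jdbc/execute! tx ["select * from address"]))
```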
### Prepared Statement Caveat
Not all databases support using a `PreparedStatement` for every type of SQL operation. You might have to create a `java.sql.Statement` instead, directly from a `java.sql.Connection` and use that, without parameters, in `plan`, `execute!`, or `execute-one!`. See the following example:
@ -487,9 +455,9 @@ Not all databases support using a `PreparedStatement` for every type of SQL oper
First, you need to add the connection pooling library as a dependency, e.g.,
```clojure
com.zaxxer/HikariCP {:mvn/version "6.2.1"}
com.zaxxer/HikariCP {:mvn/version "3.3.1"}
;; or:
com.mchange/c3p0 {:mvn/version "0.10.1"}
com.mchange/c3p0 {:mvn/version "0.9.5.4"}
```
_Check those libraries' documentation for the latest version to use!_
@ -518,19 +486,6 @@ In addition, for HikariCP, you can specify properties to be applied to the under
_(under the hood, `java.data` converts that hash map to a `java.util.Properties` object with `String` keys and `String` values)_
If you need to pass in extra connection URL parameters, it can be easier to use
`next.jdbc.connection/jdbc-url` to construct the URL, e.g.,
```clojure
(connection/->pool com.zaxxer.hikari.HikariDataSource
{:jdbcUrl
(connection/jdbc-url {:dbtype "mysql" :dbname "thedb" :useSSL false})
:username "dbuser" :password "secret"})
```
Here we pass `:useSSL false` to `jdbc-url` so that it ends up in the
connection string, but pass `:username` and `:password` for the pool itself.
> Note: both HikariCP and c3p0 defer validation of the settings until a connection is requested. If you want to ensure that your datasource is set up correctly, and the database is reachable, when you first create the connection pool, you will need to call `jdbc/get-connection` on it (and then close that connection and return it to the pool). This will also ensure that the pool is fully initialized. See the examples below.
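A minimal sketch of creating a HikariCP pool and forcing that eager validation (the db-spec is illustrative):

```clojure
(require '[next.jdbc :as jdbc]
         '[next.jdbc.connection :as connection])
(import 'com.zaxxer.hikari.HikariDataSource)

(def ds (connection/->pool HikariDataSource
                           {:dbtype "h2" :dbname "example"
                            :username "sa" :password ""}))

;; request (and close) one connection so the pool is initialized and validated up front
(.close (jdbc/get-connection ds))
```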
Some important notes regarding HikariCP:
@ -595,12 +550,6 @@ If you are using [Component](https://github.com/stuartsierra/component), a conne
(component/stop ds)))))
```
If you want to either modify the connection-pooled datasource after it is
created, or want to perform some database initialization, you can pass a
function as `:init-fn` in the `db-spec` hash map. The `component` function
will arrange for that initialization function to be invoked on the newly-created
datasource whenever `start` is called on the Component returned.
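A minimal sketch, assuming `db-spec` is the pool configuration hash map and the DDL is illustrative:

```clojure
(def db-component
  (connection/component
   HikariDataSource
   (assoc db-spec
          :init-fn (fn [datasource]
                     ;; runs against the freshly-created datasource on each start
                     (jdbc/execute! datasource
                                    ["create table if not exists status (id int)"])))))
;; (component/start db-component) creates the pool and invokes :init-fn
```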
## Working with Additional Data Types
By default, `next.jdbc` relies on the JDBC driver to handle all data type conversions when reading from a result set (to produce Clojure values from SQL values) or setting parameters (to produce SQL values from Clojure values). Sometimes that means that you will get back a database-specific Java object that would need to be manually converted to a Clojure data structure, or that certain database column types require you to manually construct the appropriate database-specific Java object to pass into a SQL operation. You can usually automate those conversions using either the [`ReadableColumn` protocol](/doc/result-set-builders.md#readablecolumn) (for converting database-specific types to Clojure values) or the [`SettableParameter` protocol](/doc/prepared-statements.md#prepared-statement-parameters) (for converting Clojure values to database-specific types).
@ -643,14 +592,6 @@ if one is passed or create a new one if needed (and automatically close it after
> Note: to avoid confusion and/or incorrect usage, you cannot pass options to `on-connection` because they would be ignored in some cases (existing `Connection` or a wrapped `Connection`).
As of 1.3.894, if you want the options from a wrapped connectable to flow
through to the new connectable inside `on-connection`, you can use the
`on-connection+options` variant of the macro. This will automatically rewrap
the connectable produced with the options from the initial connectable.
Be aware that means you cannot
use plain Java interop inside the body of the macro because the connectable
is no longer a plain Java `java.sql.Connection` object.
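A minimal sketch, assuming `ds-opts` is a connectable wrapped via `with-options`:

```clojure
(jdbc/on-connection+options [conn ds-opts]
  ;; conn carries the options from ds-opts (so no direct Java interop on conn here)
  (jdbc/execute! conn ["select * from address"]))
```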
## Logging
Sometimes it is convenient to have database operations logged automatically. `next.jdbc/with-logging`

View file

@ -24,14 +24,14 @@ Although both libraries support transactions -- via `clojure.java.jdbc/with-db-t
via `next.jdbc/with-transaction` -- there are some important considerations when you are migrating:
* `clojure.java.jdbc/with-db-transaction` allows nested calls to be present but it tracks the "depth" of such calls and "nested" calls are simply ignored (because transactions do not actually nest in JDBC).
* `next.jdbc/with-transaction` will attempt to set up a transaction on an existing `Connection` if that is what it is passed (otherwise a new `Connection` is created and a new transaction set up on that). That means that if you have nested calls, the inner transaction will commit (or rollback) all the way to the outermost transaction. `next.jdbc` "trusts" the programmer to know what they are doing. You can bind `next.jdbc.transaction/*nested-tx*` to `:ignore` if you want the same behavior as `clojure.java.jdbc` where all nested calls are ignored and the outermost transaction is in full control. _Note that this is a per-thread "global" setting and not related to just a single connection, so you can't use this setting if you are working with multiple databases in the same dynamic thread context (`binding`)._
* `next.jdbc/with-transaction` will attempt to set up a transaction on an existing `Connection` if that is what it is passed (otherwise a new `Connection` is created and a new transaction set up on that). That means that if you have nested calls, the inner transaction will commit (or rollback) all the way to the outermost transaction. `next.jdbc` "trusts" the programmer to know what they are doing. You can bind `next.jdbc.transaction/*nested-tx*` to `:ignore` if you want the same behavior as `clojure.java.jdbc` where all nested calls are ignored and the outermost transaction is in full control. _Note that this is a **global** setting and not related to just a single connection, so you can't use this setting if you are working with multiple databases._
* Every operation in `clojure.java.jdbc` attempts to create its own transaction, which is a no-op inside an `with-db-transaction` so it is safe; transactions are _implicit_ in `clojure.java.jdbc`. However, if you have migrated that `with-db-transaction` call over to `next.jdbc/with-transaction` then any `clojure.java.jdbc` operations invoked inside the body of that migrated transaction _will still try to create their own transactions_ and `with-db-transaction` won't know about the outer `with-transaction` call. That means you will effectively get the "overlapping" behavior of `next.jdbc` since the `clojure.java.jdbc` operation will cause the outermost transaction to be committed or rolled back.
* None of the operations in `next.jdbc` try to create transactions -- except `with-transaction`. All `Connection`s are auto-commit by default so `next.jdbc` does not need the local transactions that `clojure.java.jdbc` tries to create; transactions are _explicit_ in `next.jdbc`.
There are some strategies you can take to mitigate these differences:
1. Migrate code bottom-up so that you don't end up with calls to `clojure.java.jdbc` operations inside `next.jdbc/with-transaction` calls.
2. When you migrate a `with-db-transaction` call, think carefully about whether it could be a nested call (in which case simply remove it) or a conditionally nested call which you'll need to be much more careful about migrating.
3. You can bind `next.jdbc.transaction/*nested-tx*` to `:prohibit` which will throw exceptions if you accidentally nest calls to `next.jdbc/with-transaction`. Although you can bind it to `:ignore` in order to mimic the behavior of `clojure.java.jdbc`, that should be considered a last resort for dealing with complex conditional nesting of transaction calls. _Note that this is a per-thread "global" setting and not related to just a single connection, so you can't use this setting if you are working with multiple databases in the same dynamic thread context (`binding`)._
3. You can bind `next.jdbc.transaction/*nested-tx*` to `:prohibit` which will throw exceptions if you accidentally nest calls to `next.jdbc/with-transaction`. Although you can bind it to `:ignore` in order to mimic the behavior of `clojure.java.jdbc`, that should be considered a last resort for dealing with complex conditional nesting of transaction calls. _Note that this is a **global** setting and not related to just a single connection, so you can't use this setting if you are working with multiple databases._
### Option Handling
@ -64,7 +64,7 @@ If you were using other forms of the `db-spec` hash map, you'll need to adjust t
The `next.jdbc.sql` namespace contains several functions with similarities to `clojure.java.jdbc`'s core API:
* `insert!` -- similar to `clojure.java.jdbc/insert!` but only supports inserting a single map,
* `insert-multi!` -- similar to `clojure.java.jdbc/insert-multi!` but only supports inserting columns and a vector of row values, or a sequence of hash maps _that all have the same keys_ -- unlike `clojure.java.jdbc/insert-multi!`, you should always get a single multi-row insertion,
* `insert-multi!` -- similar to `clojure.java.jdbc/insert-multi!` but only supports inserting columns and a vector of row values,
* `query` -- similar to `clojure.java.jdbc/query`,
* `find-by-keys` -- similar to `clojure.java.jdbc/find-by-keys` but will also accept a partial where clause (vector) instead of a hash map of column name/value pairs,
* `get-by-id` -- similar to `clojure.java.jdbc/get-by-id`,
@ -118,7 +118,7 @@ Several methods on `DatabaseMetaData` return a `ResultSet` object. All of those
These are mostly drawn from [Issue #5](https://github.com/seancorfield/next-jdbc/issues/5) although most of the bullets in that issue are described in more detail above.
* Keyword options no longer end in `?` -- for consistency (in `clojure.java.jdbc`, some flag options ended in `?` and some did not; also some options that ended in `?` accepted non-`Boolean` values),
* Keyword options no longer end in `?` -- for consistency (in `clojure.java.jdbc`, some flag options ended in `?` and some did not; also some options that ended in `?` accepted non-`Boolean` values, e.g., `:as-arrays?` and `:explain?`),
* `with-db-connection` has been replaced by just `with-open` containing a call to `get-connection`,
* `with-transaction` can take a `:rollback-only` option, but there is no built-in way to change a transaction to rollback _dynamically_; either throw an exception (all transactions roll back on an exception) or call `.rollback` directly on the `java.sql.Connection` object (see [Manual Rollback Inside a Transaction](/doc/transactions.md#manual-rollback-inside-a-transaction) and the following section about save points),
* `clojure.java.jdbc` implicitly allowed transactions to nest and just silently ignored the inner, nested transactions (so you only really had the top-level, outermost transaction); `next.jdbc` by default assumes you know what you are doing, so an inner (nested) transaction will commit or roll back the work done so far in the outer transaction (and then, when that outer transaction ends, the remaining work is rolled back or committed); `next.jdbc.transaction/*nested-tx*` is a dynamic var that can be bound to `:ignore` to get similar behavior to `clojure.java.jdbc`.

View file

@ -133,7 +133,7 @@ If you want to get the generated keys from an `insert` done via `execute-batch!`
This calls `rs/datafiable-result-set` behind the scenes so you can also pass a `:builder-fn` option to `execute-batch!` if you want something other than qualified as-is hash maps.
> Note: not all databases support calling `.getGeneratedKeys` here (everything I test against seems to, except MS SQL Server and SQLite). Some databases will only return one generated key per batch, rather than a generated key for every row inserted. You may need to add `RETURNING *` to your `INSERT` statements instead.
> Note: not all databases support calling `.getGeneratedKeys` here (everything I test against seems to, except MS SQL Server). Some databases will only return one generated key per batch, rather than a generated key for every row inserted.
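As a minimal sketch of that combination (datasource `ds`, the `fruit` table, and its columns are assumptions here, not taken from the diff above):

```clojure
(require '[next.jdbc :as jdbc])

;; :return-generated-keys asks the driver for keys after each batch;
;; exactly what comes back is driver-dependent (see the note above)
(jdbc/execute-batch! ds
                     "INSERT INTO fruit (name) VALUES (?)"
                     [["Apple"] ["Banana"] ["Peach"]]
                     {:return-generated-keys true})
;; => a vector of hash maps of generated keys instead of update counts
```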
### Caveats
@ -141,4 +141,4 @@ There are several caveats around using batched parameters. Some JDBC drivers nee
In addition, if the batch operation fails for a group of parameters, it is database-specific whether the remaining groups of parameters are used, i.e., whether the operation is performed for any further groups of parameters after the one that failed. The result of calling `execute-batch!` is a vector of integers. Each element of the vector is the number of rows affected by the operation for each group of parameters. `execute-batch!` may throw a `BatchUpdateException` and calling `.getUpdateCounts` (or `.getLargeUpdateCounts`) on the exception may return an array containing a mix of update counts and error values (a Java `int[]` or `long[]`). Some databases don't always return an update count but instead a value indicating the number of rows is not known (but sometimes you can still get the update counts).
Finally, some database drivers don't do batched operations at all -- they accept `.executeBatch` but they run the operation as separate commands for the database rather than a single batched command. Some database drivers do not support `.getGeneratedKeys` (e.g., MS SQL Server and SQLite) so you cannot use `:return-generated-keys` and you need to use `RETURNING *` in your `INSERT` statements instead.
Finally, some database drivers don't do batched operations at all -- they accept `.executeBatch` but they run the operation as separate commands for the database rather than a single batched command.

View file

@ -26,7 +26,7 @@ In addition, the following generic builders can take `:label-fn` and `:qualifier
* `as-modified-arrays` -- table-qualified keywords,
* `as-unqualified-modified-arrays` -- simple keywords.
An example builder that naively converts `snake_case` database table/column names to `kebab-case` keywords:
An example builder that converts `snake_case` database table/column names to `kebab-case` keywords:
```clojure
(defn as-kebab-maps [rs opts]
@ -34,9 +34,7 @@ An example builder that naively converts `snake_case` database table/column name
(result-set/as-modified-maps rs (assoc opts :qualifier-fn kebab :label-fn kebab))))
```
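The diff hunk above elides the middle of that example; a complete minimal sketch of the same idea (the naive `str/replace` body is an assumption, not the elided code):

```clojure
(require '[clojure.string :as str]
         '[next.jdbc.result-set :as result-set])

;; naive: only swaps underscores for hyphens, no case handling
(defn as-kebab-maps [rs opts]
  (let [kebab #(str/replace % #"_" "-")]
    (result-set/as-modified-maps rs (assoc opts :qualifier-fn kebab :label-fn kebab))))
```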
However, a version of `as-kebab-maps` is built-in, as is `as-unqualified-kebab-maps`, which both use the `->kebab-case` function from the [camel-snake-kebab library](https://github.com/clj-commons/camel-snake-kebab/) with `as-modified-maps` and `as-unqualified-modified-maps` respectively, so you can just use the built-in `result-set/as-kebab-maps` (or `result-set/as-unqualified-kebab-maps`) builder as a `:builder-fn` option instead of writing your own.
> Note: `next.jdbc/snake-kebab-opts` and `next.jdbc/unqualified-snake-kebab-opts` exist, providing pre-built options hash maps that contain these `:builder-fn` options, as well as appropriate `:table-fn` and `:column-fn` options for the **Friendly SQL Functions** so those are often the most convenient way to enable snake/kebab case conversions with `next.jdbc`.
However, a version of `as-kebab-maps` is built-in, as is `as-unqualified-kebab-maps`, which both use the `->kebab-case` function from the [camel-snake-kebab library](https://github.com/clj-commons/camel-snake-kebab/) with `as-modified-maps` and `as-unqualified-modified-maps` respectively.
And finally there are two styles of adapters for the existing builders that let you override the default way that columns are read from result sets.
The first style takes a `column-reader` function, which is called with the `ResultSet`, the `ResultSetMetaData`, and the column index, and is expected to read the raw column value from the result set and return it. The result is then passed through `read-column-by-index` (from `ReadableColumn`, which may be implemented directly via protocol extension or via metadata on the result of the `column-reader` function):
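The diff cuts the example off at this point; a hedged sketch of that first style, using `as-maps-adapter` with a column-reader that forces every column through `.getString` (the datasource and table name are assumptions, and `.getString` is chosen purely for illustration):

```clojure
(require '[next.jdbc :as jdbc]
         '[next.jdbc.result-set :as rs])

;; the column-reader receives the ResultSet, its metadata, and the index
(def as-string-maps
  (rs/as-maps-adapter
   rs/as-maps
   (fn [^java.sql.ResultSet rset _rsmeta i]
     (.getString rset (int i)))))

(jdbc/execute! ds ["select * from some_table"] {:builder-fn as-string-maps})
```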

View file

@ -38,42 +38,6 @@ Consult the [java.sql.Blob documentation](https://docs.oracle.com/javase/8/docs/
> Note: the standard MySQL JDBC driver seems to return `BLOB` data as `byte[]` instead of `java.sql.Blob`.
## Exceptions
A lot of JDBC operations can fail with an exception. JDBC 4.0 has a
[well-defined hierarchy of exception types](https://docs.oracle.com/en/java/javase/17/docs/api/java.sql/java/sql/package-tree.html)
and you can often catch a specific type of exception to do useful handling
of various error conditions that you might "expect" when working with a
database.
A good example is [SQLIntegrityConstraintViolationException](https://docs.oracle.com/en/java/javase/17/docs/api/java.sql/java/sql/SQLIntegrityConstraintViolationException.html)
which typically represents an index/key constraint violation such as a
duplicate primary key insertion attempt.
However, like some other areas when dealing with JDBC, the reality can
be very database-specific. Some database drivers **don't** use the hierarchy
above -- notably PostgreSQL, which has a generic `PSQLException` type
with its own subclasses and semantics. See [PostgreSQL JDBC issue #963](https://github.com/pgjdbc/pgjdbc/issues/963)
for a discussion of the difficulty in adopting the standard JDBC hierarchy
(dating back to 2017!).
The `java.sql.SQLException` class provides `.getErrorCode()` and
`.getSQLState()` methods but the values returned by those are
explicitly vendor-specific (error code) or only partly standardized (state).
In theory, the SQL state should follow either the X/Open (Open Group) or
ANSI SQL 2003 conventions, both of which were behind paywalls(!). The most
complete public listing is probably the IBM DB2
[SQL State](https://www.ibm.com/docs/en/db2woc?topic=messages-sqlstate)
document.
See also this [Stack Overflow post about SQL State](https://stackoverflow.com/questions/1399574/what-are-all-the-possible-values-for-sqlexception-getsqlstate)
for more references and links. Not all database drivers follow either of
these conventions for SQL State so you may still have to consult your
vendor's specific documentation.
All of this makes writing _generic_ error handling, that works across
multiple databases, very hard indeed. You can't rely on the JDBC `SQLException`
hierarchy; you can sometimes rely on a subset of SQL State values.
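As a hedged illustration of that trade-off (the SQL, table name, and datasource are assumptions, and PostgreSQL in particular will not throw the subclass shown):

```clojure
(require '[next.jdbc :as jdbc])

(try
  (jdbc/execute-one! ds ["insert into account (id) values (?)" 1])
  (catch java.sql.SQLIntegrityConstraintViolationException e
    ;; works where the driver uses the JDBC 4.0 exception hierarchy
    {:error :duplicate-key :message (ex-message e)})
  (catch java.sql.SQLException e
    ;; fallback: SQLState class 23xxx is the (mostly) standard class
    ;; for integrity constraint violations
    (if (some-> (.getSQLState e) (.startsWith "23"))
      {:error :constraint-violation :message (ex-message e)}
      (throw e))))
```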
## Handling Timeouts
JDBC provides a number of ways in which you can decide how long an operation should run before it times out. Some of these timeouts are specified in seconds and some are in milliseconds. Some are handled via connection properties (or JDBC URL parameters), some are handled via methods on various JDBC objects.
@ -116,15 +80,6 @@ Examples:
Most of this documentation describes using `plan` specifically for reducing and notes that you can avoid the overhead of realizing rows from the `ResultSet` into Clojure data structures if your reducing function uses only functions that get column values by name. If you perform any function on the row that would require an actual hash map or a sequence, the row will be realized into a full Clojure hash map via the builder function passed in the options (or via `next.jdbc.result-set/as-maps` by default).
One of the benefits of reducing over `plan` is that you can stream very large result sets, very efficiently, without having the entire result set in memory (assuming your reducing function doesn't build a data structure that is too large!). See the tips below on **Streaming Result Sets**.
If you want to process a `plan` result purely for side-effects, without producing a result,
you can use `run!` instead of `reduce`:
```clojure
(run! process-row (jdbc/plan ds ...))
```
`run!` is based on `reduce` and `process-row` here takes just one argument --
the row -- rather than the usual reducing function that takes two.
The result of `plan` is also foldable in the [clojure.core.reducers](https://clojure.org/reference/reducers) sense. While you could use `execute!` to produce a vector of fully-realized rows as hash maps and then fold that vector (Clojure's vectors support fork-join parallel reduce-combine), that wouldn't be possible for very large result sets. If you fold the result of `plan`, the result set will be partitioned and processed using fork-join parallel reduce-combine. Unlike reducing over `plan`, each row **is** realized into a Clojure data structure and each batch is forked for reduction as soon as that many rows have been realized. By default, `fold`'s batch size is 512 but you can specify a different value in the 4-arity call. Once the entire result set has been read, the last (partial) batch is forked for reduction. The combining operations are forked and interleaved with the reducing operations, so the order (of forked tasks) is batch-1, batch-2, combine-1-2, batch-3, combine-1&2-3, batch-4, combine-1&2&3-4, etc. The amount of parallelization you get will depend on many factors including the number of processors, the speed of your reducing function, the speed of your combining function, and the speed with which result sets can actually be streamed from your database.
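A minimal sketch of folding over `plan` (datasource, table, column, and batch size are all assumptions; the exact key depends on your database and builder, since folded rows are fully realized):

```clojure
(require '[clojure.core.reducers :as r]
         '[next.jdbc :as jdbc])

;; parallel sum of a column, forking a reduction every 1,000 rows
(r/fold 1000
        +                             ; combine partial sums ((+) is the identity)
        (fn [total row] (+ total (:sales/unit_count row)))
        (jdbc/plan ds ["select unit_count from sales"]))
```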
@ -165,12 +120,6 @@ MS SQL Server supports execution of multiple statements when surrounded by `begi
[{.. table2 row ..} {.. table2 row ..} {..}]]
```
### Batch Statements
Even when using `next.jdbc/execute-batch!`, Microsoft's JDBC driver will still send multiple insert statements to the database unless you specify `:useBulkCopyForBatchInsert true` as part of the db-spec hash map or JDBC URL when the datasource is created.
To use this feature, the Microsoft JDBC driver must be at least version 9.2 and only a limited set of data types is supported. For example, if you use an `inst` to bulk insert a `smalldatetime` value, the driver will revert to the old (slow) behavior. For more details see [Using bulk copy API for batch insert operation](https://docs.microsoft.com/en-us/sql/connect/jdbc/use-bulk-copy-api-batch-insert-operation?view=sql-server-ver16) and [Release notes for JDBC drivers](https://docs.microsoft.com/en-us/sql/connect/jdbc/release-notes-for-the-jdbc-driver?view=sql-server-ver16).
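A hedged sketch of enabling the bulk copy path (host, credentials, database, and table are assumptions):

```clojure
(require '[next.jdbc :as jdbc])

(def ds
  (jdbc/get-datasource
   {:dbtype "mssql" :host "localhost" :dbname "model"
    :user "sa" :password "Str0ngP4ssw0rd"
    ;; without this, the driver sends one INSERT per parameter group
    :useBulkCopyForBatchInsert true}))

(jdbc/execute-batch! ds
                     "INSERT INTO events (label) VALUES (?)"
                     [["a"] ["b"] ["c"]]
                     {})
```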
## MySQL & MariaDB
In MySQL, the generated key from an insert comes back as `:GENERATED_KEY`. In MariaDB, the generated key from an insert comes back as `:insert_id`.
@ -212,47 +161,11 @@ An important performance issue to be aware of with Oracle's JDBC driver is that
either need to specify `:prefetch` in your db-spec hash map with a suitable value (say 1,000 or larger), or specify `&prefetch=` in your JDBC URL string. If you want
to keep the default, you can change it on a per-statement basis by specifying `:fetch-size` as an option to `execute!` etc.
If you are using the 10g or later JDBC driver and you try to execute DDL statements that include SQL entities
that start with a `:` (such as `:new` or `:old`), they will be treated as bindable parameter references if
you use a `PreparedStatement` to execute them. Since that's the default for `execute!` etc, it means that you
will likely get an error like the following:
```
Missing IN or OUT parameter at index:: 1
```
You will need to use `next.jdbc.prepare/statement` to create a `Statement` object and then call `execute!`
on that to avoid this error. Don't forget to `.close` the `Statement` after `execute!` -- using `with-open`
is the best way to ensure the statement is properly closed after use.
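A hedged sketch of that workaround (the trigger DDL and table names are invented for illustration, and it uses direct `.execute` interop on the `Statement` rather than `execute!` to keep the sketch conservative):

```clojure
(require '[next.jdbc :as jdbc]
         '[next.jdbc.prepare :as prep])

;; a plain Statement does not pre-parse :new / :old as bindable parameters
(with-open [con  (jdbc/get-connection ds)
            stmt (prep/statement con)]
  (.execute stmt "CREATE TRIGGER log_account_update
                    AFTER UPDATE ON account
                    FOR EACH ROW
                  BEGIN
                    INSERT INTO audit_log (account_id) VALUES (:new.id);
                  END;"))
```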
## PostgreSQL
As you can see in this section (and elsewhere in this documentation), the
PostgreSQL JDBC driver has a number of interesting quirks and behaviors that
you need to be aware of. Although accessing PostgreSQL via JDBC is the most
common approach, there is also a non-JDBC Clojure/Java driver for PostgreSQL called
[PG2](https://github.com/igrishaev/pg2) which supports JSON operations natively
(see below for what's required for JDBC), as well as supporting Java Time natively
(see the section above about **Times, Dates, and Timezones**), and it is also
quite a bit faster than using JDBC.
When you use `:return-keys true` with `execute!` or `execute-one!` (or you use `insert!`), PostgreSQL returns the entire inserted row (unlike nearly every other database that just returns any generated keys!).
_[It seems to achieve this by the equivalent of automatically appending `RETURNING *` to your SQL, if necessary.]_
The default result set builder for `next.jdbc` is `as-qualified-maps` which
uses the `.getTableName()` method on `ResultSetMetaData` to qualify the
columns in the result set. While some database drivers have this information
on hand from the original SQL operation, PostgreSQL's JDBC driver does not
and it performs an extra SQL query to fetch table names the first time this
method is called for each query. If you want to avoid those extra queries,
and you can live with unqualified column names, you can use `as-unqualified-maps`
as the result set builder instead.
If you have a query where you want to select where a column is `IN` a sequence of values, you can use `col = ANY(?)` with a native array of the values instead of `IN (?,?,?,,,?)` and a sequence of values. **Be aware of
[PostgreSQL bug 17922](https://www.postgresql.org/message-id/flat/17922-1e2e0aeedd294424%40postgresql.org)
which can cause pathological performance when the array has a single element!**
If you think you might have a single-element array, consider using `UNNEST` and
`IN` instead.
If you have a query where you want to select where a column is `IN` a sequence of values, you can use `col = ANY(?)` with a native array of the values instead of `IN (?,?,?,,,?)` and a sequence of values.
What does this mean for your use of `next.jdbc`? In `plan`, `execute!`, and `execute-one!`, you can use `col = ANY(?)` in the SQL string and a single primitive array parameter, such as `(int-array [1 2 3 4])`. That means that in `next.jdbc.sql`'s functions that take a where clause (`find-by-keys`, `update!`, and `delete!`) you can specify `["col = ANY(?)" (int-array data)]` for what would be a `col IN (?,?,?,,,?)` where clause for other databases and require multiple values.
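A hedged sketch of both spellings (datasource, table, and column names are assumptions):

```clojure
(require '[next.jdbc :as jdbc]
         '[next.jdbc.sql :as sql])

;; execute! with a single primitive-array parameter
(jdbc/execute! ds ["select * from invoice where customer_id = ANY(?)"
                   (int-array [1 2 3 4])])

;; the same idea via find-by-keys' partial where-clause vector
(sql/find-by-keys ds :invoice ["customer_id = ANY(?)" (int-array [1 2 3 4])])
```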
@ -302,7 +215,7 @@ create table example(
```
> Note: PostgreSQL JDBC driver supports only 7 primitive array types, but not array types like `UUID[]` -
[PostgreSQL™ Extensions to the JDBC API](https://jdbc.postgresql.org/documentation/server-prepare/#arrays).
[PostgreSQL™ Extensions to the JDBC API](https://jdbc.postgresql.org/documentation/head/arrays.html).
### Working with Date and Time
@ -418,12 +331,14 @@ containing JSON:
(.setValue (->json x)))))
(defn <-pgobject
"Transform PGobject containing `json` or `jsonb` value to Clojure data."
[^PGobject v]
"Transform PGobject containing `json` or `jsonb` value to Clojure
data."
[^org.postgresql.util.PGobject v]
(let [type (.getType v)
value (.getValue v)]
(if (#{"jsonb" "json"} type)
(some-> value <-json (with-meta {:pgtype type}))
(when value
(with-meta (<-json value) {:pgtype type}))
value)))
```
@ -548,8 +463,6 @@ SQLite supports both `bool` and `bit` column types but, unlike pretty much every
You can work around this using a builder that handles reading the column directly as a `Boolean`:
```clojure
(import '(java.sql ResultSet ResultSetMetaData))
(jdbc/execute! ds ["select * from some_table"]
{:builder-fn (rs/builder-adapter
rs/as-maps
@ -572,29 +485,3 @@ If you are using `plan`, you'll most likely be accessing columns by just the lab
[]
(jdbc/plan ds ["select * from some_table"]))
```
See also [`datafy`, `nav`, and `:schema` > **SQLite**](/doc/datafy-nav-and-schema.md#sqlite)
for additional caveats on the `next.jdbc.datafy` namespace when using SQLite.
## XTDB
XTDB is a bitemporal, schemaless, document-oriented database that presents
itself as a PostgreSQL-compatible database, in terms of JDBC. It has a number
of SQL extensions, and some differences from common JDBC behavior. See
its documentation for details:
* [SQL Overview](https://docs.xtdb.com/quickstart/sql-overview.html)
* [SQL Queries](https://docs.xtdb.com/reference/main/sql/queries.html)
* [SQL Transactions/DML](https://docs.xtdb.com/reference/main/sql/txs.html)
`next.jdbc` officially supports XTDB as of 1.3.981 but there are some caveats:
* You can use `:dbtype "xtdb"` to identify XTDB as the database type.
* You must specify `:dbname "xtdb"` in the db-spec hash map or JDBC URL (see the db-spec sketch after this list).
* XTDB does not support `.getTableName()` so you always get unqualified column names in result sets.
* The primary key on all tables is `_id` and it must be specified in all `INSERT` operations (no auto-generated keys).
* That means that `next.jdbc.sql/get-by-id` requires the 5-argument call, so that you can specify the `pk-name` as `:_id` and provide an options map.
* If you want to use `next.jdbc`'s built-in `datafy` / `nav` functionality, you need to explicitly specify `:schema-opts {:pk "_id"}` to override the default assumption of `id` as the primary key.
* DML operations (`INSERT`, `UPDATE`, and `DELETE`) are essentially asynchronous in XTDB and therefore cannot return an accurate `next.jdbc/update-count` (so it is always 0).
* `INSERT` operations do not return the inserted row (like PostgreSQL does) nor even the provided `_id` primary key.
* That means that the `next.jdbc.defer` namespace functions do not work well with XTDB.
* `next.jdbc.sql/insert-multi!` returns an empty vector for XTDB (since `INSERT` operations do not return keys or update counts).
* The `next.jdbc.result-set/*-kebab-maps` functions (and associated `next.jdbc/*-kebab-opts` option maps) cause leading `_` to be stripped from column names and cannot be used with XTDB (this is inherent in the underlying library that `next.jdbc` relies on -- you can of course write your own custom result set builder function to handle this).
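Pulling the first few caveats together, a minimal db-spec sketch for a local XTDB node (the host, and the `users` table and its columns, are assumptions; the port matches the Docker setup shown elsewhere in this diff):

```clojure
(require '[next.jdbc :as jdbc])

(def ds
  (jdbc/get-datasource
   {:dbtype "xtdb" :dbname "xtdb"       ; :dbname must be "xtdb"
    :host "localhost" :port 5432}))

;; every INSERT must supply the _id primary key explicitly;
;; the update-count will be 0 because DML is asynchronous in XTDB
(jdbc/execute-one! ds ["INSERT INTO users (_id, name) VALUES (?, ?)" 1 "Ada"])
```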

View file

@ -10,10 +10,6 @@ By default, all connections that `next.jdbc` creates are automatically committab
It is possible to tell `next.jdbc` to create connections that do not automatically commit operations: pass `{:auto-commit false}` as part of the options map to anything that creates a connection (including `get-connection` itself). You can then decide when to commit or rollback by calling `.commit` or `.rollback` on the connection object itself. You can also create save points (`(.setSavepoint con)`, `(.setSavepoint con name)`) and rollback to them (`(.rollback con save-point)`). You can also change the auto-commit state of an open connection at any time (`(.setAutoCommit con on-off)`).
This is the machinery behind "transactions": one or more operations on a
`Connection` that are not automatically committed, and which can be rolled back
or committed explicitly at any point.
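A hedged interop sketch of that machinery (the table and SQL are assumptions):

```clojure
(require '[next.jdbc :as jdbc])

(with-open [con (jdbc/get-connection ds {:auto-commit false})]
  (jdbc/execute-one! con ["insert into audit_log (event) values (?)" "start"])
  (let [sp (.setSavepoint con)]
    (jdbc/execute-one! con ["insert into audit_log (event) values (?)" "oops"])
    (.rollback con sp))                 ; undo everything after the savepoint
  (.commit con))                        ; commit what remains
```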
## Automatic Commit & Rollback
`next.jdbc`'s transaction handling provides a convenient baseline for either committing a group of operations if they all succeed or rolling them all back if any of them fails, by throwing an exception. You can either do this on an existing connection -- and `next.jdbc` will try to restore the state of the connection after the transaction completes -- or by providing a datasource and letting `with-transaction` create and manage its own connection:
@ -39,14 +35,6 @@ You can also provide an options map as the third element of the binding vector (
The latter can be particularly useful in tests, to run a series of SQL operations during a test and then roll them all back at the end.
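For example, a hedged sketch of that test pattern (table and data are assumptions):

```clojure
(require '[next.jdbc :as jdbc]
         '[next.jdbc.sql :as sql])

(jdbc/with-transaction [tx ds {:rollback-only true}]
  (sql/insert! tx :address {:name "Test" :email "test@example.com"})
  ;; assertions against tx here can see the inserted row...
  (sql/find-by-keys tx :address {:email "test@example.com"}))
;; ...but all of the work is rolled back when the transaction ends
```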
If you use `next.jdbc/with-transaction` (or `next.jdbc/transact`), then
`next.jdbc` keeps track of whether a "transaction" is in progress or not, and
you can call `next.jdbc/active-tx?` to determine that, in your own code, in
case you want to write code that behaves differently inside or outside a
transaction.
> Note: `active-tx?` only knows about `next.jdbc` transactions -- it cannot track any transactions that you create yourself using the underlying JDBC `Connection`. In addition, this is a per-thread "global" setting and not related to just a single connection, so you can't use this setting if you are working with multiple databases in the same dynamic thread context (`binding`).
## Manual Rollback Inside a Transaction
Instead of throwing an exception (which will propagate through `with-transaction` and therefore provide no result), you can also explicitly rollback if you want to return a result in that case:
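The diff hunk cuts off the original example at this point; a minimal sketch of the idea (the table, the business check, and the return values are assumptions, not the elided snippet):

```clojure
(require '[next.jdbc :as jdbc]
         '[next.jdbc.sql :as sql])

(jdbc/with-transaction [tx ds]
  (let [result (sql/insert! tx :invoice {:total 100.00M})]
    (if (seq result)               ; stand-in for a real business check
      result                       ; keep the work and return the generated keys
      (do (.rollback tx)           ; undo the work...
          {:error "rolled back"})))) ; ...but still return a value
```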
@ -107,16 +95,4 @@ transactions in the code under test.
* `(binding [next.jdbc.transaction/*nested-tx* :ignore] ...)` provides the same behavior as `clojure.java.jdbc` where nested calls are essentially ignored and only the outermost transaction takes effect,
* `(binding [next.jdbc.transaction/*nested-tx* :prohibit] ...)` will cause any attempt to start a nested transaction to throw an exception instead; this could be a useful way to detect the potentially buggy behavior described above (for either `:allow` or `:ignore`).
> Note: this is a per-thread "global" setting and not related to just a single connection, so you can't use this setting if you are working with multiple databases in the same dynamic thread context (`binding`).
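A minimal sketch of the `:prohibit` guard (the datasource and body are illustrative):

```clojure
(require '[next.jdbc :as jdbc]
         '[next.jdbc.transaction :as tx])

(binding [tx/*nested-tx* :prohibit]
  (jdbc/with-transaction [t1 ds]
    ;; any attempt to start a nested transaction on this thread now throws,
    ;; instead of silently overlapping with the outer transaction
    (jdbc/with-transaction [t2 ds]
      (jdbc/execute-one! t2 ["select 1 as one"]))))
```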
### `with-options`
If you are using `with-options` to produce wrapped connectables / transactables,
it's important to be aware that `with-transaction` produces a bare Java
`java.sql.Connection` object that cannot have options -- but does allow direct
interop. If you want to use `with-options` with `with-transaction`, you must
either rewrap the `Connection` with a nested call to `with-options` or,
as of 1.3.894, you can use `with-transaction+options` which will automatically
rewrap the `Connection` in a new connectable along with the options from the
original transactable. Be aware that you cannot use Java interop on this
wrapped connectable.
> Note: this is a **global** setting (per thread) and not related to just a single connection, so you can't use this setting if you are working with multiple databases in the same context.

View file

@ -1,3 +1,4 @@
version: '2'
services:
mysql:
image: percona:5.7
@ -8,14 +9,9 @@ services:
command:
[--character-set-server=utf8mb4, --collation-server=utf8mb4_unicode_ci]
sqlserver:
image: mcr.microsoft.com/mssql/server:2022-latest
image: mcr.microsoft.com/mssql/server:2019-GA-ubuntu-16.04
environment:
ACCEPT_EULA: Y
MSSQL_SA_PASSWORD: Str0ngP4ssw0rd
SA_PASSWORD: Str0ngP4ssw0rd
ports:
- "1433:1433"
xtdb:
image: ghcr.io/xtdb/xtdb:latest
# pull_policy: always
ports:
- "5432:5432"

View file

@ -1,8 +1,4 @@
{:hooks
{:analyze-call
{next.jdbc/with-transaction
hooks.com.github.seancorfield.next-jdbc/with-transaction
next.jdbc/with-transaction+options
hooks.com.github.seancorfield.next-jdbc/with-transaction+options}}
:lint-as {next.jdbc/on-connection clojure.core/with-open
next.jdbc/on-connection+options clojure.core/with-open}}
hooks.com.github.seancorfield.next-jdbc/with-transaction}}}

View file

@ -0,0 +1,18 @@
(ns hooks.com.github.seancorfield.next-jdbc
(:require [clj-kondo.hooks-api :as api]))
(defn with-transaction
"Expands (with-transaction [tx expr opts] body)
to (let [tx expr] opts body) per clj-kondo examples."
[{:keys [:node]}]
(let [[binding-vec & body] (rest (:children node))
[sym val opts] (:children binding-vec)]
(when-not (and sym val)
(throw (ex-info "No sym and val provided" {})))
(let [new-node (api/list-node
(list*
(api/token-node 'let)
(api/vector-node [sym val])
opts
body))]
{:node new-node})))

View file

@ -1,34 +0,0 @@
(ns hooks.com.github.seancorfield.next-jdbc
(:require [clj-kondo.hooks-api :as api]))
(defn with-transaction
"Expands (with-transaction [tx expr opts] body)
to (let [tx expr] opts body) per clj-kondo examples."
[{:keys [:node]}]
(let [[binding-vec & body] (rest (:children node))
[sym val opts] (:children binding-vec)]
(when-not (and sym val)
(throw (ex-info "No sym and val provided" {})))
(let [new-node (api/list-node
(list*
(api/token-node 'let)
(api/vector-node [sym val])
opts
body))]
{:node new-node})))
(defn with-transaction+options
"Expands (with-transaction+options [tx expr opts] body)
to (let [tx expr] opts body) per clj-kondo examples."
[{:keys [:node]}]
(let [[binding-vec & body] (rest (:children node))
[sym val opts] (:children binding-vec)]
(when-not (and sym val)
(throw (ex-info "No sym and val provided" {})))
(let [new-node (api/list-node
(list*
(api/token-node 'let)
(api/vector-node [sym val])
opts
body))]
{:node new-node})))

View file

@ -1,34 +0,0 @@
#!/usr/bin/env bb
(require '[babashka.process :as p])
(defn- run-tests [env v]
(when v (println "\nTesting Clojure" v))
(let [{:keys [exit]}
(p/shell {:extra-env env}
"clojure"
(str "-M"
(when v (str ":" v))
":test:runner"
;; jdk21+ adds xtdb:
(when (System/getenv "NEXT_JDBC_TEST_XTDB")
":jdk21")
;; to suppress native access warnings on JDK24:
":jdk24")
"--output" "dots")]
(when-not (zero? exit)
(System/exit exit))))
(let [maria? (some #(= "maria" %) *command-line-args*)
xtdb? (some #(= "xtdb" %) *command-line-args*)
all? (some #(= "all" %) *command-line-args*)
env
(cond-> {"NEXT_JDBC_TEST_MSSQL" "yes"
"NEXT_JDBC_TEST_MYSQL" "yes"
"MSSQL_SA_PASSWORD" "Str0ngP4ssw0rd"}
maria?
(assoc "NEXT_JDBC_TEST_MARIADB" "yes")
xtdb?
(assoc "NEXT_JDBC_TEST_XTDB" "yes"))]
(doseq [v (if all? ["1.10" "1.11" "1.12"] [nil])]
(run-tests env v)))

29
run-tests.sh Executable file
View file

@ -0,0 +1,29 @@
#!/bin/sh
# start databases with: docker-compose up
# then: ./run-tests.sh create
# - creates a new database in MySQL for running tests
#
# test against "all" databases with MySQL JDBC driver:
# ./run-tests.sh
#
# test against "all" databases with MariaDB JDBC driver:
# ./run-tests.sh maria
if test "$1" = "create"
then
sleep 30
# assumes you already have a MySQL instance running locally
NEXT_JDBC_TEST_MYSQL=yes clojure -X:test next.jdbc.test-fixtures/create-clojure-test
fi
if test "$1" = "maria"
then
NEXT_JDBC_TEST_MSSQL=yes MSSQL_SA_PASSWORD=Str0ngP4ssw0rd \
NEXT_JDBC_TEST_MYSQL=yes NEXT_JDBC_TEST_MARIADB=yes clojure -X:test
fi
if test "$1" = ""
then
NEXT_JDBC_TEST_MSSQL=yes MSSQL_SA_PASSWORD=Str0ngP4ssw0rd \
NEXT_JDBC_TEST_MYSQL=yes clojure -X:test
fi
exit $?

View file

@ -1,4 +1,4 @@
;; copyright (c) 2018-2025 Sean Corfield, all rights reserved
;; copyright (c) 2018-2021 Sean Corfield, all rights reserved
(ns next.jdbc
"The public API of the next generation java.jdbc library.
@ -14,8 +14,8 @@
* `get-connection` -- given a connectable, obtain a new `java.sql.Connection`
from it and return that,
* `plan` -- given a connectable and SQL + parameters or a statement,
return a reducible that, when reduced (with an initial value) will
execute the SQL and consume the `ResultSet` produced,
return a reducible that, when reduced will execute the SQL and consume
the `ResultSet` produced,
* `execute!` -- given a connectable and SQL + parameters or a statement,
execute the SQL, consume the `ResultSet` produced, and return a vector
of hash maps representing the rows (@1); this can be datafied to allow
@ -68,7 +68,7 @@
[next.jdbc.protocols :as p]
[next.jdbc.result-set :as rs]
[next.jdbc.sql-logging :as logger]
[next.jdbc.transaction :as tx])
[next.jdbc.transaction])
(:import (java.sql PreparedStatement)))
(set! *warn-on-reflection* true)
@ -92,8 +92,7 @@
can be `:none` which means the host/port segment of the JDBC URL should
be omitted entirely (for 'local' databases)
* `:port` -- the port for the database connection (the default is database-
specific -- see below); can be `:none` which means the port segment of
the JDBC URL should be omitted entirely
specific -- see below)
* `:classname` -- if you need to override the default for the `:dbtype`
(or you want to use a database that next.jdbc does not know about!)
@ -137,7 +136,6 @@
* `sqlserver`, `mssql` -- `com.microsoft.sqlserver.jdbc.SQLServerDriver` -- `1433`
* `timesten:client` -- `com.timesten.jdbc.TimesTenClientDriver`
* `timesten:direct` -- `com.timesten.jdbc.TimesTenDriver`
* `xtdb` -- `xtdb.jdbc.Driver` -- an XTDB wrapper around `postgresql`
For more details about `:dbtype` and `:classname` values, see:
https://cljdoc.org/d/com.github.seancorfield/next.jdbc/CURRENT/api/next.jdbc.connection#dbtypes"
@ -177,14 +175,6 @@
[spec user password opts]
(p/get-connection spec (assoc opts :user user :password password))))
(defn- ensure-sql-params [sql-params]
(when-not (or (nil? sql-params)
(and (seqable? sql-params)
(or (empty? sql-params)
(string? (first sql-params)))))
(throw (ex-info "sql-params should be a vector containing a SQL string and any parameters"
{:sql-params sql-params}))))
(defn prepare
"Given a connection to a database, and a vector containing SQL and any
parameters it needs, return a new `PreparedStatement`.
@ -199,21 +189,16 @@
See the list of options above (in the namespace docstring) for what can
be passed to prepare."
(^java.sql.PreparedStatement
[connection sql-params]
(ensure-sql-params sql-params)
(p/prepare connection sql-params {}))
[connection sql-params]
(p/prepare connection sql-params {}))
(^java.sql.PreparedStatement
[connection sql-params opts]
(ensure-sql-params sql-params)
(p/prepare connection sql-params opts)))
[connection sql-params opts]
(p/prepare connection sql-params opts)))
(defn plan
"General SQL execution function (for working with result sets).
Returns a reducible that, when reduced (with an initial value), runs the
SQL and yields the result. `plan` returns an `IReduceInit` object so you
must provide an initial value when calling `reduce` on it.
Returns a reducible that, when reduced, runs the SQL and yields the result.
The reducible is also foldable (in the `clojure.core.reducers` sense) but
see the **Tips & Tricks** section of the documentation for some important
caveats about that.
@ -238,18 +223,16 @@
(or they can be different, depending on how you want the row to be built,
and how you want any subsequent lazy navigation to be handled)."
(^clojure.lang.IReduceInit
[stmt]
(p/-execute stmt [] {}))
[stmt]
(p/-execute stmt [] {}))
(^clojure.lang.IReduceInit
[connectable sql-params]
(ensure-sql-params sql-params)
(p/-execute connectable sql-params
{:next.jdbc/sql-params sql-params}))
[connectable sql-params]
(p/-execute connectable sql-params
{:next.jdbc/sql-params sql-params}))
(^clojure.lang.IReduceInit
[connectable sql-params opts]
(ensure-sql-params sql-params)
(p/-execute connectable sql-params
(assoc opts :next.jdbc/sql-params sql-params))))
[connectable sql-params opts]
(p/-execute connectable sql-params
(assoc opts :next.jdbc/sql-params sql-params))))
(defn execute!
"General SQL execution function.
@ -264,11 +247,9 @@
([stmt]
(p/-execute-all stmt [] {}))
([connectable sql-params]
(ensure-sql-params sql-params)
(p/-execute-all connectable sql-params
{:next.jdbc/sql-params sql-params}))
([connectable sql-params opts]
(ensure-sql-params sql-params)
(p/-execute-all connectable sql-params
(assoc opts :next.jdbc/sql-params sql-params))))
@ -285,11 +266,9 @@
([stmt]
(p/-execute-one stmt [] {}))
([connectable sql-params]
(ensure-sql-params sql-params)
(p/-execute-one connectable sql-params
{:next.jdbc/sql-params sql-params}))
([connectable sql-params opts]
(ensure-sql-params sql-params)
(p/-execute-one connectable sql-params
(assoc opts :next.jdbc/sql-params sql-params))))
@ -352,9 +331,9 @@
result))))
params)))
([connectable sql param-groups opts]
(when-not (string? sql)
(throw (IllegalArgumentException. "execute-batch! requires a SQL string")))
(if (instance? java.sql.Connection (p/unwrap connectable))
(if (or (instance? java.sql.Connection connectable)
(and (satisfies? p/Connectable connectable)
(instance? java.sql.Connection (:connectable connectable))))
(with-open [ps (prepare connectable [sql] opts)]
(execute-batch! ps param-groups opts))
(with-open [con (get-connection connectable)]
@ -380,47 +359,16 @@
Otherwise, creates a new `Connection` object from the connectable,
executes the body, and automatically closes it for you."
[[sym connectable] & body]
(let [con-sym (vary-meta sym assoc :tag 'java.sql.Connection)]
`(let [con-obj# ~connectable
bare-con# (p/unwrap con-obj#)]
(if (instance? java.sql.Connection bare-con#)
((^{:once true} fn* [~con-sym] ~@body) bare-con#)
(with-open [con# (get-connection con-obj#)]
((^{:once true} fn* [~con-sym] ~@body) con#))))))
(defmacro on-connection+options
"Given a connectable object, assumed to be wrapped with options, gets
a connection, rewraps it with those options, and binds it to `sym`,
then executes the `body` in that context.
This allows you to write generic, **wrapped** connectable code without
needing to know the exact type of an incoming datasource:
```clojure
(on-connection+options [conn datasource]
(execute! conn some-insert-sql)
(execute! conn some-update-sql))
```
If passed a `Connection` then that `Connection` is used as-is.
If passed a `Connectable` that wraps a `Connection`, then that
`Connectable` is used as-is.
Otherwise, creates a new `Connection` object from the connectable,
wraps that with options, executes the body, and automatically closes
the new `Connection` for you.
Note: the bound `sym` will be a **wrapped** connectable and not a plain
Java object, so you cannot call JDBC methods directly on it like you can
with `on-connection`."
[[sym connectable] & body]
`(let [con-obj# ~connectable]
(if (instance? java.sql.Connection (p/unwrap con-obj#))
((^{:once true} fn* [~sym] ~@body) con-obj#)
(with-open [con# (get-connection con-obj#)]
((^{:once true} fn* [~sym] ~@body)
(with-options con# (:options con-obj# {})))))))
(let [con-sym (vary-meta sym assoc :tag 'java.sql.Connection)
con-obj connectable]
`(cond (instance? java.sql.Connection ~con-obj)
((^{:once true} fn* [~con-sym] ~@body) ~con-obj)
(and (satisfies? p/Connectable ~con-obj)
(instance? java.sql.Connection (:connectable ~con-obj)))
((^{:once true} fn* [~con-sym] ~@body) (:connectable ~con-obj))
:else
(with-open [con# (get-connection ~con-obj)]
((^{:once true} fn* [~con-sym] ~@body) con#)))))
(defn transact
"Given a transactable object and a function (taking a `Connection`),
@ -440,9 +388,6 @@
Like `with-open`, if `with-transaction` creates a new `Connection` object,
it will automatically close it for you.
If you are working with default options via `with-options`, you might want
to use `with-transaction+options` instead.
The options map supports:
* `:isolation` -- `:none`, `:read-committed`, `:read-uncommitted`,
`:repeatable-read`, `:serializable`,
@ -453,24 +398,6 @@
(let [con (vary-meta sym assoc :tag 'java.sql.Connection)]
`(transact ~transactable (^{:once true} fn* [~con] ~@body) ~(or opts {}))))
(defn active-tx?
"Returns true if `next.jdbc` has a currently active transaction in the
current thread, else false.
With no arguments, tells you if any transaction is currently active.
With a `Connection` argument, tells you if a transaction is currently
active on that specific connection.
Note: transactions are a convention of operations on a `Connection` so
this predicate only reflects `next.jdbc/transact` and `next.jdbc/with-transaction`
operations -- it does not reflect any other operations on a `Connection`,
performed via JDBC interop directly."
([]
(boolean (seq @#'tx/*active-tx*)))
([con]
(contains? @#'tx/*active-tx* con)))
(defn with-options
"Given a connectable/transactable object and a set of (default) options
that should be used on all operations on that object, return a new
@ -480,44 +407,9 @@
return plain Java objects, so if you call any of those on this wrapped
object, you'll need to re-wrap the Java object `with-options` again. See
the Datasources, Connections & Transactions section of Getting Started for
more details, and some examples of use with these functions.
`with-transaction+options` exists to automatically rewrap a `Connection`
with the options from a `with-options` wrapper."
more details, and some examples of use with these functions."
[connectable opts]
(let [c (:connectable connectable)
o (:options connectable)]
(if (and c o)
(opts/->DefaultOptions c (merge o opts))
(opts/->DefaultOptions connectable opts))))
(defmacro with-transaction+options
"Given a transactable object, assumed to be wrapped with options, gets a
connection, rewraps it with those options, and binds it to `sym`, then
executes the `body` in that context, committing any changes if the body
completes successfully, otherwise rolling back any changes made.
Like `with-open`, if `with-transaction+options` creates a new `Connection`
object, it will automatically close it for you.
Note: the bound `sym` will be a **wrapped** connectable and not a plain
Java object, so you cannot call JDBC methods directly on it like you can
with `with-transaction`.
The options map supports:
* `:isolation` -- `:none`, `:read-committed`, `:read-uncommitted`,
`:repeatable-read`, `:serializable`,
* `:read-only` -- `true` / `false` (`true` will make the `Connection` readonly),
* `:rollback-only` -- `true` / `false` (`true` will make the transaction
rollback, even if it would otherwise succeed)."
[[sym transactable opts] & body]
`(let [tx# ~transactable]
(transact tx#
(^{:once true} fn*
[con#] ; this is the unwrapped java.sql.connection
(let [~sym (with-options con# (:options tx# {}))]
~@body))
~(or opts {}))))
(opts/->DefaultOptions connectable opts))
(defn with-logging
"Given a connectable/transactable object and a sql/params logging
@ -530,13 +422,8 @@
* `next.jdbc/plan`, `next.jdbc/execute-one!`, `next.jdbc/execute!`,
or `next.jdbc/prepare`
* the vector containing the SQL string and its parameters
Whatever the sql/params logging function returns will be passed as a
`state` argument to the optional result logging function. This means you can
use this mechanism to provide some timing information, since your sql/params
logging function can return the current system time, and your result logging
function can then calculate the elapsed time. There is an example of this in
the Naive Logging with Timing section of Getting Started.
`state` argument to the optional result logging function.
The result logging function, if provided, will be called with the
same symbol passed to the sql/params logging function, the `state`
@ -551,7 +438,7 @@
the Datasources, Connections & Transactions section of Getting Started for
more details, and some examples of use with these functions."
[connectable sql-logger & [result-logger]]
(logger/->SQLLogging connectable sql-logger result-logger (:options connectable)))
(logger/->SQLLogging connectable sql-logger result-logger))
(def snake-kebab-opts
"A hash map of options that will convert Clojure identifiers to

View file

@ -1,4 +1,4 @@
;; copyright (c) 2018-2025 Sean Corfield, all rights reserved
;; copyright (c) 2018-2021 Sean Corfield, all rights reserved
(ns next.jdbc.connection
"Standard implementations of `get-datasource` and `get-connection`.
@ -130,9 +130,22 @@
:host :none}
"timesten:direct" {:classname "com.timesten.jdbc.TimesTenDriver"
:dbname-separator ":dsn="
:host :none}
"xtdb" {:classname "xtdb.jdbc.Driver"
:port 5432}})
:host :none}})
(defn- ^Properties as-properties
"Convert any seq of pairs to a `java.util.Properties` instance."
[m]
(let [p (Properties.)]
(doseq [[k v] m]
(.setProperty p (name k) (str v)))
p))
(defn- get-driver-connection
"Common logic for loading the designated JDBC driver class and
obtaining the appropriate `Connection` object."
[url timeout etc]
(when timeout (DriverManager/setLoginTimeout timeout))
(DriverManager/getConnection url (as-properties etc)))
(def ^:private driver-cache
"An optimization for repeated calls to get-datasource, or for get-connection
@ -178,7 +191,7 @@
(str "jdbc:" subprotocol ":"
(or host-prefix (-> dbtype dbtypes :host-prefix (or "//")))
host
(when (and port (not= :none port)) (str ":" port))
(when port (str ":" port))
db-sep dbname))]
;; verify the datasource is loadable
(if-let [class-name (or classname (-> dbtype dbtypes :classname))]
@ -255,16 +268,6 @@
`.setJdbcUrl`). `clojure.java.data/to-java` is used to construct the
object and call the setters.
If you need to pass in connection URL parameters, it can be easier to use
`next.jdbc.connection/jdbc-url` to construct URL, e.g.,
(->pool HikariDataSource
{:jdbcUrl (jdbc-url {:dbtype .. :dbname .. :useSSL false})
:username .. :password ..})
Here we pass `:useSSL false` to `jdbc-url` so that it ends up in the
connection string, but pass `:username` and `:password` for the pool itself.
Note that the result is not type-hinted (because there's no common base
class or interface that can be assumed). In particular, connection pooled
datasource objects may need to be closed but they don't necessarily implement
@ -300,11 +303,6 @@
called on it to shutdown the datasource (and return a new startable
entity).
If `db-spec` contains `:init-fn`, that is assumed to be a function
that should be called on the newly-created datasource. This allows for
modification of (mutable) connection pooled datasource and/or some sort
of database initialization/setup to be called automatically.
By default, the datasource is shutdown by calling `.close` on it.
If the datasource class implements `java.io.Closeable` then a direct,
type-hinted call to `.close` will be used, with no reflection,
@ -322,9 +320,7 @@
(with-meta {}
{'com.stuartsierra.component/start
(fn [_]
(let [init-fn (:init-fn db-spec)
pool (->pool clazz (dissoc db-spec :init-fn))]
(when init-fn (init-fn pool))
(let [pool (->pool clazz db-spec)]
(with-meta (fn ^DataSource [] pool)
{'com.stuartsierra.component/stop
(fn [_]
@ -332,18 +328,11 @@
(component clazz db-spec close-fn))})))})))
(comment
(require '[com.stuartsierra.component :as component]
'[next.jdbc.sql :as sql])
(require '[com.stuartsierra.component :as component])
(import '(com.mchange.v2.c3p0 ComboPooledDataSource PooledDataSource)
'(com.zaxxer.hikari HikariDataSource))
(isa? PooledDataSource java.io.Closeable) ;=> false
(isa? HikariDataSource java.io.Closeable) ;=> true
;; create a pool with a combination of JDBC URL and username/password:
(->pool HikariDataSource
{:jdbcUrl
(jdbc-url {:dbtype "mysql" :dbname "clojure_test"
:useSSL false})
:username "root" :password (System/getenv "MYSQL_ROOT_PASSWORD")})
;; use c3p0 with default reflection-based closing function:
(def dbc (component ComboPooledDataSource
{:dbtype "mysql" :dbname "clojure_test"
@ -361,67 +350,15 @@
;; start the chosen datasource component:
(def ds (component/start dbc))
;; invoke datasource component to get the underlying javax.sql.DataSource:
(sql/get-by-id (ds) :fruit 1)
(next.jdbc.sql/get-by-id (ds) :fruit 1)
;; stop the component and close the pooled datasource:
(component/stop ds)
)
(component/stop ds))
(defn- string->url+etc
"Given a JDBC URL, return it with an empty set of options with no parsing."
[s]
[s {}])
(defn- as-properties
"Convert any seq of pairs to a `java.util.Properties` instance."
^Properties [m]
(let [p (Properties.)
as-is (set (:next.jdbc/as-is-properties m))]
(doseq [[k v] (dissoc m :next.jdbc/as-is-properties)]
(if (contains? as-is k)
(.put p (name k) v)
(.setProperty p (name k) (str v))))
p))
(defn uri->db-spec
"clojure.java.jdbc (and some users out there) considered the URI format
to be an acceptable JDBC URL, i.e., with credentials embedded in the string,
rather than as query parameters.
This function accepts a URI string, optionally prefixed with `jdbc:` and
returns a db-spec hash map."
[uri]
(let [{:keys [scheme userInfo host port path query]}
(j/from-java (java.net.URI. (str/replace uri #"^jdbc:" "")))
[user password] (when (seq userInfo) (str/split userInfo #":"))
properties (when (seq query)
(into {}
(map #(let [[k v] (str/split % #"=")]
[(keyword k) v]))
(str/split query #"\&")))]
(cond-> (assoc properties
:dbtype scheme
:host host
:port port)
(seq path) (assoc :dbname (subs path 1))
user (assoc :user user)
password (assoc :password password))))
(defn- get-driver-connection
"Common logic for loading the designated JDBC driver class and
obtaining the appropriate `Connection` object."
[url timeout etc]
(when timeout (DriverManager/setLoginTimeout timeout))
(try
(DriverManager/getConnection url (as-properties etc))
(catch Exception e
(try
(let [db-spec (uri->db-spec url)
[url' etc'] (spec->url+etc db-spec)]
(DriverManager/getConnection url' (as-properties (merge etc' etc))))
(catch Exception _
;; if the fallback fails too, throw the original exception
(throw e))))))
(defn- url+etc->datasource
"Given a JDBC URL and a map of options, return a `DataSource` that can be
used to obtain a new database connection."

View file

@ -1,4 +1,4 @@
;; copyright (c) 2020-2024 Sean Corfield, all rights reserved
;; copyright (c) 2020-2021 Sean Corfield, all rights reserved
(ns next.jdbc.datafy
"This namespace provides datafication of several JDBC object types,

View file

@ -1,4 +1,4 @@
;; copyright (c) 2019-2024 Sean Corfield, all rights reserved
;; copyright (c) 2019-2021 Sean Corfield, all rights reserved
(ns next.jdbc.date-time
"Optional namespace that extends `next.jdbc.prepare/SettableParameter`

View file

@ -1,4 +1,4 @@
;; copyright (c) 2020-2025 Sean Corfield, all rights reserved
;; copyright (c) 2020-2021 Sean Corfield, all rights reserved
(ns ^:no-doc next.jdbc.default-options
"Implementation of default options logic."
@ -8,10 +8,6 @@
(defrecord DefaultOptions [connectable options])
(extend-protocol p/Wrapped
DefaultOptions
(unwrap [this] (p/unwrap (:connectable this))))
(extend-protocol p/Sourceable
DefaultOptions
(get-datasource [this]

View file

@ -1,94 +0,0 @@
;; copyright (c) 2024 Sean Corfield, all rights reserved
(ns next.jdbc.defer
"The idea behind the next.jdbc.defer namespace is to provide a
way to defer the execution of a series of SQL statements until
a later time, but still provide a way for inserted keys to be
used in later SQL statements.
The principle is to provide a core subset of the next.jdbc
and next.jdbc.sql API that produces a data structure that
describes a series of SQL operations to be performed, that
are held in a dynamic var, and that can be executed at a
later time, in a transaction."
(:require [next.jdbc :as jdbc]
[next.jdbc.sql.builder :refer [for-delete for-insert for-update]]))
(set! *warn-on-reflection* true)
(def ^:private ^:dynamic *deferred* nil)
(defn execute-one!
"Given a vector containing a SQL statement and parameters, defer
execution of that statement."
([sql-p]
(execute-one! sql-p {}))
([sql-p opts]
(swap! *deferred* conj
{:sql-p sql-p
:key-fn (or (:key-fn opts) (comp first vals))
:key (:key opts)
:opts opts})))
(defn insert!
"Given a table name, and a data hash map, defer an insertion of the
data as a single row in the database."
([table key-map]
(insert! table key-map {}))
([table key-map opts]
(swap! *deferred* conj
{:sql-p (for-insert table key-map opts)
:key-fn (or (:key-fn opts) (comp first vals))
:key (:key opts)
:opts opts})))
(defn update!
"Given a table name, a hash map of columns and values to set, and
either a hash map of columns and values to search on or a vector
of a SQL where clause and parameters, defer an update on the table."
([table key-map where-params]
(update! table key-map where-params {}))
([table key-map where-params opts]
(swap! *deferred* conj
{:sql-p (for-update table key-map where-params opts)
:opts opts})))
(defn delete!
"Given a table name, and either a hash map of columns and values
to search on or a vector of a SQL where clause and parameters,
defer a delete on the table."
([table where-params]
(delete! table where-params {}))
([table where-params opts]
(swap! *deferred* conj
{:sql-p (for-delete table where-params opts)
:opts opts})))
(defn deferrable [transactable stmts]
(reify clojure.lang.IDeref
(deref [_]
(let [keys (atom {})]
(jdbc/with-transaction [conn transactable]
(doseq [{:keys [sql-p key-fn key opts]} @stmts]
(let [sql-p
(mapv (fn [v]
(if (keyword? v)
(if (contains? @keys v)
(get @keys v)
(throw (ex-info (str "Deferred key not found " v)
{:key v})))
v))
sql-p)
result (jdbc/execute-one! conn sql-p opts)]
(when key
(swap! keys assoc key (key-fn result))))))
@keys))))
(defn defer-ops [f]
(binding [*deferred* (atom [])]
(f)
*deferred*))
(defmacro with-deferred [connectable & body]
`(let [conn# ~connectable]
(deferrable conn# (defer-ops (^{:once true} fn* [] ~@body)))))

View file

@ -1,4 +1,4 @@
;; copyright (c) 2019-2024 Sean Corfield, all rights reserved
;; copyright (c) 2019-2021 Sean Corfield, all rights reserved
(ns next.jdbc.optional
"Builders that treat NULL SQL values as 'optional' and omit the
@ -11,8 +11,8 @@
(defrecord MapResultSetOptionalBuilder [^ResultSet rs rsmeta cols]
rs/RowBuilder
(->row [_this] (transient {}))
(column-count [_this] (count cols))
(->row [this] (transient {}))
(column-count [this] (count cols))
(with-column [this row i]
;; short-circuit on null to avoid column reading logic
(let [v (.getObject rs ^Integer i)]
@ -20,17 +20,17 @@
row
(rs/with-column-value this row (nth cols (dec i))
(rs/read-column-by-index v rsmeta i)))))
(with-column-value [_this row col v]
(with-column-value [this row col v]
;; ensure that even if this is adapted, we omit null columns
(if (nil? v)
row
(assoc! row col v)))
(row! [_this row] (persistent! row))
(row! [this row] (persistent! row))
rs/ResultSetBuilder
(->rs [_this] (transient []))
(with-row [_this mrs row]
(->rs [this] (transient []))
(with-row [this mrs row]
(conj! mrs row))
(rs! [_this mrs] (persistent! mrs)))
(rs! [this mrs] (persistent! mrs)))
(defn as-maps
"Given a `ResultSet` and options, return a `RowBuilder` / `ResultSetBuilder`
@ -76,7 +76,7 @@
locales where the lower case version of a character is not a valid SQL
entity name (e.g., Turkish)."
[^String s]
(.toLowerCase s Locale/US))
(.toLowerCase s (Locale/US)))
(defn as-lower-maps
"Given a `ResultSet` and options, return a `RowBuilder` / `ResultSetBuilder`
@ -117,25 +117,25 @@
(let [mrsb (builder-fn rs opts)]
(reify
rs/RowBuilder
(->row [_this] (rs/->row mrsb))
(column-count [_this] (rs/column-count mrsb))
(with-column [_this row i]
(->row [this] (rs/->row mrsb))
(column-count [this] (rs/column-count mrsb))
(with-column [this row i]
;; short-circuit on null to avoid column reading logic
(let [v (column-reader rs (:rsmeta mrsb) i)]
(if (nil? v)
row
(rs/with-column-value mrsb row (nth (:cols mrsb) (dec i))
(rs/read-column-by-index v (:rsmeta mrsb) i)))))
(with-column-value [_this row col v]
(with-column-value [this row col v]
;; ensure that even if this is adapted, we omit null columns
(if (nil? v)
row
(rs/with-column-value mrsb row col v)))
(row! [_this row] (rs/row! mrsb row))
(row! [this row] (rs/row! mrsb row))
rs/ResultSetBuilder
(->rs [_this] (rs/->rs mrsb))
(with-row [_this mrs row] (rs/with-row mrsb mrs row))
(rs! [_this mrs] (rs/rs! mrsb mrs))
(->rs [this] (rs/->rs mrsb))
(with-row [this mrs row] (rs/with-row mrsb mrs row))
(rs! [this mrs] (rs/rs! mrsb mrs))
clojure.lang.ILookup
(valAt [_this k] (get mrsb k))
(valAt [_this k not-found] (get mrsb k not-found))))))
(valAt [this k] (get mrsb k))
(valAt [this k not-found] (get mrsb k not-found))))))

View file

@ -1,4 +1,4 @@
;; copyright (c) 2020-2024 Sean Corfield, all rights reserved
;; copyright (c) 2020-2021 Sean Corfield, all rights reserved
(ns next.jdbc.plan
"Some helper functions that make common operations with `next.jdbc/plan`

View file

@ -1,4 +1,4 @@
;; copyright (c) 2018-2024 Sean Corfield, all rights reserved
;; copyright (c) 2018-2021 Sean Corfield, all rights reserved
(ns next.jdbc.prepare
"Mostly an implementation namespace for how `PreparedStatement` objects are
@ -26,13 +26,12 @@
(set! *warn-on-reflection* true)
(defprotocol SettableParameter
(defprotocol SettableParameter :extend-via-metadata true
"Protocol for setting SQL parameters in statement objects, which
can convert from Clojure values. The default implementation just
calls `.setObject` on the parameter value. It can be extended to
use other methods of `PreparedStatement` to convert and set parameter
values. Extension via metadata is supported."
:extend-via-metadata true
(set-parameter [val stmt ix]
"Convert a Clojure value into a SQL value and store it as the ix'th
parameter in the given SQL statement object."))

View file

@ -1,4 +1,4 @@
;; copyright (c) 2018-2025 Sean Corfield, all rights reserved
;; copyright (c) 2018-2021 Sean Corfield, all rights reserved
(ns next.jdbc.protocols
"This is the extensible core of the next generation java.jdbc library.
@ -11,16 +11,15 @@
(set! *warn-on-reflection* true)
(defprotocol Sourceable
(defprotocol Sourceable :extend-via-metadata true
"Protocol for producing a `javax.sql.DataSource`.
Implementations are provided for strings, hash maps (`db-spec` structures),
and also a `DataSource` (which just returns itself).
Extension via metadata is supported."
:extend-via-metadata true
(get-datasource ^javax.sql.DataSource [this]
"Produce a `javax.sql.DataSource`."))
"Produce a `javax.sql.DataSource`."))
(defprotocol Connectable
"Protocol for producing a new JDBC connection that should be closed when you
@ -38,8 +37,8 @@
`PreparedStatement`, and `Object`, on the assumption that an `Object` can be
turned into a `DataSource` and therefore used to get a `Connection`."
(-execute ^clojure.lang.IReduceInit [this sql-params opts]
"Produce a 'reducible' that, when reduced (with an initial value), executes
the SQL and processes the rows of the `ResultSet` directly.")
"Produce a 'reducible' that, when reduced, executes the SQL and
processes the rows of the `ResultSet` directly.")
(-execute-one [this sql-params opts]
"Executes the SQL or DDL and produces the first row of the `ResultSet`
as a fully-realized, datafiable hash map (by default).")
@ -55,23 +54,10 @@
(prepare ^java.sql.PreparedStatement [this sql-params opts]
"Produce a new `java.sql.PreparedStatement` for use with `with-open`."))
(defprotocol Transactable
(defprotocol Transactable :extend-via-metadata true
"Protocol for running SQL operations in a transaction.
Implementations are provided for `Connection`, `DataSource`, and `Object`
(on the assumption that an `Object` can be turned into a `DataSource`)."
:extend-via-metadata true
(-transact [this body-fn opts]
"Run the `body-fn` inside a transaction."))
(defprotocol Wrapped
"Protocol for (un)wrapping a `next.jdbc` connectable.
Implementations are provided for `Object` (identity) and `DefaultOptions`
and SQLLogging."
(unwrap [this]
"Unwrap the connectable to get the underlying connectable."))
(extend-protocol Wrapped
Object
(unwrap [this] this))

View file

@ -1,4 +1,4 @@
;; copyright (c) 2019-2024 Sean Corfield, all rights reserved
;; copyright (c) 2019-2021 Sean Corfield, all rights reserved
(ns next.jdbc.quoted
"Provides functions for use with the `:table-fn` and `:column-fn` options
@ -8,16 +8,11 @@
(set! *warn-on-reflection* true)
(defn strop
"Escape any embedded closing strop characters."
[s x e]
(str s (str/replace x (str e) (str e e)) e))
(defn ansi "ANSI \"quoting\"" [s] (str \" s \"))
(defn ansi "ANSI \"quoting\"" [s] (strop \" s \"))
(defn mysql "MySQL `quoting`" [s] (str \` s \`))
(defn mysql "MySQL `quoting`" [s] (strop \` s \`))
(defn sql-server "SQL Server [quoting]" [s] (strop \[ s \]))
(defn sql-server "SQL Server [quoting]" [s] (str \[ s \]))
(def oracle "Oracle \"quoting\" (ANSI)" ansi)
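;; A usage sketch: these functions are typically passed as :table-fn/:column-fn
;; options so that generated SQL quotes entity names (the table name and `ds`
;; below are assumed):
(comment
  (ansi "table-name")   ;; => "\"table-name\""
  (mysql "order")       ;; => "`order`"
  ;; e.g. (sql/insert! ds :user {:name "Sean"} {:table-fn ansi :column-fn ansi})
  )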

View file

@ -1,4 +1,4 @@
;; copyright (c) 2018-2024 Sean Corfield, all rights reserved
;; copyright (c) 2018-2021 Sean Corfield, all rights reserved
(ns next.jdbc.result-set
"An implementation of `ResultSet` handling functions.
@ -58,9 +58,6 @@
(mapv (fn [^Integer i] (keyword (.getColumnLabel rsmeta i)))
(range 1 (inc (if rsmeta (.getColumnCount rsmeta) 0)))))
(defn- validate [expr ^String msg]
(when-not expr (throw (IllegalArgumentException. msg))))
(defn get-modified-column-names
"Given `ResultSetMetaData`, return a vector of modified column names, each
qualified by the table from which it came.
@ -69,8 +66,8 @@
[^ResultSetMetaData rsmeta opts]
(let [qf (:qualifier-fn opts)
lf (:label-fn opts)]
(validate qf ":qualifier-fn is required")
(validate lf ":label-fn is required")
(assert qf ":qualifier-fn is required")
(assert lf ":label-fn is required")
(mapv (fn [^Integer i]
(if-let [q (some-> (get-table-name rsmeta i) (qf) (not-empty))]
(keyword q (-> (.getColumnLabel rsmeta i) (lf)))
@ -84,7 +81,7 @@
Requires the `:label-fn` option."
[^ResultSetMetaData rsmeta opts]
(let [lf (:label-fn opts)]
(validate lf ":label-fn is required")
(assert lf ":label-fn is required")
(mapv (fn [^Integer i] (keyword (lf (.getColumnLabel rsmeta i))))
(range 1 (inc (if rsmeta (.getColumnCount rsmeta) 0))))))
@ -93,7 +90,7 @@
locales where the lower case version of a character is not a valid SQL
entity name (e.g., Turkish)."
[^String s]
(.toLowerCase s Locale/US))
(.toLowerCase s (Locale/US)))
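;; A worked example of why an explicit Locale matters here: in a Turkish
;; locale, uppercase I lower-cases to a dotless ı:
(comment
  (.toLowerCase "ID" java.util.Locale/US)            ;; => "id"
  (.toLowerCase "ID" (java.util.Locale. "tr" "TR"))) ;; => "ıd"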
(defn get-lower-column-names
"Given `ResultSetMetaData`, return a vector of lower-case column names, each
@ -109,13 +106,12 @@
(get-unqualified-modified-column-names rsmeta
(assoc opts :label-fn lower-case)))
(defprotocol ReadableColumn
(defprotocol ReadableColumn :extend-via-metadata true
"Protocol for reading objects from the `java.sql.ResultSet`. Default
implementations (for `Object` and `nil`) return the argument, and the
`Boolean` implementation ensures a canonicalized `true`/`false` value,
but it can be extended to provide custom behavior for special types.
Extension via metadata is supported."
:extend-via-metadata true
(read-column-by-label [val label]
"Function for transforming values after reading them via a column label.")
(read-column-by-index [val rsmeta idx]
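;; A minimal sketch of extending this protocol, e.g. to read java.sql.Date
;; columns back as java.time.LocalDate (the type is chosen for illustration):
(comment
  (extend-protocol ReadableColumn
    java.sql.Date
    (read-column-by-label [v _] (.toLocalDate v))
    (read-column-by-index [v _ _] (.toLocalDate v))))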
@ -186,37 +182,37 @@
(let [builder (builder-fn rs opts)]
(reify
RowBuilder
(->row [_this] (->row builder))
(column-count [_this] (column-count builder))
(->row [this] (->row builder))
(column-count [this] (column-count builder))
(with-column [this row i]
(with-column-value this row (nth (:cols builder) (dec i))
(column-by-index-fn builder rs i)))
(with-column-value [_this row col v]
(with-column-value [this row col v]
(with-column-value builder row col v))
(row! [_this row] (row! builder row))
(row! [this row] (row! builder row))
ResultSetBuilder
(->rs [_this] (->rs builder))
(with-row [_this mrs row] (with-row builder mrs row))
(rs! [_this mrs] (rs! builder mrs))
(->rs [this] (->rs builder))
(with-row [this mrs row] (with-row builder mrs row))
(rs! [this mrs] (rs! builder mrs))
clojure.lang.ILookup
(valAt [_this k] (get builder k))
(valAt [_this k not-found] (get builder k not-found))))))
(valAt [this k] (get builder k))
(valAt [this k not-found] (get builder k not-found))))))
(defrecord MapResultSetBuilder [^ResultSet rs rsmeta cols]
RowBuilder
(->row [_this] (transient {}))
(column-count [_this] (count cols))
(->row [this] (transient {}))
(column-count [this] (count cols))
(with-column [this row i]
(with-column-value this row (nth cols (dec i))
(read-column-by-index (.getObject rs ^Integer i) rsmeta i)))
(with-column-value [_this row col v]
(with-column-value [this row col v]
(assoc! row col v))
(row! [_this row] (persistent! row))
(row! [this row] (persistent! row))
ResultSetBuilder
(->rs [_this] (transient []))
(with-row [_this mrs row]
(->rs [this] (transient []))
(with-row [this mrs row]
(conj! mrs row))
(rs! [_this mrs] (persistent! mrs)))
(rs! [this mrs] (persistent! mrs)))
(defn as-maps
"Given a `ResultSet` and options, return a `RowBuilder` / `ResultSetBuilder`
@ -278,10 +274,6 @@
:qualifier-fn ->kebab-case
:label-fn ->kebab-case)))
(comment
(->kebab-case "_id") ;;=> "id"!!
)
(defn as-unqualified-kebab-maps
"Given a `ResultSet` and options, return a `RowBuilder` / `ResultSetBuilder`
that produces bare vectors of hash map rows, with simple, kebab-case keys."
@ -323,26 +315,26 @@
"An example column-reader that still uses `.getObject` but expands CLOB
columns into strings."
[^ResultSet rs ^ResultSetMetaData _ ^Integer i]
(let [value (.getObject rs i)]
(when-let [value (.getObject rs i)]
(cond-> value
(instance? Clob value)
(clob->string))))
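;; A usage sketch: plugging this reader in via as-maps-adapter so CLOB columns
;; come back as strings (`jdbc` = next.jdbc; `ds` and the table are assumed):
(comment
  (jdbc/execute! ds ["select * from documents"]
                 {:builder-fn (as-maps-adapter as-maps clob-column-reader)}))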
(defrecord ArrayResultSetBuilder [^ResultSet rs rsmeta cols]
RowBuilder
(->row [_this] (transient []))
(column-count [_this] (count cols))
(->row [this] (transient []))
(column-count [this] (count cols))
(with-column [this row i]
(with-column-value this row nil
(read-column-by-index (.getObject rs ^Integer i) rsmeta i)))
(with-column-value [_this row _ v]
(with-column-value [this row _ v]
(conj! row v))
(row! [_this row] (persistent! row))
(row! [this row] (persistent! row))
ResultSetBuilder
(->rs [_this] (transient [cols]))
(with-row [_this ars row]
(->rs [this] (transient [cols]))
(with-row [this ars row]
(conj! ars row))
(rs! [_this ars] (persistent! ars)))
(rs! [this ars] (persistent! ars)))
(defn as-arrays
"Given a `ResultSet` and options, return a `RowBuilder` / `ResultSetBuilder`
@ -451,7 +443,7 @@
(definterface MapifiedResultSet)
(defprotocol InspectableMapifiedResultSet
(defprotocol InspectableMapifiedResultSet :extend-via-metadata true
"Protocol for exposing aspects of the (current) result set via functions.
The intent here is to expose information that is associated with either
@ -460,7 +452,6 @@
`next.jdbc/plan`, including situations where the reducing function has
to realize a row by calling `datafiable-row` but still wants to call
these functions on the (realized) row."
:extend-via-metadata true
(row-number [this]
"Return the current 1-based row number, if available.
@ -492,85 +483,81 @@
realize the full row explicitly before performing other
(metadata-preserving) operations on it."
[^ResultSet rs opts]
(let [builder (delay ((get opts :builder-fn as-maps) rs opts))
name-fn (if (contains? opts :column-fn)
(comp (get opts :column-fn) name)
name)]
(let [builder (delay ((get opts :builder-fn as-maps) rs opts))]
(reify
MapifiedResultSet
;; marker, just for printing resolution
InspectableMapifiedResultSet
(row-number [_this] (.getRow rs))
(column-names [_this] (:cols @builder))
(metadata [_this] (d/datafy (.getMetaData rs)))
(row-number [this] (.getRow rs))
(column-names [this] (:cols @builder))
(metadata [this] (d/datafy (.getMetaData rs)))
clojure.lang.IPersistentMap
(assoc [_this k v]
(assoc [this k v]
(assoc (row-builder @builder) k v))
(assocEx [_this k v]
(assocEx [this k v]
(.assocEx ^clojure.lang.IPersistentMap (row-builder @builder) k v))
(without [_this k]
(without [this k]
(dissoc (row-builder @builder) k))
java.lang.Iterable ; Java 7 compatible: no forEach / spliterator
(iterator [_this]
(iterator [this]
(.iterator ^java.lang.Iterable (row-builder @builder)))
clojure.lang.Associative
(containsKey [_this k]
(containsKey [this k]
(try
(.getObject rs ^String (name-fn k))
(.getObject rs (name k))
true
(catch SQLException _
false)))
(entryAt [_this k]
(entryAt [this k]
(try
(clojure.lang.MapEntry. k (read-column-by-label
(.getObject rs ^String (name-fn k))
^String (name-fn k)))
(.getObject rs (name k))
(name k)))
(catch SQLException _)))
clojure.lang.Counted
(count [_this]
(count [this]
(column-count @builder))
clojure.lang.IPersistentCollection
(cons [_this obj]
(let [row (row-builder @builder)]
(conj row obj)))
(empty [_this]
(cons [this obj]
(cons obj (seq (row-builder @builder))))
(empty [this]
{})
(equiv [_this obj]
(equiv [this obj]
(.equiv ^clojure.lang.IPersistentCollection (row-builder @builder) obj))
;; we support get with a numeric key for array-based builders:
clojure.lang.ILookup
(valAt [_this k]
(valAt [this k]
(try
(if (number? k)
(let [^Integer i (inc k)]
(read-column-by-index (.getObject rs i) (:rsmeta @builder) i))
(read-column-by-label (.getObject rs ^String (name-fn k)) ^String (name-fn k)))
(read-column-by-label (.getObject rs (name k)) (name k)))
(catch SQLException _)))
(valAt [_this k not-found]
(valAt [this k not-found]
(try
(if (number? k)
(let [^Integer i (inc k)]
(read-column-by-index (.getObject rs i) (:rsmeta @builder) i))
(read-column-by-label (.getObject rs ^String (name-fn k)) ^String (name-fn k)))
(read-column-by-label (.getObject rs (name k)) (name k)))
(catch SQLException _
not-found)))
;; we support nth for array-based builders (i is primitive int here!):
clojure.lang.Indexed
(nth [_this i]
(nth [this i]
(try
(let [i (inc i)]
(read-column-by-index (.getObject rs i) (:rsmeta @builder) i))
(catch SQLException _)))
(nth [_this i not-found]
(nth [this i not-found]
(try
(let [i (inc i)]
(read-column-by-index (.getObject rs i) (:rsmeta @builder) i))
@ -578,11 +565,11 @@
not-found)))
clojure.lang.Seqable
(seq [_this]
(seq [this]
(seq (row-builder @builder)))
DatafiableRow
(datafiable-row [_this connectable opts]
(datafiable-row [this connectable opts]
;; since we have to call these eagerly, we trap any exceptions so
;; that they can be thrown when the actual functions are called
(let [row (try (.getRow rs) (catch Throwable t t))
@ -631,9 +618,11 @@
;; in reality, this is going to be over-optimistic and will likely cause `nav`
;; to fail on attempts to navigate into result sets that are not hash maps
(datafiable-row [this connectable opts]
(vary-meta this assoc
`core-p/datafy (navize-row connectable opts)
`core-p/nav (navable-row connectable opts))))
(vary-meta
this
assoc
`core-p/datafy (navize-row connectable opts)
`core-p/nav (navable-row connectable opts))))
(defn datafiable-result-set
"Given a ResultSet, a connectable, and an options hash map, return a fully
@ -913,7 +902,7 @@
(first sql-params)
(rest sql-params)
opts)]
(reduce-stmt stmt f init opts)))
(reduce-stmt stmt f init opts)))
r/CollFold
(coll-fold [_ n combinef reducef]
(with-open [con (p/get-connection this opts)
@ -929,12 +918,12 @@
(first sql-params)
(rest sql-params)
opts)]
(if-let [rs (stmt->result-set stmt opts)]
(let [builder-fn (get opts :builder-fn as-maps)
builder (builder-fn rs opts)]
(when (.next rs)
(datafiable-row (row-builder builder) this opts)))
{:next.jdbc/update-count (.getUpdateCount stmt)})))
(if-let [rs (stmt->result-set stmt opts)]
(let [builder-fn (get opts :builder-fn as-maps)
builder (builder-fn rs opts)]
(when (.next rs)
(datafiable-row (row-builder builder) this opts)))
{:next.jdbc/update-count (.getUpdateCount stmt)})))
(-execute-all [this sql-params opts]
(with-open [con (p/get-connection this opts)
stmt (prepare/create con
@ -958,14 +947,14 @@
(reify
clojure.lang.IReduceInit
(reduce [_ f init]
(reduce-stmt this f init (merge {:return-keys true} opts)))
(reduce-stmt this f init (assoc opts :return-keys true)))
r/CollFold
(coll-fold [_ n combinef reducef]
(fold-stmt this n combinef reducef (.getConnection this)
(merge {:return-keys true} opts)))
(assoc opts :return-keys true)))
(toString [_] "`IReduceInit` from `plan` -- missing reduction?")))
(-execute-one [this _ opts]
(if-let [rs (stmt->result-set this (merge {:return-keys true} opts))]
(if-let [rs (stmt->result-set this (assoc opts :return-keys true))]
(let [builder-fn (get opts :builder-fn as-maps)
builder (builder-fn rs opts)]
(when (.next rs)
@ -976,17 +965,17 @@
(if (:multi-rs opts)
(loop [go (.execute this) acc []]
(if-let [rs (stmt->result-set-update-count
(.getConnection this) this go (merge {:return-keys true} opts))]
(.getConnection this) this go (assoc opts :return-keys true))]
(recur (.getMoreResults this) (conj acc rs))
acc))
(if-let [rs (stmt->result-set this (merge {:return-keys true} opts))]
(if-let [rs (stmt->result-set this (assoc opts :return-keys true))]
(datafiable-result-set rs (.getConnection this) opts)
[{:next.jdbc/update-count (.getUpdateCount this)}])))
java.sql.Statement
(-execute [this sql-params opts]
(validate (= 1 (count sql-params))
"Parameters cannot be provided when executing a non-prepared Statement")
(assert (= 1 (count sql-params))
"Parameters cannot be provided when executing a non-prepared Statement")
(reify
clojure.lang.IReduceInit
(reduce [_ f init]
@ -997,8 +986,8 @@
(.getConnection this) opts))
(toString [_] "`IReduceInit` from `plan` -- missing reduction?")))
(-execute-one [this sql-params opts]
(validate (= 1 (count sql-params))
"Parameters cannot be provided when executing a non-prepared Statement")
(assert (= 1 (count sql-params))
"Parameters cannot be provided when executing a non-prepared Statement")
(if-let [rs (stmt-sql->result-set this (first sql-params))]
(let [builder-fn (get opts :builder-fn as-maps)
builder (builder-fn rs opts)]
@ -1007,12 +996,12 @@
(.getConnection this) opts)))
{:next.jdbc/update-count (.getUpdateCount this)}))
(-execute-all [this sql-params opts]
(validate (= 1 (count sql-params))
"Parameters cannot be provided when executing a non-prepared Statement")
(assert (= 1 (count sql-params))
"Parameters cannot be provided when executing a non-prepared Statement")
(if (:multi-rs opts)
(loop [go (.execute this (first sql-params)) acc []]
(if-let [rs (stmt->result-set-update-count
(.getConnection this) this go (merge {:return-keys true} opts))]
(.getConnection this) this go (assoc opts :return-keys true))]
(recur (.getMoreResults this) (conj acc rs))
acc))
(if-let [rs (stmt-sql->result-set this (first sql-params))]
@ -1030,61 +1019,12 @@
(defn- default-schema
"The default schema lookup rule for column names.
We have a foreign key column suffix convention of `<table><fk>` or
`<table>_<fk>`, which maps to a (primary) key in the `<table>` called
`<pk>`.
By default, both `<fk>` and `<pk>` are assumed to be `id`. That can be
overridden by the `:schema-opts` hash map in the options:
* `:fk-suffix` -- the suffix for foreign key columns, default `id`
* `:pk` -- the (primary) key column name, default `id`
* `:pk-fn` -- a function to apply to the table name and the value of `:pk`
to get the (primary) key column name, default `(constantly <pk>)`."
[opts col]
(let [fk-suffix (get-in opts [:schema-opts :fk-suffix] "id")
pk (get-in opts [:schema-opts :pk] "id")
pk-fn (get-in opts [:schema-opts :pk-fn] (constantly (name pk)))
[_ table] (re-find (re-pattern (str "(?i)^(.+?)[-_]?"
(name fk-suffix)
"$"))
(name col))]
If a column name ends with `_id` or `id`, it is assumed to be a foreign key
into the table identified by the first part of the column name."
[col]
(let [[_ table] (re-find #"(?i)^(.+?)_?id$" (name col))]
(when table
[(keyword table) (keyword (pk-fn table pk))])))
(comment
(default-schema {} :userstatusid)
(default-schema {} :userstatus_id)
(default-schema {} :user_statusid)
(default-schema {:schema-opts {:fk-suffix "did"}} :user_id)
(default-schema {:schema-opts {:fk-suffix "did"}} :user_did)
(default-schema {:schema-opts {:fk-suffix "did"}} :user-did)
(default-schema {:schema-opts {:fk-suffix "(did|id)"}} :user_id)
(default-schema {:schema-opts {:fk-suffix "(did|id)"}} :user_did)
(default-schema {:schema-opts {:fk-suffix "(did|id)"}} :user-did)
(default-schema {:schema-opts {:fk-suffix "(did|id)"
:pk :did}} :user_did)
(default-schema {:schema-opts {:fk-suffix "(did|id)"
:pk :did
:pk-fn (fn [table pk]
(if (= "user" table)
"id"
pk))}}
:user_did)
(default-schema {:schema-opts {:fk-suffix "(did|id)"
:pk :did
:pk-fn (fn [table pk]
(if (= "user" table)
"id"
pk))}}
:user-did)
(default-schema {:schema-opts {:fk-suffix "(did|id)"
:pk :did
:pk-fn (fn [table pk]
(if (= "user" table)
"id"
pk))}}
:book_did)
)
[(keyword table) :id])))
(defn- expand-schema
"Given a (possibly nil) schema entry, return it expanded to a triple of:
@ -1126,12 +1066,49 @@
(defn- navize-row
"Given a connectable object, return a function that knows how to turn a row
into a `nav`igable object.
into a `nav`igable object.
See navable-row below for more details."
A `:schema` option can provide a map from qualified column names
(`:<table>/<column>`) to tuples that indicate for which table they are a
foreign key, the name of the key within that table, and (optionally) the
cardinality of that relationship (`:many`, `:one`).
If no `:schema` item is provided for a column, the convention of `<table>id` or
`<table>_id` is used, and the assumption is that such columns are foreign keys
in the `<table>` portion of their name, the key is called `id`, and the
cardinality is `:one`.
Rows are looked up using `-execute-all` or `-execute-one`, and the `:table-fn`
option, if provided, is applied to both the assumed table name and the
assumed foreign key column name."
[connectable opts]
(fn [row]
(vary-meta row assoc `core-p/nav (navable-row connectable opts))))
(vary-meta
row
assoc
`core-p/nav (fn [_ k v]
(try
(let [[table fk cardinality]
(expand-schema k (or (get-in opts [:schema k])
(default-schema k)))]
(if (and fk connectable)
(let [entity-fn (:table-fn opts identity)
exec-fn! (if (= :many cardinality)
p/-execute-all
p/-execute-one)]
(exec-fn! connectable
[(str "SELECT * FROM "
(entity-fn (name table))
" WHERE "
(entity-fn (name fk))
" = ?")
v]
opts))
v))
(catch Exception _
;; assume an exception means we just cannot
;; navigate anywhere, so return just the value
v))))))
(defn- navable-row
"Given a connectable object, return a function that knows how to `nav`
@ -1147,33 +1124,30 @@
in the `<table>` portion of their name, the key is called `id`, and the
cardinality is `:one`.
That convention can in turn be modified via the `:schema-opts` option.
Rows are looked up using `-execute-all` or `-execute-one`, and the `:table-fn`
option, if provided, is applied to the assumed table name and `:column-fn` if
provided to the assumed foreign key column name."
option, if provided, is applied to both the assumed table name and the
assumed foreign key column name."
[connectable opts]
(fn [_ k v]
(try
(let [[table fk cardinality]
(expand-schema k (or (get-in opts [:schema k])
(default-schema opts k)))]
(default-schema k)))]
(if (and fk connectable)
(let [table-fn (:table-fn opts identity)
column-fn (:column-fn opts identity)
(let [entity-fn (:table-fn opts identity)
exec-fn! (if (= :many cardinality)
p/-execute-all
p/-execute-one)]
(exec-fn! connectable
[(str "SELECT * FROM "
(table-fn (name table))
(entity-fn (name table))
" WHERE "
(column-fn (name fk))
(entity-fn (name fk))
" = ?")
v]
opts))
v))
(catch Exception _
;; assume an exception means we just cannot
;; navigate anywhere, so return just the value
;; assume an exception means we just cannot
;; navigate anywhere, so return just the value
v))))
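;; A usage sketch of navigation driven by the convention above: given a row
;; with an address_id column, nav fetches the matching row from the address
;; table (`jdbc` = next.jdbc; table/column names and `ds` are assumed):
(comment
  (require '[clojure.datafy :as d])
  (let [row (jdbc/execute-one! ds ["select * from person where id = ?" 1])]
    (d/nav (d/datafy row) :person/address_id (:person/address_id row))))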

View file

@ -1,4 +1,4 @@
;; copyright (c) 2019-2024 Sean Corfield, all rights reserved
;; copyright (c) 2019-2021 Sean Corfield, all rights reserved
(ns next.jdbc.specs
"Specs for the core API of next.jdbc.
@ -35,8 +35,7 @@
(s/def ::host (s/or :name string?
:none #{:none}))
(s/def ::host-prefix string?)
(s/def ::port (s/or :port pos-int?
:none #{:none}))
(s/def ::port pos-int?)
(s/def ::db-spec-map (s/keys :req-un [::dbtype ::dbname]
:opt-un [::classname
::user ::password
@ -110,7 +109,7 @@
:opts (s/? ::opts-map)))
(s/fdef jdbc/prepare
:args (s/cat :connection ::proto-connectable
:args (s/cat :connection ::connection
:sql-params ::sql-params
:opts (s/? ::opts-map)))
@ -151,30 +150,11 @@
:opts ::opts-map))
(s/fdef jdbc/with-transaction
:args (s/cat :binding (s/and vector?
(s/cat :sym simple-symbol?
:transactable ::transactable
:opts (s/? any?)))
:body (s/* any?)))
(s/fdef jdbc/with-transaction+options
:args (s/cat :binding (s/and vector?
(s/cat :sym simple-symbol?
:transactable ::transactable
:opts (s/? any?)))
:body (s/* any?)))
(s/fdef jdbc/on-connection
:args (s/cat :binding (s/and vector?
(s/cat :sym simple-symbol?
:connectable ::connectable))
:body (s/* any?)))
(s/fdef jdbc/on-connection+options
:args (s/cat :binding (s/and vector?
(s/cat :sym simple-symbol?
:connectable ::connectable))
:body (s/* any?)))
:args (s/cat :binding (s/and vector?
(s/cat :sym simple-symbol?
:transactable ::transactable
:opts (s/? ::opts-map)))
:body (s/* any?)))
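;; A usage sketch of the macro shapes these specs describe (`ds` and the
;; accounts table are assumed):
(comment
  (jdbc/with-transaction [tx ds {:isolation :serializable}]
    (jdbc/execute! tx ["update accounts set balance = balance - 10 where id = ?" 1]))
  (jdbc/on-connection [conn ds]
    (jdbc/execute-one! conn ["select count(*) as n from accounts"])))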
(s/fdef connection/->pool
:args (s/cat :clazz #(instance? Class %)
@ -205,10 +185,10 @@
:with-rows-and-columns
(s/and (s/cat :connectable ::connectable
:table keyword?
:cols (s/coll-of keyword? :kind sequential?)
:rows (s/coll-of (s/coll-of any?
:kind sequential?
:min-count 1)
:cols (s/coll-of keyword?
:kind sequential?
:min-count 1)
:rows (s/coll-of (s/coll-of any? :kind sequential?)
:kind sequential?)
:opts (s/? ::opts-map))
#(apply = (count (:cols %))
@ -216,7 +196,9 @@
:with-hash-maps
(s/cat :connectable ::connectable
:table keyword?
:hash-maps (s/coll-of map? :kind sequential?)
:hash-maps (s/coll-of map?
:kind sequential?
:min-count 1)
:opts (s/? ::opts-map))))
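;; A usage sketch of the two insert-multi! calling conventions above
;; (`ds` and the fruit table are assumed):
(comment
  ;; columns + rows:
  (sql/insert-multi! ds :fruit
                     [:name :appearance]
                     [["Kiwi" "green"] ["Plum" "purple"]])
  ;; hash maps, all with the same keys:
  (sql/insert-multi! ds :fruit
                     [{:name "Kiwi" :appearance "green"}
                      {:name "Plum" :appearance "purple"}]))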
(s/fdef sql/query
@ -232,15 +214,6 @@
:all #{:all})
:opts (s/? ::opts-map)))
(s/fdef sql/aggregate-by-keys
:args (s/cat :connectable ::connectable
:table keyword?
:aggregate string?
:key-map (s/or :example ::example-map
:where ::sql-params
:all #{:all})
:opts (s/? ::opts-map)))
(s/fdef sql/get-by-id
:args (s/alt :with-id (s/cat :connectable ::connectable
:table keyword?

View file

@ -1,4 +1,4 @@
;; copyright (c) 2019-2024 Sean Corfield, all rights reserved
;; copyright (c) 2019-2022 Sean Corfield, all rights reserved
(ns next.jdbc.sql
"Some utility functions that make common operations easier by
@ -21,11 +21,10 @@
In addition, `find-by-keys` supports `:order-by` to add an `ORDER BY`
clause to the generated SQL."
(:require [clojure.string :as str]
[next.jdbc :refer [execute! execute-batch! execute-one!]]
(:require [next.jdbc :refer [execute! execute-one! execute-batch!]]
[next.jdbc.sql.builder
:refer [for-delete for-insert for-insert-multi for-query
for-update]]))
:refer [for-delete for-insert for-insert-multi
for-query for-update]]))
(set! *warn-on-reflection* true)
@ -53,8 +52,8 @@
generated keys.
Given a connectable object, a table name, a sequence of hash maps of data,
which all have the same set of keys, inserts the data as multiple rows in
the database and attempts to return a vector of maps of generated keys.
inserts the data as multiple rows in the database and attempts to return
a vector of maps of generated keys.
If called with `:batch` true will call `execute-batch!` - see its documentation
for situations in which the generated keys may or may not be returned as well as
@ -79,9 +78,7 @@
(throw (IllegalArgumentException.
"insert-multi! hash maps must all have the same keys")))
(insert-multi! connectable table cols (map ->row hash-maps-or-cols) opts-or-rows))
(if (map? opts-or-rows)
(insert-multi! connectable table hash-maps-or-cols [] opts-or-rows)
(insert-multi! connectable table hash-maps-or-cols opts-or-rows {}))))
(insert-multi! connectable table hash-maps-or-cols opts-or-rows {})))
([connectable table cols rows opts]
(if (seq rows)
(let [opts (merge (:options connectable) opts)
@ -139,42 +136,6 @@
(let [opts (merge (:options connectable) opts)]
(execute! connectable (for-query table key-map opts) opts))))
(defn aggregate-by-keys
"A wrapper over `find-by-keys` that additionally takes an aggregate SQL
expression (a string), and returns just a single result: the value of that
aggregate for the matching rows.
Accepts all the same options as `find-by-keys` except `:columns` since that
is used internally by this wrapper to pass the aggregate expression in."
([connectable table aggregate key-map]
(aggregate-by-keys connectable table aggregate key-map {}))
([connectable table aggregate key-map opts]
(let [opts (merge (:options connectable) opts)
_
(when-not (string? aggregate)
(throw (IllegalArgumentException.
"aggregate-by-keys requires a string aggregate expression")))
_
(when (:columns opts)
(throw (IllegalArgumentException.
"aggregate-by-keys does not support the :columns option")))
;; this should be unique enough as an alias to never clash with
;; a real column name in anyone's tables -- in addition it is
;; stable for a given aggregate expression so it should allow
;; for query caching in the JDBC driver:
;; (we use abs to avoid negative hash codes which would produce
;; a hyphen in the alias name which is not valid in SQL identifiers)
total-name (str "next_jdbc_aggregate_"
(Math/abs (.hashCode ^String aggregate)))
total-column (keyword total-name)
;; because some databases return uppercase column names:
total-col-u (keyword (str/upper-case total-name))]
(-> (find-by-keys connectable table key-map
(assoc opts :columns [[aggregate total-column]]))
(first)
(as-> row (or (get row total-column) (get row total-col-u)))))))
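;; A usage sketch (`ds` and the fruit table/columns are assumed):
(comment
  (aggregate-by-keys ds :fruit "count(*)" :all)
  (aggregate-by-keys ds :fruit "max(cost)" {:appearance "red"}))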
(defn get-by-id
"Syntactic sugar over `execute-one!` to make certain common queries easier.

View file

@ -1,10 +1,10 @@
;; copyright (c) 2019-2024 Sean Corfield, all rights reserved
;; copyright (c) 2019-2022 Sean Corfield, all rights reserved
(ns next.jdbc.sql.builder
"Some utility functions for building SQL strings.
These were originally private functions in `next.jdbc.sql` but
they may prove useful to developers who want to write their own
they may proof useful to developers who want to write their own
'SQL sugar' functions, such as a database-specific `upsert!` etc."
(:require [clojure.string :as str]))
@ -70,9 +70,6 @@
[key-map opts]
(as-cols (keys key-map) opts))
(defn- validate [expr ^String msg]
(when-not expr (throw (IllegalArgumentException. msg))))
(defn by-keys
"Given a hash map of column names and values and a clause type
(`:set`, `:where`), return a vector of a SQL clause and its parameters.
@ -87,7 +84,7 @@
[(conj conds (str e " = ?")) (conj params v)])))
[[] []]
key-map)]
(validate (seq where) "key-map may not be empty")
(assert (seq where) "key-map may not be empty")
(into [(str (str/upper-case (safe-name clause)) " "
(str/join (if (= :where clause) " AND " ", ") where))]
params)))
@ -125,7 +122,7 @@
(let [entity-fn (:table-fn opts identity)
params (as-keys key-map opts)
places (as-? key-map opts)]
(validate (seq key-map) "key-map may not be empty")
(assert (seq key-map) "key-map may not be empty")
(into [(str "INSERT INTO " (entity-fn (safe-name table))
" (" params ")"
" VALUES (" places ")"
@ -147,14 +144,15 @@
If `:suffix` is provided in `opts`, that string is appended to the
`INSERT ...` statement."
[table cols rows opts]
(validate (apply = (count cols) (map count rows))
"column counts are not consistent across cols and rows")
(assert (apply = (count cols) (map count rows))
"column counts are not consistent across cols and rows")
;; to avoid generating bad SQL
(validate (seq cols) "cols may not be empty")
(validate (seq rows) "rows may not be empty")
(assert (seq cols) "cols may not be empty")
(assert (seq rows) "rows may not be empty")
(let [table-fn (:table-fn opts identity)
column-fn (:column-fn opts identity)
batch? (:batch opts)
params (as-cols cols opts)
params (str/join ", " (map (comp column-fn name) cols))
places (as-? (first rows) opts)]
(into [(str "INSERT INTO " (table-fn (safe-name table))
" (" params ")"
@ -167,12 +165,6 @@
(if batch? identity cat)
rows)))
(comment
(as-cols [:aa :bb :cc] {})
(for-insert-multi :table [:aa :bb :cc] [[1 2 3] [4 5 6]]
{:table-fn str/upper-case :column-fn str/capitalize})
)
(defn for-order-col
"Given a column name, or a pair of column name and direction,
return the sub-clause for addition to `ORDER BY`."
@ -198,7 +190,7 @@
[order-by opts]
(when-not (vector? order-by)
(throw (IllegalArgumentException. ":order-by must be a vector")))
(validate (seq order-by) ":order-by may not be empty")
(assert (seq order-by) ":order-by may not be empty")
(str "ORDER BY "
(str/join ", " (map #(for-order-col % opts) order-by))))

View file

@ -1,4 +1,4 @@
;; copyright (c) 2021-2025 Sean Corfield, all rights reserved
;; copyright (c) 2021 Sean Corfield, all rights reserved
(ns ^:no-doc next.jdbc.sql-logging
"Implementation of sql-logging logic."
@ -6,11 +6,7 @@
(set! *warn-on-reflection* true)
(defrecord SQLLogging [connectable sql-logger result-logger options])
(extend-protocol p/Wrapped
SQLLogging
(unwrap [this] (p/unwrap (:connectable this))))
(defrecord SQLLogging [connectable sql-logger result-logger])
(extend-protocol p/Sourceable
SQLLogging

View file

@ -1,4 +1,4 @@
;; copyright (c) 2018-2024 Sean Corfield, all rights reserved
;; copyright (c) 2018-2021 Sean Corfield, all rights reserved
(ns next.jdbc.transaction
"Implementation of SQL transaction logic.
@ -38,7 +38,7 @@
:allow)
(defonce ^:private ^:dynamic ^{:doc "Used to detect nested transactions."}
*active-tx* #{})
*active-tx* false)
(def ^:private isolation-levels
"Transaction isolation levels."
@ -112,44 +112,38 @@
(.setReadOnly con old-readonly)
(catch Exception _))))))))
(defn- raw-connection ^Connection [^Connection con]
(try ; because some drivers do not implement this :(
(if (.isWrapperFor con Connection)
(.unwrap con Connection)
con)
(catch Throwable _ ; to catch AbstractMethodError :(
con)))
(extend-protocol p/Transactable
java.sql.Connection
(-transact [this body-fn opts]
(let [raw (raw-connection this)]
(cond
(and (not (contains? *active-tx* raw)) (= :ignore *nested-tx*))
;; #245 do not lock when in c.j.j compatibility mode:
(binding [*active-tx* (conj *active-tx* raw)]
(transact* this body-fn opts))
(or (not (contains? *active-tx* raw)) (= :allow *nested-tx*))
(locking this
(binding [*active-tx* (conj *active-tx* raw)]
(transact* this body-fn opts)))
(= :ignore *nested-tx*)
(body-fn this)
(= :prohibit *nested-tx*)
(throw (IllegalStateException. "Nested transactions are prohibited"))
:else
(throw (IllegalArgumentException.
(str "*nested-tx* ("
*nested-tx*
") was not :allow, :ignore, or :prohibit"))))))
(cond (or (not *active-tx*) (= :allow *nested-tx*))
(locking this
(binding [*active-tx* true]
(transact* this body-fn opts)))
(= :ignore *nested-tx*)
(body-fn this)
(= :prohibit *nested-tx*)
(throw (IllegalStateException. "Nested transactions are prohibited"))
:else
(throw (IllegalArgumentException.
(str "*nested-tx* ("
*nested-tx*
") was not :allow, :ignore, or :prohibit")))))
javax.sql.DataSource
(-transact [this body-fn opts]
(with-open [con (p/get-connection this opts)]
;; this connection is assumed unique so we do not need the active-tx check:
(let [raw (raw-connection con)]
;; we don't lock either, per #293:
(binding [*active-tx* (conj *active-tx* raw)]
(transact* con body-fn opts)))))
(cond (or (not *active-tx*) (= :allow *nested-tx*))
(binding [*active-tx* true]
(with-open [con (p/get-connection this opts)]
(transact* con body-fn opts)))
(= :ignore *nested-tx*)
(with-open [con (p/get-connection this opts)]
(body-fn con))
(= :prohibit *nested-tx*)
(throw (IllegalStateException. "Nested transactions are prohibited"))
:else
(throw (IllegalArgumentException.
(str "*nested-tx* ("
*nested-tx*
") was not :allow, :ignore, or :prohibit")))))
Object
(-transact [this body-fn opts]
(p/-transact (p/get-datasource this) body-fn opts)))
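;; A usage sketch of controlling nested-transaction behaviour via *nested-tx*
;; (`jdbc` = next.jdbc; `ds` is assumed; the default value is :allow):
(comment
  (binding [*nested-tx* :prohibit]
    (jdbc/with-transaction [tx ds]
      ;; any nested with-transaction inside this block now throws:
      (jdbc/execute! tx ["delete from fruit where id = ?" 1]))))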

View file

@ -1,11 +1,11 @@
;; copyright (c) 2018-2024 Sean Corfield, all rights reserved
;; copyright (c) 2018-2021 Sean Corfield, all rights reserved
(ns next.jdbc.types
"Provides convenience functions for wrapping values you pass into SQL
operations that have per-instance implementations of `SettableParameter`
so that `.setObject()` is called with the appropriate `java.sql.Types` value."
(:require [clojure.string :as str]
[next.jdbc.prepare])
[next.jdbc.prepare :as prep])
(:import (java.lang.reflect Field Modifier)
(java.sql PreparedStatement)))
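;; A usage sketch of the generated as-xxx wrappers (`sql` = next.jdbc.sql;
;; `ds` and the events table are assumed; as-other and as-varchar are among
;; the functions this namespace defines from java.sql.Types constants):
(comment
  (sql/insert! ds :events
               {:id   (as-other (java.util.UUID/randomUUID))
                :name (as-varchar "created")}))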

template/pom.xml Normal file
View file

@ -0,0 +1,23 @@
<?xml version="1.0" encoding="UTF-8"?>
<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
<modelVersion>4.0.0</modelVersion>
<name>next.jdbc</name>
<description>The next generation of clojure.java.jdbc: a new low-level Clojure wrapper for JDBC-based access to databases.</description>
<url>https://github.com/seancorfield/next-jdbc</url>
<licenses>
<license>
<name>Eclipse Public License</name>
<url>http://www.eclipse.org/legal/epl-v10.html</url>
</license>
</licenses>
<developers>
<developer>
<name>Sean Corfield</name>
</developer>
</developers>
<scm>
<url>https://github.com/seancorfield/next-jdbc</url>
<connection>scm:git:git://github.com/seancorfield/next-jdbc.git</connection>
<developerConnection>scm:git:ssh://git@github.com/seancorfield/next-jdbc.git</developerConnection>
</scm>
</project>

View file

@ -1,56 +0,0 @@
;; copyright (c) 2019-2025 Sean Corfield, all rights reserved
(ns next.jdbc.connection-string-test
"Tests for the main hash map spec to JDBC URL logic and the get-datasource
and get-connection protocol implementations.
At some point, the datasource/connection tests should probably be extended
to accept EDN specs from an external source (environment variables?)."
(:require [clojure.string :as str]
[lazytest.core :refer [around set-ns-context!]]
[lazytest.experimental.interfaces.clojure-test :refer [deftest is testing]]
[next.jdbc.connection :as c]
[next.jdbc.protocols :as p]
[next.jdbc.specs :as specs]
[next.jdbc.test-fixtures :refer [db with-test-db]])
(:import [java.util Properties]))
(set! *warn-on-reflection* true)
(set-ns-context! [(around [f] (with-test-db f))])
(specs/instrument)
(deftest test-uri-strings
(testing "datasource via String"
(let [db-spec (db)
db-spec (if (= "embedded-postgres" (:dbtype db-spec))
(assoc db-spec :dbtype "postgresql")
db-spec)
[url etc] (#'c/spec->url+etc db-spec)
{:keys [user password]} etc
etc (dissoc etc :user :password)
uri (-> url
;; strip jdbc: prefix for fun
(str/replace #"^jdbc:" "")
(str/replace #";" "?") ; for SQL Server tests
(str/replace #":sqlserver" "") ; for SQL Server tests
(cond-> (and user password)
(str/replace #"://" (str "://" user ":" password "@"))))
ds (p/get-datasource (assoc etc :jdbcUrl uri))]
(when (and user password)
(with-open [con (p/get-connection ds {})]
(is (instance? java.sql.Connection con)))))))
(deftest property-tests
(is (string? (.getProperty ^Properties (#'c/as-properties {:foo [42]}) "foo")))
(is (string? (.get ^Properties (#'c/as-properties {:foo [42]}) "foo")))
(is (vector? (.get ^Properties (#'c/as-properties
{:foo [42]
:next.jdbc/as-is-properties [:foo]})
"foo")))
;; because .getProperty drops non-string values!
(is (nil? (.getProperty ^Properties (#'c/as-properties
{:foo [42]
:next.jdbc/as-is-properties [:foo]})
"foo"))))

View file

@ -1,4 +1,4 @@
;; copyright (c) 2019-2025 Sean Corfield, all rights reserved
;; copyright (c) 2019-2021 Sean Corfield, all rights reserved
(ns next.jdbc.connection-test
"Tests for the main hash map spec to JDBC URL logic and the get-datasource
@ -7,7 +7,7 @@
At some point, the datasource/connection tests should probably be extended
to accept EDN specs from an external source (environment variables?)."
(:require [clojure.string :as str]
[lazytest.experimental.interfaces.clojure-test :refer [deftest is testing]]
[clojure.test :refer [deftest is testing]]
[next.jdbc.connection :as c]
[next.jdbc.protocols :as p])
(:import (com.zaxxer.hikari HikariDataSource)
@ -74,10 +74,6 @@
(is (= ["jdbc:acme:(*)12.34.56.70:1234/my-db" {} nil]
(#'c/spec->url+etc {:dbtype "acme" :classname "java.lang.String"
:dbname "my-db" :host "12.34.56.70" :port 1234
:host-prefix "(*)"})))
(is (= ["jdbc:acme:(*)12.34.56.70/my-db" {} nil]
(#'c/spec->url+etc {:dbtype "acme" :classname "java.lang.String"
:dbname "my-db" :host "12.34.56.70" :port :none
:host-prefix "(*)"}))))
(deftest jdbc-url-tests
@ -133,12 +129,6 @@
(= "derby" db)
(assoc :create true))))
(deftest test-sourceable-via-metadata
(doseq [db test-dbs]
(let [ds (p/get-datasource
^{`p/get-datasource (fn [v] (p/get-datasource (first v)))} [db])]
(is (instance? javax.sql.DataSource ds)))))
(deftest test-get-connection
(doseq [db test-dbs]
(println 'test-get-connection (:dbtype db))
@ -192,13 +182,3 @@
(testing "connection via map (Object)"
(with-open [con (p/get-connection db {})]
(is (instance? java.sql.Connection con))))))
(deftest issue-243-uri->db-spec
(is (= {:dbtype "mysql" :dbname "mydb"
:host "myserver" :port 1234
:user "foo" :password "bar"}
(c/uri->db-spec "mysql://foo:bar@myserver:1234/mydb")))
(is (= {:dbtype "mysql" :dbname "mydb"
:host "myserver" :port 1234
:user "foo" :password "bar"}
(c/uri->db-spec "jdbc:mysql://myserver:1234/mydb?user=foo&password=bar"))))

View file

@ -1,22 +1,21 @@
;; copyright (c) 2020-2025 Sean Corfield, all rights reserved
;; copyright (c) 2020-2021 Sean Corfield, all rights reserved
(ns next.jdbc.datafy-test
"Tests for the datafy extensions over JDBC types."
(:require [clojure.datafy :as d]
[clojure.set :as set]
[lazytest.core :refer [around set-ns-context!]]
[lazytest.experimental.interfaces.clojure-test :refer [deftest is testing]]
[clojure.test :refer [deftest is testing use-fixtures]]
[next.jdbc :as jdbc]
[next.jdbc.datafy]
[next.jdbc.result-set :as rs]
[next.jdbc.specs :as specs]
[next.jdbc.test-fixtures
:refer [db derby? ds jtds? mysql? postgres? sqlite? with-test-db
xtdb?]]))
:refer [with-test-db db ds
derby? jtds? mysql? postgres? sqlite?]]))
(set! *warn-on-reflection* true)
(set-ns-context! [(around [f] (with-test-db f))])
(use-fixtures :once with-test-db)
(specs/instrument)
@ -84,26 +83,6 @@
:rowIdLifetime/exception))
(postgres?) (-> (disj :rowIdLifetime)
(conj :rowIdLifetime/exception))
(xtdb?) (-> (disj :clientInfoProperties
:defaultTransactionIsolation
:maxCatalogNameLength
:maxColumnNameLength
:maxCursorNameLength
:maxProcedureNameLength
:maxSchemaNameLength
:maxTableNameLength
:maxUserNameLength
:rowIdLifetime)
(conj :clientInfoProperties/exception
:defaultTransactionIsolation/exception
:maxCatalogNameLength/exception
:maxColumnNameLength/exception
:maxCursorNameLength/exception
:maxProcedureNameLength/exception
:maxSchemaNameLength/exception
:maxTableNameLength/exception
:maxUserNameLength/exception
:rowIdLifetime/exception))
(sqlite?) (-> (disj :clientInfoProperties :rowIdLifetime)
(conj :clientInfoProperties/exception
:rowIdLifetime/exception)))
@ -118,8 +97,7 @@
(let [data (d/datafy (.getMetaData con))]
(doseq [k (cond-> #{:catalogs :clientInfoProperties :schemas :tableTypes :typeInfo}
(jtds?) (disj :clientInfoProperties)
(sqlite?) (disj :clientInfoProperties)
(xtdb?) (disj :clientInfoProperties))]
(sqlite?) (disj :clientInfoProperties))]
(let [rs (d/nav data k nil)]
(is (vector? rs))
(is (every? map? rs))))))))
@ -144,5 +122,4 @@
(.execute ps)
(.getResultSet ps)
(.close ps)
(.close con)
)
(.close con))

View file

@ -1,4 +1,4 @@
;; copyright (c) 2019-2025 Sean Corfield, all rights reserved
;; copyright (c) 2019-2021 Sean Corfield, all rights reserved
(ns next.jdbc.date-time-test
"Date/time parameter auto-conversion tests.
@ -6,45 +6,44 @@
These tests contain no assertions. Without requiring `next.jdbc.date-time`
several of the `insert` operations would throw exceptions for some databases
so the test here just checks those operations 'succeed'."
(:require [lazytest.core :refer [around set-ns-context!]]
[lazytest.experimental.interfaces.clojure-test :refer [deftest]]
(:require [clojure.test :refer [deftest is testing use-fixtures]]
[next.jdbc :as jdbc]
[next.jdbc.date-time] ; to extend SettableParameter to date/time
[next.jdbc.test-fixtures :refer [with-test-db ds
mssql? xtdb?]]
[next.jdbc.specs :as specs]))
[next.jdbc.test-fixtures :refer [with-test-db db ds
mssql?]]
[next.jdbc.specs :as specs])
(:import (java.sql ResultSet)))
(set! *warn-on-reflection* true)
(set-ns-context! [(around [f] (with-test-db f))])
(use-fixtures :once with-test-db)
(specs/instrument)
(deftest issue-73
(when-not (xtdb?)
(try
(jdbc/execute-one! (ds) ["drop table fruit_time"])
(catch Throwable _))
(jdbc/execute-one! (ds) [(str "create table fruit_time (id int not null, deadline "
(if (mssql?) "datetime" "timestamp")
" not null)")])
(jdbc/execute-one! (ds) ["insert into fruit_time (id, deadline) values (?,?)" 1 (java.util.Date.)])
(jdbc/execute-one! (ds) ["insert into fruit_time (id, deadline) values (?,?)" 2 (java.time.Instant/now)])
(jdbc/execute-one! (ds) ["insert into fruit_time (id, deadline) values (?,?)" 3 (java.time.LocalDate/now)])
(jdbc/execute-one! (ds) ["insert into fruit_time (id, deadline) values (?,?)" 4 (java.time.LocalDateTime/now)])
(try
(jdbc/execute-one! (ds) ["drop table fruit_time"])
(catch Throwable _))
(jdbc/execute-one! (ds) ["create table fruit_time (id int not null, deadline time not null)"])
(jdbc/execute-one! (ds) ["insert into fruit_time (id, deadline) values (?,?)" 1 (java.util.Date.)])
(jdbc/execute-one! (ds) ["insert into fruit_time (id, deadline) values (?,?)" 2 (java.time.Instant/now)])
(jdbc/execute-one! (ds) ["insert into fruit_time (id, deadline) values (?,?)" 3 (java.time.LocalDate/now)])
(jdbc/execute-one! (ds) ["insert into fruit_time (id, deadline) values (?,?)" 4 (java.time.LocalDateTime/now)])
(try
(jdbc/execute-one! (ds) ["drop table fruit_time"])
(catch Throwable _))
(jdbc/execute-one! (ds) ["create table fruit_time (id int not null, deadline date not null)"])
(jdbc/execute-one! (ds) ["insert into fruit_time (id, deadline) values (?,?)" 1 (java.util.Date.)])
(jdbc/execute-one! (ds) ["insert into fruit_time (id, deadline) values (?,?)" 2 (java.time.Instant/now)])
(jdbc/execute-one! (ds) ["insert into fruit_time (id, deadline) values (?,?)" 3 (java.time.LocalDate/now)])
(jdbc/execute-one! (ds) ["insert into fruit_time (id, deadline) values (?,?)" 4 (java.time.LocalDateTime/now)])))
(try
(jdbc/execute-one! (ds) ["drop table fruit_time"])
(catch Throwable _))
(jdbc/execute-one! (ds) [(str "create table fruit_time (id int not null, deadline "
(if (mssql?) "datetime" "timestamp")
" not null)")])
(jdbc/execute-one! (ds) ["insert into fruit_time (id, deadline) values (?,?)" 1 (java.util.Date.)])
(jdbc/execute-one! (ds) ["insert into fruit_time (id, deadline) values (?,?)" 2 (java.time.Instant/now)])
(jdbc/execute-one! (ds) ["insert into fruit_time (id, deadline) values (?,?)" 3 (java.time.LocalDate/now)])
(jdbc/execute-one! (ds) ["insert into fruit_time (id, deadline) values (?,?)" 4 (java.time.LocalDateTime/now)])
(try
(jdbc/execute-one! (ds) ["drop table fruit_time"])
(catch Throwable _))
(jdbc/execute-one! (ds) ["create table fruit_time (id int not null, deadline time not null)"])
(jdbc/execute-one! (ds) ["insert into fruit_time (id, deadline) values (?,?)" 1 (java.util.Date.)])
(jdbc/execute-one! (ds) ["insert into fruit_time (id, deadline) values (?,?)" 2 (java.time.Instant/now)])
(jdbc/execute-one! (ds) ["insert into fruit_time (id, deadline) values (?,?)" 3 (java.time.LocalDate/now)])
(jdbc/execute-one! (ds) ["insert into fruit_time (id, deadline) values (?,?)" 4 (java.time.LocalDateTime/now)])
(try
(jdbc/execute-one! (ds) ["drop table fruit_time"])
(catch Throwable _))
(jdbc/execute-one! (ds) ["create table fruit_time (id int not null, deadline date not null)"])
(jdbc/execute-one! (ds) ["insert into fruit_time (id, deadline) values (?,?)" 1 (java.util.Date.)])
(jdbc/execute-one! (ds) ["insert into fruit_time (id, deadline) values (?,?)" 2 (java.time.Instant/now)])
(jdbc/execute-one! (ds) ["insert into fruit_time (id, deadline) values (?,?)" 3 (java.time.LocalDate/now)])
(jdbc/execute-one! (ds) ["insert into fruit_time (id, deadline) values (?,?)" 4 (java.time.LocalDateTime/now)]))

View file

@ -1,8 +1,9 @@
;; copyright (c) 2020-2025 Sean Corfield, all rights reserved
;; copyright (c) 2020-2021 Sean Corfield, all rights reserved
(ns next.jdbc.default-options-test
"Stub test namespace for default options. Nothing can really be tested
at this level tho'..."
(:require [next.jdbc.default-options]))
(:require [clojure.test :refer [deftest is testing]]
[next.jdbc.default-options :refer :all]))
(set! *warn-on-reflection* true)

View file

@ -1,55 +0,0 @@
;; copyright (c) 2024-2025 Sean Corfield, all rights reserved
(ns next.jdbc.defer-test
"The idea behind the next.jdbc.defer namespace is to provide a
way to defer the execution of a series of SQL statements until
a later time, but still provide a way for inserted keys to be
used in later SQL statements.
The principle is to provide a core subset of the next.jdbc
and next.jdbc.sql API that produces a data structure that
describes a series of SQL operations to be performed, that
are held in a dynamic var, and that can be executed at a
later time, in a transaction."
(:require [lazytest.core :refer [around set-ns-context!]]
[lazytest.experimental.interfaces.clojure-test :refer [deftest is testing]]
[next.jdbc :as jdbc]
[next.jdbc.defer :as sut]
[next.jdbc.test-fixtures
:refer [ds with-test-db xtdb?]]))
(set! *warn-on-reflection* true)
(set-ns-context! [(around [f] (with-test-db f))])
(deftest basic-test
(when-not (xtdb?)
(testing "data structures"
(is (= [{:sql-p ["INSERT INTO foo (name) VALUES (?)" "Sean"]
:key-fn :GENERATED_KEY
:key :id
:opts {:key-fn :GENERATED_KEY :key :id}}]
@(sut/defer-ops
#(sut/insert! :foo {:name "Sean"} {:key-fn :GENERATED_KEY :key :id})))))
(testing "execution"
(let [effects (sut/with-deferred (ds)
(sut/insert! :fruit {:name "Mango"} {:key :test}))]
(is (= {:test 1} @effects))
(is (= 1 (count (jdbc/execute! (ds)
["select * from fruit where name = ?"
"Mango"])))))
(let [effects (sut/with-deferred (ds)
(sut/insert! :fruit {:name "Dragonfruit"} {:key :test})
(sut/update! :fruit {:cost 123} {:name "Dragonfruit"})
(sut/delete! :fruit {:name "Dragonfruit"}))]
(is (= {:test 1} @effects))
(is (= 0 (count (jdbc/execute! (ds)
["select * from fruit where name = ?"
"Dragonfruit"])))))
(let [effects (sut/with-deferred (ds)
(sut/insert! :fruit {:name "Grapefruit" :bad_column 0} {:key :test}))]
(is (= :failed (try @effects
(catch Exception _ :failed))))
(is (= 0 (count (jdbc/execute! (ds)
["select * from fruit where name = ?"
"Grapefruit"]))))))))

View file

@ -1,25 +1,23 @@
;; copyright (c) 2019-2025 Sean Corfield, all rights reserved
;; copyright (c) 2019-2021 Sean Corfield, all rights reserved
(ns next.jdbc.optional-test
"Test namespace for the optional builder functions."
(:require [clojure.string :as str]
[lazytest.core :refer [around set-ns-context!]]
[lazytest.experimental.interfaces.clojure-test :refer [deftest is testing]]
[clojure.test :refer [deftest is testing use-fixtures]]
[next.jdbc.optional :as opt]
[next.jdbc.protocols :as p]
[next.jdbc.test-fixtures :refer [col-kw column default-options ds index
with-test-db]])
(:import
(java.sql ResultSet ResultSetMetaData)))
[next.jdbc.test-fixtures :refer [with-test-db ds column
default-options]])
(:import (java.sql ResultSet ResultSetMetaData)))
(set! *warn-on-reflection* true)
(set-ns-context! [(around [f] (with-test-db f))])
(use-fixtures :once with-test-db)
(deftest test-map-row-builder
(testing "default row builder"
(let [row (p/-execute-one (ds)
[(str "select * from fruit where " (index) " = ?") 1]
["select * from fruit where id = ?" 1]
(assoc (default-options)
:builder-fn opt/as-maps))]
(is (map? row))
@ -28,7 +26,7 @@
(is (= "Apple" ((column :FRUIT/NAME) row)))))
(testing "unqualified row builder"
(let [row (p/-execute-one (ds)
[(str "select * from fruit where " (index) " = ?") 2]
["select * from fruit where id = ?" 2]
{:builder-fn opt/as-unqualified-maps})]
(is (map? row))
(is (not (contains? row (column :COST))))
@ -36,23 +34,23 @@
(is (= "Banana" ((column :NAME) row)))))
(testing "lower-case row builder"
(let [row (p/-execute-one (ds)
[(str "select * from fruit where " (index) " = ?") 3]
["select * from fruit where id = ?" 3]
(assoc (default-options)
:builder-fn opt/as-lower-maps))]
(is (map? row))
(is (not (contains? row (col-kw :fruit/appearance))))
(is (= 3 ((col-kw :fruit/id) row)))
(is (= "Peach" ((col-kw :fruit/name) row)))))
(is (not (contains? row :fruit/appearance)))
(is (= 3 (:fruit/id row)))
(is (= "Peach" (:fruit/name row)))))
(testing "unqualified lower-case row builder"
(let [row (p/-execute-one (ds)
[(str "select * from fruit where " (index) " = ?") 4]
["select * from fruit where id = ?" 4]
{:builder-fn opt/as-unqualified-lower-maps})]
(is (map? row))
(is (= 4 ((col-kw :id) row)))
(is (= "Orange" ((col-kw :name) row)))))
(is (= 4 (:id row)))
(is (= "Orange" (:name row)))))
(testing "custom row builder"
(let [row (p/-execute-one (ds)
[(str "select * from fruit where " (index) " = ?") 3]
["select * from fruit where id = ?" 3]
(assoc (default-options)
:builder-fn opt/as-modified-maps
:label-fn str/lower-case
@ -63,13 +61,13 @@
(is (= "Peach" ((column :FRUIT/name) row))))))
(defn- default-column-reader
[^ResultSet rs ^ResultSetMetaData _ ^Integer i]
[^ResultSet rs ^ResultSetMetaData rsmeta ^Integer i]
(.getObject rs i))
(deftest test-map-row-adapter
(testing "default row builder"
(let [row (p/-execute-one (ds)
[(str "select * from fruit where " (index) " = ?") 1]
["select * from fruit where id = ?" 1]
(assoc (default-options)
:builder-fn (opt/as-maps-adapter
opt/as-maps
@ -80,7 +78,7 @@
(is (= "Apple" ((column :FRUIT/NAME) row)))))
(testing "unqualified row builder"
(let [row (p/-execute-one (ds)
[(str "select * from fruit where " (index) " = ?") 2]
["select * from fruit where id = ?" 2]
{:builder-fn (opt/as-maps-adapter
opt/as-unqualified-maps
default-column-reader)})]
@ -90,27 +88,27 @@
(is (= "Banana" ((column :NAME) row)))))
(testing "lower-case row builder"
(let [row (p/-execute-one (ds)
[(str "select * from fruit where " (index) " = ?") 3]
["select * from fruit where id = ?" 3]
(assoc (default-options)
:builder-fn (opt/as-maps-adapter
opt/as-lower-maps
default-column-reader)))]
(is (map? row))
(is (not (contains? row (col-kw :fruit/appearance))))
(is (= 3 ((col-kw :fruit/id) row)))
(is (= "Peach" ((col-kw :fruit/name) row)))))
(is (not (contains? row :fruit/appearance)))
(is (= 3 (:fruit/id row)))
(is (= "Peach" (:fruit/name row)))))
(testing "unqualified lower-case row builder"
(let [row (p/-execute-one (ds)
[(str "select * from fruit where " (index) " = ?") 4]
["select * from fruit where id = ?" 4]
{:builder-fn (opt/as-maps-adapter
opt/as-unqualified-lower-maps
default-column-reader)})]
(is (map? row))
(is (= 4 ((col-kw :id) row)))
(is (= "Orange" ((col-kw :name) row)))))
(is (= 4 (:id row)))
(is (= "Orange" (:name row)))))
(testing "custom row builder"
(let [row (p/-execute-one (ds)
[(str "select * from fruit where " (index) " = ?") 3]
["select * from fruit where id = ?" 3]
(assoc (default-options)
:builder-fn (opt/as-maps-adapter
opt/as-modified-maps

View file

@ -1,75 +1,57 @@
;; copyright (c) 2020-2025 Sean Corfield, all rights reserved
;; copyright (c) 2020-2021 Sean Corfield, all rights reserved
(ns next.jdbc.plan-test
"Tests for the plan helpers."
(:require [lazytest.core :refer [around]]
[lazytest.experimental.interfaces.clojure-test :refer [deftest is]]
(:require [clojure.test :refer [deftest is use-fixtures]]
[next.jdbc.plan :as plan]
[next.jdbc.specs :as specs]
[next.jdbc.test-fixtures
:refer [with-test-db ds col-kw index]]
[clojure.string :as str]))
:refer [with-test-db ds]]))
(set! *warn-on-reflection* true)
;; around each test because of the folding tests using 1,000 rows
(use-fixtures :each with-test-db)
(specs/instrument)
(deftest select-one!-tests
{:context [(around [f] (with-test-db f))]}
(is (= {(col-kw :id) 1}
(plan/select-one! (ds) [(col-kw :id)] [(str "select * from fruit order by " (index))])))
(is (= {:id 1}
(plan/select-one! (ds) [:id] ["select * from fruit order by id"])))
(is (= 1
(plan/select-one! (ds) (col-kw :id) [(str "select * from fruit order by " (index))])))
(plan/select-one! (ds) :id ["select * from fruit order by id"])))
(is (= "Banana"
(plan/select-one! (ds) :name [(str "select * from fruit where " (index) " = ?") 2])))
(plan/select-one! (ds) :name ["select * from fruit where id = ?" 2])))
(is (= [1 "Apple"]
(plan/select-one! (ds) (juxt (col-kw :id) :name)
[(str "select * from fruit order by " (index))])))
(is (= {(col-kw :id) 1 :name "Apple"}
(plan/select-one! (ds) #(select-keys % [(col-kw :id) :name])
[(str "select * from fruit order by " (index))]))))
(plan/select-one! (ds) (juxt :id :name)
["select * from fruit order by id"])))
(is (= {:id 1 :name "Apple"}
(plan/select-one! (ds) #(select-keys % [:id :name])
["select * from fruit order by id"]))))
(deftest select-vector-tests
{:context [(around [f] (with-test-db f))]}
(is (= [{(col-kw :id) 1} {(col-kw :id) 2} {(col-kw :id) 3} {(col-kw :id) 4}]
(plan/select! (ds) [(col-kw :id)] [(str "select * from fruit order by " (index))])))
(is (= [{:id 1} {:id 2} {:id 3} {:id 4}]
(plan/select! (ds) [:id] ["select * from fruit order by id"])))
(is (= [1 2 3 4]
(plan/select! (ds) (col-kw :id) [(str "select * from fruit order by " (index))])))
(plan/select! (ds) :id ["select * from fruit order by id"])))
(is (= ["Banana"]
(plan/select! (ds) :name [(str "select * from fruit where " (index) " = ?") 2])))
(plan/select! (ds) :name ["select * from fruit where id = ?" 2])))
(is (= [[2 "Banana"]]
(plan/select! (ds) (juxt (col-kw :id) :name)
[(str "select * from fruit where " (index) " = ?") 2])))
(is (= [{(col-kw :id) 2 :name "Banana"}]
(plan/select! (ds) [(col-kw :id) :name]
[(str "select * from fruit where " (index) " = ?") 2]))))
(plan/select! (ds) (juxt :id :name)
["select * from fruit where id = ?" 2])))
(is (= [{:id 2 :name "Banana"}]
(plan/select! (ds) [:id :name]
["select * from fruit where id = ?" 2]))))
(deftest select-set-tests
{:context [(around [f] (with-test-db f))]}
(is (= #{{(col-kw :id) 1} {(col-kw :id) 2} {(col-kw :id) 3} {(col-kw :id) 4}}
(plan/select! (ds) [(col-kw :id)] [(str "select * from fruit order by " (index))]
(is (= #{{:id 1} {:id 2} {:id 3} {:id 4}}
(plan/select! (ds) [:id] ["select * from fruit order by id"]
{:into #{}})))
(is (= #{1 2 3 4}
(plan/select! (ds) (col-kw :id) [(str "select * from fruit order by " (index))]
(plan/select! (ds) :id ["select * from fruit order by id"]
{:into #{}}))))
(deftest select-map-tests
{:context [(around [f] (with-test-db f))]}
(is (= {1 "Apple", 2 "Banana", 3 "Peach", 4 "Orange"}
(plan/select! (ds) (juxt (col-kw :id) :name) [(str "select * from fruit order by " (index))]
(plan/select! (ds) (juxt :id :name) ["select * from fruit order by id"]
{:into {}}))))
(deftest select-issue-227
{:context [(around [f] (with-test-db f))]}
(is (= ["Apple"]
(plan/select! (ds) :name [(str "select * from fruit where " (index) " = ?") 1]
{:column-fn #(str/replace % "-" "_")})))
(is (= ["Apple"]
(plan/select! (ds) :foo/name [(str "select * from fruit where " (index) " = ?") 1]
{:column-fn #(str/replace % "-" "_")})))
(is (= ["Apple"]
(plan/select! (ds) #(get % "name") [(str "select * from fruit where " (index) " = ?") 1]
{:column-fn #(str/replace % "-" "_")})))
(is (= [["Apple"]]
(plan/select! (ds) (juxt :name) [(str "select * from fruit where " (index) " = ?") 1]
{:column-fn #(str/replace % "-" "_")}))))

View file

@ -1,4 +1,4 @@
;; copyright (c) 2019-2025 Sean Corfield, all rights reserved
;; copyright (c) 2019-2021 Sean Corfield, all rights reserved
(ns next.jdbc.prepare-test
"Stub test namespace for PreparedStatement creation etc.
@ -8,40 +8,75 @@
The tests for the deprecated version of `execute-batch!` are here
as a guard against regressions."
(:require [lazytest.core :refer [around set-ns-context!]]
[lazytest.experimental.interfaces.clojure-test :refer [deftest is testing]]
(:require [clojure.test :refer [deftest is testing use-fixtures]]
[next.jdbc :as jdbc]
[next.jdbc.test-fixtures
:refer [with-test-db ds jtds? mssql? sqlite? xtdb?]]
:refer [with-test-db ds jtds? mssql? sqlite?]]
[next.jdbc.prepare :as prep]
[next.jdbc.specs :as specs]))
(set! *warn-on-reflection* true)
(set-ns-context! [(around [f] (with-test-db f))])
(use-fixtures :once with-test-db)
(specs/instrument)
(deftest execute-batch-tests
(when-not (xtdb?)
(testing "simple batch insert"
(is (= [1 1 1 1 1 1 1 1 1 13]
(jdbc/with-transaction [t (ds) {:rollback-only true}]
(with-open [ps (jdbc/prepare t ["
(testing "simple batch insert"
(is (= [1 1 1 1 1 1 1 1 1 13]
(jdbc/with-transaction [t (ds) {:rollback-only true}]
(with-open [ps (jdbc/prepare t ["
INSERT INTO fruit (name, appearance) VALUES (?,?)
"])]
(let [result (prep/execute-batch! ps [["fruit1" "one"]
["fruit2" "two"]
["fruit3" "three"]
["fruit4" "four"]
["fruit5" "five"]
["fruit6" "six"]
["fruit7" "seven"]
["fruit8" "eight"]
["fruit9" "nine"]])]
(conj result (count (jdbc/execute! t ["select * from fruit"]))))))))
(is (= 4 (count (jdbc/execute! (ds) ["select * from fruit"])))))
(testing "small batch insert"
(let [result (prep/execute-batch! ps [["fruit1" "one"]
["fruit2" "two"]
["fruit3" "three"]
["fruit4" "four"]
["fruit5" "five"]
["fruit6" "six"]
["fruit7" "seven"]
["fruit8" "eight"]
["fruit9" "nine"]])]
(conj result (count (jdbc/execute! t ["select * from fruit"]))))))))
(is (= 4 (count (jdbc/execute! (ds) ["select * from fruit"])))))
(testing "small batch insert"
(is (= [1 1 1 1 1 1 1 1 1 13]
(jdbc/with-transaction [t (ds) {:rollback-only true}]
(with-open [ps (jdbc/prepare t ["
INSERT INTO fruit (name, appearance) VALUES (?,?)
"])]
(let [result (prep/execute-batch! ps [["fruit1" "one"]
["fruit2" "two"]
["fruit3" "three"]
["fruit4" "four"]
["fruit5" "five"]
["fruit6" "six"]
["fruit7" "seven"]
["fruit8" "eight"]
["fruit9" "nine"]]
{:batch-size 3})]
(conj result (count (jdbc/execute! t ["select * from fruit"]))))))))
(is (= 4 (count (jdbc/execute! (ds) ["select * from fruit"])))))
(testing "big batch insert"
(is (= [1 1 1 1 1 1 1 1 1 13]
(jdbc/with-transaction [t (ds) {:rollback-only true}]
(with-open [ps (jdbc/prepare t ["
INSERT INTO fruit (name, appearance) VALUES (?,?)
"])]
(let [result (prep/execute-batch! ps [["fruit1" "one"]
["fruit2" "two"]
["fruit3" "three"]
["fruit4" "four"]
["fruit5" "five"]
["fruit6" "six"]
["fruit7" "seven"]
["fruit8" "eight"]
["fruit9" "nine"]]
{:batch-size 8})]
(conj result (count (jdbc/execute! t ["select * from fruit"]))))))))
(is (= 4 (count (jdbc/execute! (ds) ["select * from fruit"])))))
(testing "large batch insert"
(when-not (or (jtds?) (sqlite?))
(is (= [1 1 1 1 1 1 1 1 1 13]
(jdbc/with-transaction [t (ds) {:rollback-only true}]
(with-open [ps (jdbc/prepare t ["
@ -56,70 +91,33 @@ INSERT INTO fruit (name, appearance) VALUES (?,?)
["fruit7" "seven"]
["fruit8" "eight"]
["fruit9" "nine"]]
{:batch-size 3})]
{:batch-size 4
:large true})]
(conj result (count (jdbc/execute! t ["select * from fruit"]))))))))
(is (= 4 (count (jdbc/execute! (ds) ["select * from fruit"])))))
(testing "big batch insert"
(is (= [1 1 1 1 1 1 1 1 1 13]
(jdbc/with-transaction [t (ds) {:rollback-only true}]
(with-open [ps (jdbc/prepare t ["
INSERT INTO fruit (name, appearance) VALUES (?,?)
"])]
(let [result (prep/execute-batch! ps [["fruit1" "one"]
["fruit2" "two"]
["fruit3" "three"]
["fruit4" "four"]
["fruit5" "five"]
["fruit6" "six"]
["fruit7" "seven"]
["fruit8" "eight"]
["fruit9" "nine"]]
{:batch-size 8})]
(conj result (count (jdbc/execute! t ["select * from fruit"]))))))))
(is (= 4 (count (jdbc/execute! (ds) ["select * from fruit"])))))
(testing "large batch insert"
(when-not (or (jtds?) (sqlite?))
(is (= [1 1 1 1 1 1 1 1 1 13]
(jdbc/with-transaction [t (ds) {:rollback-only true}]
(with-open [ps (jdbc/prepare t ["
INSERT INTO fruit (name, appearance) VALUES (?,?)
"])]
(let [result (prep/execute-batch! ps [["fruit1" "one"]
["fruit2" "two"]
["fruit3" "three"]
["fruit4" "four"]
["fruit5" "five"]
["fruit6" "six"]
["fruit7" "seven"]
["fruit8" "eight"]
["fruit9" "nine"]]
{:batch-size 4
:large true})]
(conj result (count (jdbc/execute! t ["select * from fruit"]))))))))
(is (= 4 (count (jdbc/execute! (ds) ["select * from fruit"]))))))
(testing "return generated keys"
(when-not (or (mssql?) (sqlite?))
(let [results
(jdbc/with-transaction [t (ds) {:rollback-only true}]
(with-open [ps (jdbc/prepare t ["
(is (= 4 (count (jdbc/execute! (ds) ["select * from fruit"]))))))
(testing "return generated keys"
(when-not (mssql?)
(let [results
(jdbc/with-transaction [t (ds) {:rollback-only true}]
(with-open [ps (jdbc/prepare t ["
INSERT INTO fruit (name, appearance) VALUES (?,?)
"]
{:return-keys true})]
(let [result (prep/execute-batch! ps [["fruit1" "one"]
["fruit2" "two"]
["fruit3" "three"]
["fruit4" "four"]
["fruit5" "five"]
["fruit6" "six"]
["fruit7" "seven"]
["fruit8" "eight"]
["fruit9" "nine"]]
{:batch-size 4
:return-generated-keys true})]
(conj result (count (jdbc/execute! t ["select * from fruit"]))))))]
(is (= 13 (last results)))
(is (every? map? (butlast results)))
{:return-keys true})]
(let [result (prep/execute-batch! ps [["fruit1" "one"]
["fruit2" "two"]
["fruit3" "three"]
["fruit4" "four"]
["fruit5" "five"]
["fruit6" "six"]
["fruit7" "seven"]
["fruit8" "eight"]
["fruit9" "nine"]]
{:batch-size 4
:return-generated-keys true})]
(conj result (count (jdbc/execute! t ["select * from fruit"]))))))]
(is (= 13 (last results)))
(is (every? map? (butlast results)))
;; Derby and SQLite only return one generated key per batch so there
;; are only three keys, plus the overall count here:
(is (< 3 (count results))))
(is (= 4 (count (jdbc/execute! (ds) ["select * from fruit"]))))))))
(is (< 3 (count results))))
(is (= 4 (count (jdbc/execute! (ds) ["select * from fruit"])))))))
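
Distilled from the batch tests above, a sketch of the `execute-batch!` pattern they exercise. The H2 in-memory db-spec and table are illustrative; `{:rollback-only true}` is what lets the tests assert that only the original rows remain afterwards.

(require '[next.jdbc :as jdbc]
         '[next.jdbc.prepare :as prep])

(with-open [con (jdbc/get-connection {:dbtype "h2:mem" :dbname "batch_example"})]
  (jdbc/execute! con ["create table fruit (name varchar(32), appearance varchar(32))"])
  (let [counts (jdbc/with-transaction [tx con {:rollback-only true}]
                 (with-open [ps (jdbc/prepare tx ["insert into fruit (name, appearance) values (?,?)"])]
                   ;; one update count per parameter group; :batch-size splits
                   ;; the groups into smaller executeBatch calls
                   (prep/execute-batch! ps
                                        [["fruit1" "one"] ["fruit2" "two"] ["fruit3" "three"]]
                                        {:batch-size 2})))]
    ;; the transaction rolled back, so the table is empty again:
    [counts (count (jdbc/execute! con ["select * from fruit"]))]))
;; => [[1 1 1] 0]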

View file

@ -1,8 +1,9 @@
;; copyright (c) 2019-2025 Sean Corfield, all rights reserved
;; copyright (c) 2019-2021 Sean Corfield, all rights reserved
(ns next.jdbc.protocols-test
"Stub test namespace for low-level protocols. Nothing can really be tested
at this level tho'..."
(:require [next.jdbc.protocols]))
(:require [clojure.test :refer [deftest is testing]]
[next.jdbc.protocols :refer :all]))
(set! *warn-on-reflection* true)

View file

@ -1,30 +1,34 @@
;; copyright (c) 2019-2025 Sean Corfield, all rights reserved
;; copyright (c) 2019-2021 Sean Corfield, all rights reserved
(ns next.jdbc.quoted-test
"Basic tests for quoting strategies. These are also tested indirectly
via the next.jdbc.sql tests."
(:require [lazytest.core :refer [defdescribe describe it expect]]
(:require [clojure.test :refer [deftest are testing]]
[next.jdbc.quoted :refer [ansi mysql sql-server oracle postgres
schema]]))
(set! *warn-on-reflection* true)
(def ^:private quote-fns [ansi mysql sql-server oracle postgres])
(deftest basic-quoting
(are [quote-fn quoted] (= (quote-fn "x") quoted)
ansi "\"x\""
mysql "`x`"
sql-server "[x]"
oracle "\"x\""
postgres "\"x\""))
(defdescribe quoted-functionality
(describe "base quoting"
(it "should correctly quote simple names"
(doseq [[f e] (map vector quote-fns
["\"x\"" "`x`" "[x]" "\"x\"" "\"x\""])]
(expect (= (f "x") e)))))
(describe "dotted name quoting"
(describe "basic quoting"
(it "should quote dotted names 'as-is'"
(doseq [[f e] (map vector quote-fns
["\"x.y\"" "`x.y`" "[x.y]" "\"x.y\"" "\"x.y\""])]
(expect (= (f "x.y") e)))))
(describe "schema quoting"
(it "should split and quote dotted names with schema"
(doseq [[f e] (map vector quote-fns
["\"x\".\"y\"" "`x`.`y`" "[x].[y]" "\"x\".\"y\"" "\"x\".\"y\""])]
(expect (= ((schema f) "x.y") e)))))))
(deftest schema-quoting
(testing "verify non-schema behavior"
(are [quote-fn quoted] (= (quote-fn "x.y") quoted)
ansi "\"x.y\""
mysql "`x.y`"
sql-server "[x.y]"
oracle "\"x.y\""
postgres "\"x.y\""))
(testing "verify schema behavior"
(are [quote-fn quoted] (= ((schema quote-fn) "x.y") quoted)
ansi "\"x\".\"y\""
mysql "`x`.`y`"
sql-server "[x].[y]"
oracle "\"x\".\"y\""
postgres "\"x\".\"y\"")))

View file

@ -1,4 +1,4 @@
;; copyright (c) 2019-2025 Sean Corfield, all rights reserved
;; copyright (c) 2019-2021 Sean Corfield, all rights reserved
(ns next.jdbc.result-set-test
"Test namespace for the result set functions.
@ -8,19 +8,18 @@
(:require [clojure.core.protocols :as core-p]
[clojure.datafy :as d]
[clojure.string :as str]
[lazytest.core :refer [around set-ns-context!]]
[lazytest.experimental.interfaces.clojure-test :refer [deftest is testing]]
[clojure.test :refer [deftest is testing use-fixtures]]
[next.jdbc.protocols :as p]
[next.jdbc.result-set :as rs]
[next.jdbc.specs :as specs]
[next.jdbc.test-fixtures :refer [with-test-db ds column index col-kw
default-options
derby? mssql? mysql? postgres? xtdb?]])
[next.jdbc.test-fixtures :refer [with-test-db ds column
default-options
derby? mssql? mysql? postgres?]])
(:import (java.sql ResultSet ResultSetMetaData)))
(set! *warn-on-reflection* true)
(set-ns-context! [(around [f] (with-test-db f))])
(use-fixtures :once with-test-db)
(specs/instrument)
@ -28,9 +27,7 @@
(testing "default schema"
(let [connectable (ds)
test-row (rs/datafiable-row {:TABLE/FRUIT_ID 1} connectable
(cond-> (default-options)
(xtdb?)
(assoc :schema-opts {:pk "_id"})))
(default-options))
data (d/datafy test-row)
v (get data :TABLE/FRUIT_ID)]
;; check datafication is sane
@ -43,10 +40,7 @@
(let [connectable (ds)
test-row (rs/datafiable-row {:foo/bar 2} connectable
(assoc (default-options)
:schema {:foo/bar
(if (xtdb?)
:fruit/_id
:fruit/id)}))
:schema {:foo/bar :fruit/id}))
data (d/datafy test-row)
v (get data :foo/bar)]
;; check datafication is sane
@ -59,10 +53,7 @@
(let [connectable (ds)
test-row (rs/datafiable-row {:foo/bar 3} connectable
(assoc (default-options)
:schema {:foo/bar
[(if (xtdb?)
:fruit/_id
:fruit/id)]}))
:schema {:foo/bar [:fruit/id]}))
data (d/datafy test-row)
v (get data :foo/bar)]
;; check datafication is sane
@ -76,7 +67,7 @@
(let [connectable (ds)
test-row (rs/datafiable-row {:foo/bar 2} connectable
(assoc (default-options)
:schema {:foo/bar [:fruit (col-kw :id)]}))
:schema {:foo/bar [:fruit :id]}))
data (d/datafy test-row)
v (get data :foo/bar)]
;; check datafication is sane
@ -88,7 +79,7 @@
(let [connectable (ds)
test-row (rs/datafiable-row {:foo/bar 3} connectable
(assoc (default-options)
:schema {:foo/bar [:fruit (col-kw :id) :many]}))
:schema {:foo/bar [:fruit :id :many]}))
data (d/datafy test-row)
v (get data :foo/bar)]
;; check datafication is sane
@ -102,7 +93,7 @@
(deftest test-map-row-builder
(testing "default row builder"
(let [row (p/-execute-one (ds)
[(str "select * from fruit where " (index) " = ?") 1]
["select * from fruit where id = ?" 1]
(default-options))]
(is (map? row))
(is (contains? row (column :FRUIT/GRADE)))
@ -110,7 +101,7 @@
(is (= 1 ((column :FRUIT/ID) row)))
(is (= "Apple" ((column :FRUIT/NAME) row))))
(let [rs (p/-execute-all (ds)
[(str "select * from fruit order by " (index))]
["select * from fruit order by id"]
(default-options))]
(is (every? map? rs))
(is (= 1 ((column :FRUIT/ID) (first rs))))
@ -119,7 +110,7 @@
(is (= "Orange" ((column :FRUIT/NAME) (last rs))))))
(testing "unqualified row builder"
(let [row (p/-execute-one (ds)
[(str "select * from fruit where " (index) " = ?") 2]
["select * from fruit where id = ?" 2]
{:builder-fn rs/as-unqualified-maps})]
(is (map? row))
(is (contains? row (column :COST)))
@ -128,35 +119,34 @@
(is (= "Banana" ((column :NAME) row)))))
(testing "lower-case row builder"
(let [row (p/-execute-one (ds)
[(str "select * from fruit where " (index) " = ?") 3]
["select * from fruit where id = ?" 3]
(assoc (default-options)
:builder-fn rs/as-lower-maps))]
(is (map? row))
(is (contains? row (col-kw :fruit/appearance)))
(is (nil? ((col-kw :fruit/appearance) row)))
(is (= 3 ((col-kw :fruit/id) row)))
(is (= "Peach" ((col-kw :fruit/name) row)))))
(is (contains? row :fruit/appearance))
(is (nil? (:fruit/appearance row)))
(is (= 3 (:fruit/id row)))
(is (= "Peach" (:fruit/name row)))))
(testing "unqualified lower-case row builder"
(let [row (p/-execute-one (ds)
[(str "select * from fruit where " (index) " = ?") 4]
["select * from fruit where id = ?" 4]
{:builder-fn rs/as-unqualified-lower-maps})]
(is (map? row))
(is (= 4 ((col-kw :id) row)))
(is (= "Orange" ((col-kw :name) row)))))
(is (= 4 (:id row)))
(is (= "Orange" (:name row)))))
(testing "kebab-case row builder"
(let [row (p/-execute-one (ds)
[(str "select " (index) ",name,appearance as looks_like from fruit where " (index) " = ?") 3]
["select id,name,appearance as looks_like from fruit where id = ?" 3]
(assoc (default-options)
:builder-fn rs/as-kebab-maps))]
(is (map? row))
(is (contains? row (col-kw :fruit/looks-like)))
(is (nil? ((col-kw :fruit/looks-like) row)))
;; kebab-case strips leading _ from _id (XTDB):
(is (= 3 ((if (xtdb?) :id :fruit/id) row)))
(is (= "Peach" ((col-kw :fruit/name) row)))))
(is (contains? row :fruit/looks-like))
(is (nil? (:fruit/looks-like row)))
(is (= 3 (:fruit/id row)))
(is (= "Peach" (:fruit/name row)))))
(testing "unqualified kebab-case row builder"
(let [row (p/-execute-one (ds)
[(str "select " (index) ",name,appearance as looks_like from fruit where " (index) " = ?") 4]
["select id,name,appearance as looks_like from fruit where id = ?" 4]
{:builder-fn rs/as-unqualified-kebab-maps})]
(is (map? row))
(is (contains? row :looks-like))
@ -165,7 +155,7 @@
(is (= "Orange" (:name row)))))
(testing "custom row builder 1"
(let [row (p/-execute-one (ds)
[(str "select fruit.*, " (index) " + 100 as newid from fruit where " (index) " = ?") 3]
["select fruit.*, id + 100 as newid from fruit where id = ?" 3]
(assoc (default-options)
:builder-fn rs/as-modified-maps
:label-fn str/lower-case
@ -178,7 +168,7 @@
(is (= "Peach" ((column :FRUIT/name) row)))))
(testing "custom row builder 2"
(let [row (p/-execute-one (ds)
[(str "select fruit.*, " (index) " + 100 as newid from fruit where " (index) " = ?") 3]
["select fruit.*, id + 100 as newid from fruit where id = ?" 3]
(assoc (default-options)
:builder-fn rs/as-modified-maps
:label-fn str/lower-case
@ -186,12 +176,12 @@
(is (map? row))
(is (contains? row :vegetable/appearance))
(is (nil? (:vegetable/appearance row)))
(is (= 3 ((if (xtdb?) :vegetable/_id :vegetable/id) row)))
(is (= 3 (:vegetable/id row)))
(is (= 103 (:vegetable/newid row))) ; constant qualifier here
(is (= "Peach" (:vegetable/name row)))))
(testing "adapted row builder"
(let [row (p/-execute-one (ds)
[(str "select * from fruit where " (index) " = ?") 3]
["select * from fruit where id = ?" 3]
(assoc
(default-options)
:builder-fn (rs/as-maps-adapter
@ -217,7 +207,7 @@
(fn [^ResultSet rs _ ^Integer i]
(.getObject rs i)))
row (p/-execute-one (ds)
[(str "select * from fruit where " (index) " = ?") 3]
["select * from fruit where id = ?" 3]
(assoc
(default-options)
:builder-fn (rs/as-maps-adapter
@ -246,7 +236,7 @@
(testing "row-numbers on bare abstraction"
(is (= [1 2 3]
(into [] (map rs/row-number)
(p/-execute (ds) [(str "select * from fruit where " (index) " < ?") 4]
(p/-execute (ds) ["select * from fruit where id < ?" 4]
;; we do not need a real builder here...
(cond-> {:builder-fn (constantly nil)}
(derby?)
@ -257,7 +247,7 @@
(is (= [1 2 3]
(into [] (comp (map #(rs/datafiable-row % (ds) {}))
(map rs/row-number))
(p/-execute (ds) [(str "select * from fruit where " (index) " < ?") 4]
(p/-execute (ds) ["select * from fruit where id < ?" 4]
;; ...but datafiable-row requires a real builder
(cond-> {:builder-fn rs/as-arrays}
(derby?)
@ -267,7 +257,7 @@
(deftest test-column-names
(testing "column-names on bare abstraction"
(is (= #{(index) "appearance" "grade" "cost" "name"}
(is (= #{"id" "appearance" "grade" "cost" "name"}
(reduce (fn [_ row]
(-> row
(->> (rs/column-names)
@ -275,11 +265,11 @@
(set)
(reduced))))
nil
(p/-execute (ds) [(str "select * from fruit where " (index) " < ?") 4]
(p/-execute (ds) ["select * from fruit where id < ?" 4]
;; column-names require a real builder
{:builder-fn rs/as-arrays})))))
(testing "column-names on realized row"
(is (= #{(index) "appearance" "grade" "cost" "name"}
(is (= #{"id" "appearance" "grade" "cost" "name"}
(reduce (fn [_ row]
(-> row
(rs/datafiable-row (ds) {})
@ -288,7 +278,7 @@
(set)
(reduced))))
nil
(p/-execute (ds) [(str "select * from fruit where " (index) " < ?") 4]
(p/-execute (ds) ["select * from fruit where id < ?" 4]
{:builder-fn rs/as-arrays}))))))
(deftest test-over-partition-all
@ -309,31 +299,31 @@
(testing "no row builder is used"
(is (= [true]
(into [] (map map?) ; it looks like a real map now
(p/-execute (ds) [(str "select * from fruit where " (index) " = ?") 1]
(p/-execute (ds) ["select * from fruit where id = ?" 1]
{:builder-fn (constantly nil)}))))
(is (= ["Apple"]
(into [] (map :name) ; keyword selection works
(p/-execute (ds) [(str "select * from fruit where " (index) " = ?") 1]
(p/-execute (ds) ["select * from fruit where id = ?" 1]
{:builder-fn (constantly nil)}))))
(is (= [[2 [:name "Banana"]]]
(into [] (map (juxt #(get % (index)) ; get by string key works
(into [] (map (juxt #(get % "id") ; get by string key works
#(find % :name))) ; get MapEntry works
(p/-execute (ds) [(str "select * from fruit where " (index) " = ?") 2]
(p/-execute (ds) ["select * from fruit where id = ?" 2]
{:builder-fn (constantly nil)}))))
(is (= [{(col-kw :id) 3 :name "Peach"}]
(into [] (map #(select-keys % [(col-kw :id) :name])) ; select-keys works
(p/-execute (ds) [(str "select * from fruit where " (index) " = ?") 3]
(is (= [{:id 3 :name "Peach"}]
(into [] (map #(select-keys % [:id :name])) ; select-keys works
(p/-execute (ds) ["select * from fruit where id = ?" 3]
{:builder-fn (constantly nil)}))))
(is (= [[:orange 4]]
(into [] (map #(vector (if (contains? % :name) ; contains works
(keyword (str/lower-case (:name %)))
:unnamed)
(get % (col-kw :id) 0))) ; get with not-found works
(p/-execute (ds) [(str "select * from fruit where " (index) " = ?") 4]
(get % :id 0))) ; get with not-found works
(p/-execute (ds) ["select * from fruit where id = ?" 4]
{:builder-fn (constantly nil)}))))
(is (= [{}]
(into [] (map empty) ; return empty map without building
(p/-execute (ds) [(str "select * from fruit where " (index) " = ?") 1]
(p/-execute (ds) ["select * from fruit where id = ?" 1]
{:builder-fn (constantly nil)})))))
(testing "count does not build a map"
(let [count-builder (fn [_1 _2]
@ -341,7 +331,7 @@
(column-count [_] 13)))]
(is (= [13]
(into [] (map count) ; count relies on columns, not row fields
(p/-execute (ds) [(str "select * from fruit where " (index) " = ?") 1]
(p/-execute (ds) ["select * from fruit where id = ?" 1]
{:builder-fn count-builder}))))))
(testing "assoc, dissoc, cons, seq, and = build maps"
(is (map? (reduce (fn [_ row] (reduced (assoc row :x 1)))
@ -377,40 +367,7 @@
(p/-execute (ds) ["select * from fruit"] {})))))
(is (every? map-entry? (reduce (fn [_ row] (reduced (seq row)))
nil
(p/-execute (ds) ["select * from fruit"] {}))))
(is (map? (reduce (fn [_ row] (reduced (conj row {:a 1})))
nil
(p/-execute (ds) ["select * from fruit"] {}))))
(is (map? (reduce (fn [_ row] (reduced (conj row [:a 1])))
nil
(p/-execute (ds) ["select * from fruit"] {}))))
(is (map? (reduce (fn [_ row] (reduced (conj row {:a 1 :b 2})))
nil
(p/-execute (ds) ["select * from fruit"] {}))))
(is (= 1 (:a (reduce (fn [_ row] (reduced (conj row {:a 1})))
nil
(p/-execute (ds) ["select * from fruit"] {})))))
(is (= 1 (:a (reduce (fn [_ row] (reduced (conj row [:a 1])))
nil
(p/-execute (ds) ["select * from fruit"] {})))))
(is (= 1 (:a (reduce (fn [_ row] (reduced (conj row {:a 1 :b 2})))
nil
(p/-execute (ds) ["select * from fruit"] {})))))
(is (= 2 (:b (reduce (fn [_ row] (reduced (conj row {:a 1 :b 2})))
nil
(p/-execute (ds) ["select * from fruit"] {})))))
(is (vector? (reduce (fn [_ row] (reduced (conj row :a)))
nil
(p/-execute (ds) ["select * from fruit"]
{:builder-fn rs/as-arrays}))))
(is (= :a (peek (reduce (fn [_ row] (reduced (conj row :a)))
nil
(p/-execute (ds) ["select * from fruit"]
{:builder-fn rs/as-arrays})))))
(is (= :b (peek (reduce (fn [_ row] (reduced (conj row :a :b)))
nil
(p/-execute (ds) ["select * from fruit"]
{:builder-fn rs/as-arrays}))))))
(p/-execute (ds) ["select * from fruit"] {})))))
(testing "datafiable-row builds map; with metadata"
(is (map? (reduce (fn [_ row] (reduced (rs/datafiable-row row (ds) {})))
nil
@ -427,7 +384,7 @@
(defn fruit-builder [^ResultSet rs ^ResultSetMetaData rsmeta]
(reify
rs/RowBuilder
(->row [_] (->Fruit (.getObject rs ^String (index))
(->row [_] (->Fruit (.getObject rs "id")
(.getObject rs "name")
(.getObject rs "appearance")
(.getObject rs "cost")
@ -444,7 +401,7 @@
(valAt [this k] (get this k nil))
(valAt [this k not-found]
(case k
:cols [(col-kw :id) :name :appearance :cost :grade]
:cols [:id :name :appearance :cost :grade]
:rsmeta rsmeta
not-found))))
@ -477,7 +434,7 @@
metadata))))
(deftest clob-reading
(when-not (or (mssql?) (mysql?) (postgres?) (xtdb?)) ; no clob in these
(when-not (or (mssql?) (mysql?) (postgres?)) ; no clob in these
(with-open [con (p/get-connection (ds) {})]
(try
(p/-execute-one con ["DROP TABLE CLOBBER"] {})
@ -507,10 +464,10 @@ CREATE TABLE CLOBBER (
(testing "get n on bare abstraction over arrays"
(is (= [1 2 3]
(into [] (map #(get % 0))
(p/-execute (ds) [(str "select " (index) " from fruit where " (index) " < ? order by " (index)) 4]
(p/-execute (ds) ["select id from fruit where id < ?" 4]
{:builder-fn rs/as-arrays})))))
(testing "nth on bare abstraction over arrays"
(is (= [1 2 3]
(into [] (map #(nth % 0))
(p/-execute (ds) [(str "select " (index) " from fruit where " (index) " < ? order by " (index)) 4]
(p/-execute (ds) ["select id from fruit where id < ?" 4]
{:builder-fn rs/as-arrays}))))))
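
Several of the row builders exercised above differ only in how they shape column labels into map keys. A small sketch, again against a hypothetical H2 in-memory `fruit` table; the expected values follow the assertions in the tests (H2 reports upper-case names by default):

(require '[next.jdbc :as jdbc]
         '[next.jdbc.result-set :as rs])

(with-open [con (jdbc/get-connection {:dbtype "h2:mem" :dbname "rs_example"})]
  (jdbc/execute! con ["create table fruit (id int, name varchar(32), appearance varchar(32))"])
  (jdbc/execute! con ["insert into fruit (id, name) values (3, 'Peach')"])

  ;; default builder: table-qualified keys, names as the driver reports them
  (jdbc/execute-one! con ["select * from fruit where id = ?" 3])
  ;; => #:FRUIT{:ID 3, :NAME "Peach", :APPEARANCE nil}

  ;; lower-case, still qualified
  (jdbc/execute-one! con ["select * from fruit where id = ?" 3]
                     {:builder-fn rs/as-lower-maps})
  ;; => #:fruit{:id 3, :name "Peach", :appearance nil}

  ;; unqualified kebab-case: underscores in labels become dashes
  (jdbc/execute-one! con ["select id, appearance as looks_like from fruit where id = ?" 3]
                     {:builder-fn rs/as-unqualified-kebab-maps}))
;; => {:id 3, :looks-like nil}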

View file

@ -1,10 +1,11 @@
;; copyright (c) 2019-2025 Sean Corfield, all rights reserved
;; copyright (c) 2019-2021 Sean Corfield, all rights reserved
(ns next.jdbc.specs-test
"Stub test namespace for the specs.
The specs are used (and 'tested') as part of the tests for the
next.jdbc and next.jdbc.sql namespaces."
(:require [next.jdbc.specs]))
(:require [clojure.test :refer [deftest is testing]]
[next.jdbc.specs :refer :all]))
(set! *warn-on-reflection* true)

View file

@ -1,8 +1,8 @@
;; copyright (c) 2019-2025 Sean Corfield, all rights reserved
;; copyright (c) 2019-2021 Sean Corfield, all rights reserved
(ns next.jdbc.sql.builder-test
"Tests for the SQL string building functions in next.jdbc.sql.builder."
(:require [lazytest.experimental.interfaces.clojure-test :refer [deftest is testing thrown?]]
(:require [clojure.test :refer [deftest is testing]]
[next.jdbc.quoted :refer [mysql sql-server]]
[next.jdbc.sql.builder :as builder]))
@ -11,13 +11,9 @@
(deftest test-by-keys
(testing ":where clause"
(is (= (builder/by-keys {:a nil :b 42 :c "s"} :where {})
["WHERE a IS NULL AND b = ? AND c = ?" 42 "s"]))
(is (= (builder/by-keys {:q/a nil :q/b 42 :q/c "s"} :where {})
["WHERE a IS NULL AND b = ? AND c = ?" 42 "s"])))
(testing ":set clause"
(is (= (builder/by-keys {:a nil :b 42 :c "s"} :set {})
["SET a = ?, b = ?, c = ?" nil 42 "s"]))
(is (= (builder/by-keys {:q/a nil :q/b 42 :q/c "s"} :set {})
["SET a = ?, b = ?, c = ?" nil 42 "s"]))))
(deftest test-as-cols
@ -26,24 +22,14 @@
(is (= (builder/as-cols [[:a :aa] :b ["count(*)" :c]] {})
"a AS aa, b, count(*) AS c"))
(is (= (builder/as-cols [[:a :aa] :b ["count(*)" :c]] {:column-fn mysql})
"`a` AS `aa`, `b`, count(*) AS `c`"))
(is (= (builder/as-cols [:q/a :q/b :q/c] {})
"a, b, c"))
(is (= (builder/as-cols [[:q/a :q/aa] :q/b ["count(*)" :q/c]] {})
"a AS aa, b, count(*) AS c"))
(is (= (builder/as-cols [[:q/a :q/aa] :q/b ["count(*)" :q/c]] {:column-fn mysql})
"`a` AS `aa`, `b`, count(*) AS `c`")))
(deftest test-as-keys
(is (= (builder/as-keys {:a nil :b 42 :c "s"} {})
"a, b, c"))
(is (= (builder/as-keys {:q/a nil :q/b 42 :q/c "s"} {})
"a, b, c")))
(deftest test-as-?
(is (= (builder/as-? {:a nil :b 42 :c "s"} {})
"?, ?, ?"))
(is (= (builder/as-? {:q/a nil :q/b 42 :q/c "s"} {})
"?, ?, ?")))
(deftest test-for-query
@ -59,18 +45,6 @@
{:id nil}
{:table-fn sql-server :column-fn mysql
:suffix "FOR UPDATE"})
["SELECT * FROM [user] WHERE `id` IS NULL FOR UPDATE"]))
(is (= (builder/for-query
:t/user
{:q/id 9}
{:table-fn sql-server :column-fn mysql :order-by [:x/a [:x/b :desc]]})
["SELECT * FROM [user] WHERE `id` = ? ORDER BY `a`, `b` DESC" 9]))
(is (= (builder/for-query :t/user {:q/id nil} {:table-fn sql-server :column-fn mysql})
["SELECT * FROM [user] WHERE `id` IS NULL"]))
(is (= (builder/for-query :t/user
{:q/id nil}
{:table-fn sql-server :column-fn mysql
:suffix "FOR UPDATE"})
["SELECT * FROM [user] WHERE `id` IS NULL FOR UPDATE"])))
(testing "by where clause"
(is (= (builder/for-query
@ -138,27 +112,17 @@
:user
{:opt nil :id 9}
{:table-fn sql-server :column-fn mysql})
["DELETE FROM [user] WHERE `opt` IS NULL AND `id` = ?" 9]))
(is (= (builder/for-delete
:t/user
{:q/opt nil :q/id 9}
{:table-fn sql-server :column-fn mysql})
["DELETE FROM [user] WHERE `opt` IS NULL AND `id` = ?" 9])))
(testing "by where clause"
(is (= (builder/for-delete
:user
["id = ? and opt is null" 9]
{:table-fn sql-server :column-fn mysql})
["DELETE FROM [user] WHERE id = ? and opt is null" 9]))
(is (= (builder/for-delete
:t/user
["id = ? and opt is null" 9]
{:table-fn sql-server :column-fn mysql})
["DELETE FROM [user] WHERE id = ? and opt is null" 9]))))
(deftest test-for-update
(testing "empty example (would be a SQL error)"
(is (thrown? IllegalArgumentException
(is (thrown? AssertionError ; changed in #44
(builder/for-update :user
{:status 42}
{}
@ -168,11 +132,6 @@
{:status 42}
{:id 9}
{:table-fn sql-server :column-fn mysql})
["UPDATE [user] SET `status` = ? WHERE `id` = ?" 42 9]))
(is (= (builder/for-update :t/user
{:q/status 42}
{:q/id 9}
{:table-fn sql-server :column-fn mysql})
["UPDATE [user] SET `status` = ? WHERE `id` = ?" 42 9])))
(testing "by where clause, with nil set value"
(is (= (builder/for-update :user
@ -186,10 +145,6 @@
(is (= (builder/for-insert :user
{:id 9 :status 42 :opt nil}
{:table-fn sql-server :column-fn mysql})
["INSERT INTO [user] (`id`, `status`, `opt`) VALUES (?, ?, ?)" 9 42 nil]))
(is (= (builder/for-insert :t/user
{:q/id 9 :q/status 42 :q/opt nil}
{:table-fn sql-server :column-fn mysql})
["INSERT INTO [user] (`id`, `status`, `opt`) VALUES (?, ?, ?)" 9 42 nil])))
(testing "multi-row insert (normal mode)"
(is (= (builder/for-insert-multi :user
@ -198,13 +153,6 @@
[35 "world"]
[64 "dollars"]]
{:table-fn sql-server :column-fn mysql})
["INSERT INTO [user] (`id`, `status`) VALUES (?, ?), (?, ?), (?, ?)" 42 "hello" 35 "world" 64 "dollars"]))
(is (= (builder/for-insert-multi :t/user
[:q/id :q/status]
[[42 "hello"]
[35 "world"]
[64 "dollars"]]
{:table-fn sql-server :column-fn mysql})
["INSERT INTO [user] (`id`, `status`) VALUES (?, ?), (?, ?), (?, ?)" 42 "hello" 35 "world" 64 "dollars"])))
(testing "multi-row insert (batch mode)"
(is (= (builder/for-insert-multi :user
@ -213,11 +161,4 @@
[35 "world"]
[64 "dollars"]]
{:table-fn sql-server :column-fn mysql :batch true})
["INSERT INTO [user] (`id`, `status`) VALUES (?, ?)" [42 "hello"] [35 "world"] [64 "dollars"]]))
(is (= (builder/for-insert-multi :t/user
[:q/id :q/status]
[[42 "hello"]
[35 "world"]
[64 "dollars"]]
{:table-fn sql-server :column-fn mysql :batch true})
["INSERT INTO [user] (`id`, `status`) VALUES (?, ?)" [42 "hello"] [35 "world"] [64 "dollars"]]))))

View file

@ -1,26 +1,25 @@
;; copyright (c) 2019-2025 Sean Corfield, all rights reserved
;; copyright (c) 2019-2021 Sean Corfield, all rights reserved
(ns next.jdbc.sql-test
"Tests for the syntactic sugar SQL functions."
(:require [lazytest.core :refer [around set-ns-context!]]
[lazytest.experimental.interfaces.clojure-test :refer [deftest is testing thrown?]]
(:require [clojure.test :refer [deftest is testing use-fixtures]]
[next.jdbc :as jdbc]
[next.jdbc.specs :as specs]
[next.jdbc.sql :as sql]
[next.jdbc.test-fixtures
:refer [col-kw column default-options derby? ds index jtds?
maria? mssql? mysql? postgres? sqlite? with-test-db xtdb?]]
:refer [with-test-db ds column default-options
derby? jtds? maria? mssql? mysql? postgres? sqlite?]]
[next.jdbc.types :refer [as-other as-real as-varchar]]))
(set! *warn-on-reflection* true)
(set-ns-context! [(around [f] (with-test-db f))])
(use-fixtures :once with-test-db)
(specs/instrument)
(deftest test-query
(let [ds-opts (jdbc/with-options (ds) (default-options))
rs (sql/query ds-opts [(str "select * from fruit order by " (index))])]
rs (sql/query ds-opts ["select * from fruit order by id"])]
(is (= 4 (count rs)))
(is (every? map? rs))
(is (every? meta rs))
@ -35,10 +34,10 @@
(if (or (mysql?) (sqlite?))
{:limit 2 :offset 1}
{:offset 1 :fetch 2})
:columns [(col-kw :ID)
:columns [:ID
["CASE WHEN grade > 91 THEN 'ok ' ELSE 'bad' END"
:QUALITY]]
:order-by [(col-kw :id)]))]
:order-by [:id]))]
(is (= 2 (count rs)))
(is (every? map? rs))
(is (every? meta rs))
@ -59,27 +58,10 @@
(is (every? meta rs))
(is (= 2 ((column :FRUIT/ID) (first rs)))))))
(deftest test-aggregate-by-keys
(let [ds-opts (jdbc/with-options (ds) (default-options))]
(let [count-v (sql/aggregate-by-keys ds-opts :fruit "count(*)" {:appearance "neon-green"})]
(is (number? count-v))
(is (= 0 count-v)))
(let [count-v (sql/aggregate-by-keys ds-opts :fruit "count(*)" {:appearance "yellow"})]
(is (= 1 count-v)))
(let [count-v (sql/aggregate-by-keys ds-opts :fruit "count(*)" :all)]
(is (= 4 count-v)))
(let [max-id (sql/aggregate-by-keys ds-opts :fruit (str "max(" (index) ")") :all)]
(is (= 4 max-id)))
(when-not (xtdb?) ; XTDB does not support min/max on strings?
(let [min-name (sql/aggregate-by-keys ds-opts :fruit "min(name)" :all)]
(is (= "Apple" min-name))))
(is (thrown? IllegalArgumentException
(sql/aggregate-by-keys ds-opts :fruit "count(*)" :all {:columns []})))))
(deftest test-get-by-id
(let [ds-opts (jdbc/with-options (ds) (default-options))]
(is (nil? (sql/get-by-id ds-opts :fruit -1 (col-kw :id) {})))
(let [row (sql/get-by-id ds-opts :fruit 3 (col-kw :id) {})]
(is (nil? (sql/get-by-id ds-opts :fruit -1)))
(let [row (sql/get-by-id ds-opts :fruit 3)]
(is (map? row))
(is (= "Peach" ((column :FRUIT/NAME) row))))
(let [row (sql/get-by-id ds-opts :fruit "juicy" :appearance {})]
@ -90,28 +72,23 @@
(is (map? row))
(is (= 2 ((column :FRUIT/ID) row))))))
(defn- update-count [n]
(if (xtdb?)
{:next.jdbc/update-count 0}
{:next.jdbc/update-count n}))
(deftest test-update!
(let [ds-opts (jdbc/with-options (ds) (default-options))]
(try
(is (= (update-count 1)
(sql/update! ds-opts :fruit {:appearance "brown"} {(col-kw :id) 2})))
(is (= {:next.jdbc/update-count 1}
(sql/update! ds-opts :fruit {:appearance "brown"} {:id 2})))
(is (= "brown" ((column :FRUIT/APPEARANCE)
(sql/get-by-id ds-opts :fruit 2 (col-kw :id) {}))))
(sql/get-by-id ds-opts :fruit 2))))
(finally
(sql/update! ds-opts :fruit {:appearance "yellow"} {(col-kw :id) 2})))
(sql/update! ds-opts :fruit {:appearance "yellow"} {:id 2})))
(try
(is (= (update-count 1)
(is (= {:next.jdbc/update-count 1}
(sql/update! ds-opts :fruit {:appearance "green"}
["name = ?" "Banana"])))
(is (= "green" ((column :FRUIT/APPEARANCE)
(sql/get-by-id ds-opts :fruit 2 (col-kw :id) {}))))
(sql/get-by-id ds-opts :fruit 2))))
(finally
(sql/update! ds-opts :fruit {:appearance "yellow"} {(col-kw :id) 2})))))
(sql/update! ds-opts :fruit {:appearance "yellow"} {:id 2})))))
(deftest test-insert-delete
(let [new-key (cond (derby?) :1
@ -120,143 +97,95 @@
(mssql?) :GENERATED_KEYS
(mysql?) :GENERATED_KEY
(postgres?) :fruit/id
;; XTDB does not return the generated key so we fix it
;; to be the one we insert here, and then fake it in all
;; the other tests.
(xtdb?) (constantly 5)
(sqlite?) (keyword "last_insert_rowid()")
:else :FRUIT/ID)]
(testing "single insert/delete"
(is (== 5 (new-key (sql/insert! (ds) :fruit
(cond-> {:name (as-varchar "Kiwi")
:appearance "green & fuzzy"
:cost 100 :grade (as-real 99.9)}
(xtdb?)
(assoc :_id 5))
{:suffix
(when (sqlite?)
"RETURNING *")}))))
{:name (as-varchar "Kiwi")
:appearance "green & fuzzy"
:cost 100 :grade (as-real 99.9)}))))
(is (= 5 (count (sql/query (ds) ["select * from fruit"]))))
(is (= (update-count 1)
(sql/delete! (ds) :fruit {(col-kw :id) 5})))
(is (= {:next.jdbc/update-count 1}
(sql/delete! (ds) :fruit {:id 5})))
(is (= 4 (count (sql/query (ds) ["select * from fruit"])))))
(testing "multiple insert/delete"
(is (= (cond (derby?)
[nil] ; WTF Apache Derby?
(mssql?)
[8M]
(maria?)
[6]
(xtdb?)
[]
(sqlite?)
[8]
:else
[6 7 8])
(mapv new-key
(sql/insert-multi! (ds) :fruit
(cond->> [:name :appearance :cost :grade]
(xtdb?) (cons :_id))
(cond->> [["Kiwi" "green & fuzzy" 100 99.9]
["Grape" "black" 10 50]
["Lemon" "yellow" 20 9.9]]
(xtdb?)
(map cons [6 7 8]))
{:suffix
(when (sqlite?)
"RETURNING *")}))))
[:name :appearance :cost :grade]
[["Kiwi" "green & fuzzy" 100 99.9]
["Grape" "black" 10 50]
["Lemon" "yellow" 20 9.9]]))))
(is (= 7 (count (sql/query (ds) ["select * from fruit"]))))
(is (= (update-count 1)
(sql/delete! (ds) :fruit {(col-kw :id) 6})))
(is (= {:next.jdbc/update-count 1}
(sql/delete! (ds) :fruit {:id 6})))
(is (= 6 (count (sql/query (ds) ["select * from fruit"]))))
(is (= (update-count 2)
(sql/delete! (ds) :fruit [(str (index) " > ?") 4])))
(is (= {:next.jdbc/update-count 2}
(sql/delete! (ds) :fruit ["id > ?" 4])))
(is (= 4 (count (sql/query (ds) ["select * from fruit"])))))
(testing "multiple insert/delete with sequential cols/rows" ; per #43
(is (= (cond (derby?)
[nil] ; WTF Apache Derby?
(mssql?)
[11M]
(maria?)
[9]
(xtdb?)
[]
(sqlite?)
[11]
:else
[9 10 11])
(mapv new-key
(sql/insert-multi! (ds) :fruit
(cond->> '(:name :appearance :cost :grade)
(xtdb?) (cons :_id))
(cond->> '(("Kiwi" "green & fuzzy" 100 99.9)
("Grape" "black" 10 50)
("Lemon" "yellow" 20 9.9))
(xtdb?)
(map cons [9 10 11]))
{:suffix
(when (sqlite?)
"RETURNING *")}))))
'(:name :appearance :cost :grade)
'(("Kiwi" "green & fuzzy" 100 99.9)
("Grape" "black" 10 50)
("Lemon" "yellow" 20 9.9))))))
(is (= 7 (count (sql/query (ds) ["select * from fruit"]))))
(is (= (update-count 1)
(sql/delete! (ds) :fruit {(col-kw :id) 9})))
(is (= {:next.jdbc/update-count 1}
(sql/delete! (ds) :fruit {:id 9})))
(is (= 6 (count (sql/query (ds) ["select * from fruit"]))))
(is (= (update-count 2)
(sql/delete! (ds) :fruit [(str (index) " > ?") 4])))
(is (= {:next.jdbc/update-count 2}
(sql/delete! (ds) :fruit ["id > ?" 4])))
(is (= 4 (count (sql/query (ds) ["select * from fruit"])))))
(testing "multiple insert/delete with maps"
(is (= (cond (derby?)
[nil] ; WTF Apache Derby?
(mssql?)
[14M]
(maria?)
[12]
(xtdb?)
[]
(sqlite?)
[14]
:else
[12 13 14])
(mapv new-key
(sql/insert-multi! (ds) :fruit
(cond->> [{:name "Kiwi"
:appearance "green & fuzzy"
:cost 100
:grade 99.9}
{:name "Grape"
:appearance "black"
:cost 10
:grade 50}
{:name "Lemon"
:appearance "yellow"
:cost 20
:grade 9.9}]
(xtdb?)
(map #(assoc %2 :_id %1) [12 13 14]))
{:suffix
(when (sqlite?)
"RETURNING *")}))))
[{:name "Kiwi"
:appearance "green & fuzzy"
:cost 100
:grade 99.9}
{:name "Grape"
:appearance "black"
:cost 10
:grade 50}
{:name "Lemon"
:appearance "yellow"
:cost 20
:grade 9.9}]))))
(is (= 7 (count (sql/query (ds) ["select * from fruit"]))))
(is (= (update-count 1)
(sql/delete! (ds) :fruit {(col-kw :id) 12})))
(is (= {:next.jdbc/update-count 1}
(sql/delete! (ds) :fruit {:id 12})))
(is (= 6 (count (sql/query (ds) ["select * from fruit"]))))
(is (= (update-count 2)
(sql/delete! (ds) :fruit [(str (index) " > ?") 10])))
(is (= {:next.jdbc/update-count 2}
(sql/delete! (ds) :fruit ["id > ?" 10])))
(is (= 4 (count (sql/query (ds) ["select * from fruit"])))))
(testing "empty insert-multi!" ; per #44 and #264
(testing "empty insert-multi!" ; per #44
(is (= [] (sql/insert-multi! (ds) :fruit
[:name :appearance :cost :grade]
[]
{:suffix
(when (sqlite?)
"RETURNING *")})))
;; per #264 the following should all be legal too:
(is (= [] (sql/insert-multi! (ds) :fruit
[]
{:suffix
(when (sqlite?)
"RETURNING *")})))
(is (= [] (sql/insert-multi! (ds) :fruit
[]
[]
{:suffix
(when (sqlite?)
"RETURNING *")})))
(is (= [] (sql/insert-multi! (ds) :fruit [])))
(is (= [] (sql/insert-multi! (ds) :fruit [] []))))))
[]))))))
(deftest no-empty-example-maps
(is (thrown? clojure.lang.ExceptionInfo
@ -277,12 +206,12 @@
(deftest no-empty-order-by
(is (thrown? clojure.lang.ExceptionInfo
(sql/find-by-keys (ds) :fruit
{:name "Apple"}
{:order-by []}))))
{:name "Apple"}
{:order-by []}))))
(deftest array-in
(when (postgres?)
(let [data (sql/find-by-keys (ds) :fruit [(str (index) " = any(?)") (int-array [1 2 3 4])])]
(let [data (sql/find-by-keys (ds) :fruit ["id = any(?)" (int-array [1 2 3 4])])]
(is (= 4 (count data))))))
(deftest enum-pg
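
For orientation, a sketch of the friendly SQL functions this namespace tests, run against a hypothetical H2 in-memory table; the update-count maps are exactly what the assertions above expect on non-XTDB databases:

(require '[next.jdbc :as jdbc]
         '[next.jdbc.sql :as sql])

(with-open [con (jdbc/get-connection {:dbtype "h2:mem" :dbname "sql_example"})]
  (jdbc/execute! con ["create table fruit (id int primary key, name varchar(32), appearance varchar(32))"])
  (sql/insert! con :fruit {:id 1 :name "Kiwi" :appearance "green & fuzzy"})

  (sql/find-by-keys con :fruit {:name "Kiwi"})
  ;; => [#:FRUIT{:ID 1, :NAME "Kiwi", :APPEARANCE "green & fuzzy"}]

  (sql/update! con :fruit {:appearance "brown"} {:id 1})
  ;; => {:next.jdbc/update-count 1}

  (sql/delete! con :fruit ["id > ?" 0]))
;; => {:next.jdbc/update-count 1}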

View file

@ -1,4 +1,4 @@
;; copyright (c) 2019-2024 Sean Corfield, all rights reserved
;; copyright (c) 2019-2021 Sean Corfield, all rights reserved
(ns next.jdbc.test-fixtures
"Multi-database testing fixtures."
@ -36,24 +36,16 @@
(def ^:private test-mysql
(when (System/getenv "NEXT_JDBC_TEST_MYSQL") test-mysql-map))
(defn- create-clojure-test []
(defn create-clojure-test [_]
(when test-mysql
(let [mysql (assoc test-mysql :dbname "mysql")]
(println "Creating clojure-test database in MySQL...")
(loop [n 0]
(when (try
(jdbc/execute-one! mysql ["create database if not exists clojure_test"])
false ; done
(catch Throwable t
(when (< 10 n) (throw t))
(println "\t" (ex-message t) "(will retry)")
(Thread/sleep 3000)
true))
(recur (inc n))))
(println "...done!"))))
(jdbc/execute-one! mysql ["create database if not exists clojure_test"])
(println "...done!")
(shutdown-agents))))
(def ^:private test-mssql-map
{:dbtype "mssql" :dbname "model" :encrypt false :trustServerCertificate true
{:dbtype "mssql" :dbname "model"
:user "sa" :password (System/getenv "MSSQL_SA_PASSWORD")})
(def ^:private test-mssql
(when (System/getenv "NEXT_JDBC_TEST_MSSQL") test-mssql-map))
@ -64,27 +56,16 @@
(def ^:private test-jtds
(when (System/getenv "NEXT_JDBC_TEST_MSSQL") test-jtds-map))
(def ^:private test-xtdb-map {:dbtype "xtdb" :dbname "xtdb"})
(def ^:private test-xtdb
(when (and (System/getenv "NEXT_JDBC_TEST_XTDB")
;; only if we're on jdk21+
(str/starts-with? (System/getProperty "java.version") "2"))
test-xtdb-map))
(def ^:private test-db-specs
(cond-> [test-derby test-h2-mem test-h2 test-hsql test-sqlite]
test-postgres (conj test-postgres)
test-mysql (conj test-mysql)
test-mssql (conj test-mssql test-jtds)
test-xtdb (conj test-xtdb)))
test-mssql (conj test-mssql test-jtds)))
(def ^:private test-db-spec (atom nil))
(defn derby? [] (= "derby" (:dbtype @test-db-spec)))
(defn h2? [] (str/starts-with? (:dbtype @test-db-spec) "h2"))
(defn hsqldb? [] (= "hsqldb" (:dbtype @test-db-spec)))
(defn jtds? [] (= "jtds" (:dbtype @test-db-spec)))
@ -97,34 +78,19 @@
(defn postgres? [] (= "embedded-postgres" (:dbtype @test-db-spec)))
(defn xtdb? [] (= "xtdb" (:dbtype @test-db-spec)))
(defn sqlite? [] (= "sqlite" (:dbtype @test-db-spec)))
(defn stored-proc? [] (not (#{"derby" "h2" "h2:mem" "sqlite" "xtdb"}
(:dbtype @test-db-spec))))
(defn stored-proc? [] (not (#{"derby" "h2" "h2:mem" "sqlite"} (:dbtype @test-db-spec))))
(defn column [k]
(let [n (namespace k)]
(keyword (when n (cond (postgres?) (str/lower-case n)
(mssql?) (str/lower-case n)
(mysql?) (str/lower-case n)
(xtdb?) nil
:else n))
(cond (postgres?) (str/lower-case (name k))
(xtdb?) (let [c (str/lower-case (name k))]
(if (= "id" c) "_id" c))
:else (name k)))))
(defn index []
(if (xtdb?) "_id" "id"))
(defn col-kw [k]
(if (xtdb?)
(let [n (name k)]
(if (= "id" (str/lower-case n)) :_id (keyword n)))
k))
(defn default-options []
(if (mssql?) ; so that we get table names back from queries
{:result-type :scroll-insensitive :concurrency :read-only}
@ -182,54 +148,29 @@
:else
"AUTO_INCREMENT PRIMARY KEY")]
(with-open [con (jdbc/get-connection (ds))]
(if (xtdb?) ; no DDL for creation
(do
(try
(do-commands con ["ERASE FROM fruit WHERE true"])
(catch Throwable _))
(try
(do-commands con ["ERASE FROM btest WHERE true"])
(catch Throwable _))
(sql/insert-multi! con :fruit
[:_id :name :appearance :cost]
[[1 "Apple" "red" 59]]
{:return-keys false})
(sql/insert-multi! con :fruit
[:_id :name :appearance :grade]
[[2 "Banana" "yellow" 92.2]]
{:return-keys false})
(sql/insert-multi! con :fruit
[:_id :name :cost :grade]
[[3 "Peach" 139 90.0]]
{:return-keys false})
(sql/insert-multi! con :fruit
[:_id :name :appearance :cost :grade]
[[4 "Orange" "juicy" 89 88.6]]
{:return-keys false}))
(do
(when (stored-proc?)
(try
(jdbc/execute-one! con ["DROP PROCEDURE FRUITP"])
(catch Throwable _)))
(try
(do-commands con [(str "DROP TABLE " fruit)])
(catch Exception _))
(try
(do-commands con [(str "DROP TABLE " btest)])
(catch Exception _))
(when (postgres?)
(try
(do-commands con ["DROP TABLE LANG_TEST"])
(catch Exception _))
(try
(do-commands con ["DROP TYPE LANGUAGE"])
(catch Exception _))
(do-commands con ["CREATE TYPE LANGUAGE AS ENUM('en','fr','de')"])
(do-commands con ["
(when (stored-proc?)
(try
(jdbc/execute-one! con ["DROP PROCEDURE FRUITP"])
(catch Throwable _)))
(try
(do-commands con [(str "DROP TABLE " fruit)])
(catch Exception _))
(try
(do-commands con [(str "DROP TABLE " btest)])
(catch Exception _))
(when (postgres?)
(try
(do-commands con ["DROP TABLE LANG_TEST"])
(catch Exception _))
(try
(do-commands con ["DROP TYPE LANGUAGE"])
(catch Exception _))
(do-commands con ["CREATE TYPE LANGUAGE AS ENUM('en','fr','de')"])
(do-commands con ["
CREATE TABLE LANG_TEST (
LANG LANGUAGE NOT NULL
)"]))
(do-commands con [(str "
(do-commands con [(str "
CREATE TABLE " fruit " (
ID INTEGER " auto-inc-pk ",
NAME VARCHAR(32),
@ -237,28 +178,28 @@ CREATE TABLE " fruit " (
COST INT DEFAULT NULL,
GRADE REAL DEFAULT NULL
)")])
(let [created (atom false)]
(let [created (atom false)]
;; MS SQL Server does not support bool/boolean:
(doseq [btype ["BOOL" "BOOLEAN" "BIT"]]
(doseq [btype ["BOOL" "BOOLEAN" "BIT"]]
;; Derby does not support bit:
(doseq [bitty ["BIT" "SMALLINT"]]
(try
(when-not @created
(do-commands con [(str "
(doseq [bitty ["BIT" "SMALLINT"]]
(try
(when-not @created
(do-commands con [(str "
CREATE TABLE " btest " (
NAME VARCHAR(32),
IS_IT " btype ",
TWIDDLE " bitty "
)")])
(reset! created true))
(catch Throwable _))))
(when-not @created
(println (:dbtype db) "failed btest creation")
#_(throw (ex-info (str (:dbtype db) " has no boolean type?") {}))))
(when (stored-proc?)
(let [[begin end] (if (postgres?) ["$$" "$$"] ["BEGIN" "END"])]
(try
(do-commands con [(str "
(reset! created true))
(catch Throwable _))))
(when-not @created
(println (:dbtype db) "failed btest creation")
#_(throw (ex-info (str (:dbtype db) " has no boolean type?") {}))))
(when (stored-proc?)
(let [[begin end] (if (postgres?) ["$$" "$$"] ["BEGIN" "END"])]
(try
(do-commands con [(str "
CREATE PROCEDURE FRUITP" (cond (hsqldb?) "() READS SQL DATA DYNAMIC RESULT SETS 2 "
(mssql?) " AS "
(postgres?) "() LANGUAGE SQL AS "
@ -274,28 +215,29 @@ CREATE PROCEDURE FRUITP" (cond (hsqldb?) "() READS SQL DATA DYNAMIC RESULT SETS
SELECT * FROM " fruit " WHERE GRADE >= 90.0;")) "
" end "
")])
(catch Throwable t
(println 'procedure (:dbtype db) (ex-message t))))))
(sql/insert-multi! con :fruit
[:name :appearance :cost :grade]
[["Apple" "red" 59 nil]
["Banana" "yellow" nil 92.2]
["Peach" nil 139 90.0]
["Orange" "juicy" 89 88.6]]
{:return-keys false})))
(catch Throwable t
(println 'procedure (:dbtype db) (ex-message t))))))
(sql/insert-multi! con :fruit
[:name :appearance :cost :grade]
[["Apple" "red" 59 nil]
["Banana" "yellow" nil 92.2]
["Peach" nil 139 90.0]
["Orange" "juicy" 89 88.6]]
{:return-keys false})
(t)))))
(create-clojure-test)
(comment
;; this is a convenience to bring next.jdbc's test dependencies
;; into any REPL running Clojure 1.12.0's new add-libs API
;; into any REPL that has the add-lib3 branch of tools.deps.alpha
;; which allows me to develop and test next.jdbc inside my work's
;; "everything" REPL environment
(require '[clojure.repl.deps :refer [add-libs]]
(require '[clojure.tools.deps.alpha.repl :refer [add-libs]]
'[clojure.java.io :as io]
'[clojure.edn :as edn])
(def test-deps (-> (slurp "https://raw.githubusercontent.com/seancorfield/next-jdbc/develop/deps.edn")
(edn/read-string)
(def repo-path "/Developer/workspace/next.jdbc")
(def test-deps (-> (io/reader (str repo-path "/deps.edn"))
(java.io.PushbackReader.)
(edn/read)
:aliases
:test
:extra-deps))
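
The `index` and `col-kw` helpers added on the develop side of this file exist so the same tests can run against XTDB, which names its primary key column `_id`. A standalone sketch of the idea, with the database check stubbed out purely for illustration:

(require '[clojure.string :as str])

(defn xtdb? [] false)   ; stub: the fixture version checks (:dbtype @test-db-spec)

(defn index []
  (if (xtdb?) "_id" "id"))

(defn col-kw [k]
  (if (xtdb?)
    (let [n (name k)]
      (if (= "id" (str/lower-case n)) :_id (keyword n)))
    k))

(str "select * from fruit where " (index) " = ?")
;; => "select * from fruit where id = ?"   (or "... where _id = ?" against XTDB)
(col-kw :id)
;; => :id                                  (or :_id against XTDB)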

View file

@ -1,10 +1,17 @@
;; copyright (c) 2019-2025 Sean Corfield, all rights reserved
;; copyright (c) 2019-2021 Sean Corfield, all rights reserved
(ns next.jdbc.transaction-test
"Stub test namespace for transaction handling."
(:require [next.jdbc.specs :as specs]
[next.jdbc.transaction]))
(:require [clojure.test :refer [deftest is testing use-fixtures]]
[next.jdbc :as jdbc]
[next.jdbc.specs :as specs]
[next.jdbc.test-fixtures :refer [with-test-db db ds column
default-options
derby? mssql? mysql? postgres?]]
[next.jdbc.transaction :as tx]))
(set! *warn-on-reflection* true)
(use-fixtures :once with-test-db)
(specs/instrument)

View file

@ -1,19 +1,14 @@
;; copyright (c) 2020-2025 Sean Corfield, all rights reserved
;; copyright (c) 2020-2021 Sean Corfield, all rights reserved
(ns next.jdbc.types-test
"Some tests for the type-assist functions."
(:require [lazytest.core :refer [defdescribe describe it expect]]
(:require [clojure.test :refer [deftest is]]
[next.jdbc.types :refer [as-varchar]]))
(set! *warn-on-reflection* true)
(defdescribe as-varchar-tests
(deftest as-varchar-test
(let [v (as-varchar "Hello")]
(describe "produces a function"
(it "yields the original value when invoked"
(expect (fn? v))
(expect (= "Hello" (v)))))
(describe "carries metadata"
(it "has a `set-parameter` function"
(expect (contains? (meta v) 'next.jdbc.prepare/set-parameter))
(expect (fn? (get (meta v) 'next.jdbc.prepare/set-parameter)))))))
(is (= "Hello" (v)))
(is (contains? (meta v) 'next.jdbc.prepare/set-parameter))
(is (fn? (get (meta v) 'next.jdbc.prepare/set-parameter)))))
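
A sketch of what `as-varchar` actually returns, matching the assertions above (no database required); the metadata entry is the hook `next.jdbc.prepare` uses to bind the value with an explicit VARCHAR SQL type:

(require '[next.jdbc.types :refer [as-varchar]])

(def v (as-varchar "Hello"))

(fn? v)                                                 ;; => true  (a thunk wrapping the value)
(v)                                                     ;; => "Hello"
(contains? (meta v) 'next.jdbc.prepare/set-parameter)   ;; => true
(fn? (get (meta v) 'next.jdbc.prepare/set-parameter))   ;; => true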

File diff suppressed because it is too large