Compare commits
585 commits
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
e3b80e6128 | ||
|
|
150a4aa605 | ||
|
|
2a30c5cffd | ||
|
|
0c385c36a6 | ||
|
|
af827c0200 | ||
|
|
0ff09dca21 | ||
|
|
fd3c61b1e6 | ||
|
|
a3fba32605 | ||
|
|
f39f6b3d24 | ||
|
|
99075cd408 | ||
|
|
14b4c3435f | ||
|
|
a40abc285e | ||
|
|
16c1faeaf6 | ||
|
|
d1115c8c42 | ||
|
|
60503dac03 | ||
|
|
a829074b99 | ||
|
|
532d77f372 | ||
|
|
e650137f07 | ||
|
|
9fdc24eab7 | ||
|
|
1642cc04ca | ||
|
|
9d1c35b86c | ||
|
|
ed8ba5402a | ||
|
|
403d156331 | ||
|
|
78894f8ba4 | ||
|
|
0d7b3b60ed | ||
|
|
2214e06cfa | ||
|
|
0d7d58fece | ||
|
|
c2b64dbd54 | ||
|
|
24085c459f | ||
|
|
1062f572d5 | ||
|
|
220957a69f | ||
|
|
c89744b05f | ||
|
|
75dab5d843 | ||
|
|
22e96dcb84 | ||
|
|
9ed335dc8d | ||
|
|
e6d1abf3ec | ||
|
|
946d160409 | ||
|
|
1294e35b98 | ||
|
|
fa6f9040d7 | ||
|
|
3de04761cc | ||
|
|
08c08e7f7b | ||
|
|
56bd2356ac | ||
|
|
c6f86b042e | ||
|
|
a592b7ebae | ||
|
|
d71a3b6423 | ||
|
|
295fd5ddf5 | ||
|
|
f9bfb1248a | ||
|
|
d5f284871b | ||
|
|
2dc72d9254 | ||
|
|
b981357d47 | ||
|
|
801e6c923b | ||
|
|
151b7d8d0b | ||
|
|
87f4224c22 | ||
|
|
4db63f6122 | ||
|
|
0282df7629 | ||
|
|
80748a9593 | ||
|
|
9196b34249 | ||
|
|
b7858c937d | ||
|
|
d1c3291224 | ||
|
|
585a7120a9 | ||
|
|
f43e5490ac | ||
|
|
cad6462c53 | ||
|
|
05cfe1f3fa | ||
|
|
564c43bc79 | ||
|
|
5f0c93642a | ||
|
|
0c50cf28b5 | ||
|
|
ecd950d009 | ||
|
|
bcc6ad85dd | ||
|
|
224333abe6 | ||
|
|
beaaea8f48 | ||
|
|
b2da0156f7 | ||
|
|
f52897adea | ||
|
|
b0a640a101 | ||
|
|
a75468105f | ||
|
|
03117d1160 | ||
|
|
218cf82637 | ||
|
|
64c1f3fefd | ||
|
|
a9058cde18 | ||
|
|
a6eb05ea30 | ||
|
|
b2656120b4 | ||
|
|
4ea3bcf4a3 | ||
|
|
a6ecdee5ac | ||
|
|
fb3188320d | ||
|
|
31ad33dc85 | ||
|
|
26e642096b | ||
|
|
1fed35adc0 | ||
|
|
d6d0edb7a6 | ||
|
|
005ec2ac83 | ||
|
|
1bd4bdedce | ||
|
|
aa7c358cde | ||
|
|
5c9d4795e3 | ||
|
|
212f31d9aa | ||
|
|
35805f1235 | ||
|
|
49a22db52e | ||
|
|
c77f7539c6 | ||
|
|
9e914bc5e1 | ||
|
|
87ea23271e | ||
|
|
eb7dafbe58 | ||
|
|
e2f21e4845 | ||
|
|
05553eee0e | ||
|
|
f03b1ba316 | ||
|
|
4c3e0129bf | ||
|
|
ed95235b7e | ||
|
|
7b32faa7d1 | ||
|
|
3249f31062 | ||
|
|
39018404ab | ||
|
|
3042079138 | ||
|
|
10fd00a756 | ||
|
|
581df5839f | ||
|
|
1476c35413 | ||
|
|
e21a7ab590 | ||
|
|
d644ec4a95 | ||
|
|
ee1511f097 | ||
|
|
0a51334dbf | ||
|
|
a5be7d00cb | ||
|
|
44b3cc206f | ||
|
|
da2bb35ff0 | ||
|
|
6a8c40c998 | ||
|
|
23ae537625 | ||
|
|
60d67d875e | ||
|
|
902eebe536 | ||
|
|
b35d531705 | ||
|
|
81ebccc490 | ||
|
|
bc92cc027d | ||
|
|
6de1175bd8 | ||
|
|
4910030ad3 | ||
|
|
d6fe4c1577 | ||
|
|
2b7f25ac78 | ||
|
|
06bdbd8139 | ||
|
|
c148d6ae81 | ||
|
|
bea53cb15b | ||
|
|
35eb931877 | ||
|
|
115fe1507d | ||
|
|
d688bfaf85 | ||
|
|
02637c9c6c | ||
|
|
e3ff1b7218 | ||
|
|
d8447b66fd | ||
|
|
04588e8ef2 | ||
|
|
c6b4587408 | ||
|
|
190958b74e | ||
|
|
c8cf8c7d2f | ||
|
|
abd926cde3 | ||
|
|
7155ca39c3 | ||
|
|
9ea5b172bb | ||
|
|
cd214cb17e | ||
|
|
044de70b49 | ||
|
|
8ad1110fcb | ||
|
|
fd95a69b5c | ||
|
|
b86c7b42aa | ||
|
|
41e9bf2d6c | ||
|
|
ca23277d42 | ||
|
|
0bb665f767 | ||
|
|
06f9baea01 | ||
|
|
24d55f3165 | ||
|
|
1589fb7e5c | ||
|
|
9db2adc0ea | ||
|
|
0a9ea43277 | ||
|
|
642fcaea8d | ||
|
|
f7938b0ca8 | ||
|
|
0d5758d839 | ||
|
|
b888ed12e0 | ||
|
|
3d512a8f04 | ||
|
|
6432f2f05c | ||
|
|
6e09de7604 | ||
|
|
a2237e474f | ||
|
|
575dac2ec9 | ||
|
|
b5674a169c | ||
|
|
8f372917a4 | ||
|
|
890ff9de7b | ||
|
|
e32013f5a4 | ||
|
|
6f33d5bde2 | ||
|
|
70a5db8b29 | ||
|
|
f6489e0078 | ||
|
|
d46460a2d0 | ||
|
|
a7833b1858 | ||
|
|
482b26ade7 | ||
|
|
7fca17e7d2 | ||
|
|
da58e3c227 | ||
|
|
9c11e1f43f | ||
|
|
6bdc76a244 | ||
|
|
016e7428b0 | ||
|
|
9288257d1e | ||
|
|
26ed77cc50 | ||
|
|
a3ca9dc47c | ||
|
|
dacf1b3904 | ||
|
|
06f9b7908c | ||
|
|
454fa21f42 | ||
|
|
d1f3306e63 | ||
|
|
5557a0e16c | ||
|
|
334d295cc9 | ||
|
|
18b09c0bd7 | ||
|
|
c4fe14e9e6 | ||
|
|
28ab698fda | ||
|
|
a27aa70ff4 | ||
|
|
dd040150fc | ||
|
|
a0be7d162d | ||
|
|
5a6ecca8bc | ||
|
|
961b880e08 | ||
|
|
0c6591054f | ||
|
|
3008aa2c68 | ||
|
|
515618a976 | ||
|
|
12bff229d5 | ||
|
|
edae40525a | ||
|
|
0f3b92a1ee | ||
|
|
77248fc2eb | ||
|
|
e12342dec1 | ||
|
|
075ffa83b3 | ||
|
|
65b26a0fa4 | ||
|
|
8a5f0298cd | ||
|
|
dfceb4d188 | ||
|
|
d5766743f2 | ||
|
|
d83ab07f9f | ||
|
|
af57829fcb | ||
|
|
aac1a6642d | ||
|
|
33dcfb2072 | ||
|
|
c5d8d3c34d | ||
|
|
7a7c31ece4 | ||
|
|
8b9b6af023 | ||
|
|
4191311972 | ||
|
|
88046a4163 | ||
|
|
428647e2a8 | ||
|
|
a1679c33e1 | ||
|
|
6ea7150326 | ||
|
|
00fb7437c0 | ||
|
|
57526124c3 | ||
|
|
92afbcea2c | ||
|
|
fe6f11e4e5 | ||
|
|
07fed8dd31 | ||
|
|
aa933ea94a | ||
|
|
9843ddb73c | ||
|
|
34994332c6 | ||
|
|
d4164c4696 | ||
|
|
56f5c6724b | ||
|
|
aadad07c59 | ||
|
|
bb4bea7d49 | ||
|
|
09cbf11b77 | ||
|
|
a74f62c7c8 | ||
|
|
6f178bc5c1 | ||
|
|
ae9ffd9a09 | ||
|
|
9e21460cbb | ||
|
|
7d5ee09e79 | ||
|
|
1894ec9937 | ||
|
|
5ce2c32732 | ||
|
|
3ed1f4b99c | ||
|
|
0bdffbc87f | ||
|
|
68cff61e94 | ||
|
|
003d47ea5e | ||
|
|
0531ae0268 | ||
|
|
1d0abe2286 | ||
|
|
4104832be3 | ||
|
|
1c56645e62 | ||
|
|
03092d023f | ||
|
|
0cb10285af | ||
|
|
b0af453994 | ||
|
|
147077dea4 | ||
|
|
77a9cb52b1 | ||
|
|
38b924b36e | ||
|
|
5104216560 | ||
|
|
d778fcdb1f | ||
|
|
9fa2828963 | ||
|
|
9433b86b71 | ||
|
|
24bf1dbaa4 | ||
|
|
8c65558f4f | ||
|
|
21a520a8d8 | ||
|
|
c4df28bfe8 | ||
|
|
93b48c8189 | ||
|
|
cdedd4aca7 | ||
|
|
a55d42e1dd | ||
|
|
ff142c7dd0 | ||
|
|
1ff90b1c86 | ||
|
|
8849bacf49 | ||
|
|
dec4526a7f | ||
|
|
f4812b6f55 | ||
|
|
dea3c589c2 | ||
|
|
4009ff41ee | ||
|
|
a2a09516d2 | ||
|
|
5a1fea5bd3 | ||
|
|
baf4e8b7e7 | ||
|
|
83dd26dd2d | ||
|
|
ebcc728206 | ||
|
|
c321905680 | ||
|
|
de1229cce2 | ||
|
|
0565b2c8bb | ||
|
|
99f814bb25 | ||
|
|
ca7a516555 | ||
|
|
1af1ce3a5a | ||
|
|
5d68f59608 | ||
|
|
431b787e13 | ||
|
|
37ca171c84 | ||
|
|
3d8af40a73 | ||
|
|
47ab71ae63 | ||
|
|
6cf2cd52b8 | ||
|
|
d555d42e79 | ||
|
|
e4137797c3 | ||
|
|
061041df6d | ||
|
|
07c517f912 | ||
|
|
b76b60d964 | ||
|
|
70f1eaaf77 | ||
|
|
5343edb667 | ||
|
|
6004b6a183 | ||
|
|
b3f32dab8a | ||
|
|
1fdf6774b1 | ||
|
|
5a9afd5337 | ||
|
|
2cef41d9ae | ||
|
|
f4f743b94f | ||
|
|
eddd940018 | ||
|
|
de432a2245 | ||
|
|
3b2cf7b4ee | ||
|
|
dcc621fb64 | ||
|
|
31b238bfc0 | ||
|
|
3a33a65d79 | ||
|
|
92f838b915 | ||
|
|
fa820cfbdb | ||
|
|
c6124eb8dd | ||
|
|
689747eeb6 | ||
|
|
91aae2c60f | ||
|
|
f3977722f1 | ||
|
|
1a9bc8cbb4 | ||
|
|
16d1ec607f | ||
|
|
a85fd42447 | ||
|
|
0e906581c3 | ||
|
|
0b7293ae4b | ||
|
|
5e4bc9d961 | ||
|
|
b96be78007 | ||
|
|
9dd848412d | ||
|
|
cc975f825d | ||
|
|
b9708e825f | ||
|
|
f9ce47851c | ||
|
|
4f52e2a7ab | ||
|
|
3ebe2732c1 | ||
|
|
5992c8d9bb | ||
|
|
1b150531fd | ||
|
|
522586667b | ||
|
|
1b277587f7 | ||
|
|
a17eec1f37 | ||
|
|
b474a0839a | ||
|
|
f3d8d9a070 | ||
|
|
4b50f19e39 | ||
|
|
237ff37988 | ||
|
|
91a2854449 | ||
|
|
53a9688639 | ||
|
|
08cabd627b | ||
|
|
d3df912aae | ||
|
|
e738120014 | ||
|
|
5aaa280987 | ||
|
|
f322593731 | ||
|
|
dd5a381d0b | ||
|
|
03194a68fe | ||
|
|
dbe4737270 | ||
|
|
5a5730a71b | ||
|
|
1dd46cf1d8 | ||
|
|
f9a05acb16 | ||
|
|
8e68c1be61 | ||
|
|
9cd75dc4b2 | ||
|
|
84216df047 | ||
|
|
273d97a48e | ||
|
|
f80fefa6d6 | ||
|
|
93fbf49893 | ||
|
|
e136673e48 | ||
|
|
6fe7d5b91c | ||
|
|
164572969b | ||
|
|
a568f0fa87 | ||
|
|
8d02bd68f0 | ||
|
|
aea0d1d22f | ||
|
|
b961ef0451 | ||
|
|
92db8477bf | ||
|
|
338f2f2402 | ||
|
|
1792eefe17 | ||
|
|
277409dbd6 | ||
|
|
b5b1a1e371 | ||
|
|
5c1fb1514b | ||
|
|
1177b4ad44 | ||
|
|
38f46e9cc1 | ||
|
|
800baf3085 | ||
|
|
ca5d67f701 | ||
|
|
e8759ceabf | ||
|
|
1f5e79b294 | ||
|
|
fb5bb32044 | ||
|
|
461f712182 | ||
|
|
fed6e671f9 | ||
|
|
91dda2cdae | ||
|
|
1edba5079a | ||
|
|
1d3fd03ae8 | ||
|
|
c0c2522e28 | ||
|
|
00a6a4561b | ||
|
|
7e13e719ce | ||
|
|
21ca944b24 | ||
|
|
c7881fe078 | ||
|
|
dd62b5fcff | ||
|
|
abc4d22302 | ||
|
|
09382ea879 | ||
|
|
d943549758 | ||
|
|
10d6925efd | ||
|
|
63a78f5610 | ||
|
|
e956428ac4 | ||
|
|
04be334a2e | ||
|
|
63eda11e07 | ||
|
|
a1e51bf007 | ||
|
|
e54044e1e6 | ||
|
|
dc240652fc | ||
|
|
268a4b9193 | ||
|
|
8d3758b5aa | ||
|
|
a3fe68e3d1 | ||
|
|
b9a4d0e12d | ||
|
|
d618942ea5 | ||
|
|
1ed8bfcebd | ||
|
|
42ad0eab29 | ||
|
|
102aed61d4 | ||
|
|
ca8c1e4f18 | ||
|
|
fff8c99624 | ||
|
|
4bd71d96c9 | ||
|
|
3059946a8c | ||
|
|
a51f273cca | ||
|
|
0ad59657d7 | ||
|
|
7a9b311f8b | ||
|
|
3b749d3b61 | ||
|
|
9d8e5f808a | ||
|
|
5737495c4e | ||
|
|
3a30e22840 | ||
|
|
5e3657c25f | ||
|
|
347ad43f0b | ||
|
|
6809a6a70e | ||
|
|
f125d056c2 | ||
|
|
f15aa4242f | ||
|
|
f75b1d8b8f | ||
|
|
c76d2241a8 | ||
|
|
75763ba86f | ||
|
|
65ec940ccc | ||
|
|
9cc0577214 | ||
|
|
63b3931134 | ||
|
|
67eadf1bbb | ||
|
|
c2ebaa36fd | ||
|
|
0142b25dc6 | ||
|
|
ef96e04cb6 | ||
|
|
528b5dd726 | ||
|
|
97ff2296e4 | ||
|
|
aa6a92c6c7 | ||
|
|
84fe3ea1af | ||
|
|
c736a63382 | ||
|
|
6c8c4528d4 | ||
|
|
3c50e801bf | ||
|
|
5197e172d8 | ||
|
|
d042939424 | ||
|
|
2adf4a80e8 | ||
|
|
fdeea5c0e9 | ||
|
|
97332c1a1a | ||
|
|
82a6242419 | ||
|
|
9acc198591 | ||
|
|
e9c75a7afd | ||
|
|
4a96165b0b | ||
|
|
049f34e311 | ||
|
|
e006be8ac4 | ||
|
|
0fcbfcdc74 | ||
|
|
583aa5b545 | ||
|
|
db857d335d | ||
|
|
0af209c862 | ||
|
|
c2a18bfea6 | ||
|
|
b210cbead7 | ||
|
|
2f89d3d52b | ||
|
|
734c4bfada | ||
|
|
3143cd1acd | ||
|
|
32e03ad80f | ||
|
|
9501d9293c | ||
|
|
0d2d2c78cd | ||
|
|
52ade2bce8 | ||
|
|
7e8555a2b3 | ||
|
|
8f1ba70d8c | ||
|
|
d7adfc9d5c | ||
|
|
b56bd27702 | ||
|
|
574280dc1d | ||
|
|
e750f20df8 | ||
|
|
531fb702ad | ||
|
|
35ff47cda5 | ||
|
|
931023de09 | ||
|
|
5e38494d8b | ||
|
|
37f167eed1 | ||
|
|
8a8a0b2122 | ||
|
|
d443c28817 | ||
|
|
65296ee4ad | ||
|
|
71ea50eff8 | ||
|
|
cacb4bdb4a | ||
|
|
7ff03231b8 | ||
|
|
e918c36789 | ||
|
|
150acbfb51 | ||
|
|
4a607cbdbb | ||
|
|
e9c1bda637 | ||
|
|
fbbd2b5d4d | ||
|
|
1ee2238df8 | ||
|
|
c09d72f187 | ||
|
|
112f9622bd | ||
|
|
b86145f47e | ||
|
|
bdf69871fc | ||
|
|
3ba67c3f3b | ||
|
|
53b5619566 | ||
|
|
90a6476e52 | ||
|
|
7b24e2ff55 | ||
|
|
99dd44c2ba | ||
|
|
bb0447a14e | ||
|
|
cd3ddc3181 | ||
|
|
c4430abe49 | ||
|
|
d6693a9032 | ||
|
|
eb5bfef585 | ||
|
|
217621cb1c | ||
|
|
c36858ca95 | ||
|
|
d3b51f9cc6 | ||
|
|
265c624f81 | ||
|
|
77ebb31026 | ||
|
|
68d8f98d26 | ||
|
|
efa37ad84f | ||
|
|
469eb0959a | ||
|
|
8085acfcfc | ||
|
|
0cc88f816b | ||
|
|
53ee1c5367 | ||
|
|
0eb183a0a0 | ||
|
|
25cdb3bc3b | ||
|
|
d974ed2313 | ||
|
|
41265e634e | ||
|
|
9987d84210 | ||
|
|
a66e87aed0 | ||
|
|
74988dcb24 | ||
|
|
a4a8602af4 | ||
|
|
3c4638a269 | ||
|
|
036ed0deba | ||
|
|
7dfcb9f9f6 | ||
|
|
4de1d445ec | ||
|
|
26b8fa1600 | ||
|
|
5b15215f83 | ||
|
|
0cfbb58b2e | ||
|
|
9d6e7ab145 | ||
|
|
2a8571bb14 | ||
|
|
3464fca0a2 | ||
|
|
2e1e11c025 | ||
|
|
03792303bc | ||
|
|
2b0515f657 | ||
|
|
7f702c96fe | ||
|
|
71283ce437 | ||
|
|
c7b4bf06c2 | ||
|
|
85734ab724 | ||
|
|
aed1938e89 | ||
|
|
decabae0ab | ||
|
|
7a06e9cca8 | ||
|
|
ffc15fb1f6 | ||
|
|
b9b77bb40f | ||
|
|
15ddd15fe9 | ||
|
|
467a0517bb | ||
|
|
aec0693b14 | ||
|
|
a56c18d531 | ||
|
|
067919a296 | ||
|
|
92f7923a2a | ||
|
|
2d727cc1ac | ||
|
|
4045e18f3e | ||
|
|
509e065fbf | ||
|
|
cfd403301f | ||
|
|
35fc00d4b3 | ||
|
|
751c967741 | ||
|
|
fd37ed8d19 | ||
|
|
e201abdc1e | ||
|
|
6167440500 | ||
|
|
5e98c828a5 | ||
|
|
73aea0ceef | ||
|
|
326977dddc | ||
|
|
ec31aaa427 | ||
|
|
9240d41ed6 | ||
|
|
0a27e51f37 | ||
|
|
f5f05ad537 | ||
|
|
3a6539f3e2 | ||
|
|
7c0cee09bf | ||
|
|
4342bb76da | ||
|
|
6d362ab611 | ||
|
|
768ba3ac44 | ||
|
|
cc3988e773 | ||
|
|
d24dd892dd | ||
|
|
cc1781d3fb | ||
|
|
53c3b5f48e | ||
|
|
6de53525d0 | ||
|
|
0b8b058870 | ||
|
|
ace912c867 | ||
|
|
ab588070e6 | ||
|
|
4be2b7130b | ||
|
|
e406c90d6f | ||
|
|
0952503860 | ||
|
|
db95d370d8 | ||
|
|
332cf4a3ed | ||
|
|
6a6e42e9af | ||
|
|
8f6844aa5d |
79 changed files with 7059 additions and 1483 deletions
|
|
@ -1,19 +0,0 @@
|
||||||
version: 2
|
|
||||||
jobs:
|
|
||||||
build:
|
|
||||||
working_directory: ~/next-jdbc
|
|
||||||
docker:
|
|
||||||
- image: circleci/clojure:openjdk-11-tools-deps-1.10.0.442
|
|
||||||
# environment:
|
|
||||||
# JVM_OPTS: -Xmx3200m
|
|
||||||
steps:
|
|
||||||
- checkout
|
|
||||||
- restore_cache:
|
|
||||||
key: next-jdbc-{{ checksum "deps.edn" }}
|
|
||||||
- run: clojure -R:test:runner -Spath
|
|
||||||
- save_cache:
|
|
||||||
paths:
|
|
||||||
- ~/.m2
|
|
||||||
- ~/.gitlibs
|
|
||||||
key: next-jdbc-{{ checksum "deps.edn" }}
|
|
||||||
- run: clojure -A:test:runner
|
|
||||||
1
.clj-kondo/config.edn
Normal file
1
.clj-kondo/config.edn
Normal file
|
|
@ -0,0 +1 @@
|
||||||
|
{:config-paths ["resources/clj-kondo.exports/com.github.seancorfield/next.jdbc"]}
|
||||||
|
|
@ -0,0 +1,8 @@
|
||||||
|
{:hooks
|
||||||
|
{:analyze-call
|
||||||
|
{next.jdbc/with-transaction
|
||||||
|
hooks.com.github.seancorfield.next-jdbc/with-transaction
|
||||||
|
next.jdbc/with-transaction+options
|
||||||
|
hooks.com.github.seancorfield.next-jdbc/with-transaction+options}}
|
||||||
|
:lint-as {next.jdbc/on-connection clojure.core/with-open
|
||||||
|
next.jdbc/on-connection+options clojure.core/with-open}}
|
||||||
|
|
@ -0,0 +1,34 @@
|
||||||
|
(ns hooks.com.github.seancorfield.next-jdbc
|
||||||
|
(:require [clj-kondo.hooks-api :as api]))
|
||||||
|
|
||||||
|
(defn with-transaction
|
||||||
|
"Expands (with-transaction [tx expr opts] body)
|
||||||
|
to (let [tx expr] opts body) per clj-kondo examples."
|
||||||
|
[{:keys [:node]}]
|
||||||
|
(let [[binding-vec & body] (rest (:children node))
|
||||||
|
[sym val opts] (:children binding-vec)]
|
||||||
|
(when-not (and sym val)
|
||||||
|
(throw (ex-info "No sym and val provided" {})))
|
||||||
|
(let [new-node (api/list-node
|
||||||
|
(list*
|
||||||
|
(api/token-node 'let)
|
||||||
|
(api/vector-node [sym val])
|
||||||
|
opts
|
||||||
|
body))]
|
||||||
|
{:node new-node})))
|
||||||
|
|
||||||
|
(defn with-transaction+options
|
||||||
|
"Expands (with-transaction+options [tx expr opts] body)
|
||||||
|
to (let [tx expr] opts body) per clj-kondo examples."
|
||||||
|
[{:keys [:node]}]
|
||||||
|
(let [[binding-vec & body] (rest (:children node))
|
||||||
|
[sym val opts] (:children binding-vec)]
|
||||||
|
(when-not (and sym val)
|
||||||
|
(throw (ex-info "No sym and val provided" {})))
|
||||||
|
(let [new-node (api/list-node
|
||||||
|
(list*
|
||||||
|
(api/token-node 'let)
|
||||||
|
(api/vector-node [sym val])
|
||||||
|
opts
|
||||||
|
body))]
|
||||||
|
{:node new-node})))
|
||||||
8
.clj-kondo/imports/com.xtdb/xtdb-api/config.edn
Normal file
8
.clj-kondo/imports/com.xtdb/xtdb-api/config.edn
Normal file
|
|
@ -0,0 +1,8 @@
|
||||||
|
{:linters {:xtql/redundant-pipeline {:level :warning}
|
||||||
|
:xtql/redundant-unify {:level :warning}
|
||||||
|
:xtql/unrecognized-operation {:level :error}
|
||||||
|
:xtql/unrecognized-parameter {:level :warning}
|
||||||
|
:xtql/missing-parameter {:level :error}
|
||||||
|
:xtql/type-mismatch {:level :error}
|
||||||
|
:xtql/invalid-arity {:level :error}}
|
||||||
|
:hooks {:analyze-call {xtdb.api/q hooks.xtql/q}}}
|
||||||
567
.clj-kondo/imports/com.xtdb/xtdb-api/hooks/xtql.clj
Normal file
567
.clj-kondo/imports/com.xtdb/xtdb-api/hooks/xtql.clj
Normal file
|
|
@ -0,0 +1,567 @@
|
||||||
|
(ns ^:no-doc hooks.xtql
|
||||||
|
(:require [clj-kondo.hooks-api :as api]))
|
||||||
|
|
||||||
|
(def source-op?
|
||||||
|
#{'from 'rel 'unify})
|
||||||
|
|
||||||
|
(def tail-op?
|
||||||
|
#{'aggregate
|
||||||
|
'limit 'offset
|
||||||
|
'where
|
||||||
|
'order-by
|
||||||
|
'with 'without 'return
|
||||||
|
'unnest})
|
||||||
|
|
||||||
|
(def unify-clause?
|
||||||
|
#{'from 'rel
|
||||||
|
'join 'left-join
|
||||||
|
'unnest
|
||||||
|
'where
|
||||||
|
'with})
|
||||||
|
|
||||||
|
(defn node-map? [node]
|
||||||
|
(contains? #{:map :namespaced-map}
|
||||||
|
(:tag node)))
|
||||||
|
|
||||||
|
(defn node-namespaced-map? [node]
|
||||||
|
(= :namespaced-map (:tag node)))
|
||||||
|
|
||||||
|
(defn map-children [node]
|
||||||
|
(->> (if (node-namespaced-map? node)
|
||||||
|
(-> node :children first)
|
||||||
|
node)
|
||||||
|
:children
|
||||||
|
(partition-all 2)))
|
||||||
|
|
||||||
|
(defn node-vector? [node]
|
||||||
|
(= :vector (:tag node)))
|
||||||
|
|
||||||
|
(defn node-list? [node]
|
||||||
|
(= :list (:tag node)))
|
||||||
|
|
||||||
|
(defn node-symbol? [node]
|
||||||
|
(symbol? (:value node)))
|
||||||
|
|
||||||
|
(defn node-symbol [node]
|
||||||
|
(:value node))
|
||||||
|
|
||||||
|
(defn node-keyword? [node]
|
||||||
|
(keyword? (:k node)))
|
||||||
|
|
||||||
|
(defn node-keyword [node]
|
||||||
|
(:k node))
|
||||||
|
|
||||||
|
(defn node-quote? [node]
|
||||||
|
(= :quote (:tag node)))
|
||||||
|
|
||||||
|
(defn node-op [node]
|
||||||
|
(-> node :children first))
|
||||||
|
|
||||||
|
(declare lint-query)
|
||||||
|
|
||||||
|
(defmulti lint-unify-clause #(-> % node-op node-symbol))
|
||||||
|
(defmulti lint-source-op #(-> % node-op node-symbol))
|
||||||
|
(defmulti lint-tail-op #(-> % node-op node-symbol))
|
||||||
|
|
||||||
|
(defn lint-not-arg-symbol [node]
|
||||||
|
(when (= \$ (-> node node-symbol str first))
|
||||||
|
(api/reg-finding!
|
||||||
|
(assoc (meta node)
|
||||||
|
:message "unexpected parameter in binding"
|
||||||
|
:type :xtql/unrecognized-parameter))))
|
||||||
|
|
||||||
|
(defn lint-bind [node]
|
||||||
|
(cond
|
||||||
|
(node-symbol? node)
|
||||||
|
;; TODO: Make own type, should really be a warning
|
||||||
|
(lint-not-arg-symbol node)
|
||||||
|
|
||||||
|
(node-map? node)
|
||||||
|
(doseq [[k _v] (map-children node)]
|
||||||
|
(when-not (node-keyword? k)
|
||||||
|
(api/reg-finding!
|
||||||
|
(assoc (meta k)
|
||||||
|
:message "all keys in binding maps must be keywords"
|
||||||
|
:type :xtql/type-mismatch))))
|
||||||
|
|
||||||
|
:else
|
||||||
|
(api/reg-finding!
|
||||||
|
(assoc (meta node)
|
||||||
|
:message "expected a symbol or map"
|
||||||
|
:type :xtql/type-mismatch))))
|
||||||
|
|
||||||
|
;; TODO: Lint more unify clauses
|
||||||
|
(defmethod lint-unify-clause :default [node]
|
||||||
|
(when-not (unify-clause? (-> node node-op node-symbol))
|
||||||
|
(api/reg-finding!
|
||||||
|
(assoc (some-> node :children first meta)
|
||||||
|
:message "unrecognized unify clause"
|
||||||
|
:type :xtql/unrecognized-operation))))
|
||||||
|
|
||||||
|
(defmethod lint-unify-clause 'from [node]
|
||||||
|
(lint-source-op node))
|
||||||
|
|
||||||
|
(defmethod lint-unify-clause 'rel [node]
|
||||||
|
(lint-source-op node))
|
||||||
|
|
||||||
|
(defmethod lint-unify-clause 'with [node]
|
||||||
|
(let [opts (-> node :children rest)]
|
||||||
|
(when-not (>= (count opts) 1)
|
||||||
|
(api/reg-finding!
|
||||||
|
(assoc (meta node)
|
||||||
|
:message "expected at least one argument"
|
||||||
|
:type :xtql/invalid-arity)))
|
||||||
|
(doseq [opt opts]
|
||||||
|
(if (node-map? opt)
|
||||||
|
(let [ks (->> opt
|
||||||
|
map-children
|
||||||
|
(map first)
|
||||||
|
(remove node-symbol?))]
|
||||||
|
(doseq [k ks]
|
||||||
|
(api/reg-finding!
|
||||||
|
(assoc (meta k)
|
||||||
|
:message "expected all keys to be symbols in a unify"
|
||||||
|
:type :xtql/type-mismatch))))
|
||||||
|
(api/reg-finding!
|
||||||
|
(assoc (meta opt)
|
||||||
|
:message "opts must be a map"
|
||||||
|
:type :xtql/type-mismatch))))))
|
||||||
|
|
||||||
|
(defn lint-join-clause [node]
|
||||||
|
(let [args (-> node :children rest)]
|
||||||
|
(if-not (= (count args) 2)
|
||||||
|
(api/reg-finding!
|
||||||
|
(assoc (meta node)
|
||||||
|
:message "expected at exactly two arguments"
|
||||||
|
:type :xtql/invalid-arity))
|
||||||
|
(let [[query opts] args]
|
||||||
|
(lint-query query)
|
||||||
|
(cond
|
||||||
|
(node-vector? opts)
|
||||||
|
(->> opts :children (run! lint-bind))
|
||||||
|
(node-map? opts)
|
||||||
|
(let [kvs (map-children opts)
|
||||||
|
ks (->> kvs
|
||||||
|
(map first)
|
||||||
|
(map node-keyword)
|
||||||
|
(remove nil?)
|
||||||
|
(into #{}))]
|
||||||
|
(when-not (contains? ks :bind)
|
||||||
|
(api/reg-finding!
|
||||||
|
(assoc (meta opts)
|
||||||
|
:message "Missing :bind parameter"
|
||||||
|
:type :xtql/missing-parameter)))
|
||||||
|
(doseq [[k v] kvs]
|
||||||
|
(when-not (node-keyword? k)
|
||||||
|
(api/reg-finding!
|
||||||
|
(assoc (meta k)
|
||||||
|
:message "All keys in 'opts' must be keywords"
|
||||||
|
:type :xtql/type-mismatch)))
|
||||||
|
(case (node-keyword k)
|
||||||
|
:bind (if (node-vector? v)
|
||||||
|
(->> v :children (run! lint-bind))
|
||||||
|
(api/reg-finding!
|
||||||
|
(assoc (meta v)
|
||||||
|
:message "expected :bind value to be a vector"
|
||||||
|
:type :xtql/type-mismatch)))
|
||||||
|
:args (if (node-vector? v)
|
||||||
|
;; TODO: Make args specific
|
||||||
|
(->> v :children (run! lint-bind))
|
||||||
|
(api/reg-finding!
|
||||||
|
(assoc (meta v)
|
||||||
|
:message "expected :args value to be a vector"
|
||||||
|
:type :xtql/type-mismatch)))
|
||||||
|
; else
|
||||||
|
(api/reg-finding!
|
||||||
|
(assoc (meta k)
|
||||||
|
:message "unrecognized parameter"
|
||||||
|
:type :xtql/unrecognized-parameter)))))
|
||||||
|
:else
|
||||||
|
(api/reg-finding!
|
||||||
|
(assoc (meta node)
|
||||||
|
:message "opts must be a map or vector"
|
||||||
|
:type :xtql/type-mismatch)))))))
|
||||||
|
|
||||||
|
(defmethod lint-unify-clause 'join [node]
|
||||||
|
(lint-join-clause node))
|
||||||
|
|
||||||
|
(defmethod lint-unify-clause 'inner-join [node]
|
||||||
|
(lint-join-clause node))
|
||||||
|
|
||||||
|
(defmethod lint-unify-clause 'unnest [node]
|
||||||
|
(let [opts (-> node :children rest)]
|
||||||
|
(when-not (= 1 (count opts))
|
||||||
|
(api/reg-finding!
|
||||||
|
(assoc (meta node)
|
||||||
|
:message "expected at exactly one argument"
|
||||||
|
:type :xtql/invalid-arity)))
|
||||||
|
(let [opt (first opts)]
|
||||||
|
(if (node-map? opt)
|
||||||
|
(doseq [[k _v] (map-children opt)]
|
||||||
|
(when-not (node-symbol? k)
|
||||||
|
(api/reg-finding!
|
||||||
|
(assoc (meta k)
|
||||||
|
:message "expected all columns to be symbols"
|
||||||
|
:type :xtql/type-mismatch))))
|
||||||
|
(api/reg-finding!
|
||||||
|
(assoc (meta opt)
|
||||||
|
:message "expected opt to be a map"
|
||||||
|
:type :xtql/type-mismatch))))))
|
||||||
|
|
||||||
|
|
||||||
|
(defmethod lint-source-op :default [node]
|
||||||
|
(let [op (-> node node-op node-symbol)]
|
||||||
|
(if (tail-op? op)
|
||||||
|
(api/reg-finding!
|
||||||
|
(assoc (some-> node :children first meta)
|
||||||
|
:message "tail op in source position"
|
||||||
|
:type :xtql/unrecognized-operation))
|
||||||
|
(when-not (source-op? op)
|
||||||
|
(api/reg-finding!
|
||||||
|
(assoc (some-> node :children first meta)
|
||||||
|
:message "unrecognized source operation"
|
||||||
|
:type :xtql/unrecognized-operation))))))
|
||||||
|
|
||||||
|
(defmethod lint-source-op 'from [node]
|
||||||
|
(let [[_ table opts] (some-> node :children)]
|
||||||
|
(when-not (node-keyword? table)
|
||||||
|
(api/reg-finding!
|
||||||
|
(assoc (meta table)
|
||||||
|
:message "expected 'table' to be a keyword"
|
||||||
|
:type :xtql/type-mismatch)))
|
||||||
|
(case (:tag opts)
|
||||||
|
:vector (->> opts :children (run! lint-bind))
|
||||||
|
:map
|
||||||
|
(let [kvs (map-children opts)
|
||||||
|
ks (->> kvs
|
||||||
|
(map first)
|
||||||
|
(map node-keyword)
|
||||||
|
(remove nil?)
|
||||||
|
(into #{}))]
|
||||||
|
(when-not (contains? ks :bind)
|
||||||
|
(api/reg-finding!
|
||||||
|
(assoc (meta opts)
|
||||||
|
:message "Missing :bind parameter"
|
||||||
|
:type :xtql/missing-parameter)))
|
||||||
|
(doseq [[k v] kvs]
|
||||||
|
(when-not (node-keyword? k)
|
||||||
|
(api/reg-finding!
|
||||||
|
(assoc (meta k)
|
||||||
|
:message "All keys in 'opts' must be keywords"
|
||||||
|
:type :xtql/type-mismatch)))
|
||||||
|
(case (node-keyword k)
|
||||||
|
:bind (if (node-vector? v)
|
||||||
|
(->> v :children (run! lint-bind))
|
||||||
|
(api/reg-finding!
|
||||||
|
(assoc (meta opts)
|
||||||
|
:message "expected :bind value to be a vector"
|
||||||
|
:type :xtql/type-mismatch)))
|
||||||
|
;; TODO
|
||||||
|
:for-valid-time nil
|
||||||
|
;; TODO
|
||||||
|
:for-system-time nil
|
||||||
|
; else
|
||||||
|
(api/reg-finding!
|
||||||
|
(assoc (meta k)
|
||||||
|
:message "unrecognized parameter"
|
||||||
|
:type :xtql/unrecognized-parameter)))))
|
||||||
|
(api/reg-finding!
|
||||||
|
(assoc (meta opts)
|
||||||
|
:message "expected 'opts' to be either a map or vector"
|
||||||
|
:type :xtql/type-mismatch)))))
|
||||||
|
|
||||||
|
(defmethod lint-source-op 'unify [node]
|
||||||
|
(let [[_ & clauses] (some-> node :children)]
|
||||||
|
(doseq [bad-op (remove node-list? clauses)]
|
||||||
|
(api/reg-finding!
|
||||||
|
(assoc (meta bad-op)
|
||||||
|
:message "all operations in a unify must be lists"
|
||||||
|
:type :xtql/type-mismatch)))
|
||||||
|
(when (= (count clauses) 1)
|
||||||
|
(let [clause (first clauses)
|
||||||
|
clause-op (-> clause node-op node-symbol)
|
||||||
|
unify-node (some-> node :children first)]
|
||||||
|
(case clause-op
|
||||||
|
from (api/reg-finding!
|
||||||
|
(assoc (meta unify-node)
|
||||||
|
:message "redundant unify"
|
||||||
|
:type :xtql/redundant-unify))
|
||||||
|
rel (api/reg-finding!
|
||||||
|
(assoc (meta unify-node)
|
||||||
|
:message "redundant unify"
|
||||||
|
:type :xtql/redundant-unify))
|
||||||
|
;; TODO: Cover other operators
|
||||||
|
nil)))
|
||||||
|
(->> clauses
|
||||||
|
(filter node-list?)
|
||||||
|
(run! lint-unify-clause))))
|
||||||
|
|
||||||
|
(defmethod lint-source-op 'rel [node]
|
||||||
|
(let [[_ _expr binds] (some-> node :children)]
|
||||||
|
(if (node-vector? binds)
|
||||||
|
(->> binds :children (run! lint-bind))
|
||||||
|
(api/reg-finding!
|
||||||
|
(assoc (meta binds)
|
||||||
|
:message "expected rel binding to be a vector"
|
||||||
|
:type :xtql/type-mismatch)))))
|
||||||
|
|
||||||
|
;; TODO: Lint more tail ops
|
||||||
|
(defmethod lint-tail-op :default [node]
|
||||||
|
(let [op (-> node node-op node-symbol)]
|
||||||
|
(if (source-op? op)
|
||||||
|
(api/reg-finding!
|
||||||
|
(assoc (some-> node :children first meta)
|
||||||
|
:message "source op in tail position"
|
||||||
|
:type :xtql/unrecognized-operation))
|
||||||
|
(when-not (tail-op? op)
|
||||||
|
(api/reg-finding!
|
||||||
|
(assoc (some-> node :children first meta)
|
||||||
|
:message "unrecognized tail operation"
|
||||||
|
:type :xtql/unrecognized-operation))))))
|
||||||
|
|
||||||
|
(defn lint-keyword [node name]
|
||||||
|
(when-not (node-keyword? node)
|
||||||
|
(api/reg-finding!
|
||||||
|
(assoc (meta node)
|
||||||
|
:message (str "expected '" name "' to be a keyword")
|
||||||
|
:type :xtql/type-mismatch))))
|
||||||
|
|
||||||
|
(defn lint-enum [node name values]
|
||||||
|
;; TODO: Expand to more than just keywords?
|
||||||
|
;; Maybe a `node-value` function?
|
||||||
|
(when-not (contains? values (node-keyword node))
|
||||||
|
(api/reg-finding!
|
||||||
|
(assoc (meta node)
|
||||||
|
:message (str "expected '" name "' to be one of " values)
|
||||||
|
;; TODO: change to different type?
|
||||||
|
:type :xtql/type-mismatch))))
|
||||||
|
|
||||||
|
(defmethod lint-tail-op 'limit [node]
|
||||||
|
(let [opts (-> node :children rest)]
|
||||||
|
(when-not (= 1 (count opts))
|
||||||
|
(api/reg-finding!
|
||||||
|
(assoc (meta node)
|
||||||
|
:message "expected exactly one argument"
|
||||||
|
:type :xtql/invalid-arity)))
|
||||||
|
(when-let [opt (first opts)]
|
||||||
|
(when-not (some-> opt :value int?)
|
||||||
|
(api/reg-finding!
|
||||||
|
(assoc (meta opt)
|
||||||
|
:message "expected limit to be an integer"
|
||||||
|
:type :xtql/type-mismatch))))))
|
||||||
|
|
||||||
|
(defmethod lint-tail-op 'offset [node]
|
||||||
|
(let [opts (-> node :children rest)]
|
||||||
|
(when-not (= 1 (count opts))
|
||||||
|
(api/reg-finding!
|
||||||
|
(assoc (meta node)
|
||||||
|
:message "expected exactly one argument"
|
||||||
|
:type :xtql/invalid-arity)))
|
||||||
|
(when-let [opt (first opts)]
|
||||||
|
(when-not (some-> opt :value int?)
|
||||||
|
(api/reg-finding!
|
||||||
|
(assoc (meta opt)
|
||||||
|
:message "expected offset to be an integer"
|
||||||
|
:type :xtql/type-mismatch))))))
|
||||||
|
|
||||||
|
(defmethod lint-tail-op 'with [node]
|
||||||
|
(let [opts (-> node :children rest)]
|
||||||
|
(when-not (>= (count opts) 1)
|
||||||
|
(api/reg-finding!
|
||||||
|
(assoc (meta node)
|
||||||
|
:message "expected at least one argument"
|
||||||
|
:type :xtql/invalid-arity)))
|
||||||
|
(doseq [opt opts]
|
||||||
|
(cond
|
||||||
|
(node-symbol? opt)
|
||||||
|
(lint-not-arg-symbol opt)
|
||||||
|
(node-map? opt)
|
||||||
|
(let [ks (->> opt
|
||||||
|
map-children
|
||||||
|
(map first)
|
||||||
|
(remove node-keyword?))]
|
||||||
|
(doseq [k ks]
|
||||||
|
(api/reg-finding!
|
||||||
|
(assoc (meta k)
|
||||||
|
:message "expected all keys to be keywords"
|
||||||
|
:type :xtql/type-mismatch))))
|
||||||
|
:else
|
||||||
|
(api/reg-finding!
|
||||||
|
(assoc (meta opt)
|
||||||
|
:message "opts must be a symbol or map"
|
||||||
|
:type :xtql/type-mismatch))))))
|
||||||
|
|
||||||
|
(defmethod lint-tail-op 'return [node]
|
||||||
|
(let [opts (-> node :children rest)]
|
||||||
|
(when-not (>= (count opts) 1)
|
||||||
|
(api/reg-finding!
|
||||||
|
(assoc (meta node)
|
||||||
|
:message "expected at least one argument"
|
||||||
|
:type :xtql/invalid-arity)))
|
||||||
|
(doseq [opt opts]
|
||||||
|
(cond
|
||||||
|
(node-symbol? opt)
|
||||||
|
(lint-not-arg-symbol opt)
|
||||||
|
(node-map? opt)
|
||||||
|
(let [ks (->> opt
|
||||||
|
map-children
|
||||||
|
(map first)
|
||||||
|
(remove node-keyword?))]
|
||||||
|
(doseq [k ks]
|
||||||
|
(api/reg-finding!
|
||||||
|
(assoc (meta k)
|
||||||
|
:message "expected all keys to be keywords"
|
||||||
|
:type :xtql/type-mismatch))))
|
||||||
|
:else
|
||||||
|
(api/reg-finding!
|
||||||
|
(assoc (meta opt)
|
||||||
|
:message "opts must be a symbol or map"
|
||||||
|
:type :xtql/type-mismatch))))))
|
||||||
|
|
||||||
|
(defmethod lint-tail-op 'order-by [node]
|
||||||
|
(doseq [opt (-> node :children rest)]
|
||||||
|
(cond
|
||||||
|
(node-symbol? opt)
|
||||||
|
(lint-not-arg-symbol opt)
|
||||||
|
|
||||||
|
(node-map? opt)
|
||||||
|
(let [kvs (map-children opt)
|
||||||
|
ks (->> kvs
|
||||||
|
(map first)
|
||||||
|
(map node-keyword)
|
||||||
|
(remove nil?)
|
||||||
|
(into #{}))]
|
||||||
|
(when-not (contains? ks :val)
|
||||||
|
(api/reg-finding!
|
||||||
|
(assoc (meta opt)
|
||||||
|
:message "Missing :val parameter"
|
||||||
|
:type :xtql/missing-parameter)))
|
||||||
|
(doseq [[k v] kvs]
|
||||||
|
(when-not (node-keyword? k)
|
||||||
|
(api/reg-finding!
|
||||||
|
(assoc (meta k)
|
||||||
|
:message "All keys in 'opts' must be keywords"
|
||||||
|
:type :xtql/type-mismatch)))
|
||||||
|
(case (node-keyword k)
|
||||||
|
:val
|
||||||
|
(cond
|
||||||
|
(node-symbol? v)
|
||||||
|
(lint-not-arg-symbol v)
|
||||||
|
(node-keyword? v)
|
||||||
|
(api/reg-finding!
|
||||||
|
(assoc (meta v)
|
||||||
|
:message "expected :val value to be a symbol or an expression"
|
||||||
|
:type :xtql/type-mismatch)))
|
||||||
|
; else do nothing
|
||||||
|
:dir
|
||||||
|
(if (node-keyword? v)
|
||||||
|
(lint-enum v :dir #{:asc :desc})
|
||||||
|
(lint-keyword v ":dir value"))
|
||||||
|
:nulls
|
||||||
|
(if (node-keyword? v)
|
||||||
|
(lint-enum v :nulls #{:first :last})
|
||||||
|
(lint-keyword v ":nulls value"))
|
||||||
|
; else
|
||||||
|
(api/reg-finding!
|
||||||
|
(assoc (meta k)
|
||||||
|
:message "unrecognized parameter"
|
||||||
|
:type :xtql/unrecognized-parameter)))))
|
||||||
|
|
||||||
|
:else
|
||||||
|
(api/reg-finding!
|
||||||
|
(assoc (meta opt)
|
||||||
|
:message "opts must be a symbol or map"
|
||||||
|
:type :xtql/type-mismatch)))))
|
||||||
|
|
||||||
|
(defmethod lint-tail-op 'without [node]
|
||||||
|
(let [columns (-> node :children rest)]
|
||||||
|
(when-not (>= (count columns) 1)
|
||||||
|
;; TODO: Should be a warning really
|
||||||
|
(api/reg-finding!
|
||||||
|
(assoc (meta node)
|
||||||
|
:message "expected at least one column"
|
||||||
|
:type :xtql/invalid-arity)))
|
||||||
|
(doseq [column columns]
|
||||||
|
(when-not (node-keyword? column)
|
||||||
|
(api/reg-finding!
|
||||||
|
(assoc (meta column)
|
||||||
|
:message "expected column to be a keyword"
|
||||||
|
:type :xtql/type-mismatch))))))
|
||||||
|
|
||||||
|
(defmethod lint-tail-op 'aggregate [node]
|
||||||
|
(let [opts (-> node :children rest)]
|
||||||
|
(when-not (>= (count opts) 1)
|
||||||
|
(api/reg-finding!
|
||||||
|
(assoc (meta node)
|
||||||
|
:message "expected at least one argument"
|
||||||
|
:type :xtql/invalid-arity)))
|
||||||
|
(doseq [opt opts]
|
||||||
|
(cond
|
||||||
|
(node-symbol? opt)
|
||||||
|
(lint-not-arg-symbol opt)
|
||||||
|
(node-map? opt)
|
||||||
|
(doseq [[k _v] (map-children opt)]
|
||||||
|
(when-not (node-keyword? k)
|
||||||
|
(api/reg-finding!
|
||||||
|
(assoc (meta k)
|
||||||
|
:message "expected all keys to be keywords"
|
||||||
|
:type :xtql/type-mismatch))))
|
||||||
|
|
||||||
|
:else
|
||||||
|
(api/reg-finding!
|
||||||
|
(assoc (meta opt)
|
||||||
|
:message "expected opts to be a symbol or map"
|
||||||
|
:type :xtql/type-mismatch))))))
|
||||||
|
|
||||||
|
(defmethod lint-tail-op 'unnest [node]
|
||||||
|
(let [opts (-> node :children rest)]
|
||||||
|
(when-not (= 1 (count opts))
|
||||||
|
(api/reg-finding!
|
||||||
|
(assoc (meta node)
|
||||||
|
:message "expected at exactly one argument"
|
||||||
|
:type :xtql/invalid-arity)))
|
||||||
|
(let [opt (first opts)]
|
||||||
|
(if (node-map? opt)
|
||||||
|
(doseq [[k _v] (map-children opt)]
|
||||||
|
(when-not (node-keyword? k)
|
||||||
|
(api/reg-finding!
|
||||||
|
(assoc (meta k)
|
||||||
|
:message "expected all columns to be keywords"
|
||||||
|
:type :xtql/type-mismatch))))
|
||||||
|
(api/reg-finding!
|
||||||
|
(assoc (meta opt)
|
||||||
|
:message "expected opt to be a map"
|
||||||
|
:type :xtql/type-mismatch))))))
|
||||||
|
|
||||||
|
(defn lint-pipeline [node]
|
||||||
|
(let [[_ & ops] (some-> node :children)]
|
||||||
|
(doseq [bad-op (remove node-list? ops)]
|
||||||
|
(api/reg-finding!
|
||||||
|
(assoc (meta bad-op)
|
||||||
|
:message "all operations in a pipeline must be lists"
|
||||||
|
:type :xtql/type-mismatch)))
|
||||||
|
(when (= 1 (count ops))
|
||||||
|
(api/reg-finding!
|
||||||
|
(assoc (-> node :children first meta)
|
||||||
|
:message "redundant pipeline"
|
||||||
|
:type :xtql/redundant-pipeline)))
|
||||||
|
(let [first-op (first ops)]
|
||||||
|
(when (node-list? first-op)
|
||||||
|
(lint-source-op (first ops))))
|
||||||
|
(->> ops
|
||||||
|
(drop 1)
|
||||||
|
(filter node-list?)
|
||||||
|
(run! lint-tail-op))))
|
||||||
|
|
||||||
|
(defn lint-query [node]
|
||||||
|
(if (= '-> (node-symbol (-> node :children first)))
|
||||||
|
(lint-pipeline node)
|
||||||
|
(lint-source-op node)))
|
||||||
|
|
||||||
|
;; TODO: Lint other functions that take queries
|
||||||
|
|
||||||
|
(defn q [{:keys [node]}]
|
||||||
|
(let [[_ _node quoted-query] (some-> node :children)]
|
||||||
|
(when (node-quote? quoted-query)
|
||||||
|
(let [query (-> quoted-query :children first)]
|
||||||
|
(lint-query query)))))
|
||||||
6
.clj-kondo/imports/cond_plus/cond_plus/config.edn
Normal file
6
.clj-kondo/imports/cond_plus/cond_plus/config.edn
Normal file
|
|
@ -0,0 +1,6 @@
|
||||||
|
{:linters {:cond-plus/empty-else {:level :error}
|
||||||
|
:cond-plus/missing-fn {:level :error}
|
||||||
|
:cond-plus/non-final-else {:level :error}
|
||||||
|
:cond-plus/sequence {:level :error}
|
||||||
|
:unresolved-symbol {:exclude [(cond-plus.core/cond+ [=> else])]}}
|
||||||
|
:hooks {:analyze-call {cond-plus.core/cond+ hooks.cond-plus-hook/cond+}}}
|
||||||
|
|
@ -0,0 +1,65 @@
|
||||||
|
(ns hooks.cond-plus-hook
|
||||||
|
(:require [clj-kondo.hooks-api :as api]))
|
||||||
|
|
||||||
|
(defn analyze-clauses [clauses]
|
||||||
|
(reduce
|
||||||
|
(fn [found-else? clause]
|
||||||
|
;; non-sequence clause
|
||||||
|
(if (not (or (api/list-node? clause)
|
||||||
|
(api/vector-node? clause)))
|
||||||
|
(let [{:keys [row col]} (meta clause)]
|
||||||
|
(api/reg-finding!
|
||||||
|
{:message "must be sequence"
|
||||||
|
:type :cond-plus/sequence
|
||||||
|
:row row
|
||||||
|
:col col})
|
||||||
|
found-else?)
|
||||||
|
(let [[sym arrow fn-expr] (api/sexpr clause)]
|
||||||
|
(cond
|
||||||
|
;; non-final else
|
||||||
|
found-else?
|
||||||
|
(do (api/reg-finding!
|
||||||
|
(merge
|
||||||
|
{:message ":else must be in final position"
|
||||||
|
:type :cond-plus/non-final-else}
|
||||||
|
found-else?))
|
||||||
|
(reduced nil))
|
||||||
|
;; check fn-exprs
|
||||||
|
(and (or (= :> arrow)
|
||||||
|
(= '=> arrow))
|
||||||
|
(nil? fn-expr))
|
||||||
|
(let [{:keys [row col]} (meta clause)]
|
||||||
|
(api/reg-finding!
|
||||||
|
{:message "fn-expr must have third position symbol"
|
||||||
|
:type :cond-plus/missing-fn
|
||||||
|
:row row
|
||||||
|
:col col})
|
||||||
|
found-else?)
|
||||||
|
;; else handling
|
||||||
|
(or (= :else sym)
|
||||||
|
(= 'else sym))
|
||||||
|
(if found-else?
|
||||||
|
(let [{:keys [row col]} (meta clause)]
|
||||||
|
(api/reg-finding!
|
||||||
|
{:message "only one :else clause allowed"
|
||||||
|
:type :cond-plus/empty-else
|
||||||
|
:row row
|
||||||
|
:col col})
|
||||||
|
;; early exit cuz not worth analyzing the rest
|
||||||
|
(reduced nil))
|
||||||
|
(do (when-not arrow
|
||||||
|
(let [{:keys [row col]} (meta clause)]
|
||||||
|
(api/reg-finding!
|
||||||
|
{:message ":else must have a body"
|
||||||
|
:type :cond-plus/empty-else
|
||||||
|
:row row
|
||||||
|
:col col})))
|
||||||
|
;; Store row and col from existing else as we don't throw until
|
||||||
|
;; we've seen a following clause
|
||||||
|
(select-keys (meta clause) [:row :col])))))))
|
||||||
|
nil
|
||||||
|
clauses))
|
||||||
|
|
||||||
|
(defn cond+ [{:keys [node]}]
|
||||||
|
(analyze-clauses (rest (:children node)))
|
||||||
|
node)
|
||||||
23
.clj-kondo/imports/io.github.noahtheduke/lazytest/config.edn
Normal file
23
.clj-kondo/imports/io.github.noahtheduke/lazytest/config.edn
Normal file
|
|
@ -0,0 +1,23 @@
|
||||||
|
{:lint-as {lazytest.core/given clojure.core/let
|
||||||
|
lazytest.core/around clojure.core/fn
|
||||||
|
lazytest.core/defdescribe clojure.core/def
|
||||||
|
;; clojure.test interface
|
||||||
|
lazytest.experimental.interfaces.clojure-test/deftest clojure.test/deftest
|
||||||
|
lazytest.experimental.interfaces.clojure-test/testing clojure.test/testing
|
||||||
|
lazytest.experimental.interfaces.clojure-test/is clojure.test/is
|
||||||
|
lazytest.experimental.interfaces.clojure-test/are clojure.test/are
|
||||||
|
;; xunit interface
|
||||||
|
lazytest.experimental.interfaces.xunit/defsuite clojure.core/def
|
||||||
|
;; Expectations v2
|
||||||
|
lazytest.extensions.expectations/defexpect clojure.core/def
|
||||||
|
lazytest.extensions.expectations/from-each clojure.core/for
|
||||||
|
lazytest.extensions.expectations/=? clojure.core/=
|
||||||
|
}
|
||||||
|
:hooks {:analyze-call {;; Expectations v2
|
||||||
|
lazytest.extensions.expectations/more-> hooks.lazytest.expectations/more->
|
||||||
|
lazytest.extensions.expectations/more-of hooks.lazytest.expectations/more-of
|
||||||
|
}}
|
||||||
|
:linters {:clojure-lsp/unused-public-var
|
||||||
|
{:exclude-when-defined-by #{lazytest.core/defdescribe
|
||||||
|
lazytest.experimental.interfaces.xunit/defsuite
|
||||||
|
lazytest.experimental.interfaces.clojure-test/deftest}}}}
|
||||||
|
|
@ -0,0 +1,31 @@
|
||||||
|
;; Copied from https://github.com/clojure-expectations/clojure-test/blob/b90ed5b24924238b3b16b0bbaaee4c3b05a1268a
|
||||||
|
|
||||||
|
(ns hooks.lazytest.expectations
|
||||||
|
(:require [clj-kondo.hooks-api :as api]))
|
||||||
|
|
||||||
|
(defn more-> [{:keys [node]}]
|
||||||
|
(let [tail (rest (:children node))
|
||||||
|
rewritten
|
||||||
|
(api/list-node
|
||||||
|
(list*
|
||||||
|
(api/token-node 'cond->)
|
||||||
|
(api/token-node 'nil)
|
||||||
|
tail))]
|
||||||
|
{:node rewritten}))
|
||||||
|
|
||||||
|
(defn more-of [{:keys [node]}]
|
||||||
|
(let [bindings (fnext (:children node))
|
||||||
|
pairs (partition 2 (nnext (:children node)))
|
||||||
|
rewritten
|
||||||
|
(api/list-node
|
||||||
|
(list*
|
||||||
|
(api/token-node 'fn)
|
||||||
|
(api/vector-node (vector bindings))
|
||||||
|
(map (fn [[e a]]
|
||||||
|
(api/list-node
|
||||||
|
(list
|
||||||
|
(api/token-node 'lazytest.core/expect)
|
||||||
|
e
|
||||||
|
a)))
|
||||||
|
pairs)))]
|
||||||
|
{:node rewritten}))
|
||||||
4
.clj-kondo/imports/nubank/matcher-combinators/config.edn
Normal file
4
.clj-kondo/imports/nubank/matcher-combinators/config.edn
Normal file
|
|
@ -0,0 +1,4 @@
|
||||||
|
{:linters
|
||||||
|
{:unresolved-symbol
|
||||||
|
{:exclude [(cljs.test/is [match? thrown-match?])
|
||||||
|
(clojure.test/is [match? thrown-match?])]}}}
|
||||||
5
.clj-kondo/imports/rewrite-clj/rewrite-clj/config.edn
Normal file
5
.clj-kondo/imports/rewrite-clj/rewrite-clj/config.edn
Normal file
|
|
@ -0,0 +1,5 @@
|
||||||
|
{:lint-as
|
||||||
|
{rewrite-clj.zip/subedit-> clojure.core/->
|
||||||
|
rewrite-clj.zip/subedit->> clojure.core/->>
|
||||||
|
rewrite-clj.zip/edit-> clojure.core/->
|
||||||
|
rewrite-clj.zip/edit->> clojure.core/->>}}
|
||||||
1
.clj-kondo/inline-configs/xtdb.api.clj/config.edn
Normal file
1
.clj-kondo/inline-configs/xtdb.api.clj/config.edn
Normal file
|
|
@ -0,0 +1 @@
|
||||||
|
{:config-in-call {xtdb.api/template {:ignore [:unresolved-symbol :unresolved-namespace]}}}
|
||||||
1
.github/FUNDING.yml
vendored
Normal file
1
.github/FUNDING.yml
vendored
Normal file
|
|
@ -0,0 +1 @@
|
||||||
|
github: seancorfield
|
||||||
2
.github/ISSUE_TEMPLATE/bug_report.md
vendored
2
.github/ISSUE_TEMPLATE/bug_report.md
vendored
|
|
@ -28,7 +28,7 @@ If possible, please include your Leiningen or clj dependencies, at least for Clo
|
||||||
**Environment (please complete the following information):**
|
**Environment (please complete the following information):**
|
||||||
- OS: [e.g. Linux]
|
- OS: [e.g. Linux]
|
||||||
- Java Version: [e.g. (AdoptOpenJDK)(build 1.8.0_192-b12)]
|
- Java Version: [e.g. (AdoptOpenJDK)(build 1.8.0_192-b12)]
|
||||||
- Clojure Version: [e.g. 1.10.1]
|
- Clojure Version: [e.g. 1.10.3]
|
||||||
- Database: [e.g., MySQL 5.7 (Percona)]
|
- Database: [e.g., MySQL 5.7 (Percona)]
|
||||||
- Driver Library Version: [e.g., mysql/mysql-connector-java 5.1.41]
|
- Driver Library Version: [e.g., mysql/mysql-connector-java 5.1.41]
|
||||||
|
|
||||||
|
|
|
||||||
54
.github/workflows/test-and-release.yml
vendored
Normal file
54
.github/workflows/test-and-release.yml
vendored
Normal file
|
|
@ -0,0 +1,54 @@
|
||||||
|
name: Release Version
|
||||||
|
|
||||||
|
on:
|
||||||
|
push:
|
||||||
|
tags:
|
||||||
|
- "v*"
|
||||||
|
|
||||||
|
jobs:
|
||||||
|
build-and-release:
|
||||||
|
runs-on: ubuntu-latest
|
||||||
|
steps:
|
||||||
|
- uses: actions/checkout@v4
|
||||||
|
with:
|
||||||
|
fetch-depth: 0
|
||||||
|
- uses: actions/setup-java@v4
|
||||||
|
with:
|
||||||
|
distribution: 'temurin'
|
||||||
|
java-version: '21'
|
||||||
|
- name: Setup Clojure
|
||||||
|
uses: DeLaGuardo/setup-clojure@master
|
||||||
|
with:
|
||||||
|
cli: '1.12.0.1530'
|
||||||
|
- name: Cache All The Things
|
||||||
|
uses: actions/cache@v4
|
||||||
|
with:
|
||||||
|
path: |
|
||||||
|
~/.m2/repository
|
||||||
|
~/.gitlibs
|
||||||
|
~/.clojure
|
||||||
|
~/.cpcache
|
||||||
|
key: ${{ runner.os }}-${{ hashFiles('**/deps.edn') }}
|
||||||
|
- name: Setup Databases
|
||||||
|
run: docker compose up -d
|
||||||
|
env:
|
||||||
|
MYSQL_ROOT_PASSWORD: testing
|
||||||
|
- name: Run MariaDB Tests
|
||||||
|
run: clojure -M:test:runner
|
||||||
|
env:
|
||||||
|
MYSQL_ROOT_PASSWORD: testing
|
||||||
|
NEXT_JDBC_TEST_MYSQL: yes
|
||||||
|
NEXT_JDBC_TEST_MARIADB: yes
|
||||||
|
- name: Run All Tests and Release
|
||||||
|
run: clojure -T:build ci :snapshot false
|
||||||
|
env:
|
||||||
|
MYSQL_ROOT_PASSWORD: testing
|
||||||
|
NEXT_JDBC_TEST_MYSQL: yes
|
||||||
|
NEXT_JDBC_TEST_XTDB: yes
|
||||||
|
NEXT_JDBC_TEST_MSSQL: yes
|
||||||
|
MSSQL_SA_PASSWORD: Str0ngP4ssw0rd
|
||||||
|
- name: Deploy Release
|
||||||
|
run: clojure -T:build deploy :snapshot false
|
||||||
|
env:
|
||||||
|
CLOJARS_PASSWORD: ${{secrets.DEPLOY_TOKEN}}
|
||||||
|
CLOJARS_USERNAME: ${{secrets.DEPLOY_USERNAME}}
|
||||||
78
.github/workflows/test-and-snapshot.yml
vendored
Normal file
78
.github/workflows/test-and-snapshot.yml
vendored
Normal file
|
|
@ -0,0 +1,78 @@
|
||||||
|
name: Develop & Snapshot
|
||||||
|
|
||||||
|
on:
|
||||||
|
push:
|
||||||
|
branches:
|
||||||
|
- "develop"
|
||||||
|
|
||||||
|
jobs:
|
||||||
|
build-and-snapshot:
|
||||||
|
runs-on: ubuntu-latest
|
||||||
|
steps:
|
||||||
|
- uses: actions/checkout@v4
|
||||||
|
- uses: actions/setup-java@v4
|
||||||
|
with:
|
||||||
|
distribution: 'temurin'
|
||||||
|
java-version: '21'
|
||||||
|
- name: Setup Clojure
|
||||||
|
uses: DeLaGuardo/setup-clojure@master
|
||||||
|
with:
|
||||||
|
cli: '1.12.0.1530'
|
||||||
|
- name: Cache All The Things
|
||||||
|
uses: actions/cache@v4
|
||||||
|
with:
|
||||||
|
path: |
|
||||||
|
~/.m2/repository
|
||||||
|
~/.gitlibs
|
||||||
|
~/.clojure
|
||||||
|
~/.cpcache
|
||||||
|
key: ${{ runner.os }}-${{ hashFiles('**/deps.edn') }}
|
||||||
|
- name: Setup Databases
|
||||||
|
run: docker compose up -d
|
||||||
|
env:
|
||||||
|
MYSQL_ROOT_PASSWORD: testing
|
||||||
|
- name: Run MariaDB Tests
|
||||||
|
run: clojure -M:test:runner
|
||||||
|
env:
|
||||||
|
MYSQL_ROOT_PASSWORD: testing
|
||||||
|
NEXT_JDBC_TEST_MYSQL: yes
|
||||||
|
NEXT_JDBC_TEST_MARIADB: yes
|
||||||
|
- name: Run All Tests and Snapshot
|
||||||
|
run: clojure -T:build ci :snapshot true
|
||||||
|
env:
|
||||||
|
MYSQL_ROOT_PASSWORD: testing
|
||||||
|
NEXT_JDBC_TEST_MYSQL: yes
|
||||||
|
NEXT_JDBC_TEST_XTDB: yes
|
||||||
|
NEXT_JDBC_TEST_MSSQL: yes
|
||||||
|
MSSQL_SA_PASSWORD: Str0ngP4ssw0rd
|
||||||
|
- name: Deploy Snapshot
|
||||||
|
run: clojure -T:build deploy :snapshot true
|
||||||
|
env:
|
||||||
|
CLOJARS_PASSWORD: ${{secrets.DEPLOY_TOKEN}}
|
||||||
|
CLOJARS_USERNAME: ${{secrets.DEPLOY_USERNAME}}
|
||||||
|
|
||||||
|
build:
|
||||||
|
runs-on: ubuntu-latest
|
||||||
|
strategy:
|
||||||
|
matrix:
|
||||||
|
java: [ '11', '17', '21' ]
|
||||||
|
steps:
|
||||||
|
- uses: actions/checkout@v4
|
||||||
|
- uses: actions/setup-java@v4
|
||||||
|
with:
|
||||||
|
distribution: 'temurin'
|
||||||
|
java-version: ${{ matrix.java }}
|
||||||
|
- name: Setup Clojure
|
||||||
|
uses: DeLaGuardo/setup-clojure@master
|
||||||
|
with:
|
||||||
|
cli: '1.12.0.1530'
|
||||||
|
- name: Cache All The Things
|
||||||
|
uses: actions/cache@v4
|
||||||
|
with:
|
||||||
|
path: |
|
||||||
|
~/.m2/repository
|
||||||
|
~/.clojure
|
||||||
|
~/.cpcache
|
||||||
|
key: ${{ runner.os }}-${{ hashFiles('**/deps.edn') }}
|
||||||
|
- name: Run Tests
|
||||||
|
run: clojure -T:build:jdk${{ matrix.java }} test
|
||||||
45
.github/workflows/test.yml
vendored
45
.github/workflows/test.yml
vendored
|
|
@ -1,22 +1,47 @@
|
||||||
name: Clojure CI
|
name: Pull Request
|
||||||
|
|
||||||
on: [push]
|
on: [pull_request]
|
||||||
|
|
||||||
jobs:
|
jobs:
|
||||||
build:
|
build:
|
||||||
runs-on: ubuntu-latest
|
runs-on: ubuntu-latest
|
||||||
strategy:
|
strategy:
|
||||||
matrix:
|
matrix:
|
||||||
java: [ '8', '11', '14' ]
|
java: [ '11', '17', '21' ]
|
||||||
steps:
|
steps:
|
||||||
- uses: actions/checkout@v2
|
- uses: actions/checkout@v4
|
||||||
- name: Setup Java
|
- uses: actions/setup-java@v4
|
||||||
uses: actions/setup-java@v1
|
|
||||||
with:
|
with:
|
||||||
|
distribution: 'temurin'
|
||||||
java-version: ${{ matrix.java }}
|
java-version: ${{ matrix.java }}
|
||||||
- name: Setup Clojure
|
- name: Setup Clojure
|
||||||
uses: DeLaGuardo/setup-clojure@2.0
|
uses: DeLaGuardo/setup-clojure@master
|
||||||
with:
|
with:
|
||||||
tools-deps: '1.10.1.536'
|
cli: '1.12.0.1530'
|
||||||
- name: Run Tests
|
- name: Cache All The Things
|
||||||
run: clojure -A:test:runner
|
uses: actions/cache@v4
|
||||||
|
with:
|
||||||
|
path: |
|
||||||
|
~/.m2/repository
|
||||||
|
~/.gitlibs
|
||||||
|
~/.clojure
|
||||||
|
~/.cpcache
|
||||||
|
key: ${{ runner.os }}-${{ hashFiles('**/deps.edn') }}
|
||||||
|
- name: Setup Databases
|
||||||
|
run: docker compose up -d
|
||||||
|
env:
|
||||||
|
MYSQL_ROOT_PASSWORD: testing
|
||||||
|
- name: Run MariaDB Tests
|
||||||
|
run: clojure -M:test:runner
|
||||||
|
env:
|
||||||
|
MYSQL_ROOT_PASSWORD: testing
|
||||||
|
NEXT_JDBC_TEST_MYSQL: yes
|
||||||
|
NEXT_JDBC_TEST_MARIADB: yes
|
||||||
|
- name: Run All Tests
|
||||||
|
run: clojure -M:test:runner:jdk${{ matrix.java }}
|
||||||
|
env:
|
||||||
|
MYSQL_ROOT_PASSWORD: testing
|
||||||
|
NEXT_JDBC_TEST_MYSQL: yes
|
||||||
|
NEXT_JDBC_TEST_XTDB: yes
|
||||||
|
NEXT_JDBC_TEST_MSSQL: yes
|
||||||
|
MSSQL_SA_PASSWORD: Str0ngP4ssw0rd
|
||||||
|
|
|
||||||
44
.gitignore
vendored
44
.gitignore
vendored
|
|
@ -1,17 +1,31 @@
|
||||||
/target
|
.calva/output-window/
|
||||||
/classes
|
.calva/repl.calva-repl
|
||||||
/checkouts
|
.classpath
|
||||||
pom.xml.asc
|
.clj-kondo/.cache
|
||||||
*.jar
|
.clj-kondo/.lock
|
||||||
*.class
|
.cpcache
|
||||||
/.cpcache
|
.eastwood
|
||||||
/.eastwood
|
.factorypath
|
||||||
/.lein-*
|
|
||||||
/.nrepl-history
|
|
||||||
/.nrepl-port
|
|
||||||
.hgignore
|
|
||||||
.hg/
|
.hg/
|
||||||
/clojure_test*
|
.hgignore
|
||||||
/example*db
|
.java-version
|
||||||
|
.lein-*
|
||||||
|
.lsp/.cache
|
||||||
|
.lsp/sqlite.db
|
||||||
|
.nrepl-history
|
||||||
|
.nrepl-port
|
||||||
|
.portal
|
||||||
|
.project
|
||||||
|
.rebel_readline_history
|
||||||
|
.settings
|
||||||
|
.socket-repl-port
|
||||||
|
.sw*
|
||||||
|
*.class
|
||||||
|
*.jar
|
||||||
|
*.swp
|
||||||
|
*~
|
||||||
|
/checkouts
|
||||||
|
/classes
|
||||||
|
/clojure_test_*
|
||||||
/derby.log
|
/derby.log
|
||||||
/run-tests.sh
|
/target
|
||||||
|
|
|
||||||
6
.joker
6
.joker
|
|
@ -1,6 +0,0 @@
|
||||||
{:known-macros [next.jdbc/with-transaction]
|
|
||||||
:ignored-unused-namespaces [next.jdbc.connection
|
|
||||||
next.jdbc.date-time
|
|
||||||
next.jdbc.prepare
|
|
||||||
next.jdbc.result-set
|
|
||||||
next.jdbc.transaction]}
|
|
||||||
305
CHANGELOG.md
305
CHANGELOG.md
|
|
@ -2,9 +2,256 @@
|
||||||
|
|
||||||
Only accretive/fixative changes will be made from now on.
|
Only accretive/fixative changes will be made from now on.
|
||||||
|
|
||||||
## Stable Builds
|
* 1.3.next in progress
|
||||||
|
* Fix handling of `false` in `clob-column-reader` [#299](https://github.com/seancorfield/next-jdbc/issues/299) via PR [#300](https://github.com/seancorfield/next-jdbc/pull/300) from [@GAumala](https://github.com/GAumala)
|
||||||
|
* Switch tests to LazyTest via PR [#297](https://github.com/seancorfield/next-jdbc/pull/297).
|
||||||
|
* Update dev/test/build deps.
|
||||||
|
|
||||||
* 2020-05-23 -- 1.0.445
|
* 1.3.1002 -- 2025-03-06
|
||||||
|
* Address [#296](https://github.com/seancorfield/next-jdbc/issues/296) by adding an explicit check (and `throw`) for `sql-params` in `next.jdbc` functions.
|
||||||
|
* Address [#295](https://github.com/seancorfield/next-jdbc/issues/295) by providing a way to tell `next.jdbc` that certain options should be passed "as-is" in the `Properties` object when creating a `Connection` -- `:next.jdbc/as-is-properties` accepts a sequence (or set) of keywords, identifying properties that should not be converted to strings.
|
||||||
|
* Fix [#181](https://github.com/seancorfield/next-jdbc/issues/181) (again!) by adding `Wrapped` protocol as a way for `DefaultOptions` and `SQLLogging` to consistently expose the underlying connectable, even when nested.
|
||||||
|
|
||||||
|
* 1.3.994 -- 2025-01-28
|
||||||
|
* Fix [#293](https://github.com/seancorfield/next-jdbc/issues/293) by no longer `locking` on the `Connection` retrieved from a `DataSource`.
|
||||||
|
* Fix documentation examples of `execute-batch!` via PR [#292](https://github.com/seancorfield/next-jdbc/pull/292) from [@devurandom](https://github.com/devurandom).
|
||||||
|
* Update `java.data` to 1.3.113.
|
||||||
|
* Beef up bit/boolean tests and enable them for XTDB.
|
||||||
|
|
||||||
|
* 1.3.981 -- 2024-12-13
|
||||||
|
* Address [#291](https://github.com/seancorfield/next-jdbc/issues/291) by adding an XTDB section to **Tips & Tricks**.
|
||||||
|
* Added XTDB as a supported database for testing via PR [#290](https://github.com/seancorfield/next-jdbc/pull/290). _Note: not all features are tested against XTDB due to several fundamental differences in architecture, mostly around primary key/generated keys and lack of DDL operations (since XTDB is schemaless)._
|
||||||
|
* Update dev/test dependencies.
|
||||||
|
|
||||||
|
* 1.3.967 -- 2024-12-02
|
||||||
|
* Address [#288](https://github.com/seancorfield/next-jdbc/issues/288) by adding speculative support for `:dbtype "xtdb"`.
|
||||||
|
* Fix [#287](https://github.com/seancorfield/next-jdbc/issues/287) by merging user-supplied options over `:return-keys true`.
|
||||||
|
* Fix [#282](https://github.com/seancorfield/next-jdbc/issues/282) by tracking raw `Connection` objects for active TXs, which relaxes several of the conditions around nested transactions.
|
||||||
|
* Replace `assert` calls with proper validation, throwing `IllegalArgumentException` on failure.
|
||||||
|
* Removed (experimental) `:name-fn` option since the driver for it no longer exists (qualified columns names in XTDB).
|
||||||
|
|
||||||
|
* 1.3.955 -- 2024-10-06
|
||||||
|
* Address [#285](https://github.com/seancorfield/next-jdbc/issues/285) by setting the default Clojure version to the earliest supported (1.10.3) to give a better hint to users.
|
||||||
|
* Update PostgreSQL **Tips & Tricks** example code to fix possible NPE. PR [#284](https://github.com/seancorfield/next-jdbc/pull/284) from [@ExNexu](https://github.com/ExNexu).
|
||||||
|
* Address [#283](https://github.com/seancorfield/next-jdbc/issues/283) by adding a note in the documentation, linking to the PostgreSQL bug report about `ANY(array)`.
|
||||||
|
* ~Address [#269](https://github.com/seancorfield/next-jdbc/issues/269) by adding `:name-fn` as an option (primarily for the SQL builder functions, but also for result set processing); the default is `clojure.core/name` but you can now use `next.jdbc.sql.builder/qualified-name` to preserve the qualifier.~ _[This was removed in 1.3.967 since XTDB no longer supports qualified column names]_
|
||||||
|
* Update testing deps; `docker-compose` => `docker compose`.
|
||||||
|
|
||||||
|
* 1.3.939 -- 2024-05-17
|
||||||
|
* Fix [#280](https://github.com/seancorfield/next-jdbc/issues/280) by allowing `-` as well as `_` in `nav` foreign key names.
|
||||||
|
* Address [#279](https://github.com/seancorfield/next-jdbc/issues/279) by adding the missing documentation.
|
||||||
|
* Address [#278](https://github.com/seancorfield/next-jdbc/issues/278) by fixing link in options page.
|
||||||
|
* Update dev dependencies, including testing against Clojure 1.12 Alpha 11.
|
||||||
|
|
||||||
|
* 1.3.925 -- 2024-03-15
|
||||||
|
* Address [#275](https://github.com/seancorfield/next-jdbc/issues/275) by noting that PostgreSQL may perform additional SQL queries to produce table names used in qualified result set builders.
|
||||||
|
* Address [#274](https://github.com/seancorfield/next-jdbc/issues/274) by adding `next.jdbc.sql/aggregate-by-keys` as a convenient wrapper around `find-by-keys` when you want just a single aggregate value back (such as `count`, `max`, etc).
|
||||||
|
* Address [#273](https://github.com/seancorfield/next-jdbc/issues/273) by linking to [PG2](https://github.com/igrishaev/pg2) in the PostgreSQL **Tips & Tricks** section.
|
||||||
|
* Address [#268](https://github.com/seancorfield/next-jdbc/issues/268) by expanding the documentation around `insert-multi!` and `insert!`.
|
||||||
|
* Update dependency versions (including Clojure).
|
||||||
|
* Code cleanup per `clj-kondo`.
|
||||||
|
|
||||||
|
* 1.3.909 -- 2023-12-16
|
||||||
|
* Address [#267](https://github.com/seancorfield/next-jdbc/issues/267) by adding the `:schema-opts` option to override the default conventions for identifying foreign keys in columns.
|
||||||
|
* Address [#264](https://github.com/seancorfield/next-jdbc/issues/264) by letting `insert-multi!` accept empty rows (and producing an empty result vector). This improves compatibility with `clojure.java.jdbc`.
|
||||||
|
* Address [#258](https://github.com/seancorfield/next-jdbc/issues/258) by updating all the library (driver) versions in Getting Started to match the latest versions being tested (from `deps.edn`).
|
||||||
|
* Update `java.data` to 1.1.103 so that `next.jdbc` no longer has a transitive dependency on `org.clojure/tools.logging`!
|
||||||
|
* Attempt to clarify that when calling `reduce` on the result of `plan`, you must provide an initial value.
|
||||||
|
* Expand examples for calling `next.jdbc.sql/find-by-keys` to show `LIKE` and `IN` clauses.
|
||||||
|
* Update `tools.build` to 0.9.6 (and get rid of `template/pom.xml` in favor of new `:pom-data` option to `b/write-pom`).
|
||||||
|
|
||||||
|
* 1.3.894 -- 2023-09-24
|
||||||
|
* Fix [#257](https://github.com/seancorfield/next-jdbc/issues/257) by making the `fdef` spec for `with-transaction` more permissive. Also add specs for `on-connection` and the `+options` variants of both macros.
|
||||||
|
* Address [#256](https://github.com/seancorfield/next-jdbc/issues/256) by adding `with-transaction+options` and `on-connection+options`.
|
||||||
|
* Updates most of the JDBC drivers used for testing, including SQLite 3.43.0.0 which now throws an exception when `.getGeneratedKeys()` is called so you cannot use `:return-generated-keys true` with it but you can add `RETURNING *` to your SQL statements instead (the tests have been updated to reflect this).
|
||||||
|
* Update `tools.build` to 0.9.5 (and remove `:java-opts` from `build/test`)
|
||||||
|
|
||||||
|
* 1.3.883 -- 2023-06-25
|
||||||
|
* Address [#254](https://github.com/seancorfield/next-jdbc/issues/254) by adding `next.jdbc/active-tx?` and adding more explanation to [**Transactions**](https://cljdoc.org/d/com.github.seancorfield/next.jdbc/CURRENT/doc/getting-started/transactions) about the conventions behind transactions and the limitations of thread-local tracking of active transactions in `next.jdbc`.
|
||||||
|
* Address [#251](https://github.com/seancorfield/next-jdbc/issues/251) by updating `next.jdbc/with-logging` docstring.
|
||||||
|
* Update dev/test dependencies.
|
||||||
|
|
||||||
|
* 1.3.874 -- 2023-04-15
|
||||||
|
* Fix [#248](https://github.com/seancorfield/next-jdbc/issues/248) by allowing `:port` to be `:none`.
|
||||||
|
* Address [#247](https://github.com/seancorfield/next-jdbc/issues/247) by adding examples of using `next.jdbc.connection/jdbc-url` to build a connection string with additional parameters when creating connection pools.
|
||||||
|
|
||||||
|
* 1.3.865 -- 2023-03-31
|
||||||
|
* Fix [#246](https://github.com/seancorfield/next-jdbc/issues/246) by adopting the `strop` function from HoneySQL.
|
||||||
|
* Address [#245](https://github.com/seancorfield/next-jdbc/issues/245) by not `locking` the `Connection` when `*nested-tx*` is bound to `:ignore` -- improving `clojure.java.jdbc` compatibility.
|
||||||
|
* Address [#237](https://github.com/seancorfield/next-jdbc/issues/237) by adding an `:init-fn` option to the `db-spec` argument for `next.jdbc.connection/component`.
|
||||||
|
|
||||||
|
* 1.3.862 -- 2023-03-13
|
||||||
|
* Fix [#243](https://github.com/seancorfield/next-jdbc/issues/243) by ensuring URI properties become keywords.
|
||||||
|
* Fix [#242](https://github.com/seancorfield/next-jdbc/issues/242) by making the logging wrapper aware of the default options wrapper.
|
||||||
|
|
||||||
|
* 1.3.858 -- 2023-03-05
|
||||||
|
* Address [#241](https://github.com/seancorfield/next-jdbc/issues/241) by correcting link to PostgreSQL docs.
|
||||||
|
* Address [clj-kondo#1685](https://github.com/clj-kondo/clj-kondo/issues/1685) by using `.clj_kondo` extension for hook files.
|
||||||
|
* Improve docs for SQLite users per [#239](https://github.com/seancorfield/next-jdbc/pull/239) -- [peristeri](https://github.com/peristeri).
|
||||||
|
* Address [#236](https://github.com/seancorfield/next-jdbc/issues/236) by showing examples of `run!` over `plan`.
|
||||||
|
|
||||||
|
* 1.3.847 -- 2022-11-04
|
||||||
|
* Fix [#232](https://github.com/seancorfield/next-jdbc/issues/232) by using `as-cols` in `insert-multi!` SQL builder. Thanks to @changsu-farmmorning for spotting that bug!
|
||||||
|
* Fix [#229](https://github.com/seancorfield/next-jdbc/issues/229) by adding `next.jdbc.connect/uri->db-spec` which converts a URI string to a db-spec hash map; in addition, if `DriverManager/getConnection` fails, it assumes it was passed a URI instead of a JDBC URL, and retries after calling that function and then recreating the JDBC URL (which should have the effect of moving the embedded user/password credentials into the properties structure instead of the URL).
|
||||||
|
* Address [#228](https://github.com/seancorfield/next-jdbc/issues/228) by adding `PreparedStatement` caveat to the Oracle **Tips & Tricks** section.
|
||||||
|
* Address [#226](https://github.com/seancorfield/next-jdbc/issues/226) by adding a section on exception handling to **Tips & Tricks** (TL;DR: it's all horribly vendor-specific!).
|
||||||
|
* Add `on-connection` to exported `clj-kondo` configuration.
|
||||||
|
* Switch `run-test` from `sh` to `bb`.
|
||||||
|
|
||||||
|
* 1.3.834 -- 2022-09-23
|
||||||
|
* Fix [#227](https://github.com/seancorfield/next-jdbc/issues/227) by correcting how [#221](https://github.com/seancorfield/next-jdbc/issues/221) was implemented.
|
||||||
|
* Address [#224](https://github.com/seancorfield/next-jdbc/issues/224) by attempting to clarify how to use the snake/kebab options and builders.
|
||||||
|
|
||||||
|
* 1.3.828 -- 2022-09-11
|
||||||
|
* Fix [#222](https://github.com/seancorfield/next-jdbc/issues/222) by correcting implementation of `.cons` on a row.
|
||||||
|
* Address [#221](https://github.com/seancorfield/next-jdbc/issues/221) by supporting `:column-fn` a top-level option in `plan`-related functions to transform keys used in reducing function contexts. Also corrects handling of column names in schema `nav`igation (which previously only supported `:table-fn` and incorrectly applied it to columns as well).
|
||||||
|
* Address [#218](https://github.com/seancorfield/next-jdbc/issues/218) by moving `:extend-via-metadata true` after the protocols' docstrings.
|
||||||
|
* Document `:useBulkCopyForBatchInsert` for Microsoft SQL Server via PR [#216](https://github.com/seancorfield/next-jdbc/issues/216) -- [danskarda](https://github.com/danskarda).
|
||||||
|
* Address [#215](https://github.com/seancorfield/next-jdbc/issues/215) by dropping official support for JDK 8 and updating various JDBC drivers in the testing matrix.
|
||||||
|
* Address [#214](https://github.com/seancorfield/next-jdbc/issues/214) by updating test/CI versions.
|
||||||
|
* Address [#212](https://github.com/seancorfield/next-jdbc/issues/212) by documenting the problem with SQLite's JDBC driver.
|
||||||
|
* Fix [#211](https://github.com/seancorfield/next-jdbc/issues/211) by auto-creating `clojure_test` DB in MySQL if needed; also streamline the CI processes.
|
||||||
|
* Fix [#210](https://github.com/seancorfield/next-jdbc/issues/210) by updating CI to test against MySQL and SQL Server.
|
||||||
|
* Switch SQL Server testing setup to `docker-compose`.
|
||||||
|
|
||||||
|
* 1.2.796 -- 2022-08-01
|
||||||
|
* Make `Transactable` extensible via metadata, via PR [#209](https://github.com/seancorfield/next-jdbc/issues/209) -- [@vemv](https://github.com/vemv).
|
||||||
|
* Fix [#208](https://github.com/seancorfield/next-jdbc/issues/208) by treating unsupported exception as an empty string, just like the JDBC docs say should happen.
|
||||||
|
|
||||||
|
* 1.2.790 -- 2022-07-29
|
||||||
|
* Address [#207](https://github.com/seancorfield/next-jdbc/issues/207) by supporting "db-spec" hash maps containing `:datasource` or `:connection-uri` (this is otherwise undocumented and intended to aid migration from `clojure.java.jdbc`).
|
||||||
|
* Address [#199](https://github.com/seancorfield/next-jdbc/issues/199) by adding notes on UTC usage -- [@denismccarthykerry](https://github.com/denismccarthykerry).
|
||||||
|
* Enhance `insert-multi!` to accept a sequence of hash maps and also to support batch execution, via PR [#206](https://github.com/seancorfield/next-jdbc/pull/206) -- [@rschmukler](https://github.com/rschmukler).
|
||||||
|
* Fix HikariCP pooling example.
|
||||||
|
|
||||||
|
* 1.2.780 -- 2022-04-04
|
||||||
|
* Address [#204](https://github.com/seancorfield/next-jdbc/issues/204) by adding `next.jdbc/on-connection`.
|
||||||
|
* Address [#203](https://github.com/seancorfield/next-jdbc/issues/203) by adding a note to the **PostgreSQL Tips & Tricks** section.
|
||||||
|
* Update `build-clj` to v0.8.0.
|
||||||
|
|
||||||
|
* 1.2.772 -- 2022-02-09
|
||||||
|
* To support more tools that perform `datafy`/`nav`, make rows directly `nav`able (even though this is not really the correct behavior).
|
||||||
|
* Address #193 by expanding the argument specs for `get-datasource` and `get-connection`.
|
||||||
|
* Streamline `execute-batch!` for `with-options` and `with-logging` (and this should generalize to any wrapper that satisfies `Connectable` and stores the actual `Connection` under the `:connectable` key).
|
||||||
|
* Update log4j2 test dependency.
|
||||||
|
* Update `build-clj` to v0.6.7.
|
||||||
|
|
||||||
|
* 1.2.761 -- 2021-12-15
|
||||||
|
* Fix #194 by throwing an exception if a table or column name used with the friendly SQL functions (or the SQL builder functions behind them) contains a "suspicious" character (currently, that's just `;`).
|
||||||
|
* Update several test dependencies (incl. log4j2).
|
||||||
|
* Update `build-clj` to v0.6.3.
|
||||||
|
|
||||||
|
* 1.2.753 -- 2021-11-17
|
||||||
|
* Address #187 by adding `clj-kondo.exports` for future expansion (`with-transaction` is already built into `clj-kondo`).
|
||||||
|
* Documentation updates; `pom.xml` template cleanup.
|
||||||
|
* Update `build-clj` to v0.5.4.
|
||||||
|
|
||||||
|
* 1.2.737 -- 2021-10-17
|
||||||
|
* Address #186 by updating `java.data` to 1.0.92 and documenting HikariCP's `:dataSourceProperties`.
|
||||||
|
* Address #184 by improving documentation about `:jdbcUrl`.
|
||||||
|
|
||||||
|
* 1.2.731 -- 2021-10-04
|
||||||
|
* Fix #181 by supporting option-wrapped connectables in `execute-batch!`.
|
||||||
|
* Address #179 by improving documentation around connection pool initialization.
|
||||||
|
* Update `build-clj` to v0.5.0.
|
||||||
|
|
||||||
|
* 1.2.724 -- 2021-09-25
|
||||||
|
* Make `next.jdbc` compatible with GraalVM 22+ (PR #178, @FieryCod).
|
||||||
|
* Address #177 by adding an important performance tip for Oracle.
|
||||||
|
* Update most of the JDBC drivers for testing; make it easier to test MariaDB's driver;
|
||||||
|
|
||||||
|
* 1.2.709 -- 2021-08-30
|
||||||
|
* Fix #174 by removing `:property-separator` from "etc" map and defaulting H2 to `";"` for this.
|
||||||
|
* Switch to `tools.build` for running tests and JAR building etc.
|
||||||
|
|
||||||
|
* 1.2.689 -- 2021-08-01
|
||||||
|
* Address #173 by extending `DatafiableRow` to `ResultSet` so there's a public method to call on (rows of) a JDBC result set directly.
|
||||||
|
* Address #171 by clarifying that you cannot use `clojure.java.jdbc` functions inside `next.jdbc/with-transaction` and discuss how to migrate transaction-based code in the **Migration** guide.
|
||||||
|
* Address #169 by expanding the description of `with-transaction` in **Getting Started**.
|
||||||
|
* Cross-link to HoneySQL documentation for JSON/JSONB manipulation.
|
||||||
|
* Remove superfluous prev/next links in docs (cljdoc does this automatically now).
|
||||||
|
* Update `depstar`, `test-runner`, and CI versions. Add example `build.clj` to run tests in a subprocess (purely educational).
|
||||||
|
|
||||||
|
* 1.2.674 -- 2021-06-16
|
||||||
|
* Fix #167 by adding `:property-separator` to `next.jdbc.connection/dbtypes` and using it in `jdbc-url`.
|
||||||
|
* Address #166 by adding `next.jdbc/with-logging` to create a wrapped connectable that will invoke logging functions with the SQL/parameters and optionally the result or exception for each operation.
|
||||||
|
* Fix `:unit_count` references in **Getting Started** (were `:unit_cost`).
|
||||||
|
* Update `test-runner`.
|
||||||
|
|
||||||
|
* 1.2.659 -- 2021-05-05
|
||||||
|
* Address #164 by making `clj-commons/camel-snake-kebab` an unconditional dependency. _[Being a conditional dependency that could be brought in at runtime caused problems with GraalVM-based native compilation as well as with multi-project monorepos]_
|
||||||
|
* Add **Tips & Tricks** section about working with PostgreSQL "interval" types (via PR #163 from @snorremd).
|
||||||
|
* Address #162 by adding GraalVM to the test matrix (thank you @DeLaGuardo).
|
||||||
|
* Update several dependency versions.
|
||||||
|
|
||||||
|
* 1.1.646 -- 2021-03-15
|
||||||
|
* Fix #161 by allowing `execute-batch!` to work with datasources and connections, and providing the SQL statement directly.
|
||||||
|
|
||||||
|
* 1.1.643 -- 2021-03-06
|
||||||
|
* Change coordinates to `com.github.seancorfield/next.jdbc` (although new versions will continue to be deployed to `seancorfield/next.jdbc` for a while -- see the [Clojars Verified Group Names policy](https://github.com/clojars/clojars-web/wiki/Verified-Group-Names)).
|
||||||
|
* Documented `next.jdbc.transaction/*nested-tx*` more thoroughly since that difference from `clojure.java.jdbc` has come up in conversation a few times recently.
|
||||||
|
* Fix #158 by documenting (and testing) `:allowMultiQueries true` as an option for MySQL/MariaDB to allow multiple statements to be executed and multiple result sets to be returned.
|
||||||
|
* Fix #157 by copying `next.jdbc.prepare/execute-batch!` to `next.jdbc/execute-batch!` (to avoid a circular dependency that previously relied on requiring `next.jdbc.result-set` at runtime -- which was problematic for GraalVM-based native compilation); **`next.jdbc.prepare/execute-batch!` is deprecated:** it will continue to exist and work, but is no longer documented. In addition, `next.jdbc.prepare/execute-batch!` now relies on a private `volatile!` in order to reference `next.jdbc.result-set/datafiable-result-set` so that it is GraalVM-friendly. Note: code that requires `next.jdbc.prepare` and uses `execute-batch!` without also requiring something that causes `next.jdbc.result-set` to be loaded will no longer return generated keys from `execute-batch!` but that's an almost impossible path since nearly all code that uses `execute-batch!` will have called `next.jdbc/prepare` to get the `PreparedStatement` in the first place.
|
||||||
|
|
||||||
|
* 1.1.613 -- 2020-11-05
|
||||||
|
* Fix #144 by ensuring `camel-snake-kebab` is properly required before use in an uberjar context.
|
||||||
|
|
||||||
|
* 1.1.610 -- 2020-10-19
|
||||||
|
* Fix #140 by adding `"duckdb"` to `next.jdbc.connection/dbtypes`.
|
||||||
|
* Change `next.jdbc.types/as-*` functions to use a thunk instead of a vector to convey metadata, so that wrapped values do not get unpacked by HoneySQL.
|
||||||
|
* Refactor reducing and folding code around `ResultSet`, so that `reducible-result-set` and `foldable-result-set` can be exposed for folks who want more control over processing result sets obtained from database metadata.
|
||||||
|
* `datafiable-result-set` can now be called without the `connectable` and/or `opts` arguments; a `nil` connectable now disables foreign key navigation in datafied results (rather than throwing an obscure exception).
|
||||||
|
|
||||||
|
* 1.1.588 -- 2020-09-09
|
||||||
|
* Fix #139 by adding `next.jdbc.plan/select-one!` and `next.jdbc.plan/select!`.
|
||||||
|
* If `ResultSet.getMetaData()` returns `null`, we assume the column count is zero, i.e., an empty result set. This should "never happen" but some JDBC drivers are badly behaved and their idea of an "empty result set" does not match the JDBC API spec.
|
||||||
|
|
||||||
|
* 1.1.582 -- 2020-08-05
|
||||||
|
* Fix #138 by exposing `next.jdbc.connection/jdbc-url` to build `:jdbcUrl` values that can be passed to `->pool` or `component`.
|
||||||
|
|
||||||
|
* 1.1.581 -- 2020-08-03
|
||||||
|
* Fix #137 by adding support for specifying username and password per-connection (if your datasource supports this).
|
||||||
|
* Document SQLite handling of `bool` and `bit` columns in a new **Tips & Tricks** section, inspired by #134.
|
||||||
|
* Address #133 by adding `:return-generated-keys` as an option on `execute-batch!`.
|
||||||
|
|
||||||
|
* 1.1.569 -- 2020-07-10
|
||||||
|
* Fix #132 by adding specs for `next.jdbc/with-options` and `next.jdbc.prepare/statement`; correct spec for `next.jdbc.connection/component`. PR #131 from @Briaoeuidhtns.
|
||||||
|
* Fix #130 by implementing `clojure.lang.ILookup` on the three builder adapters.
|
||||||
|
* Fix #129 by adding `with-column-value` to `RowBuilder` and a more generic `builder-adapter`.
|
||||||
|
* Fix #128 by adding a test for the "not found" arity of lookup on mapified result sets.
|
||||||
|
* Fix #121 by conditionally adding `next.jdbc/snake-kebab-opts`, `next.jdbc/unqualified-snake-kebab-opts`, `next.jdbc.result-set/as-kebab-maps`, and `next.jdbc.result-set/as-unqualified-kebab-maps` (which are present only if `camel-snake-kebab` is on your classpath). _As of 1.2.659, these are included unconditionally and `next.jdbc` depends directly on `camel-snake-kebab`._
|
||||||
|
* Correct MySQL batch statement rewrite tip: it's `:rewriteBatchedStatements true` (plural). Also surface the batch statement tips in the **Tips & Tricks** page.
|
||||||
|
* Clarify how combining is interleaving with reducing in **Reducing and Folding with `plan`**.
|
||||||
|
* Use "JDBC URL" consistently everywhere (instead of "JDBC URI" in several places).
|
||||||
|
|
||||||
|
* 1.1.547 -- 2020-06-29
|
||||||
|
* Address #125 by making the result of `plan` foldable (in the `clojure.core.reducers` sense).
|
||||||
|
* Address #124 by extending `next.jdbc.sql.builder/for-query` to support `:top` (SQL Server), `:limit` / `:offset` (MySQL/PostgreSQL), `:offset` / `:fetch` (SQL Standard) for `find-by-keys`.
|
||||||
|
* Address #117 by adding `next.jdbc.transaction/*nested-tx*` to provide control over how attempts to create nested transactions should be handled.
|
||||||
|
* Address #116 by adding a `:multi-rs` option to `execute!` to retrieve multiple result sets, for example from stored procedure calls or T-SQL scripts.
|
||||||
|
* Allow `:all` to be passed into `find-by-keys` instead of an example hash map or a where clause vector so all rows will be returned (expected to be used with `:offset` etc to support simple pagination of an entire table).
|
||||||
|
* Add `:columns` option to `find-by-keys` (and `get-by-id`) to specify a subset of columns to be returned in each row. This can also specify an alias for the column and allows for computed expressions to be selected with an alias.
|
||||||
|
|
||||||
|
* 1.0.478 -- 2020-06-24
|
||||||
|
* Address #123 by adding `next.jdbc.types` namespace, full of auto-generated `as-xxx` functions, one for each of the `java.sql.Types` values.
|
||||||
|
|
||||||
|
* 1.0.476 -- 2020-06-22
|
||||||
|
* Extend default options behavior to `next.jdbc.sql` functions.
|
||||||
|
|
||||||
|
* 1.0.475 -- 2020-06-22
|
||||||
|
* Add tests for `"jtds"` database driver (against MS SQL Server), making it officially supported.
|
||||||
|
* Switch from OpenTable Embedded PostgreSQL to Zonky's version, so that testing can move forward from PostgreSQL 10.11 to 12.2.0.
|
||||||
|
* Fix potential reflection warnings caused by `next.jdbc.prepare/statement` being incorrectly type-hinted.
|
||||||
|
* Address #122 by adding `next.jdbc/with-options` that lets you wrap up a connectable along with default options that should be applied to all operations on that connectable.
|
||||||
|
* Address #119 by clarifying realization actions in the docstrings for `row-number`, `column-names`, and `metadata`.
|
||||||
|
* Address #115 by adding equivalent of `db-do-commands` in the `clojure.java.jdbc` migration guide.
|
||||||
|
* Add log4j2 as a test dependency so that I have better control over logging (which makes debugging easier!).
|
||||||
|
|
||||||
|
* 1.0.462 -- 2020-05-31
|
||||||
|
* Addition of `next.jdbc.datafy` to provide more `datafy`/`nav` introspection (see the additional section in **datafy, nav, and :schema** for details).
|
||||||
|
* Addition of `next.jdbc.result-set/metadata` to provide (datafied) result set metadata within `plan`.
|
||||||
|
|
||||||
|
* 1.0.445 -- 2020-05-23
|
||||||
* Enhanced support in `plan` for "metadata" access: `row-number` and `column-names` can be called on the abstract row (even after calling `datafiable-row`). In addition, `Associative` access via numeric "keys" will read columns by index, and row abstractions now support `Indexed` access via `nth` (which will also read columns by index). Fixes #110.
|
* Enhanced support in `plan` for "metadata" access: `row-number` and `column-names` can be called on the abstract row (even after calling `datafiable-row`). In addition, `Associative` access via numeric "keys" will read columns by index, and row abstractions now support `Indexed` access via `nth` (which will also read columns by index). Fixes #110.
|
||||||
* Support for Stuart Sierra's Component library, via `next.jdbc.connection/component`. See updated **Getting Started** guide for usage.
|
* Support for Stuart Sierra's Component library, via `next.jdbc.connection/component`. See updated **Getting Started** guide for usage.
|
||||||
* Add example of getting generated keys from `execute-batch!`.
|
* Add example of getting generated keys from `execute-batch!`.
|
||||||
|
|
@ -12,7 +259,7 @@ Only accretive/fixative changes will be made from now on.
|
||||||
* Add array handling example to PostgreSQL **Tips & Tricks**. PR #108 from @maxp.
|
* Add array handling example to PostgreSQL **Tips & Tricks**. PR #108 from @maxp.
|
||||||
* Investigate possible solutions for #106 (mutable transaction thread safety) -- experimental `locking` on `Connection` object.
|
* Investigate possible solutions for #106 (mutable transaction thread safety) -- experimental `locking` on `Connection` object.
|
||||||
|
|
||||||
* 2020-04-10 -- 1.0.424
|
* 1.0.424 -- 2020-04-10
|
||||||
* In **Tips & Tricks**, noted that MySQL returns `BLOB` columns as `byte[]` instead of `java.sql.Blob`.
|
* In **Tips & Tricks**, noted that MySQL returns `BLOB` columns as `byte[]` instead of `java.sql.Blob`.
|
||||||
* Address #103, #104 by adding a section on timeouts to **Tips & Tricks**.
|
* Address #103, #104 by adding a section on timeouts to **Tips & Tricks**.
|
||||||
* Fix #102 by allowing keywords or strings in `:return-keys`.
|
* Fix #102 by allowing keywords or strings in `:return-keys`.
|
||||||
|
|
@ -20,20 +267,20 @@ Only accretive/fixative changes will be made from now on.
|
||||||
* Add support for calling `.getLoginTimeout`/`.setLoginTimeout` on the reified `DataSource` returned by `get-datasource` when called on a hash map "db-spec" or JDBC URL string.
|
* Add support for calling `.getLoginTimeout`/`.setLoginTimeout` on the reified `DataSource` returned by `get-datasource` when called on a hash map "db-spec" or JDBC URL string.
|
||||||
* Documentation improvements based on feedback (mostly from Slack), including a section on database metadata near the end of **Getting Started**.
|
* Documentation improvements based on feedback (mostly from Slack), including a section on database metadata near the end of **Getting Started**.
|
||||||
|
|
||||||
* 2020-03-16 -- 1.0.409
|
* 1.0.409 -- 2020-03-16
|
||||||
* Address #100 by adding support for MariaDB (@green-coder). Set `NEXT_JDBC_TEST_MARIADB=true` as well as `NEXT_JDBC_TEST_MYSQL=true` in order to run tests against MariaDB.
|
* Address #100 by adding support for MariaDB (@green-coder). Set `NEXT_JDBC_TEST_MARIADB=true` as well as `NEXT_JDBC_TEST_MYSQL=true` in order to run tests against MariaDB.
|
||||||
|
|
||||||
* 2020-03-14 -- 1.0.405 (no code changes -- just documentation)
|
* 1.0.405 -- 2020-03-14 (no code changes -- just documentation)
|
||||||
* Improve documentation around `plan` so `reduce` etc is more obvious.
|
* Improve documentation around `plan` so `reduce` etc is more obvious.
|
||||||
* Attempt to drive readers to cljdoc.org instead of the GitHub version (which is harder to navigate).
|
* Attempt to drive readers to cljdoc.org instead of the GitHub version (which is harder to navigate).
|
||||||
|
|
||||||
* 2020-03-02 -- 1.0.395
|
* 1.0.395 -- 2020-03-02
|
||||||
* Add `read-as-instant` and `read-as-local` functions to `next.jdbc.date-time` to extend `ReadableColumn` so that SQL `DATE` and `TIMESTAMP` columns can be read as Java Time types.
|
* Add `read-as-instant` and `read-as-local` functions to `next.jdbc.date-time` to extend `ReadableColumn` so that SQL `DATE` and `TIMESTAMP` columns can be read as Java Time types.
|
||||||
* Specifically call out PostgreSQL as needing `next.jdbc.date-time` to enable automatic conversion of `java.util.Date` objects to SQL timestamps for prepared statements (#95).
|
* Specifically call out PostgreSQL as needing `next.jdbc.date-time` to enable automatic conversion of `java.util.Date` objects to SQL timestamps for prepared statements (#95).
|
||||||
* Split **Tips & Tricks** into its own page, with a whole new section on using JSON data types with PostgreSQL (#94 -- thank you @vharmain).
|
* Split **Tips & Tricks** into its own page, with a whole new section on using JSON data types with PostgreSQL (#94 -- thank you @vharmain).
|
||||||
* Bump dependencies to latest.
|
* Bump dependencies to latest.
|
||||||
|
|
||||||
* 2020-02-28 -- 1.0.384
|
* 1.0.384 -- 2020-02-28
|
||||||
* Add PostgreSQL streaming option information to **Tips & Tricks** (#87).
|
* Add PostgreSQL streaming option information to **Tips & Tricks** (#87).
|
||||||
* Minor documentation fixes (including #85, #92, #93).
|
* Minor documentation fixes (including #85, #92, #93).
|
||||||
* Improve `Unknown dbtype` exception message (to clarify that `:classname` is also missing, #90).
|
* Improve `Unknown dbtype` exception message (to clarify that `:classname` is also missing, #90).
|
||||||
|
|
@ -41,25 +288,25 @@ Only accretive/fixative changes will be made from now on.
|
||||||
* Address #89, #91 by making minor performance tweaks to `next.jdbc.result-set` functions.
|
* Address #89, #91 by making minor performance tweaks to `next.jdbc.result-set` functions.
|
||||||
* Planning to move to MAJOR.MINOR.COMMITS versioning scheme (1.0.384).
|
* Planning to move to MAJOR.MINOR.COMMITS versioning scheme (1.0.384).
|
||||||
|
|
||||||
* 2019-12-20 -- 1.0.13
|
* 1.0.13 -- 2019-12-20
|
||||||
* Fix #82 by adding `clojure.java.data`-based support for setting arbitrary properties on `Connection` and `PreparedStatement` objects, post-creation. Note: this uses the Java reflection API under the hood.
|
* Fix #82 by adding `clojure.java.data`-based support for setting arbitrary properties on `Connection` and `PreparedStatement` objects, post-creation. Note: this uses the Java reflection API under the hood.
|
||||||
* Adds `next.jdbc.prepare/statement` to create `Statement` objects with all the options available to `prepare` except `:return-keys`.
|
* Adds `next.jdbc.prepare/statement` to create `Statement` objects with all the options available to `prepare` except `:return-keys`.
|
||||||
* Update `org.clojure/java.data` to 0.1.5 (for property setting).
|
* Update `org.clojure/java.data` to 0.1.5 (for property setting).
|
||||||
* Additional clarifications in the documentation based on feedback on Slack.
|
* Additional clarifications in the documentation based on feedback on Slack.
|
||||||
|
|
||||||
* 2019-12-11 -- 1.0.12
|
* 1.0.12 -- 2019-12-11
|
||||||
* Address #81 by splitting the SQL-building functions out of `next.jdbc.sql` into `next.jdbc.sql.builder`.
|
* Address #81 by splitting the SQL-building functions out of `next.jdbc.sql` into `next.jdbc.sql.builder`.
|
||||||
* Fix #80 by avoiding the auto-commit restore after a failed rollback in a failed transaction.
|
* Fix #80 by avoiding the auto-commit restore after a failed rollback in a failed transaction.
|
||||||
* Address #78 by documenting the `:connectionInitSql` workaround for HikariCP/PostgreSQL and non-default schemas.
|
* Address #78 by documenting the `:connectionInitSql` workaround for HikariCP/PostgreSQL and non-default schemas.
|
||||||
|
|
||||||
* 2019-12-07 -- 1.0.11
|
* 1.0.11 -- 2019-12-07
|
||||||
* Fix #76 by avoiding conversions on `java.sql.Date` and `java.sql.Timestamp`.
|
* Fix #76 by avoiding conversions on `java.sql.Date` and `java.sql.Timestamp`.
|
||||||
* Add testing against Microsoft SQL Server (run tests with environment variables `NEXT_JDBC_TEST_MSSQL=yes` and `MSSQL_SA_PASSWORD` set to your local -- `127.0.0.1:1433` -- SQL Server `sa` user password; assumes that it can create and drop `fruit` and `fruit_time` tables in the `model` database).
|
* Add testing against Microsoft SQL Server (run tests with environment variables `NEXT_JDBC_TEST_MSSQL=yes` and `MSSQL_SA_PASSWORD` set to your local -- `127.0.0.1:1433` -- SQL Server `sa` user password; assumes that it can create and drop `fruit` and `fruit_time` tables in the `model` database).
|
||||||
* Add testing against MySQL (run tests with environment variables `NEXT_JDBC_TEST_MYSQL=yes` and `MYSQL_ROOT_PASSWORD` set to your local -- `127.0.0.1:3306` -- MySQL `root` user password; assumes you have already created an empty database called `clojure_test`).
|
* Add testing against MySQL (run tests with environment variables `NEXT_JDBC_TEST_MYSQL=yes` and `MYSQL_ROOT_PASSWORD` set to your local -- `127.0.0.1:3306` -- MySQL `root` user password; assumes you have already created an empty database called `clojure_test`).
|
||||||
* Bump several JDBC driver versions for up-to-date testing.
|
* Bump several JDBC driver versions for up-to-date testing.
|
||||||
* Minor documentation fixes.
|
* Minor documentation fixes.
|
||||||
|
|
||||||
* 2019-11-14 -- 1.0.10
|
* 1.0.10 -- 2019-11-14
|
||||||
* Fix #75 by adding support for `java.sql.Statement` to `plan`, `execute!`, and `execute-one!`.
|
* Fix #75 by adding support for `java.sql.Statement` to `plan`, `execute!`, and `execute-one!`.
|
||||||
* Address #74 by making several small changes to satisfy Eastwood.
|
* Address #74 by making several small changes to satisfy Eastwood.
|
||||||
* Fix #73 by providing a new, optional namespace `next.jdbc.date-time` that can be required if your database driver needs assistance converting `java.util.Date` (PostgreSQL!) or the Java Time types to SQL `timestamp` (or SQL `date`/`time`).
|
* Fix #73 by providing a new, optional namespace `next.jdbc.date-time` that can be required if your database driver needs assistance converting `java.util.Date` (PostgreSQL!) or the Java Time types to SQL `timestamp` (or SQL `date`/`time`).
|
||||||
|
|
@ -72,45 +319,45 @@ Only accretive/fixative changes will be made from now on.
|
||||||
* Improve `datafy`/`nav` documentation around `:schema`.
|
* Improve `datafy`/`nav` documentation around `:schema`.
|
||||||
* Update `org.clojure/java.data` to `"0.1.4"` (0.1.2 fixes a number of reflection warnings).
|
* Update `org.clojure/java.data` to `"0.1.4"` (0.1.2 fixes a number of reflection warnings).
|
||||||
|
|
||||||
* 2019-10-11 -- 1.0.9
|
* 1.0.9 -- 2019-10-11
|
||||||
* Address #69 by trying to clarify when to use `execute-one!` vs `execute!` vs `plan`.
|
* Address #69 by trying to clarify when to use `execute-one!` vs `execute!` vs `plan`.
|
||||||
* Address #68 by clarifying that builder functions do not affect the "fake result set" containing `:next.jdbc/update-count`.
|
* Address #68 by clarifying that builder functions do not affect the "fake result set" containing `:next.jdbc/update-count`.
|
||||||
* Fix #67 by adding `:jdbcUrl` version spec.
|
* Fix #67 by adding `:jdbcUrl` version spec.
|
||||||
* Add `next.jdbc.optional/as-maps-adapter` to provide a way to override the default result set reading behavior of using `.getObject` when omitting SQL `NULL` values from result set maps.
|
* Add `next.jdbc.optional/as-maps-adapter` to provide a way to override the default result set reading behavior of using `.getObject` when omitting SQL `NULL` values from result set maps.
|
||||||
|
|
||||||
* 2019-09-27 -- 1.0.8
|
* 1.0.8 -- 2019-09-27
|
||||||
* Fix #66 by adding support for a db-spec hash map format containing a `:jdbcUrl` key (consistent with `->pool`) so that you can create a datasource from a JDBC URL string and additional options.
|
* Fix #66 by adding support for a db-spec hash map format containing a `:jdbcUrl` key (consistent with `->pool`) so that you can create a datasource from a JDBC URL string and additional options.
|
||||||
* Address #65 by adding a HugSQL "quick start" to the **Friendly SQL Functions** section of the docs.
|
* Address #65 by adding a HugSQL "quick start" to the **Friendly SQL Functions** section of the docs.
|
||||||
* Add `next.jdbc.specs/unstrument`. PR #64 (@gerred).
|
* Add `next.jdbc.specs/unstrument`. PR #64 (@gerred).
|
||||||
* Address #63 by improving documentation around qualified column names and `:qualifier` (`clojure.java.jdbc`) migration, with a specific caveat about Oracle not fully supporting `.getTableName()`.
|
* Address #63 by improving documentation around qualified column names and `:qualifier` (`clojure.java.jdbc`) migration, with a specific caveat about Oracle not fully supporting `.getTableName()`.
|
||||||
|
|
||||||
* 2019-09-09 -- 1.0.7
|
* 1.0.7 -- 2019-09-09
|
||||||
* Address #60 by supporting simpler schema entry formats: `:table/column` is equivalent to the old `[:table :column :one]` and `[:table/column]` is equivalent to the old `[:table :column :many]`. The older formats will continue to be supported but should be considered deprecated. PR #62 (@seancorfield).
|
* Address #60 by supporting simpler schema entry formats: `:table/column` is equivalent to the old `[:table :column :one]` and `[:table/column]` is equivalent to the old `[:table :column :many]`. The older formats will continue to be supported but should be considered deprecated. PR #62 (@seancorfield).
|
||||||
* Added test for using `ANY(?)` and arrays in PostgreSQL for `IN (?,,,?)` style queries. Added a **Tips & Tricks** section to **Friendly SQL Functions** with database-specific suggestions, that starts with this one.
|
* Added test for using `ANY(?)` and arrays in PostgreSQL for `IN (?,,,?)` style queries. Added a **Tips & Tricks** section to **Friendly SQL Functions** with database-specific suggestions, that starts with this one.
|
||||||
* Improved documentation in several areas.
|
* Improved documentation in several areas.
|
||||||
|
|
||||||
* 2019-08-24 -- 1.0.6
|
* 1.0.6 -- 2019-08-24
|
||||||
* Improved documentation around `insert-multi!` and `execute-batch!` (addresses #57).
|
* Improved documentation around `insert-multi!` and `execute-batch!` (addresses #57).
|
||||||
* Fix #54 by improving documentation around data type conversions (and the `ReadableColumn` and `SettableParameter` protocols).
|
* Fix #54 by improving documentation around data type conversions (and the `ReadableColumn` and `SettableParameter` protocols).
|
||||||
* Fix #52 by using a US-locale function in the "lower" result set builders to avoid unexpected character changes in column names in locales such as Turkish. If you want the locale-sensitive behavior, pass `clojure.string/lower-case` into one of the "modified" result set builders.
|
* Fix #52 by using a US-locale function in the "lower" result set builders to avoid unexpected character changes in column names in locales such as Turkish. If you want the locale-sensitive behavior, pass `clojure.string/lower-case` into one of the "modified" result set builders.
|
||||||
* Add `next.jdbc.result-set/as-maps-adapter` and `next.jdbc.result-set/as-arrays-adapter` to provide a way to override the default result set reading behavior of using `.getObject`.
|
* Add `next.jdbc.result-set/as-maps-adapter` and `next.jdbc.result-set/as-arrays-adapter` to provide a way to override the default result set reading behavior of using `.getObject`.
|
||||||
* Update `org.clojure/test.check` to `"0.10.0"`.
|
* Update `org.clojure/test.check` to `"0.10.0"`.
|
||||||
|
|
||||||
* 2019-08-05 -- 1.0.5
|
* 1.0.5 -- 2019-08-05
|
||||||
* Fix #51 by implementing `IPersistentMap` fully for the "mapified" result set inside `plan`. This adds support for `dissoc` and `cons` (which will both realize a row), `count` (which returns the column count but does not realize a row), `empty` (returns an empty hash map without realizing a row), etc.
|
* Fix #51 by implementing `IPersistentMap` fully for the "mapified" result set inside `plan`. This adds support for `dissoc` and `cons` (which will both realize a row), `count` (which returns the column count but does not realize a row), `empty` (returns an empty hash map without realizing a row), etc.
|
||||||
* Improved documentation around connection pooling (HikariCP caveats).
|
* Improved documentation around connection pooling (HikariCP caveats).
|
||||||
|
|
||||||
* 2019-07-24 -- 1.0.4
|
* 1.0.4 -- 2019-07-24
|
||||||
* Fix #50 by adding machinery to test against (embedded) PostgreSQL!
|
* Fix #50 by adding machinery to test against (embedded) PostgreSQL!
|
||||||
* Improved documentation for connection pooled datasources (including adding a Component example); clarified the recommendations for globally overriding default options (write a wrapper namespace that suits your usage).
|
* Improved documentation for connection pooled datasources (including adding a Component example); clarified the recommendations for globally overriding default options (write a wrapper namespace that suits your usage).
|
||||||
* Note: this release is primarily to fix the cljdoc.org documentation via repackaging the JAR file.
|
* Note: this release is primarily to fix the cljdoc.org documentation via repackaging the JAR file.
|
||||||
|
|
||||||
* 2019-07-23 -- 1.0.3
|
* 1.0.3 -- 2019-07-23
|
||||||
* Fix #48 by adding `next.jdbc.connection/->pool` and documenting how to use HikariCP and c3p0 in the Getting Started docs (as well as adding tests for both libraries).
|
* Fix #48 by adding `next.jdbc.connection/->pool` and documenting how to use HikariCP and c3p0 in the Getting Started docs (as well as adding tests for both libraries).
|
||||||
* Documentation improvements, including examples of extending `ReadableColumn` and `SettableParameter`.
|
* Documentation improvements, including examples of extending `ReadableColumn` and `SettableParameter`.
|
||||||
* Updated test dependencies (testing against more recent versions of several drivers).
|
* Updated test dependencies (testing against more recent versions of several drivers).
|
||||||
|
|
||||||
* 2019-07-15 -- 1.0.2
|
* 1.0.2 -- 2019-07-15
|
||||||
* Fix #47 by refactoring database specs to be a single hash map instead of pouring multiple maps into one.
|
* Fix #47 by refactoring database specs to be a single hash map instead of pouring multiple maps into one.
|
||||||
* Fix #46 by allowing `:host` to be `:none` which tells `next.jdbc` to omit the host/port section of the JDBC URL, so that local databases can be used with `:dbtype`/`:classname` for database types that `next.jdbc` does not know. Also added `:dbname-separator` and `:host-prefix` to the "db-spec" to allow fine-grained control over how the JDBC URL is assembled.
|
* Fix #46 by allowing `:host` to be `:none` which tells `next.jdbc` to omit the host/port section of the JDBC URL, so that local databases can be used with `:dbtype`/`:classname` for database types that `next.jdbc` does not know. Also added `:dbname-separator` and `:host-prefix` to the "db-spec" to allow fine-grained control over how the JDBC URL is assembled.
|
||||||
* Fix #45 by adding [TimesTen](https://www.oracle.com/database/technologies/related/timesten.html) driver support.
|
* Fix #45 by adding [TimesTen](https://www.oracle.com/database/technologies/related/timesten.html) driver support.
|
||||||
|
|
@ -118,29 +365,29 @@ Only accretive/fixative changes will be made from now on.
|
||||||
* Fix #43 by adjusting the spec for `insert-multi!` to "require less" of the `cols` and `rows` arguments.
|
* Fix #43 by adjusting the spec for `insert-multi!` to "require less" of the `cols` and `rows` arguments.
|
||||||
* Fix #42 by adding specs for `execute-batch!` and `set-parameters` in `next.jdbc.prepare`.
|
* Fix #42 by adding specs for `execute-batch!` and `set-parameters` in `next.jdbc.prepare`.
|
||||||
* Fix #41 by improving docstrings and documentation, especially around prepared statement handling.
|
* Fix #41 by improving docstrings and documentation, especially around prepared statement handling.
|
||||||
* Fix #40 by adding `next.jdbc.prepare/execute-batch!`.
|
* Fix #40 by adding `next.jdbc/execute-batch!` (previously `next.jdbc.prepare/execute-batch!`).
|
||||||
* Added `assert`s in `next.jdbc.sql` as more informative errors for cases that would generate SQL exceptions (from malformed SQL).
|
* Added `assert`s in `next.jdbc.sql` as more informative errors for cases that would generate SQL exceptions (from malformed SQL).
|
||||||
* Added spec for `:order-by` to reflect what is actually permitted.
|
* Added spec for `:order-by` to reflect what is actually permitted.
|
||||||
* Expose `next.jdbc.connect/dbtypes` as a table of known database types and aliases, along with their class name(s), port, and other JDBC string components.
|
* Expose `next.jdbc.connect/dbtypes` as a table of known database types and aliases, along with their class name(s), port, and other JDBC string components.
|
||||||
|
|
||||||
* 2019-07-03 -- 1.0.1
|
* 1.0.1 -- 2019-07-03
|
||||||
* Fix #37 by adjusting the spec for `with-transaction` to "require less" of the `:binding` vector.
|
* Fix #37 by adjusting the spec for `with-transaction` to "require less" of the `:binding` vector.
|
||||||
* Fix #36 by adding type hint in `with-transaction` macro.
|
* Fix #36 by adding type hint in `with-transaction` macro.
|
||||||
* Fix #35 by explaining the database-specific options needed to ensure `insert-multi!` performs a single, batched operation.
|
* Fix #35 by explaining the database-specific options needed to ensure `insert-multi!` performs a single, batched operation.
|
||||||
* Fix #34 by explaining save points (in the Transactions documentation).
|
* Fix #34 by explaining save points (in the Transactions documentation).
|
||||||
* Fix #33 by updating the spec for the example `key-map` in `find-by-keys`, `update!`, and `delete!` to reflect that you cannot pass an empty map to these functions (and added tests to ensure the calls fail with spec errors).
|
* Fix #33 by updating the spec for the example `key-map` in `find-by-keys`, `update!`, and `delete!` to reflect that you cannot pass an empty map to these functions (and added tests to ensure the calls fail with spec errors).
|
||||||
|
|
||||||
* 2019-06-12 -- 1.0.0 "gold"
|
* 1.0.0 "gold" -- 2019-06-12
|
||||||
* Address #31 by making `reify`'d objects produce a more informative string representation if they are printed (e.g., misusing `plan` by not reducing it or not mapping an operation over the rows).
|
* Address #31 by making `reify`'d objects produce a more informative string representation if they are printed (e.g., misusing `plan` by not reducing it or not mapping an operation over the rows).
|
||||||
* Fix #26 by exposing `next.jdbc.result-set/datafiable-result-set` so that various `java.sql.DatabaseMetaData` methods that return result metadata information in `ResultSet`s can be easily turned into a fully realized result set.
|
* Fix #26 by exposing `next.jdbc.result-set/datafiable-result-set` so that various `java.sql.DatabaseMetaData` methods that return result metadata information in `ResultSet`s can be easily turned into a fully realized result set.
|
||||||
|
|
||||||
* 2019-06-04 -- 1.0.0-rc1:
|
* 1.0.0-rc1 -- 2019-06-04
|
||||||
* Fix #24 by adding return type hints to `next.jdbc` functions.
|
* Fix #24 by adding return type hints to `next.jdbc` functions.
|
||||||
* Fix #22 by adding `next.jdbc.optional` with six map builders that omit `NULL` columns from the row hash maps.
|
* Fix #22 by adding `next.jdbc.optional` with six map builders that omit `NULL` columns from the row hash maps.
|
||||||
* Documentation improvements (#27, #28, and #29), including changing "connectable" to "transactable" for the `transact` function and the `with-transaction` macro (for consistency with the name of the underlying protocol).
|
* Documentation improvements (#27, #28, and #29), including changing "connectable" to "transactable" for the `transact` function and the `with-transaction` macro (for consistency with the name of the underlying protocol).
|
||||||
* Fix #30 by adding `modified` variants of column name functions and builders. The `lower` variants have been rewritten in terms of these new `modified` variants. This adds `:label-fn` and `:qualifier-fn` options that mirror `:column-fn` and `:table-fn` for row builders.
|
* Fix #30 by adding `modified` variants of column name functions and builders. The `lower` variants have been rewritten in terms of these new `modified` variants. This adds `:label-fn` and `:qualifier-fn` options that mirror `:column-fn` and `:table-fn` for row builders.
|
||||||
|
|
||||||
* 2019-05-24 -- 1.0.0-beta1:
|
* 1.0.0-beta1 -- 2019-05-24
|
||||||
* Set up CircleCI testing (just local DBs for now).
|
* Set up CircleCI testing (just local DBs for now).
|
||||||
* Address #21 by adding `next.jdbc.specs` and documenting basic usage.
|
* Address #21 by adding `next.jdbc.specs` and documenting basic usage.
|
||||||
* Fix #19 by caching loaded database driver classes.
|
* Fix #19 by caching loaded database driver classes.
|
||||||
|
|
@ -149,8 +396,8 @@ Only accretive/fixative changes will be made from now on.
|
||||||
|
|
||||||
## Alpha Builds
|
## Alpha Builds
|
||||||
|
|
||||||
* 2019-05-04 -- 1.0.0-alpha13 -- Fix #18 by removing more keys from properties when creating connections.
|
* 1.0.0-alpha13 -- 2019-05-04 -- Fix #18 by removing more keys from properties when creating connections.
|
||||||
* 2019-04-26 -- 1.0.0-alpha12 -- Fix #17 by renaming `:next.jdbc/sql-string` to `:next.jdbc/sql-params` (**BREAKING CHANGE!**) and pass whole vector.
|
* 1.0.0-alpha12 -- 2019-04-26 -- Fix #17 by renaming `:next.jdbc/sql-string` to `:next.jdbc/sql-params` (**BREAKING CHANGE!**) and pass whole vector.
|
||||||
* 2019-04-24 -- 1.0.0-alpha11 -- Rename `:gen-fn` to `:builder-fn` (**BREAKING CHANGE!**); Fix #13 by adding documentation for `datafy`/`nav`/`:schema`; Fix #15 by automatically adding `:next.jdbc/sql-string` (as of 1.0.0-alpha12: `:next.jdbc/sql-params`) into the options hash map, so custom builders can depend on the SQL string.
|
* 1.0.0-alpha11 -- 2019-04-24 -- Rename `:gen-fn` to `:builder-fn` (**BREAKING CHANGE!**); Fix #13 by adding documentation for `datafy`/`nav`/`:schema`; Fix #15 by automatically adding `:next.jdbc/sql-string` (as of 1.0.0-alpha12: `:next.jdbc/sql-params`) into the options hash map, so custom builders can depend on the SQL string.
|
||||||
* 2019-04-22 -- 1.0.0-alpha9 -- Fix #14 by respecting `:gen-fn` (as of 1.0.0-alpha11: `:builder-fn`) in `execute-one` for `PreparedStatement`.
|
* 1.0.0-alpha9 -- 2019-04-22 -- Fix #14 by respecting `:gen-fn` (as of 1.0.0-alpha11: `:builder-fn`) in `execute-one!` for `PreparedStatement`.
|
||||||
* 2019-04-21 -- 1.0.0-alpha8 -- Initial publicly announced release.
|
* 1.0.0-alpha8 -- 2019-04-21 -- Initial publicly announced release.
|
||||||
|
|
|
||||||
31
README.md
31
README.md
|
|
@ -1,23 +1,32 @@
|
||||||
# next.jdbc [](https://circleci.com/gh/seancorfield/next-jdbc/tree/master)
|
# next.jdbc [](https://github.com/seancorfield/next-jdbc/actions/workflows/test-and-release.yml) [](https://github.com/seancorfield/next-jdbc/actions/workflows/test-and-snapshot.yml) [](https://github.com/seancorfield/next-jdbc/actions/workflows/test.yml)
|
||||||
|
|
||||||
The next generation of `clojure.java.jdbc`: a new low-level Clojure wrapper for JDBC-based access to databases.
|
The next generation of `clojure.java.jdbc`: a new low-level Clojure wrapper for JDBC-based access to databases.
|
||||||
|
|
||||||
|
**Featured in [Jacek Schae's Learn Reitit Pro online course](https://www.jacekschae.com/learn-reitit-pro/pfec2)!**
|
||||||
|
|
||||||
## TL;DR
|
## TL;DR
|
||||||
|
|
||||||
The latest versions on Clojars and on cljdoc:
|
The latest versions on Clojars and on cljdoc:
|
||||||
|
|
||||||
[](https://clojars.org/seancorfield/next.jdbc) [](https://cljdoc.org/d/seancorfield/next.jdbc/CURRENT)
|
[](https://clojars.org/com.github.seancorfield/next.jdbc)
|
||||||
|
[](https://cljdoc.org/d/com.github.seancorfield/next.jdbc/CURRENT)
|
||||||
|
[](https://clojurians.slack.com/app_redirect?channel=sql)
|
||||||
|
[](http://clojurians.net)
|
||||||
|
[](https://clojurians.zulipchat.com/#narrow/channel/152063-sql)
|
||||||
|
|
||||||
The documentation on [cljdoc.org](https://cljdoc.org/d/seancorfield/next.jdbc/CURRENT) is for the current version of `next.jdbc`:
|
The documentation on [cljdoc.org](https://cljdoc.org/d/com.github.seancorfield/next.jdbc/CURRENT) is for the current version of `next.jdbc`:
|
||||||
|
|
||||||
* [Getting Started](https://cljdoc.org/d/seancorfield/next.jdbc/CURRENT/doc/getting-started)
|
* [Getting Started](https://cljdoc.org/d/com.github.seancorfield/next.jdbc/CURRENT/doc/getting-started)
|
||||||
* [Migrating from `clojure.java.jdbc`](https://cljdoc.org/d/seancorfield/next.jdbc/CURRENT/doc/migration-from-clojure-java-jdbc)
|
* [API Reference](https://cljdoc.org/d/com.github.seancorfield/next.jdbc/CURRENT/api/next)
|
||||||
* Feedback via [issues](https://github.com/seancorfield/next-jdbc/issues) or in the [`#sql` channel on the Clojurians Slack](https://clojurians.slack.com/messages/C1Q164V29/details/) or the [`#sql` stream on the Clojurians Zulip](https://clojurians.zulipchat.com/#narrow/stream/152063-sql).
|
* [Migrating from `clojure.java.jdbc`](https://cljdoc.org/d/com.github.seancorfield/next.jdbc/CURRENT/doc/migration-from-clojure-java-jdbc)
|
||||||
|
* Feedback via [issues](https://github.com/seancorfield/next-jdbc/issues) or in the [`#sql` channel on the Clojurians Slack](https://clojurians.slack.com/messages/C1Q164V29/) or the [`#sql` stream on the Clojurians Zulip](https://clojurians.zulipchat.com/#narrow/stream/152063-sql).
|
||||||
|
|
||||||
The documentation on GitHub is for **master** since the 1.0.445 release -- [see the CHANGELOG](https://github.com/seancorfield/next-jdbc/blob/master/CHANGELOG.md) and then read the [corresponding updated documentation](https://github.com/seancorfield/next-jdbc/tree/master/doc) on GitHub if you want.
|
The documentation on GitHub is for **develop** since the 1.3.1002 release -- [see the CHANGELOG](https://github.com/seancorfield/next-jdbc/blob/develop/CHANGELOG.md) and then read the [corresponding updated documentation](https://github.com/seancorfield/next-jdbc/tree/develop/doc) on GitHub if you want. Older versions of `next.jdbc` were published under the `seancorfield` group ID and you can find [older seancorfield/next.jdbc documentation on cljdoc.org](https://cljdoc.org/versions/seancorfield/next.jdbc).
|
||||||
|
|
||||||
This project follows the version scheme MAJOR.MINOR.COMMITS where MAJOR and MINOR provide some relative indication of the size of the change, but do not follow semantic versioning. In general, all changes endeavor to be non-breaking (by moving to new names rather than by breaking existing names). COMMITS is an ever-increasing counter of commits since the beginning of this repository.
|
This project follows the version scheme MAJOR.MINOR.COMMITS where MAJOR and MINOR provide some relative indication of the size of the change, but do not follow semantic versioning. In general, all changes endeavor to be non-breaking (by moving to new names rather than by breaking existing names). COMMITS is an ever-increasing counter of commits since the beginning of this repository.
|
||||||
|
|
||||||
|
> Note: every commit to the **develop** branch runs CI (GitHub Actions) and successful runs push a MAJOR.MINOR.9999-SNAPSHOT build to Clojars so the very latest version of `next.jdbc` is always available either via that [snapshot on Clojars](https://clojars.org/com.github.seancorfield/next.jdbc) or via a git dependency on the latest SHA.
|
||||||
|
|
||||||
## Motivation
|
## Motivation
|
||||||
|
|
||||||
Why another JDBC library? Why a different API from `clojure.java.jdbc`?
|
Why another JDBC library? Why a different API from `clojure.java.jdbc`?
|
||||||
|
|
@ -32,7 +41,7 @@ I also wanted `datafy`/`nav` support baked right in (it was added to `clojure.ja
|
||||||
|
|
||||||
As `next.jdbc` moved from alpha to beta, the last breaking change was made (renaming `reducible!` to `plan`) and the API should be considered stable. Only accretive and fixative changes will be made from now on.
|
As `next.jdbc` moved from alpha to beta, the last breaking change was made (renaming `reducible!` to `plan`) and the API should be considered stable. Only accretive and fixative changes will be made from now on.
|
||||||
|
|
||||||
After a month of alpha builds being available for testing, the first beta build was released on May 24th, 2019. A release candidate followed on June 4th and the "gold" (1.0.0) release was on June 12th. In addition to the small, core API in `next.jdbc`, there are "syntactic sugar" SQL functions (`insert!`, `query`, `update!`, and `delete!`) available in `next.jdbc.sql` that are similar to the main API in `clojure.java.jdbc`. See [Migrating from `clojure.java.jdbc`](https://cljdoc.org/d/seancorfield/next.jdbc/CURRENT/doc/migration-from-clojure-java-jdbc) for more detail about the differences.
|
After a month of alpha builds being available for testing, the first beta build was released on May 24th, 2019. A release candidate followed on June 4th and the "gold" (1.0.0) release was on June 12th. In addition to the small, core API in `next.jdbc`, there are "syntactic sugar" SQL functions (`insert!`, `query`, `update!`, and `delete!`) available in `next.jdbc.sql` that are similar to the main API in `clojure.java.jdbc`. See [Migrating from `clojure.java.jdbc`](https://cljdoc.org/d/com.github.seancorfield/next.jdbc/CURRENT/doc/migration-from-clojure-java-jdbc) for more detail about the differences.
|
||||||
|
|
||||||
## Usage
|
## Usage
|
||||||
|
|
||||||
|
|
@ -41,8 +50,8 @@ The primary concepts behind `next.jdbc` are that you start by producing a `javax
|
||||||
From a `DataSource`, either you or `next.jdbc` can create a `java.sql.Connection` via the `get-connection` function. You can specify an options hash map to `get-connection` to modify the connection that is created: `:read-only`, `:auto-commit`.
|
From a `DataSource`, either you or `next.jdbc` can create a `java.sql.Connection` via the `get-connection` function. You can specify an options hash map to `get-connection` to modify the connection that is created: `:read-only`, `:auto-commit`.
|
||||||
|
|
||||||
The primary SQL execution API in `next.jdbc` is:
|
The primary SQL execution API in `next.jdbc` is:
|
||||||
* `plan` -- yields an `IReduceInit` that, when reduced, executes the SQL statement and then reduces over the `ResultSet` with as little overhead as possible.
|
* `plan` -- yields an `IReduceInit` that, when reduced with an initial value, executes the SQL statement and then reduces over the `ResultSet` with as little overhead as possible.
|
||||||
* `execute!` -- executes the SQL statement and produces a vector of realized hash maps, that use qualified keywords for the column names, of the form `:<table>/<column>`. If you join across multiple tables, the qualified keywords will reflect the originating tables for each of the columns. If the SQL produces named values that do not come from an associated table, a simple, unqualified keyword will be used. The realized hash maps returned by `execute!` are `Datafiable` and thus `Navigable` (see Clojure 1.10's `datafy` and `nav` functions, and tools like Cognitect's REBL). Alternatively, you can specify `{:builder-fn rs/as-arrays}` and produce a vector with column names followed by vectors of row values. `rs/as-maps` is the default for `:builder-fn` but there are also `rs/as-unqualified-maps` and `rs/as-unqualified-arrays` if you want unqualified `:<column>` column names (and there are also lower-case variants of all of these).
|
* `execute!` -- executes the SQL statement and produces a vector of realized hash maps, that use qualified keywords for the column names, of the form `:<table>/<column>`. If you join across multiple tables, the qualified keywords will reflect the originating tables for each of the columns. If the SQL produces named values that do not come from an associated table, a simple, unqualified keyword will be used. The realized hash maps returned by `execute!` are `Datafiable` and thus `Navigable` (see Clojure 1.10's `datafy` and `nav` functions, and tools like [Portal](https://github.com/djblue/portal), [Reveal](https://github.com/vlaaad/reveal), and Nubank's Morse -- formerly Cognitect's REBL). Alternatively, you can specify `{:builder-fn rs/as-arrays}` and produce a vector with column names followed by vectors of row values. `rs/as-maps` is the default for `:builder-fn` but there are also `rs/as-unqualified-maps` and `rs/as-unqualified-arrays` if you want unqualified `:<column>` column names (and there are also lower-case variants of all of these).
|
||||||
* `execute-one!` -- executes the SQL or DDL statement and produces a single realized hash map. The realized hash map returned by `execute-one!` is `Datafiable` and thus `Navigable`.
|
* `execute-one!` -- executes the SQL or DDL statement and produces a single realized hash map. The realized hash map returned by `execute-one!` is `Datafiable` and thus `Navigable`.
|
||||||
|
|
||||||
In addition, there are API functions to create `PreparedStatement`s (`prepare`) from `Connection`s, which can be passed to `plan`, `execute!`, or `execute-one!`, and to run code inside a transaction (the `transact` function and the `with-transaction` macro).
|
In addition, there are API functions to create `PreparedStatement`s (`prepare`) from `Connection`s, which can be passed to `plan`, `execute!`, or `execute-one!`, and to run code inside a transaction (the `transact` function and the `with-transaction` macro).
|
||||||
|
|
@ -80,6 +89,6 @@ In addition, convenience functions -- "syntactic sugar" -- are provided to inser
|
||||||
|
|
||||||
## License
|
## License
|
||||||
|
|
||||||
Copyright © 2018-2020 Sean Corfield
|
Copyright © 2018-2024 Sean Corfield
|
||||||
|
|
||||||
Distributed under the Eclipse Public License version 1.0.
|
Distributed under the Eclipse Public License version 1.0.
|
||||||
|
|
|
||||||
82
build.clj
Normal file
82
build.clj
Normal file
|
|
@ -0,0 +1,82 @@
|
||||||
|
(ns build
|
||||||
|
"next.jdbc's build script.
|
||||||
|
|
||||||
|
clojure -T:build ci
|
||||||
|
clojure -T:build deploy
|
||||||
|
|
||||||
|
Run tests via:
|
||||||
|
clojure -M:test:runner
|
||||||
|
|
||||||
|
For more information, run:
|
||||||
|
|
||||||
|
clojure -A:deps -T:build help/doc"
|
||||||
|
(:refer-clojure :exclude [test])
|
||||||
|
(:require [clojure.tools.build.api :as b]
|
||||||
|
[deps-deploy.deps-deploy :as dd]
|
||||||
|
[clojure.string :as str]))
|
||||||
|
|
||||||
|
(def lib 'com.github.seancorfield/next.jdbc)
|
||||||
|
(defn- the-version [patch] (format "1.3.%s" patch))
|
||||||
|
(def version (the-version (b/git-count-revs nil)))
|
||||||
|
(def snapshot (the-version "9999-SNAPSHOT"))
|
||||||
|
(def class-dir "target/classes")
|
||||||
|
|
||||||
|
(defn test "Run all the tests." [opts]
|
||||||
|
(doseq [alias [:1.10 :1.11 :1.12]]
|
||||||
|
(println "\nRunning tests for Clojure" (name alias))
|
||||||
|
(let [basis (b/create-basis
|
||||||
|
{:aliases (cond-> [:test alias]
|
||||||
|
(str/starts-with? (System/getProperty "java.version") "21")
|
||||||
|
(conj :jdk21))})
|
||||||
|
cmds (b/java-command
|
||||||
|
{:basis basis
|
||||||
|
:main 'clojure.main
|
||||||
|
:main-args ["-m" "lazytest.main"]})
|
||||||
|
{:keys [exit]} (b/process cmds)]
|
||||||
|
(when-not (zero? exit) (throw (ex-info "Tests failed" {})))))
|
||||||
|
opts)
|
||||||
|
|
||||||
|
(defn- pom-template [version]
|
||||||
|
[[:description "The next generation of clojure.java.jdbc: a new low-level Clojure wrapper for JDBC-based access to databases."]
|
||||||
|
[:url "https://github.com/seancorfield/next-jdbc"]
|
||||||
|
[:licenses
|
||||||
|
[:license
|
||||||
|
[:name "Eclipse Public License"]
|
||||||
|
[:url "http://www.eclipse.org/legal/epl-v10.html"]]]
|
||||||
|
[:developers
|
||||||
|
[:developer
|
||||||
|
[:name "Sean Corfield"]]]
|
||||||
|
[:scm
|
||||||
|
[:url "https://github.com/seancorfield/next-jdbc"]
|
||||||
|
[:connection "scm:git:https://github.com/seancorfield/next-jdbc.git"]
|
||||||
|
[:developerConnection "scm:git:ssh:git@github.com:seancorfield/next-jdbc.git"]
|
||||||
|
[:tag (str "v" version)]]])
|
||||||
|
|
||||||
|
(defn- jar-opts [opts]
|
||||||
|
(let [version (if (:snapshot opts) snapshot version)]
|
||||||
|
(assoc opts
|
||||||
|
:lib lib :version version
|
||||||
|
:jar-file (format "target/%s-%s.jar" lib version)
|
||||||
|
:basis (b/create-basis {})
|
||||||
|
:class-dir class-dir
|
||||||
|
:target "target"
|
||||||
|
:src-dirs ["src"]
|
||||||
|
:pom-data (pom-template version))))
|
||||||
|
|
||||||
|
(defn ci "Run the CI pipeline of tests (and build the JAR)." [opts]
|
||||||
|
(test opts)
|
||||||
|
(b/delete {:path "target"})
|
||||||
|
(let [opts (jar-opts opts)]
|
||||||
|
(println "\nWriting pom.xml...")
|
||||||
|
(b/write-pom opts)
|
||||||
|
(println "\nCopying source...")
|
||||||
|
(b/copy-dir {:src-dirs ["resources" "src"] :target-dir class-dir})
|
||||||
|
(println "\nBuilding" (:jar-file opts) "...")
|
||||||
|
(b/jar opts))
|
||||||
|
opts)
|
||||||
|
|
||||||
|
(defn deploy "Deploy the JAR to Clojars." [opts]
|
||||||
|
(let [{:keys [jar-file] :as opts} (jar-opts opts)]
|
||||||
|
(dd/deploy {:installer :remote :artifact (b/resolve-path jar-file)
|
||||||
|
:pom-file (b/pom-path (select-keys opts [:lib :class-dir]))}))
|
||||||
|
opts)
|
||||||
89
deps.edn
89
deps.edn
|
|
@ -1,39 +1,58 @@
|
||||||
{:paths ["src"]
|
{:mvn/repos {"sonatype" {:url "https://oss.sonatype.org/content/repositories/snapshots/"}
|
||||||
:deps {org.clojure/clojure {:mvn/version "1.10.1"}
|
"ossrh-snapshots" {:url "https://s01.oss.sonatype.org/content/repositories/snapshots"}}
|
||||||
org.clojure/java.data {:mvn/version "1.0.64"}}
|
:paths ["src" "resources"]
|
||||||
|
:deps {org.clojure/clojure {:mvn/version "1.10.3"}
|
||||||
|
org.clojure/java.data {:mvn/version "1.3.113"}
|
||||||
|
|
||||||
|
camel-snake-kebab/camel-snake-kebab {:mvn/version "0.4.3"}}
|
||||||
:aliases
|
:aliases
|
||||||
{:test {:extra-paths ["test"]
|
{;; for help: clojure -A:deps -T:build help/doc
|
||||||
:extra-deps {org.clojure/test.check {:mvn/version "1.0.0"}
|
:build {:deps {io.github.clojure/tools.build {:mvn/version "0.10.7"}
|
||||||
|
slipset/deps-deploy {:mvn/version "0.2.2"}}
|
||||||
|
:ns-default build}
|
||||||
|
|
||||||
|
;; versions to test against:
|
||||||
|
:1.10 {:override-deps {org.clojure/clojure {:mvn/version "1.10.3"}}}
|
||||||
|
:1.11 {:override-deps {org.clojure/clojure {:mvn/version "1.11.4"}}}
|
||||||
|
:1.12 {:override-deps {org.clojure/clojure {:mvn/version "1.12.0"}}}
|
||||||
|
|
||||||
|
;; running tests/checks of various kinds:
|
||||||
|
:test {:extra-paths ["test"]
|
||||||
|
:extra-deps {org.clojure/test.check {:mvn/version "1.1.1"}
|
||||||
|
io.github.noahtheduke/lazytest {:mvn/version "1.6.1"}
|
||||||
;; connection pooling
|
;; connection pooling
|
||||||
com.zaxxer/HikariCP {:mvn/version "3.4.2"}
|
com.zaxxer/HikariCP {:mvn/version "6.3.0"}
|
||||||
com.mchange/c3p0 {:mvn/version "0.9.5.5"}
|
com.mchange/c3p0 {:mvn/version "0.10.1"}
|
||||||
;; JDBC drivers
|
;; JDBC drivers
|
||||||
;; compatible with JDK8+:
|
;; 10.16.x is JDK17+
|
||||||
org.apache.derby/derby {:mvn/version "10.14.2.0"}
|
org.apache.derby/derby {:mvn/version "10.15.2.0"}
|
||||||
;; compatible only with JDK9+:
|
org.apache.derby/derbyshared {:mvn/version "10.15.2.0"}
|
||||||
;; org.apache.derby/derby {:mvn/version "10.15.2.0"}
|
org.hsqldb/hsqldb {:mvn/version "2.7.4"}
|
||||||
;; org.apache.derby/derbyshared {:mvn/version "10.15.2.0"}
|
com.h2database/h2 {:mvn/version "2.3.232"}
|
||||||
org.hsqldb/hsqldb {:mvn/version "2.5.0"}
|
|
||||||
com.h2database/h2 {:mvn/version "1.4.199"}
|
|
||||||
net.sourceforge.jtds/jtds {:mvn/version "1.3.1"}
|
net.sourceforge.jtds/jtds {:mvn/version "1.3.1"}
|
||||||
org.mariadb.jdbc/mariadb-java-client {:mvn/version "2.5.4"}
|
org.mariadb.jdbc/mariadb-java-client {:mvn/version "3.5.2"}
|
||||||
mysql/mysql-connector-java {:mvn/version "8.0.19"}
|
com.mysql/mysql-connector-j {:mvn/version "9.2.0"}
|
||||||
org.postgresql/postgresql {:mvn/version "42.2.10"}
|
;; 42.7.4 changes update count (to -1) for stored procs:
|
||||||
com.opentable.components/otj-pg-embedded {:mvn/version "0.13.3"}
|
org.postgresql/postgresql {:mvn/version "42.7.5"}
|
||||||
com.impossibl.pgjdbc-ng/pgjdbc-ng {:mvn/version "0.8.3"}
|
io.zonky.test/embedded-postgres {:mvn/version "2.1.0"}
|
||||||
org.xerial/sqlite-jdbc {:mvn/version "3.30.1"}
|
io.zonky.test.postgres/embedded-postgres-binaries-darwin-amd64 {:mvn/version "17.4.0"}
|
||||||
com.microsoft.sqlserver/mssql-jdbc {:mvn/version "8.2.1.jre8"}
|
io.zonky.test.postgres/embedded-postgres-binaries-linux-amd64 {:mvn/version "17.4.0"}
|
||||||
;; supplementary test stuff
|
io.zonky.test.postgres/embedded-postgres-binaries-windows-amd64 {:mvn/version "17.4.0"}
|
||||||
org.slf4j/slf4j-nop {:mvn/version "1.7.30"}}}
|
org.xerial/sqlite-jdbc {:mvn/version "3.49.1.0"}
|
||||||
:runner
|
com.microsoft.sqlserver/mssql-jdbc {:mvn/version "12.10.0.jre11"}
|
||||||
{:extra-deps {com.cognitect/test-runner
|
;; use log4j2 to reduce log noise during testing:
|
||||||
{:git/url "https://github.com/cognitect-labs/test-runner"
|
org.apache.logging.log4j/log4j-api {:mvn/version "2.24.3"}
|
||||||
:sha "f7ef16dc3b8332b0d77bc0274578ad5270fbfedd"}}
|
;; bridge everything into log4j:
|
||||||
:main-opts ["-m" "cognitect.test-runner"
|
org.apache.logging.log4j/log4j-1.2-api {:mvn/version "2.24.3"}
|
||||||
"-d" "test"]}
|
org.apache.logging.log4j/log4j-jcl {:mvn/version "2.24.3"}
|
||||||
:jar
|
org.apache.logging.log4j/log4j-jul {:mvn/version "2.24.3"}
|
||||||
{:extra-deps {seancorfield/depstar {:mvn/version "1.0.94"}}
|
org.apache.logging.log4j/log4j-slf4j-impl {:mvn/version "2.24.3"}
|
||||||
:main-opts ["-m" "hf.depstar.jar" "next-jdbc.jar"]}
|
org.apache.logging.log4j/log4j-slf4j2-impl {:mvn/version "2.24.3"}}
|
||||||
:deploy
|
:jvm-opts ["-Dlog4j2.configurationFile=log4j2-info.properties"]}
|
||||||
{:extra-deps {deps-deploy {:mvn/version "0.0.9"}}
|
:runner {:main-opts ["-m" "lazytest.main"]}
|
||||||
:main-opts ["-m" "deps-deploy.deps-deploy" "deploy" "next-jdbc.jar"]}}}
|
:jdk11 {}
|
||||||
|
:jdk17 {}
|
||||||
|
:jdk21 {:extra-deps {;; only need the XTDB JDBC module:
|
||||||
|
com.xtdb/xtdb-jdbc {:mvn/version "2.0.0-beta7"}}}
|
||||||
|
:jdk24 {:jvm-opts [;; for SQLite on JDK 24 locally
|
||||||
|
"--enable-native-access=ALL-UNNAMED"]}}}
|
||||||
|
|
|
||||||
|
|
@ -13,11 +13,14 @@ Although `get-datasource` does not accept options, the "db spec" hash map passed
|
||||||
* `:dbname-separator` -- an optional string that can be used to override the `/` or `:` that is normally placed in front of the database name in the JDBC URL,
|
* `:dbname-separator` -- an optional string that can be used to override the `/` or `:` that is normally placed in front of the database name in the JDBC URL,
|
||||||
* `:host` -- an optional string that identifies the IP address or hostname of the server on which the database is running; the default is `"127.0.0.1"`; if `:none` is specified, `next.jdbc` will assume this is for a local database and will omit the host/port segment of the JDBC URL,
|
* `:host` -- an optional string that identifies the IP address or hostname of the server on which the database is running; the default is `"127.0.0.1"`; if `:none` is specified, `next.jdbc` will assume this is for a local database and will omit the host/port segment of the JDBC URL,
|
||||||
* `:host-prefix` -- an optional string that can be used to override the `//` that is normally placed in front of the IP address or hostname in the JDBC URL,
|
* `:host-prefix` -- an optional string that can be used to override the `//` that is normally placed in front of the IP address or hostname in the JDBC URL,
|
||||||
* `:port` -- an optional integer that identifies the port on which the database is running; for common database types, `next.jdbc` knows the default so this should only be needed for non-standard setups or "exotic" database types,
|
* `:port` -- an optional integer that identifies the port on which the database is running; for common database types, `next.jdbc` knows the default so this should only be needed for non-standard setups or "exotic" database types; if `:none` is specified, `next.jdbc` will omit the port segment of the JDBC URL,
|
||||||
|
* `:property-separator` -- an optional string that can be used to override the separators used in `next.jdbc.connection/jdbc-url` for the properties (after the initial JDBC URL portion); by default `?` and `&` are used to build JDBC URLs with properties; for SQL Server drivers (both MS and jTDS) `:property-separator ";"` is used, so this option should only be necessary when you are specifying "unusual" databases that `next.jdbc` does not already know about,
|
||||||
* `:classname` -- an optional string that identifies the name of the JDBC driver class to be used for the connection; for common database types, `next.jdbc` knows the default so this should only be needed for "exotic" database types,
|
* `:classname` -- an optional string that identifies the name of the JDBC driver class to be used for the connection; for common database types, `next.jdbc` knows the default so this should only be needed for "exotic" database types,
|
||||||
* `:user` -- an optional string that identifies the database username to be used when authenticating,
|
* `:user` -- an optional string that identifies the database username to be used when authenticating (NOTE: HikariCP needs `:username` instead – see below),
|
||||||
* `:password` -- an optional string that identifies the database password to be used when authenticating.
|
* `:password` -- an optional string that identifies the database password to be used when authenticating.
|
||||||
|
|
||||||
|
If you already have a JDBC URL, you can either specify that string _instead_ of a "db spec" hash map or, if you need additional properties passed to the JDBC driver, you can use a hash map containing `:jdbcUrl`, specifying the JDBC URL, and any properties you need as additional keys in the hash map.
|
||||||
|
|
||||||
Any additional keys provided in the "db spec" will be passed to the JDBC driver as `Properties` when each connection is made. Alternatively, when used with `next.jdbc.connection/->pool`, additional keys correspond to setters called on the pooled connection object.
|
Any additional keys provided in the "db spec" will be passed to the JDBC driver as `Properties` when each connection is made. Alternatively, when used with `next.jdbc.connection/->pool`, additional keys correspond to setters called on the pooled connection object.
|
||||||
|
|
||||||
If you are using HikariCP and `next.jdbc.connection/->pool` to create a connection pooled datasource, you need to provide `:username` for the database username (instead of, or as well as, `:user`).
|
If you are using HikariCP and `next.jdbc.connection/->pool` to create a connection pooled datasource, you need to provide `:username` for the database username (instead of, or as well as, `:user`).
|
||||||
|
|
@ -30,14 +33,37 @@ Any path that calls `get-connection` will accept the following options:
|
||||||
|
|
||||||
If you need additional options set on a connection, you can either use Java interop to set them directly, or provide them as part of the "db spec" hash map passed to `get-datasource` (although then they will apply to _all_ connections obtained from that datasource).
|
If you need additional options set on a connection, you can either use Java interop to set them directly, or provide them as part of the "db spec" hash map passed to `get-datasource` (although then they will apply to _all_ connections obtained from that datasource).
|
||||||
|
|
||||||
> Note: If `plan`, `execute!`, or `execute-one!` are passed a `DataSource`, a "db spec" hash map, or a JDBC URI string, they will call `get-connection`, so they will accept the above options in those cases.
|
Additional options passed are set as `java.util.Properties` and, by default, are coerced to strings.
|
||||||
|
If you are working with a driver that requires a non-string value for a property (such as the Snowflake driver), you can provide a `:next.jdbc/as-is-properties` option containing a sequence of options that should be added as-is, rather than coerced to strings.
|
||||||
|
|
||||||
|
> Note: If `plan`, `execute!`, or `execute-one!` are passed a `DataSource`, a "db spec" hash map, or a JDBC URL string, they will call `get-connection`, so they will accept the above options in those cases.
|
||||||
|
|
||||||
## Generating SQL
|
## Generating SQL
|
||||||
|
|
||||||
The "friendly" SQL functions all accept the following options (in addition to all the options that `plan`, `execute!`, and `execute-one!` can accept):
|
Except for `query` (which is simply an alias for `execute!`), all the "friendly" SQL functions accept the following options (in addition to all the options that `plan`, `execute!`, and `execute-one!` can accept):
|
||||||
|
|
||||||
* `:table-fn` -- the quoting function to be used on the string that identifies the table name, if provided,
|
* `:table-fn` -- the quoting function to be used on the string that identifies the table name, if provided; this also applies to assumed table names when `nav`igating schemas,
|
||||||
* `:column-fn` -- the quoting function to be used on any string that identifies a column name, if provided.
|
* `:column-fn` -- the quoting function to be used on any string that identifies a column name, if provided; this also applies to the reducing function context over `plan` and to assumed foreign key column names when `nav`igating schemas.
|
||||||
|
|
||||||
|
They also support a `:suffix` argument which can be used to specify a SQL string that should be appended to the generated SQL string before executing it, e.g., `:suffix "FOR UPDATE"` or, for an `insert!` call `:suffix "RETURNING *"`.
|
||||||
|
The latter is particularly useful for databases, such as SQLite these days,
|
||||||
|
which do not support calling `.getGeneratedKeys()` on `PreparedStatement` objects,
|
||||||
|
so you cannot use `:return-generated-keys` to get back the keys -- you must
|
||||||
|
use `RETURNING *`.
|
||||||
|
|
||||||
|
In addition, `find-by-keys` accepts the following options (see its docstring for more details):
|
||||||
|
|
||||||
|
* `:columns` -- specify one or more columns to `SELECT` to override selecting all columns,
|
||||||
|
* `:order-by` -- specify one or more columns, on which to sort the results,
|
||||||
|
* `:top` / `:limit` / `:offset` / `:fetch` to support pagination of results.
|
||||||
|
|
||||||
|
In the simple case, the `:columns` option expects a vector of keywords and each will be processed according to `:column-fn`, if provided. A column alias can be specified using a vector pair of keywords and both will be processed according to `:column-fn`, e.g., `[:foo [:bar :quux]]` would expand to `foo, bar AS quux`. You can also specify the first element of the pair as a string which will be used as-is in the generated SQL, e.g., `[:foo ["COUNT(*)" :total]]` would expand to `foo, COUNT(*) AS total`. In the latter case, the alias keyword will still be processed according to `:column-fn` but the string will be untouched -- you are responsible for any quoting and/or other formatting that might be required to produce a valid SQL expression.
|
||||||
|
|
||||||
|
> Note: `get-by-id` accepts the same options as `find-by-keys` but it will only ever produce one row, as a hash map, so sort order and pagination are less applicable, although `:columns` may be useful.
|
||||||
|
|
||||||
|
As of 1.3.925, `aggregate-by-keys` exists as a wrapper around `find-by-keys`
|
||||||
|
that accepts the same options as `find-by-keys` except that `:columns` may not
|
||||||
|
be specified (since it is used to add the aggregate to the query).
|
||||||
|
|
||||||
## Generating Rows and Result Sets
|
## Generating Rows and Result Sets
|
||||||
|
|
||||||
|
|
@ -46,9 +72,23 @@ Any function that might realize a row or a result set will accept:
|
||||||
* `:builder-fn` -- a function that implements the `RowBuilder` and `ResultSetBuilder` protocols; strictly speaking, `plan` and `execute-one!` only need `RowBuilder` to be implemented (and `plan` only needs that if it actually has to realize a row) but most generation functions will implement both for ease of use.
|
* `:builder-fn` -- a function that implements the `RowBuilder` and `ResultSetBuilder` protocols; strictly speaking, `plan` and `execute-one!` only need `RowBuilder` to be implemented (and `plan` only needs that if it actually has to realize a row) but most generation functions will implement both for ease of use.
|
||||||
* `:label-fn` -- if `:builder-fn` is specified as one of `next.jdbc.result-set`'s `as-modified-*` builders, this option must be present and should specify a string-to-string transformation that will be applied to the column label for each returned column name.
|
* `:label-fn` -- if `:builder-fn` is specified as one of `next.jdbc.result-set`'s `as-modified-*` builders, this option must be present and should specify a string-to-string transformation that will be applied to the column label for each returned column name.
|
||||||
* `:qualifier-fn` -- if `:builder-fn` is specified as one of `next.jdbc.result-set`'s `as-modified-*` builders, this option should specify a string-to-string transformation that will be applied to the table name for each returned column name. It will be called with an empty string if the table name is not available. It can be omitted for the `as-unqualified-modified-*` variants.
|
* `:qualifier-fn` -- if `:builder-fn` is specified as one of `next.jdbc.result-set`'s `as-modified-*` builders, this option should specify a string-to-string transformation that will be applied to the table name for each returned column name. It will be called with an empty string if the table name is not available. It can be omitted for the `as-unqualified-modified-*` variants.
|
||||||
|
* `:column-fn` -- if present, applied to each column name before looking up the column in the `ResultSet` to get that column's value.
|
||||||
|
|
||||||
|
In addition, `execute!` accepts the `:multi-rs true` option to return multiple result sets -- as a vector of result sets.
|
||||||
|
|
||||||
> Note: Subject to the caveats above about `:builder-fn`, that means that `plan`, `execute!`, `execute-one!`, and the "friendly" SQL functions will all accept these options for generating rows and result sets.
|
> Note: Subject to the caveats above about `:builder-fn`, that means that `plan`, `execute!`, `execute-one!`, and the "friendly" SQL functions will all accept these options for generating rows and result sets.
|
||||||
|
|
||||||
|
## Datafying & Navigating Rows and Result Sets
|
||||||
|
|
||||||
|
Any function that produces a result set will accept the following options
|
||||||
|
that modify the behavior of `datafy` and `nav` applied to the rows in that
|
||||||
|
result set:
|
||||||
|
|
||||||
|
* `:schema` -- override the conventions for identifying foreign keys and the related (primary) keys in the tables to which they refer, on a per table/column basis; can also be used to indicate a fk relationship is one-to-many or many-to-many rather than one-to-one or one-to-many,
|
||||||
|
* `:schema-opts` -- override the default conventions for identifying foreign keys and the related (primary) keys in the tables to which they refer, as a whole.
|
||||||
|
|
||||||
|
See [`datafy`, `nav`, and `:schema`](/doc/datafy-nav-and-schema.md) for more details.
|
||||||
|
|
||||||
## Statements & Prepared Statements
|
## Statements & Prepared Statements
|
||||||
|
|
||||||
Any function that creates a `Statement` or a `PreparedStatement` will accept the following options (see below for additional options for `PreparedStatement`):
|
Any function that creates a `Statement` or a `PreparedStatement` will accept the following options (see below for additional options for `PreparedStatement`):
|
||||||
|
|
@ -70,20 +110,26 @@ Any function that creates a `PreparedStatement` will additionally accept the fol
|
||||||
* `:return-keys` -- a truthy value asks that the JDBC driver to return any generated keys created by the operation; it can be `true` or it can be a vector of keywords identifying column names that should be returned.
|
* `:return-keys` -- a truthy value asks that the JDBC driver to return any generated keys created by the operation; it can be `true` or it can be a vector of keywords identifying column names that should be returned.
|
||||||
|
|
||||||
Not all databases or drivers support all of these options, or all values for any given option. If `:return-keys` is a vector of column names and that is not supported, `next.jdbc` will attempt a generic "return generated keys" option instead. If that is not supported, `next.jdbc` will fall back to a regular SQL operation. If other options are not supported, you may get a `SQLException`.
|
Not all databases or drivers support all of these options, or all values for any given option. If `:return-keys` is a vector of column names and that is not supported, `next.jdbc` will attempt a generic "return generated keys" option instead. If that is not supported, `next.jdbc` will fall back to a regular SQL operation. If other options are not supported, you may get a `SQLException`.
|
||||||
|
You may need to use `RETURNING *` on `INSERT` statements instead of using `:return-keys` with some database drivers.
|
||||||
|
|
||||||
> Note: If `plan`, `execute!`, or `execute-one!` are passed a `DataSource`, a "db spec" hash map, or a JDBC URI string, they will call `prepare` to create a `PreparedStatement`, so they will accept the above options in those cases.
|
> Note: If `plan`, `execute!`, or `execute-one!` are passed a `DataSource`, a "db spec" hash map, or a JDBC URL string, they will call `prepare` to create a `PreparedStatement`, so they will accept the above options in those cases.
|
||||||
|
|
||||||
In addition the the above, `next.jdbc.prepare/execute-batch!` (which does **not** create a `PreparedStatement`) accepts an options hash map that can also contain the following:
|
In addition to the above, `next.jdbc/execute-batch!` (which may create a `PreparedStatement` if you pass in a SQL string and either a `Connection` or `DataSource`) accepts an options hash map that can also contain the following:
|
||||||
|
|
||||||
* `:batch-size` -- an integer that determines how to partition the parameter groups for submitting to the database in batches,
|
* `:batch-size` -- an integer that determines how to partition the parameter groups for submitting to the database in batches,
|
||||||
* `:large` -- a Boolean flag that indicates whether the batch will produce large update counts (`long` rather than `int` values).
|
* `:large` -- a Boolean flag that indicates whether the batch will produce large update counts (`long` rather than `int` values),
|
||||||
|
* `:return-generated-keys` -- a Boolean flag that indicates whether `.getGeneratedKeys` should be called on the `PreparedStatement` after each batch is executed (if `true`, `execute-batch!` will return a vector of hash maps containing generated keys). Some databases do not support this and you need to use `RETURNING *` on `INSERT` statements instead.
|
||||||
|
|
||||||
## Transactions
|
## Transactions
|
||||||
|
|
||||||
The `transact` function and `with-transaction` macro accept the following options:
|
The `transact` function and `with-transaction` (`+options`) macro accept the following options:
|
||||||
|
|
||||||
* `:isolation` -- a keyword that identifies the isolation to be used for this transaction: `:none`, `:read-committed`, `:read-uncommitted`, `:repeatedable-read`, or `:serializable`; these represent increasingly strict levels of transaction isolation and may not all be available depending on the database and/or JDBC driver being used,
|
* `:isolation` -- a keyword that identifies the isolation to be used for this transaction: `:none`, `:read-committed`, `:read-uncommitted`, `:repeatable-read`, or `:serializable`; these represent increasingly strict levels of transaction isolation and may not all be available depending on the database and/or JDBC driver being used,
|
||||||
* `:read-only` -- a `Boolean` that indicates whether the transaction should be read-only or not (the default),
|
* `:read-only` -- a `Boolean` that indicates whether the transaction should be read-only or not (the default),
|
||||||
* `:rollback-only` -- a `Boolean` that indicates whether the transaction should commit on success (the default) or rollback.
|
* `:rollback-only` -- a `Boolean` that indicates whether the transaction should commit on success (the default) or rollback.
|
||||||
|
|
||||||
[<: Transactions](/doc/transactions.md) | [`datafy`, `nav`, and `:schema` :>](/doc/datafy-nav-and-schema.md)
|
## Plan Selection
|
||||||
|
|
||||||
|
The `next.jdbc.plan/select!` function accepts the following specific option:
|
||||||
|
|
||||||
|
* `:into` -- a data structure into which the selected result from a `plan` operation are poured; by default this is `[]`; could be any value that is acceptable as the first argument to `into`, subject to `into` accepting the sequence of values produced by the `plan` reduction.
|
||||||
|
|
|
||||||
|
|
@ -1,27 +1,54 @@
|
||||||
# `datafy`, `nav`, and the `:schema` option
|
# `datafy`, `nav`, and the `:schema` option
|
||||||
|
|
||||||
Clojure 1.10 introduced a new namespace, [`clojure.datafy`](http://clojure.github.io/clojure/clojure.datafy-api.html), and two new protocols (`Datafiable` and `Navigable`) that allow for generalized, lazy navigation around data structures. Cognitect also released [REBL](http://rebl.cognitect.com/) -- a graphical, interactive tool for browsing Clojure data structures, based on the new `datafy` and `nav` functions.
|
Clojure 1.10 introduced a new namespace, [`clojure.datafy`](http://clojure.github.io/clojure/clojure.datafy-api.html), and two new protocols (`Datafiable` and `Navigable`) that allow for generalized, lazy navigation around data structures. Cognitect also released REBL (now Nubank's [Morse](https://github.com/nubank/morse)) -- a graphical, interactive tool for browsing Clojure data structures, based on the new `datafy` and `nav` functions.
|
||||||
|
|
||||||
Shortly after REBL's release, I added experimental support to `clojure.java.jdbc` for `datafy` and `nav` that supported lazy navigation through result sets into foreign key relationships and connected rows and tables. `next.jdbc` bakes that support into result sets produced by `execute!` and `execute-one!`.
|
Shortly after REBL's release, I added experimental support to `clojure.java.jdbc` for `datafy` and `nav` that supported lazy navigation through result sets into foreign key relationships and connected rows and tables. `next.jdbc` bakes that support into result sets produced by `execute!` and `execute-one!`.
|
||||||
|
|
||||||
## The `datafy`/`nav` Lifecycle
|
In addition to `datafy` and `nav` support in the result sets, as of version 1.0.462, there is a `next.jdbc.datafy` namespace that can be required to extend these protocols to a number of JDBC object types. See **JDBC Datafication** near the end of this page for more detail of this.
|
||||||
|
|
||||||
Here's how the process works:
|
Additional tools that understand `datafy` and `nav` include [Portal](https://github.com/djblue/portal) and [Reveal](https://github.com/vlaaad/reveal).
|
||||||
|
|
||||||
|
## The `datafy`/`nav` Lifecycle on Result Sets
|
||||||
|
|
||||||
|
Here's how the process works, for result sets produced by `next.jdbc`:
|
||||||
|
|
||||||
* `execute!` and `execute-one!` produce result sets containing rows that are `Datafiable`,
|
* `execute!` and `execute-one!` produce result sets containing rows that are `Datafiable`,
|
||||||
* Tools like REBL can call `datafy` on result sets to render them as "pure data" (which they already are, but this makes them also `Navigable`),
|
* Tools like Portal, Reveal, and Morse can call `datafy` on result sets to render them as "pure data" (which they already are, but this makes them also `Navigable`),
|
||||||
* Tools like REBL allow users to "drill down" into elements of rows in the "pure data" result set, using `nav`,
|
* Tools like Portal, Reveal, and Morse allow users to "drill down" into elements of rows in the "pure data" result set, using `nav`,
|
||||||
* If a column in a row represents a foreign key into another table, calling `nav` will fetch the related row(s),
|
* If a column in a row represents a foreign key into another table, calling `nav` will fetch the related row(s),
|
||||||
* Those can in turn be `datafy`'d and `nav`'d to continue drilling down through connected data in the database.
|
* Those can in turn be `datafy`'d and `nav`'d to continue drilling down through connected data in the database.
|
||||||
|
|
||||||
In addition to `execute!` and `execute-one!`, you can call `next.jdbc.result-set/datafiable-result-set` on any `ResultSet` object to produce a result set whose rows are `Datafiable`. Inside a reduction over the result of `plan`, you can call `next.jdbc.result-set/datafiable-row` on a row to produce a `Datafiable` row. That will realize the entire row, including generating column names using the row builder specified (or `as-maps` by default).
|
In addition to `execute!` and `execute-one!`, you can call `next.jdbc.result-set/datafiable-result-set` on any `ResultSet` object to produce a result set whose rows are `Datafiable`. Inside a reduction over the result of `plan`, you can call `next.jdbc.result-set/datafiable-row` on a row to produce a `Datafiable` row. That will realize the entire row, including generating column names using the row builder specified (or `as-maps` by default).
|
||||||
|
|
||||||
## Identifying Foreign Keys
|
### Identifying Foreign Keys
|
||||||
|
|
||||||
By default, `next.jdbc` assumes that a column named `<something>id` or `<something>_id` is a foreign key into a table called `<something>` with a primary key called `id`. As an example, if you have a table `address` which has columns `id` (the primary key), `name`, `email`, etc, and a table `contact` which has various columns including `addressid`, then if you retrieve a result set based on `contact`, call `datafy` on it and then "drill down" into the columns, when `(nav row :contact/addressid v)` is called (where `v` is the value of that column in that row) `next.jdbc`'s implementation of `nav` will fetch a single row from the `address` table, identified by `id` matching `v`.
|
By default, `next.jdbc` assumes that a column named `<something>id` or `<something>_id` is a foreign key into a table called `<something>` with a primary key called `id`. As an example, if you have a table `address` which has columns `id` (the primary key), `name`, `email`, etc, and a table `contact` which has various columns including `addressid`, then if you retrieve a result set based on `contact`, call `datafy` on it and then "drill down" into the columns, when `(nav row :contact/addressid v)` is called (where `v` is the value of that column in that row) `next.jdbc`'s implementation of `nav` will fetch a single row from the `address` table, identified by `id` matching `v`.
|
||||||
|
|
||||||
You can override this default behavior for any column in any table by providing a `:schema` option that is a hash map whose keys are column names (usually the table-qualified keywords that `next.jdbc` produces by default) and whose values are table-qualified keywords, optionally wrapped in vectors, that identify the name of the table to which that column is a foreign key and the name of the key column within that table.
|
You can override this default behavior for any column in any table by providing a `:schema` option that is a hash map whose keys are column names (usually the table-qualified keywords that `next.jdbc` produces by default) and whose values are table-qualified keywords, optionally wrapped in vectors, that identify the name of the table to which that column is a foreign key and the name of the key column within that table.
|
||||||
|
|
||||||
|
As of 1.3.909, you can also override this behavior via the `:schema-opts`
|
||||||
|
option. This is a hash map whose keys can be:
|
||||||
|
* `:fk-suffix` -- a string used instead of `"id"` to identify foreign keys,
|
||||||
|
* `:pk` -- a string used instead of `"id"` for the primary key column in the target table,
|
||||||
|
* `:pk-fn` -- a function that takes the table name and the value of `:pk` and returns the name of the primary key column in the target table, instead of just using the value of `:pk` (the default is effectively `(constantly <pk>)`).
|
||||||
|
|
||||||
|
For `:fk-suffix`, the `_` is still permitted and optional in the column name,
|
||||||
|
so if you specified `:schema-opts {:fk-suffix "fk"}` then `addressfk` and
|
||||||
|
`address_fk` would both be treated as foreign keys into the `address` table.
|
||||||
|
|
||||||
|
_Note: as of 1.3.939, `-` is permitted in key names (in addition to `_`) so that kebab result set builders work as expected._
|
||||||
|
|
||||||
|
The `:pk-fn` can use the table name to determine the primary key column name
|
||||||
|
for exceptions to the `:pk` value. For example, if you have a table `address`
|
||||||
|
with a primary key column `address_id` instead of `id`, you could use:
|
||||||
|
|
||||||
|
```clojure
|
||||||
|
:pk-fn (fn [table pk]
|
||||||
|
(if (= "address" table)
|
||||||
|
"address_id"
|
||||||
|
pk))
|
||||||
|
```
|
||||||
|
|
||||||
The default behavior in the example above is equivalent to this `:schema` value:
|
The default behavior in the example above is equivalent to this `:schema` value:
|
||||||
|
|
||||||
```clojure
|
```clojure
|
||||||
|
|
@ -31,6 +58,16 @@ The default behavior in the example above is equivalent to this `:schema` value:
|
||||||
{:schema {:contact/addressid :address/id}})
|
{:schema {:contact/addressid :address/id}})
|
||||||
```
|
```
|
||||||
|
|
||||||
|
or these `:schema-opts` values:
|
||||||
|
|
||||||
|
```clojure
|
||||||
|
(jdbc/execute! ds
|
||||||
|
["select * from contact where city = ?" "San Francisco"]
|
||||||
|
;; a one-to-one or many-to-one relationship
|
||||||
|
{:schema-opts {:fk-suffix "id" :pk "id"
|
||||||
|
:pk-fn (constantly "id")}})
|
||||||
|
```
|
||||||
|
|
||||||
If you had a table to track the valid/bouncing status of email addresses over time, `:deliverability`, where `email` is the non-unique key, you could provide automatic navigation into that using:
|
If you had a table to track the valid/bouncing status of email addresses over time, `:deliverability`, where `email` is the non-unique key, you could provide automatic navigation into that using:
|
||||||
|
|
||||||
```clojure
|
```clojure
|
||||||
|
|
@ -41,20 +78,69 @@ If you had a table to track the valid/bouncing status of email addresses over ti
|
||||||
:address/email [:deliverability/email]}})
|
:address/email [:deliverability/email]}})
|
||||||
```
|
```
|
||||||
|
|
||||||
|
Since this relies on a foreign key that does not follow a standard suffix
|
||||||
|
pattern, there is no comparable `:schema-opts` version. In addition, the
|
||||||
|
`:schema-opts` approach cannot designate a one-to-many or many-to-many
|
||||||
|
relationship.
|
||||||
|
|
||||||
When you indicate a `*-to-many` relationship, by wrapping the foreign table/key in a vector, `next.jdbc`'s implementation of `nav` will fetch a multi-row result set from the target table.
|
When you indicate a `*-to-many` relationship, by wrapping the foreign table/key in a vector, `next.jdbc`'s implementation of `nav` will fetch a multi-row result set from the target table.
|
||||||
|
|
||||||
If you use foreign key constraints in your database, you could probably generate this `:schema` data structure automatically from the metadata in your database. Similarly, if you use a library that depends on an entity relationship map (such as [seql](https://exoscale.github.io/seql/) or [walkable](https://walkable.gitlab.io/)), then you could probably generate this `:schema` data structure from that entity map.
|
If you use foreign key constraints in your database, you could probably generate this `:schema` data structure automatically from the metadata in your database. Similarly, if you use a library that depends on an entity relationship map (such as [seql](https://github.com/exoscale/seql) or [walkable](https://walkable.gitlab.io/)), then you could probably generate this `:schema` data structure from that entity map.
|
||||||
|
|
||||||
## Behind The Scenes
|
### Behind The Scenes
|
||||||
|
|
||||||
Making rows datafiable is implemented by adding metadata to each row with a key of `clojure.core.protocols/datafy` and a function as the value. That function closes over the connectable and options passed in to the `execute!` or `execute-one!` call that produced the result set containing those rows.
|
Making rows datafiable is implemented by adding metadata to each row with a key of `clojure.core.protocols/datafy` and a function as the value. That function closes over the connectable and options passed in to the `execute!` or `execute-one!` call that produced the result set containing those rows.
|
||||||
|
|
||||||
When called (`datafy` on a row), it adds metadata to the row with a key of `clojure.core.protocols/nav` and another function as the value. That function also closes over the connectable and options passed in.
|
When called (`datafy` on a row), it adds metadata to the row with a key of `clojure.core.protocols/nav` and another function as the value. That function also closes over the connectable and options passed in.
|
||||||
|
|
||||||
When that is called (`nav` on a row, column name, and column value), if a `:schema` entry exists for that column or it matches the default convention described above, then it will fetch row(s) using `next.jdbc`'s `Executable` functions `-execute-one` or `-execute-all`, passing in the connectable and options closed over.
|
When that is called (`nav` on a row, column name, and column value), if a
|
||||||
|
`:schema` entry exists for that column or it matches the convention described
|
||||||
|
above (either by default or via `:schema-opts`), then it will fetch row(s)
|
||||||
|
using `next.jdbc`'s `Executable` functions `-execute-one` or `-execute-all`,
|
||||||
|
passing in the connectable and options closed over.
|
||||||
|
|
||||||
The protocol `next.jdbc.result-set/DatafiableRow` has a default implementation of `datafiable-row` for `clojure.lang.IObj` that just adds the metadata to support `datafy`. There is also an implementation baked into the result set handling behind `plan` so that you can call `datafiable-row` directly during reduction and get a fully-realized row that can be `datafy`'d (and then `nav`igated).
|
The protocol `next.jdbc.result-set/DatafiableRow` has a default implementation of `datafiable-row` for `clojure.lang.IObj` that just adds the metadata to support `datafy`. There is also an implementation baked into the result set handling behind `plan` so that you can call `datafiable-row` directly during reduction and get a fully-realized row that can be `datafy`'d (and then `nav`igated).
|
||||||
|
|
||||||
In addition, you can call `next.jdbc.result-set/datafiable-result-set` on any `ResultSet` object and get a fully realized, datafiable result set created using any of the result set builders.
|
In addition, you can call `next.jdbc.result-set/datafiable-result-set` on any `ResultSet` object and get a fully realized, datafiable result set created using any of the result set builders.
|
||||||
|
|
||||||
[<: All The Options](/doc/all-the-options.md) | [Migration from `clojure.java.jdbc` :>](/doc/migration-from-clojure-java-jdbc.md)
|
## JDBC Datafication
|
||||||
|
|
||||||
|
If you require `next.jdbc.datafy`, the `Datafiable` protocol is extended to several JDBC object types, so that calling `datafy` will turn them into hash maps according to Java Bean introspection, similar to `clojure.core/bean` although `next.jdbc` uses `clojure.java.data/from-java-shallow` (from [`org.clojure/java.data`](https://github.com/clojure/java.data)), with some additions as described below.
|
||||||
|
|
||||||
|
* `java.sql.Connection` -- datafies as a bean; the `:metaData` property is a `java.sql.DatabaseMetaData`, which is also datafiable.
|
||||||
|
* `DatabaseMetaData` -- datafies as a bean, with an additional `:all-tables` property (that is a dummy object); six properties are navigable to produce fully-realized datafiable result sets:
|
||||||
|
* `all-tables` -- produced from `(.getTables this nil nil nil nil)`, this is all the tables and views available from the connection that produced the database metadata,
|
||||||
|
* `catalogs` -- produced from `(.getCatalogs this)`
|
||||||
|
* `clientInfoProperties` -- all the client properties that the database driver supports,
|
||||||
|
* `schemas` -- produced from `(.getSchemas this)`,
|
||||||
|
* `tableTypes` -- produced from `(.getTableTypes this)`,
|
||||||
|
* `typeInfo` -- produced from `(.getTypeInfo this)`.
|
||||||
|
* `ParameterMetaData` -- datafies as a vector of parameter descriptions; each parameter hash map has: `:class` (the name of the parameter class -- JVM), `:mode` (one of `:in`, `:in-out`, or `:out`), `:nullability` (one of: `:null`, `:not-null`, or `:unknown`), `:precision`, `:scale`, `:type` (the name of the parameter type -- SQL), and `:signed` (Boolean).
|
||||||
|
* `ResultSet` -- datafies as a bean; if the `ResultSet` has an associated `Statement` and that in turn has an associated `Connection` then an additional key of `:rows` is provided which is a datafied result set, from `next.jdbc.result-set/datafiable-result-set` with default options. This is provided as a convenience, purely for datafication of other JDBC data types -- in normal `next.jdbc` usage, result sets are datafied under full user control.
|
||||||
|
* `ResultSetMetaData` -- datafies as a vector of column descriptions; each column hash map has: `:auto-increment`, `:case-sensitive`, `:catalog`, `:class` (the name of the column class -- JVM), `:currency` (Boolean), `:definitely-writable`, `:display-size`, `:label`, `:name`, `:nullability`, `:precision`, `:read-only`, `:searchable`, `:signed`, `:scale`, `:schema`, `:table`, `:type`, and `:writable`.
|
||||||
|
* `Statement` -- datafies as a bean.
|
||||||
|
|
||||||
|
See the Java documentation for these JDBC types for further details on what all the properties from each of these classes mean and which are `int`, `String`, or some other JDBC object type.
|
||||||
|
|
||||||
|
In addition, requiring this namespace will affect how `next.jdbc.result-set/metadata` behaves inside the reducing function applied to the result of `plan`. Without this namespace loaded, that function will return a raw `ResultSetMetaData` object (which must not leak outside the reducing function). With this namespace loaded, that function will, instead, return a Clojure data structure describing the columns in the result set.
|
||||||
|
|
||||||
|
### SQLite
|
||||||
|
|
||||||
|
For some strange reason, SQLite has implemented their `ResultSetMetaData` as also
|
||||||
|
being a `ResultSet` which leads to ambiguity when datafying some things when
|
||||||
|
using SQLite. `next.jdbc` currently assumes that if it is asked to `datafy` a
|
||||||
|
`ResultSet` and that object is _also_ `ResultSetMetaData`, it will treat it
|
||||||
|
purely as `ResultSetMetaData`, which produces a vector of column metadata as
|
||||||
|
described above. However, there are some results in SQLite's JDBC driver that
|
||||||
|
look like `ResultSetMetaData` but should be treated as plain `ResultSet`
|
||||||
|
objects (which is what other databases' JDBC drivers return).
|
||||||
|
|
||||||
|
An example of this is what happens when you try to `datafy` the result of
|
||||||
|
calling `DatabaseMetaData.getTables()`: the JDBC documentation says you get
|
||||||
|
back a `ResultSet` but in SQLite, that is also an instance of `ResultSetMetaData`
|
||||||
|
and so `next.jdbc.datafy` treats it that way instead of as a plain `ResultSet`.
|
||||||
|
You can call `next.jdbc.result-set/datafiable-result-set` directly in this
|
||||||
|
case to get the rows as hash maps (although you won't get the underlying
|
||||||
|
metadata as a bean).
|
||||||
|
|
||||||
|
See issue [#212](https://github.com/seancorfield/next-jdbc/issues/212) for more details.
|
||||||
|
|
|
||||||
|
|
@ -18,13 +18,19 @@ as well as these more specific "read" operations:
|
||||||
|
|
||||||
These functions are described in more detail below. They are deliberately simple and intended to cover only the most common, basic SQL operations. The primary API (`plan`, `execute!`, `execute-one!`) is the recommended approach for everything beyond that. If you need more expressiveness, consider one of the following libraries to build SQL/parameter vectors, or run queries:
|
These functions are described in more detail below. They are deliberately simple and intended to cover only the most common, basic SQL operations. The primary API (`plan`, `execute!`, `execute-one!`) is the recommended approach for everything beyond that. If you need more expressiveness, consider one of the following libraries to build SQL/parameter vectors, or run queries:
|
||||||
|
|
||||||
* [HoneySQL](https://github.com/jkk/honeysql) -- a composable DSL for creating SQL/parameter vectors from Clojure data structures
|
* [HoneySQL](https://github.com/seancorfield/honeysql) -- a composable DSL for creating SQL/parameter vectors from Clojure data structures
|
||||||
* [seql](https://github.com/exoscale/seql) -- a simplified EQL-inspired query language, built on `next.jdbc` (as of release 0.1.6)
|
* [seql](https://github.com/exoscale/seql) -- a simplified EQL-inspired query language, built on `next.jdbc` (as of release 0.1.6)
|
||||||
* [SQLingvo](https://github.com/r0man/sqlingvo) -- a composable DSL for creating SQL/parameter vectors
|
* [SQLingvo](https://github.com/r0man/sqlingvo) -- a composable DSL for creating SQL/parameter vectors
|
||||||
* [Walkable](https://github.com/walkable-server/walkable) -- full EQL query language support for creating SQL/parameter vectors
|
* [Walkable](https://github.com/walkable-server/walkable) -- full EQL query language support for creating SQL/parameter vectors
|
||||||
|
|
||||||
If you prefer to write your SQL separately from your code, take a look at [HugSQL](https://github.com/layerware/hugsql) -- [HugSQL documentation](https://www.hugsql.org/) -- which has a `next.jdbc` adapter, as of version 0.5.1. See below for a "[quick start](#hugsql-quick-start)" for using HugSQL with `next.jdbc`.
|
If you prefer to write your SQL separately from your code, take a look at [HugSQL](https://github.com/layerware/hugsql) -- [HugSQL documentation](https://www.hugsql.org/) -- which has a `next.jdbc` adapter, as of version 0.5.1. See below for a "[quick start](#hugsql-quick-start)" for using HugSQL with `next.jdbc`.
|
||||||
|
|
||||||
|
As of 1.3.925, `aggregate-by-keys` exists as a wrapper around `find-by-keys`
|
||||||
|
that accepts the same options as `find-by-keys` and an aggregate SQL expression
|
||||||
|
and it returns a single value (the aggregate). `aggregate-by-keys` accepts the
|
||||||
|
same options as `find-by-keys` except that `:columns` may not be specified
|
||||||
|
(since it is used to add the aggregate to the query).
|
||||||
|
|
||||||
## `insert!`
|
## `insert!`
|
||||||
|
|
||||||
Given a table name (as a keyword) and a hash map of column names and values, this performs a single row insertion into the database:
|
Given a table name (as a keyword) and a hash map of column names and values, this performs a single row insertion into the database:
|
||||||
|
|
@ -34,11 +40,22 @@ Given a table name (as a keyword) and a hash map of column names and values, thi
|
||||||
;; equivalent to
|
;; equivalent to
|
||||||
(jdbc/execute-one! ds ["INSERT INTO address (name,email) VALUES (?,?)"
|
(jdbc/execute-one! ds ["INSERT INTO address (name,email) VALUES (?,?)"
|
||||||
"A.Person" "albert@person.org"] {:return-keys true})
|
"A.Person" "albert@person.org"] {:return-keys true})
|
||||||
|
;; some databases may require this instead
|
||||||
|
(jdbc/execute-one! ds ["INSERT INTO address (name,email) VALUES (?,?) RETURNING *"
|
||||||
|
"A.Person" "albert@person.org"])
|
||||||
|
;; which you can achieve with the :suffix option
|
||||||
|
(sql/insert! ds :address {:name "A. Person" :email "albert@person.org"}
|
||||||
|
{:suffix "RETURNING *"})
|
||||||
```
|
```
|
||||||
|
|
||||||
|
If you have multiple rows (hash maps) to insert and they all have the same
|
||||||
|
set of keys, you can use `insert-multi!` instead (see below), which will
|
||||||
|
perform a single multi-row insertion, which will generally be faster.
|
||||||
|
|
||||||
## `insert-multi!`
|
## `insert-multi!`
|
||||||
|
|
||||||
Given a table name (as a keyword), a vector of column names, and a vector of row value vectors, this performs a multi-row insertion into the database:
|
Given a table name (as a keyword), a vector of column names, and a vector of
|
||||||
|
row value vectors, this performs a single multi-row insertion into the database:
|
||||||
|
|
||||||
```clojure
|
```clojure
|
||||||
(sql/insert-multi! ds :address
|
(sql/insert-multi! ds :address
|
||||||
|
|
@ -53,14 +70,75 @@ Given a table name (as a keyword), a vector of column names, and a vector of row
|
||||||
"Aunt Sally" "sour@lagunitas.beer"] {:return-keys true})
|
"Aunt Sally" "sour@lagunitas.beer"] {:return-keys true})
|
||||||
```
|
```
|
||||||
|
|
||||||
> Note: this expands to a single SQL statement with placeholders for every
|
All the row vectors must be the same length, and must match the number of
|
||||||
|
columns specified.
|
||||||
|
|
||||||
|
Given a table name (as a keyword) and a vector of hash maps, this performs a
|
||||||
|
single multi-row insertion into the database:
|
||||||
|
|
||||||
|
```clojure
|
||||||
|
(sql/insert-multi! ds :address
|
||||||
|
[{:name "Stella", :email "stella@artois.beer"}
|
||||||
|
{:name "Waldo", :email "waldo@lagunitas.beer"}
|
||||||
|
{:name "Aunt Sally", :email "sour@lagunitas.beer"}])
|
||||||
|
;; equivalent to
|
||||||
|
(jdbc/execute! ds ["INSERT INTO address (name,email) VALUES (?,?), (?,?), (?,?)"
|
||||||
|
"Stella" "stella@artois.beer"
|
||||||
|
"Waldo" "waldo@lagunitas.beer"
|
||||||
|
"Aunt Sally" "sour@lagunitas.beer"] {:return-keys true})
|
||||||
|
```
|
||||||
|
|
||||||
|
All the hash maps must have the same set of keys, so that the vector of hash
|
||||||
|
maps can be converted to a vector of columns names and a vector of row value
|
||||||
|
vectors, as above, so a single multi-row insertion can be performed.
|
||||||
|
|
||||||
|
If you wish to insert multiple hash maps that do not have identical keys, you
|
||||||
|
need to iterate over `insert!` and insert one row at a time, which will
|
||||||
|
generally be much slower.
|
||||||
|
|
||||||
|
> Note: both of these expand to a single SQL statement with placeholders for every
|
||||||
value being inserted -- for large sets of rows, this may exceed the limits
|
value being inserted -- for large sets of rows, this may exceed the limits
|
||||||
on SQL string size and/or number of parameters for your JDBC driver or your
|
on SQL string size and/or number of parameters for your JDBC driver or your
|
||||||
database. Several databases have a limit of 1,000 parameter placeholders.
|
database. Several databases have a limit of 1,000 parameter placeholders.
|
||||||
Oracle does not support this form of multi-row insert, requiring a different
|
Oracle does not support this form of multi-row insert, requiring a different
|
||||||
syntax altogether.
|
syntax altogether.
|
||||||
|
|
||||||
You should look at [`next.jdbc.prepare/execute-batch!`](https://cljdoc.org/d/seancorfield/next.jdbc/CURRENT/api/next.jdbc.prepare#execute-batch!) for an alternative approach.
|
### Batch Insertion
|
||||||
|
|
||||||
|
As of release 1.2.790, you can specify `:batch true` in the options, which
|
||||||
|
will use `execute-batch!` under the hood, instead of `execute!`, as follows:
|
||||||
|
|
||||||
|
```clojure
|
||||||
|
(sql/insert-multi! ds :address
|
||||||
|
[:name :email]
|
||||||
|
[["Stella" "stella@artois.beer"]
|
||||||
|
["Waldo" "waldo@lagunitas.beer"]
|
||||||
|
["Aunt Sally" "sour@lagunitas.beer"]]
|
||||||
|
{:batch true})
|
||||||
|
;; equivalent to
|
||||||
|
(jdbc/execute-batch! ds
|
||||||
|
"INSERT INTO address (name,email) VALUES (?,?)"
|
||||||
|
[["Stella" "stella@artois.beer"]
|
||||||
|
["Waldo" "waldo@lagunitas.beer"]
|
||||||
|
["Aunt Sally" "sour@lagunitas.beer"]]
|
||||||
|
{:return-keys true :return-generated-keys true})
|
||||||
|
;; and
|
||||||
|
(sql/insert-multi! ds :address
|
||||||
|
[:name :email]
|
||||||
|
[{:name "Stella", :email "stella@artois.beer"}
|
||||||
|
{:name "Waldo", :email "waldo@lagunitas.beer"}
|
||||||
|
{:name "Aunt Sally", :email "sour@lagunitas.beer"}]
|
||||||
|
{:batch true})
|
||||||
|
;; equivalent to
|
||||||
|
(jdbc/execute-batch! ds
|
||||||
|
"INSERT INTO address (name,email) VALUES (?,?)"
|
||||||
|
[["Stella" "stella@artois.beer"]
|
||||||
|
["Waldo" "waldo@lagunitas.beer"]
|
||||||
|
["Aunt Sally" "sour@lagunitas.beer"]]
|
||||||
|
{:return-keys true :return-generated-keys true})
|
||||||
|
```
|
||||||
|
|
||||||
|
> Note: not all databases or drivers support returning generated keys like this -- see [**Batched Parameters**](https://cljdoc.org/d/com.github.seancorfield/next.jdbc/CURRENT/doc/getting-started/prepared-statements#caveats) for caveats and possible database-specific behaviors. You may need `RETURNING *` in your SQL instead.
|
||||||
|
|
||||||
## `query`
|
## `query`
|
||||||
|
|
||||||
|
|
@ -113,6 +191,45 @@ Given a table name (as a keyword) and either a hash map of column names and valu
|
||||||
"Stella" "stella@artois.beer"])
|
"Stella" "stella@artois.beer"])
|
||||||
```
|
```
|
||||||
|
|
||||||
|
While the hash map approach -- "query by example" -- is great for equality
|
||||||
|
comparisons, sometimes you need other types of comparisons. For example, you
|
||||||
|
might want to find all the rows where the email address ends in `.beer`:
|
||||||
|
|
||||||
|
```clojure
|
||||||
|
(sql/find-by-keys ds :address ["email LIKE ?" "%.beer"])
|
||||||
|
;; equivalent to
|
||||||
|
(jdbc/execute! ds ["SELECT * FROM address WHERE email LIKE ?" "%.beer"])
|
||||||
|
```
|
||||||
|
|
||||||
|
Or you may want to find all the rows where the name is one of a specific
|
||||||
|
set of values:
|
||||||
|
|
||||||
|
```clojure
|
||||||
|
(sql/find-by-keys ds :address ["name IN (?,?)" "Stella" "Waldo"])
|
||||||
|
;; equivalent to
|
||||||
|
(jdbc/execute! ds ["SELECT * FROM address WHERE name IN (?,?)" "Stella" "Waldo"])
|
||||||
|
```
|
||||||
|
|
||||||
|
The default behavior is to return all the columns in each row. You can specify a subset of columns to return using the `:columns` option. It takes a vector and each element of the vector can be:
|
||||||
|
|
||||||
|
* a simple keyword representing the column name (`:column-fn` will be applied, if provided),
|
||||||
|
* a pair of keywords representing the column name and an alias (`:column-fn` will be applied to both, if provided),
|
||||||
|
* a pair consisting of a string and a keyword, representing a SQL expression and an alias (`:column-fn` will be applied to the alias, if provided).
|
||||||
|
|
||||||
|
```clojure
|
||||||
|
(sql/find-by-keys ds :address {:name "Stella"} {:columns [[:email :address]]})
|
||||||
|
;; equivalent to
|
||||||
|
(jdbc/execute! ds ["SELECT email AS address FROM address WHERE name = ?"
|
||||||
|
"Stella"])
|
||||||
|
|
||||||
|
(sql/find-by-keys ds :address {:name "Stella"} {:columns [["count(*)" :n]]})
|
||||||
|
;; equivalent to
|
||||||
|
(jdbc/execute! ds ["SELECT count(*) AS n FROM address WHERE name = ?"
|
||||||
|
"Stella"])
|
||||||
|
```
|
||||||
|
|
||||||
|
> Note: the SQL string provided for a column is copied exactly as-is into the generated SQL -- you are responsible for ensuring it is legal SQL!
|
||||||
|
|
||||||
`find-by-keys` supports an `:order-by` option which can specify a vector of column names to sort the results by. Elements may be column names or pairs of a column name and the direction to sort: `:asc` or `:desc`:
|
`find-by-keys` supports an `:order-by` option which can specify a vector of column names to sort the results by. Elements may be column names or pairs of a column name and the direction to sort: `:asc` or `:desc`:
|
||||||
|
|
||||||
```clojure
|
```clojure
|
||||||
|
|
@ -124,8 +241,38 @@ Given a table name (as a keyword) and either a hash map of column names and valu
|
||||||
"Stella" "stella@artois.beer"])
|
"Stella" "stella@artois.beer"])
|
||||||
```
|
```
|
||||||
|
|
||||||
|
`find-by-keys` also supports basic pagination with `:offset` and `:fetch` options which both accept numeric values and adds `OFFSET ? ROWS FETCH NEXT ? ROWS ONLY` to the generated query. To support MySQL and SQLite, you can specify `:limit` instead of `:fetch` which adds `LIMIT ? OFFSET ?` to the generated query instead.
|
||||||
|
|
||||||
|
If you want to match all rows in a table -- perhaps with the pagination options in effect -- you can pass the keyword `:all` instead of either a hash map of column names and values or a vector containing a partial `WHERE` clause and parameters.
|
||||||
|
|
||||||
|
```clojure
|
||||||
|
(sql/find-by-keys ds :address :all {:order-by [:id] :offset 5 :fetch 10})
|
||||||
|
;; equivalent to
|
||||||
|
(jdbc/execute! ds ["SELECT * FROM address ORDER BY id OFFSET ? ROWS FETCH NEXT ? ROWS ONLY" 5 10])
|
||||||
|
```
|
||||||
|
|
||||||
If no rows match, `find-by-keys` returns `[]`, just like `execute!`.
|
If no rows match, `find-by-keys` returns `[]`, just like `execute!`.
|
||||||
|
|
||||||
|
## `aggregate-by-keys`
|
||||||
|
|
||||||
|
Added in 1.3.925, this is a wrapper around `find-by-keys` that makes it easier
|
||||||
|
to perform aggregate queries:
|
||||||
|
|
||||||
|
```clojure
|
||||||
|
(sql/aggregate-by-keys ds :address "count(*)" {:name "Stella"
|
||||||
|
:email "stella@artois.beer"})
|
||||||
|
;; is roughly equivalent to
|
||||||
|
(-> (sql/find-by-keys ds :address {:name "Stella" :email "stella@artois.beer"}
|
||||||
|
{:columns [["count(*)" :next_jdbc_aggregate_123]]})
|
||||||
|
(first)
|
||||||
|
(get :next_jdbc_aggregate_123))
|
||||||
|
```
|
||||||
|
|
||||||
|
(where `:next_jdbc_aggregate_123` is a unique alias generated by `next.jdbc`,
|
||||||
|
derived from the aggregate expression string).
|
||||||
|
|
||||||
|
> Note: the SQL string provided for the aggregate is copied exactly as-is into the generated SQL -- you are responsible for ensuring it is legal SQL!
|
||||||
|
|
||||||
## `get-by-id`
|
## `get-by-id`
|
||||||
|
|
||||||
Given a table name (as a keyword) and a primary key value, with an optional primary key column name, execute a query on the database:
|
Given a table name (as a keyword) and a primary key value, with an optional primary key column name, execute a query on the database:
|
||||||
|
|
@ -133,9 +280,9 @@ Given a table name (as a keyword) and a primary key value, with an optional prim
|
||||||
```clojure
|
```clojure
|
||||||
(sql/get-by-id ds :address 2)
|
(sql/get-by-id ds :address 2)
|
||||||
;; equivalent to
|
;; equivalent to
|
||||||
(sql/get-by-id ds :address 2 {})
|
(sql/get-by-id ds :address 2 {}) ; empty options map
|
||||||
;; equivalent to
|
;; equivalent to
|
||||||
(sql/get-by-id ds :address 2 :id {})
|
(sql/get-by-id ds :address 2 :id {}) ; empty options map
|
||||||
;; equivalent to
|
;; equivalent to
|
||||||
(jdbc/execute-one! ds ["SELECT * FROM address WHERE id = ?" 2])
|
(jdbc/execute-one! ds ["SELECT * FROM address WHERE id = ?" 2])
|
||||||
```
|
```
|
||||||
|
|
@ -166,15 +313,24 @@ These quoting functions can be provided to any of the friendly SQL functions abo
|
||||||
(sql/insert! ds :my-table {:some "data"} {:table-fn snake-case})
|
(sql/insert! ds :my-table {:some "data"} {:table-fn snake-case})
|
||||||
```
|
```
|
||||||
|
|
||||||
Note that the entity naming function is passed a string, the result of calling `name` on the keyword passed in. Also note that the default quoting functions do not handle schema-qualified names, such as `dbo.table_name` -- `sql-server` would produce `[dbo.table_name]` from that. Use the `schema` function to wrap the quoting function if you need that behavior, e.g., `{:table-fn (schema sql-server)}` which would produce `[dbo].[table_name]`.
|
`next.jdbc` provides `snake-kebab-opts` and `unqualified-snake-kebab-opts` which are hash maps containing `:column-fn` and `:table-fn` that use the `->snake_case` function from the [camel-snake-kebab library](https://github.com/clj-commons/camel-snake-kebab/) which performs a more sophisticated transformation:
|
||||||
|
|
||||||
|
```clojure
|
||||||
|
;; transforms :my-table to my_table as above but will also transform
|
||||||
|
;; column names; in addition, it will perform the reverse transformation
|
||||||
|
;; on any results, e.g., turning MySQL's :GENERATED_KEY into :generated-key
|
||||||
|
(sql/insert! ds :my-table {:some "data"} jdbc/snake-kebab-opts)
|
||||||
|
```
|
||||||
|
|
||||||
|
> Note: The entity naming function is passed a string, the result of calling `name` on the keyword passed in. Also note that the default quoting functions do not handle schema-qualified names, such as `dbo.table_name` -- `sql-server` would produce `[dbo.table_name]` from that. Use the `schema` function to wrap the quoting function if you need that behavior, e.g., `{:table-fn (schema sql-server)}` which would produce `[dbo].[table_name]`.
|
||||||
|
|
||||||
## HugSQL Quick Start
|
## HugSQL Quick Start
|
||||||
|
|
||||||
Here's how to get up and running quickly with `next.jdbc` and HugSQL. For more detail, consult the [HugSQL documentation](https://www.hugsql.org/). Add the following dependencies to your project (in addition to `seancorfield/next.jdbc` and whichever JDBC drivers you need):
|
Here's how to get up and running quickly with `next.jdbc` and HugSQL. For more detail, consult the [HugSQL documentation](https://www.hugsql.org/). Add the following dependencies to your project (in addition to `com.github.seancorfield/next.jdbc` and whichever JDBC drivers you need):
|
||||||
|
|
||||||
```clojure
|
```clojure
|
||||||
com.layerware/hugsql-core {:mvn/version "0.5.1"}
|
com.layerware/hugsql-core {:mvn/version "0.5.3"}
|
||||||
com.layerware/hugsql-adapter-next-jdbc {:mvn/version "0.5.1"}
|
com.layerware/hugsql-adapter-next-jdbc {:mvn/version "0.5.3"}
|
||||||
```
|
```
|
||||||
|
|
||||||
_Check the HugSQL documentation for the latest versions to use!_
|
_Check the HugSQL documentation for the latest versions to use!_
|
||||||
|
|
@ -203,7 +359,7 @@ At program startup you'll need to call these functions (either at the top-level
|
||||||
Those calls will add function definitions to that namespace based on what is in the `.sql` files. Now set up your db-spec and datasource as usual with `next.jdbc`:
|
Those calls will add function definitions to that namespace based on what is in the `.sql` files. Now set up your db-spec and datasource as usual with `next.jdbc`:
|
||||||
|
|
||||||
```clojure
|
```clojure
|
||||||
(def db-spec {:dbytpe "h2:mem" :dbtype "example"}) ; assumes H2 driver in deps.edn
|
(def db-spec {:dbtype "h2:mem" :dbname "example"}) ; assumes H2 driver in deps.edn
|
||||||
|
|
||||||
(def ds (jdbc/get-datasource db-spec))
|
(def ds (jdbc/get-datasource db-spec))
|
||||||
```
|
```
|
||||||
|
|
@ -230,5 +386,3 @@ By default, for compatibility with their default adapter (`clojure.java.jdbc`),
|
||||||
(character-by-id ds {:id 1})
|
(character-by-id ds {:id 1})
|
||||||
;;=> #:CHARACTERS{:ID 1, :NAME "Westley", :SPECIALTY "love", :CREATED_AT #inst "2019-09-27T18:52:54.413000000-00:00"}
|
;;=> #:CHARACTERS{:ID 1, :NAME "Westley", :SPECIALTY "love", :CREATED_AT #inst "2019-09-27T18:52:54.413000000-00:00"}
|
||||||
```
|
```
|
||||||
|
|
||||||
[<: Getting Started](/doc/getting-started.md) | [Tips & Tricks :>](/doc/tips-and-tricks.md)
|
|
||||||
|
|
|
||||||
|
|
@ -6,31 +6,40 @@ It is designed to work with Clojure 1.10 or later, supports `datafy`/`nav`, and
|
||||||
|
|
||||||
## Installation
|
## Installation
|
||||||
|
|
||||||
|
**You must be using Clojure 1.10 or later.** 1.12.0 is the most recent stable version of Clojure (as of March 15th, 2024).
|
||||||
|
|
||||||
You can add `next.jdbc` to your project with either:
|
You can add `next.jdbc` to your project with either:
|
||||||
|
|
||||||
```clojure
|
```clojure
|
||||||
{seancorfield/next.jdbc {:mvn/version "1.0.445"}}
|
com.github.seancorfield/next.jdbc {:mvn/version "1.3.1002"}
|
||||||
```
|
```
|
||||||
for `deps.edn` or:
|
for `deps.edn` or:
|
||||||
|
|
||||||
```clojure
|
```clojure
|
||||||
[seancorfield/next.jdbc "1.0.445"]
|
[com.github.seancorfield/next.jdbc "1.3.1002"]
|
||||||
```
|
```
|
||||||
for `project.clj` or `build.boot`.
|
for `project.clj` or `build.boot`.
|
||||||
|
|
||||||
**In addition, you will need to add dependencies for the JDBC drivers you wish to use for whatever databases you are using.** You can see the drivers and versions that `next.jdbc` is tested against in [the project's `deps.edn` file](https://github.com/seancorfield/next-jdbc/blob/master/deps.edn#L10-L25), but many other JDBC drivers for other databases should also work (e.g., Oracle, Red Shift).
|
**In addition, you will need to add dependencies for the JDBC drivers you wish to use for whatever databases you are using. For example:**
|
||||||
|
|
||||||
|
* MySQL: `com.mysql/mysql-connector-j {:mvn/version "9.1.0"}` ([search for latest version](https://search.maven.org/artifact/com.mysql/mysql-connector-j))
|
||||||
|
* PostgreSQL: `org.postgresql/postgresql {:mvn/version "42.7.4"}` ([search for latest version](https://search.maven.org/artifact/org.postgresql/postgresql))
|
||||||
|
* Microsoft SQL Server: `com.microsoft.sqlserver/mssql-jdbc {:mvn/version "12.8.1.jre11"}` ([search for latest version](https://search.maven.org/artifact/com.microsoft.sqlserver/mssql-jdbc))
|
||||||
|
* SQLite: `org.xerial/sqlite-jdbc {:mvn/version "3.47.1.0"}` ([search for latest version](https://search.maven.org/artifact/org.xerial/sqlite-jdbc))
|
||||||
|
|
||||||
|
> Note: these are the versions that `next.jdbc` is tested against but there may be more recent versions and those should generally work too -- click the "search for latest version" link to see all available versions of those drivers on Maven Central. You can see the full list of drivers and versions that `next.jdbc` is tested against in [the project's `deps.edn` file](https://github.com/seancorfield/next-jdbc/blob/develop/deps.edn#L10-L27), but many other JDBC drivers for other databases should also work (e.g., Oracle, Red Shift).
|
||||||
|
|
||||||
## An Example REPL Session
|
## An Example REPL Session
|
||||||
|
|
||||||
To start using `next.jdbc`, you need to create a datasource (an instance of `javax.sql.DataSource`). You can use `next.jdbc/get-datasource` with either a "db-spec" -- a hash map describing the database you wish to connect to -- or a JDBC URI string. Or you can construct a datasource from one of the connection pooling libraries out there, such as [HikariCP](https://brettwooldridge.github.io/HikariCP/) or [c3p0](https://www.mchange.com/projects/c3p0/) -- see [Connection Pooling](#connection-pooling) below.
|
To start using `next.jdbc`, you need to create a datasource (an instance of `javax.sql.DataSource`). You can use `next.jdbc/get-datasource` with either a "db-spec" -- a hash map describing the database you wish to connect to -- or a JDBC URL string. Or you can construct a datasource from one of the connection pooling libraries out there, such as [HikariCP](https://github.com/brettwooldridge/HikariCP) or [c3p0](https://www.mchange.com/projects/c3p0/) -- see [Connection Pooling](#connection-pooling) below.
|
||||||
|
|
||||||
For the examples in this documentation, we will use a local H2 database on disk, and we'll use the [Clojure CLI tools](https://clojure.org/guides/deps_and_cli) and `deps.edn`:
|
For the examples in this documentation, we will use a local H2 database on disk, and we'll use the [Clojure CLI tools](https://clojure.org/guides/deps_and_cli) and `deps.edn`:
|
||||||
|
|
||||||
```clojure
|
```clojure
|
||||||
;; deps.edn
|
;; deps.edn
|
||||||
{:deps {org.clojure/clojure {:mvn/version "1.10.1"}
|
{:deps {org.clojure/clojure {:mvn/version "1.12.0"}
|
||||||
seancorfield/next.jdbc {:mvn/version "1.0.445"}
|
com.github.seancorfield/next.jdbc {:mvn/version "1.3.1002"}
|
||||||
com.h2database/h2 {:mvn/version "1.4.199"}}}
|
com.h2database/h2 {:mvn/version "2.3.232"}}}
|
||||||
```
|
```
|
||||||
|
|
||||||
### Create & Populate a Database
|
### Create & Populate a Database
|
||||||
|
|
@ -39,7 +48,7 @@ In this REPL session, we'll define an H2 datasource, create a database with a si
|
||||||
|
|
||||||
```clojure
|
```clojure
|
||||||
> clj
|
> clj
|
||||||
Clojure 1.10.1
|
Clojure 1.12.0
|
||||||
user=> (require '[next.jdbc :as jdbc])
|
user=> (require '[next.jdbc :as jdbc])
|
||||||
nil
|
nil
|
||||||
user=> (def db {:dbtype "h2" :dbname "example"})
|
user=> (def db {:dbtype "h2" :dbname "example"})
|
||||||
|
|
@ -66,7 +75,12 @@ user=>
|
||||||
|
|
||||||
We described the database with just `:dbtype` and `:dbname` because it is created as a local file and needs no authentication. For most databases, you would need `:user` and `:password` for authentication, and if the database is running on a remote machine you would need `:host` and possibly `:port` (`next.jdbc` tries to guess the correct port based on the `:dbtype`).
|
We described the database with just `:dbtype` and `:dbname` because it is created as a local file and needs no authentication. For most databases, you would need `:user` and `:password` for authentication, and if the database is running on a remote machine you would need `:host` and possibly `:port` (`next.jdbc` tries to guess the correct port based on the `:dbtype`).
|
||||||
|
|
||||||
> Note: You can see the full list of `:dbtype` values supported in [next.jdbc/get-datasource](https://cljdoc.org/d/seancorfield/next.jdbc/CURRENT/api/next.jdbc#get-datasource)'s docstring. If you need this programmatically, you can get it from the [next.jdbc.connection/dbtypes](https://cljdoc.org/d/seancorfield/next.jdbc/CURRENT/api/next.jdbc.connection#dbtypes) hash map. If those lists differ, the hash map is the definitive list (and I'll need to fix the docstring!). The docstring of that Var explains how to tell `next.jdbc` about additional databases.
|
> Note: You can see the full list of `:dbtype` values supported in [next.jdbc/get-datasource](https://cljdoc.org/d/com.github.seancorfield/next.jdbc/CURRENT/api/next.jdbc#get-datasource)'s docstring. If you need this programmatically, you can get it from the [next.jdbc.connection/dbtypes](https://cljdoc.org/d/com.github.seancorfield/next.jdbc/CURRENT/api/next.jdbc.connection#dbtypes) hash map. If those lists differ, the hash map is the definitive list (and I'll need to fix the docstring!). The docstring of that Var explains how to tell `next.jdbc` about additional databases.
|
||||||
|
|
||||||
|
The hash map can contain arbitrary keys and values: any keys not specifically
|
||||||
|
recognized by `next.jdbc` will be passed through to the JDBC driver as part
|
||||||
|
of the connection string. For example, if you specify `:useSSL false`, then
|
||||||
|
the connection string will have `&useSSL=false` appended to it.
|
||||||
|
|
||||||
If you already have a JDBC URL (string), you can use that as-is instead of the db-spec hash map. If you have a JDBC URL and still need additional options passed into the JDBC driver, you can use a hash map with the `:jdbcUrl` key specifying the string and whatever additional options you need.
|
If you already have a JDBC URL (string), you can use that as-is instead of the db-spec hash map. If you have a JDBC URL and still need additional options passed into the JDBC driver, you can use a hash map with the `:jdbcUrl` key specifying the string and whatever additional options you need.
|
||||||
|
|
||||||
|
|
@ -93,13 +107,15 @@ Since we used `execute-one!`, we get just one row back (a hash map). This also s
|
||||||
If the result set contains no rows, `execute-one!` returns `nil`.
|
If the result set contains no rows, `execute-one!` returns `nil`.
|
||||||
When no result is available, and `next.jdbc` returns a fake "result set" containing the "update count", `execute-one!` returns just a single hash map with the key `next.jdbc/update-count` and the number of rows updated.
|
When no result is available, `next.jdbc` returns a fake "result set" containing the "update count", and `execute-one!` returns just a single hash map with the key `next.jdbc/update-count` and the number of rows updated.
|
||||||
|
|
||||||
|
In the same way that you would use `execute-one!` if you only want one row or one update count, compared to `execute!` for multiple rows or a vector containing an update count, you can also ask `execute!` to return multiple result sets -- such as might be returned from a stored procedure call, or a T-SQL script (for SQL Server), or multiple statements (for MySQL) -- instead of just one. If you pass the `:multi-rs true` option to `execute!`, you will get back a vector of results sets, instead of just one result set: a vector of zero or more vectors. The result may well be a mix of vectors containing realized rows and vectors containing update counts, reflecting the results from specific SQL operations in the stored procedure or script.
|
||||||
|
|
||||||
> Note: In general, you should use `execute-one!` for DDL operations since you will only get back an update count. If you have a SQL statement that you know will only return an update count, `execute-one!` is the right choice. If you have a SQL statement that you know will only return a single row in the result set, you probably want to use `execute-one!`. If you use `execute-one!` for a SQL statement that would return multiple rows in a result set, even though you will only get the first row back (as a hash map), the full result set will still be retrieved from the database -- it does not limit the SQL in any way.
|
> Note: In general, you should use `execute-one!` for DDL operations since you will only get back an update count. If you have a SQL statement that you know will only return an update count, `execute-one!` is the right choice. If you have a SQL statement that you know will only return a single row in the result set, you probably want to use `execute-one!`. If you use `execute-one!` for a SQL statement that would return multiple rows in a result set, even though you will only get the first row back (as a hash map), the full result set will still be retrieved from the database -- it does not limit the SQL in any way.
|
||||||
|
|
||||||
### Options & Result Set Builders
|
### Options & Result Set Builders
|
||||||
|
|
||||||
All functions in `next.jdbc` (except `get-datasource`) can accept, as the optional last argument, a hash map containing a [variety of options](/doc/all-the-options.md) that control the behavior of the `next.jdbc` functions.
|
All functions in `next.jdbc` (except `get-datasource`) can accept, as the optional last argument, a hash map containing a [variety of options](/doc/all-the-options.md) that control the behavior of the `next.jdbc` functions.
|
||||||
|
|
||||||
We saw `:return-keys` provided as an option to the `execute-one!` function above and mentioned the `:builder-fn` option just above that. As noted, the default behavior it to return rows as hash maps with namespace-qualified keywords identifying the column names with the table name as the qualifier. There's a whole chapter on [result set builders](/doc/result-set-builders.md) but here's a quick example showing how to get unqualified, lower case keywords instead:
|
We saw `:return-keys` provided as an option to the `execute-one!` function above and mentioned the `:builder-fn` option just above that. As noted, the default behavior is to return rows as hash maps with namespace-qualified keywords identifying the column names with the table name as the qualifier. There's a whole chapter on [result set builders](/doc/result-set-builders.md) but here's a quick example showing how to get unqualified, lower case keywords instead:
|
||||||
|
|
||||||
```clojure
|
```clojure
|
||||||
user=> (require '[next.jdbc.result-set :as rs])
|
user=> (require '[next.jdbc.result-set :as rs])
|
||||||
|
|
@ -108,19 +124,49 @@ user=> (jdbc/execute-one! ds ["
|
||||||
insert into address(name,email)
|
insert into address(name,email)
|
||||||
values('Someone Else','some@elsewhere.com')
|
values('Someone Else','some@elsewhere.com')
|
||||||
"] {:return-keys true :builder-fn rs/as-unqualified-lower-maps})
|
"] {:return-keys true :builder-fn rs/as-unqualified-lower-maps})
|
||||||
{:id 2}
|
{:id 3}
|
||||||
user=> (jdbc/execute-one! ds ["select * from address where id = ?" 2]
|
user=> (jdbc/execute-one! ds ["select * from address where id = ?" 3]
|
||||||
{:builder-fn rs/as-unqualified-lower-maps})
|
{:builder-fn rs/as-unqualified-lower-maps})
|
||||||
{:id 2, :name "Someone Else", :email "some@elsewhere.com"}
|
{:id 3, :name "Someone Else", :email "some@elsewhere.com"}
|
||||||
user=>
|
user=>
|
||||||
```
|
```
|
||||||
|
|
||||||
Relying on the default result set builder -- and table-qualified column names -- is the recommended approach to take, if possible, with a few caveats:
|
Relying on the default result set builder -- and table-qualified column names -- is the recommended approach to take, if possible, with a few caveats:
|
||||||
* MS SQL Server produces unqualified column names by default (see [**Tips & Tricks**](/doc/tips-and-tricks.md) for how to get table names back from MS SQL Server),
|
* MS SQL Server produces unqualified column names by default (see [**Tips & Tricks**](/doc/tips-and-tricks.md) for how to get table names back from MS SQL Server),
|
||||||
* Oracle's JDBC driver doesn't support `.getTableName()` so it will only produce unqualified column names (also mentioned in **Tips & Tricks**),
|
* Oracle's JDBC driver doesn't support `.getTableName()` so it will only produce unqualified column names (also mentioned in **Tips & Tricks**),
|
||||||
|
* PostgreSQL's JDBC driver performs an extra SQL query to get the necessary metadata, so there is some overhead to using qualified column names (also mentioned in **Tips & Tricks**),
|
||||||
* If your SQL query joins tables in a way that produces duplicate column names, and you use unqualified column names, then those duplicated column names will conflict and you will get only one of them in your result -- use aliases in SQL (`as`) to make the column names distinct,
|
* If your SQL query joins tables in a way that produces duplicate column names, and you use unqualified column names, then those duplicated column names will conflict and you will get only one of them in your result -- use aliases in SQL (`as`) to make the column names distinct,
|
||||||
* If your SQL query joins a table to itself under different aliases, the _qualified_ column names will conflict because they are based on the underlying table name provided by the JDBC driver rather than the alias you used in your query -- again, use aliases in SQL to make those column names distinct.
|
* If your SQL query joins a table to itself under different aliases, the _qualified_ column names will conflict because they are based on the underlying table name provided by the JDBC driver rather than the alias you used in your query -- again, use aliases in SQL to make those column names distinct.
|
||||||
|
|
||||||
|
If you want to pass the same set of options into several operations, you can use `next.jdbc/with-options` to wrap your datasource (or connection) in a way that will pass "default options". Here's the example above rewritten with that:
|
||||||
|
|
||||||
|
```clojure
|
||||||
|
user=> (require '[next.jdbc.result-set :as rs])
|
||||||
|
nil
|
||||||
|
user=> (def ds-opts (jdbc/with-options ds {:builder-fn rs/as-unqualified-lower-maps}))
|
||||||
|
#'user/ds-opts
|
||||||
|
user=> (jdbc/execute-one! ds-opts ["
|
||||||
|
insert into address(name,email)
|
||||||
|
values('Someone Else','some@elsewhere.com')
|
||||||
|
"] {:return-keys true})
|
||||||
|
{:id 4}
|
||||||
|
user=> (jdbc/execute-one! ds-opts ["select * from address where id = ?" 4])
|
||||||
|
{:id 4, :name "Someone Else", :email "some@elsewhere.com"}
|
||||||
|
user=>
|
||||||
|
```
|
||||||
|
|
||||||
|
> Note: See the `next.jdbc/with-options` examples in the [**Datasources, Connections & Transactions**](#datasources-connections--transactions) below for some caveats around using this function.
|
||||||
|
|
||||||
|
In addition, two pre-built option hash maps are available in `next.jdbc`, that leverage the [camel-snake-kebab library](https://github.com/clj-commons/camel-snake-kebab/):
|
||||||
|
* `snake-kebab-opts` -- provides `:column-fn`, `:table-fn`, `:label-fn`, `:qualifier-fn`, and `:builder-fn` that will convert Clojure identifiers in `:kebab-case` to SQL entities in `snake_case` and will produce result sets with qualified `:kebab-case` names from SQL entities that use `snake_case`,
|
||||||
|
* `unqualified-snake-kebab-opts` -- provides `:column-fn`, `:table-fn`, `:label-fn`, `:qualifier-fn`, and `:builder-fn` that will convert Clojure identifiers in `:kebab-case` to SQL entities in `snake_case` and will produce result sets with _unqualified_ `:kebab-case` names from SQL entities that use `snake_case`.
|
||||||
|
|
||||||
|
You can `assoc` any additional options you need into these pre-built option hash maps
|
||||||
|
and pass the combined options into any of this library's functions.
|
||||||
|
|
||||||
|
> Note: Using `camel-snake-kebab` might also be helpful if your database has `camelCase` table and column names, although you'll have to provide `:column-fn` and `:table-fn` yourself as `->camelCase` from that library. Either way, consider relying on the _default_ result set builder first and avoid converting column and table names (see [Advantages of 'snake case': portability and ubiquity](https://vvvvalvalval.github.io/posts/clojure-key-namespacing-convention-considered-harmful.html#advantages_of_'snake_case':_portability_and_ubiquity) for an interesting discussion on kebab-case vs snake_case -- I do not agree with all of the author's points in that article, particularly his position against qualified keywords, but his argument for retaining snake_case around system boundaries is compelling).
|
||||||
|
|
||||||
|
|
||||||
### `plan` & Reducing Result Sets
|
### `plan` & Reducing Result Sets
|
||||||
|
|
||||||
While the `execute!` and `execute-one!` functions are fine for retrieving result sets as data, most of the time you want to process that data efficiently without necessarily converting the entire result set into a Clojure data structure, so `next.jdbc` provides a SQL execution function that works with `reduce` and with transducers to consume the result set without the intermediate overhead of creating Clojure data structures for every row.
|
While the `execute!` and `execute-one!` functions are fine for retrieving result sets as data, most of the time you want to process that data efficiently without necessarily converting the entire result set into a Clojure data structure, so `next.jdbc` provides a SQL execution function that works with `reduce` and with transducers to consume the result set without the intermediate overhead of creating Clojure data structures for every row.
|
||||||
|
|
@ -153,7 +199,10 @@ user=> (reduce
|
||||||
14.67M
|
14.67M
|
||||||
```
|
```
|
||||||
|
|
||||||
The call to `jdbc/plan` returns an `IReduceInit` object but does not actually run the SQL. Only when the returned object is reduced is the connection obtained from the data source, the SQL executed, and the computation performed. The connection is closed automatically when the reduction is complete. The `row` in the reduction is an abstraction over the underlying (mutable) `ResultSet` object -- it is not a Clojure data structure. Because of that, you can simply access the columns via their SQL labels as shown -- you do not need to use the column-qualified name, and you do not need to worry about the database returning uppercase column names (SQL labels are not case sensitive).
|
The call to `jdbc/plan` returns an `IReduceInit` object (a "reducible collection" that requires an initial value) but does not actually run the SQL.
|
||||||
|
Only when the returned object is reduced is the connection obtained from the data source, the SQL executed, and the computation performed. The connection is closed automatically when the reduction is complete. The `row` in the reduction is an abstraction over the underlying (mutable) `ResultSet` object -- it is not a Clojure data structure. Because of that, you can simply access the columns via their SQL labels as shown -- you do not need to use the column-qualified name, and you do not need to worry about the database returning uppercase column names (SQL labels are not case sensitive).
|
||||||
|
|
||||||
|
> Note: if you want a column name transformation to be applied here, specify `:column-fn` as an option to the `plan` call.
|
||||||
|
|
||||||
Here's the same computation rewritten using `transduce`:
|
Here's the same computation rewritten using `transduce`:
|
||||||
|
|
||||||
|
|
@ -198,32 +247,58 @@ user=> (into #{}
|
||||||
#{"apple" "banana" "cucumber"}
|
#{"apple" "banana" "cucumber"}
|
||||||
```
|
```
|
||||||
|
|
||||||
|
If you want to process the rows purely for side-effects, without a result, you
|
||||||
|
can use `run!`:
|
||||||
|
|
||||||
|
```clojure
|
||||||
|
user=> (run! #(println (:product %))
|
||||||
|
(jdbc/plan ds ["select * from invoice where customer_id = ?" 100]))
|
||||||
|
apple
|
||||||
|
banana
|
||||||
|
cucumber
|
||||||
|
nil
|
||||||
|
```
|
||||||
|
|
||||||
Any operation that can perform key-based lookup can be used here without creating hash maps from the rows: `get`, `contains?`, `find` (returns a `MapEntry` of whatever key you requested and the corresponding column value), or direct keyword access as shown above. Any operation that would require a Clojure hash map, such as `assoc` or anything that invokes `seq` (`keys`, `vals`), will cause the full row to be expanded into a hash map, such as produced by `execute!` or `execute-one!`, which implements `Datafiable` and `Navigable` and supports lazy navigation via foreign keys, explained in [`datafy`, `nav`, and the `:schema` option](/doc/datafy-nav-and-schema.md).
|
Any operation that can perform key-based lookup can be used here without creating hash maps from the rows: `get`, `contains?`, `find` (returns a `MapEntry` of whatever key you requested and the corresponding column value), or direct keyword access as shown above. Any operation that would require a Clojure hash map, such as `assoc` or anything that invokes `seq` (`keys`, `vals`), will cause the full row to be expanded into a hash map, such as produced by `execute!` or `execute-one!`, which implements `Datafiable` and `Navigable` and supports lazy navigation via foreign keys, explained in [`datafy`, `nav`, and the `:schema` option](/doc/datafy-nav-and-schema.md).
|
||||||
|
|
||||||
This means that `select-keys` can be used to create a regular Clojure hash map from (a subset of) columns in the row, without realizing the row, and it will not implement `Datafiable` or `Navigable`.
|
This means that `select-keys` can be used to create a regular Clojure hash map from (a subset of) columns in the row, without realizing the row, and it will not implement `Datafiable` or `Navigable`.
|
||||||
|
|
||||||
If you wish to create a Clojure hash map that supports that lazy navigation, you can call `next.jdbc.result-set/datafiable-row`, passing in the current row, a `connectable`, and an options hash map, just as you passed into `plan`. Compare the difference in output between these three expressions:
|
If you wish to create a Clojure hash map that supports that lazy navigation, you can call `next.jdbc.result-set/datafiable-row`, passing in the current row, a `connectable`, and an options hash map, just as you passed into `plan`. Compare the difference in output between these four expressions (see below for a simpler way to do this):
|
||||||
|
|
||||||
```clojure
|
```clojure
|
||||||
|
;; selects specific keys (as simple keywords):
|
||||||
user=> (into []
|
user=> (into []
|
||||||
(map #(select-keys % [:id :product :unit_price :unit_cost :customer_id]))
|
(map #(select-keys % [:id :product :unit_price :unit_count :customer_id]))
|
||||||
(jdbc/plan ds ["select * from invoice where customer_id = ?" 100]))
|
(jdbc/plan ds ["select * from invoice where customer_id = ?" 100]))
|
||||||
|
;; selects specific keys (as qualified keywords):
|
||||||
user=> (into []
|
user=> (into []
|
||||||
(map #(select-keys % [:invoice/id :invoice/product
|
(map #(select-keys % [:invoice/id :invoice/product
|
||||||
:invoice/unit_price :invoice/unit_cost
|
:invoice/unit_price :invoice/unit_count
|
||||||
:invoice/customer_id]))
|
:invoice/customer_id]))
|
||||||
(jdbc/plan ds ["select * from invoice where customer_id = ?" 100]))
|
(jdbc/plan ds ["select * from invoice where customer_id = ?" 100]))
|
||||||
|
;; selects specific keys (as qualified keywords -- ignoring the table name):
|
||||||
|
user=> (into []
|
||||||
|
(map #(select-keys % [:foo/id :bar/product
|
||||||
|
:quux/unit_price :wibble/unit_count
|
||||||
|
:blah/customer_id]))
|
||||||
|
(jdbc/plan ds ["select * from invoice where customer_id = ?" 100]))
|
||||||
|
;; do not do this:
|
||||||
|
user=> (into []
|
||||||
|
(map #(into {} %))
|
||||||
|
(jdbc/plan ds ["select * from invoice where customer_id = ?" 100]))
|
||||||
|
;; do this if you just want realized rows with default qualified names:
|
||||||
user=> (into []
|
user=> (into []
|
||||||
(map #(rs/datafiable-row % ds {}))
|
(map #(rs/datafiable-row % ds {}))
|
||||||
(jdbc/plan ds ["select * from invoice where customer_id = ?" 100]))
|
(jdbc/plan ds ["select * from invoice where customer_id = ?" 100]))
|
||||||
```
|
```
|
||||||
|
|
||||||
The latter produces a vector of hash maps, just like the result of `execute!`, where each "row" follows the case conventions of the database, the keys are qualified by the table name, and the hash map is datafiable and navigable.
|
The latter produces a vector of hash maps, just like the result of `execute!`, where each "row" follows the case conventions of the database, the keys are qualified by the table name, and the hash map is datafiable and navigable. The third expression produces a result that looks identical but has stripped all the metadata away: it has still called `rs/datafiable-row` to fully-realize a datafiable and navigable hash map but it has then "poured" that into a new, empty hash map, losing the metadata.
|
||||||
|
|
||||||
In addition to the hash map operations described above, the abstraction over the `ResultSet` can also respond to a couple of functions in `next.jdbc.result-set`:
|
In addition to the hash map operations described above, the abstraction over the `ResultSet` can also respond to a couple of functions in `next.jdbc.result-set`:
|
||||||
|
|
||||||
* `next.jdbc.result-set/row-number` - returns the 1-based row number, by calling `.getRow()` on the `ResultSet`,
|
* `next.jdbc.result-set/row-number` - returns the 1-based row number, by calling `.getRow()` on the `ResultSet`,
|
||||||
* `next.jdbc.result-set/column-names` - returns a vector of column names from the `ResultSet`, as created by the result set builder specified.
|
* `next.jdbc.result-set/column-names` - returns a vector of column names from the `ResultSet`, as created by the result set builder specified,
|
||||||
|
* `next.jdbc.result-set/metadata` - returns the `ResultSetMetaData` object, datafied (so the result will depend on whether you have required `next.jdbc.datafy`).
|
||||||
|
|
||||||
> Note: Apache Derby requires the following options to be provided in order to call `.getRow()` (and therefore `row-number`): `{:concurrency :read-only, :cursors :close, :result-type :scroll-insensitive}`
|
> Note: Apache Derby requires the following options to be provided in order to call `.getRow()` (and therefore `row-number`): `{:concurrency :read-only, :cursors :close, :result-type :scroll-insensitive}`
|
||||||
|
|
||||||
|
|
@ -233,6 +308,97 @@ The order of the column names returned by `column-names` matches SQL's natural o
|
||||||
|
|
||||||
> Note: since `plan` expects you to process the result set via reduction, you should not use it for DDL or for SQL statements that only produce update counts.
|
> Note: since `plan` expects you to process the result set via reduction, you should not use it for DDL or for SQL statements that only produce update counts.
|
||||||
|
|
||||||
|
As of 1.1.588, two helper functions are available to make some `plan` operations easier:
|
||||||
|
|
||||||
|
* `next.jdbc.plan/select-one!` -- reduces over `plan` and returns part of just the first row,
|
||||||
|
* `next.jdbc.plan/select!` -- reduces over `plan` and returns a sequence of parts of each row.
|
||||||
|
|
||||||
|
> Note: in both those cases, an appropriate initial value is supplied to the `reduce` (since `plan` returns an `IReduceInit` object).
|
||||||
|
|
||||||
|
`select!` accepts a vector of column names to extract or a function to apply to each row. It is equivalent to the following:
|
||||||
|
|
||||||
|
```clojure
|
||||||
|
;; select! with vector of column names:
|
||||||
|
user=> (into [] (map #(select-keys % cols)) (jdbc/plan ...))
|
||||||
|
;; select! with a function:
|
||||||
|
user=> (into [] (map f) (jdbc/plan ...))
|
||||||
|
```
|
||||||
|
|
||||||
|
The `:into` option lets you override the default of `[]` as the first argument to `into`.
|
||||||
|
|
||||||
|
`select-one!` performs the same transformation on just the first row returned from a reduction over `plan`, equivalent to the following:
|
||||||
|
|
||||||
|
```clojure
|
||||||
|
;; select-one! with vector of column names:
|
||||||
|
user=> (reduce (fn [_ row] (reduced (select-keys row cols))) nil (jdbc/plan ...))
|
||||||
|
;; select-one! with a function:
|
||||||
|
user=> (reduce (fn [_ row] (reduced (f row))) nil (jdbc/plan ...))
|
||||||
|
```
|
||||||
|
|
||||||
|
For example:
|
||||||
|
|
||||||
|
```clojure
|
||||||
|
;; select columns:
|
||||||
|
user=> (plan/select-one!
|
||||||
|
ds [:n] ["select count(*) as n from invoice where customer_id = ?" 100])
|
||||||
|
{:n 3}
|
||||||
|
;; apply a function:
|
||||||
|
user=> (plan/select-one!
|
||||||
|
ds :n ["select count(*) as n from invoice where customer_id = ?" 100])
|
||||||
|
3
|
||||||
|
```
|
||||||
|
|
||||||
|
Here are some of the above sequence-producing operations, showing their `select!` equivalent:
|
||||||
|
|
||||||
|
```clojure
|
||||||
|
user=> (require '[next.jdbc.plan :as plan])
|
||||||
|
nil
|
||||||
|
user=> (into #{}
|
||||||
|
(map :product)
|
||||||
|
(jdbc/plan ds ["select * from invoice where customer_id = ?" 100]))
|
||||||
|
#{"apple" "banana" "cucumber"}
|
||||||
|
;; or:
|
||||||
|
user=> (plan/select! ds
|
||||||
|
:product
|
||||||
|
["select * from invoice where customer_id = ?" 100]
|
||||||
|
            {:into #{}}) ; produce a set, rather than a vector
|
||||||
|
#{"apple" "banana" "cucumber"}
|
||||||
|
;; selects specific keys (as simple keywords):
|
||||||
|
user=> (into []
|
||||||
|
(map #(select-keys % [:id :product :unit_price :unit_count :customer_id]))
|
||||||
|
(jdbc/plan ds ["select * from invoice where customer_id = ?" 100]))
|
||||||
|
;; or:
|
||||||
|
user=> (plan/select! ds
|
||||||
|
[:id :product :unit_price :unit_count :customer_id]
|
||||||
|
["select * from invoice where customer_id = ?" 100])
|
||||||
|
;; selects specific keys (as qualified keywords):
|
||||||
|
user=> (into []
|
||||||
|
(map #(select-keys % [:invoice/id :invoice/product
|
||||||
|
:invoice/unit_price :invoice/unit_count
|
||||||
|
:invoice/customer_id]))
|
||||||
|
(jdbc/plan ds ["select * from invoice where customer_id = ?" 100]))
|
||||||
|
;; or:
|
||||||
|
user=> (plan/select! ds
|
||||||
|
[:invoice/id :invoice/product
|
||||||
|
:invoice/unit_price :invoice/unit_count
|
||||||
|
:invoice/customer_id]
|
||||||
|
["select * from invoice where customer_id = ?" 100])
|
||||||
|
;; selects specific keys (as qualified keywords -- ignoring the table name):
|
||||||
|
user=> (into []
|
||||||
|
(map #(select-keys % [:foo/id :bar/product
|
||||||
|
:quux/unit_price :wibble/unit_count
|
||||||
|
:blah/customer_id]))
|
||||||
|
(jdbc/plan ds ["select * from invoice where customer_id = ?" 100]))
|
||||||
|
;; or:
|
||||||
|
user=> (plan/select! ds
|
||||||
|
[:foo/id :bar/product
|
||||||
|
:quux/unit_price :wibble/unit_count
|
||||||
|
:blah/customer_id]
|
||||||
|
["select * from invoice where customer_id = ?" 100])
|
||||||
|
```
|
||||||
|
|
||||||
|
> Note: you need to be careful when using stateful transducers, such as `partition-by`, when reducing over the result of `plan`. Since `plan` returns an `IReduceInit`, the resource management (around the `ResultSet`) only applies to the `reduce` operation: many stateful transducers have a completing function that will access elements of the result sequence -- and this will usually fail after the reduction has cleaned up the resources. This is an inherent problem with stateful transducers over resource-managing reductions with no good solution.
|
||||||
|
|
||||||
## Datasources, Connections & Transactions
|
## Datasources, Connections & Transactions
|
||||||
|
|
||||||
In the examples above, we created a datasource and then passed it into each function call. When `next.jdbc` is given a datasource, it creates a `java.sql.Connection` from it, uses it for the SQL operation (by creating and populating a `java.sql.PreparedStatement` from the connection and the SQL string and parameters passed in), and then closes it. If you're not using a connection pooling datasource (see below), that can be quite an overhead: setting up database connections to remote servers is not cheap!
|
In the examples above, we created a datasource and then passed it into each function call. When `next.jdbc` is given a datasource, it creates a `java.sql.Connection` from it, uses it for the SQL operation (by creating and populating a `java.sql.PreparedStatement` from the connection and the SQL string and parameters passed in), and then closes it. If you're not using a connection pooling datasource (see below), that can be quite an overhead: setting up database connections to remote servers is not cheap!
|
||||||
|
|
@ -255,20 +421,52 @@ If any of these operations throws an exception, the connection will still be clo
|
||||||
(into [] (map :column) (jdbc/plan tx ...)))
|
(into [] (map :column) (jdbc/plan tx ...)))
|
||||||
```
|
```
|
||||||
|
|
||||||
If `with-transaction` is given a datasource, it will create and close the connection for you. If you pass in an existing connection, `with-transaction` will set up a transaction on that connection and, after either committing or rolling back the transaction, will restore the state of the connection and leave it open:
|
`with-transaction` behaves somewhat like Clojure's `with-open` macro: it will (generally) create a new `Connection` for you (from `ds`) and set up a transaction on it and bind it to `tx`; if the code in the body executes successfully, it will commit the transaction and close the `Connection`; if the code in the body throws an exception, it will rollback the transaction, but still close the `Connection`.
|
||||||
|
|
||||||
|
If `ds` is a `Connection`, `with-transaction` will just bind `tx` to that but will set up a transaction on that `Connection`; run the code in the body and either commit or rollback the transaction; it will leave the `Connection` open (but try to restore the state of the `Connection`).
|
||||||
|
|
||||||
|
If `ds` is a datasource, `with-transaction` will call `get-connection` on it, bind `tx` to that `Connection` and set up a transaction; run the code in the body and either commit or rollback the transaction; close the `Connection`.
|
||||||
|
|
||||||
|
If `ds` is something else, `with-transaction` will call `get-datasource` on it first and then proceed as above.
|
||||||
|
|
||||||
|
Here's what will happen in the case where `with-transaction` is given a `Connection`:
|
||||||
|
|
||||||
```clojure
|
```clojure
|
||||||
(with-open [con (jdbc/get-connection ds)]
|
(with-open [con (jdbc/get-connection ds)]
|
||||||
(jdbc/execute! con ...) ; committed
|
(jdbc/execute! con ...) ; auto-committed
|
||||||
|
|
||||||
(jdbc/with-transaction [tx con] ; will commit or rollback this group:
|
(jdbc/with-transaction [tx con] ; will commit or rollback this group:
|
||||||
|
;; note: tx is bound to the same Connection object as con
|
||||||
(jdbc/execute! tx ...)
|
(jdbc/execute! tx ...)
|
||||||
(jdbc/execute! tx ...)
|
(jdbc/execute! tx ...)
|
||||||
(into [] (map :column) (jdbc/plan tx ...)))
|
(into [] (map :column) (jdbc/plan tx ...)))
|
||||||
(jdbc/execute! con ...)) ; committed
|
|
||||||
|
(jdbc/execute! con ...)) ; auto-committed
|
||||||
```
|
```
|
||||||
|
|
||||||
You can read more about [working with transactions](/doc/transactions.md) further on in the documentation.
|
You can read more about [working with transactions](/doc/transactions.md) further on in the documentation.
|
||||||
|
|
||||||
|
> Note: Because `get-datasource` and `get-connection` return plain JDBC objects (`javax.sql.DataSource` and `java.sql.Connection` respectively), `next.jdbc/with-options` and `next.jdbc/with-logging` (see **Logging** below) cannot flow options across those calls, so if you are explicitly managing connections or transactions as above, you would need to have local bindings for the wrapped versions:
|
||||||
|
|
||||||
|
```clojure
|
||||||
|
(with-open [con (jdbc/get-connection ds)]
|
||||||
|
(let [con-opts (jdbc/with-options con some-options)]
|
||||||
|
(jdbc/execute! con-opts ...) ; auto-committed
|
||||||
|
|
||||||
|
(jdbc/with-transaction [tx con-opts] ; will commit or rollback this group:
|
||||||
|
(let [tx-opts (jdbc/with-options tx (:options con-opts))]
|
||||||
|
(jdbc/execute! tx-opts ...)
|
||||||
|
(jdbc/execute! tx-opts ...)
|
||||||
|
(into [] (map :column) (jdbc/plan tx-opts ...))))
|
||||||
|
|
||||||
|
(jdbc/execute! con-opts ...))) ; auto-committed
|
||||||
|
```
|
||||||
|
|
||||||
|
As of 1.3.894, you can use `next.jdbc/with-transaction+options` instead,
|
||||||
|
which will automatically rewrap the `Connection` with the options from the
|
||||||
|
initial transactable. Be aware that means you cannot use Java interop on the
|
||||||
|
new connectable because it is no longer a plain Java `java.sql.Connection` object.
|
||||||
|
|
||||||
### Prepared Statement Caveat
|
### Prepared Statement Caveat
|
||||||
|
|
||||||
Not all databases support using a `PreparedStatement` for every type of SQL operation. You might have to create a `java.sql.Statement` instead, directly from a `java.sql.Connection` and use that, without parameters, in `plan`, `execute!`, or `execute-one!`. See the following example:
|
Not all databases support using a `PreparedStatement` for every type of SQL operation. You might have to create a `java.sql.Statement` instead, directly from a `java.sql.Connection` and use that, without parameters, in `plan`, `execute!`, or `execute-one!`. See the following example:
|
||||||
|
|
@ -289,9 +487,9 @@ Not all databases support using a `PreparedStatement` for every type of SQL oper
|
||||||
First, you need to add the connection pooling library as a dependency, e.g.,
|
First, you need to add the connection pooling library as a dependency, e.g.,
|
||||||
|
|
||||||
```clojure
|
```clojure
|
||||||
com.zaxxer/HikariCP {:mvn/version "3.3.1"}
|
com.zaxxer/HikariCP {:mvn/version "6.2.1"}
|
||||||
;; or:
|
;; or:
|
||||||
com.mchange/c3p0 {:mvn/version "0.9.5.4"}
|
com.mchange/c3p0 {:mvn/version "0.10.1"}
|
||||||
```
|
```
|
||||||
|
|
||||||
_Check those libraries' documentation for the latest version to use!_
|
_Check those libraries' documentation for the latest version to use!_
|
||||||
|
|
@ -309,6 +507,32 @@ Then import the appropriate classes into your code:
|
||||||
|
|
||||||
Finally, create the connection pooled datasource. `db-spec` here contains the regular `next.jdbc` options (`:dbtype`, `:dbname`, and maybe `:host`, `:port`, `:classname` etc -- or the `:jdbcUrl` format mentioned above). Those are used to construct the JDBC URL that is passed into the datasource object (by calling `.setJdbcUrl` on it). You can also specify any of the connection pooling library's options, as mixed case keywords corresponding to any simple setter methods on the class being passed in, e.g., `:connectionTestQuery`, `:maximumPoolSize` (HikariCP), `:maxPoolSize`, `:preferredTestQuery` (c3p0).
|
Finally, create the connection pooled datasource. `db-spec` here contains the regular `next.jdbc` options (`:dbtype`, `:dbname`, and maybe `:host`, `:port`, `:classname` etc -- or the `:jdbcUrl` format mentioned above). Those are used to construct the JDBC URL that is passed into the datasource object (by calling `.setJdbcUrl` on it). You can also specify any of the connection pooling library's options, as mixed case keywords corresponding to any simple setter methods on the class being passed in, e.g., `:connectionTestQuery`, `:maximumPoolSize` (HikariCP), `:maxPoolSize`, `:preferredTestQuery` (c3p0).
|
||||||
|
|
||||||
|
In addition, for HikariCP, you can specify properties to be applied to the underlying `DataSource` itself by passing `:dataSourceProperties` with a hash map containing those properties, such as `:socketTimeout`:
|
||||||
|
|
||||||
|
```clojure
|
||||||
|
;; assumes next.jdbc.connection has been required as connection
|
||||||
|
(connection/->pool com.zaxxer.hikari.HikariDataSource
|
||||||
|
{:dbtype "postgres" :dbname "thedb" :username "dbuser" :password "secret"
|
||||||
|
:dataSourceProperties {:socketTimeout 30}})
|
||||||
|
```
|
||||||
|
|
||||||
|
_(under the hood, `java.data` converts that hash map to a `java.util.Properties` object with `String` keys and `String` values)_
|
||||||
|
|
||||||
|
If you need to pass in extra connection URL parameters, it can be easier to use
|
||||||
|
`next.jdbc.connection/jdbc-url` to construct the URL, e.g.,
|
||||||
|
|
||||||
|
```clojure
|
||||||
|
(connection/->pool com.zaxxer.hikari.HikariDataSource
|
||||||
|
{:jdbcUrl
|
||||||
|
(connection/jdbc-url {:dbtype "mysql" :dbname "thedb" :useSSL false})
|
||||||
|
:username "dbuser" :password "secret"})
|
||||||
|
```
|
||||||
|
|
||||||
|
Here we pass `:useSSL false` to `jdbc-url` so that it ends up in the
|
||||||
|
connection string, but pass `:username` and `:password` for the pool itself.
|
||||||
|
|
||||||
|
> Note: both HikariCP and c3p0 defer validation of the settings until a connection is requested. If you want to ensure that your datasource is set up correctly, and the database is reachable, when you first create the connection pool, you will need to call `jdbc/get-connection` on it (and then close that connection and return it to the pool). This will also ensure that the pool is fully initialized. See the examples below.
|
||||||
|
|
||||||
Some important notes regarding HikariCP:
|
Some important notes regarding HikariCP:
|
||||||
|
|
||||||
* Authentication credentials must use `:username` (if you are using c3p0 or regular, non-pooled, connections, then the db-spec hash map must contain `:user`).
|
* Authentication credentials must use `:username` (if you are using c3p0 or regular, non-pooled, connections, then the db-spec hash map must contain `:user`).
|
||||||
|
|
@ -319,7 +543,12 @@ You will generally want to create the connection pooled datasource at the start
|
||||||
|
|
||||||
```clojure
|
```clojure
|
||||||
(defn -main [& args]
|
(defn -main [& args]
|
||||||
|
;; db-spec must include :username
|
||||||
(with-open [^HikariDataSource ds (connection/->pool HikariDataSource db-spec)]
|
(with-open [^HikariDataSource ds (connection/->pool HikariDataSource db-spec)]
|
||||||
|
;; this code initializes the pool and performs a validation check:
|
||||||
|
(.close (jdbc/get-connection ds))
|
||||||
|
;; otherwise that validation check is deferred until the first connection
|
||||||
|
;; is requested in a regular operation:
|
||||||
(jdbc/execute! ds ...)
|
(jdbc/execute! ds ...)
|
||||||
(jdbc/execute! ds ...)
|
(jdbc/execute! ds ...)
|
||||||
(do-other-stuff ds args)
|
(do-other-stuff ds args)
|
||||||
|
|
@ -327,6 +556,10 @@ You will generally want to create the connection pooled datasource at the start
|
||||||
;; or:
|
;; or:
|
||||||
(defn -main [& args]
|
(defn -main [& args]
|
||||||
(with-open [^PooledDataSource ds (connection/->pool ComboPooledDataSource db-spec)]
|
(with-open [^PooledDataSource ds (connection/->pool ComboPooledDataSource db-spec)]
|
||||||
|
;; this code initializes the pool and performs a validation check:
|
||||||
|
(.close (jdbc/get-connection ds))
|
||||||
|
;; otherwise that validation check is deferred until the first connection
|
||||||
|
;; is requested in a regular operation:
|
||||||
(jdbc/execute! ds ...)
|
(jdbc/execute! ds ...)
|
||||||
(jdbc/execute! ds ...)
|
(jdbc/execute! ds ...)
|
||||||
(do-other-stuff ds args)
|
(do-other-stuff ds args)
|
||||||
|
|
@ -362,16 +595,24 @@ If you are using [Component](https://github.com/stuartsierra/component), a conne
|
||||||
(component/stop ds)))))
|
(component/stop ds)))))
|
||||||
```
|
```
|
||||||
|
|
||||||
|
If you want to either modify the connection pooled datasource after it is
|
||||||
|
created, or want to perform some database initialization, you can pass a
|
||||||
|
function as `:init-fn` in the `db-spec` hash map. The `component` function
|
||||||
|
will arrange for that initialization function to be invoked on the newly-created
|
||||||
|
datasource whenever `start` is called on the Component returned.
|
||||||
|
|
||||||
## Working with Additional Data Types
|
## Working with Additional Data Types
|
||||||
|
|
||||||
By default, `next.jdbc` relies on the JDBC driver to handle all data type conversions when reading from a result set (to produce Clojure values from SQL values) or setting parameters (to produce SQL values from Clojure values). Sometimes that means that you will get back a database-specific Java object that would need to be manually converted to a Clojure data structure, or that certain database column types require you to manually construct the appropriate database-specific Java object to pass into a SQL operation. You can usually automate those conversions using either the [`ReadableColumn` protocol](/doc/result-set-builders.md#readablecolumn) (for converting database-specific types to Clojure values) or the [`SettableParameter` protocol](/doc/prepared-statements.md#prepared-statement-parameters) (for converting Clojure values to database-specific types).
|
By default, `next.jdbc` relies on the JDBC driver to handle all data type conversions when reading from a result set (to produce Clojure values from SQL values) or setting parameters (to produce SQL values from Clojure values). Sometimes that means that you will get back a database-specific Java object that would need to be manually converted to a Clojure data structure, or that certain database column types require you to manually construct the appropriate database-specific Java object to pass into a SQL operation. You can usually automate those conversions using either the [`ReadableColumn` protocol](/doc/result-set-builders.md#readablecolumn) (for converting database-specific types to Clojure values) or the [`SettableParameter` protocol](/doc/prepared-statements.md#prepared-statement-parameters) (for converting Clojure values to database-specific types).
|
||||||
|
|
||||||
In particular, PostgreSQL does not seem to perform a conversion from `java.util.Date` to a SQL data type automatically. You must `require` the [`next.jdbc.date-time` namespace](https://cljdoc.org/d/seancorfield/next.jdbc/CURRENT/api/next.jdbc.date-time) to enable that conversion.
|
In particular, PostgreSQL does not seem to perform a conversion from `java.util.Date` to a SQL data type automatically. You can `require` the [`next.jdbc.date-time` namespace](https://cljdoc.org/d/com.github.seancorfield/next.jdbc/CURRENT/api/next.jdbc.date-time) to enable that conversion.
|
||||||
|
|
||||||
If you are working with Java Time, some JDBC drivers will automatically convert `java.time.Instant` (and `java.time.LocalDate` and `java.time.LocalDateTime`) to a SQL data type automatically, but others will not. Requiring `next.jdbc.date-time` will enable those automatic conversions for all databases.
|
If you are working with Java Time, some JDBC drivers will automatically convert `java.time.Instant` (and `java.time.LocalDate` and `java.time.LocalDateTime`) to a SQL data type automatically, but others will not. Requiring `next.jdbc.date-time` will enable those automatic conversions for all databases.
|
||||||
|
|
||||||
> Note: `next.jdbc.date-time` also provides functions you can call to enable automatic conversion of SQL date/timestamp types to Clojure data types when reading result sets. If you need specific conversions beyond that to happen automatically, consider extending the `ReadableColumn` protocol, mentioned above.
|
> Note: `next.jdbc.date-time` also provides functions you can call to enable automatic conversion of SQL date/timestamp types to Clojure data types when reading result sets. If you need specific conversions beyond that to happen automatically, consider extending the `ReadableColumn` protocol, mentioned above.
|
||||||
|
|
||||||
|
The `next.jdbc.types` namespace provides over three dozen convenience functions for "type hinting" values so that the JDBC driver might automatically handle some conversions that the default parameter setting function does not. Each function is named for the corresponding SQL type, prefixed by `as-`: `as-bigint`, `as-other`, `as-real`, etc. An example of where this helps is when dealing with PostgreSQL enumerated types: the default behavior, when passed a string that should correspond to an enumerated type, is to throw an exception that `column "..." is of type ... but expression is of type character varying`. You can wrap such strings with `(as-other "...")` which tells PostgreSQL to treat this as `java.sql.Types/OTHER` when setting the parameter.
|
||||||
|
|
||||||
## Processing Database Metadata
|
## Processing Database Metadata
|
||||||
|
|
||||||
JDBC provides several features that let you introspect the database to obtain lists of tables, views, and so on. `next.jdbc` does not provide any specific functions for this but you can easily get this metadata from a `java.sql.Connection` and turn it into Clojure data as follows:
|
JDBC provides several features that let you introspect the database to obtain lists of tables, views, and so on. `next.jdbc` does not provide any specific functions for this but you can easily get this metadata from a `java.sql.Connection` and turn it into Clojure data as follows:
|
||||||
|
|
@ -387,6 +628,126 @@ JDBC provides several features that let you introspect the database to obtain li
|
||||||
Several methods on `DatabaseMetaData` return a `ResultSet` object, e.g., `.getCatalogs()`, `.getClientInfoProperties()`, `.getSchemas()`.
|
Several methods on `DatabaseMetaData` return a `ResultSet` object, e.g., `.getCatalogs()`, `.getClientInfoProperties()`, `.getSchemas()`.
|
||||||
All of those can be handled in a similar manner to the above. See the [Oracle documentation for `java.sql.DatabaseMetaData`](https://docs.oracle.com/en/java/javase/11/docs/api/java.sql/java/sql/DatabaseMetaData.html) (Java 11) for more details.
|
All of those can be handled in a similar manner to the above. See the [Oracle documentation for `java.sql.DatabaseMetaData`](https://docs.oracle.com/en/java/javase/11/docs/api/java.sql/java/sql/DatabaseMetaData.html) (Java 11) for more details.
|
||||||
|
|
||||||
|
If you are working with a generalized datasource that may be a `Connection`, a `DataSource`,
|
||||||
|
or a wrapped connectable (via something like `with-options` or `with-transaction`), you can
|
||||||
|
write generic, `Connection`-based code using `on-connection` which will reuse a `Connection`
|
||||||
|
if one is passed or create a new one if needed (and automatically close it afterward):
|
||||||
|
|
||||||
|
```clojure
|
||||||
|
(on-connection [con ds]
|
||||||
|
(-> (.getMetaData con) ; produces java.sql.DatabaseMetaData
|
||||||
|
;; return a java.sql.ResultSet describing all tables and views:
|
||||||
|
(.getTables nil nil nil (into-array ["TABLE" "VIEW"]))
|
||||||
|
(rs/datafiable-result-set ds opts)))
|
||||||
|
```
|
||||||
|
|
||||||
|
> Note: to avoid confusion and/or incorrect usage, you cannot pass options to `on-connection` because they would be ignored in some cases (existing `Connection` or a wrapped `Connection`).
|
||||||
|
|
||||||
|
As of 1.3.894, if you want the options from a wrapped connectable to flow
|
||||||
|
through to the new connectable inside `on-connection`, you can use the
|
||||||
|
`on-connection+options` variant of the macro. This will automatically rewrap
|
||||||
|
the connectable produced with the options from the initial connectable.
|
||||||
|
Be aware that means you cannot
|
||||||
|
use plain Java interop inside the body of the macro because the connectable
|
||||||
|
is no longer a plain Java `java.sql.Connection` object.
|
||||||
|
|
||||||
|
## Logging
|
||||||
|
|
||||||
|
Sometimes it is convenient to have database operations logged automatically. `next.jdbc/with-logging`
|
||||||
|
provides a way to wrap a datasource (or connection) so that operations on it will be logged via
|
||||||
|
functions you provide.
|
||||||
|
|
||||||
|
There are two logging points:
|
||||||
|
* Logging the SQL and parameters prior to a database operation,
|
||||||
|
* Logging the result of a database operation.
|
||||||
|
|
||||||
|
`next.jdbc/with-logging` accepts two or three arguments and returns a connectable that can
|
||||||
|
be used with `plan`, `execute!`, `execute-one!`, `prepare`, or any of the "friendly SQL
|
||||||
|
functions". Since it uses a similar wrapping mechanism to `next.jdbc/with-options`, the
|
||||||
|
same caveats apply -- see [**Datasources, Connections & Transactions**](#datasources-connections--transactions) above for details.
|
||||||
|
|
||||||
|
### Logging SQL and Parameters
|
||||||
|
|
||||||
|
```clojure
|
||||||
|
(let [log-ds (jdbc/with-logging ds my-sql-logger)]
|
||||||
|
(jdbc/execute! log-ds ["some SQL" "and" "params"])
|
||||||
|
...
|
||||||
|
(jdbc/execute! log-ds ["more SQL" "other" "params"]))
|
||||||
|
```
|
||||||
|
|
||||||
|
The `my-sql-logger` function will be invoked for each database operation, with two arguments:
|
||||||
|
* The fully-qualified symbol identifying the operation,
|
||||||
|
* The vector containing the SQL string followed by the parameters.
|
||||||
|
|
||||||
|
The symbol will be one of: `next.jdbc/plan`, `next.jdbc/execute!`, `next.jdbc/execute-one!`,
|
||||||
|
or `next.jdbc/prepare`. The friendly SQL functions invoke `execute!` or `execute-one!` under
|
||||||
|
the hood, so that is how they will be logged.
|
||||||
|
|
||||||
|
The logging function can do anything it wants with the SQL and parameters. If you are logging
|
||||||
|
parameter values, consider sensitive data that you might be passing in.
|
||||||
|
|
||||||
|
### Logging Results
|
||||||
|
|
||||||
|
```clojure
|
||||||
|
(let [log-ds (jdbc/with-logging ds my-sql-logger my-result-logger)]
|
||||||
|
(jdbc/execute! log-ds ["some SQL" "and" "params"])
|
||||||
|
...
|
||||||
|
(jdbc/execute! log-ds ["more SQL" "other" "params"]))
|
||||||
|
```
|
||||||
|
|
||||||
|
In addition to calling `my-sql-logger` as described above, this will also call `my-result-logger`
|
||||||
|
for `execute!` and `execute-one!` operations (`plan` and `prepare` do not execute database
|
||||||
|
operations directly so they do not produce results). `my-result-logger` will be invoked with
|
||||||
|
three arguments:
|
||||||
|
* The fully-qualified symbol identifying the operation,
|
||||||
|
* A "state" argument (the result of calling `my-sql-logger`),
|
||||||
|
* The result set data structure, if the call succeeded, or the exception if it failed.
|
||||||
|
|
||||||
|
The return value of the result logger function is ignored.
|
||||||
|
|
||||||
|
The symbol will be one of: `next.jdbc/execute!` or `next.jdbc/execute-one!`. The friendly
|
||||||
|
SQL functions invoke `execute!` or `execute-one!` under the hood, so that is how they will
|
||||||
|
be logged.
|
||||||
|
|
||||||
|
The "state" argument allows you to return data from the first logging function, such as the
|
||||||
|
current time, that can be consumed by the second logging function, so that you can calculate
|
||||||
|
how long an `execute!` or `execute-one!` operation took. If the first logging function
|
||||||
|
returns `nil`, that will be passed as the second argument to your second logging function.
|
||||||
|
|
||||||
|
The result set data structure could be arbitrarily large. It will generally be a vector
|
||||||
|
for calls to `execute!` or a hash map for calls to `execute-one!`, but its shape is determined
|
||||||
|
by any `:builder-fn` options in effect. You should check if `(instance? Throwable result)`
|
||||||
|
to see if the call failed and the logger has been called with the thrown exception.
|
||||||
|
|
||||||
|
For `plan` and `prepare` calls, only the first logging function is invoked (and the return
|
||||||
|
value is ignored). You can use the symbol passed in to determine this.
|
||||||
|
|
||||||
|
### Naive Logging with Timing
|
||||||
|
|
||||||
|
This example prints all SQL and parameters to `*out*` along with millisecond timing and
|
||||||
|
results, if a result set is available:
|
||||||
|
|
||||||
|
```clojure
|
||||||
|
dev=> (def lds (jdbc/with-logging ds
|
||||||
|
#_=> (fn [sym sql-params]
|
||||||
|
#_=> (prn sym sql-params)
|
||||||
|
#_=> (System/currentTimeMillis))
|
||||||
|
#_=> (fn [sym state result]
|
||||||
|
#_=> (prn sym
|
||||||
|
#_=> (- (System/currentTimeMillis) state)
|
||||||
|
#_=> (if (map? result) result (count result))))))
|
||||||
|
#'dev/lds
|
||||||
|
dev=> (sql/find-by-keys lds :foo {:name "Person"})
|
||||||
|
next.jdbc/execute! ["SELECT * FROM foo WHERE name = ?" "Person"]
|
||||||
|
next.jdbc/execute! 813 1
|
||||||
|
[#:FOO{:NAME "Person"}]
|
||||||
|
dev=>
|
||||||
|
```
|
||||||
|
|
||||||
|
A more sophisticated example could use `sym` to decide whether to just log the SQL and
|
||||||
|
some parameter values or return the current time and the SQL and parameters, so that the
|
||||||
|
result logging could log the SQL, parameters, _and_ result set information with timing.
|
||||||
|
|
||||||
## Support from Specs
|
## Support from Specs
|
||||||
|
|
||||||
As you are developing with `next.jdbc`, it can be useful to have assistance from `clojure.spec` in checking calls to `next.jdbc`'s functions, to provide explicit argument checking and/or better error messages for some common mistakes, e.g., trying to pass a plain SQL string where a vector (containing a SQL string, and no parameters) is expected.
|
As you are developing with `next.jdbc`, it can be useful to have assistance from `clojure.spec` in checking calls to `next.jdbc`'s functions, to provide explicit argument checking and/or better error messages for some common mistakes, e.g., trying to pass a plain SQL string where a vector (containing a SQL string, and no parameters) is expected.
|
||||||
|
|
@ -408,5 +769,3 @@ A convenience function also exists to revert that instrumentation:
|
||||||
```clojure
|
```clojure
|
||||||
(specs/unstrument) ; undoes the instrumentation of all next.jdbc API functions
|
(specs/unstrument) ; undoes the instrumentation of all next.jdbc API functions
|
||||||
```
|
```
|
||||||
|
|
||||||
[Friendly SQL Functions :>](/doc/friendly-sql-functions.md)
|
|
||||||
|
|
|
||||||
|
|
@ -18,28 +18,44 @@ If you used `:as-arrays? true`, you will most likely want to use a `:builder-fn`
|
||||||
|
|
||||||
> Note: When `next.jdbc` cannot obtain a `ResultSet` object and returns `{:next.jdbc/count N}` instead, these builder functions are not applied -- the `:builder-fn` option is not used in that situation.
|
> Note: When `next.jdbc` cannot obtain a `ResultSet` object and returns `{:next.jdbc/count N}` instead, these builder functions are not applied -- the `:builder-fn` option is not used in that situation.
|
||||||
|
|
||||||
|
### Transactions
|
||||||
|
|
||||||
|
Although both libraries support transactions -- via `clojure.java.jdbc/with-db-transaction` and
|
||||||
|
via `next.jdbc/with-transaction` -- there are some important considerations when you are migrating:
|
||||||
|
|
||||||
|
* `clojure.java.jdbc/with-db-transaction` allows nested calls to be present but it tracks the "depth" of such calls and "nested" calls are simply ignored (because transactions do not actually nest in JDBC).
|
||||||
|
* `next.jdbc/with-transaction` will attempt to set up a transaction on an existing `Connection` if that is what it is passed (otherwise a new `Connection` is created and a new transaction set up on that). That means that if you have nested calls, the inner transaction will commit (or rollback) all the way to the outermost transaction. `next.jdbc` "trusts" the programmer to know what they are doing. You can bind `next.jdbc.transaction/*nested-tx*` to `:ignore` if you want the same behavior as `clojure.java.jdbc` where all nested calls are ignored and the outermost transaction is in full control. _Note that this is a per-thread "global" setting and not related to just a single connection, so you can't use this setting if you are working with multiple databases in the same dynamic thread context (`binding`)._
|
||||||
|
* Every operation in `clojure.java.jdbc` attempts to create its own transaction, which is a no-op inside an `with-db-transaction` so it is safe; transactions are _implicit_ in `clojure.java.jdbc`. However, if you have migrated that `with-db-transaction` call over to `next.jdbc/with-transaction` then any `clojure.java.jdbc` operations invoked inside the body of that migrated transaction _will still try to create their own transactions_ and `with-db-transaction` won't know about the outer `with-transaction` call. That means you will effectively get the "overlapping" behavior of `next.jdbc` since the `clojure.java.jdbc` operation will cause the outermost transaction to be committed or rolled back.
|
||||||
|
* None of the operations in `next.jdbc` try to create transactions -- except `with-transaction`. All `Connection`s are auto-commit by default so it doesn't need the local transactions that `clojure.java.jdbc` tries to create; transactions are _explicit_ in `next.jdbc`.
|
||||||
|
|
||||||
|
There are some strategies you can take to mitigate these differences:
|
||||||
|
1. Migrate code bottom-up so that you don't end up with calls to `clojure.java.jdbc` operations inside `next.jdbc/with-transaction` calls.
|
||||||
|
2. When you migrate a `with-db-transaction` call, think carefully about whether it could be a nested call (in which case simply remove it) or a conditionally nested call which you'll need to be much more careful about migrating.
|
||||||
|
3. You can bind `next.jdbc.transaction/*nested-tx*` to `:prohibit` which will throw exceptions if you accidentally nest calls to `next.jdbc/with-transaction`. Although you can bind it to `:ignore` in order to mimic the behavior of `clojure.java.jdbc`, that should be considered a last resort for dealing with complex conditional nesting of transaction calls. _Note that this is a per-thread "global" setting and not related to just a single connection, so you can't use this setting if you are working with multiple databases in the same dynamic thread context (`binding`)._
|
||||||
|
|
||||||
### Option Handling
|
### Option Handling
|
||||||
|
|
||||||
Because `clojure.java.jdbc` focuses on a hash map for the `db-spec` that is passed around, it can hold options that act as defaults for all operations on it. In addition, all operations in `clojure.java.jdbc` can accept a hash map of options and can pass those options down the call chain. In `next.jdbc`, `get-datasource`, `get-connection`, and `prepare` all produce Java objects that cannot have any extra options attached. On one hand, that means that you cannot provide "default options", and on the other hand it means you need to be a bit more careful to ensure that you pass the appropriate options to the appropriate function, since they cannot be passed through the call chain via the `db-spec`.
|
Because `clojure.java.jdbc` focuses on a hash map for the `db-spec` that is passed around, it can hold options that act as defaults for all operations on it. In addition, all operations in `clojure.java.jdbc` can accept a hash map of options and can pass those options down the call chain. In `next.jdbc`, `get-datasource`, `get-connection`, and `prepare` all produce Java objects that cannot have any extra options attached. On one hand, that means that it is harder to provide "default options", and on the other hand it means you need to be a bit more careful to ensure that you pass the appropriate options to the appropriate function, since they cannot be passed through the call chain via the `db-spec`. That's where `next.jdbc/with-options` can come in handy to wrap a connectable (generally a datasource or a connection) but be careful where you are managing connections and/or transactions directly, as mentioned in the [Getting Started](/doc/getting-started.md) guide.
|
||||||
|
|
||||||
In [All The Options](all-the-options.md), the appropriate options are shown for each function, as well as which options _will_ get passed down the call chain, e.g., if a function can open a connection, it will accept options for `get-connection`; if a function can build a result set, it will accept `:builder-fn`. However, `get-datasource`, `get-connection`, and `prepare` cannot propagate options any further because they produce Java objects as their results -- in particular, `prepare` can't accept `:builder-fn` because it doesn't build result sets: only `plan`, `execute-one!`, and `execute!` can use `:builder-fn`.
|
In [All The Options](all-the-options.md), the appropriate options are shown for each function, as well as which options _will_ get passed down the call chain, e.g., if a function can open a connection, it will accept options for `get-connection`; if a function can build a result set, it will accept `:builder-fn`. However, `get-datasource`, `get-connection`, and `prepare` cannot propagate options any further because they produce Java objects as their results -- in particular, `prepare` can't accept `:builder-fn` because it doesn't build result sets: only `plan`, `execute-one!`, and `execute!` can use `:builder-fn`.
|
||||||
|
|
||||||
In particular, this means that you can't globally override the default options (as you could with `clojure.java.jdbc` by adding your preferred defaults to the db-spec itself). If the default options do not suit your usage and you really don't want to override them in every call, it is recommended that you provide a wrapper namespace that implements the subset of the dozen API functions (from `next.jdbc` and `next.jdbc.sql`) that you want to use, overriding their `opts` argument with your defaults.
|
In particular, this means that you can't globally override the default options (as you could with `clojure.java.jdbc` by adding your preferred defaults to the db-spec itself). If the default options do not suit your usage and you really don't want to override them in every call, it is recommended that you try to use `next.jdbc/with-options` first, and if that still doesn't satisfy you, write a wrapper namespace that implements the subset of the dozen API functions (from `next.jdbc` and `next.jdbc.sql`) that you want to use, overriding their `opts` argument with your defaults.
|
||||||
|
|
||||||
## Primary API
|
## Primary API
|
||||||
|
|
||||||
`next.jdbc` has a deliberately narrow primary API that has (almost) no direct overlap with `clojure.java.jdbc`:
|
`next.jdbc` has a deliberately narrow primary API that has (almost) no direct overlap with `clojure.java.jdbc`:
|
||||||
|
|
||||||
* `get-datasource` -- has no equivalent in `clojure.java.jdbc` but is intended to emphasize `javax.sql.DataSource` as a starting point,
|
* `get-datasource` -- has no equivalent in `clojure.java.jdbc` but is intended to emphasize `javax.sql.DataSource` as a starting point,
|
||||||
* `get-connection` -- overlaps with `clojure.java.jdbc` (and returns a `java.sql.Connection`) but accepts only a subset of the options (`:dbtype`/`:dbname` hash map, `String` JDBC URI); `clojure.java.jdbc/get-connection` accepts `{:datasource ds}` whereas `next.jdbc/get-connection` accepts the `javax.sql.DataSource` object directly,
|
* `get-connection` -- overlaps with `clojure.java.jdbc` (and returns a `java.sql.Connection`) but accepts only a subset of the options (`:dbtype`/`:dbname` hash map, `String` JDBC URL); `clojure.java.jdbc/get-connection` accepts `{:datasource ds}` whereas `next.jdbc/get-connection` accepts the `javax.sql.DataSource` object directly,
|
||||||
* `prepare` -- somewhat similar to `clojure.java.jdbc/prepare-statement` but it accepts a vector of SQL and parameters (compared to just a raw SQL string),
|
* `prepare` -- somewhat similar to `clojure.java.jdbc/prepare-statement` but it accepts a vector of SQL and parameters (compared to just a raw SQL string),
|
||||||
* `plan` -- somewhat similar to `clojure.java.jdbc/reducible-query` but accepts arbitrary SQL statements for execution,
|
* `plan` -- somewhat similar to `clojure.java.jdbc/reducible-query` but accepts arbitrary SQL statements for execution,
|
||||||
* `execute!` -- has no direct equivalent in `clojure.java.jdbc` (but it can replace most uses of both `query` and `db-do-commands`),
|
* `execute!` -- has no direct equivalent in `clojure.java.jdbc` (but it can replace most uses of both `query` and `db-do-commands`),
|
||||||
* `execute-one!` -- has no equivalent in `clojure.java.jdbc` (but it can replace most uses of `query` that currently use `:result-set-fn first`),
|
* `execute-one!` -- has no equivalent in `clojure.java.jdbc` (but it can replace most uses of `query` that currently use `:result-set-fn first`),
|
||||||
* `transact` -- similar to `clojure.java.jdbc/db-transaction*`,
|
* `transact` -- similar to `clojure.java.jdbc/db-transaction*`,
|
||||||
* `with-transaction` -- similar to `clojure.java.jdbc/with-db-transaction`.
|
* `with-transaction` -- similar to `clojure.java.jdbc/with-db-transaction`,
|
||||||
|
* `with-options` -- provides a way to specify "default options" over a group of operations, by wrapping the connectable (datasource or connection).
|
||||||
|
|
||||||
If you were using a bare `db-spec` hash map with `:dbtype`/`:dbname`, or a JDBC URI string everywhere, that should mostly work with `next.jdbc` since most functions accept a "connectable", but it would be better to create a datasource first, and then pass that around. Note that `clojure.java.jdbc` allowed the `jdbc:` prefix in a JDBC URI to be omitted but `next.jdbc` _requires that prefix!_
|
If you were using a bare `db-spec` hash map with `:dbtype`/`:dbname`, or a JDBC URL string everywhere, that should mostly work with `next.jdbc` since most functions accept a "connectable", but it would be better to create a datasource first, and then pass that around. Note that `clojure.java.jdbc` allowed the `jdbc:` prefix in a JDBC URL to be omitted but `next.jdbc` _requires that prefix!_
|
||||||
|
|
||||||
If you were already creating `db-spec` as a pooled connection datasource -- a `{:datasource ds}` hashmap -- then passing `(:datasource db-spec)` to the `next.jdbc` functions is the simplest migration path. If you are migrating piecemeal and want to support _both_ `clojure.java.jdbc` _and_ `next.jdbc` at the same time in your code, you should consider using a datasource as the common way to work with both libraries. You can use `next.jdbc`'s `get-datasource` or the `->pool` function (in `next.jdbc.connection`) to create a `javax.sql.DataSource` and then build a `db-spec` hash map with it (`{:datasource ds}`) and pass that around your program. `clojure.java.jdbc` calls can use that as-is, `next.jdbc` calls can use `(:datasource db-spec)`, so you don't have to adjust any of your call chains (assuming you're passing `db-spec` around) and you can migrate one function at a time.
|
If you were already creating `db-spec` as a pooled connection datasource -- a `{:datasource ds}` hashmap -- then passing `(:datasource db-spec)` to the `next.jdbc` functions is the simplest migration path. If you are migrating piecemeal and want to support _both_ `clojure.java.jdbc` _and_ `next.jdbc` at the same time in your code, you should consider using a datasource as the common way to work with both libraries. You can use `next.jdbc`'s `get-datasource` or the `->pool` function (in `next.jdbc.connection`) to create a `javax.sql.DataSource` and then build a `db-spec` hash map with it (`{:datasource ds}`) and pass that around your program. `clojure.java.jdbc` calls can use that as-is, `next.jdbc` calls can use `(:datasource db-spec)`, so you don't have to adjust any of your call chains (assuming you're passing `db-spec` around) and you can migrate one function at a time.
|
||||||
|
|
||||||
|
|
@ -48,13 +64,25 @@ If you were using other forms of the `db-spec` hash map, you'll need to adjust t
|
||||||
The `next.jdbc.sql` namespace contains several functions with similarities to `clojure.java.jdbc`'s core API:
|
The `next.jdbc.sql` namespace contains several functions with similarities to `clojure.java.jdbc`'s core API:
|
||||||
|
|
||||||
* `insert!` -- similar to `clojure.java.jdbc/insert!` but only supports inserting a single map,
|
* `insert!` -- similar to `clojure.java.jdbc/insert!` but only supports inserting a single map,
|
||||||
* `insert-multi!` -- similar to `clojure.java.jdbc/insert-multi!` but only supports inserting columns and a vector of row values,
|
* `insert-multi!` -- similar to `clojure.java.jdbc/insert-multi!` but only supports inserting columns and a vector of row values, or a sequence of hash maps _that all have the same keys_ -- unlike `clojure.java.jdbc/insert-multi!`, you should always get a single multi-row insertion,
|
||||||
* `query` -- similar to `clojure.java.jdbc/query`,
|
* `query` -- similar to `clojure.java.jdbc/query`,
|
||||||
* `find-by-keys` -- similar to `clojure.java.jdbc/find-by-keys` but will also accept a partial where clause (vector) instead of a hash map of column name/value pairs,
|
* `find-by-keys` -- similar to `clojure.java.jdbc/find-by-keys` but will also accept a partial where clause (vector) instead of a hash map of column name/value pairs,
|
||||||
* `get-by-id` -- similar to `clojure.java.jdbc/get-by-id`,
|
* `get-by-id` -- similar to `clojure.java.jdbc/get-by-id`,
|
||||||
* `update!` -- similar to `clojure.java.jdbc/update!` but will also accept a hash map of column name/value pairs instead of a partial where clause (vector),
|
* `update!` -- similar to `clojure.java.jdbc/update!` but will also accept a hash map of column name/value pairs instead of a partial where clause (vector),
|
||||||
* `delete!` -- similar to `clojure.java.jdbc/delete!` but will also accept a hash map of column name/value pairs instead of a partial where clause (vector).
|
* `delete!` -- similar to `clojure.java.jdbc/delete!` but will also accept a hash map of column name/value pairs instead of a partial where clause (vector).
|
||||||
|
|
||||||
|
If you were using `db-do-commands` in `clojure.java.jdbc` to execute DDL, the following is the equivalent in `next.jdbc`:
|
||||||
|
|
||||||
|
```clojure
|
||||||
|
(defn do-commands [connectable commands]
|
||||||
|
(if (instance? java.sql.Connection connectable)
|
||||||
|
(with-open [stmt (next.jdbc.prepare/statement connectable)]
|
||||||
|
(run! #(.addBatch stmt %) commands)
|
||||||
|
(into [] (.executeBatch stmt)))
|
||||||
|
(with-open [conn (next.jdbc/get-connection connectable)]
|
||||||
|
(do-commands conn commands))))
|
||||||
|
```
|
||||||
|
|
||||||
### `:identifiers` and `:qualifier`
|
### `:identifiers` and `:qualifier`
|
||||||
|
|
||||||
If you are using `:identifiers`, you will need to change to the appropriate `:builder-fn` option with one of `next.jdbc.result-set`'s `as-*` functions.
|
If you are using `:identifiers`, you will need to change to the appropriate `:builder-fn` option with one of `next.jdbc.result-set`'s `as-*` functions.
|
||||||
|
|
@ -90,9 +118,8 @@ Several methods on `DatabaseMetaData` return a `ResultSet` object. All of those
|
||||||
|
|
||||||
These are mostly drawn from [Issue #5](https://github.com/seancorfield/next-jdbc/issues/5) although most of the bullets in that issue are described in more detail above.
|
These are mostly drawn from [Issue #5](https://github.com/seancorfield/next-jdbc/issues/5) although most of the bullets in that issue are described in more detail above.
|
||||||
|
|
||||||
* Keyword options no longer end in `?` -- for consistency (in `clojure.java.jdbc`, some flag options ended in `?` and some did not; also some options that ended in `?` accepted non-`Boolean` values, e.g., `:as-arrays?` and `:explain?`),
|
* Keyword options no longer end in `?` -- for consistency (in `clojure.java.jdbc`, some flag options ended in `?` and some did not; also some options that ended in `?` accepted non-`Boolean` values),
|
||||||
* `with-db-connection` has been replaced by just `with-open` containing a call to `get-connection`,
|
* `with-db-connection` has been replaced by just `with-open` containing a call to `get-connection`,
|
||||||
* `with-transaction` can take a `:rollback-only` option, but there is no built-in way to change a transaction to rollback _dynamically_; either throw an exception (all transactions roll back on an exception) or call `.rollback` directly on the `java.sql.Connection` object (see [Manual Rollback Inside a Transaction](/doc/transactions.md#manual-rollback-inside-a-transaction) and the following section about save points),
|
* `with-transaction` can take a `:rollback-only` option, but there is no built-in way to change a transaction to rollback _dynamically_; either throw an exception (all transactions roll back on an exception) or call `.rollback` directly on the `java.sql.Connection` object (see [Manual Rollback Inside a Transaction](/doc/transactions.md#manual-rollback-inside-a-transaction) and the following section about save points),
|
||||||
|
* `clojure.java.jdbc` implicitly allowed transactions to nest and just silently ignored the inner, nested transactions (so you only really had the top-level, outermost transaction); `next.jdbc` by default assumes you know what you are doing and so an inner (nested) transaction will commit or rollback the work done so far in the outer transaction (and then when that outer transaction ends, the remaining work is rolled back or committed); `next.jdbc.transaction/*nested-tx*` is a dynamic var that can be bound to `:ignore` to get similar behavior to `clojure.java.jdbc`.
|
||||||
* The extension points for setting parameters and reading columns are now `SettableParameter` and `ReadableColumn` protocols.
|
* The extension points for setting parameters and reading columns are now `SettableParameter` and `ReadableColumn` protocols.
|
||||||
|
|
||||||
[<: `datafy`, `nav`, and `:schema`](/doc/datafy-nav-and-schema.md)
|
|
||||||
|
|
|
||||||
|
|
@ -46,7 +46,7 @@ This can be extended to any Clojure data type, to provide a customized way to ad
|
||||||
(.setTimestamp ps i (java.sql.Timestamp/valueOf v))))
|
(.setTimestamp ps i (java.sql.Timestamp/valueOf v))))
|
||||||
```
|
```
|
||||||
|
|
||||||
> Note: those conversions can also be enabled by requiring the [`next.jdbc.date-time` namespace](https://cljdoc.org/d/seancorfield/next.jdbc/CURRENT/api/next.jdbc.date-time).
|
> Note: those conversions can also be enabled by requiring the [`next.jdbc.date-time` namespace](https://cljdoc.org/d/com.github.seancorfield/next.jdbc/CURRENT/api/next.jdbc.date-time).
|
||||||
|
|
||||||
You can also extend this protocol via metadata so you can do it on a per-object basis if you need:
|
You can also extend this protocol via metadata so you can do it on a per-object basis if you need:
|
||||||
|
|
||||||
|
|
@ -54,6 +54,8 @@ You can also extend this protocol via metadata so you can do it on a per-object
|
||||||
(with-meta obj {'next.jdbc.prepare/set-parameter (fn [v ps i]...)})
|
(with-meta obj {'next.jdbc.prepare/set-parameter (fn [v ps i]...)})
|
||||||
```
|
```
|
||||||
|
|
||||||
|
The `next.jdbc.types` namespace provides functions to wrap values with per-object implementations of `set-parameter` for every standard `java.sql.Types` value. Each is named `as-xxx` corresponding to `java.sql.Types/XXX`.
|
||||||
|
|
||||||
The converse, converting database-specific types to Clojure values is handled by the `ReadableColumn` protocol, discussed in the previous section ([Result Set Builders](/doc/result-set-builders.md#readablecolumn)).
|
The converse, converting database-specific types to Clojure values is handled by the `ReadableColumn` protocol, discussed in the previous section ([Result Set Builders](/doc/result-set-builders.md#readablecolumn)).
|
||||||
|
|
||||||
As noted above, `next.jdbc.prepare/set-parameters` is available for you to call on any existing `PreparedStatement` to set or update the parameters that will be used when the statement is executed:
|
As noted above, `next.jdbc.prepare/set-parameters` is available for you to call on any existing `PreparedStatement` to set or update the parameters that will be used when the statement is executed:
|
||||||
|
|
@ -91,15 +93,24 @@ Here we set parameters and add them in batches to the prepared statement, then w
|
||||||
(.executeBatch ps)) ; returns int[]
|
(.executeBatch ps)) ; returns int[]
|
||||||
```
|
```
|
||||||
|
|
||||||
Both of those are somewhat ugly and contain a fair bit of boilerplate and Java interop, so a helper function is provided in `next.jdbc.prepare` to automate the execution of batched parameters:
|
Both of those are somewhat ugly and contain a fair bit of boilerplate and Java interop, so a helper function is provided in `next.jdbc` to automate the execution of batched parameters:
|
||||||
|
|
||||||
```clojure
|
```clojure
|
||||||
(with-open [con (jdbc/get-connection ds)
|
(with-open [con (jdbc/get-connection ds)
|
||||||
ps (jdbc/prepare con ["insert into status (id,name) values (?,?)"])]
|
ps (jdbc/prepare con ["insert into status (id,name) values (?,?)"])]
|
||||||
(p/execute-batch! ps [[1 "Approved"] [2 "Rejected"] [3 "New"]]))
|
(jdbc/execute-batch! ps [[1 "Approved"] [2 "Rejected"] [3 "New"]]))
|
||||||
|
;; or:
|
||||||
|
(jdbc/execute-batch! ds
|
||||||
|
"insert into status (id,name) values (?,?)"
|
||||||
|
[[1 "Approved"] [2 "Rejected"] [3 "New"]]
|
||||||
|
;; options hash map required here to disambiguate
|
||||||
|
;; this call from the 2- & 3-arity calls
|
||||||
|
{})
|
||||||
```
|
```
|
||||||
|
|
||||||
By default, this adds all the parameter groups and executes one batched command. It returns a (Clojure) vector of update counts (rather than `int[]`). If you provide an options hash map, you can specify a `:batch-size` and the parameter groups will be partitioned and executed as multiple batched commands. This is intended to allow very large sequences of parameter groups to be executed without running into limitations that may apply to a single batched command. If you expect the update counts to be very large (more than `Integer/MAX_VALUE`), you can specify `:large true` so that `.executeLargeBatch` is called instead of `.executeBatch`. Note: not all databases support `.executeLargeBatch`.
|
By default, this adds all the parameter groups and executes one batched command. It returns a (Clojure) vector of update counts (rather than `int[]`). If you provide an options hash map, you can specify a `:batch-size` and the parameter groups will be partitioned and executed as multiple batched commands. This is intended to allow very large sequences of parameter groups to be executed without running into limitations that may apply to a single batched command. If you expect the update counts to be very large (more than `Integer/MAX_VALUE`), you can specify `:large true` so that `.executeLargeBatch` is called instead of `.executeBatch`.
|
||||||
|
|
||||||
|
> Note: not all databases support `.executeLargeBatch`.
|
||||||
|
|
||||||
If you want to get the generated keys from an `insert` done via `execute-batch!`, you need a couple of extras, compared to the above:
|
If you want to get the generated keys from an `insert` done via `execute-batch!`, you need a couple of extras, compared to the above:
|
||||||
|
|
||||||
|
|
@ -108,22 +119,26 @@ If you want to get the generated keys from an `insert` done via `execute-batch!`
|
||||||
;; ensure the PreparedStatement will return the keys:
|
;; ensure the PreparedStatement will return the keys:
|
||||||
ps (jdbc/prepare con ["insert into status (id,name) values (?,?)"]
|
ps (jdbc/prepare con ["insert into status (id,name) values (?,?)"]
|
||||||
{:return-keys true})]
|
{:return-keys true})]
|
||||||
;; this returns update counts (which we'll ignore)
|
;; this will call .getGeneratedKeys for each batch and return them as a
|
||||||
(p/execute-batch! ps [[1 "Approved"] [2 "Rejected"] [3 "New"]])
|
;; vector of datafiable result sets (the keys in the map are database-specific):
|
||||||
;; this produces the generated keys as a (datafiable) Clojure data structure:
|
(jdbc/execute-batch! ps [[1 "Approved"] [2 "Rejected"] [3 "New"]]
|
||||||
(rs/datafiable-result-set (.getGeneratedKeys ps) con {}))
|
{:return-generated-keys true}))
|
||||||
|
;; or:
|
||||||
|
(jdbc/execute-batch! ds
|
||||||
|
"insert into status (id,name) values (?,?)"
|
||||||
|
[[1 "Approved"] [2 "Rejected"] [3 "New"]]
|
||||||
|
{:return-keys true ; for creation of PreparedStatement
|
||||||
|
:return-generated-keys true}) ; for batch result format
|
||||||
```
|
```
|
||||||
|
|
||||||
The call to `rs/datafiable-result-set` can be passed a `:builder-fn` option if you want something other than qualified as-is hash maps.
|
This calls `rs/datafiable-result-set` behind the scenes so you can also pass a `:builder-fn` option to `execute-batch!` if you want something other than qualified as-is hash maps.
|
||||||
|
|
||||||
> Note: not all databases support calling `.getGeneratedKeys` here (everything I test against seems to, except MS SQL Server).
|
> Note: not all databases support calling `.getGeneratedKeys` here (everything I test against seems to, except MS SQL Server and SQLite). Some databases will only return one generated key per batch, rather than a generated key for every row inserted. You may need to add `RETURNING *` to your `INSERT` statements instead.
|
||||||
|
|
||||||
### Caveats
|
### Caveats
|
||||||
|
|
||||||
There are several caveats around using batched parameters. Some JDBC drivers need a "hint" in order to perform the batch operation as a single command for the database. In particular, PostgreSQL requires the `:reWriteBatchedInserts true` option and MySQL requires `:rewriteBatchedStatement true` (both non-standard JDBC options, of course!). These should be provided as part of the db-spec hash map when the datasource is created.
|
There are several caveats around using batched parameters. Some JDBC drivers need a "hint" in order to perform the batch operation as a single command for the database. In particular, PostgreSQL requires the `:reWriteBatchedInserts true` option and MySQL requires `:rewriteBatchedStatements true` (both non-standard JDBC options, of course!). These should be provided as part of the db-spec hash map when the datasource is created.
|
||||||
|
|
||||||
In addition, if the batch operation fails for a group of parameters, it is database-specific whether the remaining groups of parameters are used, i.e., whether the operation is performed for any further groups of parameters after the one that failed. The result of calling `execute-batch!` is a vector of integers. Each element of the vector is the number of rows affected by the operation for each group of parameters. `execute-batch!` may throw a `BatchUpdateException` and calling `.getUpdateCounts` (or `.getLargeUpdateCounts`) on the exception may return an array containing a mix of update counts and error values (a Java `int[]` or `long[]`). Some databases don't always return an update count but instead a value indicating the number of rows is not known (but sometimes you can still get the update counts).
|
In addition, if the batch operation fails for a group of parameters, it is database-specific whether the remaining groups of parameters are used, i.e., whether the operation is performed for any further groups of parameters after the one that failed. The result of calling `execute-batch!` is a vector of integers. Each element of the vector is the number of rows affected by the operation for each group of parameters. `execute-batch!` may throw a `BatchUpdateException` and calling `.getUpdateCounts` (or `.getLargeUpdateCounts`) on the exception may return an array containing a mix of update counts and error values (a Java `int[]` or `long[]`). Some databases don't always return an update count but instead a value indicating the number of rows is not known (but sometimes you can still get the update counts).
|
||||||
|
|
||||||
Finally, some database drivers don't do batched operations at all -- they accept `.executeBatch` but they run the operation as separate commands for the database rather than a single batched command.
|
Finally, some database drivers don't do batched operations at all -- they accept `.executeBatch` but they run the operation as separate commands for the database rather than a single batched command. Some database drivers do not support `.getGeneratedKeys` (e.g., MS SQL Server and SQLite) so you cannot use `:return-generated-keys` and you need to use `RETURNING *` in your `INSERT` statements instead.
|
||||||
|
|
||||||
[<: Result Set Builders](/doc/result-set-builders.md) | [Transactions :>](/doc/transactions.md)
|
|
||||||
|
|
|
||||||
|
|
@ -26,7 +26,7 @@ In addition, the following generic builders can take `:label-fn` and `:qualifier
|
||||||
* `as-modified-arrays` -- table-qualified keywords,
|
* `as-modified-arrays` -- table-qualified keywords,
|
||||||
* `as-unqualified-modified-arrays` -- simple keywords.
|
* `as-unqualified-modified-arrays` -- simple keywords.
|
||||||
|
|
||||||
An example builder that converts `snake_case` database table/column names to `kebab-case` keywords:
|
An example builder that naively converts `snake_case` database table/column names to `kebab-case` keywords:
|
||||||
|
|
||||||
```clojure
|
```clojure
|
||||||
(defn as-kebab-maps [rs opts]
|
(defn as-kebab-maps [rs opts]
|
||||||
|
|
@ -34,11 +34,24 @@ An example builder that converts `snake_case` database table/column names to `ke
|
||||||
(result-set/as-modified-maps rs (assoc opts :qualifier-fn kebab :label-fn kebab))))
|
(result-set/as-modified-maps rs (assoc opts :qualifier-fn kebab :label-fn kebab))))
|
||||||
```
|
```
|
||||||
|
|
||||||
And finally there are adapters for the existing builders that let you override the default way that columns are read from result sets:
|
However, a version of `as-kebab-maps` is built-in, as is `as-unqualified-kebab-maps`, which both use the `->kebab-case` function from the [camel-snake-kebab library](https://github.com/clj-commons/camel-snake-kebab/) with `as-modified-maps` and `as-unqualified-modified-maps` respectively, so you can just use the built-in `result-set/as-kebab-maps` (or `result-set/as-unqualified-kebab-maps`) builder as a `:builder-fn` option instead of writing your own.
|
||||||
|
|
||||||
|
> Note: `next.jdbc/snake-kebab-opts` and `next.jdbc/unqualified-snake-kebab-opts` exist, providing pre-built options hash maps that contain these `:builder-fn` options, as well as appropriate `:table-fn` and `:column-fn` options for the **Friendly SQL Functions** so those are often the most convenient way to enable snake/kebab case conversions with `next.jdbc`.
|
||||||
|
|
||||||
|
And finally there are two styles of adapters for the existing builders that let you override the default way that columns are read from result sets.
|
||||||
|
The first style takes a `column-reader` function, which is called with the `ResultSet`, the `ResultSetMetaData`, and the column index, and is expected to read the raw column value from the result set and return it. The result is then passed through `read-column-by-index` (from `ReadableColumn`, which may be implemented directly via protocol extension or via metadata on the result of the `column-reader` function):
|
||||||
|
|
||||||
* `as-maps-adapter` -- adapts an existing map builder function with a new column reader,
|
* `as-maps-adapter` -- adapts an existing map builder function with a new column reader,
|
||||||
* `as-arrays-adapter` -- adapts an existing array builder function with a new column reader.
|
* `as-arrays-adapter` -- adapts an existing array builder function with a new column reader.
|
||||||
|
|
||||||
|
The default `column-reader` function behavior would be:
|
||||||
|
|
||||||
|
```clojure
|
||||||
|
(defn default-column-reader
|
||||||
|
[^ResultSet rs ^ResultSetMetaData rsmeta ^Integer i]
|
||||||
|
(.getObject rs i))
|
||||||
|
```
|
||||||
|
|
||||||
An example column reader is provided -- `clob-column-reader` -- that still uses `.getObject` but will expand `java.sql.Clob` values into strings (using the `clob->string` helper function):
|
An example column reader is provided -- `clob-column-reader` -- that still uses `.getObject` but will expand `java.sql.Clob` values into strings (using the `clob->string` helper function):
|
||||||
|
|
||||||
```clojure
|
```clojure
|
||||||
|
|
@ -47,13 +60,30 @@ An example column reader is provided -- `clob-column-reader` -- that still uses
|
||||||
result-set/clob-column-reader)}
|
result-set/clob-column-reader)}
|
||||||
```
|
```
|
||||||
|
|
||||||
|
As of 1.1.569, the second style of adapter relies on `with-column-value` from `RowBuilder` (see below) and allows you to take complete control of the column reading process. This style takes a `column-by-index-fn` function, which is called with the builder itself, the `ResultSet`, and the column index, and is expected to read the raw column value from the result set and perform any and all processing on it, before returning it. The result is added directly to the current row with no further processing.
|
||||||
|
|
||||||
|
* `builder-adapter` -- adapts any existing builder function with a new column reading function.
|
||||||
|
|
||||||
|
The default `column-by-index-fn` function behavior would be:
|
||||||
|
|
||||||
|
```clojure
|
||||||
|
(defn default-column-by-index-fn
|
||||||
|
[builder ^ResultSet rs ^Integer i]
|
||||||
|
(result-set/read-column-by-index (.getObject rs i) (:rsmeta builder) i))
|
||||||
|
```
|
||||||
|
|
||||||
|
Because the builder itself is passed in, the vector of processed column names is available as `(:cols builder)` (in addition to the `ResultSetMetaData` as `(:rsmeta builder)`). This allows you to take different actions based on the metadata or the column name, as well as bypassing the `read-column-by-index` call if you wish.
|
||||||
|
|
||||||
|
The older `as-*-adapter` functions are now implemented in terms of this `builder-adapter` because `with-column-value` abstracts away _how_ the new column's value is added to the row being built.
|
||||||
|
|
||||||
## RowBuilder Protocol
|
## RowBuilder Protocol
|
||||||
|
|
||||||
This protocol defines four functions and is used whenever `next.jdbc` needs to materialize a row from a `ResultSet` as a Clojure data structure:
|
This protocol defines five functions and is used whenever `next.jdbc` needs to materialize a row from a `ResultSet` as a Clojure data structure:
|
||||||
|
|
||||||
* `(->row builder)` -- produces a new row (a `(transient {})` by default),
|
* `(->row builder)` -- produces a new row (a `(transient {})` by default),
|
||||||
* `(column-count builder)` -- returns the number of columns in each row,
|
* `(column-count builder)` -- returns the number of columns in each row,
|
||||||
* `(with-column builder row i)` -- given the row so far, fetches column `i` from the current row of the `ResultSet`, converts it to a Clojure value, and adds it to the row (for `as-maps` this is a call to `.getObject`, a call to `read-column-by-index` -- see the `ReadableColumn` protocol below, and a call to `assoc!`),
|
* `(with-column builder row i)` -- given the row so far, fetches column `i` from the current row of the `ResultSet`, converts it to a Clojure value, and adds it to the row (for `as-maps` this is a call to `.getObject`, a call to `read-column-by-index` -- see the `ReadableColumn` protocol below, and a call to `assoc!`),
|
||||||
|
* `(with-column-value builder row col v)` -- given the row so far, the column name, and the column value, add the column name/value to the row in the appropriate way: this is a low-level utility, intended to be used in builders (or adapters) that want to control more of the value handling process -- in general, `with-column` will be implemented by calling `with-column-value`,
|
||||||
* `(row! builder row)` -- completes the row (a `(persistent! row)` call by default).
|
* `(row! builder row)` -- completes the row (a `(persistent! row)` call by default).
|
||||||
|
|
||||||
`execute!` and `execute-one!` call these functions for each row they need to build. `plan` _may_ call these functions if the reducing function causes a row to be materialized.
|
`execute!` and `execute-one!` call these functions for each row they need to build. `plan` _may_ call these functions if the reducing function causes a row to be materialized.
|
||||||
|
|
@ -72,7 +102,9 @@ Only `execute!` expects this protocol to be implemented. `execute-one!` and `pla
|
||||||
|
|
||||||
The `as-*` functions described above are all implemented in terms of these protocols. They are passed the `ResultSet` object and the options hash map (as passed into various `next.jdbc` functions). They return an implementation of the protocols that is then used to build rows and the result set. Note that the `ResultSet` passed in is _mutable_ and is advanced from row to row by the SQL execution function, so each time `->row` is called, the underlying `ResultSet` object points at each new row in turn. By contrast, `->rs` (which is only called by `execute!`) is invoked _before_ the `ResultSet` is advanced to the first row.
|
The `as-*` functions described above are all implemented in terms of these protocols. They are passed the `ResultSet` object and the options hash map (as passed into various `next.jdbc` functions). They return an implementation of the protocols that is then used to build rows and the result set. Note that the `ResultSet` passed in is _mutable_ and is advanced from row to row by the SQL execution function, so each time `->row` is called, the underlying `ResultSet` object points at each new row in turn. By contrast, `->rs` (which is only called by `execute!`) is invoked _before_ the `ResultSet` is advanced to the first row.
|
||||||
|
|
||||||
The options hash map for any `next.jdbc` function can contain a `:builder-fn` key and the value is used as the row/result set builder function. The tests for `next.jdbc.result-set` include a [record-based builder function](https://github.com/seancorfield/next-jdbc/blob/master/test/next/jdbc/result_set_test.clj#L335-L353) as an example of how you can extend this to satisfy your needs.
|
The result set builder implementation is also assumed to implement `clojure.lang.ILookup` such that the keys `:cols` and `:rsmeta` are supported and should map to the vector of column names that the builder will produce and the `ResultSetMetaData` object (which can be obtained from the `ResultSet`, if necessary). This is intended to allow `plan` and various builder adapters to access certain information that may be needed for processing results. The default builder implementations (for maps and arrays) are both records with fields `rsmeta` and `cols` (in addition to `rs` -- the `ResultSet` itself). The adapters provided in `next.jdbc.result-set` returned reified implementations that delegate field lookup to the underlying builder implementation.
|
||||||
|
|
||||||
|
The options hash map for any `next.jdbc` function can contain a `:builder-fn` key and the value is used as the row/result set builder function. The tests for `next.jdbc.result-set` include a [record-based builder function](https://github.com/seancorfield/next-jdbc/blob/develop/test/next/jdbc/result_set_test.clj#L335-L353) as an example of how you can extend this to satisfy your needs.
|
||||||
|
|
||||||
> Note: When `next.jdbc` cannot obtain a `ResultSet` object and returns `{:next.jdbc/count N}` instead, the builder function is not applied -- the `:builder-fn` option does not affect the shape of the result.
|
> Note: When `next.jdbc` cannot obtain a `ResultSet` object and returns `{:next.jdbc/count N}` instead, the builder function is not applied -- the `:builder-fn` option does not affect the shape of the result.
|
||||||
|
|
||||||
|
|
@ -92,10 +124,10 @@ This namespace contains variants of the six `as-maps`-style builders above that
|
||||||
|
|
||||||
# ReadableColumn
|
# ReadableColumn
|
||||||
|
|
||||||
As mentioned above, when `with-column` is called, the expectation is that the row builder will call `.getObject` on the current state of the `ResultSet` object with the column index and will then call `read-column-by-index`, passing the column value, the `ResultSetMetaData`, and the column index. That function is part of the `ReadableColumn` protocol that you can extend to handle conversion of arbitrary database-specific types to Clojure values.
|
As mentioned above, when `with-column` is called, the expectation is that the row builder will call `.getObject` on the current state of the `ResultSet` object with the column index and will then call `read-column-by-index`, passing the column value, the `ResultSetMetaData`, and the column index. That function is part of the `ReadableColumn` protocol that you can extend to handle conversion of arbitrary database-specific types to Clojure values. It is extensible via metadata so the value you return can have metadata specifying the implementation of `read-column-by-index`.
|
||||||
|
|
||||||
If you need more control over how values are read from the `ResultSet` object, you can use `next.jdbc.result-set/as-maps-adapter` (or `next.jdbc.result-set/as-arrays-adapter`) which takes an existing builder function and a column reading function and returns a new builder function that calls your column reading function (with the `ResultSet` object, the `ResultSetMetaData` object, and the column index) instead of calling `.getObject` directly.
|
If you need more control over how values are read from the `ResultSet` object, you can use `next.jdbc.result-set/as-maps-adapter` (or `next.jdbc.result-set/as-arrays-adapter`, or the more low-level but more generic `next.jdbc.result-set/builder-adapter`) which takes an existing builder function and a column reading function and returns a new builder function that calls your column reading function (with the `ResultSet` object, the `ResultSetMetaData` object, and the column index -- or the builder itself, the `ResultSet` object, and the column index in the case of `builder-adapter`) instead of calling `.getObject` directly.
|
||||||
Note that the adapters still call `read-column-by-index` on the value your column reading function returns.
|
Note that the `as-*` adapters still call `read-column-by-index` on the value your column reading function returns.
|
||||||
|
|
||||||
In addition, inside `plan`, as each value is looked up by name in the current state of the `ResultSet` object, the `read-column-by-label` function is called, again passing the column value and the column label (the name used in the SQL to identify that column). This function is also part of the `ReadableColumn` protocol.
|
In addition, inside `plan`, as each value is looked up by name in the current state of the `ResultSet` object, the `read-column-by-label` function is called, again passing the column value and the column label (the name used in the SQL to identify that column). This function is also part of the `ReadableColumn` protocol.
|
||||||
|
|
||||||
|
|
@ -117,8 +149,6 @@ The default implementation of this protocol is for these two functions to return
|
||||||
(.toInstant v)))
|
(.toInstant v)))
|
||||||
```
|
```
|
||||||
|
|
||||||
Remember that a protocol extension will apply to all code running in your application so with the above code **all** timestamp values coming from the database will be converted to `java.time.Instant` for all queries. If you want to control behavior across different calls, consider the adapters described above (`as-maps-adapter` and `as-arrays-adapter`).
|
Remember that a protocol extension will apply to all code running in your application, so with the above code **all** timestamp values coming from the database will be converted to `java.time.Instant` for all queries. If you want to control behavior across different calls, consider the adapters described above (`as-maps-adapter`, `as-arrays-adapter`, and `builder-adapter`), and think about using metadata to implement the `rs/ReadableColumn` protocol instead of extending it.
|
||||||
|
|
||||||
Note that the converse, converting Clojure values to database-specific types is handled by the `SettableParameter` protocol, discussed in the next section ([Prepared Statements](/doc/prepared-statements.md#prepared-statement-parameters)).
|
Note that the converse, converting Clojure values to database-specific types is handled by the `SettableParameter` protocol, discussed in the next section ([Prepared Statements](/doc/prepared-statements.md#prepared-statement-parameters)).
|
||||||
|
|
||||||
[<: Tips & Tricks](/doc/tips-and-tricks.md) | [Prepared Statements :>](/doc/prepared-statements.md)
|
|
||||||
|
|
|
||||||
|
|
@ -21,9 +21,9 @@ There is a helper in `next.jdbc.result-set` to make this easier -- `clob->string
|
||||||
(extend-protocol rs/ReadableColumn
|
(extend-protocol rs/ReadableColumn
|
||||||
java.sql.Clob
|
java.sql.Clob
|
||||||
(read-column-by-label [^java.sql.Clob v _]
|
(read-column-by-label [^java.sql.Clob v _]
|
||||||
(clob->string v))
|
(rs/clob->string v))
|
||||||
(read-column-by-index [^java.sql.Clob v _2 _3]
|
(read-column-by-index [^java.sql.Clob v _2 _3]
|
||||||
(clob->string v)))
|
(rs/clob->string v)))
|
||||||
```
|
```
|
||||||
|
|
||||||
As noted in [Result Set Builders](/doc/result-set-builders.md), there is also `clob-column-reader` that can be used with the `as-*-adapter` result set builder functions.
|
As noted in [Result Set Builders](/doc/result-set-builders.md), there is also `clob-column-reader` that can be used with the `as-*-adapter` result set builder functions.
|
||||||
|
|
@ -38,6 +38,42 @@ Consult the [java.sql.Blob documentation](https://docs.oracle.com/javase/8/docs/
|
||||||
|
|
||||||
> Note: the standard MySQL JDBC driver seems to return `BLOB` data as `byte[]` instead of `java.sql.Blob`.
|
> Note: the standard MySQL JDBC driver seems to return `BLOB` data as `byte[]` instead of `java.sql.Blob`.
|
||||||
|
|
||||||
|
## Exceptions
|
||||||
|
|
||||||
|
A lot of JDBC operations can fail with an exception. JDBC 4.0 has a
|
||||||
|
[well-defined hierarchy of exception types](https://docs.oracle.com/en/java/javase/17/docs/api/java.sql/java/sql/package-tree.html)
|
||||||
|
and you can often catch a specific type of exception to do useful handling
|
||||||
|
of various error conditions that you might "expect" when working with a
|
||||||
|
database.
|
||||||
|
|
||||||
|
A good example is [SQLIntegrityConstraintViolationException](https://docs.oracle.com/en/java/javase/17/docs/api/java.sql/java/sql/SQLIntegrityConstraintViolationException.html)
|
||||||
|
which typically represents an index/key constraint violation such as a
|
||||||
|
duplicate primary key insertion attempt.
|
||||||
|
|
||||||
|
However, like some other areas when dealing with JDBC, the reality can
|
||||||
|
be very database-specific. Some database drivers **don't** use the hierarchy
|
||||||
|
above -- notably PostgreSQL, which has a generic `PSQLException` type
|
||||||
|
with its own subclasses and semantics. See [PostgreSQL JDBC issue #963](https://github.com/pgjdbc/pgjdbc/issues/963)
|
||||||
|
for a discussion of the difficulty in adopting the standard JDBC hierarchy
|
||||||
|
(dating back to 2017!).
|
||||||
|
|
||||||
|
The `java.sql.SQLException` class provides `.getErrorCode()` and
|
||||||
|
`.getSQLState()` methods but the values returned by those are
|
||||||
|
explicitly vendor-specific (error code) or only partly standardized (state).
|
||||||
|
In theory, the SQL state should follow either the X/Open (Open Group) or
|
||||||
|
ANSI SQL 2003 conventions, both of which were behind paywalls(!). The most
|
||||||
|
complete public listing is probably the IBM DB2
|
||||||
|
[SQL State](https://www.ibm.com/docs/en/db2woc?topic=messages-sqlstate)
|
||||||
|
document.
|
||||||
|
See also this [Stack Overflow post about SQL State](https://stackoverflow.com/questions/1399574/what-are-all-the-possible-values-for-sqlexception-getsqlstate)
|
||||||
|
for more references and links. Not all database drivers follow either of
|
||||||
|
these conventions for SQL State so you may still have to consult your
|
||||||
|
vendor's specific documentation.
|
||||||
|
|
||||||
|
All of this makes writing _generic_ error handling, that works across
|
||||||
|
multiple databases, very hard indeed. You can't rely on the JDBC `SQLException`
|
||||||
|
hierarchy; you can sometimes rely on a subset of SQL State values.
|
||||||
|
|
||||||
## Handling Timeouts
|
## Handling Timeouts
|
||||||
|
|
||||||
JDBC provides a number of ways in which you can decide how long an operation should run before it times out. Some of these timeouts are specified in seconds and some are in milliseconds. Some are handled via connection properties (or JDBC URL parameters), some are handled via methods on various JDBC objects.
|
JDBC provides a number of ways in which you can decide how long an operation should run before it times out. Some of these timeouts are specified in seconds and some are in milliseconds. Some are handled via connection properties (or JDBC URL parameters), some are handled via methods on various JDBC objects.
|
||||||
|
|
@ -60,7 +96,7 @@ Examples:
|
||||||
;; socketTimeout via JDBC URL:
|
;; socketTimeout via JDBC URL:
|
||||||
(def db-url (str "jdbc:sqlserver://localhost;user=sa;password=secret"
|
(def db-url (str "jdbc:sqlserver://localhost;user=sa;password=secret"
|
||||||
;; milliseconds:
|
;; milliseconds:
|
||||||
";database=master;socketTimeout=10000"))
|
";database=model;socketTimeout=10000"))
|
||||||
|
|
||||||
;; loginTimeout via DataSource:
|
;; loginTimeout via DataSource:
|
||||||
(def ds (jdbc/get-datasource db-spec))
|
(def ds (jdbc/get-datasource db-spec))
|
||||||
|
|
@ -75,12 +111,66 @@ Examples:
|
||||||
(jdbc/execute! ps))
|
(jdbc/execute! ps))
|
||||||
```
|
```
|
||||||
|
|
||||||
|
## Reducing and Folding with `plan`
|
||||||
|
|
||||||
|
Most of this documentation describes using `plan` specifically for reducing and notes that you can avoid the overhead of realizing rows from the `ResultSet` into Clojure data structures if your reducing function uses only functions that get column values by name. If you perform any function on the row that would require an actual hash map or a sequence, the row will be realized into a full Clojure hash map via the builder function passed in the options (or via `next.jdbc.result-set/as-maps` by default).
|
||||||
|
|
||||||
|
One of the benefits of reducing over `plan` is that you can stream very large result sets, very efficiently, without having the entire result set in memory (assuming your reducing function doesn't build a data structure that is too large!). See the tips below on **Streaming Result Sets**.
|
||||||
|
If you want to process a `plan` result purely for side-effects, without producing a result,
|
||||||
|
you can use `run!` instead of `reduce`:
|
||||||
|
|
||||||
|
```clojure
|
||||||
|
(run! process-row (jdbc/plan ds ...))
|
||||||
|
```
|
||||||
|
|
||||||
|
`run!` is based on `reduce` and `process-row` here takes just one argument --
|
||||||
|
the row -- rather than the usual reducing function that takes two.
|
||||||
|
|
||||||
|
The result of `plan` is also foldable in the [clojure.core.reducers](https://clojure.org/reference/reducers) sense. While you could use `execute!` to produce a vector of fully-realized rows as hash maps and then fold that vector (Clojure's vectors support fork-join parallel reduce-combine), that wouldn't be possible for very large result sets. If you fold the result of `plan`, the result set will be partitioned and processed using fork-join parallel reduce-combine. Unlike reducing over `plan`, each row **is** realized into a Clojure data structure and each batch is forked for reduction as soon as that many rows have been realized. By default, `fold`'s batch size is 512 but you can specify a different value in the 4-arity call. Once the entire result set has been read, the last (partial) batch is forked for reduction. The combining operations are forked and interleaved with the reducing operations, so the order (of forked tasks) is batch-1, batch-2, combine-1-2, batch-3, combine-1&2-3, batch-4, combine-1&2&3-4, etc. The amount of parallelization you get will depend on many factors including the number of processors, the speed of your reducing function, the speed of your combining function, and the speed with which result sets can actually be streamed from your database.
|
||||||
|
|
||||||
|
There is no back pressure here so if your reducing function is slow, you may end up with more of the realized result set in memory than your system can cope with.
|
||||||
|
|
||||||
|
## Times, Dates, and Timezones
|
||||||
|
|
||||||
|
Working with dates and timezones in databases can be confusing, as you are
|
||||||
|
working at the intersection between the database, the JDBC library and the
|
||||||
|
date library that you happen to be using. A good rule of thumb is to keep
|
||||||
|
timezone-related logic as simple as possible. For example, with Postgres we
|
||||||
|
recommend always storing dates in a Postgres `TIMESTAMP` (without time zone)
|
||||||
|
column, storing all such timestamps in UTC, and applying your time zone logic
|
||||||
|
separately using application logic. The `TIMESTAMP WITH TIME ZONE` column type in
|
||||||
|
Postgres stores its date in UTC anyhow, and applications that need to deal with
|
||||||
|
time zones typically require richer functionality than simply adjusting the time
|
||||||
|
zone to wherever the database happens to be hosted. Treat time zone related
|
||||||
|
logic as an application concern, and keep stored dates in UTC.
|
||||||
|
|
||||||
|
For example, for a developer using [`clojure.java-time`](https://github.com/dm3/clojure.java-time), saving `(java-time/instant)`
|
||||||
|
in a timestamp column (and doing any timezone adjustment elsewhere) is a good
|
||||||
|
way to minimize long term confusion.
|
||||||
|
|
||||||
|
> Original text contributed by [Denis McCarthy](https://github.com/denismccarthykerry); in addition: I generally recommend not only using UTC everywhere but also setting your database _and your servers_ to all be in the UTC timezone, to avoid the possibility of incorrect date/time translations -- Sean Corfield.
|
||||||
|
|
||||||
## MS SQL Server
|
## MS SQL Server
|
||||||
|
|
||||||
In MS SQL Server, the generated key from an insert comes back as `:GENERATED_KEYS`.
|
In MS SQL Server, the generated key from an insert comes back as `:GENERATED_KEYS`.
|
||||||
|
|
||||||
By default, you won't get table names as qualifiers with Microsoft's JDBC driver (you might with the jTDS driver -- I haven't tried that recently). See this [MSDN forum post about `.getTableName()`](https://social.msdn.microsoft.com/Forums/sqlserver/en-US/55e8cbb2-b11c-446e-93ab-dc30658caf99/resultsetmetadatagettablename-returns-instead-of-table-name) for details. According to one of the answers posted there, if you specify `:result-type` and `:concurrency` in the options for `execute!`, `execute-one!`, `plan`, or `prepare`, that will cause SQL Server to return table names for columns. `:result-type` needs to be `:scroll-sensitive` or `:scroll-insensitive` for this to work. `:concurrency` can be `:read-only` or `:updatable`.
|
By default, you won't get table names as qualifiers with Microsoft's JDBC driver (you might with the jTDS driver -- I haven't tried that recently). See this [MSDN forum post about `.getTableName()`](https://social.msdn.microsoft.com/Forums/sqlserver/en-US/55e8cbb2-b11c-446e-93ab-dc30658caf99/resultsetmetadatagettablename-returns-instead-of-table-name) for details. According to one of the answers posted there, if you specify `:result-type` and `:concurrency` in the options for `execute!`, `execute-one!`, `plan`, or `prepare`, that will cause SQL Server to return table names for columns. `:result-type` needs to be `:scroll-sensitive` or `:scroll-insensitive` for this to work. `:concurrency` can be `:read-only` or `:updatable`.
|
||||||
|
|
||||||
|
MS SQL Server supports execution of multiple statements when surrounded by `begin`/`end` and can return multiple result sets, when requested via `:multi-rs true` on `execute!`.
|
||||||
|
|
||||||
|
```clojure
|
||||||
|
(jdbc/execute! db-spec ["begin select * from table1; select * from table2; end"] {:multi-rs true})
|
||||||
|
;; vector of result sets:
|
||||||
|
=> [[{.. table1 row ..} {.. table1 row ..}]
|
||||||
|
[{.. table2 row ..} {.. table2 row ..} {..}]]
|
||||||
|
```
|
||||||
|
|
||||||
|
### Batch Statements
|
||||||
|
|
||||||
|
Even when using `next.jdbc/execute-batch!`, Microsoft's JDBC driver will still send multiple insert statements to the database unless you specify `:useBulkCopyForBatchInsert true` as part of the db-spec hash map or JDBC URL when the datasource is created.
|
||||||
|
|
||||||
|
To use this feature, your Microsoft JDBC driver should be at least version 9.2 and you can use only a limited set of data types. For example, if you use an `inst` to bulk insert a `smalldatetime` value, the driver will revert to the old (slow) behavior. For more details see [Using bulk copy API for batch insert operation](https://docs.microsoft.com/en-us/sql/connect/jdbc/use-bulk-copy-api-batch-insert-operation?view=sql-server-ver16) and [Release notes for JDBC drivers](https://docs.microsoft.com/en-us/sql/connect/jdbc/release-notes-for-the-jdbc-driver?view=sql-server-ver16).
|
||||||
|
|
||||||
## MySQL & MariaDB
|
## MySQL & MariaDB
|
||||||
|
|
||||||
In MySQL, the generated key from an insert comes back as `:GENERATED_KEY`. In MariaDB, the generated key from an insert comes back as `:insert_id`.
|
In MySQL, the generated key from an insert comes back as `:GENERATED_KEY`. In MariaDB, the generated key from an insert comes back as `:insert_id`.
|
||||||
|
|
@ -89,6 +179,23 @@ MySQL generally stores tables as files so they are case-sensitive if your O/S is
|
||||||
|
|
||||||
It's also worth noting that column comparisons are case-insensitive so `WHERE foo = 'BAR'` will match `"bar"` or `"BAR"` etc.
|
It's also worth noting that column comparisons are case-insensitive so `WHERE foo = 'BAR'` will match `"bar"` or `"BAR"` etc.
|
||||||
|
|
||||||
|
MySQL has a connection option, `:allowMultiQueries true`, that allows you to pass multiple SQL statements in a single operation and can return multiple result sets, when requested via `:multi-rs true`.
|
||||||
|
|
||||||
|
```clojure
|
||||||
|
(def db-spec {:dbtype "mysql" .. :allowMultiQueries true})
|
||||||
|
;; equivalent to allowMultiQueries=true in the JDBC URL
|
||||||
|
(jdbc/execute! db-spec ["select * from table1; select * from table2"] {:multi-rs true})
|
||||||
|
;; vector of result sets:
|
||||||
|
=> [[{.. table1 row ..} {.. table1 row ..}]
|
||||||
|
[{.. table2 row ..} {.. table2 row ..} {..}]]
|
||||||
|
```
|
||||||
|
|
||||||
|
Compare this with MS SQL Server above: MySQL does not support `begin`/`end` here. This is not the default behavior because allowing multiple statements in a single operation is generally considered a bit of a risk as it can make it easier for SQL injection attacks to be performed.
|
||||||
|
|
||||||
|
### Batch Statements
|
||||||
|
|
||||||
|
Even when using `next.jdbc/execute-batch!`, MySQL will still send multiple statements to the database unless you specify `:rewriteBatchedStatements true` as part of the db-spec hash map or JDBC URL when the datasource is created.
|
||||||
|
|
||||||
### Streaming Result Sets
|
### Streaming Result Sets
|
||||||
|
|
||||||
You should be able to get MySQL to stream very large result sets (when you are reducing over `plan`) by setting the following options:
|
You should be able to get MySQL to stream very large result sets (when you are reducing over `plan`) by setting the following options:
|
||||||
|
|
@ -101,14 +208,58 @@ You should be able to get MySQL to stream very large result sets (when you are r
|
||||||
|
|
||||||
Ah, dear old Oracle! Over the years of maintaining `clojure.java.jdbc` and now `next.jdbc`, I've had all sorts of bizarre and non-standard behavior reported from Oracle users. The main issue I'm aware of with `next.jdbc` is that Oracle's JDBC drivers all return an empty string from `ResultSetMetaData.getTableName()` so you won't get qualified keywords in the result set hash maps. Sorry!
|
Ah, dear old Oracle! Over the years of maintaining `clojure.java.jdbc` and now `next.jdbc`, I've had all sorts of bizarre and non-standard behavior reported from Oracle users. The main issue I'm aware of with `next.jdbc` is that Oracle's JDBC drivers all return an empty string from `ResultSetMetaData.getTableName()` so you won't get qualified keywords in the result set hash maps. Sorry!
|
||||||
|
|
||||||
|
An important performance issue to be aware of with Oracle's JDBC driver is that the default fetch size is just 10 records. If you are working with large datasets, you will
|
||||||
|
need to either specify `:prefetch` in your db-spec hash map with a suitable value (say 1,000 or larger), or specify `&prefetch=` in your JDBC URL string. If you want
|
||||||
|
to keep the default, you can change it on a per-statement basis by specifying `:fetch-size` as an option to `execute!` etc.
|
||||||
|
|
||||||
|
If you are using the 10g or later JDBC driver and you try to execute DDL statements that include SQL entities
|
||||||
|
that start with a `:` (such as `:new` or `:old`), they will be treated as bindable parameter references if
|
||||||
|
you use a `PreparedStatement` to execute them. Since that's the default for `execute!` etc, it means that you
|
||||||
|
will likely get an error like the following:
|
||||||
|
|
||||||
|
```
|
||||||
|
Missing IN or OUT parameter at index:: 1
|
||||||
|
```
|
||||||
|
|
||||||
|
You will need to use `next.jdbc.prepare/statement` to create a `Statement` object and then call `execute!`
|
||||||
|
on that to avoid this error. Don't forget to `.close` the `Statement` after `execute!` -- using `with-open`
|
||||||
|
is the best way to ensure the statement is properly closed after use.
|
||||||
|
|
||||||
## PostgreSQL
|
## PostgreSQL
|
||||||
|
|
||||||
When you use `:return-keys true` with `execute!` or `execute-one!` (or you use `insert!`), PostgreSQL returns the entire inserted row (unlike nearly every other database that just returns any generated keys!).
|
As you can see in this section (and elsewhere in this documentation), the
|
||||||
|
PostgreSQL JDBC driver has a number of interesting quirks and behaviors that
|
||||||
|
you need to be aware of. Although accessing PostgreSQL via JDBC is the most
|
||||||
|
common approach, there is also a non-JDBC Clojure/Java driver for PostgreSQL called
|
||||||
|
[PG2](https://github.com/igrishaev/pg2) which supports JSON operations natively
|
||||||
|
(see below for what's required for JDBC), as well as supporting Java Time natively
|
||||||
|
(see the section above about **Times, Dates, and Timezones**), and it is also
|
||||||
|
quite a bit faster than using JDBC.
|
||||||
|
|
||||||
If you have a query where you want to select where a column is `IN` a sequence of values, you can use `col = ANY(?)` with a native array of the values instead of `IN (?,?,?,,,?)` and a sequence of values.
|
When you use `:return-keys true` with `execute!` or `execute-one!` (or you use `insert!`), PostgreSQL returns the entire inserted row (unlike nearly every other database that just returns any generated keys!).
|
||||||
|
_[It seems to achieve this by the equivalent of automatically appending `RETURNING *` to your SQL, if necessary.]_
|
||||||
|
|
||||||
|
The default result set builder for `next.jdbc` is `as-qualified-maps` which
|
||||||
|
uses the `.getTableName()` method on `ResultSetMetaData` to qualify the
|
||||||
|
columns in the result set. While some database drivers have this information
|
||||||
|
on hand from the original SQL operation, PostgreSQL's JDBC driver does not
|
||||||
|
and it performs an extra SQL query to fetch table names the first time this
|
||||||
|
method is called for each query. If you want to avoid those extra queries,
|
||||||
|
and you can live with unqualified column names, you can use `as-unqualified-maps`
|
||||||
|
as the result set builder instead.
|
||||||
|
|
||||||
|
If you have a query where you want to select where a column is `IN` a sequence of values, you can use `col = ANY(?)` with a native array of the values instead of `IN (?,?,?,,,?)` and a sequence of values. **Be aware of
|
||||||
|
[PostgreSQL bug 17922](https://www.postgresql.org/message-id/flat/17922-1e2e0aeedd294424%40postgresql.org)
|
||||||
|
which can cause pathological performance when the array has a single element!**
|
||||||
|
If you think you might have a single-element array, consider using `UNNEST` and
|
||||||
|
`IN` instead.
|
||||||
|
|
||||||
What does this mean for your use of `next.jdbc`? In `plan`, `execute!`, and `execute-one!`, you can use `col = ANY(?)` in the SQL string and a single primitive array parameter, such as `(int-array [1 2 3 4])`. That means that in `next.jdbc.sql`'s functions that take a where clause (`find-by-keys`, `update!`, and `delete!`) you can specify `["col = ANY(?)" (int-array data)]` for what would be a `col IN (?,?,?,,,?)` where clause for other databases and require multiple values.
|
What does this mean for your use of `next.jdbc`? In `plan`, `execute!`, and `execute-one!`, you can use `col = ANY(?)` in the SQL string and a single primitive array parameter, such as `(int-array [1 2 3 4])`. That means that in `next.jdbc.sql`'s functions that take a where clause (`find-by-keys`, `update!`, and `delete!`) you can specify `["col = ANY(?)" (int-array data)]` for what would be a `col IN (?,?,?,,,?)` where clause for other databases and require multiple values.
|
||||||
|
|
||||||
|
### Batch Statements
|
||||||
|
|
||||||
|
Even when using `next.jdbc/execute-batch!`, PostgreSQL will still send multiple statements to the database unless you specify `:reWriteBatchedInserts true` as part of the db-spec hash map or JDBC URL when the datasource is created.
|
||||||
|
|
||||||
### Streaming Result Sets
|
### Streaming Result Sets
|
||||||
|
|
||||||
You can get PostgreSQL to stream very large result sets (when you are reducing over `plan`) by setting the following options:
|
You can get PostgreSQL to stream very large result sets (when you are reducing over `plan`) by setting the following options:
|
||||||
|
|
@ -141,8 +292,8 @@ create table example(
|
||||||
```clojure
|
```clojure
|
||||||
|
|
||||||
(execute-one! db-spec
|
(execute-one! db-spec
|
||||||
["insert into example(tags) values (?)"
|
["insert into example(tags) values (?)"
|
||||||
(into-array String ["tag1" "tag2"]))
|
(into-array String ["tag1" "tag2"])])
|
||||||
|
|
||||||
(execute-one! db-spec
|
(execute-one! db-spec
|
||||||
["select * from example limit 1"])
|
["select * from example limit 1"])
|
||||||
|
|
@ -150,22 +301,95 @@ create table example(
|
||||||
;; => #:example{:tags ["tag1" "tag2"]}
|
;; => #:example{:tags ["tag1" "tag2"]}
|
||||||
```
|
```
|
||||||
|
|
||||||
Note: PostgreSQL JDBC driver supports only 7 primitive array types, but not such as `UUID[]` -
|
> Note: PostgreSQL JDBC driver supports only 7 primitive array types, but not array types like `UUID[]` -
|
||||||
[PostgreSQL™ Extensions to the JDBC API](https://jdbc.postgresql.org/documentation/head/arrays.html).
|
[PostgreSQL™ Extensions to the JDBC API](https://jdbc.postgresql.org/documentation/server-prepare/#arrays).
|
||||||
|
|
||||||
### Working with Date and Time
|
### Working with Date and Time
|
||||||
|
|
||||||
By default, PostgreSQL's JDBC driver does not always perform conversions from `java.util.Date` to a SQL data type.
|
By default, PostgreSQL's JDBC driver does not always perform conversions from `java.util.Date` to a SQL data type.
|
||||||
You can enable this by extending `SettableParameter` to the appropriate (Java) types, or by simply requiring [`next.jdbc.date-time`](https://cljdoc.org/d/seancorfield/next.jdbc/CURRENT/api/next.jdbc.date-time).
|
You can enable this by extending `SettableParameter` to the appropriate (Java) types, or by simply requiring [`next.jdbc.date-time`](https://cljdoc.org/d/com.github.seancorfield/next.jdbc/CURRENT/api/next.jdbc.date-time).
|
||||||
|
|
||||||
In addition, if you want `java.time.Instant`, `java.time.LocalDate`, and `java.time.LocalDateTime` to be automatically converted to SQL data types, requiring `next.jdbc.date-time` will enable those as well (by extending `SettableParameter` for you).
|
In addition, if you want `java.time.Instant`, `java.time.LocalDate`, and `java.time.LocalDateTime` to be automatically converted to SQL data types, requiring `next.jdbc.date-time` will enable those as well (by extending `SettableParameter` for you).
|
||||||
|
|
||||||
`next.jdbc.date-time` also includes functions that you can call at application startup to extend `ReadableColumn` to either return `java.time.Instant` or `java.time.LocalDate`/`java.time.LocalDateTime` (as well as a function to restore the default behavior of returning `java.sql.Date` and `java.sql.Timestamp`).
|
`next.jdbc.date-time` also includes functions that you can call at application startup to extend `ReadableColumn` to either return `java.time.Instant` or `java.time.LocalDate`/`java.time.LocalDateTime` (as well as a function to restore the default behavior of returning `java.sql.Date` and `java.sql.Timestamp`).
|
||||||
|
|
||||||
|
### Working with Interval
|
||||||
|
|
||||||
|
Postgres has a nonstandard SQL type Interval that is implemented in the Postgres driver as the `org.postgresql.util.PGInterval` type.
|
||||||
|
In many cases you would want to work with intervals as `java.time.Duration` type by default.
|
||||||
|
|
||||||
|
You can support `Duration` instances by extending `SettableParameter` to the `java.time.Duration` type.
|
||||||
|
Conversely you can support converting PGIntervals back to Durations by extending `ReadableColumn` to the `org.postgresql.util.PGInterval` type.
|
||||||
|
|
||||||
|
```clojure
|
||||||
|
(import '[org.postgresql.util PGInterval])
|
||||||
|
(import '[java.sql PreparedStatement])
|
||||||
|
(import '[java.time Duration])
|
||||||
|
(require '[next.jdbc.result-set :as rs])
|
||||||
|
(require '[next.jdbc.prepare :as p])
|
||||||
|
|
||||||
|
(defn ->pg-interval
|
||||||
|
"Takes a Duration instance and converts it into a PGInterval
|
||||||
|
instance where the interval is created as a number of seconds."
|
||||||
|
[^java.time.Duration duration]
|
||||||
|
(doto (PGInterval.)
|
||||||
|
(.setSeconds (.getSeconds duration))))
|
||||||
|
|
||||||
|
(extend-protocol p/SettableParameter
|
||||||
|
;; Convert durations to PGIntervals before inserting into db
|
||||||
|
java.time.Duration
|
||||||
|
(set-parameter [^java.time.Duration v ^PreparedStatement s ^long i]
|
||||||
|
(.setObject s i (->pg-interval v))))
|
||||||
|
|
||||||
|
|
||||||
|
(defn <-pg-interval
|
||||||
|
"Takes a PGInterval instance and converts it into a Duration
|
||||||
|
instance. Ignore sub-second units."
|
||||||
|
[^org.postgresql.util.PGInterval interval]
|
||||||
|
(-> Duration/ZERO
|
||||||
|
(.plusSeconds (.getSeconds interval))
|
||||||
|
(.plusMinutes (.getMinutes interval))
|
||||||
|
(.plusHours (.getHours interval))
|
||||||
|
(.plusDays (.getDays interval))))
|
||||||
|
|
||||||
|
(extend-protocol rs/ReadableColumn
|
||||||
|
;; Convert PGIntervals back to durations
|
||||||
|
org.postgresql.util.PGInterval
|
||||||
|
(read-column-by-label [^org.postgresql.util.PGInterval v _]
|
||||||
|
(<-pg-interval v))
|
||||||
|
(read-column-by-index [^org.postgresql.util.PGInterval v _2 _3]
|
||||||
|
(<-pg-interval v)))
|
||||||
|
```
|
||||||
|
|
||||||
|
### Working with Enumerated Types
|
||||||
|
|
||||||
|
PostgreSQL has a SQL extension for defining enumerated types and the default `set-parameter` implementation will not work for those. You can use `next.jdbc.types/as-other` to wrap string values in a way that the JDBC driver will convert them to enumerated type values:
|
||||||
|
|
||||||
|
```sql
|
||||||
|
CREATE TYPE language AS ENUM('en','fr','de');
|
||||||
|
|
||||||
|
CREATE TABLE person (
|
||||||
|
...
|
||||||
|
speaks language NOT NULL,
|
||||||
|
...
|
||||||
|
);
|
||||||
|
```
|
||||||
|
|
||||||
|
```clojure
|
||||||
|
(require '[next.jdbc.sql :as sql]
|
||||||
|
'[next.jdbc.types :refer [as-other]])
|
||||||
|
|
||||||
|
(sql/insert! ds :person {:speaks (as-other "fr")})
|
||||||
|
```
|
||||||
|
|
||||||
|
That call produces a vector `["fr"]` with metadata that implements `set-parameter` such that `.setObject()` is called with `java.sql.Types/OTHER` which allows PostgreSQL to "convert" the string `"fr"` to the corresponding `language` enumerated type value.
|
||||||
|
|
||||||
### Working with JSON and JSONB
|
### Working with JSON and JSONB
|
||||||
|
|
||||||
PostgreSQL has good support for [storing, querying and manipulating JSON data](https://www.postgresql.org/docs/current/datatype-json.html). Basic Clojure data structures (lists, vectors, and maps) transform pretty well to JSON data. With a little help `next.jdbc` can automatically convert Clojure data to JSON and back for us.
|
PostgreSQL has good support for [storing, querying and manipulating JSON data](https://www.postgresql.org/docs/current/datatype-json.html). Basic Clojure data structures (lists, vectors, and maps) transform pretty well to JSON data. With a little help `next.jdbc` can automatically convert Clojure data to JSON and back for us.
|
||||||
|
|
||||||
|
> Note: some PostgreSQL JSONB operators have a `?` in them which conflicts with the standard parameter placeholder in SQL. You can write the JSONB operators by doubling up the `?`, e.g., `??|` instead of just `?|`. See [PostgreSQL JSONB operators](https://www.postgresql.org/docs/current/functions-json.html#FUNCTIONS-JSONB-OP-TABLE) for more detail.
|
||||||
|
|
||||||
First we define functions for JSON encoding and decoding. We're using [metosin/jsonista](https://github.com/metosin/jsonista) in these examples but you could use any JSON library, such as [Cheshire](https://github.com/dakrone/cheshire) or [clojure.data.json](https://github.com/clojure/data.json).
|
First we define functions for JSON encoding and decoding. We're using [metosin/jsonista](https://github.com/metosin/jsonista) in these examples but you could use any JSON library, such as [Cheshire](https://github.com/dakrone/cheshire) or [clojure.data.json](https://github.com/clojure/data.json).
|
||||||
|
|
||||||
```clojure
|
```clojure
|
||||||
|
|
@ -194,13 +418,12 @@ containing JSON:
|
||||||
(.setValue (->json x)))))
|
(.setValue (->json x)))))
|
||||||
|
|
||||||
(defn <-pgobject
|
(defn <-pgobject
|
||||||
"Transform PGobject containing `json` or `jsonb` value to Clojure
|
"Transform PGobject containing `json` or `jsonb` value to Clojure data."
|
||||||
data."
|
[^PGobject v]
|
||||||
[^org.postgresql.util.PGobject v]
|
|
||||||
(let [type (.getType v)
|
(let [type (.getType v)
|
||||||
value (.getValue v)]
|
value (.getValue v)]
|
||||||
(if (#{"jsonb" "json"} type)
|
(if (#{"jsonb" "json"} type)
|
||||||
(with-meta (<-json value) {:pgtype type})
|
(some-> value <-json (with-meta {:pgtype type}))
|
||||||
value)))
|
value)))
|
||||||
```
|
```
|
||||||
|
|
||||||
|
|
@ -303,6 +526,14 @@ And those columns are nicely transformed into Clojure data when querying:
|
||||||
=> [{:demo/id 1, :foo {:a 1}}]
|
=> [{:demo/id 1, :foo {:a 1}}]
|
||||||
```
|
```
|
||||||
|
|
||||||
|
#### Using HoneySQL with JSON and JSONB
|
||||||
|
|
||||||
|
If you are using HoneySQL to generate your SQL, there will be an inherent conflict
|
||||||
|
between the data structures you are intending HoneySQL to interpret -- as function calls
|
||||||
|
and SQL statements -- and the data structures you intend to treat as JSON. See
|
||||||
|
[General Reference > Working with JSON/JSONB (PostgreSQL)](https://cljdoc.org/d/com.github.seancorfield/honeysql/CURRENT/doc/getting-started/general-reference#working-with-jsonjsonb-postgresql)
|
||||||
|
in the HoneySQL documentation for more details.
|
||||||
|
|
||||||
#### JSON or JSONB?
|
#### JSON or JSONB?
|
||||||
|
|
||||||
* A `json` column stores JSON data as strings (reading and writing is fast but manipulation is slow, field order is preserved)
|
* A `json` column stores JSON data as strings (reading and writing is fast but manipulation is slow, field order is preserved)
|
||||||
|
|
@ -310,4 +541,60 @@ And those columns are nicely transformed into Clojure data when querying:
|
||||||
|
|
||||||
If you're unsure whether you want to use json or jsonb, use jsonb.
|
If you're unsure whether you want to use json or jsonb, use jsonb.
|
||||||
|
|
||||||
[<: Friendly SQL Functions](/doc/friendly-sql-functions.md) | [Result Set Builders :>](/doc/result-set-builders.md)
|
## SQLite
|
||||||
|
|
||||||
|
SQLite supports both `bool` and `bit` column types but, unlike pretty much every other database out there, it yields `0` or `1` as the column value instead of `false` or `true`. This means that with SQLite alone, you can't just rely on `bool` or `bit` columns being treated as truthy/falsey values in Clojure.
|
||||||
|
|
||||||
|
You can work around this using a builder that handles reading the column directly as a `Boolean`:
|
||||||
|
|
||||||
|
```clojure
|
||||||
|
(import java.sql ResultSet ResultSetMetaData)
|
||||||
|
|
||||||
|
(jdbc/execute! ds ["select * from some_table"]
|
||||||
|
{:builder-fn (rs/builder-adapter
|
||||||
|
rs/as-maps
|
||||||
|
(fn [builder ^ResultSet rs ^Integer i]
|
||||||
|
(let [rsm ^ResultSetMetaData (:rsmeta builder)]
|
||||||
|
(rs/read-column-by-index
|
||||||
|
(if (#{"BIT" "BOOL" "BOOLEAN"} (.getColumnTypeName rsm i))
|
||||||
|
(.getBoolean rs i)
|
||||||
|
(.getObject rs i))
|
||||||
|
rsm
|
||||||
|
i))))})
|
||||||
|
```
|
||||||
|
|
||||||
|
If you are using `plan`, you'll most likely be accessing columns by just the label (as a keyword) and avoiding the result set building machinery completely. In such cases, you'll still get `bool` and `bit` columns back as `0` or `1` and you'll need to explicitly convert them on a per-column basis since you should know which columns need converting:
|
||||||
|
|
||||||
|
```clojure
|
||||||
|
(reduce (fn [acc row]
|
||||||
|
(conj acc (-> (select-keys row [:name :is_active])
|
||||||
|
(update :is_active pos?))))
|
||||||
|
[]
|
||||||
|
(jdbc/plan ds ["select * from some_table"]))
|
||||||
|
```
|
||||||
|
|
||||||
|
See also [`datafy`, `nav`, and `:schema` > **SQLite**](/doc/datafy-nav-and-schema.md#sqlite)
|
||||||
|
for additional caveats on the `next.jdbc.datafy` namespace when using SQLite.
|
||||||
|
|
||||||
|
## XTDB
|
||||||
|
|
||||||
|
XTDB is a bitemporal, schemaless, document-oriented database that presents
|
||||||
|
itself as a PostgreSQL-compatible database, in terms of JDBC. It has a number
|
||||||
|
of SQL extensions, and some differences from common JDBC behavior. See
|
||||||
|
its documentation for details:
|
||||||
|
* [SQL Overview](https://docs.xtdb.com/quickstart/sql-overview.html)
|
||||||
|
* [SQL Queries](https://docs.xtdb.com/reference/main/sql/queries.html)
|
||||||
|
* [SQL Transactions/DML](https://docs.xtdb.com/reference/main/sql/txs.html)
|
||||||
|
|
||||||
|
`next.jdbc` officially supports XTDB as of 1.3.981 but there are some caveats:
|
||||||
|
* You can use `:dbtype "xtdb"` to identify XTDB as the database type.
|
||||||
|
* You must specify `:dbname "xtdb"` in the db-spec hash map or JDBC URL.
|
||||||
|
* XTDB does not support `.getTableName()` so you always get unqualified column names in result sets.
|
||||||
|
* The primary key on all tables is `_id` and it must be specified in all `INSERT` operations (no auto-generated keys).
|
||||||
|
* That means that `next.jdbc.sql/get-by-id` requires the 5-argument call, so that you can specify the `pk-name` as `:_id` and provide an options map.
|
||||||
|
* If you want to use `next.jdbc`'s built-in `datafy` / `nav` functionality, you need to explicitly specify `:schema-opts {:pk "_id"}` to override the default assumption of `id` as the primary key.
|
||||||
|
* DML operations (`INSERT`, `UPDATE`, and `DELETE`) are essentially asynchronous in XTDB and therefore can not return an accurate `next.jdbc/update-count` (so it is always 0).
|
||||||
|
* `INSERT` operations do not return the inserted row (like PostgreSQL does) nor even the provided `_id` primary key.
|
||||||
|
* That means that the `next.jdbc.defer` namespace functions do not work well with XTDB.
|
||||||
|
* `next.jdbc.sql/insert-multi!` returns an empty vector for XTDB (since `INSERT` operations do not return keys or update counts).
|
||||||
|
* The `next.jdbc.result-set/*-kebab-maps` functions (and associated `next.jdbc/*-kebab-opts` option maps) cause leading `_` to be stripped from column names and cannot be used with XTDB (this is inherent in the underlying library that `next.jdbc` relies on -- you can of course write your own custom result set builder function to handle this).
|
||||||
|
|
|
||||||
|
|
@ -8,7 +8,11 @@ Although `(transact transactable f)` is available, it is expected that you will
|
||||||
|
|
||||||
By default, all connections that `next.jdbc` creates are automatically committable, i.e., as each operation is performed, the effect is committed to the database directly before the next operation is performed. Any exceptions only cause the current operation to be aborted -- any prior operations have already been committed.
|
By default, all connections that `next.jdbc` creates are automatically committable, i.e., as each operation is performed, the effect is committed to the database directly before the next operation is performed. Any exceptions only cause the current operation to be aborted -- any prior operations have already been committed.
|
||||||
|
|
||||||
It is possible to tell `next.jdbc` to create connections that do not automatically commit operations: pass `{:auto-commit false}` as part of the options map to anything that creates a connection (including `get-connection` itself). You can then decide when to commit or rollback by calling `.commit` or `.rollback` on the connection object itself. You can also create save points (`(.setSavePoint con)`, `(.setSavePoint con name)`) and rollback to them (`(.rollback con save-point)`). You can also change the auto-commit state of an open connection at any time (`(.setAutoCommit con on-off)`).
|
It is possible to tell `next.jdbc` to create connections that do not automatically commit operations: pass `{:auto-commit false}` as part of the options map to anything that creates a connection (including `get-connection` itself). You can then decide when to commit or rollback by calling `.commit` or `.rollback` on the connection object itself. You can also create save points (`(.setSavepoint con)`, `(.setSavepoint con name)`) and rollback to them (`(.rollback con save-point)`). You can also change the auto-commit state of an open connection at any time (`(.setAutoCommit con on-off)`).
|
||||||
|
|
||||||
|
This is the machinery behind "transactions": one or more operations on a
|
||||||
|
`Connection` that are not automatically committed, and which can be rolled back
|
||||||
|
or committed explicitly at any point.
|
||||||
|
|
||||||
## Automatic Commit & Rollback
|
## Automatic Commit & Rollback
|
||||||
|
|
||||||
|
|
@ -35,6 +39,14 @@ You can also provide an options map as the third element of the binding vector (
|
||||||
|
|
||||||
The latter can be particularly useful in tests, to run a series of SQL operations during a test and then roll them all back at the end.
|
The latter can be particularly useful in tests, to run a series of SQL operations during a test and then roll them all back at the end.
|
||||||
|
|
||||||
|
If you use `next.jdbc/with-transaction` (or `next.jdbc/transact`), then
|
||||||
|
`next.jdbc` keeps track of whether a "transaction" is in progress or not, and
|
||||||
|
you can call `next.jdbc/active-tx?` to determine that, in your own code, in
|
||||||
|
case you want to write code that behaves differently inside or outside a
|
||||||
|
transaction.
|
||||||
|
|
||||||
|
> Note: `active-tx?` only knows about `next.jdbc` transactions -- it cannot track any transactions that you create yourself using the underlying JDBC `Connection`. In addition, this is a per-thread "global" setting and not related to just a single connection, so you can't use this setting if you are working with multiple databases in the same dynamic thread context (`binding`).
|
||||||
|
|
||||||
## Manual Rollback Inside a Transaction
|
## Manual Rollback Inside a Transaction
|
||||||
|
|
||||||
Instead of throwing an exception (which will propagate through `with-transaction` and therefore provide no result), you can also explicitly rollback if you want to return a result in that case:
|
Instead of throwing an exception (which will propagate through `with-transaction` and therefore provide no result), you can also explicitly rollback if you want to return a result in that case:
|
||||||
|
|
@ -53,20 +65,20 @@ Instead of throwing an exception (which will propagate through `with-transaction
|
||||||
|
|
||||||
In general, transactions are per-connection and do not nest in JDBC. If you nest calls to `with-transaction` using a `DataSource` argument (or a db-spec) then you will get separate connections inside each invocation and the transactions will be independent, as permitted by the isolation level.
|
In general, transactions are per-connection and do not nest in JDBC. If you nest calls to `with-transaction` using a `DataSource` argument (or a db-spec) then you will get separate connections inside each invocation and the transactions will be independent, as permitted by the isolation level.
|
||||||
|
|
||||||
If you nest such calls passing a `Connection` instead, the inner call will commit (or rollback) all operations on that connection up to that point -- including any performed in the outer call, prior to entering the inner call. The outer call will then commit (or rollback) any additional operations within its scope. This will be confusing at best and most likely buggy behavior!
|
If you nest such calls passing a `Connection` instead, the inner call will commit (or rollback) all operations on that connection up to that point -- including any performed in the outer call, prior to entering the inner call. The outer call will then commit (or rollback) any additional operations within its scope. This will be confusing at best and most likely buggy behavior! See below for ways to exercise more control over this behavior.
|
||||||
|
|
||||||
If you want the ability to selectively roll back certain groups of operations inside a transaction, you can use named or unnamed save points:
|
If you want the ability to selectively roll back certain groups of operations inside a transaction, you can use named or unnamed save points:
|
||||||
|
|
||||||
```clojure
|
```clojure
|
||||||
(jdbc/with-transaction [tx my-datasource]
|
(jdbc/with-transaction [tx my-datasource]
|
||||||
(let [result (jdbc/execute! tx ...) ; op A
|
(let [result (jdbc/execute! tx ...) ; op A
|
||||||
sp1 (.setSavepoint)] ; unnamed save point
|
sp1 (.setSavepoint tx)] ; unnamed save point
|
||||||
|
|
||||||
(jdbc/execute! tx ...) ; op B
|
(jdbc/execute! tx ...) ; op B
|
||||||
|
|
||||||
(when ... (.rollback tx sp1)) ; just rolls back op B
|
(when ... (.rollback tx sp1)) ; just rolls back op B
|
||||||
|
|
||||||
(let [sp2 (.setSavepoint "two")] ; named save point
|
(let [sp2 (.setSavepoint tx "two")] ; named save point
|
||||||
|
|
||||||
(jdbc/execute! tx ...) ; op C
|
(jdbc/execute! tx ...) ; op C
|
||||||
|
|
||||||
|
|
@ -76,4 +88,35 @@ If you want the ability to selectively roll back certain groups of operations in
|
||||||
;; (and ops B & C if they weren't rolled back above)
|
;; (and ops B & C if they weren't rolled back above)
|
||||||
```
|
```
|
||||||
|
|
||||||
[<: Prepared Statements](/doc/prepared-statements.md) | [All The Options :>](/doc/all-the-options.md)
|
### Nesting Transactions
|
||||||
|
|
||||||
|
As noted above, transactions do not nest in JDBC and `next.jdbc`'s default behavior is to allow you
|
||||||
|
to overlap transactions (i.e., nested calls to `with-transaction`) and assume you know what you are
|
||||||
|
doing, although it would generally be buggy programming to do so.
|
||||||
|
|
||||||
|
By contrast, `clojure.java.jdbc` allowed the nested calls but simply _ignored_ the inner calls and
|
||||||
|
behaved as it you had only the outermost, top-level transaction. That allowed for buggy programming
|
||||||
|
too, in a different way, but could be convenient if you wanted to override any transaction behavior
|
||||||
|
in called code, as you might wish to do with a test fixture that set up and rolled back a
|
||||||
|
transaction at the top-level -- you would just silently lose the effects of any (nested)
|
||||||
|
transactions in the code under test.
|
||||||
|
|
||||||
|
`next.jdbc` provides a way to control the behavior via a public, dynamic Var:
|
||||||
|
|
||||||
|
* `next.jdbc.transaction/*nested-tx*` is initially set to `:allow` which allows nested calls but makes them overlap (as described above),
|
||||||
|
* `(binding [next.jdbc.transaction/*nested-tx* :ignore] ...)` provides the same behavior as `clojure.java.jdbc` where nested calls are essentially ignored and only the outermost transaction takes effect,
|
||||||
|
* `(binding [next.jdbc.transaction/*nested-tx* :prohibit] ...)` will cause any attempt to start a nested transaction to throw an exception instead; this could be a useful way to detect the potentially buggy behavior described above (for either `:allow` or `:ignore`).
|
||||||
|
|
||||||
|
> Note: this is a per-thread "global" setting and not related to just a single connection, so you can't use this setting if you are working with multiple databases in the same dynamic thread context (`binding`).
|
||||||
|
|
||||||
|
### `with-options`
|
||||||
|
|
||||||
|
If you are using `with-options` to produce wrapped connectables / transactables,
|
||||||
|
it's important to be aware that `with-transaction` produces a bare Java
|
||||||
|
`java.sql.Connection` object that cannot have options -- but does allow direct
|
||||||
|
interop. If you want to use `with-options` with `with-transaction`, you must
|
||||||
|
either rewrap the `Connection` with a nested call to `with-options` or,
|
||||||
|
as of 1.3.894, you can use `with-transaction+options` which will automatically
|
||||||
|
rewrap the `Connection` in a new connectable along with the options from the
|
||||||
|
original transactable. Be aware that you cannot use Java interop on this
|
||||||
|
wrapped connectable.
|
||||||
|
|
|
||||||
21
docker-compose.yml
Normal file
21
docker-compose.yml
Normal file
|
|
@ -0,0 +1,21 @@
|
||||||
|
services:
|
||||||
|
mysql:
|
||||||
|
image: percona:5.7
|
||||||
|
environment:
|
||||||
|
- MYSQL_ROOT_PASSWORD
|
||||||
|
ports:
|
||||||
|
- "3306:3306"
|
||||||
|
command:
|
||||||
|
[--character-set-server=utf8mb4, --collation-server=utf8mb4_unicode_ci]
|
||||||
|
sqlserver:
|
||||||
|
image: mcr.microsoft.com/mssql/server:2022-latest
|
||||||
|
environment:
|
||||||
|
ACCEPT_EULA: Y
|
||||||
|
MSSQL_SA_PASSWORD: Str0ngP4ssw0rd
|
||||||
|
ports:
|
||||||
|
- "1433:1433"
|
||||||
|
xtdb:
|
||||||
|
image: ghcr.io/xtdb/xtdb:latest
|
||||||
|
# pull_policy: always
|
||||||
|
ports:
|
||||||
|
- "5432:5432"
|
||||||
59
pom.xml
59
pom.xml
|
|
@ -1,59 +0,0 @@
|
||||||
<?xml version="1.0" encoding="UTF-8"?>
|
|
||||||
<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
|
|
||||||
<modelVersion>4.0.0</modelVersion>
|
|
||||||
<groupId>seancorfield</groupId>
|
|
||||||
<artifactId>next.jdbc</artifactId>
|
|
||||||
<version>1.0.445</version>
|
|
||||||
<name>next.jdbc</name>
|
|
||||||
<description>The next generation of clojure.java.jdbc: a new low-level Clojure wrapper for JDBC-based access to databases.</description>
|
|
||||||
<url>https://github.com/seancorfield/next-jdbc</url>
|
|
||||||
<licenses>
|
|
||||||
<license>
|
|
||||||
<name>Eclipse Public License</name>
|
|
||||||
<url>http://www.eclipse.org/legal/epl-v10.html</url>
|
|
||||||
</license>
|
|
||||||
</licenses>
|
|
||||||
<developers>
|
|
||||||
<developer>
|
|
||||||
<name>Sean Corfield</name>
|
|
||||||
</developer>
|
|
||||||
</developers>
|
|
||||||
<scm>
|
|
||||||
<url>https://github.com/seancorfield/next-jdbc</url>
|
|
||||||
<connection>scm:git:git://github.com/seancorfield/next-jdbc.git</connection>
|
|
||||||
<developerConnection>scm:git:ssh://git@github.com/seancorfield/next-jdbc.git</developerConnection>
|
|
||||||
<tag>v1.0.445</tag>
|
|
||||||
</scm>
|
|
||||||
<dependencies>
|
|
||||||
<dependency>
|
|
||||||
<groupId>org.clojure</groupId>
|
|
||||||
<artifactId>clojure</artifactId>
|
|
||||||
<version>1.10.1</version>
|
|
||||||
</dependency>
|
|
||||||
<dependency>
|
|
||||||
<groupId>org.clojure</groupId>
|
|
||||||
<artifactId>java.data</artifactId>
|
|
||||||
<version>1.0.64</version>
|
|
||||||
</dependency>
|
|
||||||
</dependencies>
|
|
||||||
<build>
|
|
||||||
<sourceDirectory>src</sourceDirectory>
|
|
||||||
</build>
|
|
||||||
<repositories>
|
|
||||||
<repository>
|
|
||||||
<id>clojars</id>
|
|
||||||
<url>https://repo.clojars.org/</url>
|
|
||||||
</repository>
|
|
||||||
<repository>
|
|
||||||
<id>sonatype</id>
|
|
||||||
<url>https://oss.sonatype.org/content/repositories/snapshots/</url>
|
|
||||||
</repository>
|
|
||||||
</repositories>
|
|
||||||
<distributionManagement>
|
|
||||||
<repository>
|
|
||||||
<id>clojars</id>
|
|
||||||
<name>Clojars repository</name>
|
|
||||||
<url>https://clojars.org/repo</url>
|
|
||||||
</repository>
|
|
||||||
</distributionManagement>
|
|
||||||
</project>
|
|
||||||
|
|
@ -0,0 +1,2 @@
|
||||||
|
Args=--initialize-at-build-time=clojure,next,camel_snake_kebab \
|
||||||
|
--initialize-at-build-time=java.sql.SQLException
|
||||||
|
|
@ -0,0 +1,8 @@
|
||||||
|
{:hooks
|
||||||
|
{:analyze-call
|
||||||
|
{next.jdbc/with-transaction
|
||||||
|
hooks.com.github.seancorfield.next-jdbc/with-transaction
|
||||||
|
next.jdbc/with-transaction+options
|
||||||
|
hooks.com.github.seancorfield.next-jdbc/with-transaction+options}}
|
||||||
|
:lint-as {next.jdbc/on-connection clojure.core/with-open
|
||||||
|
next.jdbc/on-connection+options clojure.core/with-open}}
|
||||||
|
|
@ -0,0 +1,34 @@
|
||||||
|
(ns hooks.com.github.seancorfield.next-jdbc
|
||||||
|
(:require [clj-kondo.hooks-api :as api]))
|
||||||
|
|
||||||
|
(defn with-transaction
|
||||||
|
"Expands (with-transaction [tx expr opts] body)
|
||||||
|
to (let [tx expr] opts body) per clj-kondo examples."
|
||||||
|
[{:keys [:node]}]
|
||||||
|
(let [[binding-vec & body] (rest (:children node))
|
||||||
|
[sym val opts] (:children binding-vec)]
|
||||||
|
(when-not (and sym val)
|
||||||
|
(throw (ex-info "No sym and val provided" {})))
|
||||||
|
(let [new-node (api/list-node
|
||||||
|
(list*
|
||||||
|
(api/token-node 'let)
|
||||||
|
(api/vector-node [sym val])
|
||||||
|
opts
|
||||||
|
body))]
|
||||||
|
{:node new-node})))
|
||||||
|
|
||||||
|
(defn with-transaction+options
|
||||||
|
"Expands (with-transaction+options [tx expr opts] body)
|
||||||
|
to (let [tx expr] opts body) per clj-kondo examples."
|
||||||
|
[{:keys [:node]}]
|
||||||
|
(let [[binding-vec & body] (rest (:children node))
|
||||||
|
[sym val opts] (:children binding-vec)]
|
||||||
|
(when-not (and sym val)
|
||||||
|
(throw (ex-info "No sym and val provided" {})))
|
||||||
|
(let [new-node (api/list-node
|
||||||
|
(list*
|
||||||
|
(api/token-node 'let)
|
||||||
|
(api/vector-node [sym val])
|
||||||
|
opts
|
||||||
|
body))]
|
||||||
|
{:node new-node})))
|
||||||
34
run-tests.clj
Executable file
34
run-tests.clj
Executable file
|
|
@ -0,0 +1,34 @@
|
||||||
|
#!/usr/bin/env bb
|
||||||
|
|
||||||
|
(require '[babashka.process :as p])
|
||||||
|
|
||||||
|
(defn- run-tests [env v]
|
||||||
|
(when v (println "\nTesting Clojure" v))
|
||||||
|
(let [{:keys [exit]}
|
||||||
|
(p/shell {:extra-env env}
|
||||||
|
"clojure"
|
||||||
|
(str "-M"
|
||||||
|
(when v (str ":" v))
|
||||||
|
":test:runner"
|
||||||
|
;; jdk21+ adds xtdb:
|
||||||
|
(when (System/getenv "NEXT_JDBC_TEST_XTDB")
|
||||||
|
":jdk21")
|
||||||
|
;; to suppress native access warnings on JDK24:
|
||||||
|
":jdk24")
|
||||||
|
"--output" "dots")]
|
||||||
|
(when-not (zero? exit)
|
||||||
|
(System/exit exit))))
|
||||||
|
|
||||||
|
(let [maria? (some #(= "maria" %) *command-line-args*)
|
||||||
|
xtdb? (some #(= "xtdb" %) *command-line-args*)
|
||||||
|
all? (some #(= "all" %) *command-line-args*)
|
||||||
|
env
|
||||||
|
(cond-> {"NEXT_JDBC_TEST_MSSQL" "yes"
|
||||||
|
"NEXT_JDBC_TEST_MYSQL" "yes"
|
||||||
|
"MSSQL_SA_PASSWORD" "Str0ngP4ssw0rd"}
|
||||||
|
maria?
|
||||||
|
(assoc "NEXT_JDBC_TEST_MARIADB" "yes")
|
||||||
|
xtdb?
|
||||||
|
(assoc "NEXT_JDBC_TEST_XTDB" "yes"))]
|
||||||
|
(doseq [v (if all? ["1.10" "1.11" "1.12"] [nil])]
|
||||||
|
(run-tests env v)))
|
||||||
|
|
@ -1,4 +1,4 @@
|
||||||
;; copyright (c) 2018-2020 Sean Corfield, all rights reserved
|
;; copyright (c) 2018-2025 Sean Corfield, all rights reserved
|
||||||
|
|
||||||
(ns next.jdbc
|
(ns next.jdbc
|
||||||
"The public API of the next generation java.jdbc library.
|
"The public API of the next generation java.jdbc library.
|
||||||
|
|
@ -14,8 +14,8 @@
|
||||||
* `get-connection` -- given a connectable, obtain a new `java.sql.Connection`
|
* `get-connection` -- given a connectable, obtain a new `java.sql.Connection`
|
||||||
from it and return that,
|
from it and return that,
|
||||||
* `plan` -- given a connectable and SQL + parameters or a statement,
|
* `plan` -- given a connectable and SQL + parameters or a statement,
|
||||||
return a reducible that, when reduced will execute the SQL and consume
|
return a reducible that, when reduced (with an initial value) will
|
||||||
the `ResultSet` produced,
|
execute the SQL and consume the `ResultSet` produced,
|
||||||
* `execute!` -- given a connectable and SQL + parameters or a statement,
|
* `execute!` -- given a connectable and SQL + parameters or a statement,
|
||||||
execute the SQL, consume the `ResultSet` produced, and return a vector
|
execute the SQL, consume the `ResultSet` produced, and return a vector
|
||||||
of hash maps representing the rows (@1); this can be datafied to allow
|
of hash maps representing the rows (@1); this can be datafied to allow
|
||||||
|
|
@ -26,6 +26,10 @@
|
||||||
return a hash map representing that row; this can be datafied to allow
|
return a hash map representing that row; this can be datafied to allow
|
||||||
navigation of foreign keys into other tables (either by convention or
|
navigation of foreign keys into other tables (either by convention or
|
||||||
via a schema definition),
|
via a schema definition),
|
||||||
|
* `execute-batch!` -- given a `PreparedStatement` and groups of parameters,
|
||||||
|
execute the statement in batch mode (via `.executeBatch`); given a
|
||||||
|
connectable, a SQL string, and groups of parameters, create a new
|
||||||
|
`PreparedStatement` from the SQL and execute it in batch mode.
|
||||||
* `prepare` -- given a `Connection` and SQL + parameters, construct a new
|
* `prepare` -- given a `Connection` and SQL + parameters, construct a new
|
||||||
`PreparedStatement`; in general this should be used with `with-open`,
|
`PreparedStatement`; in general this should be used with `with-open`,
|
||||||
* `transact` -- the functional implementation of `with-transaction`,
|
* `transact` -- the functional implementation of `with-transaction`,
|
||||||
|
|
@ -57,11 +61,15 @@
|
||||||
|
|
||||||
In addition, wherever a `PreparedStatement` is created, you may specify:
|
In addition, wherever a `PreparedStatement` is created, you may specify:
|
||||||
* `:return-keys` -- either `true` or a vector of key names to return."
|
* `:return-keys` -- either `true` or a vector of key names to return."
|
||||||
(:require [next.jdbc.connection]
|
(:require [camel-snake-kebab.core :refer [->kebab-case ->snake_case]]
|
||||||
[next.jdbc.prepare]
|
[next.jdbc.connection]
|
||||||
|
[next.jdbc.default-options :as opts]
|
||||||
|
[next.jdbc.prepare :as prepare]
|
||||||
[next.jdbc.protocols :as p]
|
[next.jdbc.protocols :as p]
|
||||||
[next.jdbc.result-set]
|
[next.jdbc.result-set :as rs]
|
||||||
[next.jdbc.transaction]))
|
[next.jdbc.sql-logging :as logger]
|
||||||
|
[next.jdbc.transaction :as tx])
|
||||||
|
(:import (java.sql PreparedStatement)))
|
||||||
|
|
||||||
(set! *warn-on-reflection* true)
|
(set! *warn-on-reflection* true)
|
||||||
|
|
||||||
|
|
@ -84,7 +92,8 @@
|
||||||
can be `:none` which means the host/port segment of the JDBC URL should
|
can be `:none` which means the host/port segment of the JDBC URL should
|
||||||
be omitted entirely (for 'local' databases)
|
be omitted entirely (for 'local' databases)
|
||||||
* `:port` -- the port for the database connection (the default is database-
|
* `:port` -- the port for the database connection (the default is database-
|
||||||
specific -- see below)
|
specific -- see below); can be `:none` which means the port segment of
|
||||||
|
the JDBC URL should be omitted entirely
|
||||||
* `:classname` -- if you need to override the default for the `:dbtype`
|
* `:classname` -- if you need to override the default for the `:dbtype`
|
||||||
(or you want to use a database that next.jdbc does not know about!)
|
(or you want to use a database that next.jdbc does not know about!)
|
||||||
|
|
||||||
|
|
@ -95,6 +104,11 @@
|
||||||
the database name in the JDBC URL
|
the database name in the JDBC URL
|
||||||
* `:host-prefix` -- override the `//` that normally precedes the IP
|
* `:host-prefix` -- override the `//` that normally precedes the IP
|
||||||
address or hostname in the JDBC URL
|
address or hostname in the JDBC URL
|
||||||
|
* `:property-separator` -- an optional string that can be used to override
|
||||||
|
the separators used in `jdbc-url` for the properties (after the initial
|
||||||
|
JDBC URL portion); by default `?` and `&` are used to build JDBC URLs
|
||||||
|
with properties; for SQL Server drivers (both MS and jTDS)
|
||||||
|
`:property-separator \";\"` is used
|
||||||
|
|
||||||
In the second format, this key is required:
|
In the second format, this key is required:
|
||||||
* `:jdbcUrl` -- a JDBC URL string
|
* `:jdbcUrl` -- a JDBC URL string
|
||||||
|
|
@ -105,6 +119,7 @@
|
||||||
Database types supported (for `:dbtype`), and their defaults:
|
Database types supported (for `:dbtype`), and their defaults:
|
||||||
* `derby` -- `org.apache.derby.jdbc.EmbeddedDriver` -- also pass `:create true`
|
* `derby` -- `org.apache.derby.jdbc.EmbeddedDriver` -- also pass `:create true`
|
||||||
if you want the database to be automatically created
|
if you want the database to be automatically created
|
||||||
|
* `duckdb` -- `org.duckdb.DuckDBDriver` -- embedded database
|
||||||
* `h2` -- `org.h2.Driver` -- for an on-disk database
|
* `h2` -- `org.h2.Driver` -- for an on-disk database
|
||||||
* `h2:mem` -- `org.h2.Driver` -- for an in-memory database
|
* `h2:mem` -- `org.h2.Driver` -- for an in-memory database
|
||||||
* `hsqldb`, `hsql` -- `org.hsqldb.jdbcDriver`
|
* `hsqldb`, `hsql` -- `org.hsqldb.jdbcDriver`
|
||||||
|
|
@ -122,9 +137,10 @@
|
||||||
* `sqlserver`, `mssql` -- `com.microsoft.sqlserver.jdbc.SQLServerDriver` -- `1433`
|
* `sqlserver`, `mssql` -- `com.microsoft.sqlserver.jdbc.SQLServerDriver` -- `1433`
|
||||||
* `timesten:client` -- `com.timesten.jdbc.TimesTenClientDriver`
|
* `timesten:client` -- `com.timesten.jdbc.TimesTenClientDriver`
|
||||||
* `timesten:direct` -- `com.timesten.jdbc.TimesTenDriver`
|
* `timesten:direct` -- `com.timesten.jdbc.TimesTenDriver`
|
||||||
|
* `xtdb` -- `xtdb.jdbc.Driver` -- an XTDB wrapper around `postgresql`
|
||||||
|
|
||||||
For more details about `:dbtype` and `:classname` values, see:
|
For more details about `:dbtype` and `:classname` values, see:
|
||||||
https://cljdoc.org/d/seancorfield/next.jdbc/CURRENT/api/next.jdbc.connection#dbtypes"
|
https://cljdoc.org/d/com.github.seancorfield/next.jdbc/CURRENT/api/next.jdbc.connection#dbtypes"
|
||||||
^javax.sql.DataSource
|
^javax.sql.DataSource
|
||||||
[spec]
|
[spec]
|
||||||
(p/get-datasource spec))
|
(p/get-datasource spec))
|
||||||
|
|
@ -143,13 +159,31 @@
|
||||||
and applies the `:auto-commit` and/or `:read-only` options, if provided.
|
and applies the `:auto-commit` and/or `:read-only` options, if provided.
|
||||||
|
|
||||||
If you call `get-connection` on anything else, it will call `get-datasource`
|
If you call `get-connection` on anything else, it will call `get-datasource`
|
||||||
first to try to get a `DataSource`, and then call `get-connection` on that."
|
first to try to get a `DataSource`, and then call `get-connection` on that.
|
||||||
|
|
||||||
|
If you want different per-connection username/password values, you can
|
||||||
|
either put `:user` and `:password` into the `opts` hash map or pass them
|
||||||
|
as positional arguments."
|
||||||
(^java.sql.Connection
|
(^java.sql.Connection
|
||||||
[spec]
|
[spec]
|
||||||
(p/get-connection spec {}))
|
(p/get-connection spec {}))
|
||||||
(^java.sql.Connection
|
(^java.sql.Connection
|
||||||
[spec opts]
|
[spec opts]
|
||||||
(p/get-connection spec opts)))
|
(p/get-connection spec opts))
|
||||||
|
(^java.sql.Connection
|
||||||
|
[spec user password]
|
||||||
|
(p/get-connection spec {:user user :password password}))
|
||||||
|
(^java.sql.Connection
|
||||||
|
[spec user password opts]
|
||||||
|
(p/get-connection spec (assoc opts :user user :password password))))
|
||||||
|
|
||||||
|
(defn- ensure-sql-params [sql-params]
|
||||||
|
(when-not (or (nil? sql-params)
|
||||||
|
(and (seqable? sql-params)
|
||||||
|
(or (empty? sql-params)
|
||||||
|
(string? (first sql-params)))))
|
||||||
|
(throw (ex-info "sql-params should be a vector containing a SQL string and any parameters"
|
||||||
|
{:sql-params sql-params}))))
|
||||||
|
|
||||||
(defn prepare
|
(defn prepare
|
||||||
"Given a connection to a database, and a vector containing SQL and any
|
"Given a connection to a database, and a vector containing SQL and any
|
||||||
|
|
@ -165,16 +199,24 @@
|
||||||
See the list of options above (in the namespace docstring) for what can
|
See the list of options above (in the namespace docstring) for what can
|
||||||
be passed to prepare."
|
be passed to prepare."
|
||||||
(^java.sql.PreparedStatement
|
(^java.sql.PreparedStatement
|
||||||
[connection sql-params]
|
[connection sql-params]
|
||||||
(p/prepare connection sql-params {}))
|
(ensure-sql-params sql-params)
|
||||||
|
(p/prepare connection sql-params {}))
|
||||||
(^java.sql.PreparedStatement
|
(^java.sql.PreparedStatement
|
||||||
[connection sql-params opts]
|
[connection sql-params opts]
|
||||||
(p/prepare connection sql-params opts)))
|
(ensure-sql-params sql-params)
|
||||||
|
(p/prepare connection sql-params opts)))
|
||||||
|
|
||||||
(defn plan
|
(defn plan
|
||||||
"General SQL execution function (for working with result sets).
|
"General SQL execution function (for working with result sets).
|
||||||
|
|
||||||
Returns a reducible that, when reduced, runs the SQL and yields the result.
|
Returns a reducible that, when reduced (with an initial value), runs the
|
||||||
|
SQL and yields the result. `plan` returns an `IReduceInit` object so you
|
||||||
|
must provide an initial value when calling `reduce` on it.
|
||||||
|
|
||||||
|
The reducible is also foldable (in the `clojure.core.reducers` sense) but
|
||||||
|
see the **Tips & Tricks** section of the documentation for some important
|
||||||
|
caveats about that.
|
||||||
|
|
||||||
Can be called on a `PreparedStatement`, a `Connection`, or something that can
|
Can be called on a `PreparedStatement`, a `Connection`, or something that can
|
||||||
produce a `Connection` via a `DataSource`.
|
produce a `Connection` via a `DataSource`.
|
||||||
|
|
@ -196,30 +238,37 @@
|
||||||
(or they can be different, depending on how you want the row to be built,
|
(or they can be different, depending on how you want the row to be built,
|
||||||
and how you want any subsequent lazy navigation to be handled)."
|
and how you want any subsequent lazy navigation to be handled)."
|
||||||
(^clojure.lang.IReduceInit
|
(^clojure.lang.IReduceInit
|
||||||
[stmt]
|
[stmt]
|
||||||
(p/-execute stmt [] {}))
|
(p/-execute stmt [] {}))
|
||||||
(^clojure.lang.IReduceInit
|
(^clojure.lang.IReduceInit
|
||||||
[connectable sql-params]
|
[connectable sql-params]
|
||||||
(p/-execute connectable sql-params
|
(ensure-sql-params sql-params)
|
||||||
{:next.jdbc/sql-params sql-params}))
|
(p/-execute connectable sql-params
|
||||||
|
{:next.jdbc/sql-params sql-params}))
|
||||||
(^clojure.lang.IReduceInit
|
(^clojure.lang.IReduceInit
|
||||||
[connectable sql-params opts]
|
[connectable sql-params opts]
|
||||||
(p/-execute connectable sql-params
|
(ensure-sql-params sql-params)
|
||||||
(assoc opts :next.jdbc/sql-params sql-params))))
|
(p/-execute connectable sql-params
|
||||||
|
(assoc opts :next.jdbc/sql-params sql-params))))
|
||||||
|
|
||||||
(defn execute!
|
(defn execute!
|
||||||
"General SQL execution function.
|
"General SQL execution function.
|
||||||
|
|
||||||
Returns a fully-realized result set.
|
Returns a fully-realized result set. When `:multi-rs true` is provided, will
|
||||||
|
return multiple result sets, as a vector of result sets. Each result set is
|
||||||
|
a vector of hash maps, by default, but can be controlled by the `:builder-fn`
|
||||||
|
option.
|
||||||
|
|
||||||
Can be called on a `PreparedStatement`, a `Connection`, or something that can
|
Can be called on a `PreparedStatement`, a `Connection`, or something that can
|
||||||
produce a `Connection` via a `DataSource`."
|
produce a `Connection` via a `DataSource`."
|
||||||
([stmt]
|
([stmt]
|
||||||
(p/-execute-all stmt [] {}))
|
(p/-execute-all stmt [] {}))
|
||||||
([connectable sql-params]
|
([connectable sql-params]
|
||||||
|
(ensure-sql-params sql-params)
|
||||||
(p/-execute-all connectable sql-params
|
(p/-execute-all connectable sql-params
|
||||||
{:next.jdbc/sql-params sql-params}))
|
{:next.jdbc/sql-params sql-params}))
|
||||||
([connectable sql-params opts]
|
([connectable sql-params opts]
|
||||||
|
(ensure-sql-params sql-params)
|
||||||
(p/-execute-all connectable sql-params
|
(p/-execute-all connectable sql-params
|
||||||
(assoc opts :next.jdbc/sql-params sql-params))))
|
(assoc opts :next.jdbc/sql-params sql-params))))
|
||||||
|
|
||||||
|
|
@ -236,12 +285,143 @@
|
||||||
([stmt]
|
([stmt]
|
||||||
(p/-execute-one stmt [] {}))
|
(p/-execute-one stmt [] {}))
|
||||||
([connectable sql-params]
|
([connectable sql-params]
|
||||||
|
(ensure-sql-params sql-params)
|
||||||
(p/-execute-one connectable sql-params
|
(p/-execute-one connectable sql-params
|
||||||
{:next.jdbc/sql-params sql-params}))
|
{:next.jdbc/sql-params sql-params}))
|
||||||
([connectable sql-params opts]
|
([connectable sql-params opts]
|
||||||
|
(ensure-sql-params sql-params)
|
||||||
(p/-execute-one connectable sql-params
|
(p/-execute-one connectable sql-params
|
||||||
(assoc opts :next.jdbc/sql-params sql-params))))
|
(assoc opts :next.jdbc/sql-params sql-params))))
|
||||||
|
|
||||||
|
(defn execute-batch!
|
||||||
|
"Given a `PreparedStatement` and a vector containing parameter groups,
|
||||||
|
i.e., a vector of vector of parameters, use `.addBatch` to add each group
|
||||||
|
of parameters to the prepared statement (via `set-parameters`) and then
|
||||||
|
call `.executeBatch`. A vector of update counts is returned.
|
||||||
|
|
||||||
|
An options hash map may also be provided, containing `:batch-size` which
|
||||||
|
determines how to partition the parameter groups for submission to the
|
||||||
|
database. If omitted, all groups will be submitted as a single command.
|
||||||
|
If you expect the update counts to be larger than `Integer/MAX_VALUE`,
|
||||||
|
you can specify `:large true` and `.executeLargeBatch` will be called
|
||||||
|
instead.
|
||||||
|
|
||||||
|
Alternatively, given a connectable, a SQL string, a vector containing
|
||||||
|
parameter groups, and an options hash map, create a new `PreparedStatement`
|
||||||
|
(after possibly creating a new `Connection`), and execute the SQL with
|
||||||
|
the specified parameter groups. That new `PreparedStatement` (and the
|
||||||
|
new `Connection`, if created) will be closed automatically after use.
|
||||||
|
|
||||||
|
By default, returns a Clojure vector of update counts. Some databases
|
||||||
|
allow batch statements to also return generated keys and you can attempt that
|
||||||
|
if you ensure the `PreparedStatement` is created with `:return-keys true`
|
||||||
|
and you also provide `:return-generated-keys true` in the options passed
|
||||||
|
to `execute-batch!`. Some databases will only return one generated key
|
||||||
|
per batch, some return all the generated keys, some will throw an exception.
|
||||||
|
If that is supported, `execute-batch!` will return a vector of hash maps
|
||||||
|
containing the generated keys as fully-realized, datafiable result sets,
|
||||||
|
whose content is database-dependent.
|
||||||
|
|
||||||
|
May throw `java.sql.BatchUpdateException` if any part of the batch fails.
|
||||||
|
You may be able to call `.getUpdateCounts` on that exception object to
|
||||||
|
get more information about which parts succeeded and which failed.
|
||||||
|
|
||||||
|
For additional caveats and database-specific options you may need, see:
|
||||||
|
https://cljdoc.org/d/com.github.seancorfield/next.jdbc/CURRENT/doc/getting-started/prepared-statements#caveats
|
||||||
|
|
||||||
|
Not all databases support batch execution."
|
||||||
|
([ps param-groups]
|
||||||
|
(execute-batch! ps param-groups {}))
|
||||||
|
([^PreparedStatement ps param-groups opts]
|
||||||
|
(let [params (if-let [n (:batch-size opts)]
|
||||||
|
(if (and (number? n) (pos? n))
|
||||||
|
(partition-all n param-groups)
|
||||||
|
(throw (IllegalArgumentException.
|
||||||
|
":batch-size must be positive")))
|
||||||
|
[param-groups])]
|
||||||
|
(into []
|
||||||
|
(mapcat (fn [group]
|
||||||
|
(run! #(.addBatch (prepare/set-parameters ps %)) group)
|
||||||
|
(let [result (if (:large opts)
|
||||||
|
(.executeLargeBatch ps)
|
||||||
|
(.executeBatch ps))]
|
||||||
|
(if (:return-generated-keys opts)
|
||||||
|
(rs/datafiable-result-set (.getGeneratedKeys ps)
|
||||||
|
(p/get-connection ps {})
|
||||||
|
opts)
|
||||||
|
result))))
|
||||||
|
params)))
|
||||||
|
([connectable sql param-groups opts]
|
||||||
|
(when-not (string? sql)
|
||||||
|
(throw (IllegalArgumentException. "execute-batch! requires a SQL string")))
|
||||||
|
(if (instance? java.sql.Connection (p/unwrap connectable))
|
||||||
|
(with-open [ps (prepare connectable [sql] opts)]
|
||||||
|
(execute-batch! ps param-groups opts))
|
||||||
|
(with-open [con (get-connection connectable)]
|
||||||
|
(execute-batch! con sql param-groups opts)))))
|
||||||
|
|
||||||
|
(defmacro on-connection
|
||||||
|
"Given a connectable object, gets a connection and binds it to `sym`,
|
||||||
|
then executes the `body` in that context.
|
||||||
|
|
||||||
|
This allows you to write generic, `Connection`-based code without
|
||||||
|
needing to know the exact type of an incoming datasource:
|
||||||
|
|
||||||
|
```clojure
|
||||||
|
(on-connection [conn datasource]
|
||||||
|
(let [metadata (.getMetadata conn)
|
||||||
|
catalog (.getCatalog conn)]
|
||||||
|
...))
|
||||||
|
```
|
||||||
|
|
||||||
|
If passed a `Connection` or a `Connectable` that wraps a `Connection`,
|
||||||
|
then that `Connection` is used as-is.
|
||||||
|
|
||||||
|
Otherwise, creates a new `Connection` object from the connectable,
|
||||||
|
executes the body, and automatically closes it for you."
|
||||||
|
[[sym connectable] & body]
|
||||||
|
(let [con-sym (vary-meta sym assoc :tag 'java.sql.Connection)]
|
||||||
|
`(let [con-obj# ~connectable
|
||||||
|
bare-con# (p/unwrap con-obj#)]
|
||||||
|
(if (instance? java.sql.Connection bare-con#)
|
||||||
|
((^{:once true} fn* [~con-sym] ~@body) bare-con#)
|
||||||
|
(with-open [con# (get-connection con-obj#)]
|
||||||
|
((^{:once true} fn* [~con-sym] ~@body) con#))))))
|
||||||
|
|
||||||
|
(defmacro on-connection+options
|
||||||
|
"Given a connectable object, assumed to be wrapped with options, gets
|
||||||
|
a connection, rewraps it with those options, and binds it to `sym`,
|
||||||
|
then executes the `body` in that context.
|
||||||
|
|
||||||
|
This allows you to write generic, **wrapped** connectable code without
|
||||||
|
needing to know the exact type of an incoming datasource:
|
||||||
|
|
||||||
|
```clojure
|
||||||
|
(on-connection+options [conn datasource]
|
||||||
|
(execute! conn some-insert-sql)
|
||||||
|
(execute! conn some-update-sql))
|
||||||
|
```
|
||||||
|
|
||||||
|
If passed a `Connection` then that `Connection` is used as-is.
|
||||||
|
|
||||||
|
If passed a `Connectable` that wraps a `Connection`, then that
|
||||||
|
`Connectable` is used as-is.
|
||||||
|
|
||||||
|
Otherwise, creates a new `Connection` object from the connectable,
|
||||||
|
wraps that with options, executes the body, and automatically closes
|
||||||
|
the new `Connection` for you.
|
||||||
|
|
||||||
|
Note: the bound `sym` will be a **wrapped** connectable and not a plain
|
||||||
|
Java object, so you cannot call JDBC methods directly on it like you can
|
||||||
|
with `on-connection`."
|
||||||
|
[[sym connectable] & body]
|
||||||
|
`(let [con-obj# ~connectable]
|
||||||
|
(if (instance? java.sql.Connection (p/unwrap con-obj#))
|
||||||
|
((^{:once true} fn* [~sym] ~@body) con-obj#)
|
||||||
|
(with-open [con# (get-connection con-obj#)]
|
||||||
|
((^{:once true} fn* [~sym] ~@body)
|
||||||
|
(with-options con# (:options con-obj# {})))))))
|
||||||
|
|
||||||
(defn transact
|
(defn transact
|
||||||
"Given a transactable object and a function (taking a `Connection`),
|
"Given a transactable object and a function (taking a `Connection`),
|
||||||
execute the function over the connection in a transactional manner.
|
execute the function over the connection in a transactional manner.
|
||||||
|
|
@ -257,11 +437,134 @@
|
||||||
then executes the `body` in that context, committing any changes if the body
|
then executes the `body` in that context, committing any changes if the body
|
||||||
completes successfully, otherwise rolling back any changes made.
|
completes successfully, otherwise rolling back any changes made.
|
||||||
|
|
||||||
|
Like `with-open`, if `with-transaction` creates a new `Connection` object,
|
||||||
|
it will automatically close it for you.
|
||||||
|
|
||||||
|
If you are working with default options via `with-options`, you might want
|
||||||
|
to use `with-transaction+options` instead.
|
||||||
|
|
||||||
The options map supports:
|
The options map supports:
|
||||||
* `:isolation` -- `:none`, `:read-committed`, `:read-uncommitted`,
|
* `:isolation` -- `:none`, `:read-committed`, `:read-uncommitted`,
|
||||||
`:repeatable-read`, `:serializable`,
|
`:repeatable-read`, `:serializable`,
|
||||||
* `:read-only` -- `true` / `false`,
|
* `:read-only` -- `true` / `false` (`true` will make the `Connection` readonly),
|
||||||
* `:rollback-only` -- `true` / `false`."
|
* `:rollback-only` -- `true` / `false` (`true` will make the transaction
|
||||||
|
rollback, even if it would otherwise succeed)."
|
||||||
[[sym transactable opts] & body]
|
[[sym transactable opts] & body]
|
||||||
(let [con (vary-meta sym assoc :tag 'java.sql.Connection)]
|
(let [con (vary-meta sym assoc :tag 'java.sql.Connection)]
|
||||||
`(transact ~transactable (^{:once true} fn* [~con] ~@body) ~(or opts {}))))
|
`(transact ~transactable (^{:once true} fn* [~con] ~@body) ~(or opts {}))))
|
||||||
|
|
||||||
|
(defn active-tx?
|
||||||
|
"Returns true if `next.jdbc` has a currently active transaction in the
|
||||||
|
current thread, else false.
|
||||||
|
|
||||||
|
With no arguments, tells you if any transaction is currently active.
|
||||||
|
|
||||||
|
With a `Connection` argument, tells you if a transaction is currently
|
||||||
|
active on that specific connection.
|
||||||
|
|
||||||
|
Note: transactions are a convention of operations on a `Connection` so
|
||||||
|
this predicate only reflects `next.jdbc/transact` and `next.jdbc/with-transaction`
|
||||||
|
operations -- it does not reflect any other operations on a `Connection`,
|
||||||
|
performed via JDBC interop directly."
|
||||||
|
([]
|
||||||
|
(boolean (seq @#'tx/*active-tx*)))
|
||||||
|
([con]
|
||||||
|
(contains? @#'tx/*active-tx* con)))
|
||||||
|
|
||||||
|
(defn with-options
|
||||||
|
"Given a connectable/transactable object and a set of (default) options
|
||||||
|
that should be used on all operations on that object, return a new
|
||||||
|
wrapper object that can be used in its place.
|
||||||
|
|
||||||
|
Bear in mind that `get-datasource`, `get-connection`, and `with-transaction`
|
||||||
|
return plain Java objects, so if you call any of those on this wrapped
|
||||||
|
object, you'll need to re-wrap the Java object `with-options` again. See
|
||||||
|
the Datasources, Connections & Transactions section of Getting Started for
|
||||||
|
more details, and some examples of use with these functions.
|
||||||
|
|
||||||
|
`with-transaction+options` exists to automatically rewrap a `Connection`
|
||||||
|
with the options from a `with-options` wrapper."
|
||||||
|
[connectable opts]
|
||||||
|
(let [c (:connectable connectable)
|
||||||
|
o (:options connectable)]
|
||||||
|
(if (and c o)
|
||||||
|
(opts/->DefaultOptions c (merge o opts))
|
||||||
|
(opts/->DefaultOptions connectable opts))))
|
||||||
|
|
||||||
|
(defmacro with-transaction+options
|
||||||
|
"Given a transactable object, assumed to be wrapped with options, gets a
|
||||||
|
connection, rewraps it with those options, and binds it to `sym`, then
|
||||||
|
executes the `body` in that context, committing any changes if the body
|
||||||
|
completes successfully, otherwise rolling back any changes made.
|
||||||
|
|
||||||
|
Like `with-open`, if `with-transaction+options` creates a new `Connection`
|
||||||
|
object, it will automatically close it for you.
|
||||||
|
|
||||||
|
Note: the bound `sym` will be a **wrapped** connectable and not a plain
|
||||||
|
Java object, so you cannot call JDBC methods directly on it like you can
|
||||||
|
with `with-transaction`.
|
||||||
|
|
||||||
|
The options map supports:
|
||||||
|
* `:isolation` -- `:none`, `:read-committed`, `:read-uncommitted`,
|
||||||
|
`:repeatable-read`, `:serializable`,
|
||||||
|
* `:read-only` -- `true` / `false` (`true` will make the `Connection` readonly),
|
||||||
|
* `:rollback-only` -- `true` / `false` (`true` will make the transaction
|
||||||
|
rollback, even if it would otherwise succeed)."
|
||||||
|
[[sym transactable opts] & body]
|
||||||
|
`(let [tx# ~transactable]
|
||||||
|
(transact tx#
|
||||||
|
(^{:once true} fn*
|
||||||
|
[con#] ; this is the unwrapped java.sql.connection
|
||||||
|
(let [~sym (with-options con# (:options tx# {}))]
|
||||||
|
~@body))
|
||||||
|
~(or opts {}))))
|
||||||
|
|
||||||
|
(defn with-logging
|
||||||
|
"Given a connectable/transactable object and a sql/params logging
|
||||||
|
function and an optional result logging function that should be used
|
||||||
|
on all operations on that object, return a new wrapper object that can
|
||||||
|
be used in its place.
|
||||||
|
|
||||||
|
The sql/params logging function will be called with two arguments:
|
||||||
|
* a symbol indicating which operation is being performed:
|
||||||
|
* `next.jdbc/plan`, `next.jdbc/execute-one!`, `next.jdbc/execute!`,
|
||||||
|
or `next.jdbc/prepare`
|
||||||
|
* the vector containing the SQL string and its parameters
|
||||||
|
|
||||||
|
Whatever the sql/params logging function returns will be passed as a
|
||||||
|
`state` argument to the optional result logging function. This means you can
|
||||||
|
use this mechanism to provide some timing information, since your sql/params
|
||||||
|
logging function can return the current system time, and your result logging
|
||||||
|
function can then calculate the elapsed time. There is an example of this in
|
||||||
|
the Naive Logging with Timing section of Getting Started.
|
||||||
|
|
||||||
|
The result logging function, if provided, will be called with the
|
||||||
|
same symbol passed to the sql/params logging function, the `state`
|
||||||
|
returned by the sql/params logging function, and either the result of
|
||||||
|
the `execute!` or `execute-one!` call or an exception if the call
|
||||||
|
failed. The result logging function is not called for the `plan`
|
||||||
|
or `prepare` call (since they do not produce result sets directly).
|
||||||
|
|
||||||
|
Bear in mind that `get-datasource`, `get-connection`, and `with-transaction`
|
||||||
|
return plain Java objects, so if you call any of those on this wrapped
|
||||||
|
object, you'll need to re-wrap the Java object `with-logging` again. See
|
||||||
|
the Datasources, Connections & Transactions section of Getting Started for
|
||||||
|
more details, and some examples of use with these functions."
|
||||||
|
[connectable sql-logger & [result-logger]]
|
||||||
|
(logger/->SQLLogging connectable sql-logger result-logger (:options connectable)))
|
||||||
|
|
||||||
|
(def snake-kebab-opts
|
||||||
|
"A hash map of options that will convert Clojure identifiers to
|
||||||
|
snake_case SQL entities (`:table-fn`, `:column-fn`), and will convert
|
||||||
|
SQL entities to qualified kebab-case Clojure identifiers (`:builder-fn`)."
|
||||||
|
{:column-fn ->snake_case :table-fn ->snake_case
|
||||||
|
:label-fn ->kebab-case :qualifier-fn ->kebab-case
|
||||||
|
:builder-fn rs/as-kebab-maps})
|
||||||
|
|
||||||
|
(def unqualified-snake-kebab-opts
|
||||||
|
"A hash map of options that will convert Clojure identifiers to
|
||||||
|
snake_case SQL entities (`:table-fn`, `:column-fn`), and will convert
|
||||||
|
SQL entities to unqualified kebab-case Clojure identifiers (`:builder-fn`)."
|
||||||
|
{:column-fn ->snake_case :table-fn ->snake_case
|
||||||
|
:label-fn ->kebab-case :qualifier-fn ->kebab-case
|
||||||
|
:builder-fn rs/as-unqualified-kebab-maps})
|
||||||
|
|
|
||||||
|
|
@ -1,11 +1,13 @@
|
||||||
;; copyright (c) 2018-2020 Sean Corfield, all rights reserved
|
;; copyright (c) 2018-2025 Sean Corfield, all rights reserved
|
||||||
|
|
||||||
(ns next.jdbc.connection
|
(ns next.jdbc.connection
|
||||||
"Standard implementations of `get-datasource` and `get-connection`.
|
"Standard implementations of `get-datasource` and `get-connection`.
|
||||||
|
|
||||||
Also provides `dbtypes` as a map of all known database types, and
|
Also provides `dbtypes` as a map of all known database types, and
|
||||||
the `->pool` function for creating pooled datasource objects."
|
the `->pool` and `component` functions for creating pooled datasource
|
||||||
|
objects."
|
||||||
(:require [clojure.java.data :as j]
|
(:require [clojure.java.data :as j]
|
||||||
|
[clojure.string :as str]
|
||||||
[next.jdbc.protocols :as p])
|
[next.jdbc.protocols :as p])
|
||||||
(:import (java.sql Connection DriverManager)
|
(:import (java.sql Connection DriverManager)
|
||||||
(javax.sql DataSource)
|
(javax.sql DataSource)
|
||||||
|
|
@ -20,7 +22,8 @@
|
||||||
string, this table includes `:dbname-separator` and/or `:host-prefix`. The
|
string, this table includes `:dbname-separator` and/or `:host-prefix`. The
|
||||||
default prefix for `:dbname` is either `/` or `:` and for `:host` it is `//`.
|
default prefix for `:dbname` is either `/` or `:` and for `:host` it is `//`.
|
||||||
For local databases, with no `:host`/`:port` segment in their JDBC URL, a
|
For local databases, with no `:host`/`:port` segment in their JDBC URL, a
|
||||||
value of `:none` is provided for `:host` in this table.
|
value of `:none` is provided for `:host` in this table. In addition,
|
||||||
|
`:property-separator` can specify how you build the JDBC URL.
|
||||||
|
|
||||||
For known database types, you can use `:dbtype` (and omit `:classname`).
|
For known database types, you can use `:dbtype` (and omit `:classname`).
|
||||||
|
|
||||||
|
|
@ -56,7 +59,7 @@
|
||||||
|
|
||||||
or:
|
or:
|
||||||
|
|
||||||
`{com.acme/jdbc {:mvn/version \"1.2.3\"}} ; CLI/deps.edn`
|
`com.acme/jdbc {:mvn/version \"1.2.3\"} ; CLI/deps.edn`
|
||||||
|
|
||||||
Note: the `:classname` value can be a string or a vector of strings. If
|
Note: the `:classname` value can be a string or a vector of strings. If
|
||||||
a vector of strings is provided, an attempt will be made to load each
|
a vector of strings is provided, an attempt will be made to load each
|
||||||
|
|
@ -65,9 +68,13 @@
|
||||||
has changed over time (such as with MySQL)."
|
has changed over time (such as with MySQL)."
|
||||||
{"derby" {:classname "org.apache.derby.jdbc.EmbeddedDriver"
|
{"derby" {:classname "org.apache.derby.jdbc.EmbeddedDriver"
|
||||||
:host :none}
|
:host :none}
|
||||||
"h2" {:classname "org.h2.Driver"
|
"duckdb" {:classname "org.duckdb.DuckDBDriver"
|
||||||
:host :none}
|
:host :none}
|
||||||
"h2:mem" {:classname "org.h2.Driver"}
|
"h2" {:classname "org.h2.Driver"
|
||||||
|
:property-separator ";"
|
||||||
|
:host :none}
|
||||||
|
"h2:mem" {:classname "org.h2.Driver"
|
||||||
|
:property-separator ";"}
|
||||||
"hsql" {:classname "org.hsqldb.jdbcDriver"
|
"hsql" {:classname "org.hsqldb.jdbcDriver"
|
||||||
:alias-for "hsqldb"
|
:alias-for "hsqldb"
|
||||||
:host :none}
|
:host :none}
|
||||||
|
|
@ -75,14 +82,17 @@
|
||||||
:host :none}
|
:host :none}
|
||||||
"jtds" {:classname "net.sourceforge.jtds.jdbc.Driver"
|
"jtds" {:classname "net.sourceforge.jtds.jdbc.Driver"
|
||||||
:alias-for "jtds:sqlserver"
|
:alias-for "jtds:sqlserver"
|
||||||
|
:property-separator ";"
|
||||||
:port 1433}
|
:port 1433}
|
||||||
"jtds:sqlserver" {:classname "net.sourceforge.jtds.jdbc.Driver"
|
"jtds:sqlserver" {:classname "net.sourceforge.jtds.jdbc.Driver"
|
||||||
|
:property-separator ";"
|
||||||
:port 1433}
|
:port 1433}
|
||||||
"mariadb" {:classname "org.mariadb.jdbc.Driver"
|
"mariadb" {:classname "org.mariadb.jdbc.Driver"
|
||||||
:port 3306}
|
:port 3306}
|
||||||
"mssql" {:classname "com.microsoft.sqlserver.jdbc.SQLServerDriver"
|
"mssql" {:classname "com.microsoft.sqlserver.jdbc.SQLServerDriver"
|
||||||
:alias-for "sqlserver"
|
:alias-for "sqlserver"
|
||||||
:dbname-separator ";DATABASENAME="
|
:dbname-separator ";DATABASENAME="
|
||||||
|
:property-separator ";"
|
||||||
:port 1433}
|
:port 1433}
|
||||||
"mysql" {:classname ["com.mysql.cj.jdbc.Driver"
|
"mysql" {:classname ["com.mysql.cj.jdbc.Driver"
|
||||||
"com.mysql.jdbc.Driver"]
|
"com.mysql.jdbc.Driver"]
|
||||||
|
|
@ -113,28 +123,16 @@
|
||||||
:host :none}
|
:host :none}
|
||||||
"sqlserver" {:classname "com.microsoft.sqlserver.jdbc.SQLServerDriver"
|
"sqlserver" {:classname "com.microsoft.sqlserver.jdbc.SQLServerDriver"
|
||||||
:dbname-separator ";DATABASENAME="
|
:dbname-separator ";DATABASENAME="
|
||||||
|
:property-separator ";"
|
||||||
:port 1433}
|
:port 1433}
|
||||||
"timesten:client" {:classname "com.timesten.jdbc.TimesTenClientDriver"
|
"timesten:client" {:classname "com.timesten.jdbc.TimesTenClientDriver"
|
||||||
:dbname-separator ":dsn="
|
:dbname-separator ":dsn="
|
||||||
:host :none}
|
:host :none}
|
||||||
"timesten:direct" {:classname "com.timesten.jdbc.TimesTenDriver"
|
"timesten:direct" {:classname "com.timesten.jdbc.TimesTenDriver"
|
||||||
:dbname-separator ":dsn="
|
:dbname-separator ":dsn="
|
||||||
:host :none}})
|
:host :none}
|
||||||
|
"xtdb" {:classname "xtdb.jdbc.Driver"
|
||||||
(defn- ^Properties as-properties
|
:port 5432}})
|
||||||
"Convert any seq of pairs to a `java.util.Properties` instance."
|
|
||||||
[m]
|
|
||||||
(let [p (Properties.)]
|
|
||||||
(doseq [[k v] m]
|
|
||||||
(.setProperty p (name k) (str v)))
|
|
||||||
p))
|
|
||||||
|
|
||||||
(defn- get-driver-connection
|
|
||||||
"Common logic for loading the designated JDBC driver class and
|
|
||||||
obtaining the appropriate `Connection` object."
|
|
||||||
[url timeout etc]
|
|
||||||
(when timeout (DriverManager/setLoginTimeout timeout))
|
|
||||||
(DriverManager/getConnection url (as-properties etc)))
|
|
||||||
|
|
||||||
(def ^:private driver-cache
|
(def ^:private driver-cache
|
||||||
"An optimization for repeated calls to get-datasource, or for get-connection
|
"An optimization for repeated calls to get-datasource, or for get-connection
|
||||||
|
|
@ -147,12 +145,12 @@
|
||||||
As a special case, the database spec can contain jdbcUrl (just like ->pool),
|
As a special case, the database spec can contain jdbcUrl (just like ->pool),
|
||||||
in which case it will return that URL as-is and a map of any other options."
|
in which case it will return that URL as-is and a map of any other options."
|
||||||
[{:keys [dbtype dbname host port classname
|
[{:keys [dbtype dbname host port classname
|
||||||
dbname-separator host-prefix
|
dbname-separator host-prefix property-separator
|
||||||
jdbcUrl]
|
jdbcUrl]
|
||||||
:as db-spec}]
|
:as db-spec}]
|
||||||
(let [etc (dissoc db-spec
|
(let [etc (dissoc db-spec
|
||||||
:dbtype :dbname :host :port :classname
|
:dbtype :dbname :host :port :classname
|
||||||
:dbname-separator :host-prefix
|
:dbname-separator :host-prefix :property-separator
|
||||||
:jdbcUrl)]
|
:jdbcUrl)]
|
||||||
(if jdbcUrl
|
(if jdbcUrl
|
||||||
[jdbcUrl etc]
|
[jdbcUrl etc]
|
||||||
|
|
@ -180,7 +178,7 @@
|
||||||
(str "jdbc:" subprotocol ":"
|
(str "jdbc:" subprotocol ":"
|
||||||
(or host-prefix (-> dbtype dbtypes :host-prefix (or "//")))
|
(or host-prefix (-> dbtype dbtypes :host-prefix (or "//")))
|
||||||
host
|
host
|
||||||
(when port (str ":" port))
|
(when (and port (not= :none port)) (str ":" port))
|
||||||
db-sep dbname))]
|
db-sep dbname))]
|
||||||
;; verify the datasource is loadable
|
;; verify the datasource is loadable
|
||||||
(if-let [class-name (or classname (-> dbtype dbtypes :classname))]
|
(if-let [class-name (or classname (-> dbtype dbtypes :classname))]
|
||||||
|
|
@ -204,7 +202,44 @@
|
||||||
(throw (ex-info (str "Unknown dbtype: " dbtype
|
(throw (ex-info (str "Unknown dbtype: " dbtype
|
||||||
", and :classname not provided.")
|
", and :classname not provided.")
|
||||||
db-spec)))
|
db-spec)))
|
||||||
[url etc]))))
|
[url etc (or property-separator
|
||||||
|
(-> dbtype dbtypes :property-separator))]))))
|
||||||
|
|
||||||
|
(defn jdbc-url
|
||||||
|
"Given a database spec (as a hash map), return a JDBC URL with all the
|
||||||
|
attributes added to the query string. The result is suitable for use in
|
||||||
|
calls to `->pool` and `component` as the `:jdbcUrl` key in the parameter
|
||||||
|
map for the connection pooling library.
|
||||||
|
|
||||||
|
This allows you to build a connection-pooled datasource that needs
|
||||||
|
additional settings that the pooling library does not support, such as
|
||||||
|
`:serverTimezone`:
|
||||||
|
|
||||||
|
```clojure
|
||||||
|
(def db-spec {:dbtype .. :dbname .. :user .. :password ..
|
||||||
|
:serverTimezone \"UTC\"})
|
||||||
|
(def ds (next.jdbc.connection/->pool
|
||||||
|
HikariCP {:jdbcUrl (next.jdbc.connection/jdbc-url db-spec)
|
||||||
|
:maximumPoolSize 15}))
|
||||||
|
```
|
||||||
|
|
||||||
|
This also clearly separates the attributes that should be part of the
|
||||||
|
JDBC URL from the attributes that should be configured on the pool.
|
||||||
|
|
||||||
|
Since JDBC drivers can handle URL encoding differently, if you are
|
||||||
|
trying to pass attributes that might need encoding, you should make
|
||||||
|
sure they are properly URL-encoded as values in the database spec hash map.
|
||||||
|
This function does **not** attempt to URL-encode values for you!"
|
||||||
|
[db-spec]
|
||||||
|
(let [[url etc ps] (spec->url+etc db-spec)
|
||||||
|
url-and (or ps (if (str/index-of url "?") "&" "?"))]
|
||||||
|
(if (seq etc)
|
||||||
|
(str url url-and (str/join (or ps "&")
|
||||||
|
(reduce-kv (fn [pairs k v]
|
||||||
|
(conj pairs (str (name k) "=" v)))
|
||||||
|
[]
|
||||||
|
etc)))
|
||||||
|
url)))
|
||||||
|
|
||||||
(defn ->pool
|
(defn ->pool
|
||||||
"Given a (connection pooled datasource) class and a database spec, return a
|
"Given a (connection pooled datasource) class and a database spec, return a
|
||||||
|
|
@ -220,6 +255,16 @@
|
||||||
`.setJdbcUrl`). `clojure.java.data/to-java` is used to construct the
|
`.setJdbcUrl`). `clojure.java.data/to-java` is used to construct the
|
||||||
object and call the setters.
|
object and call the setters.
|
||||||
|
|
||||||
|
If you need to pass in connection URL parameters, it can be easier to use
|
||||||
|
`next.jdbc.connection/jdbc-url` to construct URL, e.g.,
|
||||||
|
|
||||||
|
(->pool HikariDataSource
|
||||||
|
{:jdbcUrl (jdbc-url {:dbtype .. :dbname .. :useSSL false})
|
||||||
|
:username .. :password ..})
|
||||||
|
|
||||||
|
Here we pass `:useSSL false` to `jdbc-url` so that it ends up in the
|
||||||
|
connection string, but pass `:username` and `:password` for the pool itself.
|
||||||
|
|
||||||
Note that the result is not type-hinted (because there's no common base
|
Note that the result is not type-hinted (because there's no common base
|
||||||
class or interface that can be assumed). In particular, connection pooled
|
class or interface that can be assumed). In particular, connection pooled
|
||||||
datasource objects may need to be closed but they don't necessarily implement
|
datasource objects may need to be closed but they don't necessarily implement
|
||||||
|
|
@ -255,6 +300,11 @@
|
||||||
called on it to shutdown the datasource (and return a new startable
|
called on it to shutdown the datasource (and return a new startable
|
||||||
entity).
|
entity).
|
||||||
|
|
||||||
|
If `db-spec` contains `:init-fn`, that is assumed to be a function
|
||||||
|
that should be called on the newly-created datasource. This allows for
|
||||||
|
modification of (mutable) connection pooled datasource and/or some sort
|
||||||
|
of database initialization/setup to be called automatically.
|
||||||
|
|
||||||
By default, the datasource is shutdown by calling `.close` on it.
|
By default, the datasource is shutdown by calling `.close` on it.
|
||||||
If the datasource class implements `java.io.Closeable` then a direct,
|
If the datasource class implements `java.io.Closeable` then a direct,
|
||||||
type-hinted call to `.close` will be used, with no reflection,
|
type-hinted call to `.close` will be used, with no reflection,
|
||||||
|
|
@ -272,7 +322,9 @@
|
||||||
(with-meta {}
|
(with-meta {}
|
||||||
{'com.stuartsierra.component/start
|
{'com.stuartsierra.component/start
|
||||||
(fn [_]
|
(fn [_]
|
||||||
(let [pool (->pool clazz db-spec)]
|
(let [init-fn (:init-fn db-spec)
|
||||||
|
pool (->pool clazz (dissoc db-spec :init-fn))]
|
||||||
|
(when init-fn (init-fn pool))
|
||||||
(with-meta (fn ^DataSource [] pool)
|
(with-meta (fn ^DataSource [] pool)
|
||||||
{'com.stuartsierra.component/stop
|
{'com.stuartsierra.component/stop
|
||||||
(fn [_]
|
(fn [_]
|
||||||
|
|
@ -280,11 +332,18 @@
|
||||||
(component clazz db-spec close-fn))})))})))
|
(component clazz db-spec close-fn))})))})))
|
||||||
|
|
||||||
(comment
|
(comment
|
||||||
(require '[com.stuartsierra.component :as component])
|
(require '[com.stuartsierra.component :as component]
|
||||||
|
'[next.jdbc.sql :as sql])
|
||||||
(import '(com.mchange.v2.c3p0 ComboPooledDataSource PooledDataSource)
|
(import '(com.mchange.v2.c3p0 ComboPooledDataSource PooledDataSource)
|
||||||
'(com.zaxxer.hikari HikariDataSource))
|
'(com.zaxxer.hikari HikariDataSource))
|
||||||
(isa? PooledDataSource java.io.Closeable) ;=> false
|
(isa? PooledDataSource java.io.Closeable) ;=> false
|
||||||
(isa? HikariDataSource java.io.Closeable) ;=> true
|
(isa? HikariDataSource java.io.Closeable) ;=> true
|
||||||
|
;; create a pool with a combination of JDBC URL and username/password:
|
||||||
|
(->pool HikariDataSource
|
||||||
|
{:jdbcUrl
|
||||||
|
(jdbc-url {:dbtype "mysql" :dbname "clojure_test"
|
||||||
|
:useSSL false})
|
||||||
|
:username "root" :password (System/getenv "MYSQL_ROOT_PASSWORD")})
|
||||||
;; use c3p0 with default reflection-based closing function:
|
;; use c3p0 with default reflection-based closing function:
|
||||||
(def dbc (component ComboPooledDataSource
|
(def dbc (component ComboPooledDataSource
|
||||||
{:dbtype "mysql" :dbname "clojure_test"
|
{:dbtype "mysql" :dbname "clojure_test"
|
||||||
|
|
@ -302,15 +361,67 @@
|
||||||
;; start the chosen datasource component:
|
;; start the chosen datasource component:
|
||||||
(def ds (component/start dbc))
|
(def ds (component/start dbc))
|
||||||
;; invoke datasource component to get the underlying javax.sql.DataSource:
|
;; invoke datasource component to get the underlying javax.sql.DataSource:
|
||||||
(next.jdbc.sql/get-by-id (ds) :fruit 1)
|
(sql/get-by-id (ds) :fruit 1)
|
||||||
;; stop the component and close the pooled datasource:
|
;; stop the component and close the pooled datasource:
|
||||||
(component/stop ds))
|
(component/stop ds)
|
||||||
|
)
|
||||||
|
|
||||||
(defn- string->url+etc
|
(defn- string->url+etc
|
||||||
"Given a JDBC URL, return it with an empty set of options with no parsing."
|
"Given a JDBC URL, return it with an empty set of options with no parsing."
|
||||||
[s]
|
[s]
|
||||||
[s {}])
|
[s {}])
|
||||||
|
|
||||||
|
(defn- as-properties
|
||||||
|
"Convert any seq of pairs to a `java.util.Properties` instance."
|
||||||
|
^Properties [m]
|
||||||
|
(let [p (Properties.)
|
||||||
|
as-is (set (:next.jdbc/as-is-properties m))]
|
||||||
|
(doseq [[k v] (dissoc m :next.jdbc/as-is-properties)]
|
||||||
|
(if (contains? as-is k)
|
||||||
|
(.put p (name k) v)
|
||||||
|
(.setProperty p (name k) (str v))))
|
||||||
|
p))
|
||||||
|
|
||||||
|
(defn uri->db-spec
|
||||||
|
"clojure.java.jdbc (and some users out there) considered the URI format
|
||||||
|
to be an acceptable JDBC URL, i.e., with credentials embdedded in the string,
|
||||||
|
rather than as query parameters.
|
||||||
|
|
||||||
|
This function accepts a URI string, optionally prefixed with `jdbc:` and
|
||||||
|
returns a db-spec hash map."
|
||||||
|
[uri]
|
||||||
|
(let [{:keys [scheme userInfo host port path query]}
|
||||||
|
(j/from-java (java.net.URI. (str/replace uri #"^jdbc:" "")))
|
||||||
|
[user password] (when (seq userInfo) (str/split userInfo #":"))
|
||||||
|
properties (when (seq query)
|
||||||
|
(into {}
|
||||||
|
(map #(let [[k v] (str/split % #"=")]
|
||||||
|
[(keyword k) v]))
|
||||||
|
(str/split query #"\&")))]
|
||||||
|
(cond-> (assoc properties
|
||||||
|
:dbtype scheme
|
||||||
|
:host host
|
||||||
|
:port port)
|
||||||
|
(seq path) (assoc :dbname (subs path 1))
|
||||||
|
user (assoc :user user)
|
||||||
|
password (assoc :password password))))
|
||||||
|
|
||||||
|
(defn- get-driver-connection
|
||||||
|
"Common logic for loading the designated JDBC driver class and
|
||||||
|
obtaining the appropriate `Connection` object."
|
||||||
|
[url timeout etc]
|
||||||
|
(when timeout (DriverManager/setLoginTimeout timeout))
|
||||||
|
(try
|
||||||
|
(DriverManager/getConnection url (as-properties etc))
|
||||||
|
(catch Exception e
|
||||||
|
(try
|
||||||
|
(let [db-spec (uri->db-spec url)
|
||||||
|
[url' etc'] (spec->url+etc db-spec)]
|
||||||
|
(DriverManager/getConnection url' (as-properties (merge etc' etc))))
|
||||||
|
(catch Exception _
|
||||||
|
;; if the fallback fails too, throw the original exception
|
||||||
|
(throw e))))))
|
||||||
|
|
||||||
(defn- url+etc->datasource
|
(defn- url+etc->datasource
|
||||||
"Given a JDBC URL and a map of options, return a `DataSource` that can be
|
"Given a JDBC URL and a map of options, return a `DataSource` that can be
|
||||||
used to obtain a new database connection."
|
used to obtain a new database connection."
|
||||||
|
|
@ -334,13 +445,17 @@
|
||||||
|
|
||||||
These options are supported:
|
These options are supported:
|
||||||
* `:auto-commit` -- whether the connection should be set to auto-commit or not;
|
* `:auto-commit` -- whether the connection should be set to auto-commit or not;
|
||||||
without this option, the defaut is `true` -- connections will auto-commit,
|
without this option, the default is `true` -- connections will auto-commit,
|
||||||
* `:read-only` -- whether the connection should be set to read-only mode,
|
* `:read-only` -- whether the connection should be set to read-only mode,
|
||||||
* `:connection` -- a hash map of camelCase properties to set on the connection,
|
* `:connection` -- a hash map of camelCase properties to set on the connection,
|
||||||
via reflection, e.g., :autoCommit, :readOnly, :schema..."
|
via reflection, e.g., :autoCommit, :readOnly, :schema..."
|
||||||
^Connection
|
^Connection
|
||||||
[^DataSource datasource opts]
|
[^DataSource datasource opts]
|
||||||
(let [^Connection connection (.getConnection datasource)]
|
(let [^Connection connection (if (and (:user opts) (:password opts))
|
||||||
|
(.getConnection datasource
|
||||||
|
(:user opts)
|
||||||
|
(:password opts))
|
||||||
|
(.getConnection datasource))]
|
||||||
;; fast, specific option handling:
|
;; fast, specific option handling:
|
||||||
(when (contains? opts :auto-commit)
|
(when (contains? opts :auto-commit)
|
||||||
(.setAutoCommit connection (boolean (:auto-commit opts))))
|
(.setAutoCommit connection (boolean (:auto-commit opts))))
|
||||||
|
|
@ -354,7 +469,13 @@
|
||||||
(extend-protocol p/Sourceable
|
(extend-protocol p/Sourceable
|
||||||
clojure.lang.Associative
|
clojure.lang.Associative
|
||||||
(get-datasource [this]
|
(get-datasource [this]
|
||||||
(url+etc->datasource (spec->url+etc this)))
|
;; #207 c.j.j compatibility:
|
||||||
|
(if-let [datasource (:datasource this)]
|
||||||
|
datasource
|
||||||
|
(url+etc->datasource
|
||||||
|
(if-let [uri (:connection-uri this)]
|
||||||
|
(string->url+etc uri)
|
||||||
|
(spec->url+etc this)))))
|
||||||
javax.sql.DataSource
|
javax.sql.DataSource
|
||||||
(get-datasource [this] this)
|
(get-datasource [this] this)
|
||||||
String
|
String
|
||||||
|
|
|
||||||
167
src/next/jdbc/datafy.clj
Normal file
167
src/next/jdbc/datafy.clj
Normal file
|
|
@ -0,0 +1,167 @@
|
||||||
|
;; copyright (c) 2020-2024 Sean Corfield, all rights reserved
|
||||||
|
|
||||||
|
(ns next.jdbc.datafy
|
||||||
|
"This namespace provides datafication of several JDBC object types,
|
||||||
|
all within the `java.sql` package:
|
||||||
|
|
||||||
|
* `Connection` -- datafies as a bean.
|
||||||
|
* `DatabaseMetaData` -- datafies as a bean; six properties
|
||||||
|
are navigable to produce fully-realized datafiable result sets.
|
||||||
|
* `ParameterMetaData` -- datafies as a vector of parameter descriptions.
|
||||||
|
* `ResultSet` -- datafies as a bean; if the `ResultSet` has an associated
|
||||||
|
`Statement` and that in turn has an associated `Connection` then an
|
||||||
|
additional key of `:rows` is provided which is a datafied result set,
|
||||||
|
from `next.jdbc.result-set/datafiable-result-set` with default options.
|
||||||
|
This is provided as a convenience, purely for datafication of other
|
||||||
|
JDBC data types -- in normal `next.jdbc` usage, result sets are
|
||||||
|
datafied under full user control.
|
||||||
|
* `ResultSetMetaData` -- datafies as a vector of column descriptions.
|
||||||
|
* `Statement` -- datafies as a bean.
|
||||||
|
|
||||||
|
Because different database drivers may throw `SQLException` for various
|
||||||
|
unimplemented or unavailable properties on objects in various states,
|
||||||
|
the default behavior is to return those exceptions using the `:qualify`
|
||||||
|
option for `clojure.java.data/from-java-shallow`, so for a property
|
||||||
|
`:foo`, if its corresponding getter throws an exception, it would instead
|
||||||
|
be returned as `:foo/exception`. This behavior can be overridden by
|
||||||
|
`binding` `next.jdbc.datafy/*datafy-failure*` to any of the other options
|
||||||
|
supported: `:group`, `:omit`, or `:return`. See the `clojure.java.data`
|
||||||
|
documentation for more details."
|
||||||
|
(:require [clojure.core.protocols :as core-p]
|
||||||
|
[clojure.java.data :as j]
|
||||||
|
[next.jdbc.result-set :as rs])
|
||||||
|
(:import (java.sql Connection
|
||||||
|
DatabaseMetaData
|
||||||
|
ParameterMetaData
|
||||||
|
ResultSet ResultSetMetaData
|
||||||
|
Statement)))
|
||||||
|
|
||||||
|
(set! *warn-on-reflection* true)
|
||||||
|
|
||||||
|
(def ^:private column-meta
|
||||||
|
{:catalog (fn [^ResultSetMetaData o i] (.getCatalogName o i))
|
||||||
|
:class (fn [^ResultSetMetaData o i] (.getColumnClassName o i))
|
||||||
|
:display-size (fn [^ResultSetMetaData o i] (.getColumnDisplaySize o i))
|
||||||
|
:label (fn [^ResultSetMetaData o i] (.getColumnLabel o i))
|
||||||
|
:name (fn [^ResultSetMetaData o i] (.getColumnName o i))
|
||||||
|
:precision (fn [^ResultSetMetaData o i] (.getPrecision o i))
|
||||||
|
:scale (fn [^ResultSetMetaData o i] (.getScale o i))
|
||||||
|
:schema (fn [^ResultSetMetaData o i] (.getSchemaName o i))
|
||||||
|
:table (fn [^ResultSetMetaData o i] (.getTableName o i))
|
||||||
|
:type (fn [^ResultSetMetaData o i] (.getColumnTypeName o i))
|
||||||
|
;; the is* fields:
|
||||||
|
:nullability (fn [^ResultSetMetaData o i]
|
||||||
|
(condp = (.isNullable o i)
|
||||||
|
ResultSetMetaData/columnNoNulls :not-null
|
||||||
|
ResultSetMetaData/columnNullable :null
|
||||||
|
:unknown))
|
||||||
|
:auto-increment (fn [^ResultSetMetaData o i] (.isAutoIncrement o i))
|
||||||
|
:case-sensitive (fn [^ResultSetMetaData o i] (.isCaseSensitive o i))
|
||||||
|
:currency (fn [^ResultSetMetaData o i] (.isCurrency o i))
|
||||||
|
:definitely-writable (fn [^ResultSetMetaData o i] (.isDefinitelyWritable o i))
|
||||||
|
:read-only (fn [^ResultSetMetaData o i] (.isReadOnly o i))
|
||||||
|
:searchable (fn [^ResultSetMetaData o i] (.isSearchable o i))
|
||||||
|
:signed (fn [^ResultSetMetaData o i] (.isSigned o i))
|
||||||
|
:writable (fn [^ResultSetMetaData o i] (.isWritable o i))})
|
||||||
|
|
||||||
|
(def ^:private parameter-meta
|
||||||
|
{:class (fn [^ParameterMetaData o i] (.getParameterClassName o i))
|
||||||
|
:mode (fn [^ParameterMetaData o i]
|
||||||
|
(condp = (.getParameterMode o i)
|
||||||
|
ParameterMetaData/parameterModeIn :in
|
||||||
|
ParameterMetaData/parameterModeInOut :in-out
|
||||||
|
ParameterMetaData/parameterModeOut :out
|
||||||
|
:unknown))
|
||||||
|
:precision (fn [^ParameterMetaData o i] (.getPrecision o i))
|
||||||
|
:scale (fn [^ParameterMetaData o i] (.getScale o i))
|
||||||
|
:type (fn [^ParameterMetaData o i] (.getParameterTypeName o i))
|
||||||
|
;; the is* fields:
|
||||||
|
:nullability (fn [^ParameterMetaData o i]
|
||||||
|
(condp = (.isNullable o i)
|
||||||
|
ParameterMetaData/parameterNoNulls :not-null
|
||||||
|
ParameterMetaData/parameterNullable :null
|
||||||
|
:unknown))
|
||||||
|
:signed (fn [^ParameterMetaData o i] (.isSigned o i))})
|
||||||
|
|
||||||
|
(def ^:dynamic *datafy-failure*
|
||||||
|
"How datafication failures should be handled, based on `clojure.java.data`.
|
||||||
|
|
||||||
|
Defaults to `:qualify`, but can be `:group`, `:omit`, `:qualify`, or `:return`."
|
||||||
|
:qualify)
|
||||||
|
|
||||||
|
(defn- safe-bean [o opts]
|
||||||
|
(try
|
||||||
|
(j/from-java-shallow o (assoc opts :add-class true :exceptions *datafy-failure*))
|
||||||
|
(catch Throwable t
|
||||||
|
(let [dex (juxt type (comp str ex-message))
|
||||||
|
cause (ex-cause t)]
|
||||||
|
(with-meta (cond-> {:exception (dex t)}
|
||||||
|
cause (assoc :cause (dex cause)))
|
||||||
|
{:exception t})))))
|
||||||
|
|
||||||
|
(defn- datafy-result-set-meta-data
|
||||||
|
[^ResultSetMetaData this]
|
||||||
|
(mapv #(reduce-kv (fn [m k f] (assoc m k (f this %)))
|
||||||
|
{}
|
||||||
|
column-meta)
|
||||||
|
(range 1 (inc (.getColumnCount this)))))
|
||||||
|
|
||||||
|
(defn- datafy-parameter-meta-data
|
||||||
|
[^ParameterMetaData this]
|
||||||
|
(mapv #(reduce-kv (fn [m k f] (assoc m k (f this %)))
|
||||||
|
{}
|
||||||
|
parameter-meta)
|
||||||
|
(range 1 (inc (.getParameterCount this)))))
|
||||||
|
|
||||||
|
(extend-protocol core-p/Datafiable
|
||||||
|
Connection
|
||||||
|
(datafy [this] (safe-bean this {}))
|
||||||
|
DatabaseMetaData
|
||||||
|
(datafy [this]
|
||||||
|
(with-meta (let [data (safe-bean this {})]
|
||||||
|
(cond-> data
|
||||||
|
(not (:exception (meta data)))
|
||||||
|
;; add an opaque object that nav will "replace"
|
||||||
|
(assoc :all-tables (Object.))))
|
||||||
|
{`core-p/nav (fn [_ k v]
|
||||||
|
(condp = k
|
||||||
|
:all-tables
|
||||||
|
(rs/datafiable-result-set (.getTables this nil nil nil nil)
|
||||||
|
(.getConnection this)
|
||||||
|
{})
|
||||||
|
:catalogs
|
||||||
|
(rs/datafiable-result-set (.getCatalogs this)
|
||||||
|
(.getConnection this)
|
||||||
|
{})
|
||||||
|
:clientInfoProperties
|
||||||
|
(rs/datafiable-result-set (.getClientInfoProperties this)
|
||||||
|
(.getConnection this)
|
||||||
|
{})
|
||||||
|
:schemas
|
||||||
|
(rs/datafiable-result-set (.getSchemas this)
|
||||||
|
(.getConnection this)
|
||||||
|
{})
|
||||||
|
:tableTypes
|
||||||
|
(rs/datafiable-result-set (.getTableTypes this)
|
||||||
|
(.getConnection this)
|
||||||
|
{})
|
||||||
|
:typeInfo
|
||||||
|
(rs/datafiable-result-set (.getTypeInfo this)
|
||||||
|
(.getConnection this)
|
||||||
|
{})
|
||||||
|
v))}))
|
||||||
|
ParameterMetaData
|
||||||
|
(datafy [this] (datafy-parameter-meta-data this))
|
||||||
|
ResultSet
|
||||||
|
(datafy [this]
|
||||||
|
;; SQLite has a combination ResultSet/Metadata object...
|
||||||
|
(if (instance? ResultSetMetaData this)
|
||||||
|
(datafy-result-set-meta-data this)
|
||||||
|
(let [s (.getStatement this)
|
||||||
|
c (when s (.getConnection s))]
|
||||||
|
(cond-> (safe-bean this {})
|
||||||
|
c (assoc :rows (rs/datafiable-result-set this c {}))))))
|
||||||
|
ResultSetMetaData
|
||||||
|
(datafy [this] (datafy-result-set-meta-data this))
|
||||||
|
Statement
|
||||||
|
(datafy [this] (safe-bean this {:omit #{:moreResults}})))
|
||||||
|
|
@ -1,4 +1,4 @@
|
||||||
;; copyright (c) 2019-2020 Sean Corfield, all rights reserved
|
;; copyright (c) 2019-2024 Sean Corfield, all rights reserved
|
||||||
|
|
||||||
(ns next.jdbc.date-time
|
(ns next.jdbc.date-time
|
||||||
"Optional namespace that extends `next.jdbc.prepare/SettableParameter`
|
"Optional namespace that extends `next.jdbc.prepare/SettableParameter`
|
||||||
|
|
@ -66,7 +66,7 @@
|
||||||
will be extended to (`java.sql.Date` and) `java.sql.Timestamp` so that any
|
will be extended to (`java.sql.Date` and) `java.sql.Timestamp` so that any
|
||||||
timestamp columns will automatically be read as `java.time.Instant`.
|
timestamp columns will automatically be read as `java.time.Instant`.
|
||||||
|
|
||||||
Note that `java.sql.Date` columns will still be returns as-is because they
|
Note that `java.sql.Date` columns will still be returned as-is because they
|
||||||
cannot be converted to an instant (they lack a time component)."
|
cannot be converted to an instant (they lack a time component)."
|
||||||
[]
|
[]
|
||||||
(extend-protocol rs/ReadableColumn
|
(extend-protocol rs/ReadableColumn
|
||||||
|
|
|
||||||
48
src/next/jdbc/default_options.clj
Normal file
48
src/next/jdbc/default_options.clj
Normal file
|
|
@ -0,0 +1,48 @@
|
||||||
|
;; copyright (c) 2020-2025 Sean Corfield, all rights reserved
|
||||||
|
|
||||||
|
(ns ^:no-doc next.jdbc.default-options
|
||||||
|
"Implementation of default options logic."
|
||||||
|
(:require [next.jdbc.protocols :as p]))
|
||||||
|
|
||||||
|
(set! *warn-on-reflection* true)
|
||||||
|
|
||||||
|
(defrecord DefaultOptions [connectable options])
|
||||||
|
|
||||||
|
(extend-protocol p/Wrapped
|
||||||
|
DefaultOptions
|
||||||
|
(unwrap [this] (p/unwrap (:connectable this))))
|
||||||
|
|
||||||
|
(extend-protocol p/Sourceable
|
||||||
|
DefaultOptions
|
||||||
|
(get-datasource [this]
|
||||||
|
(p/get-datasource (:connectable this))))
|
||||||
|
|
||||||
|
(extend-protocol p/Connectable
|
||||||
|
DefaultOptions
|
||||||
|
(get-connection [this opts]
|
||||||
|
(p/get-connection (:connectable this)
|
||||||
|
(merge (:options this) opts))))
|
||||||
|
|
||||||
|
(extend-protocol p/Executable
|
||||||
|
DefaultOptions
|
||||||
|
(-execute [this sql-params opts]
|
||||||
|
(p/-execute (:connectable this) sql-params
|
||||||
|
(merge (:options this) opts)))
|
||||||
|
(-execute-one [this sql-params opts]
|
||||||
|
(p/-execute-one (:connectable this) sql-params
|
||||||
|
(merge (:options this) opts)))
|
||||||
|
(-execute-all [this sql-params opts]
|
||||||
|
(p/-execute-all (:connectable this) sql-params
|
||||||
|
(merge (:options this) opts))))
|
||||||
|
|
||||||
|
(extend-protocol p/Preparable
|
||||||
|
DefaultOptions
|
||||||
|
(prepare [this sql-params opts]
|
||||||
|
(p/prepare (:connectable this) sql-params
|
||||||
|
(merge (:options this) opts))))
|
||||||
|
|
||||||
|
(extend-protocol p/Transactable
|
||||||
|
DefaultOptions
|
||||||
|
(-transact [this body-fn opts]
|
||||||
|
(p/-transact (:connectable this) body-fn
|
||||||
|
(merge (:options this) opts))))
|
||||||
94
src/next/jdbc/defer.clj
Normal file
94
src/next/jdbc/defer.clj
Normal file
|
|
@ -0,0 +1,94 @@
|
||||||
|
;; copyright (c) 2024 Sean Corfield, all rights reserved
|
||||||
|
|
||||||
|
(ns next.jdbc.defer
|
||||||
|
"The idea behind the next.jdbc.defer namespace is to provide a
|
||||||
|
way to defer the execution of a series of SQL statements until
|
||||||
|
a later time, but still provide a way for inserted keys to be
|
||||||
|
used in later SQL statements.
|
||||||
|
|
||||||
|
The principle is to provide a core subset of the next.jdbc
|
||||||
|
and next.jdbc.sql API that produces a data structure that
|
||||||
|
describes a series of SQL operations to be performed, that
|
||||||
|
are held in a dynamic var, and that can be executed at a
|
||||||
|
later time, in a transaction."
|
||||||
|
(:require [next.jdbc :as jdbc]
|
||||||
|
[next.jdbc.sql.builder :refer [for-delete for-insert for-update]]))
|
||||||
|
|
||||||
|
(set! *warn-on-reflection* true)
|
||||||
|
|
||||||
|
(def ^:private ^:dynamic *deferred* nil)
|
||||||
|
|
||||||
|
(defn execute-one!
|
||||||
|
"Given a vector containing a SQL statement and parameters, defer
|
||||||
|
execution of that statement."
|
||||||
|
([sql-p]
|
||||||
|
(execute-one! sql-p {}))
|
||||||
|
([sql-p opts]
|
||||||
|
(swap! *deferred* conj
|
||||||
|
{:sql-p sql-p
|
||||||
|
:key-fn (or (:key-fn opts) (comp first vals))
|
||||||
|
:key (:key opts)
|
||||||
|
:opts opts})))
|
||||||
|
|
||||||
|
(defn insert!
|
||||||
|
"Given a table name, and a data hash map, defer an insertion of the
|
||||||
|
data as a single row in the database."
|
||||||
|
([table key-map]
|
||||||
|
(insert! table key-map {}))
|
||||||
|
([table key-map opts]
|
||||||
|
(swap! *deferred* conj
|
||||||
|
{:sql-p (for-insert table key-map opts)
|
||||||
|
:key-fn (or (:key-fn opts) (comp first vals))
|
||||||
|
:key (:key opts)
|
||||||
|
:opts opts})))
|
||||||
|
|
||||||
|
(defn update!
|
||||||
|
"Given a table name, a hash map of columns and values to set, and
|
||||||
|
either a hash map of columns and values to search on or a vector
|
||||||
|
of a SQL where clause and parameters, defer an update on the table."
|
||||||
|
([table key-map where-params]
|
||||||
|
(update! table key-map where-params {}))
|
||||||
|
([table key-map where-params opts]
|
||||||
|
(swap! *deferred* conj
|
||||||
|
{:sql-p (for-update table key-map where-params opts)
|
||||||
|
:opts opts})))
|
||||||
|
|
||||||
|
(defn delete!
|
||||||
|
"Given a table name, and either a hash map of columns and values
|
||||||
|
to search on or a vector of a SQL where clause and parameters,
|
||||||
|
defer a delete on the table."
|
||||||
|
([table where-params]
|
||||||
|
(delete! table where-params {}))
|
||||||
|
([table where-params opts]
|
||||||
|
(swap! *deferred* conj
|
||||||
|
{:sql-p (for-delete table where-params opts)
|
||||||
|
:opts opts})))
|
||||||
|
|
||||||
|
(defn deferrable [transactable stmts]
|
||||||
|
(reify clojure.lang.IDeref
|
||||||
|
(deref [_]
|
||||||
|
(let [keys (atom {})]
|
||||||
|
(jdbc/with-transaction [conn transactable]
|
||||||
|
(doseq [{:keys [sql-p key-fn key opts]} @stmts]
|
||||||
|
(let [sql-p
|
||||||
|
(mapv (fn [v]
|
||||||
|
(if (keyword? v)
|
||||||
|
(if (contains? @keys v)
|
||||||
|
(get @keys v)
|
||||||
|
(throw (ex-info (str "Deferred key not found " v)
|
||||||
|
{:key v})))
|
||||||
|
v))
|
||||||
|
sql-p)
|
||||||
|
result (jdbc/execute-one! conn sql-p opts)]
|
||||||
|
(when key
|
||||||
|
(swap! keys assoc key (key-fn result))))))
|
||||||
|
@keys))))
|
||||||
|
|
||||||
|
(defn defer-ops [f]
|
||||||
|
(binding [*deferred* (atom [])]
|
||||||
|
(f)
|
||||||
|
*deferred*))
|
||||||
|
|
||||||
|
(defmacro with-deferred [connectable & body]
|
||||||
|
`(let [conn# ~connectable]
|
||||||
|
(deferrable conn# (defer-ops (^{:once true} fn* [] ~@body)))))
|
||||||
|
|
@ -1,4 +1,4 @@
|
||||||
;; copyright (c) 2019-2020 Sean Corfield, all rights reserved
|
;; copyright (c) 2019-2024 Sean Corfield, all rights reserved
|
||||||
|
|
||||||
(ns next.jdbc.optional
|
(ns next.jdbc.optional
|
||||||
"Builders that treat NULL SQL values as 'optional' and omit the
|
"Builders that treat NULL SQL values as 'optional' and omit the
|
||||||
|
|
@ -11,21 +11,26 @@
|
||||||
|
|
||||||
(defrecord MapResultSetOptionalBuilder [^ResultSet rs rsmeta cols]
|
(defrecord MapResultSetOptionalBuilder [^ResultSet rs rsmeta cols]
|
||||||
rs/RowBuilder
|
rs/RowBuilder
|
||||||
(->row [this] (transient {}))
|
(->row [_this] (transient {}))
|
||||||
(column-count [this] (count cols))
|
(column-count [_this] (count cols))
|
||||||
(with-column [this row i]
|
(with-column [this row i]
|
||||||
|
;; short-circuit on null to avoid column reading logic
|
||||||
(let [v (.getObject rs ^Integer i)]
|
(let [v (.getObject rs ^Integer i)]
|
||||||
(if (nil? v)
|
(if (nil? v)
|
||||||
row
|
row
|
||||||
(assoc! row
|
(rs/with-column-value this row (nth cols (dec i))
|
||||||
(nth cols (dec i))
|
(rs/read-column-by-index v rsmeta i)))))
|
||||||
(rs/read-column-by-index v rsmeta i)))))
|
(with-column-value [_this row col v]
|
||||||
(row! [this row] (persistent! row))
|
;; ensure that even if this is adapted, we omit null columns
|
||||||
|
(if (nil? v)
|
||||||
|
row
|
||||||
|
(assoc! row col v)))
|
||||||
|
(row! [_this row] (persistent! row))
|
||||||
rs/ResultSetBuilder
|
rs/ResultSetBuilder
|
||||||
(->rs [this] (transient []))
|
(->rs [_this] (transient []))
|
||||||
(with-row [this mrs row]
|
(with-row [_this mrs row]
|
||||||
(conj! mrs row))
|
(conj! mrs row))
|
||||||
(rs! [this mrs] (persistent! mrs)))
|
(rs! [_this mrs] (persistent! mrs)))
|
||||||
|
|
||||||
(defn as-maps
|
(defn as-maps
|
||||||
"Given a `ResultSet` and options, return a `RowBuilder` / `ResultSetBuilder`
|
"Given a `ResultSet` and options, return a `RowBuilder` / `ResultSetBuilder`
|
||||||
|
|
@ -71,7 +76,7 @@
|
||||||
locales where the lower case version of a character is not a valid SQL
|
locales where the lower case version of a character is not a valid SQL
|
||||||
entity name (e.g., Turkish)."
|
entity name (e.g., Turkish)."
|
||||||
[^String s]
|
[^String s]
|
||||||
(.toLowerCase s (Locale/US)))
|
(.toLowerCase s Locale/US))
|
||||||
|
|
||||||
(defn as-lower-maps
|
(defn as-lower-maps
|
||||||
"Given a `ResultSet` and options, return a `RowBuilder` / `ResultSetBuilder`
|
"Given a `ResultSet` and options, return a `RowBuilder` / `ResultSetBuilder`
|
||||||
|
|
@ -94,7 +99,7 @@
|
||||||
function, return a new builder function that uses that column reading
|
function, return a new builder function that uses that column reading
|
||||||
function instead of `.getObject` so you can override the default behavior.
|
function instead of `.getObject` so you can override the default behavior.
|
||||||
|
|
||||||
This adapter omits SQL NULL values.
|
This adapter omits SQL NULL values, even if the underlying builder does not.
|
||||||
|
|
||||||
The default column-reader behavior would be equivalent to:
|
The default column-reader behavior would be equivalent to:
|
||||||
|
|
||||||
|
|
@ -112,17 +117,25 @@
|
||||||
(let [mrsb (builder-fn rs opts)]
|
(let [mrsb (builder-fn rs opts)]
|
||||||
(reify
|
(reify
|
||||||
rs/RowBuilder
|
rs/RowBuilder
|
||||||
(->row [this] (rs/->row mrsb))
|
(->row [_this] (rs/->row mrsb))
|
||||||
(column-count [this] (rs/column-count mrsb))
|
(column-count [_this] (rs/column-count mrsb))
|
||||||
(with-column [this row i]
|
(with-column [_this row i]
|
||||||
|
;; short-circuit on null to avoid column reading logic
|
||||||
(let [v (column-reader rs (:rsmeta mrsb) i)]
|
(let [v (column-reader rs (:rsmeta mrsb) i)]
|
||||||
(if (nil? v)
|
(if (nil? v)
|
||||||
row
|
row
|
||||||
(assoc! row
|
(rs/with-column-value mrsb row (nth (:cols mrsb) (dec i))
|
||||||
(nth (:cols mrsb) (dec i))
|
(rs/read-column-by-index v (:rsmeta mrsb) i)))))
|
||||||
(rs/read-column-by-index v (:rsmeta mrsb) i)))))
|
(with-column-value [_this row col v]
|
||||||
(row! [this row] (rs/row! mrsb row))
|
;; ensure that even if this is adapted, we omit null columns
|
||||||
|
(if (nil? v)
|
||||||
|
row
|
||||||
|
(rs/with-column-value mrsb row col v)))
|
||||||
|
(row! [_this row] (rs/row! mrsb row))
|
||||||
rs/ResultSetBuilder
|
rs/ResultSetBuilder
|
||||||
(->rs [this] (rs/->rs mrsb))
|
(->rs [_this] (rs/->rs mrsb))
|
||||||
(with-row [this mrs row] (rs/with-row mrsb mrs row))
|
(with-row [_this mrs row] (rs/with-row mrsb mrs row))
|
||||||
(rs! [this mrs] (rs/rs! mrsb mrs))))))
|
(rs! [_this mrs] (rs/rs! mrsb mrs))
|
||||||
|
clojure.lang.ILookup
|
||||||
|
(valAt [_this k] (get mrsb k))
|
||||||
|
(valAt [_this k not-found] (get mrsb k not-found))))))
|
||||||
|
|
|
||||||
70
src/next/jdbc/plan.clj
Normal file
70
src/next/jdbc/plan.clj
Normal file
|
|
@ -0,0 +1,70 @@
|
||||||
|
;; copyright (c) 2020-2024 Sean Corfield, all rights reserved
|
||||||
|
|
||||||
|
(ns next.jdbc.plan
|
||||||
|
"Some helper functions that make common operations with `next.jdbc/plan`
|
||||||
|
much easier."
|
||||||
|
(:require [next.jdbc :as jdbc]))
|
||||||
|
|
||||||
|
(set! *warn-on-reflection* true)
|
||||||
|
|
||||||
|
(defn select-one!
|
||||||
|
"Execute the SQL and params using `next.jdbc/plan` and return just the
|
||||||
|
selected columns from just the first row.
|
||||||
|
|
||||||
|
`(plan/select-one! ds [:total] [\"select count(*) as total from table\"])`
|
||||||
|
;;=> {:total 42}
|
||||||
|
|
||||||
|
If the `cols` argument is a vector of columns to select, then it is
|
||||||
|
applied using `select-keys`, otherwise, the `cols` argument is used as
|
||||||
|
a function directly. That means it can be a simple keyword to return
|
||||||
|
just that column -- which is the most common expected usage:
|
||||||
|
|
||||||
|
`(plan/select-one! ds :total [\"select count(*) as total from table\"])`
|
||||||
|
;;=> 42
|
||||||
|
|
||||||
|
The usual caveats apply about operations on a raw result set that
|
||||||
|
can be done without realizing the whole row."
|
||||||
|
([connectable cols sql-params]
|
||||||
|
(select-one! connectable cols sql-params {}))
|
||||||
|
([connectable cols sql-params opts]
|
||||||
|
(reduce (fn [_ row] (reduced (if (vector? cols)
|
||||||
|
(select-keys row cols)
|
||||||
|
(cols row))))
|
||||||
|
nil
|
||||||
|
(jdbc/plan connectable sql-params opts))))
|
||||||
|
|
||||||
|
(defn select!
|
||||||
|
"Execute the SQL and params using `next.jdbc/plan` and (by default)
|
||||||
|
return a vector of rows with just the selected columns.
|
||||||
|
|
||||||
|
`(plan/select! ds [:id :name] [\"select * from table\"])`
|
||||||
|
|
||||||
|
If the `cols` argument is a vector of columns to select, then it is
|
||||||
|
applied as:
|
||||||
|
|
||||||
|
`(into [] (map #(select-keys % cols)) (jdbc/plan ...))`
|
||||||
|
|
||||||
|
Otherwise, the `cols` argument is used as a function and mapped over
|
||||||
|
the raw result set as:
|
||||||
|
|
||||||
|
`(into [] (map cols) (jdbc/plan ...))`
|
||||||
|
|
||||||
|
The usual caveats apply about operations on a raw result set that
|
||||||
|
can be done without realizing the whole row.
|
||||||
|
|
||||||
|
Note: this allows for the following usage, which returns a vector
|
||||||
|
of all the values for a single column:
|
||||||
|
|
||||||
|
`(plan/select! ds :id [\"select * from table\"])`
|
||||||
|
|
||||||
|
The result is a vector by default, but can be changed using the
|
||||||
|
`:into` option to provide the initial data structure into which
|
||||||
|
the selected columns are poured, e.g., `:into #{}`"
|
||||||
|
([connectable cols sql-params]
|
||||||
|
(select! connectable cols sql-params {}))
|
||||||
|
([connectable cols sql-params opts]
|
||||||
|
(into (or (:into opts) [])
|
||||||
|
(map (if (vector? cols)
|
||||||
|
#(select-keys % cols)
|
||||||
|
cols))
|
||||||
|
(jdbc/plan connectable sql-params opts))))
|
||||||
|
|
@ -1,4 +1,4 @@
|
||||||
;; copyright (c) 2018-2020 Sean Corfield, all rights reserved
|
;; copyright (c) 2018-2024 Sean Corfield, all rights reserved
|
||||||
|
|
||||||
(ns next.jdbc.prepare
|
(ns next.jdbc.prepare
|
||||||
"Mostly an implementation namespace for how `PreparedStatement` objects are
|
"Mostly an implementation namespace for how `PreparedStatement` objects are
|
||||||
|
|
@ -7,15 +7,16 @@
|
||||||
`set-parameters` is public and may be useful if you have a `PreparedStatement`
|
`set-parameters` is public and may be useful if you have a `PreparedStatement`
|
||||||
that you wish to reuse and (re)set the parameters on it.
|
that you wish to reuse and (re)set the parameters on it.
|
||||||
|
|
||||||
`execute-batch!` provides a way to add batches of parameters to a
|
|
||||||
`PreparedStatement` and then execute it in batch mode (via `.executeBatch`).
|
|
||||||
|
|
||||||
Defines the `SettableParameter` protocol for converting Clojure values
|
Defines the `SettableParameter` protocol for converting Clojure values
|
||||||
to database-specific values.
|
to database-specific values.
|
||||||
|
|
||||||
See also https://cljdoc.org/d/seancorfield/next.jdbc/CURRENT/api/next.jdbc.date-time
|
See also https://cljdoc.org/d/com.github.seancorfield/next.jdbc/CURRENT/api/next.jdbc.date-time
|
||||||
for implementations of `SettableParameter` that provide automatic
|
for implementations of `SettableParameter` that provide automatic
|
||||||
conversion of Java Time objects to SQL data types."
|
conversion of Java Time objects to SQL data types.
|
||||||
|
|
||||||
|
See also https://cljdoc.org/d/com.github.seancorfield/next.jdbc/CURRENT/api/next.jdbc.types
|
||||||
|
for `as-xxx` functions that provide per-instance implementations of
|
||||||
|
`SettableParameter` for each of the standard `java.sql.Types` values."
|
||||||
(:require [clojure.java.data :as j]
|
(:require [clojure.java.data :as j]
|
||||||
[next.jdbc.protocols :as p])
|
[next.jdbc.protocols :as p])
|
||||||
(:import (java.sql Connection
|
(:import (java.sql Connection
|
||||||
|
|
@ -25,12 +26,13 @@
|
||||||
|
|
||||||
(set! *warn-on-reflection* true)
|
(set! *warn-on-reflection* true)
|
||||||
|
|
||||||
(defprotocol SettableParameter :extend-via-metadata true
|
(defprotocol SettableParameter
|
||||||
"Protocol for setting SQL parameters in statement objects, which
|
"Protocol for setting SQL parameters in statement objects, which
|
||||||
can convert from Clojure values. The default implementation just
|
can convert from Clojure values. The default implementation just
|
||||||
calls `.setObject` on the parameter value. It can be extended to
|
calls `.setObject` on the parameter value. It can be extended to
|
||||||
use other methods of `PreparedStatement` to convert and set parameter
|
use other methods of `PreparedStatement` to convert and set parameter
|
||||||
values. Extension via metadata is supported."
|
values. Extension via metadata is supported."
|
||||||
|
:extend-via-metadata true
|
||||||
(set-parameter [val stmt ix]
|
(set-parameter [val stmt ix]
|
||||||
"Convert a Clojure value into a SQL value and store it as the ix'th
|
"Convert a Clojure value into a SQL value and store it as the ix'th
|
||||||
parameter in the given SQL statement object."))
|
parameter in the given SQL statement object."))
|
||||||
|
|
@ -149,12 +151,13 @@
|
||||||
|
|
||||||
(defn statement
|
(defn statement
|
||||||
"Given a `Connection` and some options, return a `Statement`."
|
"Given a `Connection` and some options, return a `Statement`."
|
||||||
^java.sql.Statement
|
(^java.sql.Statement
|
||||||
([con] (statement con {}))
|
[con] (statement con {}))
|
||||||
([^Connection con
|
(^java.sql.Statement
|
||||||
{:keys [result-type concurrency cursors
|
[^Connection con
|
||||||
fetch-size max-rows timeout]
|
{:keys [result-type concurrency cursors
|
||||||
:as opts}]
|
fetch-size max-rows timeout]
|
||||||
|
:as opts}]
|
||||||
(let [^Statement stmt
|
(let [^Statement stmt
|
||||||
(cond
|
(cond
|
||||||
(and result-type concurrency)
|
(and result-type concurrency)
|
||||||
|
|
@ -185,33 +188,22 @@
|
||||||
(j/set-properties stmt props))
|
(j/set-properties stmt props))
|
||||||
stmt)))
|
stmt)))
|
||||||
|
|
||||||
(defn execute-batch!
|
(def ^:private d-r-s (volatile! nil))
|
||||||
"Given a `PreparedStatement` and a vector containing parameter groups,
|
|
||||||
i.e., a vector of vector of parameters, use `.addBatch` to add each group
|
|
||||||
of parameters to the prepared statement (via `set-parameters`) and then
|
|
||||||
call `.executeBatch`. A vector of update counts is returned.
|
|
||||||
|
|
||||||
An options hash map may also be provided, containing `:batch-size` which
|
(defn ^:no-doc execute-batch!
|
||||||
determines how to partition the parameter groups for submission to the
|
"As of 1.1.643, `next.jdbc.prepare/execute-batch!`
|
||||||
database. If omitted, all groups will be submitted as a single command.
|
(this function) is deprecated.
|
||||||
If you expect the update counts to be larger than `Integer/MAX_VALUE`,
|
|
||||||
you can specify `:large true` and `.executeLargeBatch` will be called
|
|
||||||
instead.
|
|
||||||
|
|
||||||
Returns a Clojure vector of update counts.
|
Use `next.jdbc/execute-batch!` instead."
|
||||||
|
|
||||||
May throw `java.sql.BatchUpdateException` if any part of the batch fails.
|
|
||||||
You may be able to call `.getUpdateCounts` on that exception object to
|
|
||||||
get more information about which parts succeeded and which failed.
|
|
||||||
|
|
||||||
For additional caveats and database-specific options you may need, see:
|
|
||||||
https://cljdoc.org/d/seancorfield/next.jdbc/CURRENT/doc/getting-started/prepared-statements#caveats
|
|
||||||
|
|
||||||
Not all databases support batch execution."
|
|
||||||
([ps param-groups]
|
([ps param-groups]
|
||||||
(execute-batch! ps param-groups {}))
|
(execute-batch! ps param-groups {}))
|
||||||
([^PreparedStatement ps param-groups opts]
|
([^PreparedStatement ps param-groups opts]
|
||||||
(let [params (if-let [n (:batch-size opts)]
|
(let [gen-ks (when (:return-generated-keys opts)
|
||||||
|
(when-let [drs @d-r-s]
|
||||||
|
#(drs (.getGeneratedKeys ^PreparedStatement %)
|
||||||
|
(p/get-connection ps {})
|
||||||
|
opts)))
|
||||||
|
params (if-let [n (:batch-size opts)]
|
||||||
(if (and (number? n) (pos? n))
|
(if (and (number? n) (pos? n))
|
||||||
(partition-all n param-groups)
|
(partition-all n param-groups)
|
||||||
(throw (IllegalArgumentException.
|
(throw (IllegalArgumentException.
|
||||||
|
|
@ -220,7 +212,8 @@
|
||||||
(into []
|
(into []
|
||||||
(mapcat (fn [group]
|
(mapcat (fn [group]
|
||||||
(run! #(.addBatch (set-parameters ps %)) group)
|
(run! #(.addBatch (set-parameters ps %)) group)
|
||||||
(if (:large opts)
|
(let [result (if (:large opts)
|
||||||
(.executeLargeBatch ps)
|
(.executeLargeBatch ps)
|
||||||
(.executeBatch ps))))
|
(.executeBatch ps))]
|
||||||
|
(if gen-ks (gen-ks ps) result))))
|
||||||
params))))
|
params))))
|
||||||
|
|
|
||||||
|
|
@ -1,4 +1,4 @@
|
||||||
;; copyright (c) 2018-2020 Sean Corfield, all rights reserved
|
;; copyright (c) 2018-2025 Sean Corfield, all rights reserved
|
||||||
|
|
||||||
(ns next.jdbc.protocols
|
(ns next.jdbc.protocols
|
||||||
"This is the extensible core of the next generation java.jdbc library.
|
"This is the extensible core of the next generation java.jdbc library.
|
||||||
|
|
@ -11,15 +11,16 @@
|
||||||
|
|
||||||
(set! *warn-on-reflection* true)
|
(set! *warn-on-reflection* true)
|
||||||
|
|
||||||
(defprotocol Sourceable :extend-via-metadata true
|
(defprotocol Sourceable
|
||||||
"Protocol for producing a `javax.sql.DataSource`.
|
"Protocol for producing a `javax.sql.DataSource`.
|
||||||
|
|
||||||
Implementations are provided for strings, hash maps (`db-spec` structures),
|
Implementations are provided for strings, hash maps (`db-spec` structures),
|
||||||
and also a `DataSource` (which just returns itself).
|
and also a `DataSource` (which just returns itself).
|
||||||
|
|
||||||
Extension via metadata is supported."
|
Extension via metadata is supported."
|
||||||
|
:extend-via-metadata true
|
||||||
(get-datasource ^javax.sql.DataSource [this]
|
(get-datasource ^javax.sql.DataSource [this]
|
||||||
"Produce a `javax.sql.DataSource`."))
|
"Produce a `javax.sql.DataSource`."))
|
||||||
|
|
||||||
(defprotocol Connectable
|
(defprotocol Connectable
|
||||||
"Protocol for producing a new JDBC connection that should be closed when you
|
"Protocol for producing a new JDBC connection that should be closed when you
|
||||||
|
|
@ -37,8 +38,8 @@
|
||||||
`PreparedStatement`, and `Object`, on the assumption that an `Object` can be
|
`PreparedStatement`, and `Object`, on the assumption that an `Object` can be
|
||||||
turned into a `DataSource` and therefore used to get a `Connection`."
|
turned into a `DataSource` and therefore used to get a `Connection`."
|
||||||
(-execute ^clojure.lang.IReduceInit [this sql-params opts]
|
(-execute ^clojure.lang.IReduceInit [this sql-params opts]
|
||||||
"Produce a 'reducible' that, when reduced, executes the SQL and
|
"Produce a 'reducible' that, when reduced (with an initial value), executes
|
||||||
processes the rows of the `ResultSet` directly.")
|
the SQL and processes the rows of the `ResultSet` directly.")
|
||||||
(-execute-one [this sql-params opts]
|
(-execute-one [this sql-params opts]
|
||||||
"Executes the SQL or DDL and produces the first row of the `ResultSet`
|
"Executes the SQL or DDL and produces the first row of the `ResultSet`
|
||||||
as a fully-realized, datafiable hash map (by default).")
|
as a fully-realized, datafiable hash map (by default).")
|
||||||
|
|
@ -59,5 +60,18 @@
|
||||||
|
|
||||||
Implementations are provided for `Connection`, `DataSource`, and `Object`
|
Implementations are provided for `Connection`, `DataSource`, and `Object`
|
||||||
(on the assumption that an `Object` can be turned into a `DataSource`)."
|
(on the assumption that an `Object` can be turned into a `DataSource`)."
|
||||||
|
:extend-via-metadata true
|
||||||
(-transact [this body-fn opts]
|
(-transact [this body-fn opts]
|
||||||
"Run the `body-fn` inside a transaction."))
|
"Run the `body-fn` inside a transaction."))
|
||||||
|
|
||||||
|
(defprotocol Wrapped
|
||||||
|
"Protocol for (un)wrapping a `next.jdbc` connectable.
|
||||||
|
|
||||||
|
Implementations are provided for `Object` (identity) and `DefaultOptions`
|
||||||
|
and SQLLogging."
|
||||||
|
(unwrap [this]
|
||||||
|
"Unwrap the connectable to get the underlying connectable."))
|
||||||
|
|
||||||
|
(extend-protocol Wrapped
|
||||||
|
Object
|
||||||
|
(unwrap [this] this))
|
||||||
|
|
|
||||||
|
|
@ -1,4 +1,4 @@
|
||||||
;; copyright (c) 2019-2020 Sean Corfield, all rights reserved
|
;; copyright (c) 2019-2024 Sean Corfield, all rights reserved
|
||||||
|
|
||||||
(ns next.jdbc.quoted
|
(ns next.jdbc.quoted
|
||||||
"Provides functions for use with the `:table-fn` and `:column-fn` options
|
"Provides functions for use with the `:table-fn` and `:column-fn` options
|
||||||
|
|
@ -8,11 +8,16 @@
|
||||||
|
|
||||||
(set! *warn-on-reflection* true)
|
(set! *warn-on-reflection* true)
|
||||||
|
|
||||||
(defn ansi "ANSI \"quoting\"" [s] (str \" s \"))
|
(defn strop
|
||||||
|
"Escape any embedded closing strop characters."
|
||||||
|
[s x e]
|
||||||
|
(str s (str/replace x (str e) (str e e)) e))
|
||||||
|
|
||||||
(defn mysql "MySQL `quoting`" [s] (str \` s \`))
|
(defn ansi "ANSI \"quoting\"" [s] (strop \" s \"))
|
||||||
|
|
||||||
(defn sql-server "SQL Server [quoting]" [s] (str \[ s \]))
|
(defn mysql "MySQL `quoting`" [s] (strop \` s \`))
|
||||||
|
|
||||||
|
(defn sql-server "SQL Server [quoting]" [s] (strop \[ s \]))
|
||||||
|
|
||||||
(def oracle "Oracle \"quoting\" (ANSI)" ansi)
|
(def oracle "Oracle \"quoting\" (ANSI)" ansi)
|
||||||
|
|
||||||
|
|
|
||||||
File diff suppressed because it is too large
Load diff
|
|
@ -1,4 +1,4 @@
|
||||||
;; copyright (c) 2019-2020 Sean Corfield, all rights reserved
|
;; copyright (c) 2019-2024 Sean Corfield, all rights reserved
|
||||||
|
|
||||||
(ns next.jdbc.specs
|
(ns next.jdbc.specs
|
||||||
"Specs for the core API of next.jdbc.
|
"Specs for the core API of next.jdbc.
|
||||||
|
|
@ -9,8 +9,7 @@
|
||||||
Just `:args` are spec'd. These specs are intended to aid development
|
Just `:args` are spec'd. These specs are intended to aid development
|
||||||
with `next.jdbc` by catching simple errors in calling the library.
|
with `next.jdbc` by catching simple errors in calling the library.
|
||||||
The `connectable` argument is currently just `any?` but both
|
The `connectable` argument is currently just `any?` but both
|
||||||
`get-datasource` and `get-connection` have stricter specs. If you
|
`get-datasource` and `get-connection` have stricter specs.
|
||||||
extend `Sourceable` or `Connectable`, those specs will likely be too strict.
|
|
||||||
|
|
||||||
In addition, there is an `instrument` function that provides a simple
|
In addition, there is an `instrument` function that provides a simple
|
||||||
way to instrument all of the `next.jdbc` functions, and `unstrument`
|
way to instrument all of the `next.jdbc` functions, and `unstrument`
|
||||||
|
|
@ -20,6 +19,7 @@
|
||||||
[next.jdbc :as jdbc]
|
[next.jdbc :as jdbc]
|
||||||
[next.jdbc.connection :as connection]
|
[next.jdbc.connection :as connection]
|
||||||
[next.jdbc.prepare :as prepare]
|
[next.jdbc.prepare :as prepare]
|
||||||
|
[next.jdbc.protocols :as p]
|
||||||
[next.jdbc.sql :as sql])
|
[next.jdbc.sql :as sql])
|
||||||
(:import (java.sql Connection PreparedStatement Statement)
|
(:import (java.sql Connection PreparedStatement Statement)
|
||||||
(javax.sql DataSource)))
|
(javax.sql DataSource)))
|
||||||
|
|
@ -35,7 +35,8 @@
|
||||||
(s/def ::host (s/or :name string?
|
(s/def ::host (s/or :name string?
|
||||||
:none #{:none}))
|
:none #{:none}))
|
||||||
(s/def ::host-prefix string?)
|
(s/def ::host-prefix string?)
|
||||||
(s/def ::port pos-int?)
|
(s/def ::port (s/or :port pos-int?
|
||||||
|
:none #{:none}))
|
||||||
(s/def ::db-spec-map (s/keys :req-un [::dbtype ::dbname]
|
(s/def ::db-spec-map (s/keys :req-un [::dbtype ::dbname]
|
||||||
:opt-un [::classname
|
:opt-un [::classname
|
||||||
::user ::password
|
::user ::password
|
||||||
|
|
@ -65,17 +66,30 @@
|
||||||
:ds ::datasource))
|
:ds ::datasource))
|
||||||
(s/def ::db-spec-or-jdbc (s/or :db-spec ::db-spec-map
|
(s/def ::db-spec-or-jdbc (s/or :db-spec ::db-spec-map
|
||||||
:jdbc-url ::jdbc-url-map))
|
:jdbc-url ::jdbc-url-map))
|
||||||
|
(s/def ::proto-connectable (s/or :db-spec ::db-spec
|
||||||
|
:connectable #(satisfies? p/Connectable %)
|
||||||
|
:sourceable #(satisfies? p/Sourceable %)))
|
||||||
|
|
||||||
(s/def ::connectable any?)
|
(s/def ::connectable any?)
|
||||||
(s/def ::key-map (s/map-of keyword? any?))
|
(s/def ::key-map (s/map-of keyword? any?))
|
||||||
(s/def ::example-map (s/map-of keyword? any? :min-count 1))
|
(s/def ::example-map (s/map-of keyword? any? :min-count 1))
|
||||||
|
|
||||||
|
;; can be a simple column name (keyword) or a pair of something and as alias
|
||||||
|
;; and that something can be a simple column name (keyword) or an arbitrary
|
||||||
|
;; expression (string) where we assume you know what you're doing
|
||||||
|
(s/def ::column-spec (s/or :column keyword?
|
||||||
|
:alias (s/and vector?
|
||||||
|
(s/cat :expr (s/or :col keyword?
|
||||||
|
:str string?)
|
||||||
|
:column keyword?))))
|
||||||
|
(s/def ::columns (s/coll-of ::column-spec :kind vector?))
|
||||||
|
|
||||||
(s/def ::order-by-col (s/or :col keyword?
|
(s/def ::order-by-col (s/or :col keyword?
|
||||||
:dir (s/cat :col keyword?
|
:dir (s/cat :col keyword?
|
||||||
:dir #{:asc :desc})))
|
:dir #{:asc :desc})))
|
||||||
(s/def ::order-by (s/coll-of ::order-by-col :kind vector? :min-count 1))
|
(s/def ::order-by (s/coll-of ::order-by-col :kind vector? :min-count 1))
|
||||||
(s/def ::opts-map (s/and (s/map-of keyword? any?)
|
(s/def ::opts-map (s/and (s/map-of keyword? any?)
|
||||||
(s/keys :opt-un [::order-by])))
|
(s/keys :opt-un [::columns ::order-by])))
|
||||||
|
|
||||||
(s/def ::transactable any?)
|
(s/def ::transactable any?)
|
||||||
|
|
||||||
|
|
@ -89,21 +103,17 @@
|
||||||
(s/def ::batch-opts (s/keys :opt-un [::batch-size ::large]))
|
(s/def ::batch-opts (s/keys :opt-un [::batch-size ::large]))
|
||||||
|
|
||||||
(s/fdef jdbc/get-datasource
|
(s/fdef jdbc/get-datasource
|
||||||
:args (s/cat :spec ::db-spec))
|
:args (s/cat :spec ::proto-connectable))
|
||||||
|
|
||||||
(s/fdef jdbc/get-connection
|
(s/fdef jdbc/get-connection
|
||||||
:args (s/cat :spec ::db-spec
|
:args (s/cat :spec ::proto-connectable
|
||||||
:opts (s/? ::opts-map)))
|
:opts (s/? ::opts-map)))
|
||||||
|
|
||||||
(s/fdef jdbc/prepare
|
(s/fdef jdbc/prepare
|
||||||
:args (s/cat :connection ::connection
|
:args (s/cat :connection ::proto-connectable
|
||||||
:sql-params ::sql-params
|
:sql-params ::sql-params
|
||||||
:opts (s/? ::opts-map)))
|
:opts (s/? ::opts-map)))
|
||||||
|
|
||||||
(s/fdef jdbc/statement
|
|
||||||
:args (s/cat :connection ::connection
|
|
||||||
:opts (s/? ::opts-map)))
|
|
||||||
|
|
||||||
(s/fdef jdbc/plan
|
(s/fdef jdbc/plan
|
||||||
:args (s/alt :prepared (s/cat :stmt ::statement)
|
:args (s/alt :prepared (s/cat :stmt ::statement)
|
||||||
:sql (s/cat :connectable ::connectable
|
:sql (s/cat :connectable ::connectable
|
||||||
|
|
@ -122,31 +132,67 @@
|
||||||
:sql-params (s/nilable ::sql-params)
|
:sql-params (s/nilable ::sql-params)
|
||||||
:opts (s/? ::opts-map))))
|
:opts (s/? ::opts-map))))
|
||||||
|
|
||||||
|
(s/fdef jdbc/execute-batch!
|
||||||
|
:args (s/alt :prepared (s/cat :ps ::prepared-statement
|
||||||
|
:param-groups (s/coll-of ::params :kind sequential?)
|
||||||
|
:opts (s/? ::batch-opts))
|
||||||
|
:sql (s/cat :connectable ::connectable
|
||||||
|
:sql string?
|
||||||
|
:param-groups (s/coll-of ::params :kind sequential?)
|
||||||
|
:opts ::batch-opts)))
|
||||||
|
|
||||||
(s/fdef jdbc/transact
|
(s/fdef jdbc/transact
|
||||||
:args (s/cat :transactable ::transactable
|
:args (s/cat :transactable ::transactable
|
||||||
:f fn?
|
:f fn?
|
||||||
:opts (s/? ::opts-map)))
|
:opts (s/? ::opts-map)))
|
||||||
|
|
||||||
|
(s/fdef jdbc/with-options
|
||||||
|
:args (s/cat :connectable ::connectable
|
||||||
|
:opts ::opts-map))
|
||||||
|
|
||||||
(s/fdef jdbc/with-transaction
|
(s/fdef jdbc/with-transaction
|
||||||
:args (s/cat :binding (s/and vector?
|
:args (s/cat :binding (s/and vector?
|
||||||
(s/cat :sym simple-symbol?
|
(s/cat :sym simple-symbol?
|
||||||
:transactable ::transactable
|
:transactable ::transactable
|
||||||
:opts (s/? ::opts-map)))
|
:opts (s/? any?)))
|
||||||
:body (s/* any?)))
|
:body (s/* any?)))
|
||||||
|
|
||||||
|
(s/fdef jdbc/with-transaction+options
|
||||||
|
:args (s/cat :binding (s/and vector?
|
||||||
|
(s/cat :sym simple-symbol?
|
||||||
|
:transactable ::transactable
|
||||||
|
:opts (s/? any?)))
|
||||||
|
:body (s/* any?)))
|
||||||
|
|
||||||
|
(s/fdef jdbc/on-connection
|
||||||
|
:args (s/cat :binding (s/and vector?
|
||||||
|
(s/cat :sym simple-symbol?
|
||||||
|
:connectable ::connectable))
|
||||||
|
:body (s/* any?)))
|
||||||
|
|
||||||
|
(s/fdef jdbc/on-connection+options
|
||||||
|
:args (s/cat :binding (s/and vector?
|
||||||
|
(s/cat :sym simple-symbol?
|
||||||
|
:connectable ::connectable))
|
||||||
|
:body (s/* any?)))
|
||||||
|
|
||||||
(s/fdef connection/->pool
|
(s/fdef connection/->pool
|
||||||
:args (s/cat :clazz #(instance? Class %)
|
:args (s/cat :clazz #(instance? Class %)
|
||||||
:db-spec ::db-spec-or-jdbc))
|
:db-spec ::db-spec-or-jdbc))
|
||||||
|
|
||||||
(s/fdef prepare/execute-batch!
|
(s/fdef connection/component
|
||||||
:args (s/cat :ps ::prepared-statement
|
:args (s/cat :clazz #(instance? Class %)
|
||||||
:param-groups (s/coll-of ::params :kind sequential?)
|
:db-spec ::db-spec-or-jdbc
|
||||||
:opts (s/? ::batch-opts)))
|
:close-fn (s/? fn?)))
|
||||||
|
|
||||||
(s/fdef prepare/set-parameters
|
(s/fdef prepare/set-parameters
|
||||||
:args (s/cat :ps ::prepared-statement
|
:args (s/cat :ps ::prepared-statement
|
||||||
:params ::params))
|
:params ::params))
|
||||||
|
|
||||||
|
(s/fdef prepare/statement
|
||||||
|
:args (s/cat :connection ::connection
|
||||||
|
:opts (s/? ::opts-map)))
|
||||||
|
|
||||||
(s/fdef sql/insert!
|
(s/fdef sql/insert!
|
||||||
:args (s/cat :connectable ::connectable
|
:args (s/cat :connectable ::connectable
|
||||||
:table keyword?
|
:table keyword?
|
||||||
|
|
@ -154,16 +200,24 @@
|
||||||
:opts (s/? ::opts-map)))
|
:opts (s/? ::opts-map)))
|
||||||
|
|
||||||
(s/fdef sql/insert-multi!
|
(s/fdef sql/insert-multi!
|
||||||
:args (s/and (s/cat :connectable ::connectable
|
:args
|
||||||
:table keyword?
|
(s/or
|
||||||
:cols (s/coll-of keyword?
|
:with-rows-and-columns
|
||||||
:kind sequential?
|
(s/and (s/cat :connectable ::connectable
|
||||||
:min-count 1)
|
:table keyword?
|
||||||
:rows (s/coll-of (s/coll-of any? :kind sequential?)
|
:cols (s/coll-of keyword? :kind sequential?)
|
||||||
:kind sequential?)
|
:rows (s/coll-of (s/coll-of any?
|
||||||
:opts (s/? ::opts-map))
|
:kind sequential?
|
||||||
#(apply = (count (:cols %))
|
:min-count 1)
|
||||||
(map count (:rows %)))))
|
:kind sequential?)
|
||||||
|
:opts (s/? ::opts-map))
|
||||||
|
#(apply = (count (:cols %))
|
||||||
|
(map count (:rows %))))
|
||||||
|
:with-hash-maps
|
||||||
|
(s/cat :connectable ::connectable
|
||||||
|
:table keyword?
|
||||||
|
:hash-maps (s/coll-of map? :kind sequential?)
|
||||||
|
:opts (s/? ::opts-map))))
|
||||||
|
|
||||||
(s/fdef sql/query
|
(s/fdef sql/query
|
||||||
:args (s/cat :connectable ::connectable
|
:args (s/cat :connectable ::connectable
|
||||||
|
|
@ -174,9 +228,19 @@
|
||||||
:args (s/cat :connectable ::connectable
|
:args (s/cat :connectable ::connectable
|
||||||
:table keyword?
|
:table keyword?
|
||||||
:key-map (s/or :example ::example-map
|
:key-map (s/or :example ::example-map
|
||||||
:where ::sql-params)
|
:where ::sql-params
|
||||||
|
:all #{:all})
|
||||||
:opts (s/? ::opts-map)))
|
:opts (s/? ::opts-map)))
|
||||||
|
|
||||||
|
(s/fdef sql/aggregate-by-keys
|
||||||
|
:args (s/cat :connectable ::connectable
|
||||||
|
:table keyword?
|
||||||
|
:aggregate string?
|
||||||
|
:key-map (s/or :example ::example-map
|
||||||
|
:where ::sql-params
|
||||||
|
:all #{:all})
|
||||||
|
:opts (s/? ::opts-map)))
|
||||||
|
|
||||||
(s/fdef sql/get-by-id
|
(s/fdef sql/get-by-id
|
||||||
:args (s/alt :with-id (s/cat :connectable ::connectable
|
:args (s/alt :with-id (s/cat :connectable ::connectable
|
||||||
:table keyword?
|
:table keyword?
|
||||||
|
|
@ -210,10 +274,12 @@
|
||||||
`jdbc/plan
|
`jdbc/plan
|
||||||
`jdbc/execute!
|
`jdbc/execute!
|
||||||
`jdbc/execute-one!
|
`jdbc/execute-one!
|
||||||
|
`jdbc/execute-batch!
|
||||||
`jdbc/transact
|
`jdbc/transact
|
||||||
`jdbc/with-transaction
|
`jdbc/with-transaction
|
||||||
|
`jdbc/with-options
|
||||||
`connection/->pool
|
`connection/->pool
|
||||||
`prepare/execute-batch!
|
`connection/component
|
||||||
`prepare/set-parameters
|
`prepare/set-parameters
|
||||||
`prepare/statement
|
`prepare/statement
|
||||||
`sql/insert!
|
`sql/insert!
|
||||||
|
|
|
||||||
|
|
@ -1,4 +1,4 @@
|
||||||
;; copyright (c) 2019-2020 Sean Corfield, all rights reserved
|
;; copyright (c) 2019-2024 Sean Corfield, all rights reserved
|
||||||
|
|
||||||
(ns next.jdbc.sql
|
(ns next.jdbc.sql
|
||||||
"Some utility functions that make common operations easier by
|
"Some utility functions that make common operations easier by
|
||||||
|
|
@ -9,7 +9,7 @@
|
||||||
`get-by-id`, `update!`, and `delete!`).
|
`get-by-id`, `update!`, and `delete!`).
|
||||||
|
|
||||||
For anything more complex, use a library like HoneySQL
|
For anything more complex, use a library like HoneySQL
|
||||||
https://github.com/jkk/honeysql to generate SQL + parameters.
|
https://github.com/seancorfield/honeysql to generate SQL + parameters.
|
||||||
|
|
||||||
The following options are supported:
|
The following options are supported:
|
||||||
* `:table-fn` -- specify a function used to convert table names (strings)
|
* `:table-fn` -- specify a function used to convert table names (strings)
|
||||||
|
|
@ -21,10 +21,11 @@
|
||||||
|
|
||||||
In addition, `find-by-keys` supports `:order-by` to add an `ORDER BY`
|
In addition, `find-by-keys` supports `:order-by` to add an `ORDER BY`
|
||||||
clause to the generated SQL."
|
clause to the generated SQL."
|
||||||
(:require [next.jdbc :refer [execute! execute-one!]]
|
(:require [clojure.string :as str]
|
||||||
|
[next.jdbc :refer [execute! execute-batch! execute-one!]]
|
||||||
[next.jdbc.sql.builder
|
[next.jdbc.sql.builder
|
||||||
:refer [for-delete for-insert for-insert-multi
|
:refer [for-delete for-insert for-insert-multi for-query
|
||||||
for-query for-update]]))
|
for-update]]))
|
||||||
|
|
||||||
(set! *warn-on-reflection* true)
|
(set! *warn-on-reflection* true)
|
||||||
|
|
||||||
|
|
@ -37,29 +38,61 @@
|
||||||
([connectable table key-map]
|
([connectable table key-map]
|
||||||
(insert! connectable table key-map {}))
|
(insert! connectable table key-map {}))
|
||||||
([connectable table key-map opts]
|
([connectable table key-map opts]
|
||||||
(execute-one! connectable
|
(let [opts (merge (:options connectable) opts)]
|
||||||
(for-insert table key-map opts)
|
(execute-one! connectable
|
||||||
(merge {:return-keys true} opts))))
|
(for-insert table key-map opts)
|
||||||
|
(merge {:return-keys true} opts)))))
|
||||||
|
|
||||||
(defn insert-multi!
|
(defn insert-multi!
|
||||||
"Syntactic sugar over `execute!` to make inserting columns/rows easier.
|
"Syntactic sugar over `execute!` or `execute-batch!` to make inserting
|
||||||
|
columns/rows easier.
|
||||||
|
|
||||||
Given a connectable object, a table name, a sequence of column names, and
|
Given a connectable object, a table name, a sequence of column names, and
|
||||||
a vector of rows of data (vectors of column values), inserts the data as
|
a vector of rows of data (vectors of column values), inserts the data as
|
||||||
multiple rows in the database and attempts to return a vector of maps of
|
multiple rows in the database and attempts to return a vector of maps of
|
||||||
generated keys.
|
generated keys.
|
||||||
|
|
||||||
Note: this expands to a single SQL statement with placeholders for every
|
Given a connectable object, a table name, a sequence of hash maps of data,
|
||||||
value being inserted -- for large sets of rows, this may exceed the limits
|
which all have the same set of keys, inserts the data as multiple rows in
|
||||||
|
the database and attempts to return a vector of maps of generated keys.
|
||||||
|
|
||||||
|
If called with `:batch` true will call `execute-batch!` - see its documentation
|
||||||
|
for situations in which the generated keys may or may not be returned as well as
|
||||||
|
additional options that can be passed.
|
||||||
|
|
||||||
|
Note: without `:batch` this expands to a single SQL statement with placeholders for
|
||||||
|
every value being inserted -- for large sets of rows, this may exceed the limits
|
||||||
on SQL string size and/or number of parameters for your JDBC driver or your
|
on SQL string size and/or number of parameters for your JDBC driver or your
|
||||||
database!"
|
database!"
|
||||||
([connectable table cols rows]
|
{:arglists '([connectable table hash-maps]
|
||||||
(insert-multi! connectable table cols rows {}))
|
[connectable table hash-maps opts]
|
||||||
|
[connectable table cols rows]
|
||||||
|
[connectable table cols rows opts])}
|
||||||
|
([connectable table hash-maps]
|
||||||
|
(insert-multi! connectable table hash-maps {}))
|
||||||
|
([connectable table hash-maps-or-cols opts-or-rows]
|
||||||
|
(if (map? (first hash-maps-or-cols))
|
||||||
|
(let [cols (keys (first hash-maps-or-cols))
|
||||||
|
->row (fn ->row [m]
|
||||||
|
(map #(get m %) cols))]
|
||||||
|
(when-not (apply = (map (comp set keys) hash-maps-or-cols))
|
||||||
|
(throw (IllegalArgumentException.
|
||||||
|
"insert-multi! hash maps must all have the same keys")))
|
||||||
|
(insert-multi! connectable table cols (map ->row hash-maps-or-cols) opts-or-rows))
|
||||||
|
(if (map? opts-or-rows)
|
||||||
|
(insert-multi! connectable table hash-maps-or-cols [] opts-or-rows)
|
||||||
|
(insert-multi! connectable table hash-maps-or-cols opts-or-rows {}))))
|
||||||
([connectable table cols rows opts]
|
([connectable table cols rows opts]
|
||||||
(if (seq rows)
|
(if (seq rows)
|
||||||
(execute! connectable
|
(let [opts (merge (:options connectable) opts)
|
||||||
(for-insert-multi table cols rows opts)
|
batch? (:batch opts)]
|
||||||
(merge {:return-keys true} opts))
|
(if batch?
|
||||||
|
(let [[sql & param-groups] (for-insert-multi table cols rows opts)]
|
||||||
|
(execute-batch! connectable sql param-groups
|
||||||
|
(merge {:return-keys true :return-generated-keys true} opts)))
|
||||||
|
(execute! connectable
|
||||||
|
(for-insert-multi table cols rows opts)
|
||||||
|
(merge {:return-keys true} opts))))
|
||||||
[])))
|
[])))
|
||||||
|
|
||||||
(defn query
|
(defn query
|
||||||
|
|
@ -70,7 +103,8 @@
|
||||||
([connectable sql-params]
|
([connectable sql-params]
|
||||||
(query connectable sql-params {}))
|
(query connectable sql-params {}))
|
||||||
([connectable sql-params opts]
|
([connectable sql-params opts]
|
||||||
(execute! connectable sql-params opts)))
|
(let [opts (merge (:options connectable) opts)]
|
||||||
|
(execute! connectable sql-params opts))))
|
||||||
|
|
||||||
(defn find-by-keys
|
(defn find-by-keys
|
||||||
"Syntactic sugar over `execute!` to make certain common queries easier.
|
"Syntactic sugar over `execute!` to make certain common queries easier.
|
||||||
|
|
@ -79,13 +113,67 @@
|
||||||
columns and values to search on or a vector of a SQL where clause and
|
columns and values to search on or a vector of a SQL where clause and
|
||||||
parameters, returns a vector of hash maps of rows that match.
|
parameters, returns a vector of hash maps of rows that match.
|
||||||
|
|
||||||
|
If `:all` is passed instead of a hash map or vector -- the query will
|
||||||
|
select all rows in the table, subject to any pagination options below.
|
||||||
|
|
||||||
|
If `:columns` is passed, only that specified subset of columns will be
|
||||||
|
returned in each row (otherwise all columns are selected).
|
||||||
|
|
||||||
If the `:order-by` option is present, add an `ORDER BY` clause. `:order-by`
|
If the `:order-by` option is present, add an `ORDER BY` clause. `:order-by`
|
||||||
should be a vector of column names or pairs of column name / direction,
|
should be a vector of column names or pairs of column name / direction,
|
||||||
which can be `:asc` or `:desc`."
|
which can be `:asc` or `:desc`.
|
||||||
|
|
||||||
|
If the `:top` option is present, the SQL Server `SELECT TOP ?` syntax
|
||||||
|
is used and the value of the option is inserted as an additional parameter.
|
||||||
|
|
||||||
|
If the `:limit` option is present, the MySQL `LIMIT ? OFFSET ?` syntax
|
||||||
|
is used (using the `:offset` option if present, else `OFFSET ?` is omitted).
|
||||||
|
PostgreSQL also supports this syntax.
|
||||||
|
|
||||||
|
If the `:offset` option is present (without `:limit`), the standard
|
||||||
|
`OFFSET ? ROWS FETCH NEXT ? ROWS ONLY` syntax is used (using the `:fetch`
|
||||||
|
option if present, else `FETCH...` is omitted)."
|
||||||
([connectable table key-map]
|
([connectable table key-map]
|
||||||
(find-by-keys connectable table key-map {}))
|
(find-by-keys connectable table key-map {}))
|
||||||
([connectable table key-map opts]
|
([connectable table key-map opts]
|
||||||
(execute! connectable (for-query table key-map opts) opts)))
|
(let [opts (merge (:options connectable) opts)]
|
||||||
|
(execute! connectable (for-query table key-map opts) opts))))
|
||||||
|
|
||||||
|
(defn aggregate-by-keys
|
||||||
|
"A wrapper over `find-by-keys` that additionally takes an aggregate SQL
|
||||||
|
expression (a string), and returns just a single result: the value of that
|
||||||
|
of that aggregate for the matching rows.
|
||||||
|
|
||||||
|
Accepts all the same options as `find-by-keys` except `:columns` since that
|
||||||
|
is used internally by this wrapper to pass the aggregate expression in."
|
||||||
|
([connectable table aggregate key-map]
|
||||||
|
(aggregate-by-keys connectable table aggregate key-map {}))
|
||||||
|
([connectable table aggregate key-map opts]
|
||||||
|
(let [opts (merge (:options connectable) opts)
|
||||||
|
_
|
||||||
|
(when-not (string? aggregate)
|
||||||
|
(throw (IllegalArgumentException.
|
||||||
|
"aggregate-by-keys requires a string aggregate expression")))
|
||||||
|
_
|
||||||
|
(when (:columns opts)
|
||||||
|
(throw (IllegalArgumentException.
|
||||||
|
"aggregate-by-keys does not support the :columns option")))
|
||||||
|
|
||||||
|
;; this should be unique enough as an alias to never clash with
|
||||||
|
;; a real column name in anyone's tables -- in addition it is
|
||||||
|
;; stable for a given aggregate expression so it should allow
|
||||||
|
;; for query caching in the JDBC driver:
|
||||||
|
;; (we use abs to avoid negative hash codes which would produce
|
||||||
|
;; a hyphen in the alias name which is not valid in SQL identifiers)
|
||||||
|
total-name (str "next_jdbc_aggregate_"
|
||||||
|
(Math/abs (.hashCode ^String aggregate)))
|
||||||
|
total-column (keyword total-name)
|
||||||
|
;; because some databases return uppercase column names:
|
||||||
|
total-col-u (keyword (str/upper-case total-name))]
|
||||||
|
(-> (find-by-keys connectable table key-map
|
||||||
|
(assoc opts :columns [[aggregate total-column]]))
|
||||||
|
(first)
|
||||||
|
(as-> row (or (get row total-column) (get row total-col-u)))))))
|
||||||
|
|
||||||
(defn get-by-id
|
(defn get-by-id
|
||||||
"Syntactic sugar over `execute-one!` to make certain common queries easier.
|
"Syntactic sugar over `execute-one!` to make certain common queries easier.
|
||||||
|
|
@ -94,13 +182,21 @@
|
||||||
a hash map of the first row that matches.
|
a hash map of the first row that matches.
|
||||||
|
|
||||||
By default, the primary key is assumed to be `id` but that can be overridden
|
By default, the primary key is assumed to be `id` but that can be overridden
|
||||||
in the five-argument call."
|
in the five-argument call.
|
||||||
|
|
||||||
|
As with `find-by-keys`, you can specify `:columns` to return just a
|
||||||
|
subset of the columns in the returned row.
|
||||||
|
|
||||||
|
Technically, this also supports `:order-by`, `:top`, `:limit`, `:offset`,
|
||||||
|
and `:fetch` -- like `find-by-keys` -- but they don't make as much sense
|
||||||
|
here since only one row is ever returned."
|
||||||
([connectable table pk]
|
([connectable table pk]
|
||||||
(get-by-id connectable table pk :id {}))
|
(get-by-id connectable table pk :id {}))
|
||||||
([connectable table pk opts]
|
([connectable table pk opts]
|
||||||
(get-by-id connectable table pk :id opts))
|
(get-by-id connectable table pk :id opts))
|
||||||
([connectable table pk pk-name opts]
|
([connectable table pk pk-name opts]
|
||||||
(execute-one! connectable (for-query table {pk-name pk} opts) opts)))
|
(let [opts (merge (:options connectable) opts)]
|
||||||
|
(execute-one! connectable (for-query table {pk-name pk} opts) opts))))
|
||||||
|
|
||||||
(defn update!
|
(defn update!
|
||||||
"Syntactic sugar over `execute-one!` to make certain common updates easier.
|
"Syntactic sugar over `execute-one!` to make certain common updates easier.
|
||||||
|
|
@ -111,9 +207,10 @@
|
||||||
([connectable table key-map where-params]
|
([connectable table key-map where-params]
|
||||||
(update! connectable table key-map where-params {}))
|
(update! connectable table key-map where-params {}))
|
||||||
([connectable table key-map where-params opts]
|
([connectable table key-map where-params opts]
|
||||||
(execute-one! connectable
|
(let [opts (merge (:options connectable) opts)]
|
||||||
(for-update table key-map where-params opts)
|
(execute-one! connectable
|
||||||
opts)))
|
(for-update table key-map where-params opts)
|
||||||
|
opts))))
|
||||||
|
|
||||||
(defn delete!
|
(defn delete!
|
||||||
"Syntactic sugar over `execute-one!` to make certain common deletes easier.
|
"Syntactic sugar over `execute-one!` to make certain common deletes easier.
|
||||||
|
|
@ -124,4 +221,5 @@
|
||||||
([connectable table where-params]
|
([connectable table where-params]
|
||||||
(delete! connectable table where-params {}))
|
(delete! connectable table where-params {}))
|
||||||
([connectable table where-params opts]
|
([connectable table where-params opts]
|
||||||
(execute-one! connectable (for-delete table where-params opts) opts)))
|
(let [opts (merge (:options connectable) opts)]
|
||||||
|
(execute-one! connectable (for-delete table where-params opts) opts))))
|
||||||
|
|
|
||||||
|
|
@ -1,10 +1,10 @@
|
||||||
;; copyright (c) 2019-2020 Sean Corfield, all rights reserved
|
;; copyright (c) 2019-2024 Sean Corfield, all rights reserved
|
||||||
|
|
||||||
(ns next.jdbc.sql.builder
|
(ns next.jdbc.sql.builder
|
||||||
"Some utility functions for building SQL strings.
|
"Some utility functions for building SQL strings.
|
||||||
|
|
||||||
These were originally private functions in `next.jdbc.sql` but
|
These were originally private functions in `next.jdbc.sql` but
|
||||||
they may proof useful to developers who want to write their own
|
they may prove useful to developers who want to write their own
|
||||||
'SQL sugar' functions, such as a database-specific `upsert!` etc."
|
'SQL sugar' functions, such as a database-specific `upsert!` etc."
|
||||||
(:require [clojure.string :as str]))
|
(:require [clojure.string :as str]))
|
||||||
|
|
||||||
|
|
@ -13,16 +13,65 @@
|
||||||
(defn as-?
|
(defn as-?
|
||||||
"Given a hash map of column names and values, or a vector of column names,
|
"Given a hash map of column names and values, or a vector of column names,
|
||||||
return a string of `?` placeholders for them."
|
return a string of `?` placeholders for them."
|
||||||
[key-map opts]
|
[key-map _]
|
||||||
(str/join ", " (repeat (count key-map) "?")))
|
(str/join ", " (repeat (count key-map) "?")))
|
||||||
|
|
||||||
|
;; similar to honeysql, by default we disallow suspicious
|
||||||
|
;; characters in table and column names when building SQL:
|
||||||
|
(def ^:private ^:dynamic *allow-suspicious-entities* false)
|
||||||
|
|
||||||
|
(defn- safe-name
|
||||||
|
"A wrapper for `name` that throws an exception if the
|
||||||
|
resulting string looks 'suspicious' as a table or column."
|
||||||
|
[k]
|
||||||
|
(let [entity (name k)
|
||||||
|
suspicious #";"]
|
||||||
|
(when-not *allow-suspicious-entities*
|
||||||
|
(when (re-find suspicious entity)
|
||||||
|
(throw (ex-info (str "suspicious character found in entity: " entity)
|
||||||
|
{:disallowed suspicious}))))
|
||||||
|
entity))
|
||||||
|
|
||||||
|
(defn as-cols
|
||||||
|
"Given a sequence of raw column names, return a string of all the
|
||||||
|
formatted column names.
|
||||||
|
|
||||||
|
If a raw column name is a keyword, apply `:column-fn` to its name,
|
||||||
|
from the options if present.
|
||||||
|
|
||||||
|
If a raw column name is a vector pair, treat it as an expression with
|
||||||
|
an alias. If the first item is a keyword, apply `:column-fn` to its
|
||||||
|
name, else accept it as-is. The second item should be a keyword and
|
||||||
|
that will have `:column-fn` applied to its name.
|
||||||
|
|
||||||
|
This allows columns to be specified as simple names, e.g., `:foo`,
|
||||||
|
as simple aliases, e.g., `[:foo :bar]`, or as expressions with an
|
||||||
|
alias, e.g., `[\"count(*)\" :total]`."
|
||||||
|
[cols opts]
|
||||||
|
(let [col-fn (:column-fn opts identity)]
|
||||||
|
(str/join ", " (map (fn [raw]
|
||||||
|
(if (vector? raw)
|
||||||
|
(if (keyword? (first raw))
|
||||||
|
(str (col-fn (safe-name (first raw)))
|
||||||
|
" AS "
|
||||||
|
(col-fn (safe-name (second raw))))
|
||||||
|
(str (first raw)
|
||||||
|
" AS "
|
||||||
|
(col-fn (safe-name (second raw)))))
|
||||||
|
(col-fn (safe-name raw))))
|
||||||
|
cols))))
|
||||||
|
|
||||||
|
|
||||||
(defn as-keys
|
(defn as-keys
|
||||||
"Given a hash map of column names and values, return a string of all the
|
"Given a hash map of column names and values, return a string of all the
|
||||||
column names.
|
column names.
|
||||||
|
|
||||||
Applies any `:column-fn` supplied in the options."
|
Applies any `:column-fn` supplied in the options."
|
||||||
[key-map opts]
|
[key-map opts]
|
||||||
(str/join ", " (map (comp (:column-fn opts identity) name) (keys key-map))))
|
(as-cols (keys key-map) opts))
|
||||||
|
|
||||||
|
(defn- validate [expr ^String msg]
|
||||||
|
(when-not expr (throw (IllegalArgumentException. msg))))
|
||||||
|
|
||||||
(defn by-keys
|
(defn by-keys
|
||||||
"Given a hash map of column names and values and a clause type
|
"Given a hash map of column names and values and a clause type
|
||||||
|
|
@ -32,14 +81,14 @@
|
||||||
[key-map clause opts]
|
[key-map clause opts]
|
||||||
(let [entity-fn (:column-fn opts identity)
|
(let [entity-fn (:column-fn opts identity)
|
||||||
[where params] (reduce-kv (fn [[conds params] k v]
|
[where params] (reduce-kv (fn [[conds params] k v]
|
||||||
(let [e (entity-fn (name k))]
|
(let [e (entity-fn (safe-name k))]
|
||||||
(if (and (= :where clause) (nil? v))
|
(if (and (= :where clause) (nil? v))
|
||||||
[(conj conds (str e " IS NULL")) params]
|
[(conj conds (str e " IS NULL")) params]
|
||||||
[(conj conds (str e " = ?")) (conj params v)])))
|
[(conj conds (str e " = ?")) (conj params v)])))
|
||||||
[[] []]
|
[[] []]
|
||||||
key-map)]
|
key-map)]
|
||||||
(assert (seq where) "key-map may not be empty")
|
(validate (seq where) "key-map may not be empty")
|
||||||
(into [(str (str/upper-case (name clause)) " "
|
(into [(str (str/upper-case (safe-name clause)) " "
|
||||||
(str/join (if (= :where clause) " AND " ", ") where))]
|
(str/join (if (= :where clause) " AND " ", ") where))]
|
||||||
params)))
|
params)))
|
||||||
|
|
||||||
|
|
@ -58,7 +107,7 @@
|
||||||
(by-keys where-params :where opts)
|
(by-keys where-params :where opts)
|
||||||
(into [(str "WHERE " (first where-params))]
|
(into [(str "WHERE " (first where-params))]
|
||||||
(rest where-params)))]
|
(rest where-params)))]
|
||||||
(into [(str "DELETE FROM " (entity-fn (name table))
|
(into [(str "DELETE FROM " (entity-fn (safe-name table))
|
||||||
" " (first where-params)
|
" " (first where-params)
|
||||||
(when-let [suffix (:suffix opts)]
|
(when-let [suffix (:suffix opts)]
|
||||||
(str " " suffix)))]
|
(str " " suffix)))]
|
||||||
|
|
@ -76,8 +125,8 @@
|
||||||
(let [entity-fn (:table-fn opts identity)
|
(let [entity-fn (:table-fn opts identity)
|
||||||
params (as-keys key-map opts)
|
params (as-keys key-map opts)
|
||||||
places (as-? key-map opts)]
|
places (as-? key-map opts)]
|
||||||
(assert (seq key-map) "key-map may not be empty")
|
(validate (seq key-map) "key-map may not be empty")
|
||||||
(into [(str "INSERT INTO " (entity-fn (name table))
|
(into [(str "INSERT INTO " (entity-fn (safe-name table))
|
||||||
" (" params ")"
|
" (" params ")"
|
||||||
" VALUES (" places ")"
|
" VALUES (" places ")"
|
||||||
(when-let [suffix (:suffix opts)]
|
(when-let [suffix (:suffix opts)]
|
||||||
|
|
@ -91,37 +140,49 @@
|
||||||
|
|
||||||
Applies any `:table-fn` / `:column-fn` supplied in the options.
|
Applies any `:table-fn` / `:column-fn` supplied in the options.
|
||||||
|
|
||||||
|
If `:batch` is set to `true` in `opts` the INSERT statement will be prepared
|
||||||
|
using a single set of placeholders and remaining parameters in the vector will
|
||||||
|
be grouped at the row level.
|
||||||
|
|
||||||
If `:suffix` is provided in `opts`, that string is appended to the
|
If `:suffix` is provided in `opts`, that string is appended to the
|
||||||
`INSERT ...` statement."
|
`INSERT ...` statement."
|
||||||
[table cols rows opts]
|
[table cols rows opts]
|
||||||
(assert (apply = (count cols) (map count rows))
|
(validate (apply = (count cols) (map count rows))
|
||||||
"column counts are not consistent across cols and rows")
|
"column counts are not consistent across cols and rows")
|
||||||
;; to avoid generating bad SQL
|
;; to avoid generating bad SQL
|
||||||
(assert (seq cols) "cols may not be empty")
|
(validate (seq cols) "cols may not be empty")
|
||||||
(assert (seq rows) "rows may not be empty")
|
(validate (seq rows) "rows may not be empty")
|
||||||
(let [table-fn (:table-fn opts identity)
|
(let [table-fn (:table-fn opts identity)
|
||||||
column-fn (:column-fn opts identity)
|
batch? (:batch opts)
|
||||||
params (str/join ", " (map (comp column-fn name) cols))
|
params (as-cols cols opts)
|
||||||
places (as-? (first rows) opts)]
|
places (as-? (first rows) opts)]
|
||||||
(into [(str "INSERT INTO " (table-fn (name table))
|
(into [(str "INSERT INTO " (table-fn (safe-name table))
|
||||||
" (" params ")"
|
" (" params ")"
|
||||||
" VALUES "
|
" VALUES "
|
||||||
(str/join ", " (repeat (count rows) (str "(" places ")")))
|
(if batch?
|
||||||
|
(str "(" places ")")
|
||||||
|
(str/join ", " (repeat (count rows) (str "(" places ")"))))
|
||||||
(when-let [suffix (:suffix opts)]
|
(when-let [suffix (:suffix opts)]
|
||||||
(str " " suffix)))]
|
(str " " suffix)))]
|
||||||
cat
|
(if batch? identity cat)
|
||||||
rows)))
|
rows)))
|
||||||
|
|
||||||
|
(comment
|
||||||
|
(as-cols [:aa :bb :cc] {})
|
||||||
|
(for-insert-multi :table [:aa :bb :cc] [[1 2 3] [4 5 6]]
|
||||||
|
{:table-fn str/upper-case :column-fn str/capitalize})
|
||||||
|
)
|
||||||
|
|
||||||
(defn for-order-col
|
(defn for-order-col
|
||||||
"Given a column name, or a pair of column name and direction,
|
"Given a column name, or a pair of column name and direction,
|
||||||
return the sub-clause for addition to `ORDER BY`."
|
return the sub-clause for addition to `ORDER BY`."
|
||||||
[col opts]
|
[col opts]
|
||||||
(let [entity-fn (:column-fn opts identity)]
|
(let [entity-fn (:column-fn opts identity)]
|
||||||
(cond (keyword? col)
|
(cond (keyword? col)
|
||||||
(entity-fn (name col))
|
(entity-fn (safe-name col))
|
||||||
|
|
||||||
(and (vector? col) (= 2 (count col)) (keyword? (first col)))
|
(and (vector? col) (= 2 (count col)) (keyword? (first col)))
|
||||||
(str (entity-fn (name (first col)))
|
(str (entity-fn (safe-name (first col)))
|
||||||
" "
|
" "
|
||||||
(or (get {:asc "ASC" :desc "DESC"} (second col))
|
(or (get {:asc "ASC" :desc "DESC"} (second col))
|
||||||
(throw (IllegalArgumentException.
|
(throw (IllegalArgumentException.
|
||||||
|
|
@ -137,7 +198,7 @@
|
||||||
[order-by opts]
|
[order-by opts]
|
||||||
(when-not (vector? order-by)
|
(when-not (vector? order-by)
|
||||||
(throw (IllegalArgumentException. ":order-by must be a vector")))
|
(throw (IllegalArgumentException. ":order-by must be a vector")))
|
||||||
(assert (seq order-by) ":order-by may not be empty")
|
(validate (seq order-by) ":order-by may not be empty")
|
||||||
(str "ORDER BY "
|
(str "ORDER BY "
|
||||||
(str/join ", " (map #(for-order-col % opts) order-by))))
|
(str/join ", " (map #(for-order-col % opts) order-by))))
|
||||||
|
|
||||||
|
|
@ -148,18 +209,49 @@
|
||||||
|
|
||||||
Applies any `:table-fn` / `:column-fn` supplied in the options.
|
Applies any `:table-fn` / `:column-fn` supplied in the options.
|
||||||
|
|
||||||
|
Handles pagination options (`:top`, `:limit` / `:offset`, or `:offset` /
|
||||||
|
`:fetch`) for SQL Server, MySQL / SQLite, ANSI SQL respectively.
|
||||||
|
|
||||||
|
By default, this selects all columns, but if the `:columns` option is
|
||||||
|
present the select will only be those columns.
|
||||||
|
|
||||||
If `:suffix` is provided in `opts`, that string is appended to the
|
If `:suffix` is provided in `opts`, that string is appended to the
|
||||||
`SELECT ...` statement."
|
`SELECT ...` statement."
|
||||||
[table where-params opts]
|
[table where-params opts]
|
||||||
(let [entity-fn (:table-fn opts identity)
|
(let [entity-fn (:table-fn opts identity)
|
||||||
where-params (if (map? where-params)
|
where-params (cond (map? where-params)
|
||||||
(by-keys where-params :where opts)
|
(by-keys where-params :where opts)
|
||||||
(into [(str "WHERE " (first where-params))]
|
(= :all where-params)
|
||||||
(rest where-params)))]
|
[nil]
|
||||||
(into [(str "SELECT * FROM " (entity-fn (name table))
|
:else
|
||||||
" " (first where-params)
|
(into [(str "WHERE " (first where-params))]
|
||||||
|
(rest where-params)))
|
||||||
|
where-params (cond-> (if (:top opts)
|
||||||
|
(into [(first where-params)]
|
||||||
|
(cons (:top opts) (rest where-params)))
|
||||||
|
where-params)
|
||||||
|
(:limit opts) (conj (:limit opts))
|
||||||
|
(:offset opts) (conj (:offset opts))
|
||||||
|
(:fetch opts) (conj (:fetch opts)))]
|
||||||
|
(into [(str "SELECT "
|
||||||
|
(when (:top opts)
|
||||||
|
"TOP ? ")
|
||||||
|
(if-let [cols (seq (:columns opts))]
|
||||||
|
(as-cols cols opts)
|
||||||
|
"*")
|
||||||
|
" FROM " (entity-fn (safe-name table))
|
||||||
|
(when-let [clause (first where-params)]
|
||||||
|
(str " " clause))
|
||||||
(when-let [order-by (:order-by opts)]
|
(when-let [order-by (:order-by opts)]
|
||||||
(str " " (for-order order-by opts)))
|
(str " " (for-order order-by opts)))
|
||||||
|
(when (:limit opts)
|
||||||
|
" LIMIT ?")
|
||||||
|
(when (:offset opts)
|
||||||
|
(if (:limit opts)
|
||||||
|
" OFFSET ?"
|
||||||
|
" OFFSET ? ROWS"))
|
||||||
|
(when (:fetch opts)
|
||||||
|
" FETCH NEXT ? ROWS ONLY")
|
||||||
(when-let [suffix (:suffix opts)]
|
(when-let [suffix (:suffix opts)]
|
||||||
(str " " suffix)))]
|
(str " " suffix)))]
|
||||||
(rest where-params))))
|
(rest where-params))))
|
||||||
|
|
@ -181,7 +273,7 @@
|
||||||
(by-keys where-params :where opts)
|
(by-keys where-params :where opts)
|
||||||
(into [(str "WHERE " (first where-params))]
|
(into [(str "WHERE " (first where-params))]
|
||||||
(rest where-params)))]
|
(rest where-params)))]
|
||||||
(-> [(str "UPDATE " (entity-fn (name table))
|
(-> [(str "UPDATE " (entity-fn (safe-name table))
|
||||||
" " (first set-params)
|
" " (first set-params)
|
||||||
" " (first where-params)
|
" " (first where-params)
|
||||||
(when-let [suffix (:suffix opts)]
|
(when-let [suffix (:suffix opts)]
|
||||||
|
|
|
||||||
68
src/next/jdbc/sql_logging.clj
Normal file
68
src/next/jdbc/sql_logging.clj
Normal file
|
|
@ -0,0 +1,68 @@
|
||||||
|
;; copyright (c) 2021-2025 Sean Corfield, all rights reserved
|
||||||
|
|
||||||
|
(ns ^:no-doc next.jdbc.sql-logging
|
||||||
|
"Implementation of sql-logging logic."
|
||||||
|
(:require [next.jdbc.protocols :as p]))
|
||||||
|
|
||||||
|
(set! *warn-on-reflection* true)
|
||||||
|
|
||||||
|
(defrecord SQLLogging [connectable sql-logger result-logger options])
|
||||||
|
|
||||||
|
(extend-protocol p/Wrapped
|
||||||
|
SQLLogging
|
||||||
|
(unwrap [this] (p/unwrap (:connectable this))))
|
||||||
|
|
||||||
|
(extend-protocol p/Sourceable
|
||||||
|
SQLLogging
|
||||||
|
(get-datasource [this]
|
||||||
|
(p/get-datasource (:connectable this))))
|
||||||
|
|
||||||
|
(extend-protocol p/Connectable
|
||||||
|
SQLLogging
|
||||||
|
(get-connection [this opts]
|
||||||
|
(p/get-connection (:connectable this)
|
||||||
|
(merge (:options this) opts))))
|
||||||
|
|
||||||
|
(defn- result-logger-helper [result this sym state]
|
||||||
|
(when-let [logger (:result-logger this)]
|
||||||
|
(logger sym state result)))
|
||||||
|
|
||||||
|
(extend-protocol p/Executable
|
||||||
|
SQLLogging
|
||||||
|
(-execute [this sql-params opts]
|
||||||
|
;; no result-logger call possible:
|
||||||
|
((:sql-logger this) 'next.jdbc/plan sql-params)
|
||||||
|
(p/-execute (:connectable this) sql-params
|
||||||
|
(merge (:options this) opts)))
|
||||||
|
(-execute-one [this sql-params opts]
|
||||||
|
(let [state ((:sql-logger this) 'next.jdbc/execute-one! sql-params)]
|
||||||
|
(try
|
||||||
|
(doto (p/-execute-one (:connectable this) sql-params
|
||||||
|
(merge (:options this) opts))
|
||||||
|
(result-logger-helper this 'next.jdbc/execute-one! state))
|
||||||
|
(catch Throwable t
|
||||||
|
(result-logger-helper t this 'next.jdbc/execute-one! state)
|
||||||
|
(throw t)))))
|
||||||
|
(-execute-all [this sql-params opts]
|
||||||
|
(let [state ((:sql-logger this) 'next.jdbc/execute! sql-params)]
|
||||||
|
(try
|
||||||
|
(doto (p/-execute-all (:connectable this) sql-params
|
||||||
|
(merge (:options this) opts))
|
||||||
|
(result-logger-helper this 'next.jdbc/execute! state))
|
||||||
|
(catch Throwable t
|
||||||
|
(result-logger-helper t this 'next.jdbc/execute! state)
|
||||||
|
(throw t))))))
|
||||||
|
|
||||||
|
(extend-protocol p/Preparable
|
||||||
|
SQLLogging
|
||||||
|
(prepare [this sql-params opts]
|
||||||
|
;; no result-logger call possible:
|
||||||
|
((:sql-logger this) 'next.jdbc/prepare sql-params)
|
||||||
|
(p/prepare (:connectable this) sql-params
|
||||||
|
(merge (:options this) opts))))
|
||||||
|
|
||||||
|
(extend-protocol p/Transactable
|
||||||
|
SQLLogging
|
||||||
|
(-transact [this body-fn opts]
|
||||||
|
(p/-transact (:connectable this) body-fn
|
||||||
|
(merge (:options this) opts))))
|
||||||
|
|
@ -1,12 +1,45 @@
|
||||||
;; copyright (c) 2018-2020 Sean Corfield, all rights reserved
|
;; copyright (c) 2018-2024 Sean Corfield, all rights reserved
|
||||||
|
|
||||||
(ns ^:no-doc next.jdbc.transaction
|
(ns next.jdbc.transaction
|
||||||
"Implementation of SQL transaction logic."
|
"Implementation of SQL transaction logic.
|
||||||
|
|
||||||
|
In general, you cannot nest transactions. `clojure.java.jdbc` would
|
||||||
|
ignore any attempt to create a nested transaction, even tho' some
|
||||||
|
databases do support it. `next.jdbc` allows you to call `with-transaction`
|
||||||
|
even while you are inside an active transaction, but the behavior may
|
||||||
|
vary across databases and the commit or rollback boundaries may not be
|
||||||
|
what you expect. In order to avoid two transactions constructed on the
|
||||||
|
same connection from interfering with each other, `next.jdbc` locks on
|
||||||
|
the `Connection` object (this prevents concurrent transactions on separate
|
||||||
|
threads from interfering but will cause deadlock on a single thread --
|
||||||
|
so beware).
|
||||||
|
|
||||||
|
Consequently, this namespace exposes a dynamic variable, `*nested-tx*`,
|
||||||
|
which can be used to vary the behavior when an attempt is made to start
|
||||||
|
a transaction when you are already inside a transaction."
|
||||||
(:require [next.jdbc.protocols :as p])
|
(:require [next.jdbc.protocols :as p])
|
||||||
(:import (java.sql Connection)))
|
(:import (java.sql Connection)))
|
||||||
|
|
||||||
(set! *warn-on-reflection* true)
|
(set! *warn-on-reflection* true)
|
||||||
|
|
||||||
|
(defonce ^:dynamic
|
||||||
|
^{:doc "Controls the behavior when a nested transaction is attempted.
|
||||||
|
|
||||||
|
Possible values are:
|
||||||
|
* `:allow` -- the default: assumes you know what you are doing!
|
||||||
|
* `:ignore` -- the same behavior as `clojure.java.jdbc`: the nested
|
||||||
|
transaction is simply ignored and any SQL operations inside it are
|
||||||
|
executed in the context of the outer transaction.
|
||||||
|
* `:prohibit` -- any attempt to create a nested transaction will throw
|
||||||
|
an exception: this is the safest but most restrictive approach so
|
||||||
|
that you can make sure you don't accidentally have any attempts
|
||||||
|
to create nested transactions (since that might be a bug in your code)."}
|
||||||
|
*nested-tx*
|
||||||
|
:allow)
|
||||||
|
|
||||||
|
(defonce ^:private ^:dynamic ^{:doc "Used to detect nested transactions."}
|
||||||
|
*active-tx* #{})
|
||||||
|
|
||||||
(def ^:private isolation-levels
|
(def ^:private isolation-levels
|
||||||
"Transaction isolation levels."
|
"Transaction isolation levels."
|
||||||
{:none Connection/TRANSACTION_NONE
|
{:none Connection/TRANSACTION_NONE
|
||||||
|
|
@ -79,15 +112,44 @@
|
||||||
(.setReadOnly con old-readonly)
|
(.setReadOnly con old-readonly)
|
||||||
(catch Exception _))))))))
|
(catch Exception _))))))))
|
||||||
|
|
||||||
|
(defn- raw-connection ^Connection [^Connection con]
|
||||||
|
(try ; because some drivers do not implement this :(
|
||||||
|
(if (.isWrapperFor con Connection)
|
||||||
|
(.unwrap con Connection)
|
||||||
|
con)
|
||||||
|
(catch Throwable _ ; to catch AbstractMethodError :(
|
||||||
|
con)))
|
||||||
|
|
||||||
(extend-protocol p/Transactable
|
(extend-protocol p/Transactable
|
||||||
java.sql.Connection
|
java.sql.Connection
|
||||||
(-transact [this body-fn opts]
|
(-transact [this body-fn opts]
|
||||||
(locking this
|
(let [raw (raw-connection this)]
|
||||||
(transact* this body-fn opts)))
|
(cond
|
||||||
|
(and (not (contains? *active-tx* raw)) (= :ignore *nested-tx*))
|
||||||
|
;; #245 do not lock when in c.j.j compatibility mode:
|
||||||
|
(binding [*active-tx* (conj *active-tx* raw)]
|
||||||
|
(transact* this body-fn opts))
|
||||||
|
(or (not (contains? *active-tx* raw)) (= :allow *nested-tx*))
|
||||||
|
(locking this
|
||||||
|
(binding [*active-tx* (conj *active-tx* raw)]
|
||||||
|
(transact* this body-fn opts)))
|
||||||
|
(= :ignore *nested-tx*)
|
||||||
|
(body-fn this)
|
||||||
|
(= :prohibit *nested-tx*)
|
||||||
|
(throw (IllegalStateException. "Nested transactions are prohibited"))
|
||||||
|
:else
|
||||||
|
(throw (IllegalArgumentException.
|
||||||
|
(str "*nested-tx* ("
|
||||||
|
*nested-tx*
|
||||||
|
") was not :allow, :ignore, or :prohibit"))))))
|
||||||
javax.sql.DataSource
|
javax.sql.DataSource
|
||||||
(-transact [this body-fn opts]
|
(-transact [this body-fn opts]
|
||||||
(with-open [con (p/get-connection this opts)]
|
(with-open [con (p/get-connection this opts)]
|
||||||
(transact* con body-fn opts)))
|
;; this connection is assumed unique so we do not need the active-tx check:
|
||||||
|
(let [raw (raw-connection con)]
|
||||||
|
;; we don't lock either, per #293:
|
||||||
|
(binding [*active-tx* (conj *active-tx* raw)]
|
||||||
|
(transact* con body-fn opts)))))
|
||||||
Object
|
Object
|
||||||
(-transact [this body-fn opts]
|
(-transact [this body-fn opts]
|
||||||
(p/-transact (p/get-datasource this) body-fn opts)))
|
(p/-transact (p/get-datasource this) body-fn opts)))
|
||||||
|
|
|
||||||
39
src/next/jdbc/types.clj
Normal file
39
src/next/jdbc/types.clj
Normal file
|
|
@ -0,0 +1,39 @@
|
||||||
|
;; copyright (c) 2018-2024 Sean Corfield, all rights reserved
|
||||||
|
|
||||||
|
(ns next.jdbc.types
|
||||||
|
"Provides convenience functions for wrapping values you pass into SQL
|
||||||
|
operations that have per-instance implementations of `SettableParameter`
|
||||||
|
so that `.setObject()` is called with the appropriate `java.sql.Types` value."
|
||||||
|
(:require [clojure.string :as str]
|
||||||
|
[next.jdbc.prepare])
|
||||||
|
(:import (java.lang.reflect Field Modifier)
|
||||||
|
(java.sql PreparedStatement)))
|
||||||
|
|
||||||
|
(set! *warn-on-reflection* true)
|
||||||
|
|
||||||
|
(defmacro ^:private all-types
|
||||||
|
[]
|
||||||
|
(let [names
|
||||||
|
(into []
|
||||||
|
(comp (filter #(Modifier/isStatic (.getModifiers ^Field %)))
|
||||||
|
(map #(.getName ^Field %)))
|
||||||
|
(.getDeclaredFields java.sql.Types))]
|
||||||
|
`(do
|
||||||
|
~@(for [n names]
|
||||||
|
(let [as-n (symbol (str "as-"
|
||||||
|
(-> n
|
||||||
|
(str/lower-case)
|
||||||
|
(str/replace "_" "-"))))]
|
||||||
|
`(defn ~as-n
|
||||||
|
~(str "Wrap a Clojure value in a thunk with metadata to implement `set-parameter`
|
||||||
|
so that `.setObject()` is called with the `java.sql.Types/" n "` SQL type.")
|
||||||
|
[~'obj]
|
||||||
|
(with-meta (constantly ~'obj)
|
||||||
|
{'next.jdbc.prepare/set-parameter
|
||||||
|
(fn [vf# ^PreparedStatement s# ^long i#]
|
||||||
|
(.setObject s# i# (vf#) ~(symbol "java.sql.Types" n)))})))))))
|
||||||
|
|
||||||
|
(all-types)
|
||||||
|
|
||||||
|
(comment
|
||||||
|
(macroexpand '(all-types)))
|
||||||
28
test/log4j2-info.properties
Normal file
28
test/log4j2-info.properties
Normal file
|
|
@ -0,0 +1,28 @@
|
||||||
|
# Sean's normal mode, shows INFO and above, with highlighting:
|
||||||
|
rootLogger.level = info
|
||||||
|
rootLogger.appenderRef.stdout.ref = STDOUT
|
||||||
|
|
||||||
|
appender.console.type = Console
|
||||||
|
appender.console.name = STDOUT
|
||||||
|
appender.console.layout.type = PatternLayout
|
||||||
|
appender.console.layout.pattern = [%c] %highlight{%m}%n
|
||||||
|
|
||||||
|
# I do not care about any of c3p0's INFO messages...
|
||||||
|
logger.c3p0.name = com.mchange.v2
|
||||||
|
logger.c3p0.level = warn
|
||||||
|
logger.c3p0.appenderRef.stdout.ref = STDOUT
|
||||||
|
|
||||||
|
# ...nor HikariCP...
|
||||||
|
logger.hikari.name = com.zaxxer.hikari
|
||||||
|
logger.hikari.level = warn
|
||||||
|
logger.hikari.appenderRef.stdout.ref = STDOUT
|
||||||
|
|
||||||
|
# ...nor embedded HSQLDB...
|
||||||
|
logger.hsqldb.name = hsqldb.db
|
||||||
|
logger.hsqldb.level = warn
|
||||||
|
logger.hsqldb.appenderRef.stdout.ref = STDOUT
|
||||||
|
|
||||||
|
# ...nor embedded PostgreSQL...
|
||||||
|
logger.postgres.name = io.zonky.test.db.postgres.embedded
|
||||||
|
logger.postgres.level = warn
|
||||||
|
logger.postgres.appenderRef.stdout.ref = STDOUT
|
||||||
56
test/next/jdbc/connection_string_test.clj
Normal file
56
test/next/jdbc/connection_string_test.clj
Normal file
|
|
@ -0,0 +1,56 @@
|
||||||
|
;; copyright (c) 2019-2025 Sean Corfield, all rights reserved
|
||||||
|
|
||||||
|
(ns next.jdbc.connection-string-test
|
||||||
|
"Tests for the main hash map spec to JDBC URL logic and the get-datasource
|
||||||
|
and get-connection protocol implementations.
|
||||||
|
|
||||||
|
At some point, the datasource/connection tests should probably be extended
|
||||||
|
to accept EDN specs from an external source (environment variables?)."
|
||||||
|
(:require [clojure.string :as str]
|
||||||
|
[lazytest.core :refer [around set-ns-context!]]
|
||||||
|
[lazytest.experimental.interfaces.clojure-test :refer [deftest is testing]]
|
||||||
|
[next.jdbc.connection :as c]
|
||||||
|
[next.jdbc.protocols :as p]
|
||||||
|
[next.jdbc.specs :as specs]
|
||||||
|
[next.jdbc.test-fixtures :refer [db with-test-db]])
|
||||||
|
(:import [java.util Properties]))
|
||||||
|
|
||||||
|
(set! *warn-on-reflection* true)
|
||||||
|
|
||||||
|
(set-ns-context! [(around [f] (with-test-db f))])
|
||||||
|
|
||||||
|
(specs/instrument)
|
||||||
|
|
||||||
|
(deftest test-uri-strings
|
||||||
|
(testing "datasource via String"
|
||||||
|
(let [db-spec (db)
|
||||||
|
db-spec (if (= "embedded-postgres" (:dbtype db-spec))
|
||||||
|
(assoc db-spec :dbtype "postgresql")
|
||||||
|
db-spec)
|
||||||
|
[url etc] (#'c/spec->url+etc db-spec)
|
||||||
|
{:keys [user password]} etc
|
||||||
|
etc (dissoc etc :user :password)
|
||||||
|
uri (-> url
|
||||||
|
;; strip jdbc: prefix for fun
|
||||||
|
(str/replace #"^jdbc:" "")
|
||||||
|
(str/replace #";" "?") ; for SQL Server tests
|
||||||
|
(str/replace #":sqlserver" "") ; for SQL Server tests
|
||||||
|
(cond-> (and user password)
|
||||||
|
(str/replace #"://" (str "://" user ":" password "@"))))
|
||||||
|
ds (p/get-datasource (assoc etc :jdbcUrl uri))]
|
||||||
|
(when (and user password)
|
||||||
|
(with-open [con (p/get-connection ds {})]
|
||||||
|
(is (instance? java.sql.Connection con)))))))
|
||||||
|
|
||||||
|
(deftest property-tests
|
||||||
|
(is (string? (.getProperty ^Properties (#'c/as-properties {:foo [42]}) "foo")))
|
||||||
|
(is (string? (.get ^Properties (#'c/as-properties {:foo [42]}) "foo")))
|
||||||
|
(is (vector? (.get ^Properties (#'c/as-properties
|
||||||
|
{:foo [42]
|
||||||
|
:next.jdbc/as-is-properties [:foo]})
|
||||||
|
"foo")))
|
||||||
|
;; because .getProperty drops non-string values!
|
||||||
|
(is (nil? (.getProperty ^Properties (#'c/as-properties
|
||||||
|
{:foo [42]
|
||||||
|
:next.jdbc/as-is-properties [:foo]})
|
||||||
|
"foo"))))
|
||||||
|
|
@ -1,4 +1,4 @@
|
||||||
;; copyright (c) 2019-2020 Sean Corfield, all rights reserved
|
;; copyright (c) 2019-2025 Sean Corfield, all rights reserved
|
||||||
|
|
||||||
(ns next.jdbc.connection-test
|
(ns next.jdbc.connection-test
|
||||||
"Tests for the main hash map spec to JDBC URL logic and the get-datasource
|
"Tests for the main hash map spec to JDBC URL logic and the get-datasource
|
||||||
|
|
@ -7,7 +7,7 @@
|
||||||
At some point, the datasource/connection tests should probably be extended
|
At some point, the datasource/connection tests should probably be extended
|
||||||
to accept EDN specs from an external source (environment variables?)."
|
to accept EDN specs from an external source (environment variables?)."
|
||||||
(:require [clojure.string :as str]
|
(:require [clojure.string :as str]
|
||||||
[clojure.test :refer [deftest is testing]]
|
[lazytest.experimental.interfaces.clojure-test :refer [deftest is testing]]
|
||||||
[next.jdbc.connection :as c]
|
[next.jdbc.connection :as c]
|
||||||
[next.jdbc.protocols :as p])
|
[next.jdbc.protocols :as p])
|
||||||
(:import (com.zaxxer.hikari HikariDataSource)
|
(:import (com.zaxxer.hikari HikariDataSource)
|
||||||
|
|
@ -54,28 +54,76 @@
|
||||||
(#'c/spec->url+etc {:dbtype "sqlserver" :dbname db-name :port 1433})))))
|
(#'c/spec->url+etc {:dbtype "sqlserver" :dbname db-name :port 1433})))))
|
||||||
|
|
||||||
(deftest custom-dbtypes
|
(deftest custom-dbtypes
|
||||||
(is (= ["jdbc:acme:my-db" {}]
|
(is (= ["jdbc:acme:my-db" {} nil]
|
||||||
(#'c/spec->url+etc {:dbtype "acme" :classname "java.lang.String"
|
(#'c/spec->url+etc {:dbtype "acme" :classname "java.lang.String"
|
||||||
:dbname "my-db" :host :none})))
|
:dbname "my-db" :host :none})))
|
||||||
(is (= ["jdbc:acme://127.0.0.1/my-db" {}]
|
(is (= ["jdbc:acme://127.0.0.1/my-db" {} nil]
|
||||||
(#'c/spec->url+etc {:dbtype "acme" :classname "java.lang.String"
|
(#'c/spec->url+etc {:dbtype "acme" :classname "java.lang.String"
|
||||||
:dbname "my-db"})))
|
:dbname "my-db"})))
|
||||||
(is (= ["jdbc:acme://12.34.56.70:1234/my-db" {}]
|
(is (= ["jdbc:acme://12.34.56.70:1234/my-db" {} nil]
|
||||||
(#'c/spec->url+etc {:dbtype "acme" :classname "java.lang.String"
|
(#'c/spec->url+etc {:dbtype "acme" :classname "java.lang.String"
|
||||||
:dbname "my-db" :host "12.34.56.70" :port 1234})))
|
:dbname "my-db" :host "12.34.56.70" :port 1234})))
|
||||||
(is (= ["jdbc:acme:dsn=my-db" {}]
|
(is (= ["jdbc:acme:dsn=my-db" {} nil]
|
||||||
(#'c/spec->url+etc {:dbtype "acme" :classname "java.lang.String"
|
(#'c/spec->url+etc {:dbtype "acme" :classname "java.lang.String"
|
||||||
:dbname "my-db" :host :none
|
:dbname "my-db" :host :none
|
||||||
:dbname-separator ":dsn="})))
|
:dbname-separator ":dsn="})))
|
||||||
(is (= ["jdbc:acme:(*)127.0.0.1/my-db" {}]
|
(is (= ["jdbc:acme:(*)127.0.0.1/my-db" {} nil]
|
||||||
(#'c/spec->url+etc {:dbtype "acme" :classname "java.lang.String"
|
(#'c/spec->url+etc {:dbtype "acme" :classname "java.lang.String"
|
||||||
:dbname "my-db"
|
:dbname "my-db"
|
||||||
:host-prefix "(*)"})))
|
:host-prefix "(*)"})))
|
||||||
(is (= ["jdbc:acme:(*)12.34.56.70:1234/my-db" {}]
|
(is (= ["jdbc:acme:(*)12.34.56.70:1234/my-db" {} nil]
|
||||||
(#'c/spec->url+etc {:dbtype "acme" :classname "java.lang.String"
|
(#'c/spec->url+etc {:dbtype "acme" :classname "java.lang.String"
|
||||||
:dbname "my-db" :host "12.34.56.70" :port 1234
|
:dbname "my-db" :host "12.34.56.70" :port 1234
|
||||||
|
:host-prefix "(*)"})))
|
||||||
|
(is (= ["jdbc:acme:(*)12.34.56.70/my-db" {} nil]
|
||||||
|
(#'c/spec->url+etc {:dbtype "acme" :classname "java.lang.String"
|
||||||
|
:dbname "my-db" :host "12.34.56.70" :port :none
|
||||||
:host-prefix "(*)"}))))
|
:host-prefix "(*)"}))))
|
||||||
|
|
||||||
|
(deftest jdbc-url-tests
|
||||||
|
(testing "basic URLs work"
|
||||||
|
(is (= "jdbc:acme:my-db"
|
||||||
|
(c/jdbc-url {:dbtype "acme" :classname "java.lang.String"
|
||||||
|
:dbname "my-db" :host :none})))
|
||||||
|
(is (= "jdbc:acme://127.0.0.1/my-db"
|
||||||
|
(c/jdbc-url {:dbtype "acme" :classname "java.lang.String"
|
||||||
|
:dbname "my-db"})))
|
||||||
|
(is (= "jdbc:acme://12.34.56.70:1234/my-db"
|
||||||
|
(c/jdbc-url {:dbtype "acme" :classname "java.lang.String"
|
||||||
|
:dbname "my-db" :host "12.34.56.70" :port 1234})))
|
||||||
|
(is (= "jdbc:acme:dsn=my-db"
|
||||||
|
(c/jdbc-url {:dbtype "acme" :classname "java.lang.String"
|
||||||
|
:dbname "my-db" :host :none
|
||||||
|
:dbname-separator ":dsn="})))
|
||||||
|
(is (= "jdbc:acme:(*)127.0.0.1/my-db"
|
||||||
|
(c/jdbc-url {:dbtype "acme" :classname "java.lang.String"
|
||||||
|
:dbname "my-db"
|
||||||
|
:host-prefix "(*)"})))
|
||||||
|
(is (= "jdbc:acme:(*)12.34.56.70:1234/my-db"
|
||||||
|
(c/jdbc-url {:dbtype "acme" :classname "java.lang.String"
|
||||||
|
:dbname "my-db" :host "12.34.56.70" :port 1234
|
||||||
|
:host-prefix "(*)"}))))
|
||||||
|
(testing "URLs with properties work"
|
||||||
|
(is (= "jdbc:acme:my-db?useSSL=true"
|
||||||
|
(c/jdbc-url {:dbtype "acme" :classname "java.lang.String"
|
||||||
|
:dbname "my-db" :host :none
|
||||||
|
:useSSL true})))
|
||||||
|
(is (boolean (#{"jdbc:acme:my-db?useSSL=true&user=dba"
|
||||||
|
"jdbc:acme:my-db?user=dba&useSSL=true"}
|
||||||
|
(c/jdbc-url {:dbtype "acme" :classname "java.lang.String"
|
||||||
|
:dbname "my-db" :host :none
|
||||||
|
:useSSL true :user "dba"}))))
|
||||||
|
|
||||||
|
(is (= "jdbc:jtds:sqlserver:my-db;useSSL=true"
|
||||||
|
(c/jdbc-url {:dbtype "jtds"
|
||||||
|
:dbname "my-db" :host :none
|
||||||
|
:useSSL true})))
|
||||||
|
(is (boolean (#{"jdbc:jtds:sqlserver:my-db;useSSL=true;user=dba"
|
||||||
|
"jdbc:jtds:sqlserver:my-db;user=dba;useSSL=true"}
|
||||||
|
(c/jdbc-url {:dbtype "jtds"
|
||||||
|
:dbname "my-db" :host :none
|
||||||
|
:useSSL true :user "dba"}))))))
|
||||||
|
|
||||||
;; these are the 'local' databases that we can always test against
|
;; these are the 'local' databases that we can always test against
|
||||||
(def test-db-type ["derby" "h2" "h2:mem" "hsqldb" "sqlite"])
|
(def test-db-type ["derby" "h2" "h2:mem" "hsqldb" "sqlite"])
|
||||||
|
|
||||||
|
|
@ -85,6 +133,12 @@
|
||||||
(= "derby" db)
|
(= "derby" db)
|
||||||
(assoc :create true))))
|
(assoc :create true))))
|
||||||
|
|
||||||
|
(deftest test-sourceable-via-metadata
|
||||||
|
(doseq [db test-dbs]
|
||||||
|
(let [ds (p/get-datasource
|
||||||
|
^{`p/get-datasource (fn [v] (p/get-datasource (first v)))} [db])]
|
||||||
|
(is (instance? javax.sql.DataSource ds)))))
|
||||||
|
|
||||||
(deftest test-get-connection
|
(deftest test-get-connection
|
||||||
(doseq [db test-dbs]
|
(doseq [db test-dbs]
|
||||||
(println 'test-get-connection (:dbtype db))
|
(println 'test-get-connection (:dbtype db))
|
||||||
|
|
@ -138,3 +192,13 @@
|
||||||
(testing "connection via map (Object)"
|
(testing "connection via map (Object)"
|
||||||
(with-open [con (p/get-connection db {})]
|
(with-open [con (p/get-connection db {})]
|
||||||
(is (instance? java.sql.Connection con))))))
|
(is (instance? java.sql.Connection con))))))
|
||||||
|
|
||||||
|
(deftest issue-243-uri->db-spec
|
||||||
|
(is (= {:dbtype "mysql" :dbname "mydb"
|
||||||
|
:host "myserver" :port 1234
|
||||||
|
:user "foo" :password "bar"}
|
||||||
|
(c/uri->db-spec "mysql://foo:bar@myserver:1234/mydb")))
|
||||||
|
(is (= {:dbtype "mysql" :dbname "mydb"
|
||||||
|
:host "myserver" :port 1234
|
||||||
|
:user "foo" :password "bar"}
|
||||||
|
(c/uri->db-spec "jdbc:mysql://myserver:1234/mydb?user=foo&password=bar"))))
|
||||||
|
|
|
||||||
148
test/next/jdbc/datafy_test.clj
Normal file
148
test/next/jdbc/datafy_test.clj
Normal file
|
|
@ -0,0 +1,148 @@
|
||||||
|
;; copyright (c) 2020-2025 Sean Corfield, all rights reserved
|
||||||
|
|
||||||
|
(ns next.jdbc.datafy-test
|
||||||
|
"Tests for the datafy extensions over JDBC types."
|
||||||
|
(:require [clojure.datafy :as d]
|
||||||
|
[clojure.set :as set]
|
||||||
|
[lazytest.core :refer [around set-ns-context!]]
|
||||||
|
[lazytest.experimental.interfaces.clojure-test :refer [deftest is testing]]
|
||||||
|
[next.jdbc :as jdbc]
|
||||||
|
[next.jdbc.datafy]
|
||||||
|
[next.jdbc.result-set :as rs]
|
||||||
|
[next.jdbc.specs :as specs]
|
||||||
|
[next.jdbc.test-fixtures
|
||||||
|
:refer [db derby? ds jtds? mysql? postgres? sqlite? with-test-db
|
||||||
|
xtdb?]]))
|
||||||
|
|
||||||
|
(set! *warn-on-reflection* true)
|
||||||
|
|
||||||
|
(set-ns-context! [(around [f] (with-test-db f))])
|
||||||
|
|
||||||
|
(specs/instrument)
|
||||||
|
|
||||||
|
(def ^:private basic-connection-keys
|
||||||
|
"Generic JDBC Connection fields."
|
||||||
|
#{:autoCommit :catalog :clientInfo :holdability :metaData
|
||||||
|
:networkTimeout :schema :transactionIsolation :typeMap :warnings
|
||||||
|
;; boolean properties
|
||||||
|
:closed :readOnly
|
||||||
|
;; configured to be added as if by clojure.core/bean
|
||||||
|
:class})
|
||||||
|
|
||||||
|
(deftest connection-datafy-tests
|
||||||
|
(testing "connection datafication"
|
||||||
|
(with-open [con (jdbc/get-connection (ds))]
|
||||||
|
(let [reference-keys (cond-> basic-connection-keys
|
||||||
|
(derby?) (-> (disj :networkTimeout)
|
||||||
|
(conj :networkTimeout/exception))
|
||||||
|
(jtds?) (-> (disj :clientInfo :networkTimeout :schema)
|
||||||
|
(conj :clientInfo/exception
|
||||||
|
:networkTimeout/exception
|
||||||
|
:schema/exception)))
|
||||||
|
data (set (keys (d/datafy con)))]
|
||||||
|
(when-let [diff (seq (set/difference data reference-keys))]
|
||||||
|
(println (format "%6s :%-10s %s"
|
||||||
|
(:dbtype (db)) "connection" (str (sort diff)))))
|
||||||
|
(is (= reference-keys
|
||||||
|
(set/intersection reference-keys data)))))))
|
||||||
|
|
||||||
|
(def ^:private basic-database-metadata-keys
|
||||||
|
"Generic JDBC Connection fields."
|
||||||
|
#{:JDBCMajorVersion :JDBCMinorVersion :SQLKeywords :SQLStateType :URL
|
||||||
|
:catalogSeparator :catalogTerm :catalogs
|
||||||
|
:clientInfoProperties :connection
|
||||||
|
:databaseMajorVersion :databaseMinorVersion
|
||||||
|
:databaseProductName :databaseProductVersion
|
||||||
|
:defaultTransactionIsolation
|
||||||
|
:driverMajorVersion :driverMinorVersion :driverName :driverVersion
|
||||||
|
:extraNameCharacters :identifierQuoteString
|
||||||
|
:maxBinaryLiteralLength :maxCatalogNameLength :maxCharLiteralLength
|
||||||
|
:maxColumnNameLength :maxColumnsInGroupBy :maxColumnsInIndex
|
||||||
|
:maxColumnsInOrderBy :maxColumnsInSelect :maxColumnsInTable
|
||||||
|
:maxConnections
|
||||||
|
:maxCursorNameLength :maxIndexLength
|
||||||
|
:maxProcedureNameLength :maxRowSize :maxSchemaNameLength
|
||||||
|
:maxStatementLength :maxStatements :maxTableNameLength
|
||||||
|
:maxTablesInSelect :maxUserNameLength :numericFunctions
|
||||||
|
:procedureTerm :resultSetHoldability :rowIdLifetime
|
||||||
|
:schemaTerm :schemas :searchStringEscape :stringFunctions
|
||||||
|
:systemFunctions :tableTypes :timeDateFunctions
|
||||||
|
:typeInfo :userName
|
||||||
|
;; boolean properties
|
||||||
|
:catalogAtStart :readOnly
|
||||||
|
;; configured to be added as if by clojure.core/bean
|
||||||
|
:class
|
||||||
|
;; added by next.jdbc.datafy if the datafication succeeds
|
||||||
|
:all-tables})
|
||||||
|
|
||||||
|
(deftest database-metadata-datafy-tests
|
||||||
|
(testing "database metadata datafication"
|
||||||
|
(with-open [con (jdbc/get-connection (ds))]
|
||||||
|
(let [reference-keys (cond-> basic-database-metadata-keys
|
||||||
|
(jtds?) (-> (disj :clientInfoProperties :rowIdLifetime)
|
||||||
|
(conj :clientInfoProperties/exception
|
||||||
|
:rowIdLifetime/exception))
|
||||||
|
(postgres?) (-> (disj :rowIdLifetime)
|
||||||
|
(conj :rowIdLifetime/exception))
|
||||||
|
(xtdb?) (-> (disj :clientInfoProperties
|
||||||
|
:defaultTransactionIsolation
|
||||||
|
:maxCatalogNameLength
|
||||||
|
:maxColumnNameLength
|
||||||
|
:maxCursorNameLength
|
||||||
|
:maxProcedureNameLength
|
||||||
|
:maxSchemaNameLength
|
||||||
|
:maxTableNameLength
|
||||||
|
:maxUserNameLength
|
||||||
|
:rowIdLifetime)
|
||||||
|
(conj :clientInfoProperties/exception
|
||||||
|
:defaultTransactionIsolation/exception
|
||||||
|
:maxCatalogNameLength/exception
|
||||||
|
:maxColumnNameLength/exception
|
||||||
|
:maxCursorNameLength/exception
|
||||||
|
:maxProcedureNameLength/exception
|
||||||
|
:maxSchemaNameLength/exception
|
||||||
|
:maxTableNameLength/exception
|
||||||
|
:maxUserNameLength/exception
|
||||||
|
:rowIdLifetime/exception))
|
||||||
|
(sqlite?) (-> (disj :clientInfoProperties :rowIdLifetime)
|
||||||
|
(conj :clientInfoProperties/exception
|
||||||
|
:rowIdLifetime/exception)))
|
||||||
|
data (set (keys (d/datafy (.getMetaData con))))]
|
||||||
|
(when-let [diff (seq (set/difference data reference-keys))]
|
||||||
|
(println (format "%6s :%-10s %s"
|
||||||
|
(:dbtype (db)) "db-meta" (str (sort diff)))))
|
||||||
|
(is (= reference-keys
|
||||||
|
(set/intersection reference-keys data))))))
|
||||||
|
(testing "nav to catalogs yields object"
|
||||||
|
(with-open [con (jdbc/get-connection (ds))]
|
||||||
|
(let [data (d/datafy (.getMetaData con))]
|
||||||
|
(doseq [k (cond-> #{:catalogs :clientInfoProperties :schemas :tableTypes :typeInfo}
|
||||||
|
(jtds?) (disj :clientInfoProperties)
|
||||||
|
(sqlite?) (disj :clientInfoProperties)
|
||||||
|
(xtdb?) (disj :clientInfoProperties))]
|
||||||
|
(let [rs (d/nav data k nil)]
|
||||||
|
(is (vector? rs))
|
||||||
|
(is (every? map? rs))))))))
|
||||||
|
|
||||||
|
(deftest result-set-metadata-datafy-tests
|
||||||
|
(testing "result set metadata datafication"
|
||||||
|
(let [data (reduce (fn [_ row] (reduced (rs/metadata row)))
|
||||||
|
nil
|
||||||
|
(jdbc/plan (ds) [(str "SELECT * FROM "
|
||||||
|
(if (mysql?) "fruit" "FRUIT"))]))]
|
||||||
|
(is (vector? data))
|
||||||
|
(is (= 5 (count data)))
|
||||||
|
(is (every? map? data))
|
||||||
|
(is (every? :label data)))))
|
||||||
|
|
||||||
|
(comment
|
||||||
|
(def con (jdbc/get-connection (ds)))
|
||||||
|
(rs/datafiable-result-set (.getTables (.getMetaData con) nil nil nil nil) con {})
|
||||||
|
(def ps (jdbc/prepare con ["SELECT * FROM fruit WHERE grade > ?"]))
|
||||||
|
(require '[next.jdbc.prepare :as prep])
|
||||||
|
(prep/set-parameters ps [30])
|
||||||
|
(.execute ps)
|
||||||
|
(.getResultSet ps)
|
||||||
|
(.close ps)
|
||||||
|
(.close con)
|
||||||
|
)
|
||||||
|
|
@ -1,4 +1,4 @@
|
||||||
;; copyright (c) 2019-2020 Sean Corfield, all rights reserved
|
;; copyright (c) 2019-2025 Sean Corfield, all rights reserved
|
||||||
|
|
||||||
(ns next.jdbc.date-time-test
|
(ns next.jdbc.date-time-test
|
||||||
"Date/time parameter auto-conversion tests.
|
"Date/time parameter auto-conversion tests.
|
||||||
|
|
@ -6,44 +6,45 @@
|
||||||
These tests contain no assertions. Without requiring `next.jdbc.date-time`
|
These tests contain no assertions. Without requiring `next.jdbc.date-time`
|
||||||
several of the `insert` operations would throw exceptions for some databases
|
several of the `insert` operations would throw exceptions for some databases
|
||||||
so the test here just checks those operations 'succeed'."
|
so the test here just checks those operations 'succeed'."
|
||||||
(:require [clojure.test :refer [deftest is testing use-fixtures]]
|
(:require [lazytest.core :refer [around set-ns-context!]]
|
||||||
|
[lazytest.experimental.interfaces.clojure-test :refer [deftest]]
|
||||||
[next.jdbc :as jdbc]
|
[next.jdbc :as jdbc]
|
||||||
[next.jdbc.date-time] ; to extend SettableParameter to date/time
|
[next.jdbc.date-time] ; to extend SettableParameter to date/time
|
||||||
[next.jdbc.test-fixtures :refer [with-test-db db ds
|
[next.jdbc.test-fixtures :refer [with-test-db ds
|
||||||
mssql?]]
|
mssql? xtdb?]]
|
||||||
[next.jdbc.specs :as specs])
|
[next.jdbc.specs :as specs]))
|
||||||
(:import (java.sql ResultSet)))
|
|
||||||
|
|
||||||
(set! *warn-on-reflection* true)
|
(set! *warn-on-reflection* true)
|
||||||
|
|
||||||
(use-fixtures :once with-test-db)
|
(set-ns-context! [(around [f] (with-test-db f))])
|
||||||
|
|
||||||
(specs/instrument)
|
(specs/instrument)
|
||||||
|
|
||||||
(deftest issue-73
|
(deftest issue-73
|
||||||
(try
|
(when-not (xtdb?)
|
||||||
(jdbc/execute-one! (ds) ["drop table fruit_time"])
|
(try
|
||||||
(catch Throwable _))
|
(jdbc/execute-one! (ds) ["drop table fruit_time"])
|
||||||
(jdbc/execute-one! (ds) [(str "create table fruit_time (id int not null, deadline "
|
(catch Throwable _))
|
||||||
(if (mssql?) "datetime" "timestamp")
|
(jdbc/execute-one! (ds) [(str "create table fruit_time (id int not null, deadline "
|
||||||
" not null)")])
|
(if (mssql?) "datetime" "timestamp")
|
||||||
(jdbc/execute-one! (ds) ["insert into fruit_time (id, deadline) values (?,?)" 1 (java.util.Date.)])
|
" not null)")])
|
||||||
(jdbc/execute-one! (ds) ["insert into fruit_time (id, deadline) values (?,?)" 2 (java.time.Instant/now)])
|
(jdbc/execute-one! (ds) ["insert into fruit_time (id, deadline) values (?,?)" 1 (java.util.Date.)])
|
||||||
(jdbc/execute-one! (ds) ["insert into fruit_time (id, deadline) values (?,?)" 3 (java.time.LocalDate/now)])
|
(jdbc/execute-one! (ds) ["insert into fruit_time (id, deadline) values (?,?)" 2 (java.time.Instant/now)])
|
||||||
(jdbc/execute-one! (ds) ["insert into fruit_time (id, deadline) values (?,?)" 4 (java.time.LocalDateTime/now)])
|
(jdbc/execute-one! (ds) ["insert into fruit_time (id, deadline) values (?,?)" 3 (java.time.LocalDate/now)])
|
||||||
(try
|
(jdbc/execute-one! (ds) ["insert into fruit_time (id, deadline) values (?,?)" 4 (java.time.LocalDateTime/now)])
|
||||||
(jdbc/execute-one! (ds) ["drop table fruit_time"])
|
(try
|
||||||
(catch Throwable _))
|
(jdbc/execute-one! (ds) ["drop table fruit_time"])
|
||||||
(jdbc/execute-one! (ds) ["create table fruit_time (id int not null, deadline time not null)"])
|
(catch Throwable _))
|
||||||
(jdbc/execute-one! (ds) ["insert into fruit_time (id, deadline) values (?,?)" 1 (java.util.Date.)])
|
(jdbc/execute-one! (ds) ["create table fruit_time (id int not null, deadline time not null)"])
|
||||||
(jdbc/execute-one! (ds) ["insert into fruit_time (id, deadline) values (?,?)" 2 (java.time.Instant/now)])
|
(jdbc/execute-one! (ds) ["insert into fruit_time (id, deadline) values (?,?)" 1 (java.util.Date.)])
|
||||||
(jdbc/execute-one! (ds) ["insert into fruit_time (id, deadline) values (?,?)" 3 (java.time.LocalDate/now)])
|
(jdbc/execute-one! (ds) ["insert into fruit_time (id, deadline) values (?,?)" 2 (java.time.Instant/now)])
|
||||||
(jdbc/execute-one! (ds) ["insert into fruit_time (id, deadline) values (?,?)" 4 (java.time.LocalDateTime/now)])
|
(jdbc/execute-one! (ds) ["insert into fruit_time (id, deadline) values (?,?)" 3 (java.time.LocalDate/now)])
|
||||||
(try
|
(jdbc/execute-one! (ds) ["insert into fruit_time (id, deadline) values (?,?)" 4 (java.time.LocalDateTime/now)])
|
||||||
(jdbc/execute-one! (ds) ["drop table fruit_time"])
|
(try
|
||||||
(catch Throwable _))
|
(jdbc/execute-one! (ds) ["drop table fruit_time"])
|
||||||
(jdbc/execute-one! (ds) ["create table fruit_time (id int not null, deadline date not null)"])
|
(catch Throwable _))
|
||||||
(jdbc/execute-one! (ds) ["insert into fruit_time (id, deadline) values (?,?)" 1 (java.util.Date.)])
|
(jdbc/execute-one! (ds) ["create table fruit_time (id int not null, deadline date not null)"])
|
||||||
(jdbc/execute-one! (ds) ["insert into fruit_time (id, deadline) values (?,?)" 2 (java.time.Instant/now)])
|
(jdbc/execute-one! (ds) ["insert into fruit_time (id, deadline) values (?,?)" 1 (java.util.Date.)])
|
||||||
(jdbc/execute-one! (ds) ["insert into fruit_time (id, deadline) values (?,?)" 3 (java.time.LocalDate/now)])
|
(jdbc/execute-one! (ds) ["insert into fruit_time (id, deadline) values (?,?)" 2 (java.time.Instant/now)])
|
||||||
(jdbc/execute-one! (ds) ["insert into fruit_time (id, deadline) values (?,?)" 4 (java.time.LocalDateTime/now)]))
|
(jdbc/execute-one! (ds) ["insert into fruit_time (id, deadline) values (?,?)" 3 (java.time.LocalDate/now)])
|
||||||
|
(jdbc/execute-one! (ds) ["insert into fruit_time (id, deadline) values (?,?)" 4 (java.time.LocalDateTime/now)])))
|
||||||
|
|
|
||||||
8
test/next/jdbc/default_options_test.clj
Normal file
8
test/next/jdbc/default_options_test.clj
Normal file
|
|
@ -0,0 +1,8 @@
|
||||||
|
;; copyright (c) 2020-2025 Sean Corfield, all rights reserved
|
||||||
|
|
||||||
|
(ns next.jdbc.default-options-test
|
||||||
|
"Stub test namespace for default options. Nothing can really be tested
|
||||||
|
at this level tho'..."
|
||||||
|
(:require [next.jdbc.default-options]))
|
||||||
|
|
||||||
|
(set! *warn-on-reflection* true)
|
||||||
55
test/next/jdbc/defer_test.clj
Normal file
55
test/next/jdbc/defer_test.clj
Normal file
|
|
@ -0,0 +1,55 @@
|
||||||
|
;; copyright (c) 2024-2025 Sean Corfield, all rights reserved
|
||||||
|
|
||||||
|
(ns next.jdbc.defer-test
|
||||||
|
"The idea behind the next.jdbc.defer namespace is to provide a
|
||||||
|
way to defer the execution of a series of SQL statements until
|
||||||
|
a later time, but still provide a way for inserted keys to be
|
||||||
|
used in later SQL statements.
|
||||||
|
|
||||||
|
The principle is to provide a core subset of the next.jdbc
|
||||||
|
and next.jdbc.sql API that produces a data structure that
|
||||||
|
describes a series of SQL operations to be performed, that
|
||||||
|
are held in a dynamic var, and that can be executed at a
|
||||||
|
later time, in a transaction."
|
||||||
|
(:require [lazytest.core :refer [around set-ns-context!]]
|
||||||
|
[lazytest.experimental.interfaces.clojure-test :refer [deftest is testing]]
|
||||||
|
[next.jdbc :as jdbc]
|
||||||
|
[next.jdbc.defer :as sut]
|
||||||
|
[next.jdbc.test-fixtures
|
||||||
|
:refer [ds with-test-db xtdb?]]))
|
||||||
|
|
||||||
|
(set! *warn-on-reflection* true)
|
||||||
|
|
||||||
|
(set-ns-context! [(around [f] (with-test-db f))])
|
||||||
|
|
||||||
|
(deftest basic-test
|
||||||
|
(when-not (xtdb?)
|
||||||
|
(testing "data structures"
|
||||||
|
(is (= [{:sql-p ["INSERT INTO foo (name) VALUES (?)" "Sean"]
|
||||||
|
:key-fn :GENERATED_KEY
|
||||||
|
:key :id
|
||||||
|
:opts {:key-fn :GENERATED_KEY :key :id}}]
|
||||||
|
@(sut/defer-ops
|
||||||
|
#(sut/insert! :foo {:name "Sean"} {:key-fn :GENERATED_KEY :key :id})))))
|
||||||
|
(testing "execution"
|
||||||
|
(let [effects (sut/with-deferred (ds)
|
||||||
|
(sut/insert! :fruit {:name "Mango"} {:key :test}))]
|
||||||
|
(is (= {:test 1} @effects))
|
||||||
|
(is (= 1 (count (jdbc/execute! (ds)
|
||||||
|
["select * from fruit where name = ?"
|
||||||
|
"Mango"])))))
|
||||||
|
(let [effects (sut/with-deferred (ds)
|
||||||
|
(sut/insert! :fruit {:name "Dragonfruit"} {:key :test})
|
||||||
|
(sut/update! :fruit {:cost 123} {:name "Dragonfruit"})
|
||||||
|
(sut/delete! :fruit {:name "Dragonfruit"}))]
|
||||||
|
(is (= {:test 1} @effects))
|
||||||
|
(is (= 0 (count (jdbc/execute! (ds)
|
||||||
|
["select * from fruit where name = ?"
|
||||||
|
"Dragonfruit"])))))
|
||||||
|
(let [effects (sut/with-deferred (ds)
|
||||||
|
(sut/insert! :fruit {:name "Grapefruit" :bad_column 0} {:key :test}))]
|
||||||
|
(is (= :failed (try @effects
|
||||||
|
(catch Exception _ :failed))))
|
||||||
|
(is (= 0 (count (jdbc/execute! (ds)
|
||||||
|
["select * from fruit where name = ?"
|
||||||
|
"Grapefruit"]))))))))
|
||||||
|
|
@ -1,115 +0,0 @@
|
||||||
;; copyright (c) 2019-2020 Sean Corfield, all rights reserved
|
|
||||||
|
|
||||||
(ns next.jdbc.middleware
|
|
||||||
"This is just an experimental sketch of what it might look like to be
|
|
||||||
able to provide middleware that can wrap SQL execution in a way that
|
|
||||||
behavior can be extended in interesting ways, to support logging, timing.
|
|
||||||
and other cross-cutting things.
|
|
||||||
|
|
||||||
Since it's just an experiment, there's no guarantee that this -- or
|
|
||||||
anything like it -- will actually end up in a next.jdbc release. You've
|
|
||||||
been warned!
|
|
||||||
|
|
||||||
So far these execution points can be hooked into:
|
|
||||||
* start -- pre-process the SQL & parameters and options
|
|
||||||
* (execute SQL)
|
|
||||||
* ????? -- process the options (and something else?)
|
|
||||||
* row -- post-process each row and options
|
|
||||||
* rs -- post-process the whole result set and options
|
|
||||||
|
|
||||||
For the rows and result set, it's 'obvious' that the functions should
|
|
||||||
take the values and return them (or updated versions). For the start
|
|
||||||
function with SQL & parameters, it also makes sense to take and return
|
|
||||||
that vector.
|
|
||||||
|
|
||||||
For timing middleware, you'd need to pass data through the call chain
|
|
||||||
somehow -- unless you control the whole middleware and this isn't sufficient
|
|
||||||
for that yet. Hence the decision to allow processing of the options and
|
|
||||||
passing data through those -- which leads to a rather odd call chain:
|
|
||||||
start can return the vector or a map of updated options (with a payload),
|
|
||||||
and the ????? point can process the options again (e.g., to update timing
|
|
||||||
data etc). And that's all kind of horrible."
|
|
||||||
(:require [next.jdbc.protocols :as p]
|
|
||||||
[next.jdbc.result-set :as rs]))
|
|
||||||
|
|
||||||
(defn post-processing-adapter
|
|
||||||
"Given a builder function (e.g., `as-lower-maps`), return a new builder
|
|
||||||
function that post-processes rows and the result set. The options may
|
|
||||||
contain post-processing functions that are called on each row and on the
|
|
||||||
the result set. The options map is provided as a second parameter to these
|
|
||||||
functions, which should include `:next.jdbc/sql-params` (the vector of SQL
|
|
||||||
and parameters, in case post-processing needs it):
|
|
||||||
|
|
||||||
* `:execute-fn` -- called immediately after the SQL operation completes
|
|
||||||
^ This is a horrible name and it needs to return the options which
|
|
||||||
is weird so I don't like this approach overall...
|
|
||||||
* `:row!-fn` -- called on each row as it is fully-realized
|
|
||||||
* `:rs!-fn` -- called on the whole result set once it is fully-realized
|
|
||||||
|
|
||||||
The results of these functions are returned as the rows/result set."
|
|
||||||
[builder-fn]
|
|
||||||
(fn [rs opts]
|
|
||||||
(let [id2 (fn [x _] x)
|
|
||||||
id2' (fn [_ x] x)
|
|
||||||
exec-fn (get opts :execute-fn id2')
|
|
||||||
opts (exec-fn rs opts)
|
|
||||||
mrsb (builder-fn rs opts)
|
|
||||||
row!-fn (get opts :row!-fn id2)
|
|
||||||
rs!-fn (get opts :rs!-fn id2)]
|
|
||||||
(reify
|
|
||||||
rs/RowBuilder
|
|
||||||
(->row [this] (rs/->row mrsb))
|
|
||||||
(column-count [this] (rs/column-count mrsb))
|
|
||||||
(with-column [this row i] (rs/with-column mrsb row i))
|
|
||||||
(row! [this row] (row!-fn (rs/row! mrsb row) opts))
|
|
||||||
rs/ResultSetBuilder
|
|
||||||
(->rs [this] (rs/->rs mrsb))
|
|
||||||
(with-row [this mrs row] (rs/with-row mrsb mrs row))
|
|
||||||
(rs! [this mrs] (rs!-fn (rs/rs! mrsb mrs) opts))))))
|
|
||||||
|
|
||||||
(defrecord JdbcMiddleware [db global-opts]
|
|
||||||
p/Executable
|
|
||||||
(-execute [this sql-params opts]
|
|
||||||
(let [opts (merge global-opts opts)
|
|
||||||
id2 (fn [x _] x)
|
|
||||||
builder-fn (get opts :builder-fn rs/as-maps)
|
|
||||||
sql-params-fn (get opts :sql-params-fn id2)
|
|
||||||
result (sql-params-fn sql-params opts)
|
|
||||||
sql-params' (if (map? result)
|
|
||||||
(or (:next.jdbc/sql-params result) sql-params)
|
|
||||||
result)]
|
|
||||||
(p/-execute db sql-params'
|
|
||||||
(assoc (if (map? result) result opts)
|
|
||||||
:builder-fn (post-processing-adapter builder-fn)
|
|
||||||
:next.jdbc/sql-params sql-params'))))
|
|
||||||
(-execute-one [this sql-params opts]
|
|
||||||
(let [opts (merge global-opts opts)
|
|
||||||
id2 (fn [x _] x)
|
|
||||||
builder-fn (get opts :builder-fn rs/as-maps)
|
|
||||||
sql-params-fn (get opts :sql-params-fn id2)
|
|
||||||
result (sql-params-fn sql-params opts)
|
|
||||||
sql-params' (if (map? result)
|
|
||||||
(or (:next.jdbc/sql-params result) sql-params)
|
|
||||||
result)]
|
|
||||||
(p/-execute-one db sql-params'
|
|
||||||
(assoc (if (map? result) result opts)
|
|
||||||
:builder-fn (post-processing-adapter builder-fn)
|
|
||||||
:next.jdbc/sql-params sql-params'))))
|
|
||||||
(-execute-all [this sql-params opts]
|
|
||||||
(let [opts (merge global-opts opts)
|
|
||||||
id2 (fn [x _] x)
|
|
||||||
builder-fn (get opts :builder-fn rs/as-maps)
|
|
||||||
sql-params-fn (get opts :sql-params-fn id2)
|
|
||||||
result (sql-params-fn sql-params opts)
|
|
||||||
sql-params' (if (map? result)
|
|
||||||
(or (:next.jdbc/sql-params result) sql-params)
|
|
||||||
result)]
|
|
||||||
(p/-execute-all db sql-params'
|
|
||||||
(assoc (if (map? result) result opts)
|
|
||||||
:builder-fn (post-processing-adapter builder-fn)
|
|
||||||
:next.jdbc/sql-params sql-params')))))
|
|
||||||
|
|
||||||
(defn wrapper
|
|
||||||
""
|
|
||||||
([db] (JdbcMiddleware. db {}))
|
|
||||||
([db opts] (JdbcMiddleware. db opts)))
|
|
||||||
|
|
@ -1,79 +0,0 @@
|
||||||
;; copyright (c) 2019-2020 Sean Corfield, all rights reserved
|
|
||||||
|
|
||||||
(ns next.jdbc.middleware-test
|
|
||||||
(:require [clojure.test :refer [deftest is testing use-fixtures]]
|
|
||||||
[next.jdbc :as jdbc]
|
|
||||||
[next.jdbc.connection :as c]
|
|
||||||
[next.jdbc.middleware :as mw]
|
|
||||||
[next.jdbc.test-fixtures :refer [with-test-db db ds
|
|
||||||
default-options
|
|
||||||
derby? postgres?]]
|
|
||||||
[next.jdbc.prepare :as prep]
|
|
||||||
[next.jdbc.result-set :as rs]
|
|
||||||
[next.jdbc.specs :as specs])
|
|
||||||
(:import (java.sql ResultSet ResultSetMetaData)))
|
|
||||||
|
|
||||||
(set! *warn-on-reflection* true)
|
|
||||||
|
|
||||||
(use-fixtures :once with-test-db)
|
|
||||||
|
|
||||||
(specs/instrument)
|
|
||||||
|
|
||||||
(deftest logging-test
|
|
||||||
(let [logging (atom [])
|
|
||||||
logger (fn [data _] (swap! logging conj data) data)
|
|
||||||
|
|
||||||
sql-p ["select * from fruit where id in (?,?) order by id desc" 1 4]]
|
|
||||||
(jdbc/execute! (mw/wrapper (ds))
|
|
||||||
sql-p
|
|
||||||
(assoc (default-options)
|
|
||||||
:builder-fn rs/as-lower-maps
|
|
||||||
:sql-params-fn logger
|
|
||||||
:row!-fn logger
|
|
||||||
:rs!-fn logger))
|
|
||||||
;; should log four things
|
|
||||||
(is (= 4 (-> @logging count)))
|
|
||||||
;; :next.jdbc/sql-params value
|
|
||||||
(is (= sql-p (-> @logging (nth 0))))
|
|
||||||
;; first row (with PK 4)
|
|
||||||
(is (= 4 (-> @logging (nth 1) :fruit/id)))
|
|
||||||
;; second row (with PK 1)
|
|
||||||
(is (= 1 (-> @logging (nth 2) :fruit/id)))
|
|
||||||
;; full result set with two rows
|
|
||||||
(is (= 2 (-> @logging (nth 3) count)))
|
|
||||||
(is (= [4 1] (-> @logging (nth 3) (->> (map :fruit/id)))))
|
|
||||||
;; now repeat without the row logging
|
|
||||||
(reset! logging [])
|
|
||||||
(jdbc/execute! (mw/wrapper (ds)
|
|
||||||
{:builder-fn rs/as-lower-maps
|
|
||||||
:sql-params-fn logger
|
|
||||||
:rs!-fn logger})
|
|
||||||
sql-p
|
|
||||||
(default-options))
|
|
||||||
;; should log two things
|
|
||||||
(is (= 2 (-> @logging count)))
|
|
||||||
;; :next.jdbc/sql-params value
|
|
||||||
(is (= sql-p (-> @logging (nth 0))))
|
|
||||||
;; full result set with two rows
|
|
||||||
(is (= 2 (-> @logging (nth 1) count)))
|
|
||||||
(is (= [4 1] (-> @logging (nth 1) (->> (map :fruit/id)))))))
|
|
||||||
|
|
||||||
(deftest timing-test
|
|
||||||
(let [timing (atom {:calls 0 :total 0.0})
|
|
||||||
start-fn (fn [sql-p opts]
|
|
||||||
(swap! (:timing opts) update :calls inc)
|
|
||||||
(assoc opts :start (System/nanoTime)))
|
|
||||||
exec-fn (fn [_ opts]
|
|
||||||
(let [end (System/nanoTime)]
|
|
||||||
(swap! (:timing opts) update :total + (- end (:start opts)))
|
|
||||||
opts))
|
|
||||||
sql-p ["select * from fruit where id in (?,?) order by id desc" 1 4]]
|
|
||||||
(jdbc/execute! (mw/wrapper (ds) {:timing timing
|
|
||||||
:sql-params-fn start-fn
|
|
||||||
:execute-fn exec-fn})
|
|
||||||
sql-p)
|
|
||||||
(jdbc/execute! (mw/wrapper (ds) {:timing timing
|
|
||||||
:sql-params-fn start-fn
|
|
||||||
:execute-fn exec-fn})
|
|
||||||
sql-p)
|
|
||||||
(println (db) (:calls @timing) "calls took" (long (:total @timing)) "nanoseconds")))
|
|
||||||
|
|
@ -1,23 +1,25 @@
|
||||||
;; copyright (c) 2019-2020 Sean Corfield, all rights reserved
|
;; copyright (c) 2019-2025 Sean Corfield, all rights reserved
|
||||||
|
|
||||||
(ns next.jdbc.optional-test
|
(ns next.jdbc.optional-test
|
||||||
"Test namespace for the optional builder functions."
|
"Test namespace for the optional builder functions."
|
||||||
(:require [clojure.string :as str]
|
(:require [clojure.string :as str]
|
||||||
[clojure.test :refer [deftest is testing use-fixtures]]
|
[lazytest.core :refer [around set-ns-context!]]
|
||||||
|
[lazytest.experimental.interfaces.clojure-test :refer [deftest is testing]]
|
||||||
[next.jdbc.optional :as opt]
|
[next.jdbc.optional :as opt]
|
||||||
[next.jdbc.protocols :as p]
|
[next.jdbc.protocols :as p]
|
||||||
[next.jdbc.test-fixtures :refer [with-test-db ds column
|
[next.jdbc.test-fixtures :refer [col-kw column default-options ds index
|
||||||
default-options]])
|
with-test-db]])
|
||||||
(:import (java.sql ResultSet ResultSetMetaData)))
|
(:import
|
||||||
|
(java.sql ResultSet ResultSetMetaData)))
|
||||||
|
|
||||||
(set! *warn-on-reflection* true)
|
(set! *warn-on-reflection* true)
|
||||||
|
|
||||||
(use-fixtures :once with-test-db)
|
(set-ns-context! [(around [f] (with-test-db f))])
|
||||||
|
|
||||||
(deftest test-map-row-builder
|
(deftest test-map-row-builder
|
||||||
(testing "default row builder"
|
(testing "default row builder"
|
||||||
(let [row (p/-execute-one (ds)
|
(let [row (p/-execute-one (ds)
|
||||||
["select * from fruit where id = ?" 1]
|
[(str "select * from fruit where " (index) " = ?") 1]
|
||||||
(assoc (default-options)
|
(assoc (default-options)
|
||||||
:builder-fn opt/as-maps))]
|
:builder-fn opt/as-maps))]
|
||||||
(is (map? row))
|
(is (map? row))
|
||||||
|
|
@ -26,7 +28,7 @@
|
||||||
(is (= "Apple" ((column :FRUIT/NAME) row)))))
|
(is (= "Apple" ((column :FRUIT/NAME) row)))))
|
||||||
(testing "unqualified row builder"
|
(testing "unqualified row builder"
|
||||||
(let [row (p/-execute-one (ds)
|
(let [row (p/-execute-one (ds)
|
||||||
["select * from fruit where id = ?" 2]
|
[(str "select * from fruit where " (index) " = ?") 2]
|
||||||
{:builder-fn opt/as-unqualified-maps})]
|
{:builder-fn opt/as-unqualified-maps})]
|
||||||
(is (map? row))
|
(is (map? row))
|
||||||
(is (not (contains? row (column :COST))))
|
(is (not (contains? row (column :COST))))
|
||||||
|
|
@ -34,23 +36,23 @@
|
||||||
(is (= "Banana" ((column :NAME) row)))))
|
(is (= "Banana" ((column :NAME) row)))))
|
||||||
(testing "lower-case row builder"
|
(testing "lower-case row builder"
|
||||||
(let [row (p/-execute-one (ds)
|
(let [row (p/-execute-one (ds)
|
||||||
["select * from fruit where id = ?" 3]
|
[(str "select * from fruit where " (index) " = ?") 3]
|
||||||
(assoc (default-options)
|
(assoc (default-options)
|
||||||
:builder-fn opt/as-lower-maps))]
|
:builder-fn opt/as-lower-maps))]
|
||||||
(is (map? row))
|
(is (map? row))
|
||||||
(is (not (contains? row :fruit/appearance)))
|
(is (not (contains? row (col-kw :fruit/appearance))))
|
||||||
(is (= 3 (:fruit/id row)))
|
(is (= 3 ((col-kw :fruit/id) row)))
|
||||||
(is (= "Peach" (:fruit/name row)))))
|
(is (= "Peach" ((col-kw :fruit/name) row)))))
|
||||||
(testing "unqualified lower-case row builder"
|
(testing "unqualified lower-case row builder"
|
||||||
(let [row (p/-execute-one (ds)
|
(let [row (p/-execute-one (ds)
|
||||||
["select * from fruit where id = ?" 4]
|
[(str "select * from fruit where " (index) " = ?") 4]
|
||||||
{:builder-fn opt/as-unqualified-lower-maps})]
|
{:builder-fn opt/as-unqualified-lower-maps})]
|
||||||
(is (map? row))
|
(is (map? row))
|
||||||
(is (= 4 (:id row)))
|
(is (= 4 ((col-kw :id) row)))
|
||||||
(is (= "Orange" (:name row)))))
|
(is (= "Orange" ((col-kw :name) row)))))
|
||||||
(testing "custom row builder"
|
(testing "custom row builder"
|
||||||
(let [row (p/-execute-one (ds)
|
(let [row (p/-execute-one (ds)
|
||||||
["select * from fruit where id = ?" 3]
|
[(str "select * from fruit where " (index) " = ?") 3]
|
||||||
(assoc (default-options)
|
(assoc (default-options)
|
||||||
:builder-fn opt/as-modified-maps
|
:builder-fn opt/as-modified-maps
|
||||||
:label-fn str/lower-case
|
:label-fn str/lower-case
|
||||||
|
|
@ -61,13 +63,13 @@
|
||||||
(is (= "Peach" ((column :FRUIT/name) row))))))
|
(is (= "Peach" ((column :FRUIT/name) row))))))
|
||||||
|
|
||||||
(defn- default-column-reader
|
(defn- default-column-reader
|
||||||
[^ResultSet rs ^ResultSetMetaData rsmeta ^Integer i]
|
[^ResultSet rs ^ResultSetMetaData _ ^Integer i]
|
||||||
(.getObject rs i))
|
(.getObject rs i))
|
||||||
|
|
||||||
(deftest test-map-row-adapter
|
(deftest test-map-row-adapter
|
||||||
(testing "default row builder"
|
(testing "default row builder"
|
||||||
(let [row (p/-execute-one (ds)
|
(let [row (p/-execute-one (ds)
|
||||||
["select * from fruit where id = ?" 1]
|
[(str "select * from fruit where " (index) " = ?") 1]
|
||||||
(assoc (default-options)
|
(assoc (default-options)
|
||||||
:builder-fn (opt/as-maps-adapter
|
:builder-fn (opt/as-maps-adapter
|
||||||
opt/as-maps
|
opt/as-maps
|
||||||
|
|
@ -78,7 +80,7 @@
|
||||||
(is (= "Apple" ((column :FRUIT/NAME) row)))))
|
(is (= "Apple" ((column :FRUIT/NAME) row)))))
|
||||||
(testing "unqualified row builder"
|
(testing "unqualified row builder"
|
||||||
(let [row (p/-execute-one (ds)
|
(let [row (p/-execute-one (ds)
|
||||||
["select * from fruit where id = ?" 2]
|
[(str "select * from fruit where " (index) " = ?") 2]
|
||||||
{:builder-fn (opt/as-maps-adapter
|
{:builder-fn (opt/as-maps-adapter
|
||||||
opt/as-unqualified-maps
|
opt/as-unqualified-maps
|
||||||
default-column-reader)})]
|
default-column-reader)})]
|
||||||
|
|
@ -88,27 +90,27 @@
|
||||||
(is (= "Banana" ((column :NAME) row)))))
|
(is (= "Banana" ((column :NAME) row)))))
|
||||||
(testing "lower-case row builder"
|
(testing "lower-case row builder"
|
||||||
(let [row (p/-execute-one (ds)
|
(let [row (p/-execute-one (ds)
|
||||||
["select * from fruit where id = ?" 3]
|
[(str "select * from fruit where " (index) " = ?") 3]
|
||||||
(assoc (default-options)
|
(assoc (default-options)
|
||||||
:builder-fn (opt/as-maps-adapter
|
:builder-fn (opt/as-maps-adapter
|
||||||
opt/as-lower-maps
|
opt/as-lower-maps
|
||||||
default-column-reader)))]
|
default-column-reader)))]
|
||||||
(is (map? row))
|
(is (map? row))
|
||||||
(is (not (contains? row :fruit/appearance)))
|
(is (not (contains? row (col-kw :fruit/appearance))))
|
||||||
(is (= 3 (:fruit/id row)))
|
(is (= 3 ((col-kw :fruit/id) row)))
|
||||||
(is (= "Peach" (:fruit/name row)))))
|
(is (= "Peach" ((col-kw :fruit/name) row)))))
|
||||||
(testing "unqualified lower-case row builder"
|
(testing "unqualified lower-case row builder"
|
||||||
(let [row (p/-execute-one (ds)
|
(let [row (p/-execute-one (ds)
|
||||||
["select * from fruit where id = ?" 4]
|
[(str "select * from fruit where " (index) " = ?") 4]
|
||||||
{:builder-fn (opt/as-maps-adapter
|
{:builder-fn (opt/as-maps-adapter
|
||||||
opt/as-unqualified-lower-maps
|
opt/as-unqualified-lower-maps
|
||||||
default-column-reader)})]
|
default-column-reader)})]
|
||||||
(is (map? row))
|
(is (map? row))
|
||||||
(is (= 4 (:id row)))
|
(is (= 4 ((col-kw :id) row)))
|
||||||
(is (= "Orange" (:name row)))))
|
(is (= "Orange" ((col-kw :name) row)))))
|
||||||
(testing "custom row builder"
|
(testing "custom row builder"
|
||||||
(let [row (p/-execute-one (ds)
|
(let [row (p/-execute-one (ds)
|
||||||
["select * from fruit where id = ?" 3]
|
[(str "select * from fruit where " (index) " = ?") 3]
|
||||||
(assoc (default-options)
|
(assoc (default-options)
|
||||||
:builder-fn (opt/as-maps-adapter
|
:builder-fn (opt/as-maps-adapter
|
||||||
opt/as-modified-maps
|
opt/as-modified-maps
|
||||||
|
|
|
||||||
75
test/next/jdbc/plan_test.clj
Normal file
75
test/next/jdbc/plan_test.clj
Normal file
|
|
@ -0,0 +1,75 @@
|
||||||
|
;; copyright (c) 2020-2025 Sean Corfield, all rights reserved
|
||||||
|
|
||||||
|
(ns next.jdbc.plan-test
|
||||||
|
"Tests for the plan helpers."
|
||||||
|
(:require [lazytest.core :refer [around]]
|
||||||
|
[lazytest.experimental.interfaces.clojure-test :refer [deftest is]]
|
||||||
|
[next.jdbc.plan :as plan]
|
||||||
|
[next.jdbc.specs :as specs]
|
||||||
|
[next.jdbc.test-fixtures
|
||||||
|
:refer [with-test-db ds col-kw index]]
|
||||||
|
[clojure.string :as str]))
|
||||||
|
|
||||||
|
(set! *warn-on-reflection* true)
|
||||||
|
|
||||||
|
(specs/instrument)
|
||||||
|
|
||||||
|
(deftest select-one!-tests
|
||||||
|
{:context [(around [f] (with-test-db f))]}
|
||||||
|
(is (= {(col-kw :id) 1}
|
||||||
|
(plan/select-one! (ds) [(col-kw :id)] [(str "select * from fruit order by " (index))])))
|
||||||
|
(is (= 1
|
||||||
|
(plan/select-one! (ds) (col-kw :id) [(str "select * from fruit order by " (index))])))
|
||||||
|
(is (= "Banana"
|
||||||
|
(plan/select-one! (ds) :name [(str "select * from fruit where " (index) " = ?") 2])))
|
||||||
|
(is (= [1 "Apple"]
|
||||||
|
(plan/select-one! (ds) (juxt (col-kw :id) :name)
|
||||||
|
[(str "select * from fruit order by " (index))])))
|
||||||
|
(is (= {(col-kw :id) 1 :name "Apple"}
|
||||||
|
(plan/select-one! (ds) #(select-keys % [(col-kw :id) :name])
|
||||||
|
[(str "select * from fruit order by " (index))]))))
|
||||||
|
|
||||||
|
(deftest select-vector-tests
|
||||||
|
{:context [(around [f] (with-test-db f))]}
|
||||||
|
(is (= [{(col-kw :id) 1} {(col-kw :id) 2} {(col-kw :id) 3} {(col-kw :id) 4}]
|
||||||
|
(plan/select! (ds) [(col-kw :id)] [(str "select * from fruit order by " (index))])))
|
||||||
|
(is (= [1 2 3 4]
|
||||||
|
(plan/select! (ds) (col-kw :id) [(str "select * from fruit order by " (index))])))
|
||||||
|
(is (= ["Banana"]
|
||||||
|
(plan/select! (ds) :name [(str "select * from fruit where " (index) " = ?") 2])))
|
||||||
|
(is (= [[2 "Banana"]]
|
||||||
|
(plan/select! (ds) (juxt (col-kw :id) :name)
|
||||||
|
[(str "select * from fruit where " (index) " = ?") 2])))
|
||||||
|
(is (= [{(col-kw :id) 2 :name "Banana"}]
|
||||||
|
(plan/select! (ds) [(col-kw :id) :name]
|
||||||
|
[(str "select * from fruit where " (index) " = ?") 2]))))
|
||||||
|
|
||||||
|
(deftest select-set-tests
|
||||||
|
{:context [(around [f] (with-test-db f))]}
|
||||||
|
(is (= #{{(col-kw :id) 1} {(col-kw :id) 2} {(col-kw :id) 3} {(col-kw :id) 4}}
|
||||||
|
(plan/select! (ds) [(col-kw :id)] [(str "select * from fruit order by " (index))]
|
||||||
|
{:into #{}})))
|
||||||
|
(is (= #{1 2 3 4}
|
||||||
|
(plan/select! (ds) (col-kw :id) [(str "select * from fruit order by " (index))]
|
||||||
|
{:into #{}}))))
|
||||||
|
|
||||||
|
(deftest select-map-tests
|
||||||
|
{:context [(around [f] (with-test-db f))]}
|
||||||
|
(is (= {1 "Apple", 2 "Banana", 3 "Peach", 4 "Orange"}
|
||||||
|
(plan/select! (ds) (juxt (col-kw :id) :name) [(str "select * from fruit order by " (index))]
|
||||||
|
{:into {}}))))
|
||||||
|
|
||||||
|
(deftest select-issue-227
|
||||||
|
{:context [(around [f] (with-test-db f))]}
|
||||||
|
(is (= ["Apple"]
|
||||||
|
(plan/select! (ds) :name [(str "select * from fruit where " (index) " = ?") 1]
|
||||||
|
{:column-fn #(str/replace % "-" "_")})))
|
||||||
|
(is (= ["Apple"]
|
||||||
|
(plan/select! (ds) :foo/name [(str "select * from fruit where " (index) " = ?") 1]
|
||||||
|
{:column-fn #(str/replace % "-" "_")})))
|
||||||
|
(is (= ["Apple"]
|
||||||
|
(plan/select! (ds) #(get % "name") [(str "select * from fruit where " (index) " = ?") 1]
|
||||||
|
{:column-fn #(str/replace % "-" "_")})))
|
||||||
|
(is (= [["Apple"]]
|
||||||
|
(plan/select! (ds) (juxt :name) [(str "select * from fruit where " (index) " = ?") 1]
|
||||||
|
{:column-fn #(str/replace % "-" "_")}))))
|
||||||
|
|
@ -1,79 +1,47 @@
|
||||||
;; copyright (c) 2019-2020 Sean Corfield, all rights reserved
|
;; copyright (c) 2019-2025 Sean Corfield, all rights reserved
|
||||||
|
|
||||||
(ns next.jdbc.prepare-test
|
(ns next.jdbc.prepare-test
|
||||||
"Stub test namespace for PreparedStatement creation etc.
|
"Stub test namespace for PreparedStatement creation etc.
|
||||||
|
|
||||||
Most of this functionality is core to all of the higher-level stuff
|
Most of this functionality is core to all of the higher-level stuff
|
||||||
so it gets tested that way, but there are some specific tests for
|
so it gets tested that way.
|
||||||
`execute-batch!` here."
|
|
||||||
(:require [clojure.test :refer [deftest is testing use-fixtures]]
|
The tests for the deprecated version of `execute-batch!` are here
|
||||||
|
as a guard against regressions."
|
||||||
|
(:require [lazytest.core :refer [around set-ns-context!]]
|
||||||
|
[lazytest.experimental.interfaces.clojure-test :refer [deftest is testing]]
|
||||||
[next.jdbc :as jdbc]
|
[next.jdbc :as jdbc]
|
||||||
[next.jdbc.test-fixtures :refer [with-test-db ds postgres? sqlite?]]
|
[next.jdbc.test-fixtures
|
||||||
|
:refer [with-test-db ds jtds? mssql? sqlite? xtdb?]]
|
||||||
[next.jdbc.prepare :as prep]
|
[next.jdbc.prepare :as prep]
|
||||||
[next.jdbc.specs :as specs]))
|
[next.jdbc.specs :as specs]))
|
||||||
|
|
||||||
(set! *warn-on-reflection* true)
|
(set! *warn-on-reflection* true)
|
||||||
|
|
||||||
(use-fixtures :once with-test-db)
|
(set-ns-context! [(around [f] (with-test-db f))])
|
||||||
|
|
||||||
(specs/instrument)
|
(specs/instrument)
|
||||||
|
|
||||||
(deftest execute-batch-tests
|
(deftest execute-batch-tests
|
||||||
(testing "simple batch insert"
|
(when-not (xtdb?)
|
||||||
(is (= [1 1 1 1 1 1 1 1 1 13]
|
(testing "simple batch insert"
|
||||||
(jdbc/with-transaction [t (ds) {:rollback-only true}]
|
(is (= [1 1 1 1 1 1 1 1 1 13]
|
||||||
(with-open [ps (jdbc/prepare t ["
|
(jdbc/with-transaction [t (ds) {:rollback-only true}]
|
||||||
|
(with-open [ps (jdbc/prepare t ["
|
||||||
INSERT INTO fruit (name, appearance) VALUES (?,?)
|
INSERT INTO fruit (name, appearance) VALUES (?,?)
|
||||||
"])]
|
"])]
|
||||||
(let [result (prep/execute-batch! ps [["fruit1" "one"]
|
(let [result (prep/execute-batch! ps [["fruit1" "one"]
|
||||||
["fruit2" "two"]
|
["fruit2" "two"]
|
||||||
["fruit3" "three"]
|
["fruit3" "three"]
|
||||||
["fruit4" "four"]
|
["fruit4" "four"]
|
||||||
["fruit5" "five"]
|
["fruit5" "five"]
|
||||||
["fruit6" "six"]
|
["fruit6" "six"]
|
||||||
["fruit7" "seven"]
|
["fruit7" "seven"]
|
||||||
["fruit8" "eight"]
|
["fruit8" "eight"]
|
||||||
["fruit9" "nine"]])]
|
["fruit9" "nine"]])]
|
||||||
(conj result (count (jdbc/execute! t ["select * from fruit"]))))))))
|
(conj result (count (jdbc/execute! t ["select * from fruit"]))))))))
|
||||||
(is (= 4 (count (jdbc/execute! (ds) ["select * from fruit"])))))
|
(is (= 4 (count (jdbc/execute! (ds) ["select * from fruit"])))))
|
||||||
(testing "small batch insert"
|
(testing "small batch insert"
|
||||||
(is (= [1 1 1 1 1 1 1 1 1 13]
|
|
||||||
(jdbc/with-transaction [t (ds) {:rollback-only true}]
|
|
||||||
(with-open [ps (jdbc/prepare t ["
|
|
||||||
INSERT INTO fruit (name, appearance) VALUES (?,?)
|
|
||||||
"])]
|
|
||||||
(let [result (prep/execute-batch! ps [["fruit1" "one"]
|
|
||||||
["fruit2" "two"]
|
|
||||||
["fruit3" "three"]
|
|
||||||
["fruit4" "four"]
|
|
||||||
["fruit5" "five"]
|
|
||||||
["fruit6" "six"]
|
|
||||||
["fruit7" "seven"]
|
|
||||||
["fruit8" "eight"]
|
|
||||||
["fruit9" "nine"]]
|
|
||||||
{:batch-size 3})]
|
|
||||||
(conj result (count (jdbc/execute! t ["select * from fruit"]))))))))
|
|
||||||
(is (= 4 (count (jdbc/execute! (ds) ["select * from fruit"])))))
|
|
||||||
(testing "big batch insert"
|
|
||||||
(is (= [1 1 1 1 1 1 1 1 1 13]
|
|
||||||
(jdbc/with-transaction [t (ds) {:rollback-only true}]
|
|
||||||
(with-open [ps (jdbc/prepare t ["
|
|
||||||
INSERT INTO fruit (name, appearance) VALUES (?,?)
|
|
||||||
"])]
|
|
||||||
(let [result (prep/execute-batch! ps [["fruit1" "one"]
|
|
||||||
["fruit2" "two"]
|
|
||||||
["fruit3" "three"]
|
|
||||||
["fruit4" "four"]
|
|
||||||
["fruit5" "five"]
|
|
||||||
["fruit6" "six"]
|
|
||||||
["fruit7" "seven"]
|
|
||||||
["fruit8" "eight"]
|
|
||||||
["fruit9" "nine"]]
|
|
||||||
{:batch-size 8})]
|
|
||||||
(conj result (count (jdbc/execute! t ["select * from fruit"]))))))))
|
|
||||||
(is (= 4 (count (jdbc/execute! (ds) ["select * from fruit"])))))
|
|
||||||
(testing "large batch insert"
|
|
||||||
(when-not (or (postgres?) (sqlite?))
|
|
||||||
(is (= [1 1 1 1 1 1 1 1 1 13]
|
(is (= [1 1 1 1 1 1 1 1 1 13]
|
||||||
(jdbc/with-transaction [t (ds) {:rollback-only true}]
|
(jdbc/with-transaction [t (ds) {:rollback-only true}]
|
||||||
(with-open [ps (jdbc/prepare t ["
|
(with-open [ps (jdbc/prepare t ["
|
||||||
|
|
@ -88,7 +56,70 @@ INSERT INTO fruit (name, appearance) VALUES (?,?)
|
||||||
["fruit7" "seven"]
|
["fruit7" "seven"]
|
||||||
["fruit8" "eight"]
|
["fruit8" "eight"]
|
||||||
["fruit9" "nine"]]
|
["fruit9" "nine"]]
|
||||||
{:batch-size 4
|
{:batch-size 3})]
|
||||||
:large true})]
|
|
||||||
(conj result (count (jdbc/execute! t ["select * from fruit"]))))))))
|
(conj result (count (jdbc/execute! t ["select * from fruit"]))))))))
|
||||||
(is (= 4 (count (jdbc/execute! (ds) ["select * from fruit"])))))))
|
(is (= 4 (count (jdbc/execute! (ds) ["select * from fruit"])))))
|
||||||
|
(testing "big batch insert"
|
||||||
|
(is (= [1 1 1 1 1 1 1 1 1 13]
|
||||||
|
(jdbc/with-transaction [t (ds) {:rollback-only true}]
|
||||||
|
(with-open [ps (jdbc/prepare t ["
|
||||||
|
INSERT INTO fruit (name, appearance) VALUES (?,?)
|
||||||
|
"])]
|
||||||
|
(let [result (prep/execute-batch! ps [["fruit1" "one"]
|
||||||
|
["fruit2" "two"]
|
||||||
|
["fruit3" "three"]
|
||||||
|
["fruit4" "four"]
|
||||||
|
["fruit5" "five"]
|
||||||
|
["fruit6" "six"]
|
||||||
|
["fruit7" "seven"]
|
||||||
|
["fruit8" "eight"]
|
||||||
|
["fruit9" "nine"]]
|
||||||
|
{:batch-size 8})]
|
||||||
|
(conj result (count (jdbc/execute! t ["select * from fruit"]))))))))
|
||||||
|
(is (= 4 (count (jdbc/execute! (ds) ["select * from fruit"])))))
|
||||||
|
(testing "large batch insert"
|
||||||
|
(when-not (or (jtds?) (sqlite?))
|
||||||
|
(is (= [1 1 1 1 1 1 1 1 1 13]
|
||||||
|
(jdbc/with-transaction [t (ds) {:rollback-only true}]
|
||||||
|
(with-open [ps (jdbc/prepare t ["
|
||||||
|
INSERT INTO fruit (name, appearance) VALUES (?,?)
|
||||||
|
"])]
|
||||||
|
(let [result (prep/execute-batch! ps [["fruit1" "one"]
|
||||||
|
["fruit2" "two"]
|
||||||
|
["fruit3" "three"]
|
||||||
|
["fruit4" "four"]
|
||||||
|
["fruit5" "five"]
|
||||||
|
["fruit6" "six"]
|
||||||
|
["fruit7" "seven"]
|
||||||
|
["fruit8" "eight"]
|
||||||
|
["fruit9" "nine"]]
|
||||||
|
{:batch-size 4
|
||||||
|
:large true})]
|
||||||
|
(conj result (count (jdbc/execute! t ["select * from fruit"]))))))))
|
||||||
|
(is (= 4 (count (jdbc/execute! (ds) ["select * from fruit"]))))))
|
||||||
|
(testing "return generated keys"
|
||||||
|
(when-not (or (mssql?) (sqlite?))
|
||||||
|
(let [results
|
||||||
|
(jdbc/with-transaction [t (ds) {:rollback-only true}]
|
||||||
|
(with-open [ps (jdbc/prepare t ["
|
||||||
|
INSERT INTO fruit (name, appearance) VALUES (?,?)
|
||||||
|
"]
|
||||||
|
{:return-keys true})]
|
||||||
|
(let [result (prep/execute-batch! ps [["fruit1" "one"]
|
||||||
|
["fruit2" "two"]
|
||||||
|
["fruit3" "three"]
|
||||||
|
["fruit4" "four"]
|
||||||
|
["fruit5" "five"]
|
||||||
|
["fruit6" "six"]
|
||||||
|
["fruit7" "seven"]
|
||||||
|
["fruit8" "eight"]
|
||||||
|
["fruit9" "nine"]]
|
||||||
|
{:batch-size 4
|
||||||
|
:return-generated-keys true})]
|
||||||
|
(conj result (count (jdbc/execute! t ["select * from fruit"]))))))]
|
||||||
|
(is (= 13 (last results)))
|
||||||
|
(is (every? map? (butlast results)))
|
||||||
|
;; Derby and SQLite only return one generated key per batch so there
|
||||||
|
;; are only three keys, plus the overall count here:
|
||||||
|
(is (< 3 (count results))))
|
||||||
|
(is (= 4 (count (jdbc/execute! (ds) ["select * from fruit"]))))))))
|
||||||
|
|
|
||||||
|
|
@ -1,9 +1,8 @@
|
||||||
;; copyright (c) 2019-2020 Sean Corfield, all rights reserved
|
;; copyright (c) 2019-2025 Sean Corfield, all rights reserved
|
||||||
|
|
||||||
(ns next.jdbc.protocols-test
|
(ns next.jdbc.protocols-test
|
||||||
"Stub test namespace for low-level protocols. Nothing can really be tested
|
"Stub test namespace for low-level protocols. Nothing can really be tested
|
||||||
at this level tho'..."
|
at this level tho'..."
|
||||||
(:require [clojure.test :refer [deftest is testing]]
|
(:require [next.jdbc.protocols]))
|
||||||
[next.jdbc.protocols :refer :all]))
|
|
||||||
|
|
||||||
(set! *warn-on-reflection* true)
|
(set! *warn-on-reflection* true)
|
||||||
|
|
|
||||||
|
|
@ -1,34 +1,30 @@
|
||||||
;; copyright (c) 2019-2020 Sean Corfield, all rights reserved
|
;; copyright (c) 2019-2025 Sean Corfield, all rights reserved
|
||||||
|
|
||||||
(ns next.jdbc.quoted-test
|
(ns next.jdbc.quoted-test
|
||||||
"Basic tests for quoting strategies. These are also tested indirectly
|
"Basic tests for quoting strategies. These are also tested indirectly
|
||||||
via the next.jdbc.sql tests."
|
via the next.jdbc.sql tests."
|
||||||
(:require [clojure.test :refer [deftest are testing]]
|
(:require [lazytest.core :refer [defdescribe describe it expect]]
|
||||||
[next.jdbc.quoted :refer [ansi mysql sql-server oracle postgres
|
[next.jdbc.quoted :refer [ansi mysql sql-server oracle postgres
|
||||||
schema]]))
|
schema]]))
|
||||||
|
|
||||||
(set! *warn-on-reflection* true)
|
(set! *warn-on-reflection* true)
|
||||||
|
|
||||||
(deftest basic-quoting
|
(def ^:private quote-fns [ansi mysql sql-server oracle postgres])
|
||||||
(are [quote-fn quoted] (= (quote-fn "x") quoted)
|
|
||||||
ansi "\"x\""
|
|
||||||
mysql "`x`"
|
|
||||||
sql-server "[x]"
|
|
||||||
oracle "\"x\""
|
|
||||||
postgres "\"x\""))
|
|
||||||
|
|
||||||
(deftest schema-quoting
|
(defdescribe quoted-functionality
|
||||||
(testing "verify non-schema behavior"
|
(describe "base quoting"
|
||||||
(are [quote-fn quoted] (= (quote-fn "x.y") quoted)
|
(it "should correctly quote simple names"
|
||||||
ansi "\"x.y\""
|
(doseq [[f e] (map vector quote-fns
|
||||||
mysql "`x.y`"
|
["\"x\"" "`x`" "[x]" "\"x\"" "\"x\""])]
|
||||||
sql-server "[x.y]"
|
(expect (= (f "x") e)))))
|
||||||
oracle "\"x.y\""
|
(describe "dotted name quoting"
|
||||||
postgres "\"x.y\""))
|
(describe "basic quoting"
|
||||||
(testing "verify schema behavior"
|
(it "should quote dotted names 'as-is'"
|
||||||
(are [quote-fn quoted] (= ((schema quote-fn) "x.y") quoted)
|
(doseq [[f e] (map vector quote-fns
|
||||||
ansi "\"x\".\"y\""
|
["\"x.y\"" "`x.y`" "[x.y]" "\"x.y\"" "\"x.y\""])]
|
||||||
mysql "`x`.`y`"
|
(expect (= (f "x.y") e)))))
|
||||||
sql-server "[x].[y]"
|
(describe "schema quoting"
|
||||||
oracle "\"x\".\"y\""
|
(it "should split and quote dotted names with schema"
|
||||||
postgres "\"x\".\"y\"")))
|
(doseq [[f e] (map vector quote-fns
|
||||||
|
["\"x\".\"y\"" "`x`.`y`" "[x].[y]" "\"x\".\"y\"" "\"x\".\"y\""])]
|
||||||
|
(expect (= ((schema f) "x.y") e)))))))
|
||||||
|
|
|
||||||
|
|
@ -1,4 +1,4 @@
|
||||||
;; copyright (c) 2019-2020 Sean Corfield, all rights reserved
|
;; copyright (c) 2019-2025 Sean Corfield, all rights reserved
|
||||||
|
|
||||||
(ns next.jdbc.result-set-test
|
(ns next.jdbc.result-set-test
|
||||||
"Test namespace for the result set functions.
|
"Test namespace for the result set functions.
|
||||||
|
|
@ -8,18 +8,19 @@
|
||||||
(:require [clojure.core.protocols :as core-p]
|
(:require [clojure.core.protocols :as core-p]
|
||||||
[clojure.datafy :as d]
|
[clojure.datafy :as d]
|
||||||
[clojure.string :as str]
|
[clojure.string :as str]
|
||||||
[clojure.test :refer [deftest is testing use-fixtures]]
|
[lazytest.core :refer [around set-ns-context!]]
|
||||||
|
[lazytest.experimental.interfaces.clojure-test :refer [deftest is testing]]
|
||||||
[next.jdbc.protocols :as p]
|
[next.jdbc.protocols :as p]
|
||||||
[next.jdbc.result-set :as rs]
|
[next.jdbc.result-set :as rs]
|
||||||
[next.jdbc.specs :as specs]
|
[next.jdbc.specs :as specs]
|
||||||
[next.jdbc.test-fixtures :refer [with-test-db ds column
|
[next.jdbc.test-fixtures :refer [with-test-db ds column index col-kw
|
||||||
default-options
|
default-options
|
||||||
derby? mssql? mysql? postgres?]])
|
derby? mssql? mysql? postgres? xtdb?]])
|
||||||
(:import (java.sql ResultSet ResultSetMetaData)))
|
(:import (java.sql ResultSet ResultSetMetaData)))
|
||||||
|
|
||||||
(set! *warn-on-reflection* true)
|
(set! *warn-on-reflection* true)
|
||||||
|
|
||||||
(use-fixtures :once with-test-db)
|
(set-ns-context! [(around [f] (with-test-db f))])
|
||||||
|
|
||||||
(specs/instrument)
|
(specs/instrument)
|
||||||
|
|
||||||
|
|
@ -27,7 +28,9 @@
|
||||||
(testing "default schema"
|
(testing "default schema"
|
||||||
(let [connectable (ds)
|
(let [connectable (ds)
|
||||||
test-row (rs/datafiable-row {:TABLE/FRUIT_ID 1} connectable
|
test-row (rs/datafiable-row {:TABLE/FRUIT_ID 1} connectable
|
||||||
(default-options))
|
(cond-> (default-options)
|
||||||
|
(xtdb?)
|
||||||
|
(assoc :schema-opts {:pk "_id"})))
|
||||||
data (d/datafy test-row)
|
data (d/datafy test-row)
|
||||||
v (get data :TABLE/FRUIT_ID)]
|
v (get data :TABLE/FRUIT_ID)]
|
||||||
;; check datafication is sane
|
;; check datafication is sane
|
||||||
|
|
@ -40,7 +43,10 @@
|
||||||
(let [connectable (ds)
|
(let [connectable (ds)
|
||||||
test-row (rs/datafiable-row {:foo/bar 2} connectable
|
test-row (rs/datafiable-row {:foo/bar 2} connectable
|
||||||
(assoc (default-options)
|
(assoc (default-options)
|
||||||
:schema {:foo/bar :fruit/id}))
|
:schema {:foo/bar
|
||||||
|
(if (xtdb?)
|
||||||
|
:fruit/_id
|
||||||
|
:fruit/id)}))
|
||||||
data (d/datafy test-row)
|
data (d/datafy test-row)
|
||||||
v (get data :foo/bar)]
|
v (get data :foo/bar)]
|
||||||
;; check datafication is sane
|
;; check datafication is sane
|
||||||
|
|
@ -53,7 +59,10 @@
|
||||||
(let [connectable (ds)
|
(let [connectable (ds)
|
||||||
test-row (rs/datafiable-row {:foo/bar 3} connectable
|
test-row (rs/datafiable-row {:foo/bar 3} connectable
|
||||||
(assoc (default-options)
|
(assoc (default-options)
|
||||||
:schema {:foo/bar [:fruit/id]}))
|
:schema {:foo/bar
|
||||||
|
[(if (xtdb?)
|
||||||
|
:fruit/_id
|
||||||
|
:fruit/id)]}))
|
||||||
data (d/datafy test-row)
|
data (d/datafy test-row)
|
||||||
v (get data :foo/bar)]
|
v (get data :foo/bar)]
|
||||||
;; check datafication is sane
|
;; check datafication is sane
|
||||||
|
|
@ -67,7 +76,7 @@
|
||||||
(let [connectable (ds)
|
(let [connectable (ds)
|
||||||
test-row (rs/datafiable-row {:foo/bar 2} connectable
|
test-row (rs/datafiable-row {:foo/bar 2} connectable
|
||||||
(assoc (default-options)
|
(assoc (default-options)
|
||||||
:schema {:foo/bar [:fruit :id]}))
|
:schema {:foo/bar [:fruit (col-kw :id)]}))
|
||||||
data (d/datafy test-row)
|
data (d/datafy test-row)
|
||||||
v (get data :foo/bar)]
|
v (get data :foo/bar)]
|
||||||
;; check datafication is sane
|
;; check datafication is sane
|
||||||
|
|
@ -79,7 +88,7 @@
|
||||||
(let [connectable (ds)
|
(let [connectable (ds)
|
||||||
test-row (rs/datafiable-row {:foo/bar 3} connectable
|
test-row (rs/datafiable-row {:foo/bar 3} connectable
|
||||||
(assoc (default-options)
|
(assoc (default-options)
|
||||||
:schema {:foo/bar [:fruit :id :many]}))
|
:schema {:foo/bar [:fruit (col-kw :id) :many]}))
|
||||||
data (d/datafy test-row)
|
data (d/datafy test-row)
|
||||||
v (get data :foo/bar)]
|
v (get data :foo/bar)]
|
||||||
;; check datafication is sane
|
;; check datafication is sane
|
||||||
|
|
@ -93,7 +102,7 @@
|
||||||
(deftest test-map-row-builder
|
(deftest test-map-row-builder
|
||||||
(testing "default row builder"
|
(testing "default row builder"
|
||||||
(let [row (p/-execute-one (ds)
|
(let [row (p/-execute-one (ds)
|
||||||
["select * from fruit where id = ?" 1]
|
[(str "select * from fruit where " (index) " = ?") 1]
|
||||||
(default-options))]
|
(default-options))]
|
||||||
(is (map? row))
|
(is (map? row))
|
||||||
(is (contains? row (column :FRUIT/GRADE)))
|
(is (contains? row (column :FRUIT/GRADE)))
|
||||||
|
|
@ -101,7 +110,7 @@
|
||||||
(is (= 1 ((column :FRUIT/ID) row)))
|
(is (= 1 ((column :FRUIT/ID) row)))
|
||||||
(is (= "Apple" ((column :FRUIT/NAME) row))))
|
(is (= "Apple" ((column :FRUIT/NAME) row))))
|
||||||
(let [rs (p/-execute-all (ds)
|
(let [rs (p/-execute-all (ds)
|
||||||
["select * from fruit order by id"]
|
[(str "select * from fruit order by " (index))]
|
||||||
(default-options))]
|
(default-options))]
|
||||||
(is (every? map? rs))
|
(is (every? map? rs))
|
||||||
(is (= 1 ((column :FRUIT/ID) (first rs))))
|
(is (= 1 ((column :FRUIT/ID) (first rs))))
|
||||||
|
|
@ -110,7 +119,7 @@
|
||||||
(is (= "Orange" ((column :FRUIT/NAME) (last rs))))))
|
(is (= "Orange" ((column :FRUIT/NAME) (last rs))))))
|
||||||
(testing "unqualified row builder"
|
(testing "unqualified row builder"
|
||||||
(let [row (p/-execute-one (ds)
|
(let [row (p/-execute-one (ds)
|
||||||
["select * from fruit where id = ?" 2]
|
[(str "select * from fruit where " (index) " = ?") 2]
|
||||||
{:builder-fn rs/as-unqualified-maps})]
|
{:builder-fn rs/as-unqualified-maps})]
|
||||||
(is (map? row))
|
(is (map? row))
|
||||||
(is (contains? row (column :COST)))
|
(is (contains? row (column :COST)))
|
||||||
|
|
@ -119,24 +128,44 @@
|
||||||
(is (= "Banana" ((column :NAME) row)))))
|
(is (= "Banana" ((column :NAME) row)))))
|
||||||
(testing "lower-case row builder"
|
(testing "lower-case row builder"
|
||||||
(let [row (p/-execute-one (ds)
|
(let [row (p/-execute-one (ds)
|
||||||
["select * from fruit where id = ?" 3]
|
[(str "select * from fruit where " (index) " = ?") 3]
|
||||||
(assoc (default-options)
|
(assoc (default-options)
|
||||||
:builder-fn rs/as-lower-maps))]
|
:builder-fn rs/as-lower-maps))]
|
||||||
(is (map? row))
|
(is (map? row))
|
||||||
(is (contains? row :fruit/appearance))
|
(is (contains? row (col-kw :fruit/appearance)))
|
||||||
(is (nil? (:fruit/appearance row)))
|
(is (nil? ((col-kw :fruit/appearance) row)))
|
||||||
(is (= 3 (:fruit/id row)))
|
(is (= 3 ((col-kw :fruit/id) row)))
|
||||||
(is (= "Peach" (:fruit/name row)))))
|
(is (= "Peach" ((col-kw :fruit/name) row)))))
|
||||||
(testing "unqualified lower-case row builder"
|
(testing "unqualified lower-case row builder"
|
||||||
(let [row (p/-execute-one (ds)
|
(let [row (p/-execute-one (ds)
|
||||||
["select * from fruit where id = ?" 4]
|
[(str "select * from fruit where " (index) " = ?") 4]
|
||||||
{:builder-fn rs/as-unqualified-lower-maps})]
|
{:builder-fn rs/as-unqualified-lower-maps})]
|
||||||
(is (map? row))
|
(is (map? row))
|
||||||
|
(is (= 4 ((col-kw :id) row)))
|
||||||
|
(is (= "Orange" ((col-kw :name) row)))))
|
||||||
|
(testing "kebab-case row builder"
|
||||||
|
(let [row (p/-execute-one (ds)
|
||||||
|
[(str "select " (index) ",name,appearance as looks_like from fruit where " (index) " = ?") 3]
|
||||||
|
(assoc (default-options)
|
||||||
|
:builder-fn rs/as-kebab-maps))]
|
||||||
|
(is (map? row))
|
||||||
|
(is (contains? row (col-kw :fruit/looks-like)))
|
||||||
|
(is (nil? ((col-kw :fruit/looks-like) row)))
|
||||||
|
;; kebab-case strips leading _ from _id (XTDB):
|
||||||
|
(is (= 3 ((if (xtdb?) :id :fruit/id) row)))
|
||||||
|
(is (= "Peach" ((col-kw :fruit/name) row)))))
|
||||||
|
(testing "unqualified kebab-case row builder"
|
||||||
|
(let [row (p/-execute-one (ds)
|
||||||
|
[(str "select " (index) ",name,appearance as looks_like from fruit where " (index) " = ?") 4]
|
||||||
|
{:builder-fn rs/as-unqualified-kebab-maps})]
|
||||||
|
(is (map? row))
|
||||||
|
(is (contains? row :looks-like))
|
||||||
|
(is (= "juicy" (:looks-like row)))
|
||||||
(is (= 4 (:id row)))
|
(is (= 4 (:id row)))
|
||||||
(is (= "Orange" (:name row)))))
|
(is (= "Orange" (:name row)))))
|
||||||
(testing "custom row builder 1"
|
(testing "custom row builder 1"
|
||||||
(let [row (p/-execute-one (ds)
|
(let [row (p/-execute-one (ds)
|
||||||
["select fruit.*, id + 100 as newid from fruit where id = ?" 3]
|
[(str "select fruit.*, " (index) " + 100 as newid from fruit where " (index) " = ?") 3]
|
||||||
(assoc (default-options)
|
(assoc (default-options)
|
||||||
:builder-fn rs/as-modified-maps
|
:builder-fn rs/as-modified-maps
|
||||||
:label-fn str/lower-case
|
:label-fn str/lower-case
|
||||||
|
|
@ -149,7 +178,7 @@
|
||||||
(is (= "Peach" ((column :FRUIT/name) row)))))
|
(is (= "Peach" ((column :FRUIT/name) row)))))
|
||||||
(testing "custom row builder 2"
|
(testing "custom row builder 2"
|
||||||
(let [row (p/-execute-one (ds)
|
(let [row (p/-execute-one (ds)
|
||||||
["select fruit.*, id + 100 as newid from fruit where id = ?" 3]
|
[(str "select fruit.*, " (index) " + 100 as newid from fruit where " (index) " = ?") 3]
|
||||||
(assoc (default-options)
|
(assoc (default-options)
|
||||||
:builder-fn rs/as-modified-maps
|
:builder-fn rs/as-modified-maps
|
||||||
:label-fn str/lower-case
|
:label-fn str/lower-case
|
||||||
|
|
@ -157,12 +186,12 @@
|
||||||
(is (map? row))
|
(is (map? row))
|
||||||
(is (contains? row :vegetable/appearance))
|
(is (contains? row :vegetable/appearance))
|
||||||
(is (nil? (:vegetable/appearance row)))
|
(is (nil? (:vegetable/appearance row)))
|
||||||
(is (= 3 (:vegetable/id row)))
|
(is (= 3 ((if (xtdb?) :vegetable/_id :vegetable/id) row)))
|
||||||
(is (= 103 (:vegetable/newid row))) ; constant qualifier here
|
(is (= 103 (:vegetable/newid row))) ; constant qualifier here
|
||||||
(is (= "Peach" (:vegetable/name row)))))
|
(is (= "Peach" (:vegetable/name row)))))
|
||||||
(testing "adapted row builder"
|
(testing "adapted row builder"
|
||||||
(let [row (p/-execute-one (ds)
|
(let [row (p/-execute-one (ds)
|
||||||
["select * from fruit where id = ?" 3]
|
[(str "select * from fruit where " (index) " = ?") 3]
|
||||||
(assoc
|
(assoc
|
||||||
(default-options)
|
(default-options)
|
||||||
:builder-fn (rs/as-maps-adapter
|
:builder-fn (rs/as-maps-adapter
|
||||||
|
|
@ -182,6 +211,32 @@
|
||||||
(is (contains? row (column :FRUIT/appearance)))
|
(is (contains? row (column :FRUIT/appearance)))
|
||||||
(is (nil? ((column :FRUIT/appearance) row)))
|
(is (nil? ((column :FRUIT/appearance) row)))
|
||||||
(is (= 3 ((column :FRUIT/id) row)))
|
(is (= 3 ((column :FRUIT/id) row)))
|
||||||
|
(is (= "Peach" ((column :FRUIT/name) row))))
|
||||||
|
(let [builder (rs/as-maps-adapter
|
||||||
|
rs/as-modified-maps
|
||||||
|
(fn [^ResultSet rs _ ^Integer i]
|
||||||
|
(.getObject rs i)))
|
||||||
|
row (p/-execute-one (ds)
|
||||||
|
[(str "select * from fruit where " (index) " = ?") 3]
|
||||||
|
(assoc
|
||||||
|
(default-options)
|
||||||
|
:builder-fn (rs/as-maps-adapter
|
||||||
|
builder
|
||||||
|
(fn [^ResultSet rs
|
||||||
|
^ResultSetMetaData rsmeta
|
||||||
|
^Integer i]
|
||||||
|
(condp = (.getColumnType rsmeta i)
|
||||||
|
java.sql.Types/VARCHAR
|
||||||
|
(.getString rs i)
|
||||||
|
java.sql.Types/INTEGER
|
||||||
|
(.getLong rs i)
|
||||||
|
(.getObject rs i))))
|
||||||
|
:label-fn str/lower-case
|
||||||
|
:qualifier-fn identity))]
|
||||||
|
(is (map? row))
|
||||||
|
(is (contains? row (column :FRUIT/appearance)))
|
||||||
|
(is (nil? ((column :FRUIT/appearance) row)))
|
||||||
|
(is (= 3 ((column :FRUIT/id) row)))
|
||||||
(is (= "Peach" ((column :FRUIT/name) row))))))
|
(is (= "Peach" ((column :FRUIT/name) row))))))
|
||||||
|
|
||||||
(deftest test-row-number
|
(deftest test-row-number
|
||||||
|
|
@ -191,7 +246,7 @@
|
||||||
(testing "row-numbers on bare abstraction"
|
(testing "row-numbers on bare abstraction"
|
||||||
(is (= [1 2 3]
|
(is (= [1 2 3]
|
||||||
(into [] (map rs/row-number)
|
(into [] (map rs/row-number)
|
||||||
(p/-execute (ds) ["select * from fruit where id < ?" 4]
|
(p/-execute (ds) [(str "select * from fruit where " (index) " < ?") 4]
|
||||||
;; we do not need a real builder here...
|
;; we do not need a real builder here...
|
||||||
(cond-> {:builder-fn (constantly nil)}
|
(cond-> {:builder-fn (constantly nil)}
|
||||||
(derby?)
|
(derby?)
|
||||||
|
|
@ -202,7 +257,7 @@
|
||||||
(is (= [1 2 3]
|
(is (= [1 2 3]
|
||||||
(into [] (comp (map #(rs/datafiable-row % (ds) {}))
|
(into [] (comp (map #(rs/datafiable-row % (ds) {}))
|
||||||
(map rs/row-number))
|
(map rs/row-number))
|
||||||
(p/-execute (ds) ["select * from fruit where id < ?" 4]
|
(p/-execute (ds) [(str "select * from fruit where " (index) " < ?") 4]
|
||||||
;; ...but datafiable-row requires a real builder
|
;; ...but datafiable-row requires a real builder
|
||||||
(cond-> {:builder-fn rs/as-arrays}
|
(cond-> {:builder-fn rs/as-arrays}
|
||||||
(derby?)
|
(derby?)
|
||||||
|
|
@ -212,7 +267,7 @@
|
||||||
|
|
||||||
(deftest test-column-names
|
(deftest test-column-names
|
||||||
(testing "column-names on bare abstraction"
|
(testing "column-names on bare abstraction"
|
||||||
(is (= #{"id" "appearance" "grade" "cost" "name"}
|
(is (= #{(index) "appearance" "grade" "cost" "name"}
|
||||||
(reduce (fn [_ row]
|
(reduce (fn [_ row]
|
||||||
(-> row
|
(-> row
|
||||||
(->> (rs/column-names)
|
(->> (rs/column-names)
|
||||||
|
|
@ -220,11 +275,11 @@
|
||||||
(set)
|
(set)
|
||||||
(reduced))))
|
(reduced))))
|
||||||
nil
|
nil
|
||||||
(p/-execute (ds) ["select * from fruit where id < ?" 4]
|
(p/-execute (ds) [(str "select * from fruit where " (index) " < ?") 4]
|
||||||
;; column-names require a real builder
|
;; column-names require a real builder
|
||||||
{:builder-fn rs/as-arrays})))))
|
{:builder-fn rs/as-arrays})))))
|
||||||
(testing "column-names on realized row"
|
(testing "column-names on realized row"
|
||||||
(is (= #{"id" "appearance" "grade" "cost" "name"}
|
(is (= #{(index) "appearance" "grade" "cost" "name"}
|
||||||
(reduce (fn [_ row]
|
(reduce (fn [_ row]
|
||||||
(-> row
|
(-> row
|
||||||
(rs/datafiable-row (ds) {})
|
(rs/datafiable-row (ds) {})
|
||||||
|
|
@ -233,7 +288,7 @@
|
||||||
(set)
|
(set)
|
||||||
(reduced))))
|
(reduced))))
|
||||||
nil
|
nil
|
||||||
(p/-execute (ds) ["select * from fruit where id < ?" 4]
|
(p/-execute (ds) [(str "select * from fruit where " (index) " < ?") 4]
|
||||||
{:builder-fn rs/as-arrays}))))))
|
{:builder-fn rs/as-arrays}))))))
|
||||||
|
|
||||||
(deftest test-over-partition-all
|
(deftest test-over-partition-all
|
||||||
|
|
@ -254,31 +309,31 @@
|
||||||
(testing "no row builder is used"
|
(testing "no row builder is used"
|
||||||
(is (= [true]
|
(is (= [true]
|
||||||
(into [] (map map?) ; it looks like a real map now
|
(into [] (map map?) ; it looks like a real map now
|
||||||
(p/-execute (ds) ["select * from fruit where id = ?" 1]
|
(p/-execute (ds) [(str "select * from fruit where " (index) " = ?") 1]
|
||||||
{:builder-fn (constantly nil)}))))
|
{:builder-fn (constantly nil)}))))
|
||||||
(is (= ["Apple"]
|
(is (= ["Apple"]
|
||||||
(into [] (map :name) ; keyword selection works
|
(into [] (map :name) ; keyword selection works
|
||||||
(p/-execute (ds) ["select * from fruit where id = ?" 1]
|
(p/-execute (ds) [(str "select * from fruit where " (index) " = ?") 1]
|
||||||
{:builder-fn (constantly nil)}))))
|
{:builder-fn (constantly nil)}))))
|
||||||
(is (= [[2 [:name "Banana"]]]
|
(is (= [[2 [:name "Banana"]]]
|
||||||
(into [] (map (juxt #(get % "id") ; get by string key works
|
(into [] (map (juxt #(get % (index)) ; get by string key works
|
||||||
#(find % :name))) ; get MapEntry works
|
#(find % :name))) ; get MapEntry works
|
||||||
(p/-execute (ds) ["select * from fruit where id = ?" 2]
|
(p/-execute (ds) [(str "select * from fruit where " (index) " = ?") 2]
|
||||||
{:builder-fn (constantly nil)}))))
|
{:builder-fn (constantly nil)}))))
|
||||||
(is (= [{:id 3 :name "Peach"}]
|
(is (= [{(col-kw :id) 3 :name "Peach"}]
|
||||||
(into [] (map #(select-keys % [:id :name])) ; select-keys works
|
(into [] (map #(select-keys % [(col-kw :id) :name])) ; select-keys works
|
||||||
(p/-execute (ds) ["select * from fruit where id = ?" 3]
|
(p/-execute (ds) [(str "select * from fruit where " (index) " = ?") 3]
|
||||||
{:builder-fn (constantly nil)}))))
|
{:builder-fn (constantly nil)}))))
|
||||||
(is (= [[:orange 4]]
|
(is (= [[:orange 4]]
|
||||||
(into [] (map #(vector (if (contains? % :name) ; contains works
|
(into [] (map #(vector (if (contains? % :name) ; contains works
|
||||||
(keyword (str/lower-case (:name %)))
|
(keyword (str/lower-case (:name %)))
|
||||||
:unnamed)
|
:unnamed)
|
||||||
(get % :id 0))) ; get with not-found works
|
(get % (col-kw :id) 0))) ; get with not-found works
|
||||||
(p/-execute (ds) ["select * from fruit where id = ?" 4]
|
(p/-execute (ds) [(str "select * from fruit where " (index) " = ?") 4]
|
||||||
{:builder-fn (constantly nil)}))))
|
{:builder-fn (constantly nil)}))))
|
||||||
(is (= [{}]
|
(is (= [{}]
|
||||||
(into [] (map empty) ; return empty map without building
|
(into [] (map empty) ; return empty map without building
|
||||||
(p/-execute (ds) ["select * from fruit where id = ?" 1]
|
(p/-execute (ds) [(str "select * from fruit where " (index) " = ?") 1]
|
||||||
{:builder-fn (constantly nil)})))))
|
{:builder-fn (constantly nil)})))))
|
||||||
(testing "count does not build a map"
|
(testing "count does not build a map"
|
||||||
(let [count-builder (fn [_1 _2]
|
(let [count-builder (fn [_1 _2]
|
||||||
|
|
@ -286,7 +341,7 @@
|
||||||
(column-count [_] 13)))]
|
(column-count [_] 13)))]
|
||||||
(is (= [13]
|
(is (= [13]
|
||||||
(into [] (map count) ; count relies on columns, not row fields
|
(into [] (map count) ; count relies on columns, not row fields
|
||||||
(p/-execute (ds) ["select * from fruit where id = ?" 1]
|
(p/-execute (ds) [(str "select * from fruit where " (index) " = ?") 1]
|
||||||
{:builder-fn count-builder}))))))
|
{:builder-fn count-builder}))))))
|
||||||
(testing "assoc, dissoc, cons, seq, and = build maps"
|
(testing "assoc, dissoc, cons, seq, and = build maps"
|
||||||
(is (map? (reduce (fn [_ row] (reduced (assoc row :x 1)))
|
(is (map? (reduce (fn [_ row] (reduced (assoc row :x 1)))
|
||||||
|
|
@ -322,7 +377,40 @@
|
||||||
(p/-execute (ds) ["select * from fruit"] {})))))
|
(p/-execute (ds) ["select * from fruit"] {})))))
|
||||||
(is (every? map-entry? (reduce (fn [_ row] (reduced (seq row)))
|
(is (every? map-entry? (reduce (fn [_ row] (reduced (seq row)))
|
||||||
nil
|
nil
|
||||||
(p/-execute (ds) ["select * from fruit"] {})))))
|
(p/-execute (ds) ["select * from fruit"] {}))))
|
||||||
|
(is (map? (reduce (fn [_ row] (reduced (conj row {:a 1})))
|
||||||
|
nil
|
||||||
|
(p/-execute (ds) ["select * from fruit"] {}))))
|
||||||
|
(is (map? (reduce (fn [_ row] (reduced (conj row [:a 1])))
|
||||||
|
nil
|
||||||
|
(p/-execute (ds) ["select * from fruit"] {}))))
|
||||||
|
(is (map? (reduce (fn [_ row] (reduced (conj row {:a 1 :b 2})))
|
||||||
|
nil
|
||||||
|
(p/-execute (ds) ["select * from fruit"] {}))))
|
||||||
|
(is (= 1 (:a (reduce (fn [_ row] (reduced (conj row {:a 1})))
|
||||||
|
nil
|
||||||
|
(p/-execute (ds) ["select * from fruit"] {})))))
|
||||||
|
(is (= 1 (:a (reduce (fn [_ row] (reduced (conj row [:a 1])))
|
||||||
|
nil
|
||||||
|
(p/-execute (ds) ["select * from fruit"] {})))))
|
||||||
|
(is (= 1 (:a (reduce (fn [_ row] (reduced (conj row {:a 1 :b 2})))
|
||||||
|
nil
|
||||||
|
(p/-execute (ds) ["select * from fruit"] {})))))
|
||||||
|
(is (= 2 (:b (reduce (fn [_ row] (reduced (conj row {:a 1 :b 2})))
|
||||||
|
nil
|
||||||
|
(p/-execute (ds) ["select * from fruit"] {})))))
|
||||||
|
(is (vector? (reduce (fn [_ row] (reduced (conj row :a)))
|
||||||
|
nil
|
||||||
|
(p/-execute (ds) ["select * from fruit"]
|
||||||
|
{:builder-fn rs/as-arrays}))))
|
||||||
|
(is (= :a (peek (reduce (fn [_ row] (reduced (conj row :a)))
|
||||||
|
nil
|
||||||
|
(p/-execute (ds) ["select * from fruit"]
|
||||||
|
{:builder-fn rs/as-arrays})))))
|
||||||
|
(is (= :b (peek (reduce (fn [_ row] (reduced (conj row :a :b)))
|
||||||
|
nil
|
||||||
|
(p/-execute (ds) ["select * from fruit"]
|
||||||
|
{:builder-fn rs/as-arrays}))))))
|
||||||
(testing "datafiable-row builds map; with metadata"
|
(testing "datafiable-row builds map; with metadata"
|
||||||
(is (map? (reduce (fn [_ row] (reduced (rs/datafiable-row row (ds) {})))
|
(is (map? (reduce (fn [_ row] (reduced (rs/datafiable-row row (ds) {})))
|
||||||
nil
|
nil
|
||||||
|
|
@ -336,21 +424,29 @@
|
||||||
|
|
||||||
(defrecord Fruit [id name appearance cost grade])
|
(defrecord Fruit [id name appearance cost grade])
|
||||||
|
|
||||||
(defn fruit-builder [^ResultSet rs _]
|
(defn fruit-builder [^ResultSet rs ^ResultSetMetaData rsmeta]
|
||||||
(reify
|
(reify
|
||||||
rs/RowBuilder
|
rs/RowBuilder
|
||||||
(->row [_] (->Fruit (.getObject rs "id")
|
(->row [_] (->Fruit (.getObject rs ^String (index))
|
||||||
(.getObject rs "name")
|
(.getObject rs "name")
|
||||||
(.getObject rs "appearance")
|
(.getObject rs "appearance")
|
||||||
(.getObject rs "cost")
|
(.getObject rs "cost")
|
||||||
(.getObject rs "grade")))
|
(.getObject rs "grade")))
|
||||||
(with-column [_ row i] row)
|
|
||||||
(column-count [_] 0) ; no need to iterate over columns
|
(column-count [_] 0) ; no need to iterate over columns
|
||||||
|
(with-column [_ row i] row)
|
||||||
|
(with-column-value [_ row col v] row)
|
||||||
(row! [_ row] row)
|
(row! [_ row] row)
|
||||||
rs/ResultSetBuilder
|
rs/ResultSetBuilder
|
||||||
(->rs [_] (transient []))
|
(->rs [_] (transient []))
|
||||||
(with-row [_ rs row] (conj! rs row))
|
(with-row [_ rs row] (conj! rs row))
|
||||||
(rs! [_ rs] (persistent! rs))))
|
(rs! [_ rs] (persistent! rs))
|
||||||
|
clojure.lang.ILookup ; only supports :cols and :rsmeta
|
||||||
|
(valAt [this k] (get this k nil))
|
||||||
|
(valAt [this k not-found]
|
||||||
|
(case k
|
||||||
|
:cols [(col-kw :id) :name :appearance :cost :grade]
|
||||||
|
:rsmeta rsmeta
|
||||||
|
not-found))))
|
||||||
|
|
||||||
(deftest custom-map-builder
|
(deftest custom-map-builder
|
||||||
(let [row (p/-execute-one (ds)
|
(let [row (p/-execute-one (ds)
|
||||||
|
|
@ -381,7 +477,7 @@
|
||||||
metadata))))
|
metadata))))
|
||||||
|
|
||||||
(deftest clob-reading
|
(deftest clob-reading
|
||||||
(when-not (or (mssql?) (mysql?) (postgres?)) ; no clob in these
|
(when-not (or (mssql?) (mysql?) (postgres?) (xtdb?)) ; no clob in these
|
||||||
(with-open [con (p/get-connection (ds) {})]
|
(with-open [con (p/get-connection (ds) {})]
|
||||||
(try
|
(try
|
||||||
(p/-execute-one con ["DROP TABLE CLOBBER"] {})
|
(p/-execute-one con ["DROP TABLE CLOBBER"] {})
|
||||||
|
|
@ -411,10 +507,10 @@ CREATE TABLE CLOBBER (
|
||||||
(testing "get n on bare abstraction over arrays"
|
(testing "get n on bare abstraction over arrays"
|
||||||
(is (= [1 2 3]
|
(is (= [1 2 3]
|
||||||
(into [] (map #(get % 0))
|
(into [] (map #(get % 0))
|
||||||
(p/-execute (ds) ["select id from fruit where id < ?" 4]
|
(p/-execute (ds) [(str "select " (index) " from fruit where " (index) " < ? order by " (index)) 4]
|
||||||
{:builder-fn rs/as-arrays})))))
|
{:builder-fn rs/as-arrays})))))
|
||||||
(testing "nth on bare abstraction over arrays"
|
(testing "nth on bare abstraction over arrays"
|
||||||
(is (= [1 2 3]
|
(is (= [1 2 3]
|
||||||
(into [] (map #(nth % 0))
|
(into [] (map #(nth % 0))
|
||||||
(p/-execute (ds) ["select id from fruit where id < ?" 4]
|
(p/-execute (ds) [(str "select " (index) " from fruit where " (index) " < ? order by " (index)) 4]
|
||||||
{:builder-fn rs/as-arrays}))))))
|
{:builder-fn rs/as-arrays}))))))
|
||||||
|
|
|
||||||
|
|
@ -1,11 +1,10 @@
|
||||||
;; copyright (c) 2019-2020 Sean Corfield, all rights reserved
|
;; copyright (c) 2019-2025 Sean Corfield, all rights reserved
|
||||||
|
|
||||||
(ns next.jdbc.specs-test
|
(ns next.jdbc.specs-test
|
||||||
"Stub test namespace for the specs.
|
"Stub test namespace for the specs.
|
||||||
|
|
||||||
The specs are used (and 'tested') as part of the tests for the
|
The specs are used (and 'tested') as part of the tests for the
|
||||||
next.jdbc and next.jdbc.sql namespaces."
|
next.jdbc and next.jdbc.sql namespaces."
|
||||||
(:require [clojure.test :refer [deftest is testing]]
|
(:require [next.jdbc.specs]))
|
||||||
[next.jdbc.specs :refer :all]))
|
|
||||||
|
|
||||||
(set! *warn-on-reflection* true)
|
(set! *warn-on-reflection* true)
|
||||||
|
|
|
||||||
|
|
@ -1,8 +1,8 @@
|
||||||
;; copyright (c) 2019-2020 Sean Corfield, all rights reserved
|
;; copyright (c) 2019-2025 Sean Corfield, all rights reserved
|
||||||
|
|
||||||
(ns next.jdbc.sql.builder-test
|
(ns next.jdbc.sql.builder-test
|
||||||
"Tests for the SQL string building functions in next.jdbc.sql.builder."
|
"Tests for the SQL string building functions in next.jdbc.sql.builder."
|
||||||
(:require [clojure.test :refer [deftest is testing]]
|
(:require [lazytest.experimental.interfaces.clojure-test :refer [deftest is testing thrown?]]
|
||||||
[next.jdbc.quoted :refer [mysql sql-server]]
|
[next.jdbc.quoted :refer [mysql sql-server]]
|
||||||
[next.jdbc.sql.builder :as builder]))
|
[next.jdbc.sql.builder :as builder]))
|
||||||
|
|
||||||
|
|
@ -11,17 +11,39 @@
|
||||||
(deftest test-by-keys
|
(deftest test-by-keys
|
||||||
(testing ":where clause"
|
(testing ":where clause"
|
||||||
(is (= (builder/by-keys {:a nil :b 42 :c "s"} :where {})
|
(is (= (builder/by-keys {:a nil :b 42 :c "s"} :where {})
|
||||||
|
["WHERE a IS NULL AND b = ? AND c = ?" 42 "s"]))
|
||||||
|
(is (= (builder/by-keys {:q/a nil :q/b 42 :q/c "s"} :where {})
|
||||||
["WHERE a IS NULL AND b = ? AND c = ?" 42 "s"])))
|
["WHERE a IS NULL AND b = ? AND c = ?" 42 "s"])))
|
||||||
(testing ":set clause"
|
(testing ":set clause"
|
||||||
(is (= (builder/by-keys {:a nil :b 42 :c "s"} :set {})
|
(is (= (builder/by-keys {:a nil :b 42 :c "s"} :set {})
|
||||||
|
["SET a = ?, b = ?, c = ?" nil 42 "s"]))
|
||||||
|
(is (= (builder/by-keys {:q/a nil :q/b 42 :q/c "s"} :set {})
|
||||||
["SET a = ?, b = ?, c = ?" nil 42 "s"]))))
|
["SET a = ?, b = ?, c = ?" nil 42 "s"]))))
|
||||||
|
|
||||||
|
(deftest test-as-cols
|
||||||
|
(is (= (builder/as-cols [:a :b :c] {})
|
||||||
|
"a, b, c"))
|
||||||
|
(is (= (builder/as-cols [[:a :aa] :b ["count(*)" :c]] {})
|
||||||
|
"a AS aa, b, count(*) AS c"))
|
||||||
|
(is (= (builder/as-cols [[:a :aa] :b ["count(*)" :c]] {:column-fn mysql})
|
||||||
|
"`a` AS `aa`, `b`, count(*) AS `c`"))
|
||||||
|
(is (= (builder/as-cols [:q/a :q/b :q/c] {})
|
||||||
|
"a, b, c"))
|
||||||
|
(is (= (builder/as-cols [[:q/a :q/aa] :q/b ["count(*)" :q/c]] {})
|
||||||
|
"a AS aa, b, count(*) AS c"))
|
||||||
|
(is (= (builder/as-cols [[:q/a :q/aa] :q/b ["count(*)" :q/c]] {:column-fn mysql})
|
||||||
|
"`a` AS `aa`, `b`, count(*) AS `c`")))
|
||||||
|
|
||||||
(deftest test-as-keys
|
(deftest test-as-keys
|
||||||
(is (= (builder/as-keys {:a nil :b 42 :c "s"} {})
|
(is (= (builder/as-keys {:a nil :b 42 :c "s"} {})
|
||||||
|
"a, b, c"))
|
||||||
|
(is (= (builder/as-keys {:q/a nil :q/b 42 :q/c "s"} {})
|
||||||
"a, b, c")))
|
"a, b, c")))
|
||||||
|
|
||||||
(deftest test-as-?
|
(deftest test-as-?
|
||||||
(is (= (builder/as-? {:a nil :b 42 :c "s"} {})
|
(is (= (builder/as-? {:a nil :b 42 :c "s"} {})
|
||||||
|
"?, ?, ?"))
|
||||||
|
(is (= (builder/as-? {:q/a nil :q/b 42 :q/c "s"} {})
|
||||||
"?, ?, ?")))
|
"?, ?, ?")))
|
||||||
|
|
||||||
(deftest test-for-query
|
(deftest test-for-query
|
||||||
|
|
@ -37,6 +59,18 @@
|
||||||
{:id nil}
|
{:id nil}
|
||||||
{:table-fn sql-server :column-fn mysql
|
{:table-fn sql-server :column-fn mysql
|
||||||
:suffix "FOR UPDATE"})
|
:suffix "FOR UPDATE"})
|
||||||
|
["SELECT * FROM [user] WHERE `id` IS NULL FOR UPDATE"]))
|
||||||
|
(is (= (builder/for-query
|
||||||
|
:t/user
|
||||||
|
{:q/id 9}
|
||||||
|
{:table-fn sql-server :column-fn mysql :order-by [:x/a [:x/b :desc]]})
|
||||||
|
["SELECT * FROM [user] WHERE `id` = ? ORDER BY `a`, `b` DESC" 9]))
|
||||||
|
(is (= (builder/for-query :t/user {:q/id nil} {:table-fn sql-server :column-fn mysql})
|
||||||
|
["SELECT * FROM [user] WHERE `id` IS NULL"]))
|
||||||
|
(is (= (builder/for-query :t/user
|
||||||
|
{:q/id nil}
|
||||||
|
{:table-fn sql-server :column-fn mysql
|
||||||
|
:suffix "FOR UPDATE"})
|
||||||
["SELECT * FROM [user] WHERE `id` IS NULL FOR UPDATE"])))
|
["SELECT * FROM [user] WHERE `id` IS NULL FOR UPDATE"])))
|
||||||
(testing "by where clause"
|
(testing "by where clause"
|
||||||
(is (= (builder/for-query
|
(is (= (builder/for-query
|
||||||
|
|
@ -44,7 +78,59 @@
|
||||||
["id = ? and opt is null" 9]
|
["id = ? and opt is null" 9]
|
||||||
{:table-fn sql-server :column-fn mysql :order-by [:a [:b :desc]]})
|
{:table-fn sql-server :column-fn mysql :order-by [:a [:b :desc]]})
|
||||||
[(str "SELECT * FROM [user] WHERE id = ? and opt is null"
|
[(str "SELECT * FROM [user] WHERE id = ? and opt is null"
|
||||||
" ORDER BY `a`, `b` DESC") 9]))))
|
" ORDER BY `a`, `b` DESC") 9])))
|
||||||
|
(testing "by :all"
|
||||||
|
(is (= (builder/for-query
|
||||||
|
:user
|
||||||
|
:all
|
||||||
|
{:table-fn sql-server :column-fn mysql :order-by [:a [:b :desc]]})
|
||||||
|
["SELECT * FROM [user] ORDER BY `a`, `b` DESC"])))
|
||||||
|
(testing "top N"
|
||||||
|
(is (= (builder/for-query
|
||||||
|
:user
|
||||||
|
{:id 9}
|
||||||
|
{:table-fn sql-server :column-fn mysql :order-by [:a [:b :desc]]
|
||||||
|
:top 42})
|
||||||
|
["SELECT TOP ? * FROM [user] WHERE `id` = ? ORDER BY `a`, `b` DESC"
|
||||||
|
42 9])))
|
||||||
|
(testing "limit"
|
||||||
|
(testing "without offset"
|
||||||
|
(is (= (builder/for-query
|
||||||
|
:user
|
||||||
|
{:id 9}
|
||||||
|
{:table-fn sql-server :column-fn mysql :order-by [:a [:b :desc]]
|
||||||
|
:limit 42})
|
||||||
|
[(str "SELECT * FROM [user] WHERE `id` = ?"
|
||||||
|
" ORDER BY `a`, `b` DESC LIMIT ?")
|
||||||
|
9 42])))
|
||||||
|
(testing "with offset"
|
||||||
|
(is (= (builder/for-query
|
||||||
|
:user
|
||||||
|
{:id 9}
|
||||||
|
{:table-fn sql-server :column-fn mysql :order-by [:a [:b :desc]]
|
||||||
|
:limit 42 :offset 13})
|
||||||
|
[(str "SELECT * FROM [user] WHERE `id` = ?"
|
||||||
|
" ORDER BY `a`, `b` DESC LIMIT ? OFFSET ?")
|
||||||
|
9 42 13]))))
|
||||||
|
(testing "offset"
|
||||||
|
(testing "without fetch"
|
||||||
|
(is (= (builder/for-query
|
||||||
|
:user
|
||||||
|
{:id 9}
|
||||||
|
{:table-fn sql-server :column-fn mysql :order-by [:a [:b :desc]]
|
||||||
|
:offset 13})
|
||||||
|
[(str "SELECT * FROM [user] WHERE `id` = ?"
|
||||||
|
" ORDER BY `a`, `b` DESC OFFSET ? ROWS")
|
||||||
|
9 13])))
|
||||||
|
(testing "with fetch"
|
||||||
|
(is (= (builder/for-query
|
||||||
|
:user
|
||||||
|
{:id 9}
|
||||||
|
{:table-fn sql-server :column-fn mysql :order-by [:a [:b :desc]]
|
||||||
|
:offset 13 :fetch 42})
|
||||||
|
[(str "SELECT * FROM [user] WHERE `id` = ?"
|
||||||
|
" ORDER BY `a`, `b` DESC OFFSET ? ROWS FETCH NEXT ? ROWS ONLY")
|
||||||
|
9 13 42])))))
|
||||||
|
|
||||||
(deftest test-for-delete
|
(deftest test-for-delete
|
||||||
(testing "by example"
|
(testing "by example"
|
||||||
|
|
@ -52,17 +138,27 @@
|
||||||
:user
|
:user
|
||||||
{:opt nil :id 9}
|
{:opt nil :id 9}
|
||||||
{:table-fn sql-server :column-fn mysql})
|
{:table-fn sql-server :column-fn mysql})
|
||||||
|
["DELETE FROM [user] WHERE `opt` IS NULL AND `id` = ?" 9]))
|
||||||
|
(is (= (builder/for-delete
|
||||||
|
:t/user
|
||||||
|
{:q/opt nil :q/id 9}
|
||||||
|
{:table-fn sql-server :column-fn mysql})
|
||||||
["DELETE FROM [user] WHERE `opt` IS NULL AND `id` = ?" 9])))
|
["DELETE FROM [user] WHERE `opt` IS NULL AND `id` = ?" 9])))
|
||||||
(testing "by where clause"
|
(testing "by where clause"
|
||||||
(is (= (builder/for-delete
|
(is (= (builder/for-delete
|
||||||
:user
|
:user
|
||||||
["id = ? and opt is null" 9]
|
["id = ? and opt is null" 9]
|
||||||
{:table-fn sql-server :column-fn mysql})
|
{:table-fn sql-server :column-fn mysql})
|
||||||
|
["DELETE FROM [user] WHERE id = ? and opt is null" 9]))
|
||||||
|
(is (= (builder/for-delete
|
||||||
|
:t/user
|
||||||
|
["id = ? and opt is null" 9]
|
||||||
|
{:table-fn sql-server :column-fn mysql})
|
||||||
["DELETE FROM [user] WHERE id = ? and opt is null" 9]))))
|
["DELETE FROM [user] WHERE id = ? and opt is null" 9]))))
|
||||||
|
|
||||||
(deftest test-for-update
|
(deftest test-for-update
|
||||||
(testing "empty example (would be a SQL error)"
|
(testing "empty example (would be a SQL error)"
|
||||||
(is (thrown? AssertionError ; changed in #44
|
(is (thrown? IllegalArgumentException
|
||||||
(builder/for-update :user
|
(builder/for-update :user
|
||||||
{:status 42}
|
{:status 42}
|
||||||
{}
|
{}
|
||||||
|
|
@ -72,6 +168,11 @@
|
||||||
{:status 42}
|
{:status 42}
|
||||||
{:id 9}
|
{:id 9}
|
||||||
{:table-fn sql-server :column-fn mysql})
|
{:table-fn sql-server :column-fn mysql})
|
||||||
|
["UPDATE [user] SET `status` = ? WHERE `id` = ?" 42 9]))
|
||||||
|
(is (= (builder/for-update :t/user
|
||||||
|
{:q/status 42}
|
||||||
|
{:q/id 9}
|
||||||
|
{:table-fn sql-server :column-fn mysql})
|
||||||
["UPDATE [user] SET `status` = ? WHERE `id` = ?" 42 9])))
|
["UPDATE [user] SET `status` = ? WHERE `id` = ?" 42 9])))
|
||||||
(testing "by where clause, with nil set value"
|
(testing "by where clause, with nil set value"
|
||||||
(is (= (builder/for-update :user
|
(is (= (builder/for-update :user
|
||||||
|
|
@ -85,12 +186,38 @@
|
||||||
(is (= (builder/for-insert :user
|
(is (= (builder/for-insert :user
|
||||||
{:id 9 :status 42 :opt nil}
|
{:id 9 :status 42 :opt nil}
|
||||||
{:table-fn sql-server :column-fn mysql})
|
{:table-fn sql-server :column-fn mysql})
|
||||||
|
["INSERT INTO [user] (`id`, `status`, `opt`) VALUES (?, ?, ?)" 9 42 nil]))
|
||||||
|
(is (= (builder/for-insert :t/user
|
||||||
|
{:q/id 9 :q/status 42 :q/opt nil}
|
||||||
|
{:table-fn sql-server :column-fn mysql})
|
||||||
["INSERT INTO [user] (`id`, `status`, `opt`) VALUES (?, ?, ?)" 9 42 nil])))
|
["INSERT INTO [user] (`id`, `status`, `opt`) VALUES (?, ?, ?)" 9 42 nil])))
|
||||||
(testing "multi-row insert"
|
(testing "multi-row insert (normal mode)"
|
||||||
(is (= (builder/for-insert-multi :user
|
(is (= (builder/for-insert-multi :user
|
||||||
[:id :status]
|
[:id :status]
|
||||||
[[42 "hello"]
|
[[42 "hello"]
|
||||||
[35 "world"]
|
[35 "world"]
|
||||||
[64 "dollars"]]
|
[64 "dollars"]]
|
||||||
{:table-fn sql-server :column-fn mysql})
|
{:table-fn sql-server :column-fn mysql})
|
||||||
["INSERT INTO [user] (`id`, `status`) VALUES (?, ?), (?, ?), (?, ?)" 42 "hello" 35 "world" 64 "dollars"]))))
|
["INSERT INTO [user] (`id`, `status`) VALUES (?, ?), (?, ?), (?, ?)" 42 "hello" 35 "world" 64 "dollars"]))
|
||||||
|
(is (= (builder/for-insert-multi :t/user
|
||||||
|
[:q/id :q/status]
|
||||||
|
[[42 "hello"]
|
||||||
|
[35 "world"]
|
||||||
|
[64 "dollars"]]
|
||||||
|
{:table-fn sql-server :column-fn mysql})
|
||||||
|
["INSERT INTO [user] (`id`, `status`) VALUES (?, ?), (?, ?), (?, ?)" 42 "hello" 35 "world" 64 "dollars"])))
|
||||||
|
(testing "multi-row insert (batch mode)"
|
||||||
|
(is (= (builder/for-insert-multi :user
|
||||||
|
[:id :status]
|
||||||
|
[[42 "hello"]
|
||||||
|
[35 "world"]
|
||||||
|
[64 "dollars"]]
|
||||||
|
{:table-fn sql-server :column-fn mysql :batch true})
|
||||||
|
["INSERT INTO [user] (`id`, `status`) VALUES (?, ?)" [42 "hello"] [35 "world"] [64 "dollars"]]))
|
||||||
|
(is (= (builder/for-insert-multi :t/user
|
||||||
|
[:q/id :q/status]
|
||||||
|
[[42 "hello"]
|
||||||
|
[35 "world"]
|
||||||
|
[64 "dollars"]]
|
||||||
|
{:table-fn sql-server :column-fn mysql :batch true})
|
||||||
|
["INSERT INTO [user] (`id`, `status`) VALUES (?, ?)" [42 "hello"] [35 "world"] [64 "dollars"]]))))
|
||||||
|
|
|
||||||
|
|
@ -1,134 +1,262 @@
|
||||||
;; copyright (c) 2019-2020 Sean Corfield, all rights reserved
|
;; copyright (c) 2019-2025 Sean Corfield, all rights reserved
|
||||||
|
|
||||||
(ns next.jdbc.sql-test
|
(ns next.jdbc.sql-test
|
||||||
"Tests for the syntactic sugar SQL functions."
|
"Tests for the syntactic sugar SQL functions."
|
||||||
(:require [clojure.test :refer [deftest is testing use-fixtures]]
|
(:require [lazytest.core :refer [around set-ns-context!]]
|
||||||
|
[lazytest.experimental.interfaces.clojure-test :refer [deftest is testing thrown?]]
|
||||||
|
[next.jdbc :as jdbc]
|
||||||
[next.jdbc.specs :as specs]
|
[next.jdbc.specs :as specs]
|
||||||
[next.jdbc.sql :as sql]
|
[next.jdbc.sql :as sql]
|
||||||
[next.jdbc.test-fixtures
|
[next.jdbc.test-fixtures
|
||||||
:refer [with-test-db ds column default-options
|
:refer [col-kw column default-options derby? ds index jtds?
|
||||||
derby? maria? mssql? mysql? postgres? sqlite?]]))
|
maria? mssql? mysql? postgres? sqlite? with-test-db xtdb?]]
|
||||||
|
[next.jdbc.types :refer [as-other as-real as-varchar]]))
|
||||||
|
|
||||||
(set! *warn-on-reflection* true)
|
(set! *warn-on-reflection* true)
|
||||||
|
|
||||||
(use-fixtures :once with-test-db)
|
(set-ns-context! [(around [f] (with-test-db f))])
|
||||||
|
|
||||||
(specs/instrument)
|
(specs/instrument)
|
||||||
|
|
||||||
(deftest test-query
|
(deftest test-query
|
||||||
(let [rs (sql/query (ds) ["select * from fruit order by id"]
|
(let [ds-opts (jdbc/with-options (ds) (default-options))
|
||||||
(default-options))]
|
rs (sql/query ds-opts [(str "select * from fruit order by " (index))])]
|
||||||
(is (= 4 (count rs)))
|
(is (= 4 (count rs)))
|
||||||
(is (every? map? rs))
|
(is (every? map? rs))
|
||||||
(is (every? meta rs))
|
(is (every? meta rs))
|
||||||
(is (= 1 ((column :FRUIT/ID) (first rs))))
|
(is (= 1 ((column :FRUIT/ID) (first rs))))
|
||||||
(is (= 4 ((column :FRUIT/ID) (last rs))))))
|
(is (= 4 ((column :FRUIT/ID) (last rs))))))
|
||||||
|
|
||||||
(deftest test-find-by-keys
|
(deftest test-find-all-offset
|
||||||
(let [rs (sql/find-by-keys (ds) :fruit {:appearance "neon-green"})]
|
(let [ds-opts (jdbc/with-options (ds) (default-options))
|
||||||
(is (vector? rs))
|
rs (sql/find-by-keys
|
||||||
(is (= [] rs)))
|
ds-opts :fruit :all
|
||||||
(let [rs (sql/find-by-keys (ds) :fruit {:appearance "yellow"}
|
(assoc
|
||||||
(default-options))]
|
(if (or (mysql?) (sqlite?))
|
||||||
(is (= 1 (count rs)))
|
{:limit 2 :offset 1}
|
||||||
|
{:offset 1 :fetch 2})
|
||||||
|
:columns [(col-kw :ID)
|
||||||
|
["CASE WHEN grade > 91 THEN 'ok ' ELSE 'bad' END"
|
||||||
|
:QUALITY]]
|
||||||
|
:order-by [(col-kw :id)]))]
|
||||||
|
(is (= 2 (count rs)))
|
||||||
(is (every? map? rs))
|
(is (every? map? rs))
|
||||||
(is (every? meta rs))
|
(is (every? meta rs))
|
||||||
(is (= 2 ((column :FRUIT/ID) (first rs))))))
|
(is (every? #(= 2 (count %)) rs))
|
||||||
|
(is (= 2 ((column :FRUIT/ID) (first rs))))
|
||||||
|
(is (= "ok " ((column :QUALITY) (first rs))))
|
||||||
|
(is (= 3 ((column :FRUIT/ID) (last rs))))
|
||||||
|
(is (= "bad" ((column :QUALITY) (last rs))))))
|
||||||
|
|
||||||
|
(deftest test-find-by-keys
|
||||||
|
(let [ds-opts (jdbc/with-options (ds) (default-options))]
|
||||||
|
(let [rs (sql/find-by-keys ds-opts :fruit {:appearance "neon-green"})]
|
||||||
|
(is (vector? rs))
|
||||||
|
(is (= [] rs)))
|
||||||
|
(let [rs (sql/find-by-keys ds-opts :fruit {:appearance "yellow"})]
|
||||||
|
(is (= 1 (count rs)))
|
||||||
|
(is (every? map? rs))
|
||||||
|
(is (every? meta rs))
|
||||||
|
(is (= 2 ((column :FRUIT/ID) (first rs)))))))
|
||||||
|
|
||||||
|
(deftest test-aggregate-by-keys
|
||||||
|
(let [ds-opts (jdbc/with-options (ds) (default-options))]
|
||||||
|
(let [count-v (sql/aggregate-by-keys ds-opts :fruit "count(*)" {:appearance "neon-green"})]
|
||||||
|
(is (number? count-v))
|
||||||
|
(is (= 0 count-v)))
|
||||||
|
(let [count-v (sql/aggregate-by-keys ds-opts :fruit "count(*)" {:appearance "yellow"})]
|
||||||
|
(is (= 1 count-v)))
|
||||||
|
(let [count-v (sql/aggregate-by-keys ds-opts :fruit "count(*)" :all)]
|
||||||
|
(is (= 4 count-v)))
|
||||||
|
(let [max-id (sql/aggregate-by-keys ds-opts :fruit (str "max(" (index) ")") :all)]
|
||||||
|
(is (= 4 max-id)))
|
||||||
|
(when-not (xtdb?) ; XTDB does not support min/max on strings?
|
||||||
|
(let [min-name (sql/aggregate-by-keys ds-opts :fruit "min(name)" :all)]
|
||||||
|
(is (= "Apple" min-name))))
|
||||||
|
(is (thrown? IllegalArgumentException
|
||||||
|
(sql/aggregate-by-keys ds-opts :fruit "count(*)" :all {:columns []})))))
|
||||||
|
|
||||||
(deftest test-get-by-id
|
(deftest test-get-by-id
|
||||||
(is (nil? (sql/get-by-id (ds) :fruit -1)))
|
(let [ds-opts (jdbc/with-options (ds) (default-options))]
|
||||||
(let [row (sql/get-by-id (ds) :fruit 3 (default-options))]
|
(is (nil? (sql/get-by-id ds-opts :fruit -1 (col-kw :id) {})))
|
||||||
(is (map? row))
|
(let [row (sql/get-by-id ds-opts :fruit 3 (col-kw :id) {})]
|
||||||
(is (= "Peach" ((column :FRUIT/NAME) row))))
|
(is (map? row))
|
||||||
(let [row (sql/get-by-id (ds) :fruit "juicy" :appearance (default-options))]
|
(is (= "Peach" ((column :FRUIT/NAME) row))))
|
||||||
(is (map? row))
|
(let [row (sql/get-by-id ds-opts :fruit "juicy" :appearance {})]
|
||||||
(is (= 4 ((column :FRUIT/ID) row)))
|
(is (map? row))
|
||||||
(is (= "Orange" ((column :FRUIT/NAME) row))))
|
(is (= 4 ((column :FRUIT/ID) row)))
|
||||||
(let [row (sql/get-by-id (ds) :fruit "Banana" :FRUIT/NAME (default-options))]
|
(is (= "Orange" ((column :FRUIT/NAME) row))))
|
||||||
(is (map? row))
|
(let [row (sql/get-by-id ds-opts :fruit "Banana" :FRUIT/NAME {})]
|
||||||
(is (= 2 ((column :FRUIT/ID) row)))))
|
(is (map? row))
|
||||||
|
(is (= 2 ((column :FRUIT/ID) row))))))
|
||||||
|
|
||||||
|
(defn- update-count [n]
|
||||||
|
(if (xtdb?)
|
||||||
|
{:next.jdbc/update-count 0}
|
||||||
|
{:next.jdbc/update-count n}))
|
||||||
|
|
||||||
(deftest test-update!
|
(deftest test-update!
|
||||||
(try
|
(let [ds-opts (jdbc/with-options (ds) (default-options))]
|
||||||
(is (= {:next.jdbc/update-count 1}
|
(try
|
||||||
(sql/update! (ds) :fruit {:appearance "brown"} {:id 2})))
|
(is (= (update-count 1)
|
||||||
(is (= "brown" ((column :FRUIT/APPEARANCE)
|
(sql/update! ds-opts :fruit {:appearance "brown"} {(col-kw :id) 2})))
|
||||||
(sql/get-by-id (ds) :fruit 2 (default-options)))))
|
(is (= "brown" ((column :FRUIT/APPEARANCE)
|
||||||
(finally
|
(sql/get-by-id ds-opts :fruit 2 (col-kw :id) {}))))
|
||||||
(sql/update! (ds) :fruit {:appearance "yellow"} {:id 2})))
|
(finally
|
||||||
(try
|
(sql/update! ds-opts :fruit {:appearance "yellow"} {(col-kw :id) 2})))
|
||||||
(is (= {:next.jdbc/update-count 1}
|
(try
|
||||||
(sql/update! (ds) :fruit {:appearance "green"}
|
(is (= (update-count 1)
|
||||||
["name = ?" "Banana"])))
|
(sql/update! ds-opts :fruit {:appearance "green"}
|
||||||
(is (= "green" ((column :FRUIT/APPEARANCE)
|
["name = ?" "Banana"])))
|
||||||
(sql/get-by-id (ds) :fruit 2 (default-options)))))
|
(is (= "green" ((column :FRUIT/APPEARANCE)
|
||||||
(finally
|
(sql/get-by-id ds-opts :fruit 2 (col-kw :id) {}))))
|
||||||
(sql/update! (ds) :fruit {:appearance "yellow"} {:id 2}))))
|
(finally
|
||||||
|
(sql/update! ds-opts :fruit {:appearance "yellow"} {(col-kw :id) 2})))))
|
||||||
|
|
||||||
(deftest test-insert-delete
|
(deftest test-insert-delete
|
||||||
(let [new-key (cond (derby?) :1
|
(let [new-key (cond (derby?) :1
|
||||||
|
(jtds?) :ID
|
||||||
(maria?) :insert_id
|
(maria?) :insert_id
|
||||||
(mssql?) :GENERATED_KEYS
|
(mssql?) :GENERATED_KEYS
|
||||||
(mysql?) :GENERATED_KEY
|
(mysql?) :GENERATED_KEY
|
||||||
(postgres?) :fruit/id
|
(postgres?) :fruit/id
|
||||||
(sqlite?) (keyword "last_insert_rowid()")
|
;; XTDB does not return the generated key so we fix it
|
||||||
|
;; to be the one we insert here, and then fake it in all
|
||||||
|
;; the other tests.
|
||||||
|
(xtdb?) (constantly 5)
|
||||||
:else :FRUIT/ID)]
|
:else :FRUIT/ID)]
|
||||||
(testing "single insert/delete"
|
(testing "single insert/delete"
|
||||||
(is (== 5 (new-key (sql/insert! (ds) :fruit
|
(is (== 5 (new-key (sql/insert! (ds) :fruit
|
||||||
{:name "Kiwi" :appearance "green & fuzzy"
|
(cond-> {:name (as-varchar "Kiwi")
|
||||||
:cost 100 :grade 99.9}))))
|
:appearance "green & fuzzy"
|
||||||
|
:cost 100 :grade (as-real 99.9)}
|
||||||
|
(xtdb?)
|
||||||
|
(assoc :_id 5))
|
||||||
|
{:suffix
|
||||||
|
(when (sqlite?)
|
||||||
|
"RETURNING *")}))))
|
||||||
(is (= 5 (count (sql/query (ds) ["select * from fruit"]))))
|
(is (= 5 (count (sql/query (ds) ["select * from fruit"]))))
|
||||||
(is (= {:next.jdbc/update-count 1}
|
(is (= (update-count 1)
|
||||||
(sql/delete! (ds) :fruit {:id 5})))
|
(sql/delete! (ds) :fruit {(col-kw :id) 5})))
|
||||||
(is (= 4 (count (sql/query (ds) ["select * from fruit"])))))
|
(is (= 4 (count (sql/query (ds) ["select * from fruit"])))))
|
||||||
(testing "multiple insert/delete"
|
(testing "multiple insert/delete"
|
||||||
(is (= (cond (derby?)
|
(is (= (cond (derby?)
|
||||||
[nil] ; WTF Apache Derby?
|
[nil] ; WTF Apache Derby?
|
||||||
(mssql?)
|
(mssql?)
|
||||||
[8M]
|
[8M]
|
||||||
(sqlite?)
|
(maria?)
|
||||||
[8]
|
[6]
|
||||||
|
(xtdb?)
|
||||||
|
[]
|
||||||
:else
|
:else
|
||||||
[6 7 8])
|
[6 7 8])
|
||||||
(mapv new-key
|
(mapv new-key
|
||||||
(sql/insert-multi! (ds) :fruit
|
(sql/insert-multi! (ds) :fruit
|
||||||
[:name :appearance :cost :grade]
|
(cond->> [:name :appearance :cost :grade]
|
||||||
[["Kiwi" "green & fuzzy" 100 99.9]
|
(xtdb?) (cons :_id))
|
||||||
["Grape" "black" 10 50]
|
(cond->> [["Kiwi" "green & fuzzy" 100 99.9]
|
||||||
["Lemon" "yellow" 20 9.9]]))))
|
["Grape" "black" 10 50]
|
||||||
|
["Lemon" "yellow" 20 9.9]]
|
||||||
|
(xtdb?)
|
||||||
|
(map cons [6 7 8]))
|
||||||
|
{:suffix
|
||||||
|
(when (sqlite?)
|
||||||
|
"RETURNING *")}))))
|
||||||
(is (= 7 (count (sql/query (ds) ["select * from fruit"]))))
|
(is (= 7 (count (sql/query (ds) ["select * from fruit"]))))
|
||||||
(is (= {:next.jdbc/update-count 1}
|
(is (= (update-count 1)
|
||||||
(sql/delete! (ds) :fruit {:id 6})))
|
(sql/delete! (ds) :fruit {(col-kw :id) 6})))
|
||||||
(is (= 6 (count (sql/query (ds) ["select * from fruit"]))))
|
(is (= 6 (count (sql/query (ds) ["select * from fruit"]))))
|
||||||
(is (= {:next.jdbc/update-count 2}
|
(is (= (update-count 2)
|
||||||
(sql/delete! (ds) :fruit ["id > ?" 4])))
|
(sql/delete! (ds) :fruit [(str (index) " > ?") 4])))
|
||||||
(is (= 4 (count (sql/query (ds) ["select * from fruit"])))))
|
(is (= 4 (count (sql/query (ds) ["select * from fruit"])))))
|
||||||
(testing "multiple insert/delete with sequential cols/rows" ; per #43
|
(testing "multiple insert/delete with sequential cols/rows" ; per #43
|
||||||
(is (= (cond (derby?)
|
(is (= (cond (derby?)
|
||||||
[nil] ; WTF Apache Derby?
|
[nil] ; WTF Apache Derby?
|
||||||
(mssql?)
|
(mssql?)
|
||||||
[11M]
|
[11M]
|
||||||
(sqlite?)
|
(maria?)
|
||||||
[11]
|
[9]
|
||||||
|
(xtdb?)
|
||||||
|
[]
|
||||||
:else
|
:else
|
||||||
[9 10 11])
|
[9 10 11])
|
||||||
(mapv new-key
|
(mapv new-key
|
||||||
(sql/insert-multi! (ds) :fruit
|
(sql/insert-multi! (ds) :fruit
|
||||||
'(:name :appearance :cost :grade)
|
(cond->> '(:name :appearance :cost :grade)
|
||||||
'(("Kiwi" "green & fuzzy" 100 99.9)
|
(xtdb?) (cons :_id))
|
||||||
("Grape" "black" 10 50)
|
(cond->> '(("Kiwi" "green & fuzzy" 100 99.9)
|
||||||
("Lemon" "yellow" 20 9.9))))))
|
("Grape" "black" 10 50)
|
||||||
|
("Lemon" "yellow" 20 9.9))
|
||||||
|
(xtdb?)
|
||||||
|
(map cons [9 10 11]))
|
||||||
|
{:suffix
|
||||||
|
(when (sqlite?)
|
||||||
|
"RETURNING *")}))))
|
||||||
(is (= 7 (count (sql/query (ds) ["select * from fruit"]))))
|
(is (= 7 (count (sql/query (ds) ["select * from fruit"]))))
|
||||||
(is (= {:next.jdbc/update-count 1}
|
(is (= (update-count 1)
|
||||||
(sql/delete! (ds) :fruit {:id 9})))
|
(sql/delete! (ds) :fruit {(col-kw :id) 9})))
|
||||||
(is (= 6 (count (sql/query (ds) ["select * from fruit"]))))
|
(is (= 6 (count (sql/query (ds) ["select * from fruit"]))))
|
||||||
(is (= {:next.jdbc/update-count 2}
|
(is (= (update-count 2)
|
||||||
(sql/delete! (ds) :fruit ["id > ?" 4])))
|
(sql/delete! (ds) :fruit [(str (index) " > ?") 4])))
|
||||||
(is (= 4 (count (sql/query (ds) ["select * from fruit"])))))
|
(is (= 4 (count (sql/query (ds) ["select * from fruit"])))))
|
||||||
(testing "empty insert-multi!" ; per #44
|
(testing "multiple insert/delete with maps"
|
||||||
|
(is (= (cond (derby?)
|
||||||
|
[nil] ; WTF Apache Derby?
|
||||||
|
(mssql?)
|
||||||
|
[14M]
|
||||||
|
(maria?)
|
||||||
|
[12]
|
||||||
|
(xtdb?)
|
||||||
|
[]
|
||||||
|
:else
|
||||||
|
[12 13 14])
|
||||||
|
(mapv new-key
|
||||||
|
(sql/insert-multi! (ds) :fruit
|
||||||
|
(cond->> [{:name "Kiwi"
|
||||||
|
:appearance "green & fuzzy"
|
||||||
|
:cost 100
|
||||||
|
:grade 99.9}
|
||||||
|
{:name "Grape"
|
||||||
|
:appearance "black"
|
||||||
|
:cost 10
|
||||||
|
:grade 50}
|
||||||
|
{:name "Lemon"
|
||||||
|
:appearance "yellow"
|
||||||
|
:cost 20
|
||||||
|
:grade 9.9}]
|
||||||
|
(xtdb?)
|
||||||
|
(map #(assoc %2 :_id %1) [12 13 14]))
|
||||||
|
{:suffix
|
||||||
|
(when (sqlite?)
|
||||||
|
"RETURNING *")}))))
|
||||||
|
(is (= 7 (count (sql/query (ds) ["select * from fruit"]))))
|
||||||
|
(is (= (update-count 1)
|
||||||
|
(sql/delete! (ds) :fruit {(col-kw :id) 12})))
|
||||||
|
(is (= 6 (count (sql/query (ds) ["select * from fruit"]))))
|
||||||
|
(is (= (update-count 2)
|
||||||
|
(sql/delete! (ds) :fruit [(str (index) " > ?") 10])))
|
||||||
|
(is (= 4 (count (sql/query (ds) ["select * from fruit"])))))
|
||||||
|
(testing "empty insert-multi!" ; per #44 and #264
|
||||||
(is (= [] (sql/insert-multi! (ds) :fruit
|
(is (= [] (sql/insert-multi! (ds) :fruit
|
||||||
[:name :appearance :cost :grade]
|
[:name :appearance :cost :grade]
|
||||||
[]))))))
|
[]
|
||||||
|
{:suffix
|
||||||
|
(when (sqlite?)
|
||||||
|
"RETURNING *")})))
|
||||||
|
;; per #264 the following should all be legal too:
|
||||||
|
(is (= [] (sql/insert-multi! (ds) :fruit
|
||||||
|
[]
|
||||||
|
{:suffix
|
||||||
|
(when (sqlite?)
|
||||||
|
"RETURNING *")})))
|
||||||
|
(is (= [] (sql/insert-multi! (ds) :fruit
|
||||||
|
[]
|
||||||
|
[]
|
||||||
|
{:suffix
|
||||||
|
(when (sqlite?)
|
||||||
|
"RETURNING *")})))
|
||||||
|
(is (= [] (sql/insert-multi! (ds) :fruit [])))
|
||||||
|
(is (= [] (sql/insert-multi! (ds) :fruit [] []))))))
|
||||||
|
|
||||||
(deftest no-empty-example-maps
|
(deftest no-empty-example-maps
|
||||||
(is (thrown? clojure.lang.ExceptionInfo
|
(is (thrown? clojure.lang.ExceptionInfo
|
||||||
|
|
@ -142,13 +270,23 @@
|
||||||
(is (thrown? clojure.lang.ExceptionInfo
|
(is (thrown? clojure.lang.ExceptionInfo
|
||||||
(sql/insert-multi! (ds) :fruit [] [[] [] []]))))
|
(sql/insert-multi! (ds) :fruit [] [[] [] []]))))
|
||||||
|
|
||||||
|
(deftest no-mismatched-columns
|
||||||
|
(is (thrown? IllegalArgumentException
|
||||||
|
(sql/insert-multi! (ds) :fruit [{:name "Apple"} {:cost 1.23}]))))
|
||||||
|
|
||||||
(deftest no-empty-order-by
|
(deftest no-empty-order-by
|
||||||
(is (thrown? clojure.lang.ExceptionInfo
|
(is (thrown? clojure.lang.ExceptionInfo
|
||||||
(sql/find-by-keys (ds) :fruit
|
(sql/find-by-keys (ds) :fruit
|
||||||
{:name "Apple"}
|
{:name "Apple"}
|
||||||
{:order-by []}))))
|
{:order-by []}))))
|
||||||
|
|
||||||
(deftest array-in
|
(deftest array-in
|
||||||
(when (postgres?)
|
(when (postgres?)
|
||||||
(let [data (sql/find-by-keys (ds) :fruit ["id = any(?)" (int-array [1 2 3 4])])]
|
(let [data (sql/find-by-keys (ds) :fruit [(str (index) " = any(?)") (int-array [1 2 3 4])])]
|
||||||
(is (= 4 (count data))))))
|
(is (= 4 (count data))))))
|
||||||
|
|
||||||
|
(deftest enum-pg
|
||||||
|
(when (postgres?)
|
||||||
|
(let [r (sql/insert! (ds) :lang-test {:lang (as-other "fr")}
|
||||||
|
jdbc/snake-kebab-opts)]
|
||||||
|
(is (= {:lang-test/lang "fr"} r)))))
|
||||||
|
|
|
||||||
|
|
@ -1,4 +1,4 @@
|
||||||
;; copyright (c) 2019-2020 Sean Corfield, all rights reserved
|
;; copyright (c) 2019-2024 Sean Corfield, all rights reserved
|
||||||
|
|
||||||
(ns next.jdbc.test-fixtures
|
(ns next.jdbc.test-fixtures
|
||||||
"Multi-database testing fixtures."
|
"Multi-database testing fixtures."
|
||||||
|
|
@ -6,8 +6,7 @@
|
||||||
[next.jdbc :as jdbc]
|
[next.jdbc :as jdbc]
|
||||||
[next.jdbc.prepare :as prep]
|
[next.jdbc.prepare :as prep]
|
||||||
[next.jdbc.sql :as sql])
|
[next.jdbc.sql :as sql])
|
||||||
(:import (com.opentable.db.postgres.embedded EmbeddedPostgres)
|
(:import (io.zonky.test.db.postgres.embedded EmbeddedPostgres)))
|
||||||
(javax.sql DataSource)))
|
|
||||||
|
|
||||||
(set! *warn-on-reflection* true)
|
(set! *warn-on-reflection* true)
|
||||||
|
|
||||||
|
|
@ -22,53 +21,110 @@
|
||||||
(def ^:private test-sqlite {:dbtype "sqlite" :dbname "clojure_test_sqlite"})
|
(def ^:private test-sqlite {:dbtype "sqlite" :dbname "clojure_test_sqlite"})
|
||||||
|
|
||||||
;; this is just a dummy db-spec -- it's handled in with-test-db below
|
;; this is just a dummy db-spec -- it's handled in with-test-db below
|
||||||
(def ^:private test-postgres {:dbtype "embedded-postgres"})
|
(def ^:private test-postgres-map {:dbtype "embedded-postgres" :dbname "clojure_test"})
|
||||||
|
(def ^:private test-postgres
|
||||||
|
(when-not (System/getenv "NEXT_JDBC_NO_POSTGRES") test-postgres-map))
|
||||||
;; it takes a while to spin up so we kick it off at startup
|
;; it takes a while to spin up so we kick it off at startup
|
||||||
(defonce embedded-pg (future (EmbeddedPostgres/start)))
|
(defonce embedded-pg (when test-postgres (future (EmbeddedPostgres/start))))
|
||||||
|
|
||||||
(def ^:private test-mysql-map
|
(def ^:private test-mysql-map
|
||||||
(merge (if (System/getenv "NEXT_JDBC_TEST_MARIADB")
|
(merge (if (System/getenv "NEXT_JDBC_TEST_MARIADB")
|
||||||
{:dbtype "mariadb"}
|
{:dbtype "mariadb"}
|
||||||
{:dbtype "mysql" :disableMariaDbDriver true})
|
{:dbtype "mysql" :disableMariaDbDriver true})
|
||||||
{:dbname "clojure_test" :useSSL false
|
{:dbname "clojure_test" :useSSL false :allowMultiQueries true
|
||||||
:user "root" :password (System/getenv "MYSQL_ROOT_PASSWORD")}))
|
:user "root" :password (System/getenv "MYSQL_ROOT_PASSWORD")}))
|
||||||
(def ^:private test-mysql
|
(def ^:private test-mysql
|
||||||
(when (System/getenv "NEXT_JDBC_TEST_MYSQL") test-mysql-map))
|
(when (System/getenv "NEXT_JDBC_TEST_MYSQL") test-mysql-map))
|
||||||
|
|
||||||
|
(defn- create-clojure-test []
|
||||||
|
(when test-mysql
|
||||||
|
(let [mysql (assoc test-mysql :dbname "mysql")]
|
||||||
|
(println "Creating clojure-test database in MySQL...")
|
||||||
|
(loop [n 0]
|
||||||
|
(when (try
|
||||||
|
(jdbc/execute-one! mysql ["create database if not exists clojure_test"])
|
||||||
|
false ; done
|
||||||
|
(catch Throwable t
|
||||||
|
(when (< 10 n) (throw t))
|
||||||
|
(println "\t" (ex-message t) "(will retry)")
|
||||||
|
(Thread/sleep 3000)
|
||||||
|
true))
|
||||||
|
(recur (inc n))))
|
||||||
|
(println "...done!"))))
|
||||||
|
|
||||||
(def ^:private test-mssql-map
|
(def ^:private test-mssql-map
|
||||||
{:dbtype "mssql" :dbname "model"
|
{:dbtype "mssql" :dbname "model" :encrypt false :trustServerCertificate true
|
||||||
:user "sa" :password (System/getenv "MSSQL_SA_PASSWORD")})
|
:user "sa" :password (System/getenv "MSSQL_SA_PASSWORD")})
|
||||||
(def ^:private test-mssql
|
(def ^:private test-mssql
|
||||||
(when (System/getenv "NEXT_JDBC_TEST_MSSQL") test-mssql-map))
|
(when (System/getenv "NEXT_JDBC_TEST_MSSQL") test-mssql-map))
|
||||||
|
|
||||||
|
(def ^:private test-jtds-map
|
||||||
|
{:dbtype "jtds" :dbname "model"
|
||||||
|
:user "sa" :password (System/getenv "MSSQL_SA_PASSWORD")})
|
||||||
|
(def ^:private test-jtds
|
||||||
|
(when (System/getenv "NEXT_JDBC_TEST_MSSQL") test-jtds-map))
|
||||||
|
|
||||||
|
(def ^:private test-xtdb-map {:dbtype "xtdb" :dbname "xtdb"})
|
||||||
|
|
||||||
|
(def ^:private test-xtdb
|
||||||
|
(when (and (System/getenv "NEXT_JDBC_TEST_XTDB")
|
||||||
|
;; only if we're on jdk21+
|
||||||
|
(str/starts-with? (System/getProperty "java.version") "2"))
|
||||||
|
test-xtdb-map))
|
||||||
|
|
||||||
(def ^:private test-db-specs
|
(def ^:private test-db-specs
|
||||||
(cond-> [test-derby test-h2-mem test-h2 test-hsql test-sqlite test-postgres]
|
(cond-> [test-derby test-h2-mem test-h2 test-hsql test-sqlite]
|
||||||
test-mysql (conj test-mysql)
|
test-postgres (conj test-postgres)
|
||||||
test-mssql (conj test-mssql)))
|
test-mysql (conj test-mysql)
|
||||||
|
test-mssql (conj test-mssql test-jtds)
|
||||||
|
test-xtdb (conj test-xtdb)))
|
||||||
|
|
||||||
(def ^:private test-db-spec (atom nil))
|
(def ^:private test-db-spec (atom nil))
|
||||||
|
|
||||||
(defn derby? [] (= "derby" (:dbtype @test-db-spec)))
|
(defn derby? [] (= "derby" (:dbtype @test-db-spec)))
|
||||||
|
|
||||||
|
(defn h2? [] (str/starts-with? (:dbtype @test-db-spec) "h2"))
|
||||||
|
|
||||||
|
(defn hsqldb? [] (= "hsqldb" (:dbtype @test-db-spec)))
|
||||||
|
|
||||||
|
(defn jtds? [] (= "jtds" (:dbtype @test-db-spec)))
|
||||||
|
|
||||||
(defn maria? [] (= "mariadb" (:dbtype @test-db-spec)))
|
(defn maria? [] (= "mariadb" (:dbtype @test-db-spec)))
|
||||||
|
|
||||||
(defn mssql? [] (= "mssql" (:dbtype @test-db-spec)))
|
(defn mssql? [] (#{"jtds" "mssql"} (:dbtype @test-db-spec)))
|
||||||
|
|
||||||
(defn mysql? [] (#{"mariadb" "mysql"} (:dbtype @test-db-spec)))
|
(defn mysql? [] (#{"mariadb" "mysql"} (:dbtype @test-db-spec)))
|
||||||
|
|
||||||
(defn postgres? [] (= "embedded-postgres" (:dbtype @test-db-spec)))
|
(defn postgres? [] (= "embedded-postgres" (:dbtype @test-db-spec)))
|
||||||
|
|
||||||
|
(defn xtdb? [] (= "xtdb" (:dbtype @test-db-spec)))
|
||||||
|
|
||||||
(defn sqlite? [] (= "sqlite" (:dbtype @test-db-spec)))
|
(defn sqlite? [] (= "sqlite" (:dbtype @test-db-spec)))
|
||||||
|
|
||||||
|
(defn stored-proc? [] (not (#{"derby" "h2" "h2:mem" "sqlite" "xtdb"}
|
||||||
|
(:dbtype @test-db-spec))))
|
||||||
|
|
||||||
(defn column [k]
|
(defn column [k]
|
||||||
(let [n (namespace k)]
|
(let [n (namespace k)]
|
||||||
(keyword (when n (cond (postgres?) (str/lower-case n)
|
(keyword (when n (cond (postgres?) (str/lower-case n)
|
||||||
(mssql?) (str/lower-case n)
|
(mssql?) (str/lower-case n)
|
||||||
(mysql?) (str/lower-case n)
|
(mysql?) (str/lower-case n)
|
||||||
|
(xtdb?) nil
|
||||||
:else n))
|
:else n))
|
||||||
(cond (postgres?) (str/lower-case (name k))
|
(cond (postgres?) (str/lower-case (name k))
|
||||||
|
(xtdb?) (let [c (str/lower-case (name k))]
|
||||||
|
(if (= "id" c) "_id" c))
|
||||||
:else (name k)))))
|
:else (name k)))))
|
||||||
|
|
||||||
|
(defn index []
|
||||||
|
(if (xtdb?) "_id" "id"))
|
||||||
|
|
||||||
|
(defn col-kw [k]
|
||||||
|
(if (xtdb?)
|
||||||
|
(let [n (name k)]
|
||||||
|
(if (= "id" (str/lower-case n)) :_id (keyword n)))
|
||||||
|
k))
|
||||||
|
|
||||||
(defn default-options []
|
(defn default-options []
|
||||||
(if (mssql?) ; so that we get table names back from queries
|
(if (mssql?) ; so that we get table names back from queries
|
||||||
{:result-type :scroll-insensitive :concurrency :read-only}
|
{:result-type :scroll-insensitive :concurrency :read-only}
|
||||||
|
|
@ -86,6 +142,16 @@
|
||||||
[]
|
[]
|
||||||
@test-datasource)
|
@test-datasource)
|
||||||
|
|
||||||
|
(defn- do-commands
|
||||||
|
"Example from migration docs: this serves as a test for it."
|
||||||
|
[connectable commands]
|
||||||
|
(if (instance? java.sql.Connection connectable)
|
||||||
|
(with-open [stmt (prep/statement connectable)]
|
||||||
|
(run! #(.addBatch stmt %) commands)
|
||||||
|
(into [] (.executeBatch stmt)))
|
||||||
|
(with-open [conn (jdbc/get-connection connectable)]
|
||||||
|
(do-commands conn commands))))
|
||||||
|
|
||||||
(defn with-test-db
|
(defn with-test-db
|
||||||
"Given a test function (or suite), run it in the context of an in-memory
|
"Given a test function (or suite), run it in the context of an in-memory
|
||||||
H2 database set up with a simple fruit table containing four rows of data.
|
H2 database set up with a simple fruit table containing four rows of data.
|
||||||
|
|
@ -100,8 +166,9 @@
|
||||||
(.getPostgresDatabase ^EmbeddedPostgres @embedded-pg))
|
(.getPostgresDatabase ^EmbeddedPostgres @embedded-pg))
|
||||||
(reset! test-datasource (jdbc/get-datasource db)))
|
(reset! test-datasource (jdbc/get-datasource db)))
|
||||||
(let [fruit (if (mysql?) "fruit" "FRUIT") ; MySQL is case sensitive!
|
(let [fruit (if (mysql?) "fruit" "FRUIT") ; MySQL is case sensitive!
|
||||||
|
btest (if (mysql?) "btest" "BTEST")
|
||||||
auto-inc-pk
|
auto-inc-pk
|
||||||
(cond (or (derby?) (= "hsqldb" (:dbtype db)))
|
(cond (or (derby?) (hsqldb?))
|
||||||
(str "GENERATED ALWAYS AS IDENTITY"
|
(str "GENERATED ALWAYS AS IDENTITY"
|
||||||
" (START WITH 1, INCREMENT BY 1)"
|
" (START WITH 1, INCREMENT BY 1)"
|
||||||
" PRIMARY KEY")
|
" PRIMARY KEY")
|
||||||
|
|
@ -115,10 +182,54 @@
|
||||||
:else
|
:else
|
||||||
"AUTO_INCREMENT PRIMARY KEY")]
|
"AUTO_INCREMENT PRIMARY KEY")]
|
||||||
(with-open [con (jdbc/get-connection (ds))]
|
(with-open [con (jdbc/get-connection (ds))]
|
||||||
(try
|
(if (xtdb?) ; no DDL for creation
|
||||||
(jdbc/execute-one! con [(str "DROP TABLE " fruit)])
|
(do
|
||||||
(catch Exception _))
|
(try
|
||||||
(jdbc/execute-one! con [(str "
|
(do-commands con ["ERASE FROM fruit WHERE true"])
|
||||||
|
(catch Throwable _))
|
||||||
|
(try
|
||||||
|
(do-commands con ["ERASE FROM btest WHERE true"])
|
||||||
|
(catch Throwable _))
|
||||||
|
(sql/insert-multi! con :fruit
|
||||||
|
[:_id :name :appearance :cost]
|
||||||
|
[[1 "Apple" "red" 59]]
|
||||||
|
{:return-keys false})
|
||||||
|
(sql/insert-multi! con :fruit
|
||||||
|
[:_id :name :appearance :grade]
|
||||||
|
[[2 "Banana" "yellow" 92.2]]
|
||||||
|
{:return-keys false})
|
||||||
|
(sql/insert-multi! con :fruit
|
||||||
|
[:_id :name :cost :grade]
|
||||||
|
[[3 "Peach" 139 90.0]]
|
||||||
|
{:return-keys false})
|
||||||
|
(sql/insert-multi! con :fruit
|
||||||
|
[:_id :name :appearance :cost :grade]
|
||||||
|
[[4 "Orange" "juicy" 89 88.6]]
|
||||||
|
{:return-keys false}))
|
||||||
|
(do
|
||||||
|
(when (stored-proc?)
|
||||||
|
(try
|
||||||
|
(jdbc/execute-one! con ["DROP PROCEDURE FRUITP"])
|
||||||
|
(catch Throwable _)))
|
||||||
|
(try
|
||||||
|
(do-commands con [(str "DROP TABLE " fruit)])
|
||||||
|
(catch Exception _))
|
||||||
|
(try
|
||||||
|
(do-commands con [(str "DROP TABLE " btest)])
|
||||||
|
(catch Exception _))
|
||||||
|
(when (postgres?)
|
||||||
|
(try
|
||||||
|
(do-commands con ["DROP TABLE LANG_TEST"])
|
||||||
|
(catch Exception _))
|
||||||
|
(try
|
||||||
|
(do-commands con ["DROP TYPE LANGUAGE"])
|
||||||
|
(catch Exception _))
|
||||||
|
(do-commands con ["CREATE TYPE LANGUAGE AS ENUM('en','fr','de')"])
|
||||||
|
(do-commands con ["
|
||||||
|
CREATE TABLE LANG_TEST (
|
||||||
|
LANG LANGUAGE NOT NULL
|
||||||
|
)"]))
|
||||||
|
(do-commands con [(str "
|
||||||
CREATE TABLE " fruit " (
|
CREATE TABLE " fruit " (
|
||||||
ID INTEGER " auto-inc-pk ",
|
ID INTEGER " auto-inc-pk ",
|
||||||
NAME VARCHAR(32),
|
NAME VARCHAR(32),
|
||||||
|
|
@ -126,32 +237,69 @@ CREATE TABLE " fruit " (
|
||||||
COST INT DEFAULT NULL,
|
COST INT DEFAULT NULL,
|
||||||
GRADE REAL DEFAULT NULL
|
GRADE REAL DEFAULT NULL
|
||||||
)")])
|
)")])
|
||||||
(sql/insert-multi! con :fruit
|
(let [created (atom false)]
|
||||||
[:name :appearance :cost :grade]
|
;; MS SQL Server does not support bool/boolean:
|
||||||
[["Apple" "red" 59 nil]
|
(doseq [btype ["BOOL" "BOOLEAN" "BIT"]]
|
||||||
["Banana" "yellow" nil 92.2]
|
;; Derby does not support bit:
|
||||||
["Peach" nil 139 90.0]
|
(doseq [bitty ["BIT" "SMALLINT"]]
|
||||||
["Orange" "juicy" 89 88.6]]
|
(try
|
||||||
{:return-keys false})
|
(when-not @created
|
||||||
(t)))))
|
(do-commands con [(str "
|
||||||
|
CREATE TABLE " btest " (
|
||||||
|
NAME VARCHAR(32),
|
||||||
|
IS_IT " btype ",
|
||||||
|
TWIDDLE " bitty "
|
||||||
|
)")])
|
||||||
|
(reset! created true))
|
||||||
|
(catch Throwable _))))
|
||||||
|
(when-not @created
|
||||||
|
(println (:dbtype db) "failed btest creation")
|
||||||
|
#_(throw (ex-info (str (:dbtype db) " has no boolean type?") {}))))
|
||||||
|
(when (stored-proc?)
|
||||||
|
(let [[begin end] (if (postgres?) ["$$" "$$"] ["BEGIN" "END"])]
|
||||||
|
(try
|
||||||
|
(do-commands con [(str "
|
||||||
|
CREATE PROCEDURE FRUITP" (cond (hsqldb?) "() READS SQL DATA DYNAMIC RESULT SETS 2 "
|
||||||
|
(mssql?) " AS "
|
||||||
|
(postgres?) "() LANGUAGE SQL AS "
|
||||||
|
:else "() ") "
|
||||||
|
" begin " " (if (hsqldb?)
|
||||||
|
(str "ATOMIC
|
||||||
|
DECLARE result1 CURSOR WITH RETURN FOR SELECT * FROM " fruit " WHERE COST < 90;
|
||||||
|
DECLARE result2 CURSOR WITH RETURN FOR SELECT * FROM " fruit " WHERE GRADE >= 90.0;
|
||||||
|
OPEN result1;
|
||||||
|
OPEN result2;")
|
||||||
|
(str "
|
||||||
|
SELECT * FROM " fruit " WHERE COST < 90;
|
||||||
|
SELECT * FROM " fruit " WHERE GRADE >= 90.0;")) "
|
||||||
|
" end "
|
||||||
|
")])
|
||||||
|
(catch Throwable t
|
||||||
|
(println 'procedure (:dbtype db) (ex-message t))))))
|
||||||
|
(sql/insert-multi! con :fruit
|
||||||
|
[:name :appearance :cost :grade]
|
||||||
|
[["Apple" "red" 59 nil]
|
||||||
|
["Banana" "yellow" nil 92.2]
|
||||||
|
["Peach" nil 139 90.0]
|
||||||
|
["Orange" "juicy" 89 88.6]]
|
||||||
|
{:return-keys false})))
|
||||||
|
(t)))))
|
||||||
|
|
||||||
|
(create-clojure-test)
|
||||||
|
|
||||||
(comment
|
(comment
|
||||||
;; this is a convenience to bring next.jdbc's test dependencies
|
;; this is a convenience to bring next.jdbc's test dependencies
|
||||||
;; into any REPL that has the add-lib branch of tools.deps.alpha
|
;; into any REPL running Clojure 1.12.0's new add-libs API
|
||||||
;; which allows me to develop and test next.jdbc inside my work's
|
;; which allows me to develop and test next.jdbc inside my work's
|
||||||
;; "everything" REBL environment
|
;; "everything" REPL environment
|
||||||
(require '[clojure.tools.deps.alpha.repl :refer [add-lib]]
|
(require '[clojure.repl.deps :refer [add-libs]]
|
||||||
'[clojure.java.io :as io]
|
|
||||||
'[clojure.edn :as edn])
|
'[clojure.edn :as edn])
|
||||||
(def repo-path "/Developer/workspace/next.jdbc")
|
(def test-deps (-> (slurp "https://raw.githubusercontent.com/seancorfield/next-jdbc/develop/deps.edn")
|
||||||
(def test-deps (-> (io/reader (str repo-path "/deps.edn"))
|
(edn/read-string)
|
||||||
(java.io.PushbackReader.)
|
|
||||||
(edn/read)
|
|
||||||
:aliases
|
:aliases
|
||||||
:test
|
:test
|
||||||
:extra-deps))
|
:extra-deps))
|
||||||
(doseq [[coord version] test-deps]
|
(add-libs test-deps)
|
||||||
(add-lib coord version))
|
|
||||||
;; now you can load this file... and then you can load other test
|
;; now you can load this file... and then you can load other test
|
||||||
;; files and run their tests as needed... which will leave (ds)
|
;; files and run their tests as needed... which will leave (ds)
|
||||||
;; set to the embedded PostgreSQL datasource -- reset it with this:
|
;; set to the embedded PostgreSQL datasource -- reset it with this:
|
||||||
|
|
|
||||||
|
|
@ -1,8 +1,10 @@
|
||||||
;; copyright (c) 2019-2020 Sean Corfield, all rights reserved
|
;; copyright (c) 2019-2025 Sean Corfield, all rights reserved
|
||||||
|
|
||||||
(ns next.jdbc.transaction-test
|
(ns next.jdbc.transaction-test
|
||||||
"Stub test namespace for transaction handling."
|
"Stub test namespace for transaction handling."
|
||||||
(:require [clojure.test :refer [deftest is testing]]
|
(:require [next.jdbc.specs :as specs]
|
||||||
[next.jdbc.transaction :refer :all]))
|
[next.jdbc.transaction]))
|
||||||
|
|
||||||
(set! *warn-on-reflection* true)
|
(set! *warn-on-reflection* true)
|
||||||
|
|
||||||
|
(specs/instrument)
|
||||||
|
|
|
||||||
19
test/next/jdbc/types_test.clj
Normal file
19
test/next/jdbc/types_test.clj
Normal file
|
|
@ -0,0 +1,19 @@
|
||||||
|
;; copyright (c) 2020-2025 Sean Corfield, all rights reserved
|
||||||
|
|
||||||
|
(ns next.jdbc.types-test
|
||||||
|
"Some tests for the type-assist functions."
|
||||||
|
(:require [lazytest.core :refer [defdescribe describe it expect]]
|
||||||
|
[next.jdbc.types :refer [as-varchar]]))
|
||||||
|
|
||||||
|
(set! *warn-on-reflection* true)
|
||||||
|
|
||||||
|
(defdescribe as-varchar-tests
|
||||||
|
(let [v (as-varchar "Hello")]
|
||||||
|
(describe "produces a function"
|
||||||
|
(it "yields the original value when invoked"
|
||||||
|
(expect (fn? v))
|
||||||
|
(expect (= "Hello" (v)))))
|
||||||
|
(describe "carries metadata"
|
||||||
|
(it "has a `set-parameter` function"
|
||||||
|
(expect (contains? (meta v) 'next.jdbc.prepare/set-parameter))
|
||||||
|
(expect (fn? (get (meta v) 'next.jdbc.prepare/set-parameter)))))))
|
||||||
File diff suppressed because it is too large
Load diff
Loading…
Reference in a new issue